comment
stringlengths
1
45k
method_body
stringlengths
23
281k
target_code
stringlengths
0
5.16k
method_body_after
stringlengths
12
281k
context_before
stringlengths
8
543k
context_after
stringlengths
8
543k
yes, 255 becomes -1, fixed in test. not sure what you want with inversion, I can easily make this: ``` input: tensor<float>(d1[6],d2[8]):[[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0], [0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0], [0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0], [1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]] packed: tensor<int8>(dt{},x[1]):{0:1.0, 1:5.0, 2:3.0, 3:127.0, 4:-128.0, 5:-1.0} unpacked: tensor<float>(dt{},x[8]):{0:[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0], 1:[0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0], 2:[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0], 3:[0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0], 4:[1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], 5:[1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]} input: tensor<float>(d1[2],d2[16]):[[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0]] packed: tensor<int8>(dt{},x[2]):{0:[1.0, -128.0], 1:[5.0, 1.0]} unpacked: tensor<float>(dt{},x[16]):{0:[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], 1:[0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0]} ```
public void testUnpack() { EvaluationTester tester = new EvaluationTester(); tester.assertEvaluates("tensor<float>(a{},x[16]):{foo:[" + "0,0,0,0, 0,0,0,0," + "1,1,1,1, 1,1,1,1" + "],bar:[" + "0,0,0,0, 0,0,0,1," + "1,1,1,1, 1,0,0,0]}", "unpack_bits_from_int8(tensor0, float, big)", "tensor<int8>(a{},x[2]):{foo:[0,255],bar:[1,-8]}"); tester.assertEvaluates("tensor<int8>(a{},x[16]):{foo:[" + "0,0,0,0, 0,0,0,0," + "1,1,1,1, 1,1,1,1" + "],bar:[" + "1,0,0,0, 0,0,0,0," + "0,0,0,1, 1,1,1,1]}", "unpack_bits_from_int8(tensor0, int8, little)", "tensor<int8>(a{},x[2]):{foo:[0,255],bar:[1,-8]}"); }
"tensor<int8>(a{},x[2]):{foo:[0,255],bar:[1,-8]}");
public void testUnpack() { EvaluationTester tester = new EvaluationTester(); tester.assertEvaluates("tensor<float>(a{},x[16]):{foo:[" + "0,0,0,0, 0,0,0,0," + "1,1,1,1, 1,1,1,1" + "],bar:[" + "0,0,0,0, 0,0,0,1," + "1,1,1,1, 1,0,0,0]}", "unpack_bits(tensor0, float, big)", "tensor<int8>(a{},x[2]):{foo:[0,-1],bar:[1,-8]}"); tester.assertEvaluates("tensor<int8>(a{},x[16]):{foo:[" + "0,0,0,0, 0,0,0,0," + "1,1,1,1, 1,1,1,1" + "],bar:[" + "1,0,0,0, 0,0,0,0," + "0,0,0,1, 1,1,1,1]}", "unpack_bits(tensor0, int8, little)", "tensor<int8>(a{},x[2]):{foo:[0,-1],bar:[1,-8]}"); }
class EvaluationTestCase { private final double tolerance = 0.000001; private void verifyStringValueToString(String s) { s = '"' + s + '"'; Value val = Value.parse(s); assertTrue(val instanceof StringValue); assertEquals(s, val.toString()); } @Test public void testStringValueToString() { verifyStringValueToString(""); verifyStringValueToString("something"); verifyStringValueToString("needs \\\" escape"); verifyStringValueToString("\\\\"); verifyStringValueToString("\\\""); verifyStringValueToString("\\f"); verifyStringValueToString("\\female"); verifyStringValueToString("\\n"); verifyStringValueToString("\\nude"); verifyStringValueToString("\\r"); verifyStringValueToString("fa\\rt"); verifyStringValueToString("\\t"); verifyStringValueToString("fe\\tish"); verifyStringValueToString("\\f"); verifyStringValueToString("\\\\hx"); verifyStringValueToString("\\\\xx"); verifyStringValueToString("\\\\x10081977"); } @Test public void testEvaluationOrder() { EvaluationTester tester = new EvaluationTester(); tester.assertEvaluates(-4, "1 + -2 + -3"); tester.assertEvaluates(2, "1 - (2 - 3)"); tester.assertEvaluates(-4, "(1 - 2) - 3"); tester.assertEvaluates(-4, "1 - 2 - 3"); } @Test public void testEvaluation() { EvaluationTester tester = new EvaluationTester(); tester.assertEvaluates(0.5, "0.5"); tester.assertEvaluates(-0.5, "-0.5"); tester.assertEvaluates(0.5, "one_half"); tester.assertEvaluates(-0.5, "-one_half"); tester.assertEvaluates(0, "nonexisting"); tester.assertEvaluates(0.75, "0.5 + 0.25"); tester.assertEvaluates(0.75, "one_half + a_quarter"); tester.assertEvaluates(1.25, "0.5 - 0.25 + one"); tester.assertEvaluates(9.0, "3 ^ 2"); tester.assertEvaluates(1, "if(\"a\"==\"a\",1,0)"); tester.assertEvaluates(26, "2*3+4*5"); tester.assertEvaluates(1, "2/6+4/6"); tester.assertEvaluates(2 * 3 * 4 + 3 * 4 * 5 - 4 * 200 / 10, "2*3*4+3*4*5-4*200/10"); tester.assertEvaluates(3, "1 + 10 % 6 / 2"); tester.assertEvaluates(10.0, "3 ^ 2 + 1"); tester.assertEvaluates(18.0, "2 * 3 ^ 
2"); tester.assertEvaluates(-4, "1 - 2 - 3"); tester.assertEvaluates(Math.pow(4, 9), "4^3^2"); tester.assertEvaluates(2 * (3 * 4 + 3) * (4 * 5 - 4 * 200) / 10, "2*(3*4+3)*(4*5-4*200)/10"); tester.assertEvaluates(0.5, "if( 2<3, one_half, one_quarter)"); tester.assertEvaluates(0.25,"if( 2>3, one_half, a_quarter)"); tester.assertEvaluates(0.5, "if( 1==1, one_half, a_quarter)"); tester.assertEvaluates(0.5, "if( 1<=1, one_half, a_quarter)"); tester.assertEvaluates(0.5, "if( 1<=1.1, one_half, a_quarter)"); tester.assertEvaluates(0.25,"if( 1>=1.1, one_half, a_quarter)"); tester.assertEvaluates(0.5, "if( 0.33333333333333333333~=1/3, one_half, a_quarter)"); tester.assertEvaluates(0.25,"if( 0.33333333333333333333~=1/35, one_half, a_quarter)"); tester.assertEvaluates(5.5, "if(one_half in [one_quarter,one_half], one_half+5,log(one_quarter) * one_quarter)"); tester.assertEvaluates(0.5, "if( 1 in [1,2 , 3], one_half, a_quarter)"); tester.assertEvaluates(0.25,"if( 1 in [ 2,3,4], one_half, a_quarter)"); tester.assertEvaluates(0.5, "if( \"foo\" in [\"foo\",\"bar\"], one_half, a_quarter)"); tester.assertEvaluates(0.5, "if( foo in [\"foo\",\"bar\"], one_half, a_quarter)"); tester.assertEvaluates(0.5, "if( \"foo\" in [foo,\"bar\"], one_half, a_quarter)"); tester.assertEvaluates(0.5, "if( foo in [foo,\"bar\"], one_half, a_quarter)"); tester.assertEvaluates(0.25,"if( \"foo\" in [\"baz\",\"boz\"], one_half, a_quarter)"); tester.assertEvaluates(0.5, "if( one in [0, 1, 2], one_half, a_quarter)"); tester.assertEvaluates(0.25,"if( one in [2], one_half, a_quarter)"); tester.assertEvaluates(2.5, "if(1.0, 2.5, 3.5)"); tester.assertEvaluates(3.5, "if(0.0, 2.5, 3.5)"); tester.assertEvaluates(2.5, "if(1.0-1.1, 2.5, 3.5)"); tester.assertEvaluates(3.5, "if(1.0-1.0, 2.5, 3.5)"); RankingExpression e = tester.assertEvaluates(3.5, "if(1.0-1.0, 2.5, 3.5, 0.3)"); assertEquals(0.3d, ((IfNode) e.getRoot()).getTrueProbability(), tolerance); tester.assertEvaluates(new BooleanValue(true), "2<3"); 
tester.assertEvaluates(new BooleanValue(false), "2>3"); tester.assertEvaluates(new BooleanValue(false), "if (3>2, 2>3, 5.0)"); tester.assertEvaluates(new BooleanValue(true), "2>3<1"); tester.assertEvaluates(2.5, "if(2>3<1, 2.5, 3.5)"); tester.assertEvaluates(2.5, "if(1+1>3<1+0, 2.5, 3.5)"); tester.assertEvaluates(0, "sin(0)"); tester.assertEvaluates(1, "cos(0)"); tester.assertEvaluates(8, "pow(4/2,min(cos(0)*3,5))"); tester.assertEvaluates(0, "random(1)"); tester.assertEvaluates(0, "random(foo)"); tester.assertEvaluates(1.25, "5*if(1>=1.1, one_half, if(min(1,2)<max(1,2),if (\"foo\" in [\"foo\",\"bar\"],a_quarter,3000), 0.57345347))"); } @Test public void testBooleanEvaluation() { EvaluationTester tester = new EvaluationTester(); tester.assertEvaluates(false, "false"); tester.assertEvaluates(true, "true"); tester.assertEvaluates(false, "0 && 0"); tester.assertEvaluates(false, "0 && 1"); tester.assertEvaluates(false, "1 && 0"); tester.assertEvaluates(true, "1 && 1"); tester.assertEvaluates(true, "1 && 2"); tester.assertEvaluates(true, "1 && 0.1"); tester.assertEvaluates(false, "0 || 0"); tester.assertEvaluates(true, "0 || 0.1"); tester.assertEvaluates(true, "0 || 1"); tester.assertEvaluates(true, "1 || 0"); tester.assertEvaluates(true, "1 || 1"); tester.assertEvaluates(true, "!0"); tester.assertEvaluates(false, "!1"); tester.assertEvaluates(false, "!2"); tester.assertEvaluates(true, "!0 && 1"); tester.assertEvaluates(0, "2 * (0 && 1)"); tester.assertEvaluates(2, "2 * (1 && 1)"); tester.assertEvaluates(true, "2 + 0 && 1"); tester.assertEvaluates(true, "1 && 0 + 2"); } @Test @Test public void testMapSubspaces() { EvaluationTester tester = new EvaluationTester(); tester.assertEvaluates("tensor<float>(a{},x[2]):{foo:[2,3],bar:[7,10]}", "map_subspaces(tensor0, f(t)(t))", "tensor<float>(a{},x[2]):{foo:[2,3],bar:[7,10]}"); tester.assertEvaluates("tensor<float>(a{},x[2]):{foo:[2,3],bar:[7,10]}", "map_subspaces(tensor0, f(t)(t+2))", 
"tensor<float>(a{},x[2]):{foo:[0,1],bar:[5,8]}"); tester.assertEvaluates("tensor<float>(a{},y[2]):{foo:[3,5],bar:[9,11]}", "map_subspaces(tensor0, f(t)(tensor<float>(y[2])(t{x:(y)}+t{x:(y+1)})))", "tensor(a{},x[3]):{foo:[1,2,3],bar:[4,5,6]}"); tester.assertEvaluates("tensor<double>(a{},x[2]):{foo:[3,5],bar:[9,11]}", "map_subspaces(tensor0, f(t)(tensor(x[2])(t{x:(x)}+t{x:(x+1)})))", "tensor<float>(a{},x[3]):{foo:[1,2,3],bar:[4,5,6]}"); } @Test public void testTensorEvaluation() { EvaluationTester tester = new EvaluationTester(); tester.assertEvaluates("{}", "tensor0", "{}"); tester.assertEvaluates("{ {d1:0}:1, {d1:1}:2, {d1:2 }:3 }", "map(tensor0, f(x) (log10(x)))", "{ {d1:0}:10, {d1:1}:100, {d1:2}:1000 }"); tester.assertEvaluates("{ {d1:0}:4, {d1:1}:9, {d1:2 }:16 }", "map(tensor0, f(x) (x * x))", "{ {d1:0}:2, {d1:1}:3, {d1:2}:4 }"); tester.assertEvaluates("{ {d1:0}:0, {d1:1}:1, {d1:2 }:0 }", "tensor0 == 3", "{ {d1:0}:2, {d1:1}:3, {d1:2}:4 }"); tester.assertEvaluates("{ {d1:0}:0, {d1:1}:1, {d1:2 }:0 }", "3 == tensor0", "{ {d1:0}:2, {d1:1}:3, {d1:2}:4 }"); tester.assertEvaluates("{ {d1:0}:1, {d1:1}:2, {d1:2 }:3 }", "log10(tensor0)", "{ {d1:0}:10, {d1:1}:100, {d1:2}:1000 }"); tester.assertEvaluates("{ {d1:0}:-10, {d1:1}:-100, {d1:2 }:-1000 }", "- tensor0", "{ {d1:0}:10, {d1:1}:100, {d1:2}:1000 }"); tester.assertEvaluates("{ {d1:0}:-10, {d1:1}:0, {d1:2 }:0 }", "min(tensor0, 0)", "{ {d1:0}:-10, {d1:1}:0, {d1:2}:10 }"); tester.assertEvaluates("{ {d1:0}:0, {d1:1}:0, {d1:2 }:10 }", "max(tensor0, 0)", "{ {d1:0}:-10, {d1:1}:0, {d1:2}:10 }"); tester.assertEvaluates("{ {d1:0}:1, {d1:1}:1, {d1:2 }:1 }", "tensor0 % 2 == map(tensor0, f(x) (x % 2))", "{ {d1:0}:2, {d1:1}:3, {d1:2}:4 }"); tester.assertEvaluates("{ {d1:0}:1, {d1:1}:1, {d1:2 }:1 }", "(tensor0 || 1) == map(tensor0, f(x) (x || 1))", "{ {d1:0}:2, {d1:1}:3, {d1:2}:4 }"); tester.assertEvaluates("{ {d1:0}:1, {d1:1}:1, {d1:2 }:1 }", "(tensor0 && 1) == map(tensor0, f(x) (x && 1))", "{ {d1:0}:2, {d1:1}:3, {d1:2}:4 }"); 
tester.assertEvaluates("{ {d1:0}:1, {d1:1}:1, {d1:2 }:1 }", "!tensor0 == map(tensor0, f(x) (!x))", "{ {d1:0}:0, {d1:1}:1, {d1:2}:0 }"); tester.assertEvaluates("{ {x:0}:1, {x:1}:2 }", "abs(tensor0)", "{ {x:0}:1, {x:1}:-2 }"); tester.assertEvaluates("{ {x:0}:0, {x:1}:0 }", "acos(tensor0)", "{ {x:0}:1, {x:1}:1 }"); tester.assertEvaluates("{ {x:0}:0, {x:1}:0 }", "asin(tensor0)", "{ {x:0}:0, {x:1}:0 }"); tester.assertEvaluates("{ {x:0}:0, {x:1}:0 }", "atan(tensor0)", "{ {x:0}:0, {x:1}:0 }"); tester.assertEvaluates("{ {x:0}:1, {x:1}:2 }", "ceil(tensor0)", "{ {x:0}:1, {x:1}:2 }"); tester.assertEvaluates("{ {x:0}:1, {x:1}:1 }", "cos(tensor0)", "{ {x:0}:0, {x:1}:0 }"); tester.assertEvaluates("{ {x:0}:1, {x:1}:1 }", "cosh(tensor0)", "{ {x:0}:0, {x:1}:0 }"); tester.assertEvaluates("{ {x:0}:1, {x:1}:2 }", "elu(tensor0)", "{ {x:0}:1, {x:1}:2 }"); tester.assertEvaluates("{ {x:0}:1, {x:1}:1 }", "exp(tensor0)", "{ {x:0}:0, {x:1}:0 }"); tester.assertEvaluates("{ {x:0}:1, {x:1}:2 }", "fabs(tensor0)", "{ {x:0}:1, {x:1}:2 }"); tester.assertEvaluates("{ {x:0}:1, {x:1}:2 }", "floor(tensor0)", "{ {x:0}:1, {x:1}:2 }"); tester.assertEvaluates("{ {x:0}:0, {x:1}:0 }", "isNan(tensor0)", "{ {x:0}:1, {x:1}:2 }"); tester.assertEvaluates("{ {x:0}:0, {x:1}:0 }", "log(tensor0)", "{ {x:0}:1, {x:1}:1 }"); tester.assertEvaluates("{ {x:0}:0, {x:1}:1 }", "log10(tensor0)", "{ {x:0}:1, {x:1}:10 }"); tester.assertEvaluates("{ {x:0}:0, {x:1}:2 }", "fmod(tensor0, 3)","{ {x:0}:3, {x:1}:8 }"); tester.assertEvaluates("{ {x:0}:1, {x:1}:8 }", "pow(tensor0, 3)", "{ {x:0}:1, {x:1}:2 }"); tester.assertEvaluates("{ {x:0}:8, {x:1}:16 }", "ldexp(tensor0,3.1)","{ {x:0}:1, {x:1}:2 }"); tester.assertEvaluates("{ {x:0}:1, {x:1}:2 }", "relu(tensor0)", "{ {x:0}:1, {x:1}:2 }"); tester.assertEvaluates("{ {x:0}:1, {x:1}:2 }", "round(tensor0)", "{ {x:0}:1, {x:1}:1.8 }"); tester.assertEvaluates("{ {x:0}:0.5, {x:1}:0.5 }", "sigmoid(tensor0)","{ {x:0}:0, {x:1}:0 }"); tester.assertEvaluates("{ {x:0}:1, {x:1}:-1 }", "sign(tensor0)", 
"{ {x:0}:3, {x:1}:-5 }"); tester.assertEvaluates("{ {x:0}:0, {x:1}:0 }", "sin(tensor0)", "{ {x:0}:0, {x:1}:0 }"); tester.assertEvaluates("{ {x:0}:0, {x:1}:0 }", "sinh(tensor0)", "{ {x:0}:0, {x:1}:0 }"); tester.assertEvaluates("{ {x:0}:1, {x:1}:4 }", "square(tensor0)", "{ {x:0}:1, {x:1}:2 }"); tester.assertEvaluates("{ {x:0}:1, {x:1}:3 }", "sqrt(tensor0)", "{ {x:0}:1, {x:1}:9 }"); tester.assertEvaluates("{ {x:0}:0, {x:1}:0 }", "tan(tensor0)", "{ {x:0}:0, {x:1}:0 }"); tester.assertEvaluates("{ {x:0}:0, {x:1}:0 }", "tanh(tensor0)", "{ {x:0}:0, {x:1}:0 }"); tester.assertEvaluates("{ {}:4 }", "reduce(tensor0, avg, x, y)", "{ {x:0,y:0}:1.0, {x:1,y:0}:3.0, {x:0,y:1}:5.0, {x:1,y:1}:7.0 }"); tester.assertEvaluates("{ {}:4 }", "reduce(tensor0, count, x, y)", "{ {x:0,y:0}:1.0, {x:1,y:0}:3.0, {x:0,y:1}:5.0, {x:1,y:1}:7.0 }"); tester.assertEvaluates("{ {}:7 }", "reduce(tensor0, max, x, y)", "{ {x:0,y:0}:1.0, {x:1,y:0}:3.0, {x:0,y:1}:5.0, {x:1,y:1}:7.0 }"); tester.assertEvaluates("{ {}:4 }", "reduce(tensor0, median, x, y)", "{ {x:0,y:0}:1.0, {x:1,y:0}:3.0, {x:0,y:1}:5.0, {x:1,y:1}:7.0 }"); tester.assertEvaluates("{ {}:1 }", "reduce(tensor0, min, x, y)", "{ {x:0,y:0}:1.0, {x:1,y:0}:3.0, {x:0,y:1}:5.0, {x:1,y:1}:7.0 }"); tester.assertEvaluates("{ {}:105 }", "reduce(tensor0, prod, x, y)", "{ {x:0,y:0}:1.0, {x:1,y:0}:3.0, {x:0,y:1}:5.0, {x:1,y:1}:7.0 }"); tester.assertEvaluates("{ {}:16 }", "reduce(tensor0, sum, x, y)", "{ {x:0,y:0}:1.0, {x:1,y:0}:3.0, {x:0,y:1}:5.0, {x:1,y:1}:7.0 }"); tester.assertEvaluates("{ {}:4 }", "reduce(tensor0, avg)", "{ {x:0,y:0}:1.0, {x:1,y:0}:3.0, {x:0,y:1}:5.0, {x:1,y:1}:7.0 }"); tester.assertEvaluates("{ {y:0}:2, {y:1}:6 }", "reduce(tensor0, avg, x)", "{ {x:0,y:0}:1.0, {x:1,y:0}:3.0, {x:0,y:1}:5.0, {x:1,y:1}:7.0 }"); tester.assertEvaluates("{ {y:0}:2, {y:1}:2 }", "reduce(tensor0, count, x)", "{ {x:0,y:0}:1.0, {x:1,y:0}:3.0, {x:0,y:1}:5.0, {x:1,y:1}:7.0 }"); tester.assertEvaluates("{ {y:0}:3, {y:1}:35 }", "reduce(tensor0, prod, x)", "{ {x:0,y:0}:1.0, 
{x:1,y:0}:3.0, {x:0,y:1}:5.0, {x:1,y:1}:7.0 }"); tester.assertEvaluates("{ {y:0}:4, {y:1}:12 }", "reduce(tensor0, sum, x)", "{ {x:0,y:0}:1.0, {x:1,y:0}:3.0, {x:0,y:1}:5.0, {x:1,y:1}:7.0 }"); tester.assertEvaluates("{ {y:0}:3, {y:1}:7 }", "reduce(tensor0, max, x)", "{ {x:0,y:0}:1.0, {x:1,y:0}:3.0, {x:0,y:1}:5.0, {x:1,y:1}:7.0 }"); tester.assertEvaluates("{ {y:0}:1, {y:1}:5 }", "reduce(tensor0, min, x)", "{ {x:0,y:0}:1.0, {x:1,y:0}:3.0, {x:0,y:1}:5.0, {x:1,y:1}:7.0 }"); tester.assertEvaluates("{ {}: 5 }", "sum(tensor0)", "5.0"); tester.assertEvaluates("{ {}:-5 }", "sum(tensor0)", "-5.0"); tester.assertEvaluates("{ {}:12.5 }", "sum(tensor0)", "{ {d1:0}:5.5, {d1:1}:7.0 }"); tester.assertEvaluates("{ {}: 0 }", "sum(tensor0)", "{ {d1:0}:5.0, {d1:1}:7.0, {d1:2}:-12.0}"); tester.assertEvaluates("{ {}: 8.0 }", "avg(tensor0)", "{ {d1:0}:5.0, {d1:1}:7.0, {d1:2}:12.0}"); tester.assertEvaluates("{ {}: 5.0 }", "median(tensor0)", "{ {d1:0}:5.0, {d1:1}:7.0, {d1:2}:-12.0}"); tester.assertEvaluates("{ {y:0}:4, {y:1}:12.0 }", "sum(tensor0, x)", "{ {x:0,y:0}:1.0, {x:1,y:0}:3.0, {x:0,y:1}:5.0, {x:1,y:1}:7.0 }"); tester.assertEvaluates("{ {x:0}:6, {x:1}:10.0 }", "sum(tensor0, y)", "{ {x:0,y:0}:1.0, {x:1,y:0}:3.0, {x:0,y:1}:5.0, {x:1,y:1}:7.0 }"); tester.assertEvaluates("{ {}:16 }", "sum(tensor0, x, y)", "{ {x:0,y:0}:1.0, {x:1,y:0}:3.0, {x:0,y:1}:5.0, {x:1,y:1}:7.0 }"); tester.assertEvaluates("{ {}: -1 }", "reduce(tensor0, max)", "tensor(x[2]):[-2,-1]"); tester.assertEvaluates("{ {x:0,y:0}:15, {x:1,y:0}:35 }", "join(tensor0, tensor1, f(x,y) (x*y))", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:5 }"); tester.assertEvaluates("{ {x:0,y:0}:6, {x:1,y:0}:14 }", "join(tensor0, tensor1, f(x,y) (x+x))", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:5 }"); tester.assertEvaluates("{ {x:0}:2, {x:1}:-3 }", "join(tensor0, tensor1, f(x,y) (y-x))", "{ {x:0}:3, {x:1}:7 }", "{ {x:0}:5, {x:1}:4 }"); tester.assertEvaluates("{ }", "tensor0 * tensor0", "{}"); tester.assertEvaluates("{{x:0,y:0,z:0}:0.0}", "( tensor0 * tensor1 ) * ( 
tensor2 * tensor1 )", "{{x:0}:1}", "{}", "{{y:0,z:0}:1}"); tester.assertEvaluates("tensor(x{}):{}", "tensor0 * tensor1", "{ {x:0}:3 }", "tensor(x{}):{ {x:1}:5 }"); tester.assertEvaluates("tensor<double>(x{}):{}", "tensor0 * tensor1", "{ {x:0}:3 }", "tensor<float>(x{}):{ {x:1}:5 }"); tester.assertEvaluates("{ {x:0}:15 }", "tensor0 * tensor1", "{ {x:0}:3 }", "{ {x:0}:5 }"); tester.assertEvaluates("{ {x:0,y:0}:15 }", "tensor0 * tensor1", "{ {x:0}:3 }", "{ {y:0}:5 }"); tester.assertEvaluates("{ {x:0,y:0}:15, {x:1,y:0}:35 }", "tensor0 * tensor1", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:5 }"); tester.assertEvaluates("{ {x:0,y:0}:8, {x:1,y:0}:12 }", "tensor0 + tensor1", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:5 }"); tester.assertEvaluates("{ {x:0,y:0}:-2, {x:1,y:0}:2 }", "tensor0 - tensor1", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:5 }"); tester.assertEvaluates("{ {x:0,y:0}:5, {x:1,y:0}:4 }", "tensor0 / tensor1", "{ {x:0}:15, {x:1}:12 }", "{ {y:0}:3 }"); tester.assertEvaluates("{ {x:0,y:0}:5, {x:1,y:0}:7 }", "max(tensor0, tensor1)", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:5 }"); tester.assertEvaluates("{ {x:0,y:0}:3, {x:1,y:0}:5 }", "min(tensor0, tensor1)", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:5 }"); tester.assertEvaluates("{ {x:0,y:0}:243, {x:1,y:0}:16807 }", "pow(tensor0, tensor1)", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:5 }"); tester.assertEvaluates("{ {x:0,y:0}:243, {x:1,y:0}:16807 }", "tensor0 ^ tensor1", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:5 }"); tester.assertEvaluates("{ {x:0,y:0}:3, {x:1,y:0}:2 }", "fmod(tensor0, tensor1)", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:5 }"); tester.assertEvaluates("{ {x:0,y:0}:3, {x:1,y:0}:2 }", "tensor0 % tensor1", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:5 }"); tester.assertEvaluates("{ {x:0,y:0}:96, {x:1,y:0}:224 }", "ldexp(tensor0, tensor1)", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:5.1 }"); tester.assertEvaluates("{ {x:0,y:0,z:0}:7, {x:0,y:0,z:1}:13, {x:1,y:0,z:0}:21, {x:1,y:0,z:1}:39, {x:0,y:1,z:0}:55, {x:0,y:1,z:1}:0, {x:1,y:1,z:0}:0, {x:1,y:1,z:1}:0 }", "tensor0 * tensor1", "{ {x:0,y:0}:1, 
{x:1,y:0}:3, {x:0,y:1}:5, {x:1,y:1}:0 }", "{ {y:0,z:0}:7, {y:1,z:0}:11, {y:0,z:1}:13, {y:1,z:1}:0 }"); tester.assertEvaluates("{ {x:0,y:1,z:0}:35, {x:0,y:1,z:1}:65 }", "tensor0 * tensor1", "tensor(x{},y{}):{ {x:0,y:0}:1, {x:1,y:0}:3, {x:0,y:1}:5 }", "tensor(y{},z{}):{ {y:1,z:0}:7, {y:2,z:0}:11, {y:1,z:1}:13 })"); tester.assertEvaluates("{{x:0,y:0}:0.0}","tensor1 * tensor2 * tensor3", "{ {x:0}:1 }", "{ {x:1,y:0}:1, {x:0,y:0}:1 }", "{ {x:0,y:0}:1 }"); tester.assertEvaluates("{ {d1:0}:50, {d1:1}:500, {d1:2}:5000 }", "5 * tensor0", "{ {d1:0}:10, {d1:1}:100, {d1:2}:1000 }"); tester.assertEvaluates("{ {d1:0}:13, {d1:1}:103, {d1:2}:1003 }", "tensor0 + 3","{ {d1:0}:10, {d1:1}:100, {d1:2}:1000 }"); tester.assertEvaluates("{ {d1:0}:1, {d1:1}:10, {d1:2 }:100 }", "tensor0 / 10", "{ {d1:0}:10, {d1:1}:100, {d1:2}:1000 }"); tester.assertEvaluates("{ {h:0}:1.5, {h:1}:1.5 }", "0.5 + tensor0", "{ {h:0}:1.0,{h:1}:1.0 }"); tester.assertEvaluates("{ {x:0,y:0}:0, {x:1,y:0}:0 }", "atan2(tensor0, tensor1)", "{ {x:0}:0, {x:1}:0 }", "{ {y:0}:1 }"); tester.assertEvaluates("{ {x:0,y:0}:2, {x:1,y:0}:7 }", "hamming(tensor0, tensor1)", "{ {x:0}:97, {x:1}:-1 }", "{ {y:0}:1 }"); tester.assertEvaluates("{ {x:0,y:0}:0, {x:1,y:0}:1 }", "tensor0 > tensor1", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:5 }"); tester.assertEvaluates("{ {x:0,y:0}:1, {x:1,y:0}:0 }", "tensor0 < tensor1", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:5 }"); tester.assertEvaluates("{ {x:0,y:0}:0, {x:1,y:0}:1 }", "tensor0 >= tensor1", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:5 }"); tester.assertEvaluates("{ {x:0,y:0}:1, {x:1,y:0}:0 }", "tensor0 <= tensor1", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:5 }"); tester.assertEvaluates("{ {x:0,y:0}:0, {x:1,y:0}:1 }", "tensor0 == tensor1", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:7 }"); tester.assertEvaluates("{ {x:0,y:0}:0, {x:1,y:0}:1 }", "tensor0 ~= tensor1", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:7 }"); tester.assertEvaluates("{ {x:0,y:0}:1, {x:1,y:0}:0 }", "tensor0 != tensor1", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:7 }"); 
tester.assertEvaluates("{ {x:0}:1, {x:1}:0 }", "tensor0 in [1,2,3]", "{ {x:0}:3, {x:1}:7 }"); tester.assertEvaluates("{ {x:0}:0.1 }", "join(tensor0, 0.1, f(x,y) (x*y))", "{ {x:0}:1 }"); tester.assertEvaluates("{ {x:0}:15, {x:1}:4 }", "merge(tensor0, tensor1, f(x,y) (x*y))", "{ {x:0}:3 }", "{ {x:0}:5, {x:1}:4 }"); tester.assertEvaluates("{ }", "merge(tensor0, tensor1, f(x,y) (x*y))", "{}"); tester.assertEvaluates("{ {x:0,y:0}:1, {x:1,y:0}:0 }", "tensor0 != tensor1", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:7 }"); tester.assertEvaluates("{ {newX:0,y:0}:3 }", "rename(tensor0, x, newX)", "{ {x:0,y:0}:3.0 }"); tester.assertEvaluates("{ {x:0,y:0}:3, {x:1,y:0}:5 }", "rename(tensor0, (x, y), (y, x))", "{ {x:0,y:0}:3.0, {x:0,y:1}:5.0 }"); tester.assertEvaluates("{ {x:0,y:0}:0, {x:1,y:0}:0, {x:0,y:1}:1, {x:1,y:1}:0, {x:0,y:2}:0, {x:1,y:2}:1 }", "tensor(x[2],y[3])(x+1==y)"); tester.assertEvaluates("{ {y:0,x:0}:0, {y:1,x:0}:0, {y:0,x:1}:1, {y:1,x:1}:0, {y:0,x:2}:0, {y:1,x:2}:1 }", "tensor(y[2],x[3])(y+1==x)"); tester.assertEvaluates("{ {x:0,y:0,z:0}:1 }", "tensor(x[1],y[1],z[1])((x==y)*(y==z))"); tester.assertEvaluates("{ {x:0}:0, {x:1}:1, {x:2}:2 }", "range(x[3])"); tester.assertEvaluates("{ {x:0,y:0,z:0}:1, {x:0,y:0,z:1}:0, {x:0,y:1,z:0}:0, {x:0,y:1,z:1}:0, {x:1,y:0,z:0}:0, {x:1,y:0,z:1}:0, {x:1,y:1,z:0}:0, {x:1,y:1,z:1}:1, }", "diag(x[2],y[2],z[2])"); tester.assertEvaluates("6", "reduce(random(x[2],y[3]), count)"); tester.assertEvaluates("tensor(x[2]):[0.0, 2.0]", "tensor(x[2]):{{x:0}:tensor(y[2]):{{y:0}:((0+0)+a)," + "{y:1}:((0+1)+a)}{y:0}," + "{x:1}:tensor(y[2]):{{y:0}:((1+0)+a)," + "{y:1}:((1+1)+a)}{y:1}" + "}"); tester.assertEvaluates("3.0", "tensor0{x:1}", "{ {x:0}:1, {x:1}:3 }"); tester.assertEvaluates("1.2", "tensor0{key:foo,x:0}", true, "{ {key:foo,x:0}:1.2, {key:bar,x:0}:3 }"); tester.assertEvaluates("3.0", "tensor0{bar}", true, "{ {x:foo}:1, {x:bar}:3 }"); tester.assertEvaluates("3.3", "tensor0[2]", "tensor(values[4]):[1.1, 2.2, 3.3, 4.4]]"); 
tester.assertEvaluates("tensor(x[5]):[0, 1, 2, 3, 4]", "concat(tensor0, tensor1, x)", "tensor(x[2]):[0, 1]", "tensor(x[3]):[2, 3, 4])"); tester.assertEvaluates("{ {x:0}:0.25, {x:1}:0.75 }", "l1_normalize(tensor0, x)", "{ {x:0}:1, {x:1}:3 }"); tester.assertEvaluates("{ {x:0}:0.31622776601683794, {x:1}:0.9486832980505138 }", "l2_normalize(tensor0, x)", "{ {x:0}:1, {x:1}:3 }"); tester.assertEvaluates("{ {y:0}:81.0 }", "matmul(tensor0, tensor1, x)", "{ {x:0}:15, {x:1}:12 }", "{ {y:0}:3 }"); tester.assertEvaluates("{ {x:0}:0.5, {x:1}:0.5 }", "softmax(tensor0, x)", "{ {x:0}:1, {x:1}:1 }", "{ {y:0}:1 }"); tester.assertEvaluates("{ {x:0,y:0}:81.0, {x:1,y:0}:88.0 }", "xw_plus_b(tensor0, tensor1, tensor2, x)", "{ {x:0}:15, {x:1}:12 }", "{ {y:0}:3 }", "{ {x:0}:0, {x:1}:7 }"); tester.assertEvaluates("{ {x:0}:1, {x:1}:0, {x:2}:0, {x:3}:1 }", "argmax(tensor0, x)", "{ {x:0}:15, {x:1}:12, {x:2}:7, {x:3}:15 }"); tester.assertEvaluates("{ {x:0}:0, {x:1}:0, {x:2}:1, {x:3}:0 }", "argmin(tensor0, x)", "{ {x:0}:15, {x:1}:12, {x:2}:7, {x:3}:15 }"); tester.assertEvaluates("tensor(y{}):{{y:6}:0}}", "matmul(tensor0, diag(x[5],y[7]), x)", "tensor(x{},y{}):{{x:4,y:6}:1})"); tester.assertEvaluates("tensor(y{}):{{y:6}:10}}
class EvaluationTestCase { private final double tolerance = 0.000001; private void verifyStringValueToString(String s) { s = '"' + s + '"'; Value val = Value.parse(s); assertTrue(val instanceof StringValue); assertEquals(s, val.toString()); } @Test public void testStringValueToString() { verifyStringValueToString(""); verifyStringValueToString("something"); verifyStringValueToString("needs \\\" escape"); verifyStringValueToString("\\\\"); verifyStringValueToString("\\\""); verifyStringValueToString("\\f"); verifyStringValueToString("\\female"); verifyStringValueToString("\\n"); verifyStringValueToString("\\nude"); verifyStringValueToString("\\r"); verifyStringValueToString("fa\\rt"); verifyStringValueToString("\\t"); verifyStringValueToString("fe\\tish"); verifyStringValueToString("\\f"); verifyStringValueToString("\\\\hx"); verifyStringValueToString("\\\\xx"); verifyStringValueToString("\\\\x10081977"); } @Test public void testEvaluationOrder() { EvaluationTester tester = new EvaluationTester(); tester.assertEvaluates(-4, "1 + -2 + -3"); tester.assertEvaluates(2, "1 - (2 - 3)"); tester.assertEvaluates(-4, "(1 - 2) - 3"); tester.assertEvaluates(-4, "1 - 2 - 3"); } @Test public void testEvaluation() { EvaluationTester tester = new EvaluationTester(); tester.assertEvaluates(0.5, "0.5"); tester.assertEvaluates(-0.5, "-0.5"); tester.assertEvaluates(0.5, "one_half"); tester.assertEvaluates(-0.5, "-one_half"); tester.assertEvaluates(0, "nonexisting"); tester.assertEvaluates(0.75, "0.5 + 0.25"); tester.assertEvaluates(0.75, "one_half + a_quarter"); tester.assertEvaluates(1.25, "0.5 - 0.25 + one"); tester.assertEvaluates(9.0, "3 ^ 2"); tester.assertEvaluates(1, "if(\"a\"==\"a\",1,0)"); tester.assertEvaluates(26, "2*3+4*5"); tester.assertEvaluates(1, "2/6+4/6"); tester.assertEvaluates(2 * 3 * 4 + 3 * 4 * 5 - 4 * 200 / 10, "2*3*4+3*4*5-4*200/10"); tester.assertEvaluates(3, "1 + 10 % 6 / 2"); tester.assertEvaluates(10.0, "3 ^ 2 + 1"); tester.assertEvaluates(18.0, "2 * 3 ^ 
2"); tester.assertEvaluates(-4, "1 - 2 - 3"); tester.assertEvaluates(Math.pow(4, 9), "4^3^2"); tester.assertEvaluates(2 * (3 * 4 + 3) * (4 * 5 - 4 * 200) / 10, "2*(3*4+3)*(4*5-4*200)/10"); tester.assertEvaluates(0.5, "if( 2<3, one_half, one_quarter)"); tester.assertEvaluates(0.25,"if( 2>3, one_half, a_quarter)"); tester.assertEvaluates(0.5, "if( 1==1, one_half, a_quarter)"); tester.assertEvaluates(0.5, "if( 1<=1, one_half, a_quarter)"); tester.assertEvaluates(0.5, "if( 1<=1.1, one_half, a_quarter)"); tester.assertEvaluates(0.25,"if( 1>=1.1, one_half, a_quarter)"); tester.assertEvaluates(0.5, "if( 0.33333333333333333333~=1/3, one_half, a_quarter)"); tester.assertEvaluates(0.25,"if( 0.33333333333333333333~=1/35, one_half, a_quarter)"); tester.assertEvaluates(5.5, "if(one_half in [one_quarter,one_half], one_half+5,log(one_quarter) * one_quarter)"); tester.assertEvaluates(0.5, "if( 1 in [1,2 , 3], one_half, a_quarter)"); tester.assertEvaluates(0.25,"if( 1 in [ 2,3,4], one_half, a_quarter)"); tester.assertEvaluates(0.5, "if( \"foo\" in [\"foo\",\"bar\"], one_half, a_quarter)"); tester.assertEvaluates(0.5, "if( foo in [\"foo\",\"bar\"], one_half, a_quarter)"); tester.assertEvaluates(0.5, "if( \"foo\" in [foo,\"bar\"], one_half, a_quarter)"); tester.assertEvaluates(0.5, "if( foo in [foo,\"bar\"], one_half, a_quarter)"); tester.assertEvaluates(0.25,"if( \"foo\" in [\"baz\",\"boz\"], one_half, a_quarter)"); tester.assertEvaluates(0.5, "if( one in [0, 1, 2], one_half, a_quarter)"); tester.assertEvaluates(0.25,"if( one in [2], one_half, a_quarter)"); tester.assertEvaluates(2.5, "if(1.0, 2.5, 3.5)"); tester.assertEvaluates(3.5, "if(0.0, 2.5, 3.5)"); tester.assertEvaluates(2.5, "if(1.0-1.1, 2.5, 3.5)"); tester.assertEvaluates(3.5, "if(1.0-1.0, 2.5, 3.5)"); RankingExpression e = tester.assertEvaluates(3.5, "if(1.0-1.0, 2.5, 3.5, 0.3)"); assertEquals(0.3d, ((IfNode) e.getRoot()).getTrueProbability(), tolerance); tester.assertEvaluates(new BooleanValue(true), "2<3"); 
tester.assertEvaluates(new BooleanValue(false), "2>3"); tester.assertEvaluates(new BooleanValue(false), "if (3>2, 2>3, 5.0)"); tester.assertEvaluates(new BooleanValue(true), "2>3<1"); tester.assertEvaluates(2.5, "if(2>3<1, 2.5, 3.5)"); tester.assertEvaluates(2.5, "if(1+1>3<1+0, 2.5, 3.5)"); tester.assertEvaluates(0, "sin(0)"); tester.assertEvaluates(1, "cos(0)"); tester.assertEvaluates(8, "pow(4/2,min(cos(0)*3,5))"); tester.assertEvaluates(0, "random(1)"); tester.assertEvaluates(0, "random(foo)"); tester.assertEvaluates(1.25, "5*if(1>=1.1, one_half, if(min(1,2)<max(1,2),if (\"foo\" in [\"foo\",\"bar\"],a_quarter,3000), 0.57345347))"); } @Test public void testBooleanEvaluation() { EvaluationTester tester = new EvaluationTester(); tester.assertEvaluates(false, "false"); tester.assertEvaluates(true, "true"); tester.assertEvaluates(false, "0 && 0"); tester.assertEvaluates(false, "0 && 1"); tester.assertEvaluates(false, "1 && 0"); tester.assertEvaluates(true, "1 && 1"); tester.assertEvaluates(true, "1 && 2"); tester.assertEvaluates(true, "1 && 0.1"); tester.assertEvaluates(false, "0 || 0"); tester.assertEvaluates(true, "0 || 0.1"); tester.assertEvaluates(true, "0 || 1"); tester.assertEvaluates(true, "1 || 0"); tester.assertEvaluates(true, "1 || 1"); tester.assertEvaluates(true, "!0"); tester.assertEvaluates(false, "!1"); tester.assertEvaluates(false, "!2"); tester.assertEvaluates(true, "!0 && 1"); tester.assertEvaluates(0, "2 * (0 && 1)"); tester.assertEvaluates(2, "2 * (1 && 1)"); tester.assertEvaluates(true, "2 + 0 && 1"); tester.assertEvaluates(true, "1 && 0 + 2"); } @Test @Test public void testMapSubspaces() { EvaluationTester tester = new EvaluationTester(); tester.assertEvaluates("tensor<float>(a{},x[2]):{foo:[2,3],bar:[7,10]}", "map_subspaces(tensor0, f(t)(t))", "tensor<float>(a{},x[2]):{foo:[2,3],bar:[7,10]}"); tester.assertEvaluates("tensor<float>(a{},x[2]):{foo:[2,3],bar:[7,10]}", "map_subspaces(tensor0, f(t)(t+2))", 
"tensor<float>(a{},x[2]):{foo:[0,1],bar:[5,8]}"); tester.assertEvaluates("tensor<float>(a{},y[2]):{foo:[3,5],bar:[9,11]}", "map_subspaces(tensor0, f(t)(tensor<float>(y[2])(t{x:(y)}+t{x:(y+1)})))", "tensor(a{},x[3]):{foo:[1,2,3],bar:[4,5,6]}"); tester.assertEvaluates("tensor<double>(a{},x[2]):{foo:[3,5],bar:[9,11]}", "map_subspaces(tensor0, f(t)(tensor(x[2])(t{x:(x)}+t{x:(x+1)})))", "tensor<float>(a{},x[3]):{foo:[1,2,3],bar:[4,5,6]}"); } @Test public void testTensorEvaluation() { EvaluationTester tester = new EvaluationTester(); tester.assertEvaluates("{}", "tensor0", "{}"); tester.assertEvaluates("{ {d1:0}:1, {d1:1}:2, {d1:2 }:3 }", "map(tensor0, f(x) (log10(x)))", "{ {d1:0}:10, {d1:1}:100, {d1:2}:1000 }"); tester.assertEvaluates("{ {d1:0}:4, {d1:1}:9, {d1:2 }:16 }", "map(tensor0, f(x) (x * x))", "{ {d1:0}:2, {d1:1}:3, {d1:2}:4 }"); tester.assertEvaluates("{ {d1:0}:0, {d1:1}:1, {d1:2 }:0 }", "tensor0 == 3", "{ {d1:0}:2, {d1:1}:3, {d1:2}:4 }"); tester.assertEvaluates("{ {d1:0}:0, {d1:1}:1, {d1:2 }:0 }", "3 == tensor0", "{ {d1:0}:2, {d1:1}:3, {d1:2}:4 }"); tester.assertEvaluates("{ {d1:0}:1, {d1:1}:2, {d1:2 }:3 }", "log10(tensor0)", "{ {d1:0}:10, {d1:1}:100, {d1:2}:1000 }"); tester.assertEvaluates("{ {d1:0}:-10, {d1:1}:-100, {d1:2 }:-1000 }", "- tensor0", "{ {d1:0}:10, {d1:1}:100, {d1:2}:1000 }"); tester.assertEvaluates("{ {d1:0}:-10, {d1:1}:0, {d1:2 }:0 }", "min(tensor0, 0)", "{ {d1:0}:-10, {d1:1}:0, {d1:2}:10 }"); tester.assertEvaluates("{ {d1:0}:0, {d1:1}:0, {d1:2 }:10 }", "max(tensor0, 0)", "{ {d1:0}:-10, {d1:1}:0, {d1:2}:10 }"); tester.assertEvaluates("{ {d1:0}:1, {d1:1}:1, {d1:2 }:1 }", "tensor0 % 2 == map(tensor0, f(x) (x % 2))", "{ {d1:0}:2, {d1:1}:3, {d1:2}:4 }"); tester.assertEvaluates("{ {d1:0}:1, {d1:1}:1, {d1:2 }:1 }", "(tensor0 || 1) == map(tensor0, f(x) (x || 1))", "{ {d1:0}:2, {d1:1}:3, {d1:2}:4 }"); tester.assertEvaluates("{ {d1:0}:1, {d1:1}:1, {d1:2 }:1 }", "(tensor0 && 1) == map(tensor0, f(x) (x && 1))", "{ {d1:0}:2, {d1:1}:3, {d1:2}:4 }"); 
tester.assertEvaluates("{ {d1:0}:1, {d1:1}:1, {d1:2 }:1 }", "!tensor0 == map(tensor0, f(x) (!x))", "{ {d1:0}:0, {d1:1}:1, {d1:2}:0 }"); tester.assertEvaluates("{ {x:0}:1, {x:1}:2 }", "abs(tensor0)", "{ {x:0}:1, {x:1}:-2 }"); tester.assertEvaluates("{ {x:0}:0, {x:1}:0 }", "acos(tensor0)", "{ {x:0}:1, {x:1}:1 }"); tester.assertEvaluates("{ {x:0}:0, {x:1}:0 }", "asin(tensor0)", "{ {x:0}:0, {x:1}:0 }"); tester.assertEvaluates("{ {x:0}:0, {x:1}:0 }", "atan(tensor0)", "{ {x:0}:0, {x:1}:0 }"); tester.assertEvaluates("{ {x:0}:1, {x:1}:2 }", "ceil(tensor0)", "{ {x:0}:1, {x:1}:2 }"); tester.assertEvaluates("{ {x:0}:1, {x:1}:1 }", "cos(tensor0)", "{ {x:0}:0, {x:1}:0 }"); tester.assertEvaluates("{ {x:0}:1, {x:1}:1 }", "cosh(tensor0)", "{ {x:0}:0, {x:1}:0 }"); tester.assertEvaluates("{ {x:0}:1, {x:1}:2 }", "elu(tensor0)", "{ {x:0}:1, {x:1}:2 }"); tester.assertEvaluates("{ {x:0}:1, {x:1}:1 }", "exp(tensor0)", "{ {x:0}:0, {x:1}:0 }"); tester.assertEvaluates("{ {x:0}:1, {x:1}:2 }", "fabs(tensor0)", "{ {x:0}:1, {x:1}:2 }"); tester.assertEvaluates("{ {x:0}:1, {x:1}:2 }", "floor(tensor0)", "{ {x:0}:1, {x:1}:2 }"); tester.assertEvaluates("{ {x:0}:0, {x:1}:0 }", "isNan(tensor0)", "{ {x:0}:1, {x:1}:2 }"); tester.assertEvaluates("{ {x:0}:0, {x:1}:0 }", "log(tensor0)", "{ {x:0}:1, {x:1}:1 }"); tester.assertEvaluates("{ {x:0}:0, {x:1}:1 }", "log10(tensor0)", "{ {x:0}:1, {x:1}:10 }"); tester.assertEvaluates("{ {x:0}:0, {x:1}:2 }", "fmod(tensor0, 3)","{ {x:0}:3, {x:1}:8 }"); tester.assertEvaluates("{ {x:0}:1, {x:1}:8 }", "pow(tensor0, 3)", "{ {x:0}:1, {x:1}:2 }"); tester.assertEvaluates("{ {x:0}:8, {x:1}:16 }", "ldexp(tensor0,3.1)","{ {x:0}:1, {x:1}:2 }"); tester.assertEvaluates("{ {x:0}:1, {x:1}:2 }", "relu(tensor0)", "{ {x:0}:1, {x:1}:2 }"); tester.assertEvaluates("{ {x:0}:1, {x:1}:2 }", "round(tensor0)", "{ {x:0}:1, {x:1}:1.8 }"); tester.assertEvaluates("{ {x:0}:0.5, {x:1}:0.5 }", "sigmoid(tensor0)","{ {x:0}:0, {x:1}:0 }"); tester.assertEvaluates("{ {x:0}:1, {x:1}:-1 }", "sign(tensor0)", 
"{ {x:0}:3, {x:1}:-5 }"); tester.assertEvaluates("{ {x:0}:0, {x:1}:0 }", "sin(tensor0)", "{ {x:0}:0, {x:1}:0 }"); tester.assertEvaluates("{ {x:0}:0, {x:1}:0 }", "sinh(tensor0)", "{ {x:0}:0, {x:1}:0 }"); tester.assertEvaluates("{ {x:0}:1, {x:1}:4 }", "square(tensor0)", "{ {x:0}:1, {x:1}:2 }"); tester.assertEvaluates("{ {x:0}:1, {x:1}:3 }", "sqrt(tensor0)", "{ {x:0}:1, {x:1}:9 }"); tester.assertEvaluates("{ {x:0}:0, {x:1}:0 }", "tan(tensor0)", "{ {x:0}:0, {x:1}:0 }"); tester.assertEvaluates("{ {x:0}:0, {x:1}:0 }", "tanh(tensor0)", "{ {x:0}:0, {x:1}:0 }"); tester.assertEvaluates("{ {}:4 }", "reduce(tensor0, avg, x, y)", "{ {x:0,y:0}:1.0, {x:1,y:0}:3.0, {x:0,y:1}:5.0, {x:1,y:1}:7.0 }"); tester.assertEvaluates("{ {}:4 }", "reduce(tensor0, count, x, y)", "{ {x:0,y:0}:1.0, {x:1,y:0}:3.0, {x:0,y:1}:5.0, {x:1,y:1}:7.0 }"); tester.assertEvaluates("{ {}:7 }", "reduce(tensor0, max, x, y)", "{ {x:0,y:0}:1.0, {x:1,y:0}:3.0, {x:0,y:1}:5.0, {x:1,y:1}:7.0 }"); tester.assertEvaluates("{ {}:4 }", "reduce(tensor0, median, x, y)", "{ {x:0,y:0}:1.0, {x:1,y:0}:3.0, {x:0,y:1}:5.0, {x:1,y:1}:7.0 }"); tester.assertEvaluates("{ {}:1 }", "reduce(tensor0, min, x, y)", "{ {x:0,y:0}:1.0, {x:1,y:0}:3.0, {x:0,y:1}:5.0, {x:1,y:1}:7.0 }"); tester.assertEvaluates("{ {}:105 }", "reduce(tensor0, prod, x, y)", "{ {x:0,y:0}:1.0, {x:1,y:0}:3.0, {x:0,y:1}:5.0, {x:1,y:1}:7.0 }"); tester.assertEvaluates("{ {}:16 }", "reduce(tensor0, sum, x, y)", "{ {x:0,y:0}:1.0, {x:1,y:0}:3.0, {x:0,y:1}:5.0, {x:1,y:1}:7.0 }"); tester.assertEvaluates("{ {}:4 }", "reduce(tensor0, avg)", "{ {x:0,y:0}:1.0, {x:1,y:0}:3.0, {x:0,y:1}:5.0, {x:1,y:1}:7.0 }"); tester.assertEvaluates("{ {y:0}:2, {y:1}:6 }", "reduce(tensor0, avg, x)", "{ {x:0,y:0}:1.0, {x:1,y:0}:3.0, {x:0,y:1}:5.0, {x:1,y:1}:7.0 }"); tester.assertEvaluates("{ {y:0}:2, {y:1}:2 }", "reduce(tensor0, count, x)", "{ {x:0,y:0}:1.0, {x:1,y:0}:3.0, {x:0,y:1}:5.0, {x:1,y:1}:7.0 }"); tester.assertEvaluates("{ {y:0}:3, {y:1}:35 }", "reduce(tensor0, prod, x)", "{ {x:0,y:0}:1.0, 
{x:1,y:0}:3.0, {x:0,y:1}:5.0, {x:1,y:1}:7.0 }"); tester.assertEvaluates("{ {y:0}:4, {y:1}:12 }", "reduce(tensor0, sum, x)", "{ {x:0,y:0}:1.0, {x:1,y:0}:3.0, {x:0,y:1}:5.0, {x:1,y:1}:7.0 }"); tester.assertEvaluates("{ {y:0}:3, {y:1}:7 }", "reduce(tensor0, max, x)", "{ {x:0,y:0}:1.0, {x:1,y:0}:3.0, {x:0,y:1}:5.0, {x:1,y:1}:7.0 }"); tester.assertEvaluates("{ {y:0}:1, {y:1}:5 }", "reduce(tensor0, min, x)", "{ {x:0,y:0}:1.0, {x:1,y:0}:3.0, {x:0,y:1}:5.0, {x:1,y:1}:7.0 }"); tester.assertEvaluates("{ {}: 5 }", "sum(tensor0)", "5.0"); tester.assertEvaluates("{ {}:-5 }", "sum(tensor0)", "-5.0"); tester.assertEvaluates("{ {}:12.5 }", "sum(tensor0)", "{ {d1:0}:5.5, {d1:1}:7.0 }"); tester.assertEvaluates("{ {}: 0 }", "sum(tensor0)", "{ {d1:0}:5.0, {d1:1}:7.0, {d1:2}:-12.0}"); tester.assertEvaluates("{ {}: 8.0 }", "avg(tensor0)", "{ {d1:0}:5.0, {d1:1}:7.0, {d1:2}:12.0}"); tester.assertEvaluates("{ {}: 5.0 }", "median(tensor0)", "{ {d1:0}:5.0, {d1:1}:7.0, {d1:2}:-12.0}"); tester.assertEvaluates("{ {y:0}:4, {y:1}:12.0 }", "sum(tensor0, x)", "{ {x:0,y:0}:1.0, {x:1,y:0}:3.0, {x:0,y:1}:5.0, {x:1,y:1}:7.0 }"); tester.assertEvaluates("{ {x:0}:6, {x:1}:10.0 }", "sum(tensor0, y)", "{ {x:0,y:0}:1.0, {x:1,y:0}:3.0, {x:0,y:1}:5.0, {x:1,y:1}:7.0 }"); tester.assertEvaluates("{ {}:16 }", "sum(tensor0, x, y)", "{ {x:0,y:0}:1.0, {x:1,y:0}:3.0, {x:0,y:1}:5.0, {x:1,y:1}:7.0 }"); tester.assertEvaluates("{ {}: -1 }", "reduce(tensor0, max)", "tensor(x[2]):[-2,-1]"); tester.assertEvaluates("{ {x:0,y:0}:15, {x:1,y:0}:35 }", "join(tensor0, tensor1, f(x,y) (x*y))", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:5 }"); tester.assertEvaluates("{ {x:0,y:0}:6, {x:1,y:0}:14 }", "join(tensor0, tensor1, f(x,y) (x+x))", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:5 }"); tester.assertEvaluates("{ {x:0}:2, {x:1}:-3 }", "join(tensor0, tensor1, f(x,y) (y-x))", "{ {x:0}:3, {x:1}:7 }", "{ {x:0}:5, {x:1}:4 }"); tester.assertEvaluates("{ }", "tensor0 * tensor0", "{}"); tester.assertEvaluates("{{x:0,y:0,z:0}:0.0}", "( tensor0 * tensor1 ) * ( 
tensor2 * tensor1 )", "{{x:0}:1}", "{}", "{{y:0,z:0}:1}"); tester.assertEvaluates("tensor(x{}):{}", "tensor0 * tensor1", "{ {x:0}:3 }", "tensor(x{}):{ {x:1}:5 }"); tester.assertEvaluates("tensor<double>(x{}):{}", "tensor0 * tensor1", "{ {x:0}:3 }", "tensor<float>(x{}):{ {x:1}:5 }"); tester.assertEvaluates("{ {x:0}:15 }", "tensor0 * tensor1", "{ {x:0}:3 }", "{ {x:0}:5 }"); tester.assertEvaluates("{ {x:0,y:0}:15 }", "tensor0 * tensor1", "{ {x:0}:3 }", "{ {y:0}:5 }"); tester.assertEvaluates("{ {x:0,y:0}:15, {x:1,y:0}:35 }", "tensor0 * tensor1", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:5 }"); tester.assertEvaluates("{ {x:0,y:0}:8, {x:1,y:0}:12 }", "tensor0 + tensor1", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:5 }"); tester.assertEvaluates("{ {x:0,y:0}:-2, {x:1,y:0}:2 }", "tensor0 - tensor1", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:5 }"); tester.assertEvaluates("{ {x:0,y:0}:5, {x:1,y:0}:4 }", "tensor0 / tensor1", "{ {x:0}:15, {x:1}:12 }", "{ {y:0}:3 }"); tester.assertEvaluates("{ {x:0,y:0}:5, {x:1,y:0}:7 }", "max(tensor0, tensor1)", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:5 }"); tester.assertEvaluates("{ {x:0,y:0}:3, {x:1,y:0}:5 }", "min(tensor0, tensor1)", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:5 }"); tester.assertEvaluates("{ {x:0,y:0}:243, {x:1,y:0}:16807 }", "pow(tensor0, tensor1)", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:5 }"); tester.assertEvaluates("{ {x:0,y:0}:243, {x:1,y:0}:16807 }", "tensor0 ^ tensor1", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:5 }"); tester.assertEvaluates("{ {x:0,y:0}:3, {x:1,y:0}:2 }", "fmod(tensor0, tensor1)", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:5 }"); tester.assertEvaluates("{ {x:0,y:0}:3, {x:1,y:0}:2 }", "tensor0 % tensor1", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:5 }"); tester.assertEvaluates("{ {x:0,y:0}:96, {x:1,y:0}:224 }", "ldexp(tensor0, tensor1)", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:5.1 }"); tester.assertEvaluates("{ {x:0,y:0,z:0}:7, {x:0,y:0,z:1}:13, {x:1,y:0,z:0}:21, {x:1,y:0,z:1}:39, {x:0,y:1,z:0}:55, {x:0,y:1,z:1}:0, {x:1,y:1,z:0}:0, {x:1,y:1,z:1}:0 }", "tensor0 * tensor1", "{ {x:0,y:0}:1, 
{x:1,y:0}:3, {x:0,y:1}:5, {x:1,y:1}:0 }", "{ {y:0,z:0}:7, {y:1,z:0}:11, {y:0,z:1}:13, {y:1,z:1}:0 }"); tester.assertEvaluates("{ {x:0,y:1,z:0}:35, {x:0,y:1,z:1}:65 }", "tensor0 * tensor1", "tensor(x{},y{}):{ {x:0,y:0}:1, {x:1,y:0}:3, {x:0,y:1}:5 }", "tensor(y{},z{}):{ {y:1,z:0}:7, {y:2,z:0}:11, {y:1,z:1}:13 })"); tester.assertEvaluates("{{x:0,y:0}:0.0}","tensor1 * tensor2 * tensor3", "{ {x:0}:1 }", "{ {x:1,y:0}:1, {x:0,y:0}:1 }", "{ {x:0,y:0}:1 }"); tester.assertEvaluates("{ {d1:0}:50, {d1:1}:500, {d1:2}:5000 }", "5 * tensor0", "{ {d1:0}:10, {d1:1}:100, {d1:2}:1000 }"); tester.assertEvaluates("{ {d1:0}:13, {d1:1}:103, {d1:2}:1003 }", "tensor0 + 3","{ {d1:0}:10, {d1:1}:100, {d1:2}:1000 }"); tester.assertEvaluates("{ {d1:0}:1, {d1:1}:10, {d1:2 }:100 }", "tensor0 / 10", "{ {d1:0}:10, {d1:1}:100, {d1:2}:1000 }"); tester.assertEvaluates("{ {h:0}:1.5, {h:1}:1.5 }", "0.5 + tensor0", "{ {h:0}:1.0,{h:1}:1.0 }"); tester.assertEvaluates("{ {x:0,y:0}:0, {x:1,y:0}:0 }", "atan2(tensor0, tensor1)", "{ {x:0}:0, {x:1}:0 }", "{ {y:0}:1 }"); tester.assertEvaluates("{ {x:0,y:0}:2, {x:1,y:0}:7 }", "hamming(tensor0, tensor1)", "{ {x:0}:97, {x:1}:-1 }", "{ {y:0}:1 }"); tester.assertEvaluates("{ {x:0,y:0}:0, {x:1,y:0}:1 }", "tensor0 > tensor1", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:5 }"); tester.assertEvaluates("{ {x:0,y:0}:1, {x:1,y:0}:0 }", "tensor0 < tensor1", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:5 }"); tester.assertEvaluates("{ {x:0,y:0}:0, {x:1,y:0}:1 }", "tensor0 >= tensor1", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:5 }"); tester.assertEvaluates("{ {x:0,y:0}:1, {x:1,y:0}:0 }", "tensor0 <= tensor1", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:5 }"); tester.assertEvaluates("{ {x:0,y:0}:0, {x:1,y:0}:1 }", "tensor0 == tensor1", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:7 }"); tester.assertEvaluates("{ {x:0,y:0}:0, {x:1,y:0}:1 }", "tensor0 ~= tensor1", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:7 }"); tester.assertEvaluates("{ {x:0,y:0}:1, {x:1,y:0}:0 }", "tensor0 != tensor1", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:7 }"); 
tester.assertEvaluates("{ {x:0}:1, {x:1}:0 }", "tensor0 in [1,2,3]", "{ {x:0}:3, {x:1}:7 }"); tester.assertEvaluates("{ {x:0}:0.1 }", "join(tensor0, 0.1, f(x,y) (x*y))", "{ {x:0}:1 }"); tester.assertEvaluates("{ {x:0}:15, {x:1}:4 }", "merge(tensor0, tensor1, f(x,y) (x*y))", "{ {x:0}:3 }", "{ {x:0}:5, {x:1}:4 }"); tester.assertEvaluates("{ }", "merge(tensor0, tensor1, f(x,y) (x*y))", "{}"); tester.assertEvaluates("{ {x:0,y:0}:1, {x:1,y:0}:0 }", "tensor0 != tensor1", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:7 }"); tester.assertEvaluates("{ {newX:0,y:0}:3 }", "rename(tensor0, x, newX)", "{ {x:0,y:0}:3.0 }"); tester.assertEvaluates("{ {x:0,y:0}:3, {x:1,y:0}:5 }", "rename(tensor0, (x, y), (y, x))", "{ {x:0,y:0}:3.0, {x:0,y:1}:5.0 }"); tester.assertEvaluates("{ {x:0,y:0}:0, {x:1,y:0}:0, {x:0,y:1}:1, {x:1,y:1}:0, {x:0,y:2}:0, {x:1,y:2}:1 }", "tensor(x[2],y[3])(x+1==y)"); tester.assertEvaluates("{ {y:0,x:0}:0, {y:1,x:0}:0, {y:0,x:1}:1, {y:1,x:1}:0, {y:0,x:2}:0, {y:1,x:2}:1 }", "tensor(y[2],x[3])(y+1==x)"); tester.assertEvaluates("{ {x:0,y:0,z:0}:1 }", "tensor(x[1],y[1],z[1])((x==y)*(y==z))"); tester.assertEvaluates("{ {x:0}:0, {x:1}:1, {x:2}:2 }", "range(x[3])"); tester.assertEvaluates("{ {x:0,y:0,z:0}:1, {x:0,y:0,z:1}:0, {x:0,y:1,z:0}:0, {x:0,y:1,z:1}:0, {x:1,y:0,z:0}:0, {x:1,y:0,z:1}:0, {x:1,y:1,z:0}:0, {x:1,y:1,z:1}:1, }", "diag(x[2],y[2],z[2])"); tester.assertEvaluates("6", "reduce(random(x[2],y[3]), count)"); tester.assertEvaluates("tensor(x[2]):[0.0, 2.0]", "tensor(x[2]):{{x:0}:tensor(y[2]):{{y:0}:((0+0)+a)," + "{y:1}:((0+1)+a)}{y:0}," + "{x:1}:tensor(y[2]):{{y:0}:((1+0)+a)," + "{y:1}:((1+1)+a)}{y:1}" + "}"); tester.assertEvaluates("3.0", "tensor0{x:1}", "{ {x:0}:1, {x:1}:3 }"); tester.assertEvaluates("1.2", "tensor0{key:foo,x:0}", true, "{ {key:foo,x:0}:1.2, {key:bar,x:0}:3 }"); tester.assertEvaluates("3.0", "tensor0{bar}", true, "{ {x:foo}:1, {x:bar}:3 }"); tester.assertEvaluates("3.3", "tensor0[2]", "tensor(values[4]):[1.1, 2.2, 3.3, 4.4]]"); 
tester.assertEvaluates("tensor(x[5]):[0, 1, 2, 3, 4]", "concat(tensor0, tensor1, x)", "tensor(x[2]):[0, 1]", "tensor(x[3]):[2, 3, 4])"); tester.assertEvaluates("{ {x:0}:0.25, {x:1}:0.75 }", "l1_normalize(tensor0, x)", "{ {x:0}:1, {x:1}:3 }"); tester.assertEvaluates("{ {x:0}:0.31622776601683794, {x:1}:0.9486832980505138 }", "l2_normalize(tensor0, x)", "{ {x:0}:1, {x:1}:3 }"); tester.assertEvaluates("{ {y:0}:81.0 }", "matmul(tensor0, tensor1, x)", "{ {x:0}:15, {x:1}:12 }", "{ {y:0}:3 }"); tester.assertEvaluates("{ {x:0}:0.5, {x:1}:0.5 }", "softmax(tensor0, x)", "{ {x:0}:1, {x:1}:1 }", "{ {y:0}:1 }"); tester.assertEvaluates("{ {x:0,y:0}:81.0, {x:1,y:0}:88.0 }", "xw_plus_b(tensor0, tensor1, tensor2, x)", "{ {x:0}:15, {x:1}:12 }", "{ {y:0}:3 }", "{ {x:0}:0, {x:1}:7 }"); tester.assertEvaluates("{ {x:0}:1, {x:1}:0, {x:2}:0, {x:3}:1 }", "argmax(tensor0, x)", "{ {x:0}:15, {x:1}:12, {x:2}:7, {x:3}:15 }"); tester.assertEvaluates("{ {x:0}:0, {x:1}:0, {x:2}:1, {x:3}:0 }", "argmin(tensor0, x)", "{ {x:0}:15, {x:1}:12, {x:2}:7, {x:3}:15 }"); tester.assertEvaluates("tensor(y{}):{{y:6}:0}}", "matmul(tensor0, diag(x[5],y[7]), x)", "tensor(x{},y{}):{{x:4,y:6}:1})"); tester.assertEvaluates("tensor(y{}):{{y:6}:10}}
Thanks, yes, that is what I meant by inversion: that we also have `pack_into_int8` (uncertain about the name). The inspiration is from [numpy.packbits](https://numpy.org/doc/stable/reference/generated/numpy.packbits.html) and [numpy.unpackbits](https://numpy.org/doc/stable/reference/generated/numpy.unpackbits.html).
// Verifies unpack_bits_from_int8 with both big- and little-endian bit order,
// and with float as well as int8 cell types in the unpacked result.
//
// Fix: values in a tensor<int8> literal must lie within the signed-byte range
// [-128, 127]. The previous literal used 255, which is out of range (0xFF as a
// signed byte is -1), so the all-ones byte is now written as -1.
public void testUnpack() {
    EvaluationTester tester = new EvaluationTester();
    // big-endian: the most significant bit of each byte becomes the first cell
    tester.assertEvaluates("tensor<float>(a{},x[16]):{foo:[" +
                           "0,0,0,0, 0,0,0,0," +
                           "1,1,1,1, 1,1,1,1" +
                           "],bar:[" +
                           "0,0,0,0, 0,0,0,1," +
                           "1,1,1,1, 1,0,0,0]}",
                           "unpack_bits_from_int8(tensor0, float, big)",
                           "tensor<int8>(a{},x[2]):{foo:[0,-1],bar:[1,-8]}");
    // little-endian: the least significant bit of each byte becomes the first cell
    tester.assertEvaluates("tensor<int8>(a{},x[16]):{foo:[" +
                           "0,0,0,0, 0,0,0,0," +
                           "1,1,1,1, 1,1,1,1" +
                           "],bar:[" +
                           "1,0,0,0, 0,0,0,0," +
                           "0,0,0,1, 1,1,1,1]}",
                           "unpack_bits_from_int8(tensor0, int8, little)",
                           "tensor<int8>(a{},x[2]):{foo:[0,-1],bar:[1,-8]}");
}
"tensor<int8>(a{},x[2]):{foo:[0,255],bar:[1,-8]}");
// Exercises unpack_bits with both big- and little-endian bit order, and with
// float as well as int8 cell types in the unpacked result. The packed input is
// shared between the two cases: byte 0 of 'foo' is 0 (all zero bits), byte 1
// is -1 (0xFF, all one bits); 'bar' packs [1, -8] ([0x01, 0xF8]).
public void testUnpack() {
    EvaluationTester tester = new EvaluationTester();
    String packed = "tensor<int8>(a{},x[2]):{foo:[0,-1],bar:[1,-8]}";
    String expectedBigEndian = "tensor<float>(a{},x[16]):{foo:["
                               + "0,0,0,0, 0,0,0,0,"
                               + "1,1,1,1, 1,1,1,1"
                               + "],bar:["
                               + "0,0,0,0, 0,0,0,1,"
                               + "1,1,1,1, 1,0,0,0]}";
    String expectedLittleEndian = "tensor<int8>(a{},x[16]):{foo:["
                                  + "0,0,0,0, 0,0,0,0,"
                                  + "1,1,1,1, 1,1,1,1"
                                  + "],bar:["
                                  + "1,0,0,0, 0,0,0,0,"
                                  + "0,0,0,1, 1,1,1,1]}";
    // most significant bit first
    tester.assertEvaluates(expectedBigEndian, "unpack_bits(tensor0, float, big)", packed);
    // least significant bit first
    tester.assertEvaluates(expectedLittleEndian, "unpack_bits(tensor0, int8, little)", packed);
}
class EvaluationTestCase { private final double tolerance = 0.000001; private void verifyStringValueToString(String s) { s = '"' + s + '"'; Value val = Value.parse(s); assertTrue(val instanceof StringValue); assertEquals(s, val.toString()); } @Test public void testStringValueToString() { verifyStringValueToString(""); verifyStringValueToString("something"); verifyStringValueToString("needs \\\" escape"); verifyStringValueToString("\\\\"); verifyStringValueToString("\\\""); verifyStringValueToString("\\f"); verifyStringValueToString("\\female"); verifyStringValueToString("\\n"); verifyStringValueToString("\\nude"); verifyStringValueToString("\\r"); verifyStringValueToString("fa\\rt"); verifyStringValueToString("\\t"); verifyStringValueToString("fe\\tish"); verifyStringValueToString("\\f"); verifyStringValueToString("\\\\hx"); verifyStringValueToString("\\\\xx"); verifyStringValueToString("\\\\x10081977"); } @Test public void testEvaluationOrder() { EvaluationTester tester = new EvaluationTester(); tester.assertEvaluates(-4, "1 + -2 + -3"); tester.assertEvaluates(2, "1 - (2 - 3)"); tester.assertEvaluates(-4, "(1 - 2) - 3"); tester.assertEvaluates(-4, "1 - 2 - 3"); } @Test public void testEvaluation() { EvaluationTester tester = new EvaluationTester(); tester.assertEvaluates(0.5, "0.5"); tester.assertEvaluates(-0.5, "-0.5"); tester.assertEvaluates(0.5, "one_half"); tester.assertEvaluates(-0.5, "-one_half"); tester.assertEvaluates(0, "nonexisting"); tester.assertEvaluates(0.75, "0.5 + 0.25"); tester.assertEvaluates(0.75, "one_half + a_quarter"); tester.assertEvaluates(1.25, "0.5 - 0.25 + one"); tester.assertEvaluates(9.0, "3 ^ 2"); tester.assertEvaluates(1, "if(\"a\"==\"a\",1,0)"); tester.assertEvaluates(26, "2*3+4*5"); tester.assertEvaluates(1, "2/6+4/6"); tester.assertEvaluates(2 * 3 * 4 + 3 * 4 * 5 - 4 * 200 / 10, "2*3*4+3*4*5-4*200/10"); tester.assertEvaluates(3, "1 + 10 % 6 / 2"); tester.assertEvaluates(10.0, "3 ^ 2 + 1"); tester.assertEvaluates(18.0, "2 * 3 ^ 
2"); tester.assertEvaluates(-4, "1 - 2 - 3"); tester.assertEvaluates(Math.pow(4, 9), "4^3^2"); tester.assertEvaluates(2 * (3 * 4 + 3) * (4 * 5 - 4 * 200) / 10, "2*(3*4+3)*(4*5-4*200)/10"); tester.assertEvaluates(0.5, "if( 2<3, one_half, one_quarter)"); tester.assertEvaluates(0.25,"if( 2>3, one_half, a_quarter)"); tester.assertEvaluates(0.5, "if( 1==1, one_half, a_quarter)"); tester.assertEvaluates(0.5, "if( 1<=1, one_half, a_quarter)"); tester.assertEvaluates(0.5, "if( 1<=1.1, one_half, a_quarter)"); tester.assertEvaluates(0.25,"if( 1>=1.1, one_half, a_quarter)"); tester.assertEvaluates(0.5, "if( 0.33333333333333333333~=1/3, one_half, a_quarter)"); tester.assertEvaluates(0.25,"if( 0.33333333333333333333~=1/35, one_half, a_quarter)"); tester.assertEvaluates(5.5, "if(one_half in [one_quarter,one_half], one_half+5,log(one_quarter) * one_quarter)"); tester.assertEvaluates(0.5, "if( 1 in [1,2 , 3], one_half, a_quarter)"); tester.assertEvaluates(0.25,"if( 1 in [ 2,3,4], one_half, a_quarter)"); tester.assertEvaluates(0.5, "if( \"foo\" in [\"foo\",\"bar\"], one_half, a_quarter)"); tester.assertEvaluates(0.5, "if( foo in [\"foo\",\"bar\"], one_half, a_quarter)"); tester.assertEvaluates(0.5, "if( \"foo\" in [foo,\"bar\"], one_half, a_quarter)"); tester.assertEvaluates(0.5, "if( foo in [foo,\"bar\"], one_half, a_quarter)"); tester.assertEvaluates(0.25,"if( \"foo\" in [\"baz\",\"boz\"], one_half, a_quarter)"); tester.assertEvaluates(0.5, "if( one in [0, 1, 2], one_half, a_quarter)"); tester.assertEvaluates(0.25,"if( one in [2], one_half, a_quarter)"); tester.assertEvaluates(2.5, "if(1.0, 2.5, 3.5)"); tester.assertEvaluates(3.5, "if(0.0, 2.5, 3.5)"); tester.assertEvaluates(2.5, "if(1.0-1.1, 2.5, 3.5)"); tester.assertEvaluates(3.5, "if(1.0-1.0, 2.5, 3.5)"); RankingExpression e = tester.assertEvaluates(3.5, "if(1.0-1.0, 2.5, 3.5, 0.3)"); assertEquals(0.3d, ((IfNode) e.getRoot()).getTrueProbability(), tolerance); tester.assertEvaluates(new BooleanValue(true), "2<3"); 
tester.assertEvaluates(new BooleanValue(false), "2>3"); tester.assertEvaluates(new BooleanValue(false), "if (3>2, 2>3, 5.0)"); tester.assertEvaluates(new BooleanValue(true), "2>3<1"); tester.assertEvaluates(2.5, "if(2>3<1, 2.5, 3.5)"); tester.assertEvaluates(2.5, "if(1+1>3<1+0, 2.5, 3.5)"); tester.assertEvaluates(0, "sin(0)"); tester.assertEvaluates(1, "cos(0)"); tester.assertEvaluates(8, "pow(4/2,min(cos(0)*3,5))"); tester.assertEvaluates(0, "random(1)"); tester.assertEvaluates(0, "random(foo)"); tester.assertEvaluates(1.25, "5*if(1>=1.1, one_half, if(min(1,2)<max(1,2),if (\"foo\" in [\"foo\",\"bar\"],a_quarter,3000), 0.57345347))"); } @Test public void testBooleanEvaluation() { EvaluationTester tester = new EvaluationTester(); tester.assertEvaluates(false, "false"); tester.assertEvaluates(true, "true"); tester.assertEvaluates(false, "0 && 0"); tester.assertEvaluates(false, "0 && 1"); tester.assertEvaluates(false, "1 && 0"); tester.assertEvaluates(true, "1 && 1"); tester.assertEvaluates(true, "1 && 2"); tester.assertEvaluates(true, "1 && 0.1"); tester.assertEvaluates(false, "0 || 0"); tester.assertEvaluates(true, "0 || 0.1"); tester.assertEvaluates(true, "0 || 1"); tester.assertEvaluates(true, "1 || 0"); tester.assertEvaluates(true, "1 || 1"); tester.assertEvaluates(true, "!0"); tester.assertEvaluates(false, "!1"); tester.assertEvaluates(false, "!2"); tester.assertEvaluates(true, "!0 && 1"); tester.assertEvaluates(0, "2 * (0 && 1)"); tester.assertEvaluates(2, "2 * (1 && 1)"); tester.assertEvaluates(true, "2 + 0 && 1"); tester.assertEvaluates(true, "1 && 0 + 2"); } @Test @Test public void testMapSubspaces() { EvaluationTester tester = new EvaluationTester(); tester.assertEvaluates("tensor<float>(a{},x[2]):{foo:[2,3],bar:[7,10]}", "map_subspaces(tensor0, f(t)(t))", "tensor<float>(a{},x[2]):{foo:[2,3],bar:[7,10]}"); tester.assertEvaluates("tensor<float>(a{},x[2]):{foo:[2,3],bar:[7,10]}", "map_subspaces(tensor0, f(t)(t+2))", 
"tensor<float>(a{},x[2]):{foo:[0,1],bar:[5,8]}"); tester.assertEvaluates("tensor<float>(a{},y[2]):{foo:[3,5],bar:[9,11]}", "map_subspaces(tensor0, f(t)(tensor<float>(y[2])(t{x:(y)}+t{x:(y+1)})))", "tensor(a{},x[3]):{foo:[1,2,3],bar:[4,5,6]}"); tester.assertEvaluates("tensor<double>(a{},x[2]):{foo:[3,5],bar:[9,11]}", "map_subspaces(tensor0, f(t)(tensor(x[2])(t{x:(x)}+t{x:(x+1)})))", "tensor<float>(a{},x[3]):{foo:[1,2,3],bar:[4,5,6]}"); } @Test public void testTensorEvaluation() { EvaluationTester tester = new EvaluationTester(); tester.assertEvaluates("{}", "tensor0", "{}"); tester.assertEvaluates("{ {d1:0}:1, {d1:1}:2, {d1:2 }:3 }", "map(tensor0, f(x) (log10(x)))", "{ {d1:0}:10, {d1:1}:100, {d1:2}:1000 }"); tester.assertEvaluates("{ {d1:0}:4, {d1:1}:9, {d1:2 }:16 }", "map(tensor0, f(x) (x * x))", "{ {d1:0}:2, {d1:1}:3, {d1:2}:4 }"); tester.assertEvaluates("{ {d1:0}:0, {d1:1}:1, {d1:2 }:0 }", "tensor0 == 3", "{ {d1:0}:2, {d1:1}:3, {d1:2}:4 }"); tester.assertEvaluates("{ {d1:0}:0, {d1:1}:1, {d1:2 }:0 }", "3 == tensor0", "{ {d1:0}:2, {d1:1}:3, {d1:2}:4 }"); tester.assertEvaluates("{ {d1:0}:1, {d1:1}:2, {d1:2 }:3 }", "log10(tensor0)", "{ {d1:0}:10, {d1:1}:100, {d1:2}:1000 }"); tester.assertEvaluates("{ {d1:0}:-10, {d1:1}:-100, {d1:2 }:-1000 }", "- tensor0", "{ {d1:0}:10, {d1:1}:100, {d1:2}:1000 }"); tester.assertEvaluates("{ {d1:0}:-10, {d1:1}:0, {d1:2 }:0 }", "min(tensor0, 0)", "{ {d1:0}:-10, {d1:1}:0, {d1:2}:10 }"); tester.assertEvaluates("{ {d1:0}:0, {d1:1}:0, {d1:2 }:10 }", "max(tensor0, 0)", "{ {d1:0}:-10, {d1:1}:0, {d1:2}:10 }"); tester.assertEvaluates("{ {d1:0}:1, {d1:1}:1, {d1:2 }:1 }", "tensor0 % 2 == map(tensor0, f(x) (x % 2))", "{ {d1:0}:2, {d1:1}:3, {d1:2}:4 }"); tester.assertEvaluates("{ {d1:0}:1, {d1:1}:1, {d1:2 }:1 }", "(tensor0 || 1) == map(tensor0, f(x) (x || 1))", "{ {d1:0}:2, {d1:1}:3, {d1:2}:4 }"); tester.assertEvaluates("{ {d1:0}:1, {d1:1}:1, {d1:2 }:1 }", "(tensor0 && 1) == map(tensor0, f(x) (x && 1))", "{ {d1:0}:2, {d1:1}:3, {d1:2}:4 }"); 
tester.assertEvaluates("{ {d1:0}:1, {d1:1}:1, {d1:2 }:1 }", "!tensor0 == map(tensor0, f(x) (!x))", "{ {d1:0}:0, {d1:1}:1, {d1:2}:0 }"); tester.assertEvaluates("{ {x:0}:1, {x:1}:2 }", "abs(tensor0)", "{ {x:0}:1, {x:1}:-2 }"); tester.assertEvaluates("{ {x:0}:0, {x:1}:0 }", "acos(tensor0)", "{ {x:0}:1, {x:1}:1 }"); tester.assertEvaluates("{ {x:0}:0, {x:1}:0 }", "asin(tensor0)", "{ {x:0}:0, {x:1}:0 }"); tester.assertEvaluates("{ {x:0}:0, {x:1}:0 }", "atan(tensor0)", "{ {x:0}:0, {x:1}:0 }"); tester.assertEvaluates("{ {x:0}:1, {x:1}:2 }", "ceil(tensor0)", "{ {x:0}:1, {x:1}:2 }"); tester.assertEvaluates("{ {x:0}:1, {x:1}:1 }", "cos(tensor0)", "{ {x:0}:0, {x:1}:0 }"); tester.assertEvaluates("{ {x:0}:1, {x:1}:1 }", "cosh(tensor0)", "{ {x:0}:0, {x:1}:0 }"); tester.assertEvaluates("{ {x:0}:1, {x:1}:2 }", "elu(tensor0)", "{ {x:0}:1, {x:1}:2 }"); tester.assertEvaluates("{ {x:0}:1, {x:1}:1 }", "exp(tensor0)", "{ {x:0}:0, {x:1}:0 }"); tester.assertEvaluates("{ {x:0}:1, {x:1}:2 }", "fabs(tensor0)", "{ {x:0}:1, {x:1}:2 }"); tester.assertEvaluates("{ {x:0}:1, {x:1}:2 }", "floor(tensor0)", "{ {x:0}:1, {x:1}:2 }"); tester.assertEvaluates("{ {x:0}:0, {x:1}:0 }", "isNan(tensor0)", "{ {x:0}:1, {x:1}:2 }"); tester.assertEvaluates("{ {x:0}:0, {x:1}:0 }", "log(tensor0)", "{ {x:0}:1, {x:1}:1 }"); tester.assertEvaluates("{ {x:0}:0, {x:1}:1 }", "log10(tensor0)", "{ {x:0}:1, {x:1}:10 }"); tester.assertEvaluates("{ {x:0}:0, {x:1}:2 }", "fmod(tensor0, 3)","{ {x:0}:3, {x:1}:8 }"); tester.assertEvaluates("{ {x:0}:1, {x:1}:8 }", "pow(tensor0, 3)", "{ {x:0}:1, {x:1}:2 }"); tester.assertEvaluates("{ {x:0}:8, {x:1}:16 }", "ldexp(tensor0,3.1)","{ {x:0}:1, {x:1}:2 }"); tester.assertEvaluates("{ {x:0}:1, {x:1}:2 }", "relu(tensor0)", "{ {x:0}:1, {x:1}:2 }"); tester.assertEvaluates("{ {x:0}:1, {x:1}:2 }", "round(tensor0)", "{ {x:0}:1, {x:1}:1.8 }"); tester.assertEvaluates("{ {x:0}:0.5, {x:1}:0.5 }", "sigmoid(tensor0)","{ {x:0}:0, {x:1}:0 }"); tester.assertEvaluates("{ {x:0}:1, {x:1}:-1 }", "sign(tensor0)", 
"{ {x:0}:3, {x:1}:-5 }"); tester.assertEvaluates("{ {x:0}:0, {x:1}:0 }", "sin(tensor0)", "{ {x:0}:0, {x:1}:0 }"); tester.assertEvaluates("{ {x:0}:0, {x:1}:0 }", "sinh(tensor0)", "{ {x:0}:0, {x:1}:0 }"); tester.assertEvaluates("{ {x:0}:1, {x:1}:4 }", "square(tensor0)", "{ {x:0}:1, {x:1}:2 }"); tester.assertEvaluates("{ {x:0}:1, {x:1}:3 }", "sqrt(tensor0)", "{ {x:0}:1, {x:1}:9 }"); tester.assertEvaluates("{ {x:0}:0, {x:1}:0 }", "tan(tensor0)", "{ {x:0}:0, {x:1}:0 }"); tester.assertEvaluates("{ {x:0}:0, {x:1}:0 }", "tanh(tensor0)", "{ {x:0}:0, {x:1}:0 }"); tester.assertEvaluates("{ {}:4 }", "reduce(tensor0, avg, x, y)", "{ {x:0,y:0}:1.0, {x:1,y:0}:3.0, {x:0,y:1}:5.0, {x:1,y:1}:7.0 }"); tester.assertEvaluates("{ {}:4 }", "reduce(tensor0, count, x, y)", "{ {x:0,y:0}:1.0, {x:1,y:0}:3.0, {x:0,y:1}:5.0, {x:1,y:1}:7.0 }"); tester.assertEvaluates("{ {}:7 }", "reduce(tensor0, max, x, y)", "{ {x:0,y:0}:1.0, {x:1,y:0}:3.0, {x:0,y:1}:5.0, {x:1,y:1}:7.0 }"); tester.assertEvaluates("{ {}:4 }", "reduce(tensor0, median, x, y)", "{ {x:0,y:0}:1.0, {x:1,y:0}:3.0, {x:0,y:1}:5.0, {x:1,y:1}:7.0 }"); tester.assertEvaluates("{ {}:1 }", "reduce(tensor0, min, x, y)", "{ {x:0,y:0}:1.0, {x:1,y:0}:3.0, {x:0,y:1}:5.0, {x:1,y:1}:7.0 }"); tester.assertEvaluates("{ {}:105 }", "reduce(tensor0, prod, x, y)", "{ {x:0,y:0}:1.0, {x:1,y:0}:3.0, {x:0,y:1}:5.0, {x:1,y:1}:7.0 }"); tester.assertEvaluates("{ {}:16 }", "reduce(tensor0, sum, x, y)", "{ {x:0,y:0}:1.0, {x:1,y:0}:3.0, {x:0,y:1}:5.0, {x:1,y:1}:7.0 }"); tester.assertEvaluates("{ {}:4 }", "reduce(tensor0, avg)", "{ {x:0,y:0}:1.0, {x:1,y:0}:3.0, {x:0,y:1}:5.0, {x:1,y:1}:7.0 }"); tester.assertEvaluates("{ {y:0}:2, {y:1}:6 }", "reduce(tensor0, avg, x)", "{ {x:0,y:0}:1.0, {x:1,y:0}:3.0, {x:0,y:1}:5.0, {x:1,y:1}:7.0 }"); tester.assertEvaluates("{ {y:0}:2, {y:1}:2 }", "reduce(tensor0, count, x)", "{ {x:0,y:0}:1.0, {x:1,y:0}:3.0, {x:0,y:1}:5.0, {x:1,y:1}:7.0 }"); tester.assertEvaluates("{ {y:0}:3, {y:1}:35 }", "reduce(tensor0, prod, x)", "{ {x:0,y:0}:1.0, 
{x:1,y:0}:3.0, {x:0,y:1}:5.0, {x:1,y:1}:7.0 }"); tester.assertEvaluates("{ {y:0}:4, {y:1}:12 }", "reduce(tensor0, sum, x)", "{ {x:0,y:0}:1.0, {x:1,y:0}:3.0, {x:0,y:1}:5.0, {x:1,y:1}:7.0 }"); tester.assertEvaluates("{ {y:0}:3, {y:1}:7 }", "reduce(tensor0, max, x)", "{ {x:0,y:0}:1.0, {x:1,y:0}:3.0, {x:0,y:1}:5.0, {x:1,y:1}:7.0 }"); tester.assertEvaluates("{ {y:0}:1, {y:1}:5 }", "reduce(tensor0, min, x)", "{ {x:0,y:0}:1.0, {x:1,y:0}:3.0, {x:0,y:1}:5.0, {x:1,y:1}:7.0 }"); tester.assertEvaluates("{ {}: 5 }", "sum(tensor0)", "5.0"); tester.assertEvaluates("{ {}:-5 }", "sum(tensor0)", "-5.0"); tester.assertEvaluates("{ {}:12.5 }", "sum(tensor0)", "{ {d1:0}:5.5, {d1:1}:7.0 }"); tester.assertEvaluates("{ {}: 0 }", "sum(tensor0)", "{ {d1:0}:5.0, {d1:1}:7.0, {d1:2}:-12.0}"); tester.assertEvaluates("{ {}: 8.0 }", "avg(tensor0)", "{ {d1:0}:5.0, {d1:1}:7.0, {d1:2}:12.0}"); tester.assertEvaluates("{ {}: 5.0 }", "median(tensor0)", "{ {d1:0}:5.0, {d1:1}:7.0, {d1:2}:-12.0}"); tester.assertEvaluates("{ {y:0}:4, {y:1}:12.0 }", "sum(tensor0, x)", "{ {x:0,y:0}:1.0, {x:1,y:0}:3.0, {x:0,y:1}:5.0, {x:1,y:1}:7.0 }"); tester.assertEvaluates("{ {x:0}:6, {x:1}:10.0 }", "sum(tensor0, y)", "{ {x:0,y:0}:1.0, {x:1,y:0}:3.0, {x:0,y:1}:5.0, {x:1,y:1}:7.0 }"); tester.assertEvaluates("{ {}:16 }", "sum(tensor0, x, y)", "{ {x:0,y:0}:1.0, {x:1,y:0}:3.0, {x:0,y:1}:5.0, {x:1,y:1}:7.0 }"); tester.assertEvaluates("{ {}: -1 }", "reduce(tensor0, max)", "tensor(x[2]):[-2,-1]"); tester.assertEvaluates("{ {x:0,y:0}:15, {x:1,y:0}:35 }", "join(tensor0, tensor1, f(x,y) (x*y))", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:5 }"); tester.assertEvaluates("{ {x:0,y:0}:6, {x:1,y:0}:14 }", "join(tensor0, tensor1, f(x,y) (x+x))", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:5 }"); tester.assertEvaluates("{ {x:0}:2, {x:1}:-3 }", "join(tensor0, tensor1, f(x,y) (y-x))", "{ {x:0}:3, {x:1}:7 }", "{ {x:0}:5, {x:1}:4 }"); tester.assertEvaluates("{ }", "tensor0 * tensor0", "{}"); tester.assertEvaluates("{{x:0,y:0,z:0}:0.0}", "( tensor0 * tensor1 ) * ( 
tensor2 * tensor1 )", "{{x:0}:1}", "{}", "{{y:0,z:0}:1}"); tester.assertEvaluates("tensor(x{}):{}", "tensor0 * tensor1", "{ {x:0}:3 }", "tensor(x{}):{ {x:1}:5 }"); tester.assertEvaluates("tensor<double>(x{}):{}", "tensor0 * tensor1", "{ {x:0}:3 }", "tensor<float>(x{}):{ {x:1}:5 }"); tester.assertEvaluates("{ {x:0}:15 }", "tensor0 * tensor1", "{ {x:0}:3 }", "{ {x:0}:5 }"); tester.assertEvaluates("{ {x:0,y:0}:15 }", "tensor0 * tensor1", "{ {x:0}:3 }", "{ {y:0}:5 }"); tester.assertEvaluates("{ {x:0,y:0}:15, {x:1,y:0}:35 }", "tensor0 * tensor1", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:5 }"); tester.assertEvaluates("{ {x:0,y:0}:8, {x:1,y:0}:12 }", "tensor0 + tensor1", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:5 }"); tester.assertEvaluates("{ {x:0,y:0}:-2, {x:1,y:0}:2 }", "tensor0 - tensor1", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:5 }"); tester.assertEvaluates("{ {x:0,y:0}:5, {x:1,y:0}:4 }", "tensor0 / tensor1", "{ {x:0}:15, {x:1}:12 }", "{ {y:0}:3 }"); tester.assertEvaluates("{ {x:0,y:0}:5, {x:1,y:0}:7 }", "max(tensor0, tensor1)", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:5 }"); tester.assertEvaluates("{ {x:0,y:0}:3, {x:1,y:0}:5 }", "min(tensor0, tensor1)", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:5 }"); tester.assertEvaluates("{ {x:0,y:0}:243, {x:1,y:0}:16807 }", "pow(tensor0, tensor1)", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:5 }"); tester.assertEvaluates("{ {x:0,y:0}:243, {x:1,y:0}:16807 }", "tensor0 ^ tensor1", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:5 }"); tester.assertEvaluates("{ {x:0,y:0}:3, {x:1,y:0}:2 }", "fmod(tensor0, tensor1)", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:5 }"); tester.assertEvaluates("{ {x:0,y:0}:3, {x:1,y:0}:2 }", "tensor0 % tensor1", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:5 }"); tester.assertEvaluates("{ {x:0,y:0}:96, {x:1,y:0}:224 }", "ldexp(tensor0, tensor1)", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:5.1 }"); tester.assertEvaluates("{ {x:0,y:0,z:0}:7, {x:0,y:0,z:1}:13, {x:1,y:0,z:0}:21, {x:1,y:0,z:1}:39, {x:0,y:1,z:0}:55, {x:0,y:1,z:1}:0, {x:1,y:1,z:0}:0, {x:1,y:1,z:1}:0 }", "tensor0 * tensor1", "{ {x:0,y:0}:1, 
{x:1,y:0}:3, {x:0,y:1}:5, {x:1,y:1}:0 }", "{ {y:0,z:0}:7, {y:1,z:0}:11, {y:0,z:1}:13, {y:1,z:1}:0 }"); tester.assertEvaluates("{ {x:0,y:1,z:0}:35, {x:0,y:1,z:1}:65 }", "tensor0 * tensor1", "tensor(x{},y{}):{ {x:0,y:0}:1, {x:1,y:0}:3, {x:0,y:1}:5 }", "tensor(y{},z{}):{ {y:1,z:0}:7, {y:2,z:0}:11, {y:1,z:1}:13 })"); tester.assertEvaluates("{{x:0,y:0}:0.0}","tensor1 * tensor2 * tensor3", "{ {x:0}:1 }", "{ {x:1,y:0}:1, {x:0,y:0}:1 }", "{ {x:0,y:0}:1 }"); tester.assertEvaluates("{ {d1:0}:50, {d1:1}:500, {d1:2}:5000 }", "5 * tensor0", "{ {d1:0}:10, {d1:1}:100, {d1:2}:1000 }"); tester.assertEvaluates("{ {d1:0}:13, {d1:1}:103, {d1:2}:1003 }", "tensor0 + 3","{ {d1:0}:10, {d1:1}:100, {d1:2}:1000 }"); tester.assertEvaluates("{ {d1:0}:1, {d1:1}:10, {d1:2 }:100 }", "tensor0 / 10", "{ {d1:0}:10, {d1:1}:100, {d1:2}:1000 }"); tester.assertEvaluates("{ {h:0}:1.5, {h:1}:1.5 }", "0.5 + tensor0", "{ {h:0}:1.0,{h:1}:1.0 }"); tester.assertEvaluates("{ {x:0,y:0}:0, {x:1,y:0}:0 }", "atan2(tensor0, tensor1)", "{ {x:0}:0, {x:1}:0 }", "{ {y:0}:1 }"); tester.assertEvaluates("{ {x:0,y:0}:2, {x:1,y:0}:7 }", "hamming(tensor0, tensor1)", "{ {x:0}:97, {x:1}:-1 }", "{ {y:0}:1 }"); tester.assertEvaluates("{ {x:0,y:0}:0, {x:1,y:0}:1 }", "tensor0 > tensor1", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:5 }"); tester.assertEvaluates("{ {x:0,y:0}:1, {x:1,y:0}:0 }", "tensor0 < tensor1", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:5 }"); tester.assertEvaluates("{ {x:0,y:0}:0, {x:1,y:0}:1 }", "tensor0 >= tensor1", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:5 }"); tester.assertEvaluates("{ {x:0,y:0}:1, {x:1,y:0}:0 }", "tensor0 <= tensor1", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:5 }"); tester.assertEvaluates("{ {x:0,y:0}:0, {x:1,y:0}:1 }", "tensor0 == tensor1", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:7 }"); tester.assertEvaluates("{ {x:0,y:0}:0, {x:1,y:0}:1 }", "tensor0 ~= tensor1", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:7 }"); tester.assertEvaluates("{ {x:0,y:0}:1, {x:1,y:0}:0 }", "tensor0 != tensor1", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:7 }"); 
tester.assertEvaluates("{ {x:0}:1, {x:1}:0 }", "tensor0 in [1,2,3]", "{ {x:0}:3, {x:1}:7 }"); tester.assertEvaluates("{ {x:0}:0.1 }", "join(tensor0, 0.1, f(x,y) (x*y))", "{ {x:0}:1 }"); tester.assertEvaluates("{ {x:0}:15, {x:1}:4 }", "merge(tensor0, tensor1, f(x,y) (x*y))", "{ {x:0}:3 }", "{ {x:0}:5, {x:1}:4 }"); tester.assertEvaluates("{ }", "merge(tensor0, tensor1, f(x,y) (x*y))", "{}"); tester.assertEvaluates("{ {x:0,y:0}:1, {x:1,y:0}:0 }", "tensor0 != tensor1", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:7 }"); tester.assertEvaluates("{ {newX:0,y:0}:3 }", "rename(tensor0, x, newX)", "{ {x:0,y:0}:3.0 }"); tester.assertEvaluates("{ {x:0,y:0}:3, {x:1,y:0}:5 }", "rename(tensor0, (x, y), (y, x))", "{ {x:0,y:0}:3.0, {x:0,y:1}:5.0 }"); tester.assertEvaluates("{ {x:0,y:0}:0, {x:1,y:0}:0, {x:0,y:1}:1, {x:1,y:1}:0, {x:0,y:2}:0, {x:1,y:2}:1 }", "tensor(x[2],y[3])(x+1==y)"); tester.assertEvaluates("{ {y:0,x:0}:0, {y:1,x:0}:0, {y:0,x:1}:1, {y:1,x:1}:0, {y:0,x:2}:0, {y:1,x:2}:1 }", "tensor(y[2],x[3])(y+1==x)"); tester.assertEvaluates("{ {x:0,y:0,z:0}:1 }", "tensor(x[1],y[1],z[1])((x==y)*(y==z))"); tester.assertEvaluates("{ {x:0}:0, {x:1}:1, {x:2}:2 }", "range(x[3])"); tester.assertEvaluates("{ {x:0,y:0,z:0}:1, {x:0,y:0,z:1}:0, {x:0,y:1,z:0}:0, {x:0,y:1,z:1}:0, {x:1,y:0,z:0}:0, {x:1,y:0,z:1}:0, {x:1,y:1,z:0}:0, {x:1,y:1,z:1}:1, }", "diag(x[2],y[2],z[2])"); tester.assertEvaluates("6", "reduce(random(x[2],y[3]), count)"); tester.assertEvaluates("tensor(x[2]):[0.0, 2.0]", "tensor(x[2]):{{x:0}:tensor(y[2]):{{y:0}:((0+0)+a)," + "{y:1}:((0+1)+a)}{y:0}," + "{x:1}:tensor(y[2]):{{y:0}:((1+0)+a)," + "{y:1}:((1+1)+a)}{y:1}" + "}"); tester.assertEvaluates("3.0", "tensor0{x:1}", "{ {x:0}:1, {x:1}:3 }"); tester.assertEvaluates("1.2", "tensor0{key:foo,x:0}", true, "{ {key:foo,x:0}:1.2, {key:bar,x:0}:3 }"); tester.assertEvaluates("3.0", "tensor0{bar}", true, "{ {x:foo}:1, {x:bar}:3 }"); tester.assertEvaluates("3.3", "tensor0[2]", "tensor(values[4]):[1.1, 2.2, 3.3, 4.4]]"); 
tester.assertEvaluates("tensor(x[5]):[0, 1, 2, 3, 4]", "concat(tensor0, tensor1, x)", "tensor(x[2]):[0, 1]", "tensor(x[3]):[2, 3, 4])"); tester.assertEvaluates("{ {x:0}:0.25, {x:1}:0.75 }", "l1_normalize(tensor0, x)", "{ {x:0}:1, {x:1}:3 }"); tester.assertEvaluates("{ {x:0}:0.31622776601683794, {x:1}:0.9486832980505138 }", "l2_normalize(tensor0, x)", "{ {x:0}:1, {x:1}:3 }"); tester.assertEvaluates("{ {y:0}:81.0 }", "matmul(tensor0, tensor1, x)", "{ {x:0}:15, {x:1}:12 }", "{ {y:0}:3 }"); tester.assertEvaluates("{ {x:0}:0.5, {x:1}:0.5 }", "softmax(tensor0, x)", "{ {x:0}:1, {x:1}:1 }", "{ {y:0}:1 }"); tester.assertEvaluates("{ {x:0,y:0}:81.0, {x:1,y:0}:88.0 }", "xw_plus_b(tensor0, tensor1, tensor2, x)", "{ {x:0}:15, {x:1}:12 }", "{ {y:0}:3 }", "{ {x:0}:0, {x:1}:7 }"); tester.assertEvaluates("{ {x:0}:1, {x:1}:0, {x:2}:0, {x:3}:1 }", "argmax(tensor0, x)", "{ {x:0}:15, {x:1}:12, {x:2}:7, {x:3}:15 }"); tester.assertEvaluates("{ {x:0}:0, {x:1}:0, {x:2}:1, {x:3}:0 }", "argmin(tensor0, x)", "{ {x:0}:15, {x:1}:12, {x:2}:7, {x:3}:15 }"); tester.assertEvaluates("tensor(y{}):{{y:6}:0}}", "matmul(tensor0, diag(x[5],y[7]), x)", "tensor(x{},y{}):{{x:4,y:6}:1})"); tester.assertEvaluates("tensor(y{}):{{y:6}:10}}
class EvaluationTestCase { private final double tolerance = 0.000001; private void verifyStringValueToString(String s) { s = '"' + s + '"'; Value val = Value.parse(s); assertTrue(val instanceof StringValue); assertEquals(s, val.toString()); } @Test public void testStringValueToString() { verifyStringValueToString(""); verifyStringValueToString("something"); verifyStringValueToString("needs \\\" escape"); verifyStringValueToString("\\\\"); verifyStringValueToString("\\\""); verifyStringValueToString("\\f"); verifyStringValueToString("\\female"); verifyStringValueToString("\\n"); verifyStringValueToString("\\nude"); verifyStringValueToString("\\r"); verifyStringValueToString("fa\\rt"); verifyStringValueToString("\\t"); verifyStringValueToString("fe\\tish"); verifyStringValueToString("\\f"); verifyStringValueToString("\\\\hx"); verifyStringValueToString("\\\\xx"); verifyStringValueToString("\\\\x10081977"); } @Test public void testEvaluationOrder() { EvaluationTester tester = new EvaluationTester(); tester.assertEvaluates(-4, "1 + -2 + -3"); tester.assertEvaluates(2, "1 - (2 - 3)"); tester.assertEvaluates(-4, "(1 - 2) - 3"); tester.assertEvaluates(-4, "1 - 2 - 3"); } @Test public void testEvaluation() { EvaluationTester tester = new EvaluationTester(); tester.assertEvaluates(0.5, "0.5"); tester.assertEvaluates(-0.5, "-0.5"); tester.assertEvaluates(0.5, "one_half"); tester.assertEvaluates(-0.5, "-one_half"); tester.assertEvaluates(0, "nonexisting"); tester.assertEvaluates(0.75, "0.5 + 0.25"); tester.assertEvaluates(0.75, "one_half + a_quarter"); tester.assertEvaluates(1.25, "0.5 - 0.25 + one"); tester.assertEvaluates(9.0, "3 ^ 2"); tester.assertEvaluates(1, "if(\"a\"==\"a\",1,0)"); tester.assertEvaluates(26, "2*3+4*5"); tester.assertEvaluates(1, "2/6+4/6"); tester.assertEvaluates(2 * 3 * 4 + 3 * 4 * 5 - 4 * 200 / 10, "2*3*4+3*4*5-4*200/10"); tester.assertEvaluates(3, "1 + 10 % 6 / 2"); tester.assertEvaluates(10.0, "3 ^ 2 + 1"); tester.assertEvaluates(18.0, "2 * 3 ^ 
2"); tester.assertEvaluates(-4, "1 - 2 - 3"); tester.assertEvaluates(Math.pow(4, 9), "4^3^2"); tester.assertEvaluates(2 * (3 * 4 + 3) * (4 * 5 - 4 * 200) / 10, "2*(3*4+3)*(4*5-4*200)/10"); tester.assertEvaluates(0.5, "if( 2<3, one_half, one_quarter)"); tester.assertEvaluates(0.25,"if( 2>3, one_half, a_quarter)"); tester.assertEvaluates(0.5, "if( 1==1, one_half, a_quarter)"); tester.assertEvaluates(0.5, "if( 1<=1, one_half, a_quarter)"); tester.assertEvaluates(0.5, "if( 1<=1.1, one_half, a_quarter)"); tester.assertEvaluates(0.25,"if( 1>=1.1, one_half, a_quarter)"); tester.assertEvaluates(0.5, "if( 0.33333333333333333333~=1/3, one_half, a_quarter)"); tester.assertEvaluates(0.25,"if( 0.33333333333333333333~=1/35, one_half, a_quarter)"); tester.assertEvaluates(5.5, "if(one_half in [one_quarter,one_half], one_half+5,log(one_quarter) * one_quarter)"); tester.assertEvaluates(0.5, "if( 1 in [1,2 , 3], one_half, a_quarter)"); tester.assertEvaluates(0.25,"if( 1 in [ 2,3,4], one_half, a_quarter)"); tester.assertEvaluates(0.5, "if( \"foo\" in [\"foo\",\"bar\"], one_half, a_quarter)"); tester.assertEvaluates(0.5, "if( foo in [\"foo\",\"bar\"], one_half, a_quarter)"); tester.assertEvaluates(0.5, "if( \"foo\" in [foo,\"bar\"], one_half, a_quarter)"); tester.assertEvaluates(0.5, "if( foo in [foo,\"bar\"], one_half, a_quarter)"); tester.assertEvaluates(0.25,"if( \"foo\" in [\"baz\",\"boz\"], one_half, a_quarter)"); tester.assertEvaluates(0.5, "if( one in [0, 1, 2], one_half, a_quarter)"); tester.assertEvaluates(0.25,"if( one in [2], one_half, a_quarter)"); tester.assertEvaluates(2.5, "if(1.0, 2.5, 3.5)"); tester.assertEvaluates(3.5, "if(0.0, 2.5, 3.5)"); tester.assertEvaluates(2.5, "if(1.0-1.1, 2.5, 3.5)"); tester.assertEvaluates(3.5, "if(1.0-1.0, 2.5, 3.5)"); RankingExpression e = tester.assertEvaluates(3.5, "if(1.0-1.0, 2.5, 3.5, 0.3)"); assertEquals(0.3d, ((IfNode) e.getRoot()).getTrueProbability(), tolerance); tester.assertEvaluates(new BooleanValue(true), "2<3"); 
tester.assertEvaluates(new BooleanValue(false), "2>3"); tester.assertEvaluates(new BooleanValue(false), "if (3>2, 2>3, 5.0)"); tester.assertEvaluates(new BooleanValue(true), "2>3<1"); tester.assertEvaluates(2.5, "if(2>3<1, 2.5, 3.5)"); tester.assertEvaluates(2.5, "if(1+1>3<1+0, 2.5, 3.5)"); tester.assertEvaluates(0, "sin(0)"); tester.assertEvaluates(1, "cos(0)"); tester.assertEvaluates(8, "pow(4/2,min(cos(0)*3,5))"); tester.assertEvaluates(0, "random(1)"); tester.assertEvaluates(0, "random(foo)"); tester.assertEvaluates(1.25, "5*if(1>=1.1, one_half, if(min(1,2)<max(1,2),if (\"foo\" in [\"foo\",\"bar\"],a_quarter,3000), 0.57345347))"); } @Test public void testBooleanEvaluation() { EvaluationTester tester = new EvaluationTester(); tester.assertEvaluates(false, "false"); tester.assertEvaluates(true, "true"); tester.assertEvaluates(false, "0 && 0"); tester.assertEvaluates(false, "0 && 1"); tester.assertEvaluates(false, "1 && 0"); tester.assertEvaluates(true, "1 && 1"); tester.assertEvaluates(true, "1 && 2"); tester.assertEvaluates(true, "1 && 0.1"); tester.assertEvaluates(false, "0 || 0"); tester.assertEvaluates(true, "0 || 0.1"); tester.assertEvaluates(true, "0 || 1"); tester.assertEvaluates(true, "1 || 0"); tester.assertEvaluates(true, "1 || 1"); tester.assertEvaluates(true, "!0"); tester.assertEvaluates(false, "!1"); tester.assertEvaluates(false, "!2"); tester.assertEvaluates(true, "!0 && 1"); tester.assertEvaluates(0, "2 * (0 && 1)"); tester.assertEvaluates(2, "2 * (1 && 1)"); tester.assertEvaluates(true, "2 + 0 && 1"); tester.assertEvaluates(true, "1 && 0 + 2"); } @Test @Test public void testMapSubspaces() { EvaluationTester tester = new EvaluationTester(); tester.assertEvaluates("tensor<float>(a{},x[2]):{foo:[2,3],bar:[7,10]}", "map_subspaces(tensor0, f(t)(t))", "tensor<float>(a{},x[2]):{foo:[2,3],bar:[7,10]}"); tester.assertEvaluates("tensor<float>(a{},x[2]):{foo:[2,3],bar:[7,10]}", "map_subspaces(tensor0, f(t)(t+2))", 
"tensor<float>(a{},x[2]):{foo:[0,1],bar:[5,8]}"); tester.assertEvaluates("tensor<float>(a{},y[2]):{foo:[3,5],bar:[9,11]}", "map_subspaces(tensor0, f(t)(tensor<float>(y[2])(t{x:(y)}+t{x:(y+1)})))", "tensor(a{},x[3]):{foo:[1,2,3],bar:[4,5,6]}"); tester.assertEvaluates("tensor<double>(a{},x[2]):{foo:[3,5],bar:[9,11]}", "map_subspaces(tensor0, f(t)(tensor(x[2])(t{x:(x)}+t{x:(x+1)})))", "tensor<float>(a{},x[3]):{foo:[1,2,3],bar:[4,5,6]}"); } @Test public void testTensorEvaluation() { EvaluationTester tester = new EvaluationTester(); tester.assertEvaluates("{}", "tensor0", "{}"); tester.assertEvaluates("{ {d1:0}:1, {d1:1}:2, {d1:2 }:3 }", "map(tensor0, f(x) (log10(x)))", "{ {d1:0}:10, {d1:1}:100, {d1:2}:1000 }"); tester.assertEvaluates("{ {d1:0}:4, {d1:1}:9, {d1:2 }:16 }", "map(tensor0, f(x) (x * x))", "{ {d1:0}:2, {d1:1}:3, {d1:2}:4 }"); tester.assertEvaluates("{ {d1:0}:0, {d1:1}:1, {d1:2 }:0 }", "tensor0 == 3", "{ {d1:0}:2, {d1:1}:3, {d1:2}:4 }"); tester.assertEvaluates("{ {d1:0}:0, {d1:1}:1, {d1:2 }:0 }", "3 == tensor0", "{ {d1:0}:2, {d1:1}:3, {d1:2}:4 }"); tester.assertEvaluates("{ {d1:0}:1, {d1:1}:2, {d1:2 }:3 }", "log10(tensor0)", "{ {d1:0}:10, {d1:1}:100, {d1:2}:1000 }"); tester.assertEvaluates("{ {d1:0}:-10, {d1:1}:-100, {d1:2 }:-1000 }", "- tensor0", "{ {d1:0}:10, {d1:1}:100, {d1:2}:1000 }"); tester.assertEvaluates("{ {d1:0}:-10, {d1:1}:0, {d1:2 }:0 }", "min(tensor0, 0)", "{ {d1:0}:-10, {d1:1}:0, {d1:2}:10 }"); tester.assertEvaluates("{ {d1:0}:0, {d1:1}:0, {d1:2 }:10 }", "max(tensor0, 0)", "{ {d1:0}:-10, {d1:1}:0, {d1:2}:10 }"); tester.assertEvaluates("{ {d1:0}:1, {d1:1}:1, {d1:2 }:1 }", "tensor0 % 2 == map(tensor0, f(x) (x % 2))", "{ {d1:0}:2, {d1:1}:3, {d1:2}:4 }"); tester.assertEvaluates("{ {d1:0}:1, {d1:1}:1, {d1:2 }:1 }", "(tensor0 || 1) == map(tensor0, f(x) (x || 1))", "{ {d1:0}:2, {d1:1}:3, {d1:2}:4 }"); tester.assertEvaluates("{ {d1:0}:1, {d1:1}:1, {d1:2 }:1 }", "(tensor0 && 1) == map(tensor0, f(x) (x && 1))", "{ {d1:0}:2, {d1:1}:3, {d1:2}:4 }"); 
tester.assertEvaluates("{ {d1:0}:1, {d1:1}:1, {d1:2 }:1 }", "!tensor0 == map(tensor0, f(x) (!x))", "{ {d1:0}:0, {d1:1}:1, {d1:2}:0 }"); tester.assertEvaluates("{ {x:0}:1, {x:1}:2 }", "abs(tensor0)", "{ {x:0}:1, {x:1}:-2 }"); tester.assertEvaluates("{ {x:0}:0, {x:1}:0 }", "acos(tensor0)", "{ {x:0}:1, {x:1}:1 }"); tester.assertEvaluates("{ {x:0}:0, {x:1}:0 }", "asin(tensor0)", "{ {x:0}:0, {x:1}:0 }"); tester.assertEvaluates("{ {x:0}:0, {x:1}:0 }", "atan(tensor0)", "{ {x:0}:0, {x:1}:0 }"); tester.assertEvaluates("{ {x:0}:1, {x:1}:2 }", "ceil(tensor0)", "{ {x:0}:1, {x:1}:2 }"); tester.assertEvaluates("{ {x:0}:1, {x:1}:1 }", "cos(tensor0)", "{ {x:0}:0, {x:1}:0 }"); tester.assertEvaluates("{ {x:0}:1, {x:1}:1 }", "cosh(tensor0)", "{ {x:0}:0, {x:1}:0 }"); tester.assertEvaluates("{ {x:0}:1, {x:1}:2 }", "elu(tensor0)", "{ {x:0}:1, {x:1}:2 }"); tester.assertEvaluates("{ {x:0}:1, {x:1}:1 }", "exp(tensor0)", "{ {x:0}:0, {x:1}:0 }"); tester.assertEvaluates("{ {x:0}:1, {x:1}:2 }", "fabs(tensor0)", "{ {x:0}:1, {x:1}:2 }"); tester.assertEvaluates("{ {x:0}:1, {x:1}:2 }", "floor(tensor0)", "{ {x:0}:1, {x:1}:2 }"); tester.assertEvaluates("{ {x:0}:0, {x:1}:0 }", "isNan(tensor0)", "{ {x:0}:1, {x:1}:2 }"); tester.assertEvaluates("{ {x:0}:0, {x:1}:0 }", "log(tensor0)", "{ {x:0}:1, {x:1}:1 }"); tester.assertEvaluates("{ {x:0}:0, {x:1}:1 }", "log10(tensor0)", "{ {x:0}:1, {x:1}:10 }"); tester.assertEvaluates("{ {x:0}:0, {x:1}:2 }", "fmod(tensor0, 3)","{ {x:0}:3, {x:1}:8 }"); tester.assertEvaluates("{ {x:0}:1, {x:1}:8 }", "pow(tensor0, 3)", "{ {x:0}:1, {x:1}:2 }"); tester.assertEvaluates("{ {x:0}:8, {x:1}:16 }", "ldexp(tensor0,3.1)","{ {x:0}:1, {x:1}:2 }"); tester.assertEvaluates("{ {x:0}:1, {x:1}:2 }", "relu(tensor0)", "{ {x:0}:1, {x:1}:2 }"); tester.assertEvaluates("{ {x:0}:1, {x:1}:2 }", "round(tensor0)", "{ {x:0}:1, {x:1}:1.8 }"); tester.assertEvaluates("{ {x:0}:0.5, {x:1}:0.5 }", "sigmoid(tensor0)","{ {x:0}:0, {x:1}:0 }"); tester.assertEvaluates("{ {x:0}:1, {x:1}:-1 }", "sign(tensor0)", 
"{ {x:0}:3, {x:1}:-5 }"); tester.assertEvaluates("{ {x:0}:0, {x:1}:0 }", "sin(tensor0)", "{ {x:0}:0, {x:1}:0 }"); tester.assertEvaluates("{ {x:0}:0, {x:1}:0 }", "sinh(tensor0)", "{ {x:0}:0, {x:1}:0 }"); tester.assertEvaluates("{ {x:0}:1, {x:1}:4 }", "square(tensor0)", "{ {x:0}:1, {x:1}:2 }"); tester.assertEvaluates("{ {x:0}:1, {x:1}:3 }", "sqrt(tensor0)", "{ {x:0}:1, {x:1}:9 }"); tester.assertEvaluates("{ {x:0}:0, {x:1}:0 }", "tan(tensor0)", "{ {x:0}:0, {x:1}:0 }"); tester.assertEvaluates("{ {x:0}:0, {x:1}:0 }", "tanh(tensor0)", "{ {x:0}:0, {x:1}:0 }"); tester.assertEvaluates("{ {}:4 }", "reduce(tensor0, avg, x, y)", "{ {x:0,y:0}:1.0, {x:1,y:0}:3.0, {x:0,y:1}:5.0, {x:1,y:1}:7.0 }"); tester.assertEvaluates("{ {}:4 }", "reduce(tensor0, count, x, y)", "{ {x:0,y:0}:1.0, {x:1,y:0}:3.0, {x:0,y:1}:5.0, {x:1,y:1}:7.0 }"); tester.assertEvaluates("{ {}:7 }", "reduce(tensor0, max, x, y)", "{ {x:0,y:0}:1.0, {x:1,y:0}:3.0, {x:0,y:1}:5.0, {x:1,y:1}:7.0 }"); tester.assertEvaluates("{ {}:4 }", "reduce(tensor0, median, x, y)", "{ {x:0,y:0}:1.0, {x:1,y:0}:3.0, {x:0,y:1}:5.0, {x:1,y:1}:7.0 }"); tester.assertEvaluates("{ {}:1 }", "reduce(tensor0, min, x, y)", "{ {x:0,y:0}:1.0, {x:1,y:0}:3.0, {x:0,y:1}:5.0, {x:1,y:1}:7.0 }"); tester.assertEvaluates("{ {}:105 }", "reduce(tensor0, prod, x, y)", "{ {x:0,y:0}:1.0, {x:1,y:0}:3.0, {x:0,y:1}:5.0, {x:1,y:1}:7.0 }"); tester.assertEvaluates("{ {}:16 }", "reduce(tensor0, sum, x, y)", "{ {x:0,y:0}:1.0, {x:1,y:0}:3.0, {x:0,y:1}:5.0, {x:1,y:1}:7.0 }"); tester.assertEvaluates("{ {}:4 }", "reduce(tensor0, avg)", "{ {x:0,y:0}:1.0, {x:1,y:0}:3.0, {x:0,y:1}:5.0, {x:1,y:1}:7.0 }"); tester.assertEvaluates("{ {y:0}:2, {y:1}:6 }", "reduce(tensor0, avg, x)", "{ {x:0,y:0}:1.0, {x:1,y:0}:3.0, {x:0,y:1}:5.0, {x:1,y:1}:7.0 }"); tester.assertEvaluates("{ {y:0}:2, {y:1}:2 }", "reduce(tensor0, count, x)", "{ {x:0,y:0}:1.0, {x:1,y:0}:3.0, {x:0,y:1}:5.0, {x:1,y:1}:7.0 }"); tester.assertEvaluates("{ {y:0}:3, {y:1}:35 }", "reduce(tensor0, prod, x)", "{ {x:0,y:0}:1.0, 
{x:1,y:0}:3.0, {x:0,y:1}:5.0, {x:1,y:1}:7.0 }"); tester.assertEvaluates("{ {y:0}:4, {y:1}:12 }", "reduce(tensor0, sum, x)", "{ {x:0,y:0}:1.0, {x:1,y:0}:3.0, {x:0,y:1}:5.0, {x:1,y:1}:7.0 }"); tester.assertEvaluates("{ {y:0}:3, {y:1}:7 }", "reduce(tensor0, max, x)", "{ {x:0,y:0}:1.0, {x:1,y:0}:3.0, {x:0,y:1}:5.0, {x:1,y:1}:7.0 }"); tester.assertEvaluates("{ {y:0}:1, {y:1}:5 }", "reduce(tensor0, min, x)", "{ {x:0,y:0}:1.0, {x:1,y:0}:3.0, {x:0,y:1}:5.0, {x:1,y:1}:7.0 }"); tester.assertEvaluates("{ {}: 5 }", "sum(tensor0)", "5.0"); tester.assertEvaluates("{ {}:-5 }", "sum(tensor0)", "-5.0"); tester.assertEvaluates("{ {}:12.5 }", "sum(tensor0)", "{ {d1:0}:5.5, {d1:1}:7.0 }"); tester.assertEvaluates("{ {}: 0 }", "sum(tensor0)", "{ {d1:0}:5.0, {d1:1}:7.0, {d1:2}:-12.0}"); tester.assertEvaluates("{ {}: 8.0 }", "avg(tensor0)", "{ {d1:0}:5.0, {d1:1}:7.0, {d1:2}:12.0}"); tester.assertEvaluates("{ {}: 5.0 }", "median(tensor0)", "{ {d1:0}:5.0, {d1:1}:7.0, {d1:2}:-12.0}"); tester.assertEvaluates("{ {y:0}:4, {y:1}:12.0 }", "sum(tensor0, x)", "{ {x:0,y:0}:1.0, {x:1,y:0}:3.0, {x:0,y:1}:5.0, {x:1,y:1}:7.0 }"); tester.assertEvaluates("{ {x:0}:6, {x:1}:10.0 }", "sum(tensor0, y)", "{ {x:0,y:0}:1.0, {x:1,y:0}:3.0, {x:0,y:1}:5.0, {x:1,y:1}:7.0 }"); tester.assertEvaluates("{ {}:16 }", "sum(tensor0, x, y)", "{ {x:0,y:0}:1.0, {x:1,y:0}:3.0, {x:0,y:1}:5.0, {x:1,y:1}:7.0 }"); tester.assertEvaluates("{ {}: -1 }", "reduce(tensor0, max)", "tensor(x[2]):[-2,-1]"); tester.assertEvaluates("{ {x:0,y:0}:15, {x:1,y:0}:35 }", "join(tensor0, tensor1, f(x,y) (x*y))", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:5 }"); tester.assertEvaluates("{ {x:0,y:0}:6, {x:1,y:0}:14 }", "join(tensor0, tensor1, f(x,y) (x+x))", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:5 }"); tester.assertEvaluates("{ {x:0}:2, {x:1}:-3 }", "join(tensor0, tensor1, f(x,y) (y-x))", "{ {x:0}:3, {x:1}:7 }", "{ {x:0}:5, {x:1}:4 }"); tester.assertEvaluates("{ }", "tensor0 * tensor0", "{}"); tester.assertEvaluates("{{x:0,y:0,z:0}:0.0}", "( tensor0 * tensor1 ) * ( 
tensor2 * tensor1 )", "{{x:0}:1}", "{}", "{{y:0,z:0}:1}"); tester.assertEvaluates("tensor(x{}):{}", "tensor0 * tensor1", "{ {x:0}:3 }", "tensor(x{}):{ {x:1}:5 }"); tester.assertEvaluates("tensor<double>(x{}):{}", "tensor0 * tensor1", "{ {x:0}:3 }", "tensor<float>(x{}):{ {x:1}:5 }"); tester.assertEvaluates("{ {x:0}:15 }", "tensor0 * tensor1", "{ {x:0}:3 }", "{ {x:0}:5 }"); tester.assertEvaluates("{ {x:0,y:0}:15 }", "tensor0 * tensor1", "{ {x:0}:3 }", "{ {y:0}:5 }"); tester.assertEvaluates("{ {x:0,y:0}:15, {x:1,y:0}:35 }", "tensor0 * tensor1", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:5 }"); tester.assertEvaluates("{ {x:0,y:0}:8, {x:1,y:0}:12 }", "tensor0 + tensor1", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:5 }"); tester.assertEvaluates("{ {x:0,y:0}:-2, {x:1,y:0}:2 }", "tensor0 - tensor1", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:5 }"); tester.assertEvaluates("{ {x:0,y:0}:5, {x:1,y:0}:4 }", "tensor0 / tensor1", "{ {x:0}:15, {x:1}:12 }", "{ {y:0}:3 }"); tester.assertEvaluates("{ {x:0,y:0}:5, {x:1,y:0}:7 }", "max(tensor0, tensor1)", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:5 }"); tester.assertEvaluates("{ {x:0,y:0}:3, {x:1,y:0}:5 }", "min(tensor0, tensor1)", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:5 }"); tester.assertEvaluates("{ {x:0,y:0}:243, {x:1,y:0}:16807 }", "pow(tensor0, tensor1)", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:5 }"); tester.assertEvaluates("{ {x:0,y:0}:243, {x:1,y:0}:16807 }", "tensor0 ^ tensor1", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:5 }"); tester.assertEvaluates("{ {x:0,y:0}:3, {x:1,y:0}:2 }", "fmod(tensor0, tensor1)", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:5 }"); tester.assertEvaluates("{ {x:0,y:0}:3, {x:1,y:0}:2 }", "tensor0 % tensor1", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:5 }"); tester.assertEvaluates("{ {x:0,y:0}:96, {x:1,y:0}:224 }", "ldexp(tensor0, tensor1)", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:5.1 }"); tester.assertEvaluates("{ {x:0,y:0,z:0}:7, {x:0,y:0,z:1}:13, {x:1,y:0,z:0}:21, {x:1,y:0,z:1}:39, {x:0,y:1,z:0}:55, {x:0,y:1,z:1}:0, {x:1,y:1,z:0}:0, {x:1,y:1,z:1}:0 }", "tensor0 * tensor1", "{ {x:0,y:0}:1, 
{x:1,y:0}:3, {x:0,y:1}:5, {x:1,y:1}:0 }", "{ {y:0,z:0}:7, {y:1,z:0}:11, {y:0,z:1}:13, {y:1,z:1}:0 }"); tester.assertEvaluates("{ {x:0,y:1,z:0}:35, {x:0,y:1,z:1}:65 }", "tensor0 * tensor1", "tensor(x{},y{}):{ {x:0,y:0}:1, {x:1,y:0}:3, {x:0,y:1}:5 }", "tensor(y{},z{}):{ {y:1,z:0}:7, {y:2,z:0}:11, {y:1,z:1}:13 })"); tester.assertEvaluates("{{x:0,y:0}:0.0}","tensor1 * tensor2 * tensor3", "{ {x:0}:1 }", "{ {x:1,y:0}:1, {x:0,y:0}:1 }", "{ {x:0,y:0}:1 }"); tester.assertEvaluates("{ {d1:0}:50, {d1:1}:500, {d1:2}:5000 }", "5 * tensor0", "{ {d1:0}:10, {d1:1}:100, {d1:2}:1000 }"); tester.assertEvaluates("{ {d1:0}:13, {d1:1}:103, {d1:2}:1003 }", "tensor0 + 3","{ {d1:0}:10, {d1:1}:100, {d1:2}:1000 }"); tester.assertEvaluates("{ {d1:0}:1, {d1:1}:10, {d1:2 }:100 }", "tensor0 / 10", "{ {d1:0}:10, {d1:1}:100, {d1:2}:1000 }"); tester.assertEvaluates("{ {h:0}:1.5, {h:1}:1.5 }", "0.5 + tensor0", "{ {h:0}:1.0,{h:1}:1.0 }"); tester.assertEvaluates("{ {x:0,y:0}:0, {x:1,y:0}:0 }", "atan2(tensor0, tensor1)", "{ {x:0}:0, {x:1}:0 }", "{ {y:0}:1 }"); tester.assertEvaluates("{ {x:0,y:0}:2, {x:1,y:0}:7 }", "hamming(tensor0, tensor1)", "{ {x:0}:97, {x:1}:-1 }", "{ {y:0}:1 }"); tester.assertEvaluates("{ {x:0,y:0}:0, {x:1,y:0}:1 }", "tensor0 > tensor1", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:5 }"); tester.assertEvaluates("{ {x:0,y:0}:1, {x:1,y:0}:0 }", "tensor0 < tensor1", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:5 }"); tester.assertEvaluates("{ {x:0,y:0}:0, {x:1,y:0}:1 }", "tensor0 >= tensor1", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:5 }"); tester.assertEvaluates("{ {x:0,y:0}:1, {x:1,y:0}:0 }", "tensor0 <= tensor1", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:5 }"); tester.assertEvaluates("{ {x:0,y:0}:0, {x:1,y:0}:1 }", "tensor0 == tensor1", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:7 }"); tester.assertEvaluates("{ {x:0,y:0}:0, {x:1,y:0}:1 }", "tensor0 ~= tensor1", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:7 }"); tester.assertEvaluates("{ {x:0,y:0}:1, {x:1,y:0}:0 }", "tensor0 != tensor1", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:7 }"); 
tester.assertEvaluates("{ {x:0}:1, {x:1}:0 }", "tensor0 in [1,2,3]", "{ {x:0}:3, {x:1}:7 }"); tester.assertEvaluates("{ {x:0}:0.1 }", "join(tensor0, 0.1, f(x,y) (x*y))", "{ {x:0}:1 }"); tester.assertEvaluates("{ {x:0}:15, {x:1}:4 }", "merge(tensor0, tensor1, f(x,y) (x*y))", "{ {x:0}:3 }", "{ {x:0}:5, {x:1}:4 }"); tester.assertEvaluates("{ }", "merge(tensor0, tensor1, f(x,y) (x*y))", "{}"); tester.assertEvaluates("{ {x:0,y:0}:1, {x:1,y:0}:0 }", "tensor0 != tensor1", "{ {x:0}:3, {x:1}:7 }", "{ {y:0}:7 }"); tester.assertEvaluates("{ {newX:0,y:0}:3 }", "rename(tensor0, x, newX)", "{ {x:0,y:0}:3.0 }"); tester.assertEvaluates("{ {x:0,y:0}:3, {x:1,y:0}:5 }", "rename(tensor0, (x, y), (y, x))", "{ {x:0,y:0}:3.0, {x:0,y:1}:5.0 }"); tester.assertEvaluates("{ {x:0,y:0}:0, {x:1,y:0}:0, {x:0,y:1}:1, {x:1,y:1}:0, {x:0,y:2}:0, {x:1,y:2}:1 }", "tensor(x[2],y[3])(x+1==y)"); tester.assertEvaluates("{ {y:0,x:0}:0, {y:1,x:0}:0, {y:0,x:1}:1, {y:1,x:1}:0, {y:0,x:2}:0, {y:1,x:2}:1 }", "tensor(y[2],x[3])(y+1==x)"); tester.assertEvaluates("{ {x:0,y:0,z:0}:1 }", "tensor(x[1],y[1],z[1])((x==y)*(y==z))"); tester.assertEvaluates("{ {x:0}:0, {x:1}:1, {x:2}:2 }", "range(x[3])"); tester.assertEvaluates("{ {x:0,y:0,z:0}:1, {x:0,y:0,z:1}:0, {x:0,y:1,z:0}:0, {x:0,y:1,z:1}:0, {x:1,y:0,z:0}:0, {x:1,y:0,z:1}:0, {x:1,y:1,z:0}:0, {x:1,y:1,z:1}:1, }", "diag(x[2],y[2],z[2])"); tester.assertEvaluates("6", "reduce(random(x[2],y[3]), count)"); tester.assertEvaluates("tensor(x[2]):[0.0, 2.0]", "tensor(x[2]):{{x:0}:tensor(y[2]):{{y:0}:((0+0)+a)," + "{y:1}:((0+1)+a)}{y:0}," + "{x:1}:tensor(y[2]):{{y:0}:((1+0)+a)," + "{y:1}:((1+1)+a)}{y:1}" + "}"); tester.assertEvaluates("3.0", "tensor0{x:1}", "{ {x:0}:1, {x:1}:3 }"); tester.assertEvaluates("1.2", "tensor0{key:foo,x:0}", true, "{ {key:foo,x:0}:1.2, {key:bar,x:0}:3 }"); tester.assertEvaluates("3.0", "tensor0{bar}", true, "{ {x:foo}:1, {x:bar}:3 }"); tester.assertEvaluates("3.3", "tensor0[2]", "tensor(values[4]):[1.1, 2.2, 3.3, 4.4]]"); 
tester.assertEvaluates("tensor(x[5]):[0, 1, 2, 3, 4]", "concat(tensor0, tensor1, x)", "tensor(x[2]):[0, 1]", "tensor(x[3]):[2, 3, 4])"); tester.assertEvaluates("{ {x:0}:0.25, {x:1}:0.75 }", "l1_normalize(tensor0, x)", "{ {x:0}:1, {x:1}:3 }"); tester.assertEvaluates("{ {x:0}:0.31622776601683794, {x:1}:0.9486832980505138 }", "l2_normalize(tensor0, x)", "{ {x:0}:1, {x:1}:3 }"); tester.assertEvaluates("{ {y:0}:81.0 }", "matmul(tensor0, tensor1, x)", "{ {x:0}:15, {x:1}:12 }", "{ {y:0}:3 }"); tester.assertEvaluates("{ {x:0}:0.5, {x:1}:0.5 }", "softmax(tensor0, x)", "{ {x:0}:1, {x:1}:1 }", "{ {y:0}:1 }"); tester.assertEvaluates("{ {x:0,y:0}:81.0, {x:1,y:0}:88.0 }", "xw_plus_b(tensor0, tensor1, tensor2, x)", "{ {x:0}:15, {x:1}:12 }", "{ {y:0}:3 }", "{ {x:0}:0, {x:1}:7 }"); tester.assertEvaluates("{ {x:0}:1, {x:1}:0, {x:2}:0, {x:3}:1 }", "argmax(tensor0, x)", "{ {x:0}:15, {x:1}:12, {x:2}:7, {x:3}:15 }"); tester.assertEvaluates("{ {x:0}:0, {x:1}:0, {x:2}:1, {x:3}:0 }", "argmin(tensor0, x)", "{ {x:0}:15, {x:1}:12, {x:2}:7, {x:3}:15 }"); tester.assertEvaluates("tensor(y{}):{{y:6}:0}}", "matmul(tensor0, diag(x[5],y[7]), x)", "tensor(x{},y{}):{{x:4,y:6}:1})"); tester.assertEvaluates("tensor(y{}):{{y:6}:10}}
Consider outputting expected vs actual values so its easier to pinpoint the issue if this happens.
private MixedTensor(TensorType type, List<DenseBlock> cellBlocks, Index index) { this.type = type; this.blockSize = index.denseSubspaceSize(); this.cellBlocks = List.copyOf(cellBlocks); this.index = index; if (this.blockSize < 1) { throw new IllegalStateException("invalid dense subspace size " + blockSize); } long count = 0; for (var block : this.cellBlocks) { if (index.sparseMap.get(block.sparseAddr) != count) { throw new IllegalStateException("map vs list mismatch"); } if (block.cells.length != blockSize) { throw new IllegalStateException("block length mismatch"); } ++count; } if (count != index.sparseMap.size()) { throw new IllegalStateException("map vs list size mismatch"); } }
throw new IllegalStateException("block length mismatch");
private MixedTensor(TensorType type, List<DenseBlock> cellBlocks, Index index) { this.type = type; this.blockSize = index.denseSubspaceSize(); this.cellBlocks = List.copyOf(cellBlocks); this.index = index; if (this.blockSize < 1) { throw new IllegalStateException("invalid dense subspace size: " + blockSize); } long count = 0; for (var block : this.cellBlocks) { if (index.sparseMap.get(block.sparseAddr) != count) { throw new IllegalStateException("map vs list mismatch: block + count + " address maps to + index.sparseMap.get(block.sparseAddr)); } if (block.cells.length != blockSize) { throw new IllegalStateException("dense subspace size mismatch, expected " + blockSize + " cells, but got: " + block.cells.length); } ++count; } if (count != index.sparseMap.size()) { throw new IllegalStateException("mismatch: list size is " + count + " but map size is " + index.sparseMap.size()); } }
class DenseBlock { final TensorAddress sparseAddr; final double[] cells; DenseBlock(TensorAddress sparseAddr, double[] cells) { this.sparseAddr = sparseAddr; this.cells = cells; } @Override public int hashCode() { return Objects.hash(sparseAddr, cells); } @Override public boolean equals(Object other) { if (other instanceof DenseBlock o) { return sparseAddr.equals(o.sparseAddr) && Arrays.equals(cells, o.cells); } return false; } }
class DenseBlock { final TensorAddress sparseAddr; final double[] cells; DenseBlock(TensorAddress sparseAddr, double[] cells) { this.sparseAddr = sparseAddr; this.cells = cells; } @Override public int hashCode() { return Objects.hash(sparseAddr, cells); } @Override public boolean equals(Object other) { if (other instanceof DenseBlock o) { return sparseAddr.equals(o.sparseAddr) && Arrays.equals(cells, o.cells); } return false; } }
Consider outputting expected vs actual values so its easier to pinpoint the issue if this happens.
private MixedTensor(TensorType type, List<DenseBlock> cellBlocks, Index index) { this.type = type; this.blockSize = index.denseSubspaceSize(); this.cellBlocks = List.copyOf(cellBlocks); this.index = index; if (this.blockSize < 1) { throw new IllegalStateException("invalid dense subspace size " + blockSize); } long count = 0; for (var block : this.cellBlocks) { if (index.sparseMap.get(block.sparseAddr) != count) { throw new IllegalStateException("map vs list mismatch"); } if (block.cells.length != blockSize) { throw new IllegalStateException("block length mismatch"); } ++count; } if (count != index.sparseMap.size()) { throw new IllegalStateException("map vs list size mismatch"); } }
throw new IllegalStateException("map vs list size mismatch");
private MixedTensor(TensorType type, List<DenseBlock> cellBlocks, Index index) { this.type = type; this.blockSize = index.denseSubspaceSize(); this.cellBlocks = List.copyOf(cellBlocks); this.index = index; if (this.blockSize < 1) { throw new IllegalStateException("invalid dense subspace size: " + blockSize); } long count = 0; for (var block : this.cellBlocks) { if (index.sparseMap.get(block.sparseAddr) != count) { throw new IllegalStateException("map vs list mismatch: block + count + " address maps to + index.sparseMap.get(block.sparseAddr)); } if (block.cells.length != blockSize) { throw new IllegalStateException("dense subspace size mismatch, expected " + blockSize + " cells, but got: " + block.cells.length); } ++count; } if (count != index.sparseMap.size()) { throw new IllegalStateException("mismatch: list size is " + count + " but map size is " + index.sparseMap.size()); } }
class DenseBlock { final TensorAddress sparseAddr; final double[] cells; DenseBlock(TensorAddress sparseAddr, double[] cells) { this.sparseAddr = sparseAddr; this.cells = cells; } @Override public int hashCode() { return Objects.hash(sparseAddr, cells); } @Override public boolean equals(Object other) { if (other instanceof DenseBlock o) { return sparseAddr.equals(o.sparseAddr) && Arrays.equals(cells, o.cells); } return false; } }
class DenseBlock { final TensorAddress sparseAddr; final double[] cells; DenseBlock(TensorAddress sparseAddr, double[] cells) { this.sparseAddr = sparseAddr; this.cells = cells; } @Override public int hashCode() { return Objects.hash(sparseAddr, cells); } @Override public boolean equals(Object other) { if (other instanceof DenseBlock o) { return sparseAddr.equals(o.sparseAddr) && Arrays.equals(cells, o.cells); } return false; } }
Consider outputting the sparse address so its easier to pinpoint the issue if this happens.
private MixedTensor(TensorType type, List<DenseBlock> cellBlocks, Index index) { this.type = type; this.blockSize = index.denseSubspaceSize(); this.cellBlocks = List.copyOf(cellBlocks); this.index = index; if (this.blockSize < 1) { throw new IllegalStateException("invalid dense subspace size " + blockSize); } long count = 0; for (var block : this.cellBlocks) { if (index.sparseMap.get(block.sparseAddr) != count) { throw new IllegalStateException("map vs list mismatch"); } if (block.cells.length != blockSize) { throw new IllegalStateException("block length mismatch"); } ++count; } if (count != index.sparseMap.size()) { throw new IllegalStateException("map vs list size mismatch"); } }
throw new IllegalStateException("map vs list mismatch");
private MixedTensor(TensorType type, List<DenseBlock> cellBlocks, Index index) { this.type = type; this.blockSize = index.denseSubspaceSize(); this.cellBlocks = List.copyOf(cellBlocks); this.index = index; if (this.blockSize < 1) { throw new IllegalStateException("invalid dense subspace size: " + blockSize); } long count = 0; for (var block : this.cellBlocks) { if (index.sparseMap.get(block.sparseAddr) != count) { throw new IllegalStateException("map vs list mismatch: block + count + " address maps to + index.sparseMap.get(block.sparseAddr)); } if (block.cells.length != blockSize) { throw new IllegalStateException("dense subspace size mismatch, expected " + blockSize + " cells, but got: " + block.cells.length); } ++count; } if (count != index.sparseMap.size()) { throw new IllegalStateException("mismatch: list size is " + count + " but map size is " + index.sparseMap.size()); } }
class DenseBlock { final TensorAddress sparseAddr; final double[] cells; DenseBlock(TensorAddress sparseAddr, double[] cells) { this.sparseAddr = sparseAddr; this.cells = cells; } @Override public int hashCode() { return Objects.hash(sparseAddr, cells); } @Override public boolean equals(Object other) { if (other instanceof DenseBlock o) { return sparseAddr.equals(o.sparseAddr) && Arrays.equals(cells, o.cells); } return false; } }
class DenseBlock { final TensorAddress sparseAddr; final double[] cells; DenseBlock(TensorAddress sparseAddr, double[] cells) { this.sparseAddr = sparseAddr; this.cells = cells; } @Override public int hashCode() { return Objects.hash(sparseAddr, cells); } @Override public boolean equals(Object other) { if (other instanceof DenseBlock o) { return sparseAddr.equals(o.sparseAddr) && Arrays.equals(cells, o.cells); } return false; } }
```suggestion ? cfg.maxContentSize() : Math.min(Runtime.getRuntime().maxMemory() / 2, Integer.MAX_VALUE - 8); ``` If you want a limit of 2GB here, should probably set it to what can be held in an array as well.
private static long resolveMaxContentSize(ConnectorConfig cfg) { long maxContentSize = cfg.maxContentSize() != 0 ? cfg.maxContentSize() : Math.min(Runtime.getRuntime().maxMemory() / 2, Integer.MAX_VALUE); log.fine(() -> Text.format("maxContentSize=%d", maxContentSize)); return maxContentSize; }
? cfg.maxContentSize() : Math.min(Runtime.getRuntime().maxMemory() / 2, Integer.MAX_VALUE);
private static long resolveMaxContentSize(ConnectorConfig cfg) { long maxContentSize = cfg.maxContentSize() != 0 ? cfg.maxContentSize() : Math.min(Runtime.getRuntime().maxMemory() / 2, Integer.MAX_VALUE - 8); log.fine(() -> Text.format("maxContentSize=%d", maxContentSize)); return maxContentSize; }
class ServletRequestReader { private enum State { NOT_STARTED, READING, ALL_DATA_READ, REQUEST_CONTENT_CLOSED } private static final Logger log = Logger.getLogger(ServletRequestReader.class.getName()); private static final int BUFFER_SIZE_BYTES = 8 * 1024; private final Object monitor = new Object(); private final HttpServletRequest req; private final ContentChannel requestContentChannel; private final Janitor janitor; private final RequestMetricReporter metricReporter; private ServletInputStream in; private Throwable errorDuringRead; private int bytesRead; /** * Rules: * 1. If state != State.READING, then numberOfOutstandingUserCalls must not increase * 2. The _first time_ (finishedFuture is completed OR all data is read) AND numberOfOutstandingUserCalls == 0, * the request content channel should be closed * 3. finishedFuture must not be completed when holding the monitor * 4. completing finishedFuture with an exception must be done synchronously * to prioritize failures being transported to the response. * 5. All completion handlers (both for write and complete) must not be * called from a user (request handler) owned thread * (i.e. when being called from user code, don't call back into user code.) */ private State state = State.NOT_STARTED; /** * Number of calls that we're waiting for from user code. * There are two classes of such calls: * 1) calls to requestContentChannel.write that we're waiting for to complete * 2) completion handlers given to requestContentChannel.write that the user must call. * * As long as we're waiting for such calls, we're not allowed to: * - close the request content channel (currently only required by tests) * - complete the finished future non-exceptionally, * since then we would not be able to report writeCompletionHandler.failed(exception) calls */ private int numberOfOutstandingUserCalls = 0; /** * When this future completes there will be no more calls against the servlet input stream. 
* The framework is still allowed to invoke us though. * * The future might complete in the servlet framework thread, user thread or executor thread. * * All completions of finishedFuture, except those done when closing the request content channel, * must be followed by calls to either onAllDataRead or decreasePendingAndCloseRequestContentChannelConditionally. * Those two functions will ensure that the request content channel is closed at the right time. * If calls to those methods does not close the request content channel immediately, * there is some outstanding completion callback that will later come in and complete the request. */ private final CompletableFuture<Void> finishedFuture = new CompletableFuture<>(); ServletRequestReader( Request req, ContentChannel requestContentChannel, Janitor janitor, RequestMetricReporter metricReporter) { this.req = Objects.requireNonNull(req); var cfg = RequestUtils.getConnector(req).connectorConfig(); long maxContentSize = resolveMaxContentSize(cfg); var msgTemplate = resolveMaxContentSizeErrorMessage(cfg); this.requestContentChannel = maxContentSize >= 0 ? 
new ByteLimitedContentChannel(Objects.requireNonNull(requestContentChannel), maxContentSize, msgTemplate) : Objects.requireNonNull(requestContentChannel); this.janitor = Objects.requireNonNull(janitor); this.metricReporter = Objects.requireNonNull(metricReporter); } private static String resolveMaxContentSizeErrorMessage(ConnectorConfig cfg) { return cfg.maxContentSizeErrorMessageTemplate().strip(); } /** Register read listener to start reading request data */ void start() { try { ServletInputStream in; synchronized (monitor) { if (state != State.NOT_STARTED) throw new IllegalStateException("State=" + state); in = req.getInputStream(); this.in = in; state = State.READING; } in.setReadListener(new Listener()); } catch (Throwable t) { fail(t); } } CompletableFuture<Void> finishedFuture() { return finishedFuture; } private class Listener implements ReadListener { @Override public void onDataAvailable() throws IOException { ServletInputStream in; synchronized (monitor) { in = ServletRequestReader.this.in; } while (in.isReady()) { final byte[] buffer = new byte[BUFFER_SIZE_BYTES]; int numBytesRead; synchronized (monitor) { numBytesRead = in.read(buffer); if (numBytesRead < 0) { return; } if (state != State.READING) { assert finishedFuture.isCompletedExceptionally(); return; } numberOfOutstandingUserCalls += 2; bytesRead += numBytesRead; } try { requestContentChannel.write(ByteBuffer.wrap(buffer, 0, numBytesRead), new CompletionHandler() { @Override public void completed() { decreaseOutstandingUserCallsAndCloseRequestContentChannelConditionally(); } @Override public void failed(final Throwable t) { finishedFuture.completeExceptionally(t); decreaseOutstandingUserCallsAndCloseRequestContentChannelConditionally(); } }); metricReporter.successfulRead(numBytesRead); } catch (Throwable t) { finishedFuture.completeExceptionally(t); } finally { decreaseOutstandingUserCallsAndCloseRequestContentChannelConditionally(); } } } @Override public void onError(final Throwable t) { 
fail(t); } @Override public void onAllDataRead() { doneReading(null); } } void fail(Throwable t) { doneReading(t); finishedFuture.completeExceptionally(t); } private void decreaseOutstandingUserCallsAndCloseRequestContentChannelConditionally() { boolean shouldCloseRequestContentChannel; synchronized (monitor) { assertStateNotEquals(state, State.REQUEST_CONTENT_CLOSED); numberOfOutstandingUserCalls -= 1; shouldCloseRequestContentChannel = numberOfOutstandingUserCalls == 0 && state == State.ALL_DATA_READ; if (shouldCloseRequestContentChannel) { state = State.REQUEST_CONTENT_CLOSED; } } if (shouldCloseRequestContentChannel) { janitor.scheduleTask(this::closeRequestContentChannel); } } private void assertStateNotEquals(State state, State notExpectedState) { if (state == notExpectedState) { AssertionError e = new AssertionError("State should not be " + notExpectedState); log.log(Level.WARNING, "Assertion failed. " + "numberOfOutstandingUserCalls = " + numberOfOutstandingUserCalls + ", isDone = " + finishedFuture.isDone(), e); throw e; } } private void doneReading(Throwable t) { boolean shouldCloseRequestContentChannel; int bytesRead; synchronized (monitor) { errorDuringRead = t; if (state == State.REQUEST_CONTENT_CLOSED) { return; } if (state == State.READING) { state = State.ALL_DATA_READ; } shouldCloseRequestContentChannel = numberOfOutstandingUserCalls == 0; if (shouldCloseRequestContentChannel) { state = State.REQUEST_CONTENT_CLOSED; } bytesRead = this.bytesRead; } if (shouldCloseRequestContentChannel) { closeRequestContentChannel(); } metricReporter.contentSize(bytesRead); } private void closeRequestContentChannel() { Throwable readError; synchronized (monitor) { readError = this.errorDuringRead; } try { if (readError != null) requestContentChannel.onError(readError); CompletableFuture<Void> completedCalledFuture = new CompletableFuture<>(); requestContentChannel.close(new CompletionHandler() { @Override public void completed() { 
completedCalledFuture.complete(null); } @Override public void failed(Throwable t) { finishedFuture.completeExceptionally(t); } }); completedCalledFuture.whenComplete((__, ___) -> finishedFuture.complete(null)); } catch (Throwable t) { finishedFuture.completeExceptionally(t); } } private static class ByteLimitedContentChannel implements ContentChannel { private final long maxContentSize; private final String messageTemplate; private final AtomicLong bytesWritten = new AtomicLong(); private final ContentChannel delegate; ByteLimitedContentChannel(ContentChannel delegate, long maxContentSize, String messageTemplate) { this.delegate = delegate; this.maxContentSize = maxContentSize; this.messageTemplate = messageTemplate; } @Override public void write(ByteBuffer buf, CompletionHandler handler) { long written = bytesWritten.addAndGet(buf.remaining()); if (written > maxContentSize) { handler.failed(new RequestException( Response.Status.REQUEST_TOO_LONG, messageTemplate.formatted(written, maxContentSize))); return; } delegate.write(buf, handler); } @Override public void close(CompletionHandler h) { delegate.close(h); } @Override public void onError(Throwable t) { delegate.onError(t); } } }
class ServletRequestReader { private enum State { NOT_STARTED, READING, ALL_DATA_READ, REQUEST_CONTENT_CLOSED } private static final Logger log = Logger.getLogger(ServletRequestReader.class.getName()); private static final int BUFFER_SIZE_BYTES = 8 * 1024; private final Object monitor = new Object(); private final HttpServletRequest req; private final ContentChannel requestContentChannel; private final Janitor janitor; private final RequestMetricReporter metricReporter; private ServletInputStream in; private Throwable errorDuringRead; private int bytesRead; /** * Rules: * 1. If state != State.READING, then numberOfOutstandingUserCalls must not increase * 2. The _first time_ (finishedFuture is completed OR all data is read) AND numberOfOutstandingUserCalls == 0, * the request content channel should be closed * 3. finishedFuture must not be completed when holding the monitor * 4. completing finishedFuture with an exception must be done synchronously * to prioritize failures being transported to the response. * 5. All completion handlers (both for write and complete) must not be * called from a user (request handler) owned thread * (i.e. when being called from user code, don't call back into user code.) */ private State state = State.NOT_STARTED; /** * Number of calls that we're waiting for from user code. * There are two classes of such calls: * 1) calls to requestContentChannel.write that we're waiting for to complete * 2) completion handlers given to requestContentChannel.write that the user must call. * * As long as we're waiting for such calls, we're not allowed to: * - close the request content channel (currently only required by tests) * - complete the finished future non-exceptionally, * since then we would not be able to report writeCompletionHandler.failed(exception) calls */ private int numberOfOutstandingUserCalls = 0; /** * When this future completes there will be no more calls against the servlet input stream. 
* The framework is still allowed to invoke us though. * * The future might complete in the servlet framework thread, user thread or executor thread. * * All completions of finishedFuture, except those done when closing the request content channel, * must be followed by calls to either onAllDataRead or decreasePendingAndCloseRequestContentChannelConditionally. * Those two functions will ensure that the request content channel is closed at the right time. * If calls to those methods does not close the request content channel immediately, * there is some outstanding completion callback that will later come in and complete the request. */ private final CompletableFuture<Void> finishedFuture = new CompletableFuture<>(); ServletRequestReader( Request req, ContentChannel requestContentChannel, Janitor janitor, RequestMetricReporter metricReporter) { this.req = Objects.requireNonNull(req); var cfg = RequestUtils.getConnector(req).connectorConfig(); long maxContentSize = resolveMaxContentSize(cfg); var msgTemplate = resolveMaxContentSizeErrorMessage(cfg); this.requestContentChannel = maxContentSize >= 0 ? 
new ByteLimitedContentChannel(Objects.requireNonNull(requestContentChannel), maxContentSize, msgTemplate) : Objects.requireNonNull(requestContentChannel); this.janitor = Objects.requireNonNull(janitor); this.metricReporter = Objects.requireNonNull(metricReporter); } private static String resolveMaxContentSizeErrorMessage(ConnectorConfig cfg) { return cfg.maxContentSizeErrorMessageTemplate().strip(); } /** Register read listener to start reading request data */ void start() { try { ServletInputStream in; synchronized (monitor) { if (state != State.NOT_STARTED) throw new IllegalStateException("State=" + state); in = req.getInputStream(); this.in = in; state = State.READING; } in.setReadListener(new Listener()); } catch (Throwable t) { fail(t); } } CompletableFuture<Void> finishedFuture() { return finishedFuture; } private class Listener implements ReadListener { @Override public void onDataAvailable() throws IOException { ServletInputStream in; synchronized (monitor) { in = ServletRequestReader.this.in; } while (in.isReady()) { final byte[] buffer = new byte[BUFFER_SIZE_BYTES]; int numBytesRead; synchronized (monitor) { numBytesRead = in.read(buffer); if (numBytesRead < 0) { return; } if (state != State.READING) { assert finishedFuture.isCompletedExceptionally(); return; } numberOfOutstandingUserCalls += 2; bytesRead += numBytesRead; } try { requestContentChannel.write(ByteBuffer.wrap(buffer, 0, numBytesRead), new CompletionHandler() { @Override public void completed() { decreaseOutstandingUserCallsAndCloseRequestContentChannelConditionally(); } @Override public void failed(final Throwable t) { finishedFuture.completeExceptionally(t); decreaseOutstandingUserCallsAndCloseRequestContentChannelConditionally(); } }); metricReporter.successfulRead(numBytesRead); } catch (Throwable t) { finishedFuture.completeExceptionally(t); } finally { decreaseOutstandingUserCallsAndCloseRequestContentChannelConditionally(); } } } @Override public void onError(final Throwable t) { 
fail(t); } @Override public void onAllDataRead() { doneReading(null); } } void fail(Throwable t) { doneReading(t); finishedFuture.completeExceptionally(t); } private void decreaseOutstandingUserCallsAndCloseRequestContentChannelConditionally() { boolean shouldCloseRequestContentChannel; synchronized (monitor) { assertStateNotEquals(state, State.REQUEST_CONTENT_CLOSED); numberOfOutstandingUserCalls -= 1; shouldCloseRequestContentChannel = numberOfOutstandingUserCalls == 0 && state == State.ALL_DATA_READ; if (shouldCloseRequestContentChannel) { state = State.REQUEST_CONTENT_CLOSED; } } if (shouldCloseRequestContentChannel) { janitor.scheduleTask(this::closeRequestContentChannel); } } private void assertStateNotEquals(State state, State notExpectedState) { if (state == notExpectedState) { AssertionError e = new AssertionError("State should not be " + notExpectedState); log.log(Level.WARNING, "Assertion failed. " + "numberOfOutstandingUserCalls = " + numberOfOutstandingUserCalls + ", isDone = " + finishedFuture.isDone(), e); throw e; } } private void doneReading(Throwable t) { boolean shouldCloseRequestContentChannel; int bytesRead; synchronized (monitor) { errorDuringRead = t; if (state == State.REQUEST_CONTENT_CLOSED) { return; } if (state == State.READING) { state = State.ALL_DATA_READ; } shouldCloseRequestContentChannel = numberOfOutstandingUserCalls == 0; if (shouldCloseRequestContentChannel) { state = State.REQUEST_CONTENT_CLOSED; } bytesRead = this.bytesRead; } if (shouldCloseRequestContentChannel) { closeRequestContentChannel(); } metricReporter.contentSize(bytesRead); } private void closeRequestContentChannel() { Throwable readError; synchronized (monitor) { readError = this.errorDuringRead; } try { if (readError != null) requestContentChannel.onError(readError); CompletableFuture<Void> completedCalledFuture = new CompletableFuture<>(); requestContentChannel.close(new CompletionHandler() { @Override public void completed() { 
completedCalledFuture.complete(null); } @Override public void failed(Throwable t) { finishedFuture.completeExceptionally(t); } }); completedCalledFuture.whenComplete((__, ___) -> finishedFuture.complete(null)); } catch (Throwable t) { finishedFuture.completeExceptionally(t); } } private static class ByteLimitedContentChannel implements ContentChannel { private final long maxContentSize; private final String messageTemplate; private final AtomicLong bytesWritten = new AtomicLong(); private final ContentChannel delegate; ByteLimitedContentChannel(ContentChannel delegate, long maxContentSize, String messageTemplate) { this.delegate = delegate; this.maxContentSize = maxContentSize; this.messageTemplate = messageTemplate; } @Override public void write(ByteBuffer buf, CompletionHandler handler) { long written = bytesWritten.addAndGet(buf.remaining()); if (written > maxContentSize) { handler.failed(new RequestException( Response.Status.REQUEST_TOO_LONG, messageTemplate.formatted(written, maxContentSize))); return; } delegate.write(buf, handler); } @Override public void close(CompletionHandler h) { delegate.close(h); } @Override public void onError(Throwable t) { delegate.onError(t); } } }
Only the parameters `onnxExecutionMode`, `onnxInteropThreads`, `onnxIntraopThreads` and `onnxGpuDevice` should be included in `OnnxModelOptions`. The other parameters to not control how to the Onnx model is loaded, only its input/output.
public BertEmbedder(ApplicationContainerCluster cluster, Element xml, DeployState state) { super("ai.vespa.embedding.BertBaseEmbedder", INTEGRATION_BUNDLE_NAME, xml); var model = Model.fromXml(state, xml, "transformer-model").orElseThrow(); this.onnxModelOptions = new OnnxModelOptions( Optional.of(model.modelReference()), Optional.of(Model.fromXml(state, xml, "tokenizer-vocab").orElseThrow().modelReference()), getChildValue(xml, "max-tokens").map(Integer::parseInt), getChildValue(xml, "transformer-input-ids"), getChildValue(xml, "transformer-attention-mask"), getChildValue(xml, "transformer-token-type-ids"), getChildValue(xml, "transformer-output"), getChildValue(xml, "normalize").map(Boolean::parseBoolean), getChildValue(xml, "onnx-execution-mode"), getChildValue(xml, "onnx-interop-threads").map(Integer::parseInt), getChildValue(xml, "onnx-intraop-threads").map(Integer::parseInt), getChildValue(xml, "onnx-gpu-device").map(Integer::parseInt).map(OnnxModelOptions.GpuDevice::new), getChildValue(xml, "pooling-strategy"), getChildValue(xml, "transformer-start-sequence-token").map(Integer::parseInt), getChildValue(xml, "transformer-end-sequence-token").map(Integer::parseInt)); model.registerOnnxModelCost(cluster); }
getChildValue(xml, "transformer-token-type-ids"),
public BertEmbedder(ApplicationContainerCluster cluster, Element xml, DeployState state) { super("ai.vespa.embedding.BertBaseEmbedder", INTEGRATION_BUNDLE_NAME, xml); var model = Model.fromXml(state, xml, "transformer-model").orElseThrow(); this.onnxModelOptions = new OnnxModelOptions( getChildValue(xml, "onnx-execution-mode"), getChildValue(xml, "onnx-interop-threads").map(Integer::parseInt), getChildValue(xml, "onnx-intraop-threads").map(Integer::parseInt), getChildValue(xml, "onnx-gpu-device").map(Integer::parseInt).map(OnnxModelOptions.GpuDevice::new)); modelRef = model.modelReference(); vocabRef = Model.fromXml(state, xml, "tokenizer-vocab").orElseThrow().modelReference(); maxTokens = getChildValue(xml, "max-tokens").map(Integer::parseInt).orElse(null); transformerInputIds = getChildValue(xml, "transformer-input-ids").orElse(null); transformerAttentionMask = getChildValue(xml, "transformer-attention-mask").orElse(null); transformerTokenTypeIds = getChildValue(xml, "transformer-token-type-ids").orElse(null); transformerOutput = getChildValue(xml, "transformer-output").orElse(null); transformerStartSequenceToken = getChildValue(xml, "transformer-start-sequence-token").map(Integer::parseInt).orElse(null); transformerEndSequenceToken = getChildValue(xml, "transformer-end-sequence-token").map(Integer::parseInt).orElse(null); poolingStrategy = getChildValue(xml, "pooling-strategy").orElse(null); model.registerOnnxModelCost(cluster); }
class BertEmbedder extends TypedComponent implements BertBaseEmbedderConfig.Producer { private final OnnxModelOptions onnxModelOptions; @Override public void getConfig(BertBaseEmbedderConfig.Builder b) { b.transformerModel(onnxModelOptions.modelRef().get()).tokenizerVocab(onnxModelOptions.vocabRef().get()); onnxModelOptions.maxTokens().ifPresent(b::transformerMaxTokens); onnxModelOptions.transformerInputIds().ifPresent(b::transformerInputIds); onnxModelOptions.transformerAttentionMask().ifPresent(b::transformerAttentionMask); onnxModelOptions.transformerTokenTypeIds().ifPresent(b::transformerTokenTypeIds); onnxModelOptions.transformerOutput().ifPresent(b::transformerOutput); onnxModelOptions.transformerStartSequenceToken().ifPresent(b::transformerStartSequenceToken); onnxModelOptions.transformerEndSequenceToken().ifPresent(b::transformerEndSequenceToken); onnxModelOptions.poolingStrategy().ifPresent(value -> b.poolingStrategy(PoolingStrategy.Enum.valueOf(value))); onnxModelOptions.executionMode().ifPresent(value -> b.onnxExecutionMode(OnnxExecutionMode.Enum.valueOf(value))); onnxModelOptions.interOpThreads().ifPresent(b::onnxInterOpThreads); onnxModelOptions.intraOpThreads().ifPresent(b::onnxIntraOpThreads); onnxModelOptions.gpuDevice().ifPresent(value -> b.onnxGpuDevice(value.deviceNumber())); } }
class BertEmbedder extends TypedComponent implements BertBaseEmbedderConfig.Producer { private final OnnxModelOptions onnxModelOptions; private final ModelReference modelRef; private final ModelReference vocabRef; private final Integer maxTokens; private final String transformerInputIds; private final String transformerAttentionMask; private final String transformerTokenTypeIds; private final String transformerOutput; private final Integer transformerStartSequenceToken; private final Integer transformerEndSequenceToken; private final String poolingStrategy; @Override public void getConfig(BertBaseEmbedderConfig.Builder b) { b.transformerModel(modelRef).tokenizerVocab(vocabRef); if (maxTokens != null) b.transformerMaxTokens(maxTokens); if (transformerInputIds != null) b.transformerInputIds(transformerInputIds); if (transformerAttentionMask != null) b.transformerAttentionMask(transformerAttentionMask); if (transformerTokenTypeIds != null) b.transformerTokenTypeIds(transformerTokenTypeIds); if (transformerOutput != null) b.transformerOutput(transformerOutput); if (transformerStartSequenceToken != null) b.transformerStartSequenceToken(transformerStartSequenceToken); if (transformerEndSequenceToken != null) b.transformerEndSequenceToken(transformerEndSequenceToken); if (poolingStrategy != null) b.poolingStrategy(PoolingStrategy.Enum.valueOf(poolingStrategy)); onnxModelOptions.executionMode().ifPresent(value -> b.onnxExecutionMode(OnnxExecutionMode.Enum.valueOf(value))); onnxModelOptions.interOpThreads().ifPresent(b::onnxInterOpThreads); onnxModelOptions.intraOpThreads().ifPresent(b::onnxIntraOpThreads); onnxModelOptions.gpuDevice().ifPresent(value -> b.onnxGpuDevice(value.deviceNumber())); } }
Fixed
public BertEmbedder(ApplicationContainerCluster cluster, Element xml, DeployState state) { super("ai.vespa.embedding.BertBaseEmbedder", INTEGRATION_BUNDLE_NAME, xml); var model = Model.fromXml(state, xml, "transformer-model").orElseThrow(); this.onnxModelOptions = new OnnxModelOptions( Optional.of(model.modelReference()), Optional.of(Model.fromXml(state, xml, "tokenizer-vocab").orElseThrow().modelReference()), getChildValue(xml, "max-tokens").map(Integer::parseInt), getChildValue(xml, "transformer-input-ids"), getChildValue(xml, "transformer-attention-mask"), getChildValue(xml, "transformer-token-type-ids"), getChildValue(xml, "transformer-output"), getChildValue(xml, "normalize").map(Boolean::parseBoolean), getChildValue(xml, "onnx-execution-mode"), getChildValue(xml, "onnx-interop-threads").map(Integer::parseInt), getChildValue(xml, "onnx-intraop-threads").map(Integer::parseInt), getChildValue(xml, "onnx-gpu-device").map(Integer::parseInt).map(OnnxModelOptions.GpuDevice::new), getChildValue(xml, "pooling-strategy"), getChildValue(xml, "transformer-start-sequence-token").map(Integer::parseInt), getChildValue(xml, "transformer-end-sequence-token").map(Integer::parseInt)); model.registerOnnxModelCost(cluster); }
getChildValue(xml, "transformer-token-type-ids"),
public BertEmbedder(ApplicationContainerCluster cluster, Element xml, DeployState state) { super("ai.vespa.embedding.BertBaseEmbedder", INTEGRATION_BUNDLE_NAME, xml); var model = Model.fromXml(state, xml, "transformer-model").orElseThrow(); this.onnxModelOptions = new OnnxModelOptions( getChildValue(xml, "onnx-execution-mode"), getChildValue(xml, "onnx-interop-threads").map(Integer::parseInt), getChildValue(xml, "onnx-intraop-threads").map(Integer::parseInt), getChildValue(xml, "onnx-gpu-device").map(Integer::parseInt).map(OnnxModelOptions.GpuDevice::new)); modelRef = model.modelReference(); vocabRef = Model.fromXml(state, xml, "tokenizer-vocab").orElseThrow().modelReference(); maxTokens = getChildValue(xml, "max-tokens").map(Integer::parseInt).orElse(null); transformerInputIds = getChildValue(xml, "transformer-input-ids").orElse(null); transformerAttentionMask = getChildValue(xml, "transformer-attention-mask").orElse(null); transformerTokenTypeIds = getChildValue(xml, "transformer-token-type-ids").orElse(null); transformerOutput = getChildValue(xml, "transformer-output").orElse(null); transformerStartSequenceToken = getChildValue(xml, "transformer-start-sequence-token").map(Integer::parseInt).orElse(null); transformerEndSequenceToken = getChildValue(xml, "transformer-end-sequence-token").map(Integer::parseInt).orElse(null); poolingStrategy = getChildValue(xml, "pooling-strategy").orElse(null); model.registerOnnxModelCost(cluster); }
class BertEmbedder extends TypedComponent implements BertBaseEmbedderConfig.Producer { private final OnnxModelOptions onnxModelOptions; @Override public void getConfig(BertBaseEmbedderConfig.Builder b) { b.transformerModel(onnxModelOptions.modelRef().get()).tokenizerVocab(onnxModelOptions.vocabRef().get()); onnxModelOptions.maxTokens().ifPresent(b::transformerMaxTokens); onnxModelOptions.transformerInputIds().ifPresent(b::transformerInputIds); onnxModelOptions.transformerAttentionMask().ifPresent(b::transformerAttentionMask); onnxModelOptions.transformerTokenTypeIds().ifPresent(b::transformerTokenTypeIds); onnxModelOptions.transformerOutput().ifPresent(b::transformerOutput); onnxModelOptions.transformerStartSequenceToken().ifPresent(b::transformerStartSequenceToken); onnxModelOptions.transformerEndSequenceToken().ifPresent(b::transformerEndSequenceToken); onnxModelOptions.poolingStrategy().ifPresent(value -> b.poolingStrategy(PoolingStrategy.Enum.valueOf(value))); onnxModelOptions.executionMode().ifPresent(value -> b.onnxExecutionMode(OnnxExecutionMode.Enum.valueOf(value))); onnxModelOptions.interOpThreads().ifPresent(b::onnxInterOpThreads); onnxModelOptions.intraOpThreads().ifPresent(b::onnxIntraOpThreads); onnxModelOptions.gpuDevice().ifPresent(value -> b.onnxGpuDevice(value.deviceNumber())); } }
class BertEmbedder extends TypedComponent implements BertBaseEmbedderConfig.Producer { private final OnnxModelOptions onnxModelOptions; private final ModelReference modelRef; private final ModelReference vocabRef; private final Integer maxTokens; private final String transformerInputIds; private final String transformerAttentionMask; private final String transformerTokenTypeIds; private final String transformerOutput; private final Integer transformerStartSequenceToken; private final Integer transformerEndSequenceToken; private final String poolingStrategy; @Override public void getConfig(BertBaseEmbedderConfig.Builder b) { b.transformerModel(modelRef).tokenizerVocab(vocabRef); if (maxTokens != null) b.transformerMaxTokens(maxTokens); if (transformerInputIds != null) b.transformerInputIds(transformerInputIds); if (transformerAttentionMask != null) b.transformerAttentionMask(transformerAttentionMask); if (transformerTokenTypeIds != null) b.transformerTokenTypeIds(transformerTokenTypeIds); if (transformerOutput != null) b.transformerOutput(transformerOutput); if (transformerStartSequenceToken != null) b.transformerStartSequenceToken(transformerStartSequenceToken); if (transformerEndSequenceToken != null) b.transformerEndSequenceToken(transformerEndSequenceToken); if (poolingStrategy != null) b.poolingStrategy(PoolingStrategy.Enum.valueOf(poolingStrategy)); onnxModelOptions.executionMode().ifPresent(value -> b.onnxExecutionMode(OnnxExecutionMode.Enum.valueOf(value))); onnxModelOptions.interOpThreads().ifPresent(b::onnxInterOpThreads); onnxModelOptions.intraOpThreads().ifPresent(b::onnxIntraOpThreads); onnxModelOptions.gpuDevice().ifPresent(value -> b.onnxGpuDevice(value.deviceNumber())); } }
Please add a comment about this extension of the Item range, which is originally only 5 bits.
protected void encodeThis(ByteBuffer buffer) { byte CODE_MASK = 0b00011111; byte FEAT_WEIGHT = 0b00100000; byte FEAT_UNIQUEID = 0b01000000; byte FEAT_FLAGS = -0b10000000; int code = getCode(); byte type = code >= CODE_MASK ? CODE_MASK : (byte) code; if (code >= 0x80 + CODE_MASK) throw new IllegalStateException("must increase number of bytes in serialization format for queries"); if (weight != DEFAULT_WEIGHT) { type |= FEAT_WEIGHT; } if (hasUniqueID()) { type |= FEAT_UNIQUEID; } byte flags = getFlagsFeature(); if (flags != 0) { type |= FEAT_FLAGS; } buffer.put(type); if (code >= CODE_MASK) { buffer.put((byte) (code - CODE_MASK)); } if ((type & FEAT_WEIGHT) != 0) { IntegerCompressor.putCompressedNumber(weight, buffer); } if ((type & FEAT_UNIQUEID) != 0) { IntegerCompressor.putCompressedPositiveNumber(uniqueID, buffer); } if ((type & FEAT_FLAGS) != 0) { buffer.put(flags); } }
if (code >= 0x80 + CODE_MASK)
protected void encodeThis(ByteBuffer buffer) { byte CODE_MASK = 0b00011111; byte FEAT_WEIGHT = 0b00100000; byte FEAT_UNIQUEID = 0b01000000; byte FEAT_FLAGS = -0b10000000; int code = getCode(); byte type = code >= CODE_MASK ? CODE_MASK : (byte) code; if (code >= 0x80 + CODE_MASK) throw new IllegalStateException("must increase number of bytes in serialization format for queries"); if (weight != DEFAULT_WEIGHT) { type |= FEAT_WEIGHT; } if (hasUniqueID()) { type |= FEAT_UNIQUEID; } byte flags = getFlagsFeature(); if (flags != 0) { type |= FEAT_FLAGS; } buffer.put(type); if (code >= CODE_MASK) { buffer.put((byte) (code - CODE_MASK)); } if ((type & FEAT_WEIGHT) != 0) { IntegerCompressor.putCompressedNumber(weight, buffer); } if ((type & FEAT_UNIQUEID) != 0) { IntegerCompressor.putCompressedPositiveNumber(uniqueID, buffer); } if ((type & FEAT_FLAGS) != 0) { buffer.put(flags); } }
class Item implements Cloneable { public enum ItemType { OR(0), AND(1), NOT(2), RANK(3), WORD(4), INT(5), PHRASE(6), MULTI_TERM(7), PREFIX(8), SUBSTRING(9), NEAR(11), ONEAR(12), SUFFIX(13), EQUIV(14), WEIGHTEDSET(15), WEAK_AND(16), EXACT(17), SAME_ELEMENT(18), PURE_WEIGHTED_STRING(19), PURE_WEIGHTED_INTEGER(20), DOTPRODUCT(21), WAND(22), PREDICATE_QUERY(23), REGEXP(24), WORD_ALTERNATIVES(25), NEAREST_NEIGHBOR(26), GEO_LOCATION_TERM(27), TRUE(28), FALSE(29), FUZZY(30), STRING_IN(31), NUMERIC_IN(32); public final int code; ItemType(int code) { this.code = code; } } @Deprecated public enum ItemCreator { ORIG(0), FILTER(1); public final int code; ItemCreator(int code) { this.code = code; } } public static final int DEFAULT_WEIGHT = 100; /** The relative importance of this term in the query. Default is 100 */ private int weight = DEFAULT_WEIGHT; private boolean fromSpecialToken = false; private ItemCreator creator = ItemCreator.ORIG; /** The parent in the query tree, or null if this is a root */ private CompositeItem parent = null; /** The annotations made on this item */ private CopyOnWriteHashMap<String, Object> annotations; /** Whether this item should affect ranking. 
*/ private boolean isRanked = true; /** Whether position data should be used when ranking this item */ private boolean usePositionData = true; /** Whether the item should encode a unique ID */ private boolean hasUniqueID = false; /** Optional symbolic name for this item, requires unique id */ private String label = null; /** Unique identifier to address the item for external annotation */ protected int uniqueID = 0; /** Items for explicit connectivity */ protected Item connectedItem; protected Item connectedBacklink; protected double connectivity = 0; /** Explicit term significance */ protected double significance = 0; protected boolean explicitSignificance = false; /** Whether this item is eligible for change by query rewriters (false) or should be kept as-is (true) */ private boolean isProtected; private Language language = Language.UNKNOWN; /** Sets the index name of this item */ public abstract void setIndexName(String index); /** Returns the int code of this item */ public final int getCode() { return getItemType().code; } /** Return the enumerated type of this item. */ public abstract ItemType getItemType(); /** Returns the name of this item */ public abstract String getName(); /** * Sets whether this is a filter term. Filter terms are treated just like other terms except that * they will not be highlighted in dynamic summaries. */ public void setFilter(boolean filter) { if (filter) { creator = ItemCreator.FILTER; } else { creator = ItemCreator.ORIG; } } /** Returns whether this is a filter term */ public boolean isFilter() { return creator == ItemCreator.FILTER; } /** * Returns the item creator value. * * @deprecated use isFilter(boolean) */ @Deprecated public ItemCreator getCreator() { return creator; } /** * Sets the item creator value. 
* * @deprecated use setFilter(boolean) */ @Deprecated public void setCreator(ItemCreator creator) { this.creator = creator; } /** Sets the relative importance of this term */ public void setWeight(int w) { weight = w; } /** Returns the relative importance of this term. Default is 100. */ public int getWeight() { return weight; } /** * Annotate this item * * @param key the annotation key * @param value the value, or null to set a valueless annotation */ public void addAnnotation(String key, Object value) { if (annotations == null) annotations = new CopyOnWriteHashMap<>(); annotations.put(key, value); } /** Returns an annotation on this item, or null if the annotation is not set */ public Object getAnnotation(String annotation) { if (annotations == null) { return null; } return annotations.get(annotation); } /** * Returns whether this has an annotation */ public boolean hasAnnotation(String annotation) { if (annotations == null) return false; return annotations.containsKey(annotation); } /** Set whether this should be protected from change/remove by query rewriters */ public void setProtected(boolean isProtected) { this.isProtected=isProtected; } /** Returns whether this is to be protected from change/remove by query rewriters. default is false */ public boolean isProtected() { return isProtected; } /** Sets the parent in the tree. Do not use: Only to be called from CompositeItem/QueryTree */ public void setParent(CompositeItem parent) { this.parent = parent; } /** Returns the parent in the query tree, or null if this node has no parent */ public CompositeItem getParent() { return parent; } public abstract int encode(ByteBuffer buffer); /** * Returns an integer that contains all feature flags for this item. 
* * @return the feature flags */ private byte getFlagsFeature() { byte FLAGS_NORANK = 0x01; byte FLAGS_SPECIALTOKEN = 0x02; byte FLAGS_NOPOSITIONDATA = 0x04; byte FLAGS_ISFILTER = 0x08; byte ret = 0; if (!isRanked()) { ret |= FLAGS_NORANK; } if (isFromSpecialToken()) { ret |= FLAGS_SPECIALTOKEN; } if (!usePositionData()) { ret |= FLAGS_NOPOSITIONDATA; } if (isFilter()) { ret |= FLAGS_ISFILTER; } return ret; } /** Utility method for turning a string into utf-8 bytes */ protected static byte[] getBytes(String string) { return Utf8.toBytes(string); } public static void putString(String s, ByteBuffer buffer) { putBytes(Utf8.toBytes(s), buffer); } public static void putBytes(byte[] bytes, ByteBuffer buffer) { IntegerCompressor.putCompressedPositiveNumber(bytes.length, buffer); buffer.put(bytes); } public abstract int getTermCount(); /** * Returns the canonical query language string of this item. * The canonical language represent an item by the string * <pre> * ([itemName] [body]) * </pre> * where the body may recursively be other items. */ @Override public String toString() { StringBuilder buffer = new StringBuilder(); if (shouldParenthesize()) { buffer.append("("); } if (isFilter()) { buffer.append("|"); } appendHeadingString(buffer); appendBodyString(buffer); if (shouldParenthesize()) { buffer.append(")"); } if (weight != DEFAULT_WEIGHT) { buffer.append("!"); buffer.append(weight); } return buffer.toString(); } /** * Returns whether this item should be parenthesized when printed. * Default is false - no parentheses */ protected boolean shouldParenthesize() { return false; } /** Appends the heading of this string. As default getName() followed by a space. */ protected void appendHeadingString(StringBuilder buffer) { buffer.append(getName()); buffer.append(" "); } /** * Override to append the item body in the canonical query language of this item. 
* An item is usually represented by the string * <pre> * ([itemName] [body]) * </pre> * The body must be appended appended by this method. */ protected abstract void appendBodyString(StringBuilder buffer); /** Returns a deep copy of this item */ @Override public Item clone() { try { Item clone = (Item)super.clone(); if (this.annotations != null) clone.annotations = this.annotations.clone(); return clone; } catch (CloneNotSupportedException e) { throw new RuntimeException("Someone made Item uncloneable"); } } /** Returns whether this item is of the same class and contains the same state as the given item. */ @Override public boolean equals(Object o) { if (o == null) return false; if (o == this) return true; if (o.getClass() != this.getClass()) return false; Item other = (Item)o; if (this.weight != other.weight) return false; if (this.fromSpecialToken != other.fromSpecialToken) return false; if (this.creator != other.creator) return false; if ( ! Objects.equals(this.annotations, other.annotations)) return false; if (this.isRanked != other.isRanked) return false; if (this.usePositionData != other.usePositionData) return false; if ( ! Objects.equals(this.label, other.label)) return false; if (this.uniqueID != other.uniqueID) return false; if ( ! Objects.equals(this.connectedItem, other.connectedItem)) return false; if (this.connectivity != other.connectivity) return false; if (this.significance != other.significance) return false; if (this.language != other.language) return false; return true; } @Override public int hashCode() { return Objects.hash(weight, fromSpecialToken, creator, annotations, isRanked, usePositionData, label, uniqueID, connectedItem, connectivity, significance, language); } protected boolean hasUniqueID() { return hasUniqueID; } protected void setHasUniqueID(boolean hasUniqueID) { this.hasUniqueID = hasUniqueID; } /** * Label this item with a symbolic name which can later be used by * the back-end to identify specific items for ranking purposes. 
* * @param label label for this item */ public void setLabel(String label) { setHasUniqueID(true); this.label = label; } /** Returns the label for this item. This method will return null if no label has been set. */ public String getLabel() { return label; } /** * Sets whether this term item should affect ranking. * If set to false this term is not exposed to the ranking framework in the search backend. */ public void setRanked(boolean isRanked) { this.isRanked = isRanked; } /** Returns whether this item should affect ranking. */ public boolean isRanked() { return isRanked; } /** * Sets whether position data should be used when ranking this term item. * If set to false the search backend uses fast bit vector data structures when matching on this term * and only a few simple ranking features will be available when ranking this term. * Note that setting this to false also saves a lot of CPU during matching as bit vector data structures are used. */ public void setPositionData(boolean usePositionData) { this.usePositionData = usePositionData; } /** Returns whether position data should be used when ranking this item */ public boolean usePositionData() { return usePositionData; } public void disclose(Discloser discloser) { if (connectivity != 0) discloser.addProperty("connectivity", connectivity); if (connectedItem != null) discloser.addProperty("connectedItem", connectedItem); if (creator != ItemCreator.ORIG) discloser.addProperty("creator", creator); if ( ! isRanked) discloser.addProperty("isRanked", isRanked); if ( ! 
usePositionData) discloser.addProperty("usePositionData", usePositionData); if (explicitSignificance) discloser.addProperty("significance", significance); if (weight != 100) discloser.addProperty("weight", weight); if (label != null) discloser.addProperty("label", label); if (hasUniqueID) discloser.addProperty("uniqueID", uniqueID); } public boolean isFromSpecialToken() { return fromSpecialToken; } public void setFromSpecialToken(boolean fromSpecialToken) { this.fromSpecialToken = fromSpecialToken; } /** Returns the language of any natural language text below this item, or Language.UNKNOWN if not set. */ public Language getLanguage() { return language; } /** * Sets the language of any natural language text below this item. * This cannot be set to null but can be set to Language.UNKNOWN */ public void setLanguage(Language language) { Objects.requireNonNull(language, "Language cannot be null"); this.language = language; } /** * DO NOT USE */ public boolean hasConnectivityBackLink() { return connectedBacklink != null; } /** Returns true if this is the root item - that is if the parent is the QueryTree (or null for legacy reasons)*/ public boolean isRoot() { if (getParent()==null) return true; if (getParent() instanceof QueryTree) return true; return false; } }
class Item implements Cloneable { public enum ItemType { OR(0), AND(1), NOT(2), RANK(3), WORD(4), INT(5), PHRASE(6), MULTI_TERM(7), PREFIX(8), SUBSTRING(9), NEAR(11), ONEAR(12), SUFFIX(13), EQUIV(14), WEIGHTEDSET(15), WEAK_AND(16), EXACT(17), SAME_ELEMENT(18), PURE_WEIGHTED_STRING(19), PURE_WEIGHTED_INTEGER(20), DOTPRODUCT(21), WAND(22), PREDICATE_QUERY(23), REGEXP(24), WORD_ALTERNATIVES(25), NEAREST_NEIGHBOR(26), GEO_LOCATION_TERM(27), TRUE(28), FALSE(29), FUZZY(30), STRING_IN(31), NUMERIC_IN(32); public final int code; ItemType(int code) { this.code = code; } } @Deprecated public enum ItemCreator { ORIG(0), FILTER(1); public final int code; ItemCreator(int code) { this.code = code; } } public static final int DEFAULT_WEIGHT = 100; /** The relative importance of this term in the query. Default is 100 */ private int weight = DEFAULT_WEIGHT; private boolean fromSpecialToken = false; private ItemCreator creator = ItemCreator.ORIG; /** The parent in the query tree, or null if this is a root */ private CompositeItem parent = null; /** The annotations made on this item */ private CopyOnWriteHashMap<String, Object> annotations; /** Whether this item should affect ranking. 
*/ private boolean isRanked = true; /** Whether position data should be used when ranking this item */ private boolean usePositionData = true; /** Whether the item should encode a unique ID */ private boolean hasUniqueID = false; /** Optional symbolic name for this item, requires unique id */ private String label = null; /** Unique identifier to address the item for external annotation */ protected int uniqueID = 0; /** Items for explicit connectivity */ protected Item connectedItem; protected Item connectedBacklink; protected double connectivity = 0; /** Explicit term significance */ protected double significance = 0; protected boolean explicitSignificance = false; /** Whether this item is eligible for change by query rewriters (false) or should be kept as-is (true) */ private boolean isProtected; private Language language = Language.UNKNOWN; /** Sets the index name of this item */ public abstract void setIndexName(String index); /** Returns the int code of this item */ public final int getCode() { return getItemType().code; } /** Return the enumerated type of this item. */ public abstract ItemType getItemType(); /** Returns the name of this item */ public abstract String getName(); /** * Sets whether this is a filter term. Filter terms are treated just like other terms except that * they will not be highlighted in dynamic summaries. */ public void setFilter(boolean filter) { if (filter) { creator = ItemCreator.FILTER; } else { creator = ItemCreator.ORIG; } } /** Returns whether this is a filter term */ public boolean isFilter() { return creator == ItemCreator.FILTER; } /** * Returns the item creator value. * * @deprecated use isFilter(boolean) */ @Deprecated public ItemCreator getCreator() { return creator; } /** * Sets the item creator value. 
* * @deprecated use setFilter(boolean) */ @Deprecated public void setCreator(ItemCreator creator) { this.creator = creator; } /** Sets the relative importance of this term */ public void setWeight(int w) { weight = w; } /** Returns the relative importance of this term. Default is 100. */ public int getWeight() { return weight; } /** * Annotate this item * * @param key the annotation key * @param value the value, or null to set a valueless annotation */ public void addAnnotation(String key, Object value) { if (annotations == null) annotations = new CopyOnWriteHashMap<>(); annotations.put(key, value); } /** Returns an annotation on this item, or null if the annotation is not set */ public Object getAnnotation(String annotation) { if (annotations == null) { return null; } return annotations.get(annotation); } /** * Returns whether this has an annotation */ public boolean hasAnnotation(String annotation) { if (annotations == null) return false; return annotations.containsKey(annotation); } /** Set whether this should be protected from change/remove by query rewriters */ public void setProtected(boolean isProtected) { this.isProtected=isProtected; } /** Returns whether this is to be protected from change/remove by query rewriters. default is false */ public boolean isProtected() { return isProtected; } /** Sets the parent in the tree. Do not use: Only to be called from CompositeItem/QueryTree */ public void setParent(CompositeItem parent) { this.parent = parent; } /** Returns the parent in the query tree, or null if this node has no parent */ public CompositeItem getParent() { return parent; } public abstract int encode(ByteBuffer buffer); /** * Returns an integer that contains all feature flags for this item. 
 * @return the feature flags encoded as a bit mask
 */
private byte getFlagsFeature() {
    // Bit positions are part of the serialized wire format — do not renumber
    byte FLAGS_NORANK = 0x01;
    byte FLAGS_SPECIALTOKEN = 0x02;
    byte FLAGS_NOPOSITIONDATA = 0x04;
    byte FLAGS_ISFILTER = 0x08;
    byte ret = 0;
    if (!isRanked()) {
        ret |= FLAGS_NORANK;
    }
    if (isFromSpecialToken()) {
        ret |= FLAGS_SPECIALTOKEN;
    }
    if (!usePositionData()) {
        ret |= FLAGS_NOPOSITIONDATA;
    }
    if (isFilter()) {
        ret |= FLAGS_ISFILTER;
    }
    return ret;
}

/** Utility method for turning a string into utf-8 bytes */
protected static byte[] getBytes(String string) {
    return Utf8.toBytes(string);
}

/** Writes the given string to the buffer as UTF-8 bytes, prefixed by their compressed length. */
public static void putString(String s, ByteBuffer buffer) {
    putBytes(Utf8.toBytes(s), buffer);
}

/** Writes the given bytes to the buffer, prefixed by their length as a compressed positive number. */
public static void putBytes(byte[] bytes, ByteBuffer buffer) {
    IntegerCompressor.putCompressedPositiveNumber(bytes.length, buffer);
    buffer.put(bytes);
}

/** Returns the number of terms contained in this item; defined by each concrete subclass. */
public abstract int getTermCount();

/**
 * Returns the canonical query language string of this item.
 * The canonical language represent an item by the string
 * <pre>
 * ([itemName] [body])
 * </pre>
 * where the body may recursively be other items.
 */
@Override
public String toString() {
    StringBuilder buffer = new StringBuilder();
    if (shouldParenthesize()) {
        buffer.append("(");
    }
    if (isFilter()) {
        buffer.append("|");
    }
    appendHeadingString(buffer);
    appendBodyString(buffer);
    if (shouldParenthesize()) {
        buffer.append(")");
    }
    // Only non-default weights are made explicit, as "!<weight>"
    if (weight != DEFAULT_WEIGHT) {
        buffer.append("!");
        buffer.append(weight);
    }
    return buffer.toString();
}

/**
 * Returns whether this item should be parenthesized when printed.
 * Default is false - no parentheses
 */
protected boolean shouldParenthesize() {
    return false;
}

/** Appends the heading of this string. As default getName() followed by a space. */
protected void appendHeadingString(StringBuilder buffer) {
    buffer.append(getName());
    buffer.append(" ");
}

/**
 * Override to append the item body in the canonical query language of this item.
 * An item is usually represented by the string
 * <pre>
 * ([itemName] [body])
 * </pre>
 * The body must be appended by this method.
 */
protected abstract void appendBodyString(StringBuilder buffer);

/** Returns a deep copy of this item */
@Override
public Item clone() {
    try {
        Item clone = (Item)super.clone();
        // Only annotations are copied here; presumably subclasses clone any
        // further mutable state of their own — confirm for each subclass
        if (this.annotations != null)
            clone.annotations = this.annotations.clone();
        return clone;
    } catch (CloneNotSupportedException e) {
        throw new RuntimeException("Someone made Item uncloneable");
    }
}

/** Returns whether this item is of the same class and contains the same state as the given item. */
@Override
public boolean equals(Object o) {
    if (o == null) return false;
    if (o == this) return true;
    if (o.getClass() != this.getClass()) return false; // exact class match, not instanceof
    Item other = (Item)o;
    if (this.weight != other.weight) return false;
    if (this.fromSpecialToken != other.fromSpecialToken) return false;
    if (this.creator != other.creator) return false;
    if ( ! Objects.equals(this.annotations, other.annotations)) return false;
    if (this.isRanked != other.isRanked) return false;
    if (this.usePositionData != other.usePositionData) return false;
    if ( ! Objects.equals(this.label, other.label)) return false;
    if (this.uniqueID != other.uniqueID) return false;
    if ( ! Objects.equals(this.connectedItem, other.connectedItem)) return false;
    if (this.connectivity != other.connectivity) return false;
    if (this.significance != other.significance) return false;
    if (this.language != other.language) return false;
    return true;
}

@Override
public int hashCode() {
    // Must stay in sync with the fields compared in equals()
    return Objects.hash(weight, fromSpecialToken, creator, annotations, isRanked, usePositionData,
                        label, uniqueID, connectedItem, connectivity, significance, language);
}

protected boolean hasUniqueID() {
    return hasUniqueID;
}

protected void setHasUniqueID(boolean hasUniqueID) {
    this.hasUniqueID = hasUniqueID;
}

/**
 * Label this item with a symbolic name which can later be used by
 * the back-end to identify specific items for ranking purposes.
 *
 * @param label label for this item
 */
public void setLabel(String label) {
    setHasUniqueID(true); // a labeled item must carry a unique id
    this.label = label;
}

/** Returns the label for this item. This method will return null if no label has been set. */
public String getLabel() {
    return label;
}

/**
 * Sets whether this term item should affect ranking.
 * If set to false this term is not exposed to the ranking framework in the search backend.
 */
public void setRanked(boolean isRanked) {
    this.isRanked = isRanked;
}

/** Returns whether this item should affect ranking. */
public boolean isRanked() {
    return isRanked;
}

/**
 * Sets whether position data should be used when ranking this term item.
 * If set to false the search backend uses fast bit vector data structures when matching on this term
 * and only a few simple ranking features will be available when ranking this term.
 * Note that setting this to false also saves a lot of CPU during matching as bit vector data structures are used.
 */
public void setPositionData(boolean usePositionData) {
    this.usePositionData = usePositionData;
}

/** Returns whether position data should be used when ranking this item */
public boolean usePositionData() {
    return usePositionData;
}

/** Exposes the non-default properties of this item to the given discloser. */
public void disclose(Discloser discloser) {
    if (connectivity != 0)
        discloser.addProperty("connectivity", connectivity);
    if (connectedItem != null)
        discloser.addProperty("connectedItem", connectedItem);
    if (creator != ItemCreator.ORIG)
        discloser.addProperty("creator", creator);
    if ( ! isRanked)
        discloser.addProperty("isRanked", isRanked);
    if ( ! usePositionData)
        discloser.addProperty("usePositionData", usePositionData);
    if (explicitSignificance)
        discloser.addProperty("significance", significance);
    if (weight != 100) // NOTE(review): literal 100 here vs DEFAULT_WEIGHT in toString() — confirm they agree
        discloser.addProperty("weight", weight);
    if (label != null)
        discloser.addProperty("label", label);
    if (hasUniqueID)
        discloser.addProperty("uniqueID", uniqueID);
}

public boolean isFromSpecialToken() {
    return fromSpecialToken;
}

public void setFromSpecialToken(boolean fromSpecialToken) {
    this.fromSpecialToken = fromSpecialToken;
}

/** Returns the language of any natural language text below this item, or Language.UNKNOWN if not set. */
public Language getLanguage() {
    return language;
}

/**
 * Sets the language of any natural language text below this item.
 * This cannot be set to null but can be set to Language.UNKNOWN
 */
public void setLanguage(Language language) {
    Objects.requireNonNull(language, "Language cannot be null");
    this.language = language;
}

/**
 * DO NOT USE
 */
public boolean hasConnectivityBackLink() {
    return connectedBacklink != null;
}

/** Returns true if this is the root item - that is if the parent is the QueryTree (or null for legacy reasons)*/
public boolean isRoot() {
    if (getParent()==null) return true;
    if (getParent() instanceof QueryTree) return true;
    return false;
}

}
:laughing:
/** Returns the idSeed for the given load balancer ID: its serialized form wrapped in ":" delimiters. */
public static String toSeed(LoadBalancerId id) {
    return ":".concat(id.serializedForm()).concat(":");
}
return ":" + id.serializedForm() + ":";
/** Returns the idSeed for the given load balancer ID: its serialized form wrapped in ":" delimiters. */
public static String toSeed(LoadBalancerId id) { return ":" + id.serializedForm() + ":"; }
/**
 * Provisions, configures and deactivates application load balancers.
 * <p>
 * Load balancer state is persisted through {@link CuratorDb}; the cloud-level load balancer
 * itself is managed through {@link LoadBalancerService}. All reads and writes of a load
 * balancer happen under the owning application's lock.
 */
class LoadBalancerProvisioner {

    private static final Logger log = Logger.getLogger(LoadBalancerProvisioner.class.getName());

    private final NodeRepository nodeRepository;
    private final CuratorDb db;
    private final LoadBalancerService service;
    private final BooleanFlag deactivateRouting;
    private final BooleanFlag ipv6AwsTargetGroups;
    private final IntFlag preProvisionPoolSize;

    public LoadBalancerProvisioner(NodeRepository nodeRepository, LoadBalancerService service) {
        this.nodeRepository = nodeRepository;
        this.db = nodeRepository.database();
        this.service = service;
        this.deactivateRouting = PermanentFlags.DEACTIVATE_ROUTING.bindTo(nodeRepository.flagSource());
        this.ipv6AwsTargetGroups = Flags.IPV6_AWS_TARGET_GROUPS.bindTo(nodeRepository.flagSource());
        this.preProvisionPoolSize = PermanentFlags.PRE_PROVISIONED_LB_COUNT.bindTo(nodeRepository.flagSource());
        // Read and immediately re-write every stored load balancer under its application lock,
        // presumably to rewrite entries in the current serialization format — TODO confirm
        for (var id : db.readLoadBalancerIds()) {
            try (var lock = db.lock(id.application())) {
                var loadBalancer = db.readLoadBalancer(id);
                loadBalancer.ifPresent(lb -> db.writeLoadBalancer(lb, lb.state()));
            }
        }
    }

    /**
     * Prepare a load balancer for given application and cluster.
     * <p>
     * If a load balancer for the cluster already exists, it will be reconfigured based on the currently allocated
     * nodes. Its state will remain unchanged.
     * <p>
     * If no load balancer exists, a new one will be provisioned in {@link LoadBalancer.State#reserved}.
     * <p>
     * Calling this for irrelevant node or cluster types is a no-op.
     */
    public void prepare(ApplicationId application, ClusterSpec cluster, NodeSpec requested) {
        if (!shouldProvision(application, requested.type(), cluster.type())) return;
        try (var lock = db.lock(application)) {
            ClusterSpec.Id clusterId = effectiveId(cluster);
            LoadBalancerId loadBalancerId = requireNonClashing(new LoadBalancerId(application, clusterId));
            prepare(loadBalancerId, cluster.zoneEndpoint(), requested);
        }
    }

    /**
     * Activate load balancer for given application and cluster.
     * <p>
     * If a load balancer for the cluster already exists, it will be reconfigured based on the currently allocated
     * nodes and the load balancer itself will be moved to {@link LoadBalancer.State#active}.
     * <p>
     * Load balancers for clusters that are no longer in given clusters are deactivated.
     * <p>
     * Calling this when no load balancer has been prepared for given cluster is a no-op.
     */
    public void activate(Set<ClusterSpec> clusters, NodeList newActive, ApplicationTransaction transaction) {
        // Collapse clusters to their effective (combined) id; when several clusters share an id,
        // the first non-default zone endpoint wins
        Map<ClusterSpec.Id, ZoneEndpoint> activatingClusters = clusters.stream()
                .collect(groupingBy(LoadBalancerProvisioner::effectiveId,
                                    reducing(ZoneEndpoint.defaultEndpoint, ClusterSpec::zoneEndpoint, (o, n) -> o.isDefault() ? n : o)));
        for (var cluster : loadBalancedClustersOf(newActive).entrySet()) {
            if ( ! activatingClusters.containsKey(cluster.getKey())) continue;
            Node clusterNode = cluster.getValue().first().get();
            if ( ! shouldProvision(transaction.application(), clusterNode.type(), clusterNode.allocation().get().membership().cluster().type())) continue;
            activate(transaction, cluster.getKey(), activatingClusters.get(cluster.getKey()), cluster.getValue());
        }
        // Deactivate any load balancers that no longer have an activating cluster
        deactivate(surplusLoadBalancersOf(transaction.application(), activatingClusters.keySet()), transaction.nested());
    }

    /**
     * Deactivate all load balancers assigned to given application. This is a no-op if an application does not have any
     * load balancer(s).
     */
    public void deactivate(ApplicationTransaction transaction) {
        deactivate(nodeRepository.loadBalancers().list(transaction.application()).asList(), transaction.nested());
    }

    /** Returns whether to provision a load balancer for given application */
    private boolean shouldProvision(ApplicationId application, NodeType nodeType, ClusterSpec.Type clusterType) {
        if (application.instance().isTester()) return false;  // tester instances never get a load balancer
        if (!service.supports(nodeType, clusterType)) return false;  // no load balancer support for this combination
        return true;
    }

    /** Returns load balancers of given application that are no longer referenced by given clusters */
    private List<LoadBalancer> surplusLoadBalancersOf(ApplicationId application, Set<ClusterSpec.Id> activeClusters) {
        var activeLoadBalancersByCluster = nodeRepository.loadBalancers().list(application)
                                                         .in(LoadBalancer.State.active)
                                                         .asList()
                                                         .stream()
                                                         .collect(Collectors.toMap(lb -> lb.id().cluster(), Function.identity()));
        var surplus = new ArrayList<LoadBalancer>();
        for (var kv : activeLoadBalancersByCluster.entrySet()) {
            if (activeClusters.contains(kv.getKey())) continue;
            surplus.add(kv.getValue());
        }
        return Collections.unmodifiableList(surplus);
    }

    /** Marks the given load balancers inactive; the actual cloud resources are removed later — confirm by which job. */
    private void deactivate(List<LoadBalancer> loadBalancers, NestedTransaction transaction) {
        var now = nodeRepository.clock().instant();
        var deactivatedLoadBalancers = loadBalancers.stream()
                                                    .map(lb -> lb.with(LoadBalancer.State.inactive, now))
                                                    .toList();
        db.writeLoadBalancers(deactivatedLoadBalancers, LoadBalancer.State.active, transaction);
    }

    /** Find all load balancer IDs owned by given tenant and application */
    private List<LoadBalancerId> findLoadBalancers(TenantName tenant, ApplicationName application) {
        return db.readLoadBalancerIds().stream()
                 .filter(id -> id.application().tenant().equals(tenant) &&
                               id.application().application().equals(application))
                 .toList();
    }

    /** Require that load balancer IDs do not clash. This prevents name clashing when compacting endpoint DNS names */
    private LoadBalancerId requireNonClashing(LoadBalancerId loadBalancerId) {
        List<LoadBalancerId> loadBalancerIds = findLoadBalancers(loadBalancerId.application().tenant(),
                                                                 loadBalancerId.application().application());
        List<String> nonCompactableIds = withoutCompactableIds(loadBalancerId);
        for (var id : loadBalancerIds) {
            if (id.equals(loadBalancerId)) continue;
            if (nonCompactableIds.equals(withoutCompactableIds(id))) {
                throw new IllegalArgumentException(loadBalancerId + " clashes with " + id);
            }
        }
        return loadBalancerId;
    }

    /**
     * Reads or creates the load balancer for the given id and provisions its instance if needed.
     * Whatever state was reached is always persisted in the finally block, even when provisioning fails,
     * so that partially provisioned resources are tracked and can be retried or cleaned up.
     */
    private void prepare(LoadBalancerId id, ZoneEndpoint zoneEndpoint, NodeSpec requested) {
        CloudAccount cloudAccount = requested.cloudAccount();
        Optional<LoadBalancer> loadBalancer = db.readLoadBalancer(id);
        LoadBalancer newLoadBalancer = null;
        LoadBalancer.State fromState = loadBalancer.map(LoadBalancer::state).orElse(null);
        try {
            // A change of cloud account requires the old load balancer to be removed and a new one created
            if (loadBalancer.isPresent() && ! inAccount(cloudAccount, loadBalancer.get())) {
                newLoadBalancer = loadBalancer.get().with(State.removable, nodeRepository.clock().instant());
                throw new LoadBalancerServiceException("Could not (re)configure " + id + " due to change in cloud account. The operation will be retried on next deployment");
            }
            // Likewise for a change of public/private visibility
            if (loadBalancer.isPresent() && ! hasCorrectVisibility(loadBalancer.get(), zoneEndpoint)) {
                newLoadBalancer = loadBalancer.get().with(State.removable, nodeRepository.clock().instant());
                throw new LoadBalancerServiceException("Could not (re)configure " + id + " due to change in load balancer visibility. The operation will be retried on next deployment");
            }
            newLoadBalancer = loadBalancer.orElseGet(() -> createNewLoadBalancer(id, zoneEndpoint, requested));
            newLoadBalancer = newLoadBalancer.with(provisionInstance(newLoadBalancer, zoneEndpoint, requested));
        } catch (LoadBalancerServiceException e) {
            log.log(Level.WARNING, "Failed to provision load balancer", e);
            throw e;
        } finally {
            db.writeLoadBalancer(newLoadBalancer, fromState);
        }
    }

    /** Returns whether the stored load balancer's visibility matches the requested zone endpoint, or has no instance yet. */
    private static boolean hasCorrectVisibility(LoadBalancer newLoadBalancer, ZoneEndpoint zoneEndpoint) {
        return newLoadBalancer.instance().isEmpty()
               || newLoadBalancer.instance().get().settings().isPublicEndpoint() == zoneEndpoint.isPublicEndpoint();
    }

    /** Creates a new load balancer, with an instance if one is taken from the pool, or without otherwise. */
    private LoadBalancer createNewLoadBalancer(LoadBalancerId id, ZoneEndpoint zoneEndpoint, NodeSpec requested) {
        LoadBalancerSpec spec = new LoadBalancerSpec(id.application(), id.cluster(), Set.of(), zoneEndpoint,
                                                     requested.cloudAccount(), toSeed(id, requested.type()));
        return provisionFromPool(spec, requested.type())
                .orElseGet(() -> new LoadBalancer(id, spec.idSeed(), Optional.empty(), State.reserved, nodeRepository.clock().instant()));
    }

    /** Provision a load balancer instance, if necessary */
    private LoadBalancerInstance provisionInstance(LoadBalancer currentLoadBalancer, ZoneEndpoint zoneEndpoint, NodeSpec requested) {
        LoadBalancerId id = currentLoadBalancer.id();
        Set<Real> reals = currentLoadBalancer.instance()
                                             .map(LoadBalancerInstance::reals)
                                             .orElse(Set.of());
        // Public/private visibility comes from the requested endpoint; allowed URNs are kept from the existing instance
        ZoneEndpoint settings = new ZoneEndpoint(zoneEndpoint.isPublicEndpoint(),
                                                 zoneEndpoint.isPrivateEndpoint(),
                                                 currentLoadBalancer.instance()
                                                                    .map(LoadBalancerInstance::settings)
                                                                    .map(ZoneEndpoint::allowedUrns)
                                                                    .orElse(List.of()));
        // No settings change: keep the existing instance as-is
        if (currentLoadBalancer.instance().map(instance -> settings.equals(instance.settings())).orElse(false))
            return currentLoadBalancer.instance().get();
        log.log(Level.INFO, () -> "Provisioning instance for " + id);
        try {
            return service.provision(new LoadBalancerSpec(id.application(), id.cluster(), reals, settings,
                                                          requested.cloudAccount(), currentLoadBalancer.idSeed()))
                          // Retain any private-endpoint service IDs already established
                          .withServiceIds(currentLoadBalancer.instance().map(LoadBalancerInstance::serviceIds).orElse(List.of()));
        } catch (Exception e) {
            throw new LoadBalancerServiceException("Could not provision " + id + ". The operation will be retried on next deployment.", e);
        }
    }

    /**
     * Takes a pre-provisioned load balancer from the pool and reallocates it to the given spec, if one is available.
     * Only default-settings tenant load balancers are eligible. Returns empty on any failure (logged, not thrown).
     */
    private Optional<LoadBalancer> provisionFromPool(LoadBalancerSpec spec, NodeType type) {
        if (type != NodeType.tenant) return Optional.empty();
        if ( ! spec.settings().isDefault()) return Optional.empty();
        if (preProvisionPoolSize.value() == 0) return Optional.empty();

        try (Lock lock = db.lock(preProvisionOwner)) {
            long tail = db.readLoadBalancerPoolTail();
            if (tail >= db.readLoadBalancerPoolHead()) return Optional.empty();  // pool is empty
            ClusterSpec.Id slot = slotId(tail);
            Optional<LoadBalancer> candidate = db.readLoadBalancer(new LoadBalancerId(preProvisionOwner, slot));
            // A pooled load balancer in a different cloud account cannot be reused
            if (candidate.flatMap(LoadBalancer::instance).map(instance -> ! instance.cloudAccount().equals(spec.cloudAccount())).orElse(false)) return Optional.empty();
            db.incrementLoadBalancerPoolTail(); // consume the slot
            LoadBalancer chosen = candidate.orElseThrow(() -> new IllegalStateException("could not find load balancer " + slot + " in pre-provisioned pool"));
            if (chosen.state() != State.active || chosen.instance().isEmpty())
                throw new IllegalStateException("expected active load balancer in pre-provisioned pool, but got " + chosen);
            log.log(Level.INFO, "Using " + chosen + " from pre-provisioned pool");
            service.reallocate(new LoadBalancerSpec(spec.application(), spec.cluster(), spec.reals(),
                                                    spec.settings(), spec.cloudAccount(), chosen.idSeed()));
            db.removeLoadBalancer(chosen.id()); // pool entry is replaced by the application's own entry below
            return Optional.of(new LoadBalancer(new LoadBalancerId(spec.application(), spec.cluster()),
                                                chosen.idSeed(), chosen.instance(), State.reserved, nodeRepository.clock().instant()));
        } catch (Exception e) {
            log.log(Level.WARNING, "Failed to provision load balancer from pool", e);
        }
        return Optional.empty();
    }

    /** Maps a pool slot counter to the synthetic cluster id used for pre-provisioned load balancers. */
    static ClusterSpec.Id slotId(long counter) {
        return ClusterSpec.Id.from(String.valueOf(counter));
    }

    /** Inverse of {@link #slotId}. */
    static long slotOf(ClusterSpec.Id id) {
        return Long.parseLong(id.value());
    }

    /** Evict surplus and failed load balancers, and pre-provision deficit ones. Should only be run by a maintenance job. */
    public void refreshPool() {
        int size = preProvisionPoolSize.value();
        long head = db.readLoadBalancerPoolHead();
        long tail = db.readLoadBalancerPoolTail();
        try (Lock lock = db.lock(preProvisionOwner)) {
            // Shrink the pool: advance the tail and mark evicted entries removable
            while (head - tail > size) tail = db.incrementLoadBalancerPoolTail();
            for (LoadBalancer lb : db.readLoadBalancers(l -> l.application().equals(preProvisionOwner)).values()) {
                long slot = slotOf(lb.id().cluster());
                if (slot < tail) db.writeLoadBalancer(lb.with(State.removable, nodeRepository.clock().instant()), lb.state());
            }
        }
        // Grow the pool outside the lock; provisioning may be slow — confirm this is safe vs. concurrent consumers
        while (head - tail < size) {
            ClusterSpec.Id slot = slotId(head);
            LoadBalancerId id = new LoadBalancerId(preProvisionOwner, slot);
            LoadBalancerSpec spec = preProvisionSpec(slot, nodeRepository.zone().cloud().account(), toSeed(id));
            db.writeLoadBalancer(new LoadBalancer(id,
                                                  spec.idSeed(),
                                                  Optional.of(service.provision(spec)),
                                                  State.active,
                                                  nodeRepository.clock().instant()),
                                 null);
            head = db.incrementLoadBalancerPoolHead();
        }
    }

    /** Returns the idSeed for the given load balancer ID; non-tenant types keep the legacy seed format. */
    public static String toSeed(LoadBalancerId id, NodeType type) {
        return type == NodeType.tenant ? toSeed(id) : toLegacySeed(id.application(), id.cluster());
    }

    /** Legacy idSeed format: plain concatenation of tenant, application, instance and cluster names. */
    public static String toLegacySeed(ApplicationId application, ClusterSpec.Id cluster) {
        return application.tenant().value() +
               application.application().value() +
               application.instance().value() +
               cluster.value();
    }

    /** Activates the prepared load balancer for the given cluster; restores the previous entry if configuration fails. */
    private void activate(ApplicationTransaction transaction, ClusterSpec.Id cluster, ZoneEndpoint settings, NodeList nodes) {
        Instant now = nodeRepository.clock().instant();
        LoadBalancerId id = new LoadBalancerId(transaction.application(), cluster);
        Optional<LoadBalancer> loadBalancer = db.readLoadBalancer(id);
        if (loadBalancer.isEmpty()) throw new IllegalArgumentException("Could not activate load balancer that was never prepared: " + id);
        if (loadBalancer.get().instance().isEmpty()) throw new IllegalArgumentException("Activating " + id + ", but prepare never provisioned a load balancer instance");

        try {
            LoadBalancerInstance instance = configureInstance(id, nodes, loadBalancer.get(), settings, loadBalancer.get().instance().get().cloudAccount());
            db.writeLoadBalancers(List.of(loadBalancer.get().with(instance).with(State.active, now)),
                                  loadBalancer.get().state(), transaction.nested());
        } catch (LoadBalancerServiceException e) {
            // Re-write the unmodified load balancer so its state stays consistent, then propagate
            db.writeLoadBalancers(List.of(loadBalancer.get()), loadBalancer.get().state(), transaction.nested());
            throw e;
        }
    }

    /** Reconfigure a load balancer instance, if necessary */
    private LoadBalancerInstance configureInstance(LoadBalancerId id, NodeList nodes,
                                                   LoadBalancer currentLoadBalancer,
                                                   ZoneEndpoint zoneEndpoint,
                                                   CloudAccount cloudAccount) {
        // Feature flag: routing can be deactivated per instance by targeting zero reals
        boolean shouldDeactivateRouting = deactivateRouting.with(FetchVector.Dimension.INSTANCE_ID,
                                                                 id.application().serializedForm())
                                                           .value();
        Set<Real> reals = shouldDeactivateRouting ? Set.of() : realsOf(nodes, cloudAccount);
        log.log(Level.FINE, () -> "Configuring instance for " + id + ", targeting: " + reals);
        try {
            return service.configure(currentLoadBalancer.instance().orElseThrow(() -> new IllegalArgumentException("expected existing instance for " + id)),
                                     new LoadBalancerSpec(id.application(), id.cluster(), reals, zoneEndpoint, cloudAccount, currentLoadBalancer.idSeed()),
                                     shouldDeactivateRouting || currentLoadBalancer.state() != LoadBalancer.State.active);
        } catch (Exception e) {
            throw new LoadBalancerServiceException("Could not (re)configure " + id + ", targeting: " + reals, e);
        }
    }

    /** Returns the load balanced clusters of given application and their nodes */
    private Map<ClusterSpec.Id, NodeList> loadBalancedClustersOf(NodeList nodes) {
        // Pick the relevant node/cluster type: config servers, controllers, or tenant containers
        if (nodes.stream().anyMatch(node -> node.type() == NodeType.config)) {
            nodes = nodes.nodeType(NodeType.config).type(ClusterSpec.Type.admin);
        } else if (nodes.stream().anyMatch(node -> node.type() == NodeType.controller)) {
            nodes = nodes.nodeType(NodeType.controller).container();
        } else {
            nodes = nodes.nodeType(NodeType.tenant).container();
        }
        return nodes.groupingBy(node -> effectiveId(node.allocation().get().membership().cluster()));
    }

    /** Returns real servers for given nodes */
    private Set<Real> realsOf(NodeList nodes, CloudAccount cloudAccount) {
        Set<Real> reals = new LinkedHashSet<>();
        for (var node : nodes) {
            for (var ip : reachableIpAddresses(node, cloudAccount)) {
                reals.add(new Real(HostName.of(node.hostname()), ip));
            }
        }
        return reals;
    }

    /** Returns a list of the non-compactable IDs of given load balancer */
    private static List<String> withoutCompactableIds(LoadBalancerId id) {
        List<String> ids = new ArrayList<>(2);
        if (!"default".equals(id.cluster().value())) {
            ids.add(id.cluster().value());
        }
        if (!id.application().instance().isDefault()) {
            ids.add(id.application().instance().value());
        }
        return ids;
    }

    /** Returns whether load balancer is provisioned in given account */
    private boolean inAccount(CloudAccount cloudAccount, LoadBalancer loadBalancer) {
        return !nodeRepository.zone().cloud().allowEnclave()
               || loadBalancer.instance().isEmpty()
               || loadBalancer.instance().get().cloudAccount().equals(cloudAccount);
    }

    /** Find IP addresses reachable by the load balancer service */
    private Set<String> reachableIpAddresses(Node node, CloudAccount cloudAccount) {
        Set<String> reachable = new LinkedHashSet<>(node.ipConfig().primary());
        boolean forceIpv6 = ipv6AwsTargetGroups.with(FetchVector.Dimension.CLOUD_ACCOUNT, cloudAccount.account()).value();
        var protocol = forceIpv6 ? LoadBalancerService.Protocol.ipv6
                                 : service.protocol(node.cloudAccount().isExclave(nodeRepository.zone()));
        // Keep only addresses of the protocol family the service can reach
        switch (protocol) {
            case ipv4 -> reachable.removeIf(IP::isV6);
            case ipv6 -> reachable.removeIf(IP::isV4);
        }
        return reachable;
    }

    /** Returns the cluster id to provision the load balancer under: the combined cluster's id, if any. */
    private static ClusterSpec.Id effectiveId(ClusterSpec cluster) {
        return cluster.combinedId().orElse(cluster.id());
    }

}
class LoadBalancerProvisioner { private static final Logger log = Logger.getLogger(LoadBalancerProvisioner.class.getName()); private final NodeRepository nodeRepository; private final CuratorDb db; private final LoadBalancerService service; private final BooleanFlag deactivateRouting; private final BooleanFlag ipv6AwsTargetGroups; private final IntFlag preProvisionPoolSize; public LoadBalancerProvisioner(NodeRepository nodeRepository, LoadBalancerService service) { this.nodeRepository = nodeRepository; this.db = nodeRepository.database(); this.service = service; this.deactivateRouting = PermanentFlags.DEACTIVATE_ROUTING.bindTo(nodeRepository.flagSource()); this.ipv6AwsTargetGroups = Flags.IPV6_AWS_TARGET_GROUPS.bindTo(nodeRepository.flagSource()); this.preProvisionPoolSize = PermanentFlags.PRE_PROVISIONED_LB_COUNT.bindTo(nodeRepository.flagSource()); for (var id : db.readLoadBalancerIds()) { try (var lock = db.lock(id.application())) { var loadBalancer = db.readLoadBalancer(id); loadBalancer.ifPresent(lb -> db.writeLoadBalancer(lb, lb.state())); } } } /** * Prepare a load balancer for given application and cluster. * <p> * If a load balancer for the cluster already exists, it will be reconfigured based on the currently allocated * nodes. Its state will remain unchanged. * <p> * If no load balancer exists, a new one will be provisioned in {@link LoadBalancer.State * <p> * Calling this for irrelevant node or cluster types is a no-op. */ public void prepare(ApplicationId application, ClusterSpec cluster, NodeSpec requested) { if (!shouldProvision(application, requested.type(), cluster.type())) return; try (var lock = db.lock(application)) { ClusterSpec.Id clusterId = effectiveId(cluster); LoadBalancerId loadBalancerId = requireNonClashing(new LoadBalancerId(application, clusterId)); prepare(loadBalancerId, cluster.zoneEndpoint(), requested); } } /** * Activate load balancer for given application and cluster. 
* <p> * If a load balancer for the cluster already exists, it will be reconfigured based on the currently allocated * nodes and the load balancer itself will be moved to {@link LoadBalancer.State * <p> * Load balancers for clusters that are no longer in given clusters are deactivated. * <p> * Calling this when no load balancer has been prepared for given cluster is a no-op. */ public void activate(Set<ClusterSpec> clusters, NodeList newActive, ApplicationTransaction transaction) { Map<ClusterSpec.Id, ZoneEndpoint> activatingClusters = clusters.stream() .collect(groupingBy(LoadBalancerProvisioner::effectiveId, reducing(ZoneEndpoint.defaultEndpoint, ClusterSpec::zoneEndpoint, (o, n) -> o.isDefault() ? n : o))); for (var cluster : loadBalancedClustersOf(newActive).entrySet()) { if ( ! activatingClusters.containsKey(cluster.getKey())) continue; Node clusterNode = cluster.getValue().first().get(); if ( ! shouldProvision(transaction.application(), clusterNode.type(), clusterNode.allocation().get().membership().cluster().type())) continue; activate(transaction, cluster.getKey(), activatingClusters.get(cluster.getKey()), cluster.getValue()); } deactivate(surplusLoadBalancersOf(transaction.application(), activatingClusters.keySet()), transaction.nested()); } /** * Deactivate all load balancers assigned to given application. This is a no-op if an application does not have any * load balancer(s). 
*/ public void deactivate(ApplicationTransaction transaction) { deactivate(nodeRepository.loadBalancers().list(transaction.application()).asList(), transaction.nested()); } /** Returns whether to provision a load balancer for given application */ private boolean shouldProvision(ApplicationId application, NodeType nodeType, ClusterSpec.Type clusterType) { if (application.instance().isTester()) return false; if (!service.supports(nodeType, clusterType)) return false; return true; } /** Returns load balancers of given application that are no longer referenced by given clusters */ private List<LoadBalancer> surplusLoadBalancersOf(ApplicationId application, Set<ClusterSpec.Id> activeClusters) { var activeLoadBalancersByCluster = nodeRepository.loadBalancers().list(application) .in(LoadBalancer.State.active) .asList() .stream() .collect(Collectors.toMap(lb -> lb.id().cluster(), Function.identity())); var surplus = new ArrayList<LoadBalancer>(); for (var kv : activeLoadBalancersByCluster.entrySet()) { if (activeClusters.contains(kv.getKey())) continue; surplus.add(kv.getValue()); } return Collections.unmodifiableList(surplus); } private void deactivate(List<LoadBalancer> loadBalancers, NestedTransaction transaction) { var now = nodeRepository.clock().instant(); var deactivatedLoadBalancers = loadBalancers.stream() .map(lb -> lb.with(LoadBalancer.State.inactive, now)) .toList(); db.writeLoadBalancers(deactivatedLoadBalancers, LoadBalancer.State.active, transaction); } /** Find all load balancer IDs owned by given tenant and application */ private List<LoadBalancerId> findLoadBalancers(TenantName tenant, ApplicationName application) { return db.readLoadBalancerIds().stream() .filter(id -> id.application().tenant().equals(tenant) && id.application().application().equals(application)) .toList(); } /** Require that load balancer IDs do not clash. 
This prevents name clashing when compacting endpoint DNS names */ private LoadBalancerId requireNonClashing(LoadBalancerId loadBalancerId) { List<LoadBalancerId> loadBalancerIds = findLoadBalancers(loadBalancerId.application().tenant(), loadBalancerId.application().application()); List<String> nonCompactableIds = withoutCompactableIds(loadBalancerId); for (var id : loadBalancerIds) { if (id.equals(loadBalancerId)) continue; if (nonCompactableIds.equals(withoutCompactableIds(id))) { throw new IllegalArgumentException(loadBalancerId + " clashes with " + id); } } return loadBalancerId; } private void prepare(LoadBalancerId id, ZoneEndpoint zoneEndpoint, NodeSpec requested) { CloudAccount cloudAccount = requested.cloudAccount(); Optional<LoadBalancer> loadBalancer = db.readLoadBalancer(id); LoadBalancer newLoadBalancer = null; LoadBalancer.State fromState = loadBalancer.map(LoadBalancer::state).orElse(null); try { if (loadBalancer.isPresent() && ! inAccount(cloudAccount, loadBalancer.get())) { newLoadBalancer = loadBalancer.get().with(State.removable, nodeRepository.clock().instant()); throw new LoadBalancerServiceException("Could not (re)configure " + id + " due to change in cloud account. The operation will be retried on next deployment"); } if (loadBalancer.isPresent() && ! hasCorrectVisibility(loadBalancer.get(), zoneEndpoint)) { newLoadBalancer = loadBalancer.get().with(State.removable, nodeRepository.clock().instant()); throw new LoadBalancerServiceException("Could not (re)configure " + id + " due to change in load balancer visibility. 
The operation will be retried on next deployment"); } newLoadBalancer = loadBalancer.orElseGet(() -> createNewLoadBalancer(id, zoneEndpoint, requested)); newLoadBalancer = newLoadBalancer.with(provisionInstance(newLoadBalancer, zoneEndpoint, requested)); } catch (LoadBalancerServiceException e) { log.log(Level.WARNING, "Failed to provision load balancer", e); throw e; } finally { db.writeLoadBalancer(newLoadBalancer, fromState); } } private static boolean hasCorrectVisibility(LoadBalancer newLoadBalancer, ZoneEndpoint zoneEndpoint) { return newLoadBalancer.instance().isEmpty() || newLoadBalancer.instance().get().settings().isPublicEndpoint() == zoneEndpoint.isPublicEndpoint(); } /** Creates a new load balancer, with an instance if one is taken from the pool, or without otherwise. */ private LoadBalancer createNewLoadBalancer(LoadBalancerId id, ZoneEndpoint zoneEndpoint, NodeSpec requested) { LoadBalancerSpec spec = new LoadBalancerSpec(id.application(), id.cluster(), Set.of(), zoneEndpoint, requested.cloudAccount(), toSeed(id, requested.type())); return provisionFromPool(spec, requested.type()) .orElseGet(() -> new LoadBalancer(id, spec.idSeed(), Optional.empty(), State.reserved, nodeRepository.clock().instant())); } /** Provision a load balancer instance, if necessary */ private LoadBalancerInstance provisionInstance(LoadBalancer currentLoadBalancer, ZoneEndpoint zoneEndpoint, NodeSpec requested) { LoadBalancerId id = currentLoadBalancer.id(); Set<Real> reals = currentLoadBalancer.instance() .map(LoadBalancerInstance::reals) .orElse(Set.of()); ZoneEndpoint settings = new ZoneEndpoint(zoneEndpoint.isPublicEndpoint(), zoneEndpoint.isPrivateEndpoint(), currentLoadBalancer.instance() .map(LoadBalancerInstance::settings) .map(ZoneEndpoint::allowedUrns) .orElse(List.of())); if (currentLoadBalancer.instance().map(instance -> settings.equals(instance.settings())).orElse(false)) return currentLoadBalancer.instance().get(); log.log(Level.INFO, () -> "Provisioning instance 
for " + id); try { return service.provision(new LoadBalancerSpec(id.application(), id.cluster(), reals, settings, requested.cloudAccount(), currentLoadBalancer.idSeed())) .withServiceIds(currentLoadBalancer.instance().map(LoadBalancerInstance::serviceIds).orElse(List.of())); } catch (Exception e) { throw new LoadBalancerServiceException("Could not provision " + id + ". The operation will be retried on next deployment.", e); } } private Optional<LoadBalancer> provisionFromPool(LoadBalancerSpec spec, NodeType type) { if (type != NodeType.tenant) return Optional.empty(); if ( ! spec.settings().isDefault()) return Optional.empty(); if (preProvisionPoolSize.value() == 0) return Optional.empty(); try (Lock lock = db.lock(preProvisionOwner)) { long tail = db.readLoadBalancerPoolTail(); if (tail >= db.readLoadBalancerPoolHead()) return Optional.empty(); ClusterSpec.Id slot = slotId(tail); Optional<LoadBalancer> candidate = db.readLoadBalancer(new LoadBalancerId(preProvisionOwner, slot)); if (candidate.flatMap(LoadBalancer::instance).map(instance -> ! 
instance.cloudAccount().equals(spec.cloudAccount())).orElse(false)) return Optional.empty(); db.incrementLoadBalancerPoolTail(); LoadBalancer chosen = candidate.orElseThrow(() -> new IllegalStateException("could not find load balancer " + slot + " in pre-provisioned pool")); if (chosen.state() != State.active || chosen.instance().isEmpty()) throw new IllegalStateException("expected active load balancer in pre-provisioned pool, but got " + chosen); log.log(Level.INFO, "Using " + chosen + " from pre-provisioned pool"); service.reallocate(new LoadBalancerSpec(spec.application(), spec.cluster(), spec.reals(), spec.settings(), spec.cloudAccount(), chosen.idSeed())); db.removeLoadBalancer(chosen.id()); return Optional.of(new LoadBalancer(new LoadBalancerId(spec.application(), spec.cluster()), chosen.idSeed(), chosen.instance(), State.reserved, nodeRepository.clock().instant())); } catch (Exception e) { log.log(Level.WARNING, "Failed to provision load balancer from pool", e); } return Optional.empty(); } static ClusterSpec.Id slotId(long counter) { return ClusterSpec.Id.from(String.valueOf(counter)); } static long slotOf(ClusterSpec.Id id) { return Long.parseLong(id.value()); } /** Evict surplus and failed load balancers, and pre-provision deficit ones. Should only be run by a maintenance job. 
*/ public void refreshPool() { int size = preProvisionPoolSize.value(); long head = db.readLoadBalancerPoolHead(); long tail = db.readLoadBalancerPoolTail(); try (Lock lock = db.lock(preProvisionOwner)) { while (head - tail > size) tail = db.incrementLoadBalancerPoolTail(); for (LoadBalancer lb : db.readLoadBalancers(l -> l.application().equals(preProvisionOwner)).values()) { long slot = slotOf(lb.id().cluster()); if (slot < tail) db.writeLoadBalancer(lb.with(State.removable, nodeRepository.clock().instant()), lb.state()); } } while (head - tail < size) { ClusterSpec.Id slot = slotId(head); LoadBalancerId id = new LoadBalancerId(preProvisionOwner, slot); LoadBalancerSpec spec = preProvisionSpec(slot, nodeRepository.zone().cloud().account(), toSeed(id)); db.writeLoadBalancer(new LoadBalancer(id, spec.idSeed(), Optional.of(service.provision(spec)), State.active, nodeRepository.clock().instant()), null); head = db.incrementLoadBalancerPoolHead(); } } public static String toSeed(LoadBalancerId id, NodeType type) { return type == NodeType.tenant ? 
toSeed(id) : toLegacySeed(id.application(), id.cluster()); } public static String toLegacySeed(ApplicationId application, ClusterSpec.Id cluster) { return application.tenant().value() + application.application().value() + application.instance().value() + cluster.value(); } private void activate(ApplicationTransaction transaction, ClusterSpec.Id cluster, ZoneEndpoint settings, NodeList nodes) { Instant now = nodeRepository.clock().instant(); LoadBalancerId id = new LoadBalancerId(transaction.application(), cluster); Optional<LoadBalancer> loadBalancer = db.readLoadBalancer(id); if (loadBalancer.isEmpty()) throw new IllegalArgumentException("Could not activate load balancer that was never prepared: " + id); if (loadBalancer.get().instance().isEmpty()) throw new IllegalArgumentException("Activating " + id + ", but prepare never provisioned a load balancer instance"); try { LoadBalancerInstance instance = configureInstance(id, nodes, loadBalancer.get(), settings, loadBalancer.get().instance().get().cloudAccount()); db.writeLoadBalancers(List.of(loadBalancer.get().with(instance).with(State.active, now)), loadBalancer.get().state(), transaction.nested()); } catch (LoadBalancerServiceException e) { db.writeLoadBalancers(List.of(loadBalancer.get()), loadBalancer.get().state(), transaction.nested()); throw e; } } /** Reconfigure a load balancer instance, if necessary */ private LoadBalancerInstance configureInstance(LoadBalancerId id, NodeList nodes, LoadBalancer currentLoadBalancer, ZoneEndpoint zoneEndpoint, CloudAccount cloudAccount) { boolean shouldDeactivateRouting = deactivateRouting.with(FetchVector.Dimension.INSTANCE_ID, id.application().serializedForm()) .value(); Set<Real> reals = shouldDeactivateRouting ? 
Set.of() : realsOf(nodes, cloudAccount); log.log(Level.FINE, () -> "Configuring instance for " + id + ", targeting: " + reals); try { return service.configure(currentLoadBalancer.instance().orElseThrow(() -> new IllegalArgumentException("expected existing instance for " + id)), new LoadBalancerSpec(id.application(), id.cluster(), reals, zoneEndpoint, cloudAccount, currentLoadBalancer.idSeed()), shouldDeactivateRouting || currentLoadBalancer.state() != LoadBalancer.State.active); } catch (Exception e) { throw new LoadBalancerServiceException("Could not (re)configure " + id + ", targeting: " + reals, e); } } /** Returns the load balanced clusters of given application and their nodes */ private Map<ClusterSpec.Id, NodeList> loadBalancedClustersOf(NodeList nodes) { if (nodes.stream().anyMatch(node -> node.type() == NodeType.config)) { nodes = nodes.nodeType(NodeType.config).type(ClusterSpec.Type.admin); } else if (nodes.stream().anyMatch(node -> node.type() == NodeType.controller)) { nodes = nodes.nodeType(NodeType.controller).container(); } else { nodes = nodes.nodeType(NodeType.tenant).container(); } return nodes.groupingBy(node -> effectiveId(node.allocation().get().membership().cluster())); } /** Returns real servers for given nodes */ private Set<Real> realsOf(NodeList nodes, CloudAccount cloudAccount) { Set<Real> reals = new LinkedHashSet<>(); for (var node : nodes) { for (var ip : reachableIpAddresses(node, cloudAccount)) { reals.add(new Real(HostName.of(node.hostname()), ip)); } } return reals; } /** Returns a list of the non-compactable IDs of given load balancer */ private static List<String> withoutCompactableIds(LoadBalancerId id) { List<String> ids = new ArrayList<>(2); if (!"default".equals(id.cluster().value())) { ids.add(id.cluster().value()); } if (!id.application().instance().isDefault()) { ids.add(id.application().instance().value()); } return ids; } /** Returns whether load balancer is provisioned in given account */ private boolean 
inAccount(CloudAccount cloudAccount, LoadBalancer loadBalancer) { return !nodeRepository.zone().cloud().allowEnclave() || loadBalancer.instance().isEmpty() || loadBalancer.instance().get().cloudAccount().equals(cloudAccount); } /** Find IP addresses reachable by the load balancer service */ private Set<String> reachableIpAddresses(Node node, CloudAccount cloudAccount) { Set<String> reachable = new LinkedHashSet<>(node.ipConfig().primary()); boolean forceIpv6 = ipv6AwsTargetGroups.with(FetchVector.Dimension.CLOUD_ACCOUNT, cloudAccount.account()).value(); var protocol = forceIpv6 ? LoadBalancerService.Protocol.ipv6 : service.protocol(node.cloudAccount().isExclave(nodeRepository.zone())); switch (protocol) { case ipv4 -> reachable.removeIf(IP::isV6); case ipv6 -> reachable.removeIf(IP::isV4); } return reachable; } private static ClusterSpec.Id effectiveId(ClusterSpec cluster) { return cluster.combinedId().orElse(cluster.id()); } }
Consider using _Objects.requireNonNull(token, "Token string must not be null");_ instead.
public void addToken(String token) { if (token == null) throw new IllegalArgumentException("token must be a string"); tokens.add(token); }
if (token == null) throw new IllegalArgumentException("token must be a string");
public void addToken(String token) { Objects.requireNonNull(token, "Token string must not be null"); tokens.add(token); }
class StringInItem extends InItem { private Set<String> tokens; public StringInItem(String indexName) { super(indexName); tokens = new HashSet<>(1000); } @Override public ItemType getItemType() { return ItemType.STRING_IN; } @Override public int encode(ByteBuffer buffer) { encodeThis(buffer); return 1; } @Override protected void encodeThis(ByteBuffer buffer) { super.encodeThis(buffer); IntegerCompressor.putCompressedPositiveNumber(tokens.size(), buffer); putString(getIndexName(), buffer); for (var entry : tokens) { putString(entry, buffer); } } @Override public int getTermCount() { return 1; } @Override protected void appendBodyString(StringBuilder buffer) { buffer.append(getIndexName()); buffer.append("{"); for (var entry : tokens) { buffer.append("\""); buffer.append(entry); buffer.append("\","); } if (!tokens.isEmpty()) { buffer.deleteCharAt(buffer.length() - 1); } buffer.append("}"); } public Collection<String> getTokens() { return Set.copyOf(tokens); } @Override public boolean equals(Object o) { if (o == this) return true; if ( ! super.equals(o)) return false; var other = (StringInItem)o; if ( ! Objects.equals(this.tokens, other.tokens)) return false; return true; } @Override public int hashCode() { return Objects.hash(super.hashCode(), tokens); } }
class StringInItem extends InItem { private Set<String> tokens; public StringInItem(String indexName) { super(indexName); tokens = new HashSet<>(1000); } @Override public ItemType getItemType() { return ItemType.STRING_IN; } @Override public int encode(ByteBuffer buffer) { encodeThis(buffer); return 1; } @Override protected void encodeThis(ByteBuffer buffer) { super.encodeThis(buffer); IntegerCompressor.putCompressedPositiveNumber(tokens.size(), buffer); putString(getIndexName(), buffer); for (var entry : tokens) { putString(entry, buffer); } } @Override public int getTermCount() { return 1; } @Override protected void appendBodyString(StringBuilder buffer) { buffer.append(getIndexName()); buffer.append("{"); for (var entry : tokens) { buffer.append("\""); buffer.append(entry); buffer.append("\","); } if (!tokens.isEmpty()) { buffer.deleteCharAt(buffer.length() - 1); } buffer.append("}"); } public Collection<String> getTokens() { return Set.copyOf(tokens); } @Override public boolean equals(Object o) { if (o == this) return true; if ( ! super.equals(o)) return false; var other = (StringInItem)o; if ( ! Objects.equals(this.tokens, other.tokens)) return false; return true; } @Override public int hashCode() { return Objects.hash(super.hashCode(), tokens); } }
Consider rename to _token_ (as there is no weight here).
public static void addStringTokensFromString(String string, StringInItem out) { if (string == null) { return; } var s = new ParsableString(string); while (!s.atEnd()) { String key; if (s.passOptional('\'')) { key = s.stringTo(s.position('\'')); s.pass('\''); } else if (s.passOptional('"')) { key = s.stringTo(s.position('"')); s.pass('"'); } else { key = s.stringTo(s.positionOrEnd(',')).trim(); } out.addToken(key); s.passOptional(','); } }
String key;
public static void addStringTokensFromString(String string, StringInItem out) { if (string == null) { return; } var s = new ParsableString(string); while (!s.atEnd()) { String token; if (s.passOptional('\'')) { token = s.stringTo(s.position('\'')); s.pass('\''); } else if (s.passOptional('"')) { token = s.stringTo(s.position('"')); s.pass('"'); } else { token = s.stringTo(s.positionOrEnd(',')).trim(); } out.addToken(token); s.passOptional(','); } }
class ParameterListParser { public static void addItemsFromString(String string, WeightedSetItem out) { var s = new ParsableString(string); switch (s.peek()) { case '[' : addArrayItems(s, out); break; case '{' : addMapItems(s, out); break; default : throw new IllegalArgumentException("Expected a string starting by '[' or '{', " + "but was '" + s.peek() + "'"); } } private static void addArrayItems(ParsableString s, WeightedSetItem out) { s.pass('['); while (s.peek() != ']') { s.pass('['); long key = s.longTo(s.position(',')); s.pass(','); int value = s.intTo(s.position(']')); s.pass(']'); out.addToken(key, value); s.passOptional(','); if (s.atEnd()) throw new IllegalArgumentException("Expected an array ending by ']'"); } s.pass(']'); } private static void addMapItems(ParsableString s, WeightedSetItem out) { s.pass('{'); while (s.peek() != '}') { String key; if (s.passOptional('\'')) { key = s.stringTo(s.position('\'')); s.pass('\''); } else if (s.passOptional('"')) { key = s.stringTo(s.position('"')); s.pass('"'); } else { key = s.stringTo(s.position(':')).trim(); } s.pass(':'); int value = s.intTo(s.position(',','}')); out.addToken(key, value); s.passOptional(','); if (s.atEnd()) throw new IllegalArgumentException("Expected a map ending by '}'"); } s.pass('}'); }
class ParameterListParser { public static void addItemsFromString(String string, WeightedSetItem out) { var s = new ParsableString(string); switch (s.peek()) { case '[' : addArrayItems(s, out); break; case '{' : addMapItems(s, out); break; default : throw new IllegalArgumentException("Expected a string starting by '[' or '{', " + "but was '" + s.peek() + "'"); } } private static void addArrayItems(ParsableString s, WeightedSetItem out) { s.pass('['); while (s.peek() != ']') { s.pass('['); long key = s.longTo(s.position(',')); s.pass(','); int value = s.intTo(s.position(']')); s.pass(']'); out.addToken(key, value); s.passOptional(','); if (s.atEnd()) throw new IllegalArgumentException("Expected an array ending by ']'"); } s.pass(']'); } private static void addMapItems(ParsableString s, WeightedSetItem out) { s.pass('{'); while (s.peek() != '}') { String key; if (s.passOptional('\'')) { key = s.stringTo(s.position('\'')); s.pass('\''); } else if (s.passOptional('"')) { key = s.stringTo(s.position('"')); s.pass('"'); } else { key = s.stringTo(s.position(':')).trim(); } s.pass(':'); int value = s.intTo(s.position(',','}')); out.addToken(key, value); s.passOptional(','); if (s.atEnd()) throw new IllegalArgumentException("Expected a map ending by '}'"); } s.pass('}'); }
Consider rename to _token_ (as there is no weight here).
public static void addNumericTokensFromString(String string, NumericInItem out) { if (string == null) { return; } var s = new ParsableString(string); while (!s.atEnd()) { long key = s.longTo(s.positionOrEnd(',')); out.addToken(key); s.passOptional(','); } }
long key = s.longTo(s.positionOrEnd(','));
public static void addNumericTokensFromString(String string, NumericInItem out) { if (string == null) { return; } var s = new ParsableString(string); while (!s.atEnd()) { long token = s.longTo(s.positionOrEnd(',')); out.addToken(token); s.passOptional(','); } }
class ParameterListParser { public static void addItemsFromString(String string, WeightedSetItem out) { var s = new ParsableString(string); switch (s.peek()) { case '[' : addArrayItems(s, out); break; case '{' : addMapItems(s, out); break; default : throw new IllegalArgumentException("Expected a string starting by '[' or '{', " + "but was '" + s.peek() + "'"); } } private static void addArrayItems(ParsableString s, WeightedSetItem out) { s.pass('['); while (s.peek() != ']') { s.pass('['); long key = s.longTo(s.position(',')); s.pass(','); int value = s.intTo(s.position(']')); s.pass(']'); out.addToken(key, value); s.passOptional(','); if (s.atEnd()) throw new IllegalArgumentException("Expected an array ending by ']'"); } s.pass(']'); } private static void addMapItems(ParsableString s, WeightedSetItem out) { s.pass('{'); while (s.peek() != '}') { String key; if (s.passOptional('\'')) { key = s.stringTo(s.position('\'')); s.pass('\''); } else if (s.passOptional('"')) { key = s.stringTo(s.position('"')); s.pass('"'); } else { key = s.stringTo(s.position(':')).trim(); } s.pass(':'); int value = s.intTo(s.position(',','}')); out.addToken(key, value); s.passOptional(','); if (s.atEnd()) throw new IllegalArgumentException("Expected a map ending by '}'"); } s.pass('}'); }
class ParameterListParser { public static void addItemsFromString(String string, WeightedSetItem out) { var s = new ParsableString(string); switch (s.peek()) { case '[' : addArrayItems(s, out); break; case '{' : addMapItems(s, out); break; default : throw new IllegalArgumentException("Expected a string starting by '[' or '{', " + "but was '" + s.peek() + "'"); } } private static void addArrayItems(ParsableString s, WeightedSetItem out) { s.pass('['); while (s.peek() != ']') { s.pass('['); long key = s.longTo(s.position(',')); s.pass(','); int value = s.intTo(s.position(']')); s.pass(']'); out.addToken(key, value); s.passOptional(','); if (s.atEnd()) throw new IllegalArgumentException("Expected an array ending by ']'"); } s.pass(']'); } private static void addMapItems(ParsableString s, WeightedSetItem out) { s.pass('{'); while (s.peek() != '}') { String key; if (s.passOptional('\'')) { key = s.stringTo(s.position('\'')); s.pass('\''); } else if (s.passOptional('"')) { key = s.stringTo(s.position('"')); s.pass('"'); } else { key = s.stringTo(s.position(':')).trim(); } s.pass(':'); int value = s.intTo(s.position(',','}')); out.addToken(key, value); s.passOptional(','); if (s.atEnd()) throw new IllegalArgumentException("Expected a map ending by '}'"); } s.pass('}'); }
Should this be _3 bytes string content_? 2 + 1 + 1 + 7 + 1 + **3** = 15
void testStringInItem() { var a = new StringInItem("default"); a.addToken("foo"); ByteBuffer buffer = ByteBuffer.allocate(128); int count = a.encode(buffer); buffer.flip(); assertEquals(15, buffer.remaining(), "Serialization size"); assertType(buffer, 31, 0); assertEquals(1, buffer.get()); assertString(buffer, "default"); assertString(buffer, "foo"); }
void testStringInItem() { var a = new StringInItem("default"); a.addToken("foo"); ByteBuffer buffer = ByteBuffer.allocate(128); int count = a.encode(buffer); buffer.flip(); assertEquals(15, buffer.remaining(), "Serialization size"); assertType(buffer, 31, 0); assertEquals(1, buffer.get()); assertString(buffer, "default"); assertString(buffer, "foo"); }
class ItemEncodingTestCase { private void assertType(ByteBuffer buffer, int etype, int efeatures) { byte CODE_MASK = 0b00011111; byte features_and_type = buffer.get(); int features = (features_and_type & 0xe0) >> 5; int type = features_and_type & CODE_MASK; if (type == CODE_MASK) { byte type_extension = buffer.get(); assertTrue(type_extension >= 0); type += type_extension; } assertEquals(etype, type, "Code"); assertEquals(efeatures, features, "Features"); } private void assertWeight(ByteBuffer buffer, int weight) { int w = (weight > (1 << 5)) ? buffer.getShort() & 0x3fff: buffer.get(); assertEquals(weight, w, "Weight"); } @Test void testWordItemEncoding() { WordItem word = new WordItem("test"); word.setWeight(150); ByteBuffer buffer = ByteBuffer.allocate(128); int count = word.encode(buffer); buffer.flip(); assertEquals(1, count, "Serialization count"); assertType(buffer, 4, 1); assertWeight(buffer, 150); assertEquals(0, buffer.get(), "Index length"); assertEquals(4, buffer.get(), "Word length"); assertEquals(4, buffer.remaining(), "Word length"); assertEquals('t', buffer.get()); assertEquals('e', buffer.get()); assertEquals('s', buffer.get()); assertEquals('t', buffer.get()); } @Test void testStartHostMarkerEncoding() { WordItem word = MarkerWordItem.createStartOfHost(); ByteBuffer buffer = ByteBuffer.allocate(128); int count = word.encode(buffer); buffer.flip(); assertEquals(1, count, "Serialization count"); assertType(buffer, 4, 0); assertEquals(0, buffer.get(), "Index length"); assertEquals(9, buffer.get(), "Word length"); assertEquals(9, buffer.remaining(), "Word length"); assertEquals('S', buffer.get()); assertEquals('t', buffer.get()); assertEquals('A', buffer.get()); assertEquals('r', buffer.get()); assertEquals('T', buffer.get()); assertEquals('h', buffer.get()); assertEquals('O', buffer.get()); assertEquals('s', buffer.get()); assertEquals('T', buffer.get()); } @Test void testEndHostMarkerEncoding() { WordItem word = MarkerWordItem.createEndOfHost(); 
ByteBuffer buffer = ByteBuffer.allocate(128); int count = word.encode(buffer); buffer.flip(); assertEquals(1, count, "Serialization count"); assertType(buffer, 4, 0); assertEquals(0, buffer.get(), "Index length"); assertEquals(7, buffer.get(), "Word length"); assertEquals(7, buffer.remaining(), "Word length"); assertEquals('E', buffer.get()); assertEquals('n', buffer.get()); assertEquals('D', buffer.get()); assertEquals('h', buffer.get()); assertEquals('O', buffer.get()); assertEquals('s', buffer.get()); assertEquals('T', buffer.get()); } @Test void testFilterWordItemEncoding() { WordItem word = new WordItem("test"); word.setFilter(true); ByteBuffer buffer = ByteBuffer.allocate(128); int count = word.encode(buffer); buffer.flip(); assertEquals(1, count, "Serialization count"); assertType(buffer, 4, 4); assertEquals(0x08, buffer.get()); assertEquals(0, buffer.get(), "Index length"); assertEquals(4, buffer.get(), "Word length"); assertEquals(4, buffer.remaining(), "Word length"); assertEquals('t', buffer.get()); assertEquals('e', buffer.get()); assertEquals('s', buffer.get()); assertEquals('t', buffer.get()); } @Test void testNoRankedNoPositionDataWordItemEncoding() { WordItem word = new WordItem("test"); word.setRanked(false); word.setPositionData(false); ByteBuffer buffer = ByteBuffer.allocate(128); int count = word.encode(buffer); buffer.flip(); assertEquals(1, count, "Serialization count"); assertType(buffer, 4, 4); assertEquals(0x05, buffer.get()); assertEquals(0, buffer.get(), "Index length"); assertEquals(4, buffer.get(), "Word length"); assertEquals(4, buffer.remaining(), "Word length"); assertEquals('t', buffer.get()); assertEquals('e', buffer.get()); assertEquals('s', buffer.get()); assertEquals('t', buffer.get()); } @Test void testAndItemEncoding() { WordItem a = new WordItem("a"); WordItem b = new WordItem("b"); AndItem and = new AndItem(); and.addItem(a); and.addItem(b); ByteBuffer buffer = ByteBuffer.allocate(128); int count = and.encode(buffer); 
buffer.flip(); assertEquals(3, count, "Serialization count"); assertType(buffer, 1, 0); assertEquals(2, buffer.get(), "And arity"); assertWord(buffer, "a"); assertWord(buffer, "b"); } @Test void testNearItemEncoding() { WordItem a = new WordItem("a"); WordItem b = new WordItem("b"); NearItem near = new NearItem(7); near.addItem(a); near.addItem(b); ByteBuffer buffer = ByteBuffer.allocate(128); int count = near.encode(buffer); buffer.flip(); assertEquals(3, count, "Serialization count"); assertType(buffer, 11, 0); assertEquals(2, buffer.get(), "Near arity"); assertEquals(7, buffer.get(), "Limit"); assertWord(buffer, "a"); assertWord(buffer, "b"); } @Test void testONearItemEncoding() { WordItem a = new WordItem("a"); WordItem b = new WordItem("b"); NearItem onear = new ONearItem(7); onear.addItem(a); onear.addItem(b); ByteBuffer buffer = ByteBuffer.allocate(128); int count = onear.encode(buffer); buffer.flip(); assertEquals(3, count, "Serialization count"); assertType(buffer, 12, 0); assertEquals(2, buffer.get(), "Near arity"); assertEquals(7, buffer.get(), "Limit"); assertWord(buffer, "a"); assertWord(buffer, "b"); } @Test void testEquivItemEncoding() { WordItem a = new WordItem("a"); WordItem b = new WordItem("b"); EquivItem equiv = new EquivItem(); equiv.addItem(a); equiv.addItem(b); ByteBuffer buffer = ByteBuffer.allocate(128); int count = equiv.encode(buffer); buffer.flip(); assertEquals(3, count, "Serialization count"); assertType(buffer, 14, 0); assertEquals(2, buffer.get(), "Equiv arity"); assertWord(buffer, "a"); assertWord(buffer, "b"); } @Test void testWandItemEncoding() { WordItem a = new WordItem("a"); WordItem b = new WordItem("b"); WeakAndItem wand = new WeakAndItem(); wand.addItem(a); wand.addItem(b); ByteBuffer buffer = ByteBuffer.allocate(128); int count = wand.encode(buffer); buffer.flip(); assertEquals(3, count, "Serialization count"); assertType(buffer, 16, 0); assertEquals(2, buffer.get(), "WeakAnd arity"); assertEquals(100, buffer.getShort() & 
0x3fff, "WeakAnd N"); assertEquals(0, buffer.get()); assertWord(buffer, "a"); assertWord(buffer, "b"); } @Test void testPureWeightedStringEncoding() { PureWeightedString a = new PureWeightedString("a"); ByteBuffer buffer = ByteBuffer.allocate(128); int count = a.encode(buffer); buffer.flip(); assertEquals(3, buffer.remaining(), "Serialization size"); assertEquals(1, count, "Serialization count"); assertType(buffer, 19, 0); assertString(buffer, a.getString()); } @Test void testPureWeightedStringEncodingWithNonDefaultWeight() { PureWeightedString a = new PureWeightedString("a", 7); ByteBuffer buffer = ByteBuffer.allocate(128); int count = a.encode(buffer); buffer.flip(); assertEquals(4, buffer.remaining(), "Serialization size"); assertEquals(1, count, "Serialization count"); assertType(buffer, 19, 1); assertWeight(buffer, 7); assertString(buffer, a.getString()); } @Test void testPureWeightedIntegerEncoding() { PureWeightedInteger a = new PureWeightedInteger(23432568763534865l); ByteBuffer buffer = ByteBuffer.allocate(128); int count = a.encode(buffer); buffer.flip(); assertEquals(9, buffer.remaining(), "Serialization size"); assertEquals(1, count, "Serialization count"); assertType(buffer, 20, 0); assertEquals(a.getValue(), buffer.getLong(), "Value"); } @Test void testPureWeightedLongEncodingWithNonDefaultWeight() { PureWeightedInteger a = new PureWeightedInteger(23432568763534865l, 7); ByteBuffer buffer = ByteBuffer.allocate(128); int count = a.encode(buffer); buffer.flip(); assertEquals(10, buffer.remaining(), "Serialization size"); assertEquals(1, count, "Serialization count"); assertType(buffer, 20, 1); assertWeight(buffer, 7); assertEquals(a.getValue(), buffer.getLong(), "Value"); ; } @Test @Test void testNumericInItem() { var a = new NumericInItem("default"); a.addToken(42); a.addToken(97000000000L); ByteBuffer buffer = ByteBuffer.allocate(128); int count = a.encode(buffer); buffer.flip(); assertEquals(27, buffer.remaining(), "Serialization size"); 
assertType(buffer, 32, 0); assertEquals(2, buffer.get()); assertString(buffer, "default"); var array = new ArrayList<Long>(); array.add(buffer.getLong()); array.add(buffer.getLong()); Collections.sort(array); assertEquals(42, array.get(0)); assertEquals(97000000000L, array.get(1)); } private void assertString(ByteBuffer buffer, String word) { assertEquals(word.length(), buffer.get(), "Word length"); for (int i=0; i<word.length(); i++) { assertEquals(word.charAt(i), buffer.get(), "Character at " + i); } } private void assertWord(ByteBuffer buffer,String word) { assertType(buffer, 4, 0); assertEquals(0, buffer.get(), "Index length"); assertString(buffer, word); } }
class ItemEncodingTestCase { private void assertType(ByteBuffer buffer, int etype, int efeatures) { byte CODE_MASK = 0b00011111; byte features_and_type = buffer.get(); int features = (features_and_type & 0xe0) >> 5; int type = features_and_type & CODE_MASK; if (type == CODE_MASK) { byte type_extension = buffer.get(); assertTrue(type_extension >= 0); type += type_extension; } assertEquals(etype, type, "Code"); assertEquals(efeatures, features, "Features"); } private void assertWeight(ByteBuffer buffer, int weight) { int w = (weight > (1 << 5)) ? buffer.getShort() & 0x3fff: buffer.get(); assertEquals(weight, w, "Weight"); } @Test void testWordItemEncoding() { WordItem word = new WordItem("test"); word.setWeight(150); ByteBuffer buffer = ByteBuffer.allocate(128); int count = word.encode(buffer); buffer.flip(); assertEquals(1, count, "Serialization count"); assertType(buffer, 4, 1); assertWeight(buffer, 150); assertEquals(0, buffer.get(), "Index length"); assertEquals(4, buffer.get(), "Word length"); assertEquals(4, buffer.remaining(), "Word length"); assertEquals('t', buffer.get()); assertEquals('e', buffer.get()); assertEquals('s', buffer.get()); assertEquals('t', buffer.get()); } @Test void testStartHostMarkerEncoding() { WordItem word = MarkerWordItem.createStartOfHost(); ByteBuffer buffer = ByteBuffer.allocate(128); int count = word.encode(buffer); buffer.flip(); assertEquals(1, count, "Serialization count"); assertType(buffer, 4, 0); assertEquals(0, buffer.get(), "Index length"); assertEquals(9, buffer.get(), "Word length"); assertEquals(9, buffer.remaining(), "Word length"); assertEquals('S', buffer.get()); assertEquals('t', buffer.get()); assertEquals('A', buffer.get()); assertEquals('r', buffer.get()); assertEquals('T', buffer.get()); assertEquals('h', buffer.get()); assertEquals('O', buffer.get()); assertEquals('s', buffer.get()); assertEquals('T', buffer.get()); } @Test void testEndHostMarkerEncoding() { WordItem word = MarkerWordItem.createEndOfHost(); 
ByteBuffer buffer = ByteBuffer.allocate(128); int count = word.encode(buffer); buffer.flip(); assertEquals(1, count, "Serialization count"); assertType(buffer, 4, 0); assertEquals(0, buffer.get(), "Index length"); assertEquals(7, buffer.get(), "Word length"); assertEquals(7, buffer.remaining(), "Word length"); assertEquals('E', buffer.get()); assertEquals('n', buffer.get()); assertEquals('D', buffer.get()); assertEquals('h', buffer.get()); assertEquals('O', buffer.get()); assertEquals('s', buffer.get()); assertEquals('T', buffer.get()); } @Test void testFilterWordItemEncoding() { WordItem word = new WordItem("test"); word.setFilter(true); ByteBuffer buffer = ByteBuffer.allocate(128); int count = word.encode(buffer); buffer.flip(); assertEquals(1, count, "Serialization count"); assertType(buffer, 4, 4); assertEquals(0x08, buffer.get()); assertEquals(0, buffer.get(), "Index length"); assertEquals(4, buffer.get(), "Word length"); assertEquals(4, buffer.remaining(), "Word length"); assertEquals('t', buffer.get()); assertEquals('e', buffer.get()); assertEquals('s', buffer.get()); assertEquals('t', buffer.get()); } @Test void testNoRankedNoPositionDataWordItemEncoding() { WordItem word = new WordItem("test"); word.setRanked(false); word.setPositionData(false); ByteBuffer buffer = ByteBuffer.allocate(128); int count = word.encode(buffer); buffer.flip(); assertEquals(1, count, "Serialization count"); assertType(buffer, 4, 4); assertEquals(0x05, buffer.get()); assertEquals(0, buffer.get(), "Index length"); assertEquals(4, buffer.get(), "Word length"); assertEquals(4, buffer.remaining(), "Word length"); assertEquals('t', buffer.get()); assertEquals('e', buffer.get()); assertEquals('s', buffer.get()); assertEquals('t', buffer.get()); } @Test void testAndItemEncoding() { WordItem a = new WordItem("a"); WordItem b = new WordItem("b"); AndItem and = new AndItem(); and.addItem(a); and.addItem(b); ByteBuffer buffer = ByteBuffer.allocate(128); int count = and.encode(buffer); 
buffer.flip(); assertEquals(3, count, "Serialization count"); assertType(buffer, 1, 0); assertEquals(2, buffer.get(), "And arity"); assertWord(buffer, "a"); assertWord(buffer, "b"); } @Test void testNearItemEncoding() { WordItem a = new WordItem("a"); WordItem b = new WordItem("b"); NearItem near = new NearItem(7); near.addItem(a); near.addItem(b); ByteBuffer buffer = ByteBuffer.allocate(128); int count = near.encode(buffer); buffer.flip(); assertEquals(3, count, "Serialization count"); assertType(buffer, 11, 0); assertEquals(2, buffer.get(), "Near arity"); assertEquals(7, buffer.get(), "Limit"); assertWord(buffer, "a"); assertWord(buffer, "b"); } @Test void testONearItemEncoding() { WordItem a = new WordItem("a"); WordItem b = new WordItem("b"); NearItem onear = new ONearItem(7); onear.addItem(a); onear.addItem(b); ByteBuffer buffer = ByteBuffer.allocate(128); int count = onear.encode(buffer); buffer.flip(); assertEquals(3, count, "Serialization count"); assertType(buffer, 12, 0); assertEquals(2, buffer.get(), "Near arity"); assertEquals(7, buffer.get(), "Limit"); assertWord(buffer, "a"); assertWord(buffer, "b"); } @Test void testEquivItemEncoding() { WordItem a = new WordItem("a"); WordItem b = new WordItem("b"); EquivItem equiv = new EquivItem(); equiv.addItem(a); equiv.addItem(b); ByteBuffer buffer = ByteBuffer.allocate(128); int count = equiv.encode(buffer); buffer.flip(); assertEquals(3, count, "Serialization count"); assertType(buffer, 14, 0); assertEquals(2, buffer.get(), "Equiv arity"); assertWord(buffer, "a"); assertWord(buffer, "b"); } @Test void testWandItemEncoding() { WordItem a = new WordItem("a"); WordItem b = new WordItem("b"); WeakAndItem wand = new WeakAndItem(); wand.addItem(a); wand.addItem(b); ByteBuffer buffer = ByteBuffer.allocate(128); int count = wand.encode(buffer); buffer.flip(); assertEquals(3, count, "Serialization count"); assertType(buffer, 16, 0); assertEquals(2, buffer.get(), "WeakAnd arity"); assertEquals(100, buffer.getShort() & 
0x3fff, "WeakAnd N"); assertEquals(0, buffer.get()); assertWord(buffer, "a"); assertWord(buffer, "b"); } @Test void testPureWeightedStringEncoding() { PureWeightedString a = new PureWeightedString("a"); ByteBuffer buffer = ByteBuffer.allocate(128); int count = a.encode(buffer); buffer.flip(); assertEquals(3, buffer.remaining(), "Serialization size"); assertEquals(1, count, "Serialization count"); assertType(buffer, 19, 0); assertString(buffer, a.getString()); } @Test void testPureWeightedStringEncodingWithNonDefaultWeight() { PureWeightedString a = new PureWeightedString("a", 7); ByteBuffer buffer = ByteBuffer.allocate(128); int count = a.encode(buffer); buffer.flip(); assertEquals(4, buffer.remaining(), "Serialization size"); assertEquals(1, count, "Serialization count"); assertType(buffer, 19, 1); assertWeight(buffer, 7); assertString(buffer, a.getString()); } @Test void testPureWeightedIntegerEncoding() { PureWeightedInteger a = new PureWeightedInteger(23432568763534865l); ByteBuffer buffer = ByteBuffer.allocate(128); int count = a.encode(buffer); buffer.flip(); assertEquals(9, buffer.remaining(), "Serialization size"); assertEquals(1, count, "Serialization count"); assertType(buffer, 20, 0); assertEquals(a.getValue(), buffer.getLong(), "Value"); } @Test void testPureWeightedLongEncodingWithNonDefaultWeight() { PureWeightedInteger a = new PureWeightedInteger(23432568763534865l, 7); ByteBuffer buffer = ByteBuffer.allocate(128); int count = a.encode(buffer); buffer.flip(); assertEquals(10, buffer.remaining(), "Serialization size"); assertEquals(1, count, "Serialization count"); assertType(buffer, 20, 1); assertWeight(buffer, 7); assertEquals(a.getValue(), buffer.getLong(), "Value"); ; } @Test @Test void testNumericInItem() { var a = new NumericInItem("default"); a.addToken(42); a.addToken(97000000000L); ByteBuffer buffer = ByteBuffer.allocate(128); int count = a.encode(buffer); buffer.flip(); assertEquals(27, buffer.remaining(), "Serialization size"); 
assertType(buffer, 32, 0); assertEquals(2, buffer.get()); assertString(buffer, "default"); var array = new ArrayList<Long>(); array.add(buffer.getLong()); array.add(buffer.getLong()); Collections.sort(array); assertEquals(42, array.get(0)); assertEquals(97000000000L, array.get(1)); } private void assertString(ByteBuffer buffer, String word) { assertEquals(word.length(), buffer.get(), "Word length"); for (int i=0; i<word.length(); i++) { assertEquals(word.charAt(i), buffer.get(), "Character at " + i); } } private void assertWord(ByteBuffer buffer,String word) { assertType(buffer, 4, 0); assertEquals(0, buffer.get(), "Index length"); assertString(buffer, word); } }
```suggestion throw new IllegalArgumentException("The in operator is only supported for integer and string fields. The field " + field + " is not of these types"); ```
private Item buildIn(OperatorNode<ExpressionOperator> ast) { String field = getIndex(ast.getArgument(0)); var index = indexFactsSession.getIndex(field); boolean stringField = index.isString(); if (!index.isInteger() && !stringField) throw new IllegalArgumentException("index " + field + " is not an integer or string field"); Item item = null; if (stringField) { item = fillStringIn(ast, ast.getArgument(1), new StringInItem(field)); } else { item = fillNumericIn(ast, ast.getArgument(1), new NumericInItem(field)); } return item; }
throw new IllegalArgumentException("index " + field + " is not an integer or string field");
private Item buildIn(OperatorNode<ExpressionOperator> ast) { String field = getIndex(ast.getArgument(0)); var index = indexFactsSession.getIndex(field); boolean stringField = index.isString(); if (!index.isInteger() && !stringField) throw new IllegalArgumentException("The in operator is only supported for integer and string fields. The field " + field + " is not of these types"); Item item = null; if (stringField) { item = fillStringIn(ast, ast.getArgument(1), new StringInItem(field)); } else { item = fillNumericIn(ast, ast.getArgument(1), new NumericInItem(field)); } return item; }
class IndexNameExpander { public String expand(String leaf) { return leaf; } }
class IndexNameExpander { public String expand(String leaf) { return leaf; } }
Can be simplified to `activacteContainer(null).close()`
public void stop() throws Exception { log.finer("Destroying application."); Application app; ApplicationInUseTracker applicationInUseTracker; synchronized (appLock) { app = application; applicationInUseTracker = this.applicationInUseTracker; } if (app == null || applicationInUseTracker == null) { return; } try { app.stop(); } catch (Exception e) { log.log(Level.WARNING, "Exception thrown while deactivating application.", e); } synchronized (appLock) { application = null; } try (DeactivatedContainer deactivated = activateContainer(null)) { } synchronized (appLock) { this.applicationInUseTracker = null; } applicationInUseTracker.release(); applicationInUseTracker.applicationInUseLatch.await(); app.destroy(); }
try (DeactivatedContainer deactivated = activateContainer(null)) { }
public void stop() throws Exception { log.finer("Destroying application."); Application app; ApplicationInUseTracker applicationInUseTracker; synchronized (appLock) { app = application; applicationInUseTracker = this.applicationInUseTracker; } if (app == null || applicationInUseTracker == null) { return; } try { app.stop(); } catch (Exception e) { log.log(Level.WARNING, "Exception thrown while deactivating application.", e); } synchronized (appLock) { application = null; } try (DeactivatedContainer deactivated = activateContainer(null)) { } synchronized (appLock) { this.applicationInUseTracker = null; } applicationInUseTracker.release(); applicationInUseTracker.applicationInUseLatch.await(); app.destroy(); }
class " + appName + " from bundle '" + appBundle.getSymbolicName() + "'."); Class<Application> appClass = ContainerBuilder.safeClassCast(Application.class, appBundle.loadClass(appName)); app = injector.getInstance(appClass); } else { app = injector.getInstance(Application.class); log.finer("Injecting instance of " + app.getClass().getName() + "."); }
class " + appName + " from bundle '" + appBundle.getSymbolicName() + "'."); Class<Application> appClass = ContainerBuilder.safeClassCast(Application.class, appBundle.loadClass(appName)); app = injector.getInstance(appClass); } else { app = injector.getInstance(Application.class); log.finer("Injecting instance of " + app.getClass().getName() + "."); }
Typo
public DeactivatedContainer activateContainer(ContainerBuilder builder) { try (DeactivatedContainer deactivated = loader.activateContainer(buionf }
try (DeactivatedContainer deactivated = loader.activateContainer(buionf
public DeactivatedContainer activateContainer(ContainerBuilder builder) { try (DeactivatedContainer deactivated = loader.activateContainer(builder)) { return deactivated; } }
class TestDriver implements ContainerActivator, CurrentContainer { private static final AtomicInteger testId = new AtomicInteger(0); private final FutureTask<Boolean> closeTask = new FutureTask<>(new CloseTask()); private final ApplicationLoader loader; private TestDriver(ApplicationLoader loader) { this.loader = loader; } @Override public ContainerBuilder newContainerBuilder() { return loader.newContainerBuilder(); } /** Returns the deactivated container, with its container reference already released. */ @Override @Override public Container newReference(URI uri) { return loader.newReference(uri); } /** * <p>Returns the {@link BootstrapLoader} used by this TestDriver. Use caution when invoking methods on the * BootstrapLoader directly, since the lifecycle management done by this TestDriver may become corrupt.</p> * * @return The BootstrapLoader. */ public BootstrapLoader bootstrapLoader() { return loader; } /** * <p>Returns the {@link Application} loaded by this TestDriver. Until {@link * never return null.</p> * * @return The loaded Application. */ public Application application() { return loader.application(); } /** * <p>Returns the {@link OsgiFramework} created by this TestDriver. Although this method will never return null, it * might return a {@link NonWorkingOsgiFramework} depending on the factory method used to instantiate it.</p> * * @return The OSGi framework. */ public OsgiFramework osgiFramework() { return loader.osgiFramework(); } /** * <p>Convenience method to create and {@link Request * CurrentContainer}. This method will either return the corresponding {@link ContentChannel} or throw the * appropriate exception (see {@link Request * * @param requestUri The URI string to parse and pass to the Request constructor. * @param responseHandler The ResponseHandler to pass to {@link Request * @return The ContentChannel returned by {@link Request * @throws NullPointerException If the URI string or the {@link ResponseHandler} is null. 
* @throws IllegalArgumentException If the URI string violates RFC&nbsp;2396. * @throws BindingNotFoundException If the corresponding call to {@link Container * returns null. * @throws RequestDeniedException If the corresponding call to {@link RequestHandler * ResponseHandler)} returns null. */ public ContentChannel connectRequest(String requestUri, ResponseHandler responseHandler) { return newRequestDispatch(requestUri, responseHandler).connect(); } /** * <p>Convenience method to create a {@link Request}, connect it to a {@link RequestHandler}, and close the returned * {@link ContentChannel}. This is the same as calling:</p> * <pre> * connectRequest(uri, responseHandler).close(null); * </pre> * * @param requestUri The URI string to parse and pass to the Request constructor. * @param responseHandler The ResponseHandler to pass to {@link Request * @return A waitable Future that provides access to the corresponding {@link Response}. * @throws NullPointerException If the URI string or the {@link ResponseHandler} is null. * @throws IllegalArgumentException If the URI string violates RFC&nbsp;2396. * @throws BindingNotFoundException If the corresponding call to {@link Container * returns null. * @throws RequestDeniedException If the corresponding call to {@link RequestHandler * ResponseHandler)} returns null. */ public Future<Response> dispatchRequest(String requestUri, ResponseHandler responseHandler) { return newRequestDispatch(requestUri, responseHandler).dispatch(); } /** * <p>Initiates the shut down of this TestDriver in another thread. By doing this in a separate thread, it allows * other code to monitor its progress. Unless you need the added monitoring capability, you should use {@link * * * @see */ public void scheduleClose() { new Thread(closeTask, "TestDriver.Closer").start(); } /** * <p>Waits for shut down of this TestDriver to complete. This call must be preceded by a call to {@link * * * @param timeout The maximum time to wait. 
* @param unit The time unit of the timeout argument. * @return True if shut down completed within the allocated time. */ public boolean awaitClose(long timeout, TimeUnit unit) { try { closeTask.get(timeout, unit); return true; } catch (TimeoutException e) { return false; } catch (Exception e) { throw e instanceof RuntimeException ? (RuntimeException)e : new RuntimeException(e); } } /** * <p>Initiatiates shut down of this TestDriver and waits for it to complete. If shut down fails to complete within * 60 seconds, this method throws an exception.</p> * * @return True if shut down completed within the allocated time. * @throws IllegalStateException If shut down failed to complete within the allocated time. */ public boolean close() { scheduleClose(); if ( ! awaitClose(600, TimeUnit.SECONDS)) { throw new IllegalStateException("Application failed to terminate within allocated time."); } return true; } /** * <p>Creates a new {@link RequestDispatch} that dispatches a {@link Request} with the given URI and {@link * ResponseHandler}.</p> * * @param requestUri The uri of the Request to create. * @param responseHandler The ResponseHandler to use for the dispather. * @return The created RequestDispatch. */ public RequestDispatch newRequestDispatch(final String requestUri, final ResponseHandler responseHandler) { return new RequestDispatch() { @Override protected Request newRequest() { return new Request(loader, URI.create(requestUri)); } @Override public ContentChannel handleResponse(Response response) { return responseHandler.handleResponse(response); } }; } /** * <p>Creates a new TestDriver with an injected {@link Application}.</p> * * @param appClass The Application class to inject. * @param guiceModules The Guice {@link Module Modules} to install prior to startup. * @return The created TestDriver. */ public static TestDriver newInjectedApplicationInstance(Class<? extends Application> appClass, Module... 
guiceModules) { return newInstance(newOsgiFramework(), null, false, newModuleList(null, appClass, guiceModules)); } /** * <p>Creates a new TestDriver with an injected {@link Application}, but without OSGi support.</p> * * @param appClass The Application class to inject. * @param guiceModules The Guice {@link Module Modules} to install prior to startup. * @return The created TestDriver. * @see * @see */ public static TestDriver newInjectedApplicationInstanceWithoutOsgi(Class<? extends Application> appClass, Module... guiceModules) { return newInstance(newNonWorkingOsgiFramework(), null, false, newModuleList(null, appClass, guiceModules)); } /** * <p>Creates a new TestDriver with an injected {@link Application}.</p> * * @param app The Application to inject. * @param guiceModules The Guice {@link Module Modules} to install prior to startup. * @return The created TestDriver. */ public static TestDriver newInjectedApplicationInstance(Application app, Module... guiceModules) { return newInstance(newOsgiFramework(), null, false, newModuleList(app, null, guiceModules)); } /** * <p>Creates a new TestDriver with an injected {@link Application}, but without OSGi support.</p> * * @param app The Application to inject. * @param guiceModules The Guice {@link Module Modules} to install prior to startup. * @return The created TestDriver. * @see * @see */ public static TestDriver newInjectedApplicationInstanceWithoutOsgi(Application app, Module... guiceModules) { return newInstance(newNonWorkingOsgiFramework(), null, false, newModuleList(app, null, guiceModules)); } /** * <p>Creates a new TestDriver with a predefined {@link Application} implementation. The injected Application class * implements nothing but the bare minimum to conform to the Application interface.</p> * * @param guiceModules The Guice {@link Module Modules} to install prior to startup. * @return The created TestDriver. */ public static TestDriver newSimpleApplicationInstance(Module... 
guiceModules) { return newInstance(newOsgiFramework(), null, false, newModuleList(null, SimpleApplication.class, guiceModules)); } /** * <p>Creates a new TestDriver with a predefined {@link Application} implementation, but without OSGi support. The * injected Application class implements nothing but the bare minimum to conform to the Application interface.</p> * * @param guiceModules The Guice {@link Module Modules} to install prior to startup. * @return The created TestDriver. * @see * @see */ public static TestDriver newSimpleApplicationInstanceWithoutOsgi(Module... guiceModules) { return newInstance(newNonWorkingOsgiFramework(), null, false, newModuleList(null, SimpleApplication.class, guiceModules)); } /** * <p>Creates a new TestDriver from an application bundle. This runs the same code path as the actual jDISC startup * code. Note that the named bundle must have a "X-JDisc-Application" bundle instruction, or setup will fail.</p> * * @param bundleLocation The location of the application bundle to load. * @param privileged Whether or not privileges should be marked as available to the application bundle. * @param guiceModules The Guice {@link Module Modules} to install prior to startup. * @return The created TestDriver. */ public static TestDriver newApplicationBundleInstance(String bundleLocation, boolean privileged, Module... guiceModules) { return newInstance(newOsgiFramework(), bundleLocation, privileged, Arrays.asList(guiceModules)); } /** * <p>Creates a new TestDriver with the given parameters. This is the factory method that all other factory methods * call. It allows you to specify all parts of the TestDriver manually.</p> * * @param osgiFramework The {@link OsgiFramework} to assign to the created TestDriver. * @param bundleLocation The location of the application bundle to load, may be null. * @param privileged Whether or not privileges should be marked as available to the application bundle. 
* @param guiceModules The Guice {@link Module Modules} to install prior to startup. * @return The created TestDriver. */ public static TestDriver newInstance(OsgiFramework osgiFramework, String bundleLocation, boolean privileged, Module... guiceModules) { return newInstance(osgiFramework, bundleLocation, privileged, Arrays.asList(guiceModules)); } /** * <p>Factory method to create a working {@link OsgiFramework}. This method is used by all {@link TestDriver} * factories that DO NOT have the "WithoutOsgi" suffix.</p> * * @return A working OsgiFramework. */ public static FelixFramework newOsgiFramework() { return new FelixFramework(new FelixParams().setCachePath("target/bundlecache" + testId.getAndIncrement())); } /** * <p>Factory method to create a light-weight {@link OsgiFramework} that throws {@link * UnsupportedOperationException} if {@link OsgiFramework * OsgiFramework * support. This method is used by {@link TestDriver} factories that have the "WithoutOsgi" suffix.</p> * * @return A non-working OsgiFramework. */ public static OsgiFramework newNonWorkingOsgiFramework() { return new NonWorkingOsgiFramework(); } private class CloseTask implements Callable<Boolean> { @Override public Boolean call() throws Exception { loader.stop(); loader.destroy(); return true; } } private static TestDriver newInstance(OsgiFramework osgiFramework, String bundleLocation, boolean privileged, Iterable<? extends Module> guiceModules) { ApplicationLoader loader = new ApplicationLoader(osgiFramework, guiceModules); try { loader.init(bundleLocation, privileged); } catch (Exception e) { throw e instanceof RuntimeException ? (RuntimeException)e : new RuntimeException(e); } try { loader.start(); } catch (Exception e) { loader.destroy(); throw e instanceof RuntimeException ? (RuntimeException)e : new RuntimeException(e); } return new TestDriver(loader); } private static List<Module> newModuleList(final Application app, final Class<? extends Application> appClass, Module... 
guiceModules) { List<Module> lst = new LinkedList<>(); lst.addAll(Arrays.asList(guiceModules)); lst.add(new AbstractModule() { @Override public void configure() { AnnotatedBindingBuilder<Application> builder = bind(Application.class); if (app != null) { builder.toInstance(app); } else { builder.to(appClass); } } }); return lst; } private static class SimpleApplication implements Application { @Override public void start() { } @Override public void stop() { } @Override public void destroy() { } } }
class TestDriver implements ContainerActivator, CurrentContainer { private static final AtomicInteger testId = new AtomicInteger(0); private final FutureTask<Boolean> closeTask = new FutureTask<>(new CloseTask()); private final ApplicationLoader loader; private TestDriver(ApplicationLoader loader) { this.loader = loader; } @Override public ContainerBuilder newContainerBuilder() { return loader.newContainerBuilder(); } /** Returns the deactivated container, with its container reference already released. */ @Override @Override public Container newReference(URI uri) { return loader.newReference(uri); } /** * <p>Returns the {@link BootstrapLoader} used by this TestDriver. Use caution when invoking methods on the * BootstrapLoader directly, since the lifecycle management done by this TestDriver may become corrupt.</p> * * @return The BootstrapLoader. */ public BootstrapLoader bootstrapLoader() { return loader; } /** * <p>Returns the {@link Application} loaded by this TestDriver. Until {@link * never return null.</p> * * @return The loaded Application. */ public Application application() { return loader.application(); } /** * <p>Returns the {@link OsgiFramework} created by this TestDriver. Although this method will never return null, it * might return a {@link NonWorkingOsgiFramework} depending on the factory method used to instantiate it.</p> * * @return The OSGi framework. */ public OsgiFramework osgiFramework() { return loader.osgiFramework(); } /** * <p>Convenience method to create and {@link Request * CurrentContainer}. This method will either return the corresponding {@link ContentChannel} or throw the * appropriate exception (see {@link Request * * @param requestUri The URI string to parse and pass to the Request constructor. * @param responseHandler The ResponseHandler to pass to {@link Request * @return The ContentChannel returned by {@link Request * @throws NullPointerException If the URI string or the {@link ResponseHandler} is null. 
* @throws IllegalArgumentException If the URI string violates RFC&nbsp;2396. * @throws BindingNotFoundException If the corresponding call to {@link Container * returns null. * @throws RequestDeniedException If the corresponding call to {@link RequestHandler * ResponseHandler)} returns null. */ public ContentChannel connectRequest(String requestUri, ResponseHandler responseHandler) { return newRequestDispatch(requestUri, responseHandler).connect(); } /** * <p>Convenience method to create a {@link Request}, connect it to a {@link RequestHandler}, and close the returned * {@link ContentChannel}. This is the same as calling:</p> * <pre> * connectRequest(uri, responseHandler).close(null); * </pre> * * @param requestUri The URI string to parse and pass to the Request constructor. * @param responseHandler The ResponseHandler to pass to {@link Request * @return A waitable Future that provides access to the corresponding {@link Response}. * @throws NullPointerException If the URI string or the {@link ResponseHandler} is null. * @throws IllegalArgumentException If the URI string violates RFC&nbsp;2396. * @throws BindingNotFoundException If the corresponding call to {@link Container * returns null. * @throws RequestDeniedException If the corresponding call to {@link RequestHandler * ResponseHandler)} returns null. */ public Future<Response> dispatchRequest(String requestUri, ResponseHandler responseHandler) { return newRequestDispatch(requestUri, responseHandler).dispatch(); } /** * <p>Initiates the shut down of this TestDriver in another thread. By doing this in a separate thread, it allows * other code to monitor its progress. Unless you need the added monitoring capability, you should use {@link * * * @see */ public void scheduleClose() { new Thread(closeTask, "TestDriver.Closer").start(); } /** * <p>Waits for shut down of this TestDriver to complete. This call must be preceded by a call to {@link * * * @param timeout The maximum time to wait. 
* @param unit The time unit of the timeout argument. * @return True if shut down completed within the allocated time. */ public boolean awaitClose(long timeout, TimeUnit unit) { try { closeTask.get(timeout, unit); return true; } catch (TimeoutException e) { return false; } catch (Exception e) { throw e instanceof RuntimeException ? (RuntimeException)e : new RuntimeException(e); } } /** * <p>Initiatiates shut down of this TestDriver and waits for it to complete. If shut down fails to complete within * 60 seconds, this method throws an exception.</p> * * @return True if shut down completed within the allocated time. * @throws IllegalStateException If shut down failed to complete within the allocated time. */ public boolean close() { scheduleClose(); if ( ! awaitClose(600, TimeUnit.SECONDS)) { throw new IllegalStateException("Application failed to terminate within allocated time."); } return true; } /** * <p>Creates a new {@link RequestDispatch} that dispatches a {@link Request} with the given URI and {@link * ResponseHandler}.</p> * * @param requestUri The uri of the Request to create. * @param responseHandler The ResponseHandler to use for the dispather. * @return The created RequestDispatch. */ public RequestDispatch newRequestDispatch(final String requestUri, final ResponseHandler responseHandler) { return new RequestDispatch() { @Override protected Request newRequest() { return new Request(loader, URI.create(requestUri)); } @Override public ContentChannel handleResponse(Response response) { return responseHandler.handleResponse(response); } }; } /** * <p>Creates a new TestDriver with an injected {@link Application}.</p> * * @param appClass The Application class to inject. * @param guiceModules The Guice {@link Module Modules} to install prior to startup. * @return The created TestDriver. */ public static TestDriver newInjectedApplicationInstance(Class<? extends Application> appClass, Module... 
guiceModules) { return newInstance(newOsgiFramework(), null, false, newModuleList(null, appClass, guiceModules)); } /** * <p>Creates a new TestDriver with an injected {@link Application}, but without OSGi support.</p> * * @param appClass The Application class to inject. * @param guiceModules The Guice {@link Module Modules} to install prior to startup. * @return The created TestDriver. * @see * @see */ public static TestDriver newInjectedApplicationInstanceWithoutOsgi(Class<? extends Application> appClass, Module... guiceModules) { return newInstance(newNonWorkingOsgiFramework(), null, false, newModuleList(null, appClass, guiceModules)); } /** * <p>Creates a new TestDriver with an injected {@link Application}.</p> * * @param app The Application to inject. * @param guiceModules The Guice {@link Module Modules} to install prior to startup. * @return The created TestDriver. */ public static TestDriver newInjectedApplicationInstance(Application app, Module... guiceModules) { return newInstance(newOsgiFramework(), null, false, newModuleList(app, null, guiceModules)); } /** * <p>Creates a new TestDriver with an injected {@link Application}, but without OSGi support.</p> * * @param app The Application to inject. * @param guiceModules The Guice {@link Module Modules} to install prior to startup. * @return The created TestDriver. * @see * @see */ public static TestDriver newInjectedApplicationInstanceWithoutOsgi(Application app, Module... guiceModules) { return newInstance(newNonWorkingOsgiFramework(), null, false, newModuleList(app, null, guiceModules)); } /** * <p>Creates a new TestDriver with a predefined {@link Application} implementation. The injected Application class * implements nothing but the bare minimum to conform to the Application interface.</p> * * @param guiceModules The Guice {@link Module Modules} to install prior to startup. * @return The created TestDriver. */ public static TestDriver newSimpleApplicationInstance(Module... 
guiceModules) { return newInstance(newOsgiFramework(), null, false, newModuleList(null, SimpleApplication.class, guiceModules)); } /** * <p>Creates a new TestDriver with a predefined {@link Application} implementation, but without OSGi support. The * injected Application class implements nothing but the bare minimum to conform to the Application interface.</p> * * @param guiceModules The Guice {@link Module Modules} to install prior to startup. * @return The created TestDriver. * @see * @see */ public static TestDriver newSimpleApplicationInstanceWithoutOsgi(Module... guiceModules) { return newInstance(newNonWorkingOsgiFramework(), null, false, newModuleList(null, SimpleApplication.class, guiceModules)); } /** * <p>Creates a new TestDriver from an application bundle. This runs the same code path as the actual jDISC startup * code. Note that the named bundle must have a "X-JDisc-Application" bundle instruction, or setup will fail.</p> * * @param bundleLocation The location of the application bundle to load. * @param privileged Whether or not privileges should be marked as available to the application bundle. * @param guiceModules The Guice {@link Module Modules} to install prior to startup. * @return The created TestDriver. */ public static TestDriver newApplicationBundleInstance(String bundleLocation, boolean privileged, Module... guiceModules) { return newInstance(newOsgiFramework(), bundleLocation, privileged, Arrays.asList(guiceModules)); } /** * <p>Creates a new TestDriver with the given parameters. This is the factory method that all other factory methods * call. It allows you to specify all parts of the TestDriver manually.</p> * * @param osgiFramework The {@link OsgiFramework} to assign to the created TestDriver. * @param bundleLocation The location of the application bundle to load, may be null. * @param privileged Whether or not privileges should be marked as available to the application bundle. 
* @param guiceModules The Guice {@link Module Modules} to install prior to startup. * @return The created TestDriver. */ public static TestDriver newInstance(OsgiFramework osgiFramework, String bundleLocation, boolean privileged, Module... guiceModules) { return newInstance(osgiFramework, bundleLocation, privileged, Arrays.asList(guiceModules)); } /** * <p>Factory method to create a working {@link OsgiFramework}. This method is used by all {@link TestDriver} * factories that DO NOT have the "WithoutOsgi" suffix.</p> * * @return A working OsgiFramework. */ public static FelixFramework newOsgiFramework() { return new FelixFramework(new FelixParams().setCachePath("target/bundlecache" + testId.getAndIncrement())); } /** * <p>Factory method to create a light-weight {@link OsgiFramework} that throws {@link * UnsupportedOperationException} if {@link OsgiFramework * OsgiFramework * support. This method is used by {@link TestDriver} factories that have the "WithoutOsgi" suffix.</p> * * @return A non-working OsgiFramework. */ public static OsgiFramework newNonWorkingOsgiFramework() { return new NonWorkingOsgiFramework(); } private class CloseTask implements Callable<Boolean> { @Override public Boolean call() throws Exception { loader.stop(); loader.destroy(); return true; } } private static TestDriver newInstance(OsgiFramework osgiFramework, String bundleLocation, boolean privileged, Iterable<? extends Module> guiceModules) { ApplicationLoader loader = new ApplicationLoader(osgiFramework, guiceModules); try { loader.init(bundleLocation, privileged); } catch (Exception e) { throw e instanceof RuntimeException ? (RuntimeException)e : new RuntimeException(e); } try { loader.start(); } catch (Exception e) { loader.destroy(); throw e instanceof RuntimeException ? (RuntimeException)e : new RuntimeException(e); } return new TestDriver(loader); } private static List<Module> newModuleList(final Application app, final Class<? extends Application> appClass, Module... 
guiceModules) { List<Module> lst = new LinkedList<>(); lst.addAll(Arrays.asList(guiceModules)); lst.add(new AbstractModule() { @Override public void configure() { AnnotatedBindingBuilder<Application> builder = bind(Application.class); if (app != null) { builder.toInstance(app); } else { builder.to(appClass); } } }); return lst; } private static class SimpleApplication implements Application { @Override public void start() { } @Override public void stop() { } @Override public void destroy() { } } }
`activateContainer(...)` returns `null` when there is no previous container.
public void stop() throws Exception { log.finer("Destroying application."); Application app; ApplicationInUseTracker applicationInUseTracker; synchronized (appLock) { app = application; applicationInUseTracker = this.applicationInUseTracker; } if (app == null || applicationInUseTracker == null) { return; } try { app.stop(); } catch (Exception e) { log.log(Level.WARNING, "Exception thrown while deactivating application.", e); } synchronized (appLock) { application = null; } try (DeactivatedContainer deactivated = activateContainer(null)) { } synchronized (appLock) { this.applicationInUseTracker = null; } applicationInUseTracker.release(); applicationInUseTracker.applicationInUseLatch.await(); app.destroy(); }
try (DeactivatedContainer deactivated = activateContainer(null)) { }
public void stop() throws Exception { log.finer("Destroying application."); Application app; ApplicationInUseTracker applicationInUseTracker; synchronized (appLock) { app = application; applicationInUseTracker = this.applicationInUseTracker; } if (app == null || applicationInUseTracker == null) { return; } try { app.stop(); } catch (Exception e) { log.log(Level.WARNING, "Exception thrown while deactivating application.", e); } synchronized (appLock) { application = null; } try (DeactivatedContainer deactivated = activateContainer(null)) { } synchronized (appLock) { this.applicationInUseTracker = null; } applicationInUseTracker.release(); applicationInUseTracker.applicationInUseLatch.await(); app.destroy(); }
class " + appName + " from bundle '" + appBundle.getSymbolicName() + "'."); Class<Application> appClass = ContainerBuilder.safeClassCast(Application.class, appBundle.loadClass(appName)); app = injector.getInstance(appClass); } else { app = injector.getInstance(Application.class); log.finer("Injecting instance of " + app.getClass().getName() + "."); }
class " + appName + " from bundle '" + appBundle.getSymbolicName() + "'."); Class<Application> appClass = ContainerBuilder.safeClassCast(Application.class, appBundle.loadClass(appName)); app = injector.getInstance(appClass); } else { app = injector.getInstance(Application.class); log.finer("Injecting instance of " + app.getClass().getName() + "."); }
What!?
public DeactivatedContainer activateContainer(ContainerBuilder builder) { try (DeactivatedContainer deactivated = loader.activateContainer(buionf }
try (DeactivatedContainer deactivated = loader.activateContainer(buionf
public DeactivatedContainer activateContainer(ContainerBuilder builder) { try (DeactivatedContainer deactivated = loader.activateContainer(builder)) { return deactivated; } }
class TestDriver implements ContainerActivator, CurrentContainer { private static final AtomicInteger testId = new AtomicInteger(0); private final FutureTask<Boolean> closeTask = new FutureTask<>(new CloseTask()); private final ApplicationLoader loader; private TestDriver(ApplicationLoader loader) { this.loader = loader; } @Override public ContainerBuilder newContainerBuilder() { return loader.newContainerBuilder(); } /** Returns the deactivated container, with its container reference already released. */ @Override @Override public Container newReference(URI uri) { return loader.newReference(uri); } /** * <p>Returns the {@link BootstrapLoader} used by this TestDriver. Use caution when invoking methods on the * BootstrapLoader directly, since the lifecycle management done by this TestDriver may become corrupt.</p> * * @return The BootstrapLoader. */ public BootstrapLoader bootstrapLoader() { return loader; } /** * <p>Returns the {@link Application} loaded by this TestDriver. Until {@link * never return null.</p> * * @return The loaded Application. */ public Application application() { return loader.application(); } /** * <p>Returns the {@link OsgiFramework} created by this TestDriver. Although this method will never return null, it * might return a {@link NonWorkingOsgiFramework} depending on the factory method used to instantiate it.</p> * * @return The OSGi framework. */ public OsgiFramework osgiFramework() { return loader.osgiFramework(); } /** * <p>Convenience method to create and {@link Request * CurrentContainer}. This method will either return the corresponding {@link ContentChannel} or throw the * appropriate exception (see {@link Request * * @param requestUri The URI string to parse and pass to the Request constructor. * @param responseHandler The ResponseHandler to pass to {@link Request * @return The ContentChannel returned by {@link Request * @throws NullPointerException If the URI string or the {@link ResponseHandler} is null. 
* @throws IllegalArgumentException If the URI string violates RFC&nbsp;2396. * @throws BindingNotFoundException If the corresponding call to {@link Container * returns null. * @throws RequestDeniedException If the corresponding call to {@link RequestHandler * ResponseHandler)} returns null. */ public ContentChannel connectRequest(String requestUri, ResponseHandler responseHandler) { return newRequestDispatch(requestUri, responseHandler).connect(); } /** * <p>Convenience method to create a {@link Request}, connect it to a {@link RequestHandler}, and close the returned * {@link ContentChannel}. This is the same as calling:</p> * <pre> * connectRequest(uri, responseHandler).close(null); * </pre> * * @param requestUri The URI string to parse and pass to the Request constructor. * @param responseHandler The ResponseHandler to pass to {@link Request * @return A waitable Future that provides access to the corresponding {@link Response}. * @throws NullPointerException If the URI string or the {@link ResponseHandler} is null. * @throws IllegalArgumentException If the URI string violates RFC&nbsp;2396. * @throws BindingNotFoundException If the corresponding call to {@link Container * returns null. * @throws RequestDeniedException If the corresponding call to {@link RequestHandler * ResponseHandler)} returns null. */ public Future<Response> dispatchRequest(String requestUri, ResponseHandler responseHandler) { return newRequestDispatch(requestUri, responseHandler).dispatch(); } /** * <p>Initiates the shut down of this TestDriver in another thread. By doing this in a separate thread, it allows * other code to monitor its progress. Unless you need the added monitoring capability, you should use {@link * * * @see */ public void scheduleClose() { new Thread(closeTask, "TestDriver.Closer").start(); } /** * <p>Waits for shut down of this TestDriver to complete. This call must be preceded by a call to {@link * * * @param timeout The maximum time to wait. 
* @param unit The time unit of the timeout argument. * @return True if shut down completed within the allocated time. */ public boolean awaitClose(long timeout, TimeUnit unit) { try { closeTask.get(timeout, unit); return true; } catch (TimeoutException e) { return false; } catch (Exception e) { throw e instanceof RuntimeException ? (RuntimeException)e : new RuntimeException(e); } } /** * <p>Initiatiates shut down of this TestDriver and waits for it to complete. If shut down fails to complete within * 60 seconds, this method throws an exception.</p> * * @return True if shut down completed within the allocated time. * @throws IllegalStateException If shut down failed to complete within the allocated time. */ public boolean close() { scheduleClose(); if ( ! awaitClose(600, TimeUnit.SECONDS)) { throw new IllegalStateException("Application failed to terminate within allocated time."); } return true; } /** * <p>Creates a new {@link RequestDispatch} that dispatches a {@link Request} with the given URI and {@link * ResponseHandler}.</p> * * @param requestUri The uri of the Request to create. * @param responseHandler The ResponseHandler to use for the dispather. * @return The created RequestDispatch. */ public RequestDispatch newRequestDispatch(final String requestUri, final ResponseHandler responseHandler) { return new RequestDispatch() { @Override protected Request newRequest() { return new Request(loader, URI.create(requestUri)); } @Override public ContentChannel handleResponse(Response response) { return responseHandler.handleResponse(response); } }; } /** * <p>Creates a new TestDriver with an injected {@link Application}.</p> * * @param appClass The Application class to inject. * @param guiceModules The Guice {@link Module Modules} to install prior to startup. * @return The created TestDriver. */ public static TestDriver newInjectedApplicationInstance(Class<? extends Application> appClass, Module... 
guiceModules) { return newInstance(newOsgiFramework(), null, false, newModuleList(null, appClass, guiceModules)); } /** * <p>Creates a new TestDriver with an injected {@link Application}, but without OSGi support.</p> * * @param appClass The Application class to inject. * @param guiceModules The Guice {@link Module Modules} to install prior to startup. * @return The created TestDriver. * @see * @see */ public static TestDriver newInjectedApplicationInstanceWithoutOsgi(Class<? extends Application> appClass, Module... guiceModules) { return newInstance(newNonWorkingOsgiFramework(), null, false, newModuleList(null, appClass, guiceModules)); } /** * <p>Creates a new TestDriver with an injected {@link Application}.</p> * * @param app The Application to inject. * @param guiceModules The Guice {@link Module Modules} to install prior to startup. * @return The created TestDriver. */ public static TestDriver newInjectedApplicationInstance(Application app, Module... guiceModules) { return newInstance(newOsgiFramework(), null, false, newModuleList(app, null, guiceModules)); } /** * <p>Creates a new TestDriver with an injected {@link Application}, but without OSGi support.</p> * * @param app The Application to inject. * @param guiceModules The Guice {@link Module Modules} to install prior to startup. * @return The created TestDriver. * @see * @see */ public static TestDriver newInjectedApplicationInstanceWithoutOsgi(Application app, Module... guiceModules) { return newInstance(newNonWorkingOsgiFramework(), null, false, newModuleList(app, null, guiceModules)); } /** * <p>Creates a new TestDriver with a predefined {@link Application} implementation. The injected Application class * implements nothing but the bare minimum to conform to the Application interface.</p> * * @param guiceModules The Guice {@link Module Modules} to install prior to startup. * @return The created TestDriver. */ public static TestDriver newSimpleApplicationInstance(Module... 
guiceModules) { return newInstance(newOsgiFramework(), null, false, newModuleList(null, SimpleApplication.class, guiceModules)); } /** * <p>Creates a new TestDriver with a predefined {@link Application} implementation, but without OSGi support. The * injected Application class implements nothing but the bare minimum to conform to the Application interface.</p> * * @param guiceModules The Guice {@link Module Modules} to install prior to startup. * @return The created TestDriver. * @see * @see */ public static TestDriver newSimpleApplicationInstanceWithoutOsgi(Module... guiceModules) { return newInstance(newNonWorkingOsgiFramework(), null, false, newModuleList(null, SimpleApplication.class, guiceModules)); } /** * <p>Creates a new TestDriver from an application bundle. This runs the same code path as the actual jDISC startup * code. Note that the named bundle must have a "X-JDisc-Application" bundle instruction, or setup will fail.</p> * * @param bundleLocation The location of the application bundle to load. * @param privileged Whether or not privileges should be marked as available to the application bundle. * @param guiceModules The Guice {@link Module Modules} to install prior to startup. * @return The created TestDriver. */ public static TestDriver newApplicationBundleInstance(String bundleLocation, boolean privileged, Module... guiceModules) { return newInstance(newOsgiFramework(), bundleLocation, privileged, Arrays.asList(guiceModules)); } /** * <p>Creates a new TestDriver with the given parameters. This is the factory method that all other factory methods * call. It allows you to specify all parts of the TestDriver manually.</p> * * @param osgiFramework The {@link OsgiFramework} to assign to the created TestDriver. * @param bundleLocation The location of the application bundle to load, may be null. * @param privileged Whether or not privileges should be marked as available to the application bundle. 
* @param guiceModules The Guice {@link Module Modules} to install prior to startup. * @return The created TestDriver. */ public static TestDriver newInstance(OsgiFramework osgiFramework, String bundleLocation, boolean privileged, Module... guiceModules) { return newInstance(osgiFramework, bundleLocation, privileged, Arrays.asList(guiceModules)); } /** * <p>Factory method to create a working {@link OsgiFramework}. This method is used by all {@link TestDriver} * factories that DO NOT have the "WithoutOsgi" suffix.</p> * * @return A working OsgiFramework. */ public static FelixFramework newOsgiFramework() { return new FelixFramework(new FelixParams().setCachePath("target/bundlecache" + testId.getAndIncrement())); } /** * <p>Factory method to create a light-weight {@link OsgiFramework} that throws {@link * UnsupportedOperationException} if {@link OsgiFramework * OsgiFramework * support. This method is used by {@link TestDriver} factories that have the "WithoutOsgi" suffix.</p> * * @return A non-working OsgiFramework. */ public static OsgiFramework newNonWorkingOsgiFramework() { return new NonWorkingOsgiFramework(); } private class CloseTask implements Callable<Boolean> { @Override public Boolean call() throws Exception { loader.stop(); loader.destroy(); return true; } } private static TestDriver newInstance(OsgiFramework osgiFramework, String bundleLocation, boolean privileged, Iterable<? extends Module> guiceModules) { ApplicationLoader loader = new ApplicationLoader(osgiFramework, guiceModules); try { loader.init(bundleLocation, privileged); } catch (Exception e) { throw e instanceof RuntimeException ? (RuntimeException)e : new RuntimeException(e); } try { loader.start(); } catch (Exception e) { loader.destroy(); throw e instanceof RuntimeException ? (RuntimeException)e : new RuntimeException(e); } return new TestDriver(loader); } private static List<Module> newModuleList(final Application app, final Class<? extends Application> appClass, Module... 
guiceModules) { List<Module> lst = new LinkedList<>(); lst.addAll(Arrays.asList(guiceModules)); lst.add(new AbstractModule() { @Override public void configure() { AnnotatedBindingBuilder<Application> builder = bind(Application.class); if (app != null) { builder.toInstance(app); } else { builder.to(appClass); } } }); return lst; } private static class SimpleApplication implements Application { @Override public void start() { } @Override public void stop() { } @Override public void destroy() { } } }
class TestDriver implements ContainerActivator, CurrentContainer { private static final AtomicInteger testId = new AtomicInteger(0); private final FutureTask<Boolean> closeTask = new FutureTask<>(new CloseTask()); private final ApplicationLoader loader; private TestDriver(ApplicationLoader loader) { this.loader = loader; } @Override public ContainerBuilder newContainerBuilder() { return loader.newContainerBuilder(); } /** Returns the deactivated container, with its container reference already released. */ @Override @Override public Container newReference(URI uri) { return loader.newReference(uri); } /** * <p>Returns the {@link BootstrapLoader} used by this TestDriver. Use caution when invoking methods on the * BootstrapLoader directly, since the lifecycle management done by this TestDriver may become corrupt.</p> * * @return The BootstrapLoader. */ public BootstrapLoader bootstrapLoader() { return loader; } /** * <p>Returns the {@link Application} loaded by this TestDriver. Until {@link * never return null.</p> * * @return The loaded Application. */ public Application application() { return loader.application(); } /** * <p>Returns the {@link OsgiFramework} created by this TestDriver. Although this method will never return null, it * might return a {@link NonWorkingOsgiFramework} depending on the factory method used to instantiate it.</p> * * @return The OSGi framework. */ public OsgiFramework osgiFramework() { return loader.osgiFramework(); } /** * <p>Convenience method to create and {@link Request * CurrentContainer}. This method will either return the corresponding {@link ContentChannel} or throw the * appropriate exception (see {@link Request * * @param requestUri The URI string to parse and pass to the Request constructor. * @param responseHandler The ResponseHandler to pass to {@link Request * @return The ContentChannel returned by {@link Request * @throws NullPointerException If the URI string or the {@link ResponseHandler} is null. 
* @throws IllegalArgumentException If the URI string violates RFC&nbsp;2396. * @throws BindingNotFoundException If the corresponding call to {@link Container * returns null. * @throws RequestDeniedException If the corresponding call to {@link RequestHandler * ResponseHandler)} returns null. */ public ContentChannel connectRequest(String requestUri, ResponseHandler responseHandler) { return newRequestDispatch(requestUri, responseHandler).connect(); } /** * <p>Convenience method to create a {@link Request}, connect it to a {@link RequestHandler}, and close the returned * {@link ContentChannel}. This is the same as calling:</p> * <pre> * connectRequest(uri, responseHandler).close(null); * </pre> * * @param requestUri The URI string to parse and pass to the Request constructor. * @param responseHandler The ResponseHandler to pass to {@link Request * @return A waitable Future that provides access to the corresponding {@link Response}. * @throws NullPointerException If the URI string or the {@link ResponseHandler} is null. * @throws IllegalArgumentException If the URI string violates RFC&nbsp;2396. * @throws BindingNotFoundException If the corresponding call to {@link Container * returns null. * @throws RequestDeniedException If the corresponding call to {@link RequestHandler * ResponseHandler)} returns null. */ public Future<Response> dispatchRequest(String requestUri, ResponseHandler responseHandler) { return newRequestDispatch(requestUri, responseHandler).dispatch(); } /** * <p>Initiates the shut down of this TestDriver in another thread. By doing this in a separate thread, it allows * other code to monitor its progress. Unless you need the added monitoring capability, you should use {@link * * * @see */ public void scheduleClose() { new Thread(closeTask, "TestDriver.Closer").start(); } /** * <p>Waits for shut down of this TestDriver to complete. This call must be preceded by a call to {@link * * * @param timeout The maximum time to wait. 
* @param unit The time unit of the timeout argument. * @return True if shut down completed within the allocated time. */ public boolean awaitClose(long timeout, TimeUnit unit) { try { closeTask.get(timeout, unit); return true; } catch (TimeoutException e) { return false; } catch (Exception e) { throw e instanceof RuntimeException ? (RuntimeException)e : new RuntimeException(e); } } /** * <p>Initiatiates shut down of this TestDriver and waits for it to complete. If shut down fails to complete within * 60 seconds, this method throws an exception.</p> * * @return True if shut down completed within the allocated time. * @throws IllegalStateException If shut down failed to complete within the allocated time. */ public boolean close() { scheduleClose(); if ( ! awaitClose(600, TimeUnit.SECONDS)) { throw new IllegalStateException("Application failed to terminate within allocated time."); } return true; } /** * <p>Creates a new {@link RequestDispatch} that dispatches a {@link Request} with the given URI and {@link * ResponseHandler}.</p> * * @param requestUri The uri of the Request to create. * @param responseHandler The ResponseHandler to use for the dispather. * @return The created RequestDispatch. */ public RequestDispatch newRequestDispatch(final String requestUri, final ResponseHandler responseHandler) { return new RequestDispatch() { @Override protected Request newRequest() { return new Request(loader, URI.create(requestUri)); } @Override public ContentChannel handleResponse(Response response) { return responseHandler.handleResponse(response); } }; } /** * <p>Creates a new TestDriver with an injected {@link Application}.</p> * * @param appClass The Application class to inject. * @param guiceModules The Guice {@link Module Modules} to install prior to startup. * @return The created TestDriver. */ public static TestDriver newInjectedApplicationInstance(Class<? extends Application> appClass, Module... 
guiceModules) { return newInstance(newOsgiFramework(), null, false, newModuleList(null, appClass, guiceModules)); } /** * <p>Creates a new TestDriver with an injected {@link Application}, but without OSGi support.</p> * * @param appClass The Application class to inject. * @param guiceModules The Guice {@link Module Modules} to install prior to startup. * @return The created TestDriver. * @see * @see */ public static TestDriver newInjectedApplicationInstanceWithoutOsgi(Class<? extends Application> appClass, Module... guiceModules) { return newInstance(newNonWorkingOsgiFramework(), null, false, newModuleList(null, appClass, guiceModules)); } /** * <p>Creates a new TestDriver with an injected {@link Application}.</p> * * @param app The Application to inject. * @param guiceModules The Guice {@link Module Modules} to install prior to startup. * @return The created TestDriver. */ public static TestDriver newInjectedApplicationInstance(Application app, Module... guiceModules) { return newInstance(newOsgiFramework(), null, false, newModuleList(app, null, guiceModules)); } /** * <p>Creates a new TestDriver with an injected {@link Application}, but without OSGi support.</p> * * @param app The Application to inject. * @param guiceModules The Guice {@link Module Modules} to install prior to startup. * @return The created TestDriver. * @see * @see */ public static TestDriver newInjectedApplicationInstanceWithoutOsgi(Application app, Module... guiceModules) { return newInstance(newNonWorkingOsgiFramework(), null, false, newModuleList(app, null, guiceModules)); } /** * <p>Creates a new TestDriver with a predefined {@link Application} implementation. The injected Application class * implements nothing but the bare minimum to conform to the Application interface.</p> * * @param guiceModules The Guice {@link Module Modules} to install prior to startup. * @return The created TestDriver. */ public static TestDriver newSimpleApplicationInstance(Module... 
guiceModules) { return newInstance(newOsgiFramework(), null, false, newModuleList(null, SimpleApplication.class, guiceModules)); } /** * <p>Creates a new TestDriver with a predefined {@link Application} implementation, but without OSGi support. The * injected Application class implements nothing but the bare minimum to conform to the Application interface.</p> * * @param guiceModules The Guice {@link Module Modules} to install prior to startup. * @return The created TestDriver. * @see * @see */ public static TestDriver newSimpleApplicationInstanceWithoutOsgi(Module... guiceModules) { return newInstance(newNonWorkingOsgiFramework(), null, false, newModuleList(null, SimpleApplication.class, guiceModules)); } /** * <p>Creates a new TestDriver from an application bundle. This runs the same code path as the actual jDISC startup * code. Note that the named bundle must have a "X-JDisc-Application" bundle instruction, or setup will fail.</p> * * @param bundleLocation The location of the application bundle to load. * @param privileged Whether or not privileges should be marked as available to the application bundle. * @param guiceModules The Guice {@link Module Modules} to install prior to startup. * @return The created TestDriver. */ public static TestDriver newApplicationBundleInstance(String bundleLocation, boolean privileged, Module... guiceModules) { return newInstance(newOsgiFramework(), bundleLocation, privileged, Arrays.asList(guiceModules)); } /** * <p>Creates a new TestDriver with the given parameters. This is the factory method that all other factory methods * call. It allows you to specify all parts of the TestDriver manually.</p> * * @param osgiFramework The {@link OsgiFramework} to assign to the created TestDriver. * @param bundleLocation The location of the application bundle to load, may be null. * @param privileged Whether or not privileges should be marked as available to the application bundle. 
* @param guiceModules The Guice {@link Module Modules} to install prior to startup. * @return The created TestDriver. */ public static TestDriver newInstance(OsgiFramework osgiFramework, String bundleLocation, boolean privileged, Module... guiceModules) { return newInstance(osgiFramework, bundleLocation, privileged, Arrays.asList(guiceModules)); } /** * <p>Factory method to create a working {@link OsgiFramework}. This method is used by all {@link TestDriver} * factories that DO NOT have the "WithoutOsgi" suffix.</p> * * @return A working OsgiFramework. */ public static FelixFramework newOsgiFramework() { return new FelixFramework(new FelixParams().setCachePath("target/bundlecache" + testId.getAndIncrement())); } /** * <p>Factory method to create a light-weight {@link OsgiFramework} that throws {@link * UnsupportedOperationException} if {@link OsgiFramework * OsgiFramework * support. This method is used by {@link TestDriver} factories that have the "WithoutOsgi" suffix.</p> * * @return A non-working OsgiFramework. */ public static OsgiFramework newNonWorkingOsgiFramework() { return new NonWorkingOsgiFramework(); } private class CloseTask implements Callable<Boolean> { @Override public Boolean call() throws Exception { loader.stop(); loader.destroy(); return true; } } private static TestDriver newInstance(OsgiFramework osgiFramework, String bundleLocation, boolean privileged, Iterable<? extends Module> guiceModules) { ApplicationLoader loader = new ApplicationLoader(osgiFramework, guiceModules); try { loader.init(bundleLocation, privileged); } catch (Exception e) { throw e instanceof RuntimeException ? (RuntimeException)e : new RuntimeException(e); } try { loader.start(); } catch (Exception e) { loader.destroy(); throw e instanceof RuntimeException ? (RuntimeException)e : new RuntimeException(e); } return new TestDriver(loader); } private static List<Module> newModuleList(final Application app, final Class<? extends Application> appClass, Module... 
guiceModules) { List<Module> lst = new LinkedList<>(); lst.addAll(Arrays.asList(guiceModules)); lst.add(new AbstractModule() { @Override public void configure() { AnnotatedBindingBuilder<Application> builder = bind(Application.class); if (app != null) { builder.toInstance(app); } else { builder.to(appClass); } } }); return lst; } private static class SimpleApplication implements Application { @Override public void start() { } @Override public void stop() { } @Override public void destroy() { } } }
Thanks.
public DeactivatedContainer activateContainer(ContainerBuilder builder) { try (DeactivatedContainer deactivated = loader.activateContainer(buionf }
try (DeactivatedContainer deactivated = loader.activateContainer(buionf
public DeactivatedContainer activateContainer(ContainerBuilder builder) { try (DeactivatedContainer deactivated = loader.activateContainer(builder)) { return deactivated; } }
class TestDriver implements ContainerActivator, CurrentContainer { private static final AtomicInteger testId = new AtomicInteger(0); private final FutureTask<Boolean> closeTask = new FutureTask<>(new CloseTask()); private final ApplicationLoader loader; private TestDriver(ApplicationLoader loader) { this.loader = loader; } @Override public ContainerBuilder newContainerBuilder() { return loader.newContainerBuilder(); } /** Returns the deactivated container, with its container reference already released. */ @Override @Override public Container newReference(URI uri) { return loader.newReference(uri); } /** * <p>Returns the {@link BootstrapLoader} used by this TestDriver. Use caution when invoking methods on the * BootstrapLoader directly, since the lifecycle management done by this TestDriver may become corrupt.</p> * * @return The BootstrapLoader. */ public BootstrapLoader bootstrapLoader() { return loader; } /** * <p>Returns the {@link Application} loaded by this TestDriver. Until {@link * never return null.</p> * * @return The loaded Application. */ public Application application() { return loader.application(); } /** * <p>Returns the {@link OsgiFramework} created by this TestDriver. Although this method will never return null, it * might return a {@link NonWorkingOsgiFramework} depending on the factory method used to instantiate it.</p> * * @return The OSGi framework. */ public OsgiFramework osgiFramework() { return loader.osgiFramework(); } /** * <p>Convenience method to create and {@link Request * CurrentContainer}. This method will either return the corresponding {@link ContentChannel} or throw the * appropriate exception (see {@link Request * * @param requestUri The URI string to parse and pass to the Request constructor. * @param responseHandler The ResponseHandler to pass to {@link Request * @return The ContentChannel returned by {@link Request * @throws NullPointerException If the URI string or the {@link ResponseHandler} is null. 
* @throws IllegalArgumentException If the URI string violates RFC&nbsp;2396. * @throws BindingNotFoundException If the corresponding call to {@link Container * returns null. * @throws RequestDeniedException If the corresponding call to {@link RequestHandler * ResponseHandler)} returns null. */ public ContentChannel connectRequest(String requestUri, ResponseHandler responseHandler) { return newRequestDispatch(requestUri, responseHandler).connect(); } /** * <p>Convenience method to create a {@link Request}, connect it to a {@link RequestHandler}, and close the returned * {@link ContentChannel}. This is the same as calling:</p> * <pre> * connectRequest(uri, responseHandler).close(null); * </pre> * * @param requestUri The URI string to parse and pass to the Request constructor. * @param responseHandler The ResponseHandler to pass to {@link Request * @return A waitable Future that provides access to the corresponding {@link Response}. * @throws NullPointerException If the URI string or the {@link ResponseHandler} is null. * @throws IllegalArgumentException If the URI string violates RFC&nbsp;2396. * @throws BindingNotFoundException If the corresponding call to {@link Container * returns null. * @throws RequestDeniedException If the corresponding call to {@link RequestHandler * ResponseHandler)} returns null. */ public Future<Response> dispatchRequest(String requestUri, ResponseHandler responseHandler) { return newRequestDispatch(requestUri, responseHandler).dispatch(); } /** * <p>Initiates the shut down of this TestDriver in another thread. By doing this in a separate thread, it allows * other code to monitor its progress. Unless you need the added monitoring capability, you should use {@link * * * @see */ public void scheduleClose() { new Thread(closeTask, "TestDriver.Closer").start(); } /** * <p>Waits for shut down of this TestDriver to complete. This call must be preceded by a call to {@link * * * @param timeout The maximum time to wait. 
* @param unit The time unit of the timeout argument. * @return True if shut down completed within the allocated time. */ public boolean awaitClose(long timeout, TimeUnit unit) { try { closeTask.get(timeout, unit); return true; } catch (TimeoutException e) { return false; } catch (Exception e) { throw e instanceof RuntimeException ? (RuntimeException)e : new RuntimeException(e); } } /** * <p>Initiatiates shut down of this TestDriver and waits for it to complete. If shut down fails to complete within * 60 seconds, this method throws an exception.</p> * * @return True if shut down completed within the allocated time. * @throws IllegalStateException If shut down failed to complete within the allocated time. */ public boolean close() { scheduleClose(); if ( ! awaitClose(600, TimeUnit.SECONDS)) { throw new IllegalStateException("Application failed to terminate within allocated time."); } return true; } /** * <p>Creates a new {@link RequestDispatch} that dispatches a {@link Request} with the given URI and {@link * ResponseHandler}.</p> * * @param requestUri The uri of the Request to create. * @param responseHandler The ResponseHandler to use for the dispather. * @return The created RequestDispatch. */ public RequestDispatch newRequestDispatch(final String requestUri, final ResponseHandler responseHandler) { return new RequestDispatch() { @Override protected Request newRequest() { return new Request(loader, URI.create(requestUri)); } @Override public ContentChannel handleResponse(Response response) { return responseHandler.handleResponse(response); } }; } /** * <p>Creates a new TestDriver with an injected {@link Application}.</p> * * @param appClass The Application class to inject. * @param guiceModules The Guice {@link Module Modules} to install prior to startup. * @return The created TestDriver. */ public static TestDriver newInjectedApplicationInstance(Class<? extends Application> appClass, Module... 
guiceModules) { return newInstance(newOsgiFramework(), null, false, newModuleList(null, appClass, guiceModules)); } /** * <p>Creates a new TestDriver with an injected {@link Application}, but without OSGi support.</p> * * @param appClass The Application class to inject. * @param guiceModules The Guice {@link Module Modules} to install prior to startup. * @return The created TestDriver. * @see * @see */ public static TestDriver newInjectedApplicationInstanceWithoutOsgi(Class<? extends Application> appClass, Module... guiceModules) { return newInstance(newNonWorkingOsgiFramework(), null, false, newModuleList(null, appClass, guiceModules)); } /** * <p>Creates a new TestDriver with an injected {@link Application}.</p> * * @param app The Application to inject. * @param guiceModules The Guice {@link Module Modules} to install prior to startup. * @return The created TestDriver. */ public static TestDriver newInjectedApplicationInstance(Application app, Module... guiceModules) { return newInstance(newOsgiFramework(), null, false, newModuleList(app, null, guiceModules)); } /** * <p>Creates a new TestDriver with an injected {@link Application}, but without OSGi support.</p> * * @param app The Application to inject. * @param guiceModules The Guice {@link Module Modules} to install prior to startup. * @return The created TestDriver. * @see * @see */ public static TestDriver newInjectedApplicationInstanceWithoutOsgi(Application app, Module... guiceModules) { return newInstance(newNonWorkingOsgiFramework(), null, false, newModuleList(app, null, guiceModules)); } /** * <p>Creates a new TestDriver with a predefined {@link Application} implementation. The injected Application class * implements nothing but the bare minimum to conform to the Application interface.</p> * * @param guiceModules The Guice {@link Module Modules} to install prior to startup. * @return The created TestDriver. */ public static TestDriver newSimpleApplicationInstance(Module... 
guiceModules) { return newInstance(newOsgiFramework(), null, false, newModuleList(null, SimpleApplication.class, guiceModules)); } /** * <p>Creates a new TestDriver with a predefined {@link Application} implementation, but without OSGi support. The * injected Application class implements nothing but the bare minimum to conform to the Application interface.</p> * * @param guiceModules The Guice {@link Module Modules} to install prior to startup. * @return The created TestDriver. * @see * @see */ public static TestDriver newSimpleApplicationInstanceWithoutOsgi(Module... guiceModules) { return newInstance(newNonWorkingOsgiFramework(), null, false, newModuleList(null, SimpleApplication.class, guiceModules)); } /** * <p>Creates a new TestDriver from an application bundle. This runs the same code path as the actual jDISC startup * code. Note that the named bundle must have a "X-JDisc-Application" bundle instruction, or setup will fail.</p> * * @param bundleLocation The location of the application bundle to load. * @param privileged Whether or not privileges should be marked as available to the application bundle. * @param guiceModules The Guice {@link Module Modules} to install prior to startup. * @return The created TestDriver. */ public static TestDriver newApplicationBundleInstance(String bundleLocation, boolean privileged, Module... guiceModules) { return newInstance(newOsgiFramework(), bundleLocation, privileged, Arrays.asList(guiceModules)); } /** * <p>Creates a new TestDriver with the given parameters. This is the factory method that all other factory methods * call. It allows you to specify all parts of the TestDriver manually.</p> * * @param osgiFramework The {@link OsgiFramework} to assign to the created TestDriver. * @param bundleLocation The location of the application bundle to load, may be null. * @param privileged Whether or not privileges should be marked as available to the application bundle. 
* @param guiceModules The Guice {@link Module Modules} to install prior to startup. * @return The created TestDriver. */ public static TestDriver newInstance(OsgiFramework osgiFramework, String bundleLocation, boolean privileged, Module... guiceModules) { return newInstance(osgiFramework, bundleLocation, privileged, Arrays.asList(guiceModules)); } /** * <p>Factory method to create a working {@link OsgiFramework}. This method is used by all {@link TestDriver} * factories that DO NOT have the "WithoutOsgi" suffix.</p> * * @return A working OsgiFramework. */ public static FelixFramework newOsgiFramework() { return new FelixFramework(new FelixParams().setCachePath("target/bundlecache" + testId.getAndIncrement())); } /** * <p>Factory method to create a light-weight {@link OsgiFramework} that throws {@link * UnsupportedOperationException} if {@link OsgiFramework * OsgiFramework * support. This method is used by {@link TestDriver} factories that have the "WithoutOsgi" suffix.</p> * * @return A non-working OsgiFramework. */ public static OsgiFramework newNonWorkingOsgiFramework() { return new NonWorkingOsgiFramework(); } private class CloseTask implements Callable<Boolean> { @Override public Boolean call() throws Exception { loader.stop(); loader.destroy(); return true; } } private static TestDriver newInstance(OsgiFramework osgiFramework, String bundleLocation, boolean privileged, Iterable<? extends Module> guiceModules) { ApplicationLoader loader = new ApplicationLoader(osgiFramework, guiceModules); try { loader.init(bundleLocation, privileged); } catch (Exception e) { throw e instanceof RuntimeException ? (RuntimeException)e : new RuntimeException(e); } try { loader.start(); } catch (Exception e) { loader.destroy(); throw e instanceof RuntimeException ? (RuntimeException)e : new RuntimeException(e); } return new TestDriver(loader); } private static List<Module> newModuleList(final Application app, final Class<? extends Application> appClass, Module... 
guiceModules) { List<Module> lst = new LinkedList<>(); lst.addAll(Arrays.asList(guiceModules)); lst.add(new AbstractModule() { @Override public void configure() { AnnotatedBindingBuilder<Application> builder = bind(Application.class); if (app != null) { builder.toInstance(app); } else { builder.to(appClass); } } }); return lst; } private static class SimpleApplication implements Application { @Override public void start() { } @Override public void stop() { } @Override public void destroy() { } } }
class TestDriver implements ContainerActivator, CurrentContainer { private static final AtomicInteger testId = new AtomicInteger(0); private final FutureTask<Boolean> closeTask = new FutureTask<>(new CloseTask()); private final ApplicationLoader loader; private TestDriver(ApplicationLoader loader) { this.loader = loader; } @Override public ContainerBuilder newContainerBuilder() { return loader.newContainerBuilder(); } /** Returns the deactivated container, with its container reference already released. */ @Override @Override public Container newReference(URI uri) { return loader.newReference(uri); } /** * <p>Returns the {@link BootstrapLoader} used by this TestDriver. Use caution when invoking methods on the * BootstrapLoader directly, since the lifecycle management done by this TestDriver may become corrupt.</p> * * @return The BootstrapLoader. */ public BootstrapLoader bootstrapLoader() { return loader; } /** * <p>Returns the {@link Application} loaded by this TestDriver. Until {@link * never return null.</p> * * @return The loaded Application. */ public Application application() { return loader.application(); } /** * <p>Returns the {@link OsgiFramework} created by this TestDriver. Although this method will never return null, it * might return a {@link NonWorkingOsgiFramework} depending on the factory method used to instantiate it.</p> * * @return The OSGi framework. */ public OsgiFramework osgiFramework() { return loader.osgiFramework(); } /** * <p>Convenience method to create and {@link Request * CurrentContainer}. This method will either return the corresponding {@link ContentChannel} or throw the * appropriate exception (see {@link Request * * @param requestUri The URI string to parse and pass to the Request constructor. * @param responseHandler The ResponseHandler to pass to {@link Request * @return The ContentChannel returned by {@link Request * @throws NullPointerException If the URI string or the {@link ResponseHandler} is null. 
* @throws IllegalArgumentException If the URI string violates RFC&nbsp;2396. * @throws BindingNotFoundException If the corresponding call to {@link Container * returns null. * @throws RequestDeniedException If the corresponding call to {@link RequestHandler * ResponseHandler)} returns null. */ public ContentChannel connectRequest(String requestUri, ResponseHandler responseHandler) { return newRequestDispatch(requestUri, responseHandler).connect(); } /** * <p>Convenience method to create a {@link Request}, connect it to a {@link RequestHandler}, and close the returned * {@link ContentChannel}. This is the same as calling:</p> * <pre> * connectRequest(uri, responseHandler).close(null); * </pre> * * @param requestUri The URI string to parse and pass to the Request constructor. * @param responseHandler The ResponseHandler to pass to {@link Request * @return A waitable Future that provides access to the corresponding {@link Response}. * @throws NullPointerException If the URI string or the {@link ResponseHandler} is null. * @throws IllegalArgumentException If the URI string violates RFC&nbsp;2396. * @throws BindingNotFoundException If the corresponding call to {@link Container * returns null. * @throws RequestDeniedException If the corresponding call to {@link RequestHandler * ResponseHandler)} returns null. */ public Future<Response> dispatchRequest(String requestUri, ResponseHandler responseHandler) { return newRequestDispatch(requestUri, responseHandler).dispatch(); } /** * <p>Initiates the shut down of this TestDriver in another thread. By doing this in a separate thread, it allows * other code to monitor its progress. Unless you need the added monitoring capability, you should use {@link * * * @see */ public void scheduleClose() { new Thread(closeTask, "TestDriver.Closer").start(); } /** * <p>Waits for shut down of this TestDriver to complete. This call must be preceded by a call to {@link * * * @param timeout The maximum time to wait. 
* @param unit The time unit of the timeout argument. * @return True if shut down completed within the allocated time. */ public boolean awaitClose(long timeout, TimeUnit unit) { try { closeTask.get(timeout, unit); return true; } catch (TimeoutException e) { return false; } catch (Exception e) { throw e instanceof RuntimeException ? (RuntimeException)e : new RuntimeException(e); } } /** * <p>Initiatiates shut down of this TestDriver and waits for it to complete. If shut down fails to complete within * 60 seconds, this method throws an exception.</p> * * @return True if shut down completed within the allocated time. * @throws IllegalStateException If shut down failed to complete within the allocated time. */ public boolean close() { scheduleClose(); if ( ! awaitClose(600, TimeUnit.SECONDS)) { throw new IllegalStateException("Application failed to terminate within allocated time."); } return true; } /** * <p>Creates a new {@link RequestDispatch} that dispatches a {@link Request} with the given URI and {@link * ResponseHandler}.</p> * * @param requestUri The uri of the Request to create. * @param responseHandler The ResponseHandler to use for the dispather. * @return The created RequestDispatch. */ public RequestDispatch newRequestDispatch(final String requestUri, final ResponseHandler responseHandler) { return new RequestDispatch() { @Override protected Request newRequest() { return new Request(loader, URI.create(requestUri)); } @Override public ContentChannel handleResponse(Response response) { return responseHandler.handleResponse(response); } }; } /** * <p>Creates a new TestDriver with an injected {@link Application}.</p> * * @param appClass The Application class to inject. * @param guiceModules The Guice {@link Module Modules} to install prior to startup. * @return The created TestDriver. */ public static TestDriver newInjectedApplicationInstance(Class<? extends Application> appClass, Module... 
guiceModules) { return newInstance(newOsgiFramework(), null, false, newModuleList(null, appClass, guiceModules)); } /** * <p>Creates a new TestDriver with an injected {@link Application}, but without OSGi support.</p> * * @param appClass The Application class to inject. * @param guiceModules The Guice {@link Module Modules} to install prior to startup. * @return The created TestDriver. * @see * @see */ public static TestDriver newInjectedApplicationInstanceWithoutOsgi(Class<? extends Application> appClass, Module... guiceModules) { return newInstance(newNonWorkingOsgiFramework(), null, false, newModuleList(null, appClass, guiceModules)); } /** * <p>Creates a new TestDriver with an injected {@link Application}.</p> * * @param app The Application to inject. * @param guiceModules The Guice {@link Module Modules} to install prior to startup. * @return The created TestDriver. */ public static TestDriver newInjectedApplicationInstance(Application app, Module... guiceModules) { return newInstance(newOsgiFramework(), null, false, newModuleList(app, null, guiceModules)); } /** * <p>Creates a new TestDriver with an injected {@link Application}, but without OSGi support.</p> * * @param app The Application to inject. * @param guiceModules The Guice {@link Module Modules} to install prior to startup. * @return The created TestDriver. * @see * @see */ public static TestDriver newInjectedApplicationInstanceWithoutOsgi(Application app, Module... guiceModules) { return newInstance(newNonWorkingOsgiFramework(), null, false, newModuleList(app, null, guiceModules)); } /** * <p>Creates a new TestDriver with a predefined {@link Application} implementation. The injected Application class * implements nothing but the bare minimum to conform to the Application interface.</p> * * @param guiceModules The Guice {@link Module Modules} to install prior to startup. * @return The created TestDriver. */ public static TestDriver newSimpleApplicationInstance(Module... 
guiceModules) { return newInstance(newOsgiFramework(), null, false, newModuleList(null, SimpleApplication.class, guiceModules)); } /** * <p>Creates a new TestDriver with a predefined {@link Application} implementation, but without OSGi support. The * injected Application class implements nothing but the bare minimum to conform to the Application interface.</p> * * @param guiceModules The Guice {@link Module Modules} to install prior to startup. * @return The created TestDriver. * @see * @see */ public static TestDriver newSimpleApplicationInstanceWithoutOsgi(Module... guiceModules) { return newInstance(newNonWorkingOsgiFramework(), null, false, newModuleList(null, SimpleApplication.class, guiceModules)); } /** * <p>Creates a new TestDriver from an application bundle. This runs the same code path as the actual jDISC startup * code. Note that the named bundle must have a "X-JDisc-Application" bundle instruction, or setup will fail.</p> * * @param bundleLocation The location of the application bundle to load. * @param privileged Whether or not privileges should be marked as available to the application bundle. * @param guiceModules The Guice {@link Module Modules} to install prior to startup. * @return The created TestDriver. */ public static TestDriver newApplicationBundleInstance(String bundleLocation, boolean privileged, Module... guiceModules) { return newInstance(newOsgiFramework(), bundleLocation, privileged, Arrays.asList(guiceModules)); } /** * <p>Creates a new TestDriver with the given parameters. This is the factory method that all other factory methods * call. It allows you to specify all parts of the TestDriver manually.</p> * * @param osgiFramework The {@link OsgiFramework} to assign to the created TestDriver. * @param bundleLocation The location of the application bundle to load, may be null. * @param privileged Whether or not privileges should be marked as available to the application bundle. 
* @param guiceModules The Guice {@link Module Modules} to install prior to startup. * @return The created TestDriver. */ public static TestDriver newInstance(OsgiFramework osgiFramework, String bundleLocation, boolean privileged, Module... guiceModules) { return newInstance(osgiFramework, bundleLocation, privileged, Arrays.asList(guiceModules)); } /** * <p>Factory method to create a working {@link OsgiFramework}. This method is used by all {@link TestDriver} * factories that DO NOT have the "WithoutOsgi" suffix.</p> * * @return A working OsgiFramework. */ public static FelixFramework newOsgiFramework() { return new FelixFramework(new FelixParams().setCachePath("target/bundlecache" + testId.getAndIncrement())); } /** * <p>Factory method to create a light-weight {@link OsgiFramework} that throws {@link * UnsupportedOperationException} if {@link OsgiFramework * OsgiFramework * support. This method is used by {@link TestDriver} factories that have the "WithoutOsgi" suffix.</p> * * @return A non-working OsgiFramework. */ public static OsgiFramework newNonWorkingOsgiFramework() { return new NonWorkingOsgiFramework(); } private class CloseTask implements Callable<Boolean> { @Override public Boolean call() throws Exception { loader.stop(); loader.destroy(); return true; } } private static TestDriver newInstance(OsgiFramework osgiFramework, String bundleLocation, boolean privileged, Iterable<? extends Module> guiceModules) { ApplicationLoader loader = new ApplicationLoader(osgiFramework, guiceModules); try { loader.init(bundleLocation, privileged); } catch (Exception e) { throw e instanceof RuntimeException ? (RuntimeException)e : new RuntimeException(e); } try { loader.start(); } catch (Exception e) { loader.destroy(); throw e instanceof RuntimeException ? (RuntimeException)e : new RuntimeException(e); } return new TestDriver(loader); } private static List<Module> newModuleList(final Application app, final Class<? extends Application> appClass, Module... 
guiceModules) { List<Module> lst = new LinkedList<>(); lst.addAll(Arrays.asList(guiceModules)); lst.add(new AbstractModule() { @Override public void configure() { AnnotatedBindingBuilder<Application> builder = bind(Application.class); if (app != null) { builder.toInstance(app); } else { builder.to(appClass); } } }); return lst; } private static class SimpleApplication implements Application { @Override public void start() { } @Override public void stop() { } @Override public void destroy() { } } }
Clever!
public void stop() throws Exception { log.finer("Destroying application."); Application app; ApplicationInUseTracker applicationInUseTracker; synchronized (appLock) { app = application; applicationInUseTracker = this.applicationInUseTracker; } if (app == null || applicationInUseTracker == null) { return; } try { app.stop(); } catch (Exception e) { log.log(Level.WARNING, "Exception thrown while deactivating application.", e); } synchronized (appLock) { application = null; } try (DeactivatedContainer deactivated = activateContainer(null)) { } synchronized (appLock) { this.applicationInUseTracker = null; } applicationInUseTracker.release(); applicationInUseTracker.applicationInUseLatch.await(); app.destroy(); }
try (DeactivatedContainer deactivated = activateContainer(null)) { }
public void stop() throws Exception { log.finer("Destroying application."); Application app; ApplicationInUseTracker applicationInUseTracker; synchronized (appLock) { app = application; applicationInUseTracker = this.applicationInUseTracker; } if (app == null || applicationInUseTracker == null) { return; } try { app.stop(); } catch (Exception e) { log.log(Level.WARNING, "Exception thrown while deactivating application.", e); } synchronized (appLock) { application = null; } try (DeactivatedContainer deactivated = activateContainer(null)) { } synchronized (appLock) { this.applicationInUseTracker = null; } applicationInUseTracker.release(); applicationInUseTracker.applicationInUseLatch.await(); app.destroy(); }
class " + appName + " from bundle '" + appBundle.getSymbolicName() + "'."); Class<Application> appClass = ContainerBuilder.safeClassCast(Application.class, appBundle.loadClass(appName)); app = injector.getInstance(appClass); } else { app = injector.getInstance(Application.class); log.finer("Injecting instance of " + app.getClass().getName() + "."); }
class " + appName + " from bundle '" + appBundle.getSymbolicName() + "'."); Class<Application> appClass = ContainerBuilder.safeClassCast(Application.class, appBundle.loadClass(appName)); app = injector.getInstance(appClass); } else { app = injector.getInstance(Application.class); log.finer("Injecting instance of " + app.getClass().getName() + "."); }
This will only reduce the probability. You need to make an explicit start method that must be called after the object has been fully constructed.
public AbstractSpoolingLogger(Spooler spooler) { this.spooler = spooler; this.executorService = new ScheduledThreadPoolExecutor(1, new DaemonThreadFactory("AbstractSpoolingLogger-send-")); this.executorService.scheduleWithFixedDelay(this, 5, 1L, TimeUnit.SECONDS); }
this.executorService.scheduleWithFixedDelay(this, 5, 1L, TimeUnit.SECONDS);
public AbstractSpoolingLogger(Spooler spooler) { this.spooler = spooler; this.executorService = new ScheduledThreadPoolExecutor(1, new DaemonThreadFactory("AbstractSpoolingLogger-send-")); this.executorService.scheduleWithFixedDelay(this, 5, 1L, TimeUnit.SECONDS); }
class AbstractSpoolingLogger extends AbstractThreadedLogger implements Runnable { protected static final java.util.logging.Logger log = java.util.logging.Logger.getLogger(Spooler.class.getName()); private final ScheduledExecutorService executorService; protected final Spooler spooler; @SuppressWarnings("unused") public AbstractSpoolingLogger() { this(new Spooler(Clock.systemUTC())); } public void run() { try { spooler.switchFileIfNeeded(); spooler.processFiles(this::transport); } catch (Exception e) { log.log(Level.WARNING, "Exception when processing files: " + e.getMessage()); } } @Override public boolean send(LoggerEntry entry) { log.log(Level.FINE, "Sending entry " + entry + " to spooler"); try { executor.execute(() -> spooler.write(entry)); } catch (RejectedExecutionException e) { return false; } return true; } @Deprecated /* @deprecated use {@link */ public void shutdown() { deconstruct(); } @Override public void deconstruct() { super.deconstruct(); executorService.shutdown(); try { if ( ! executorService.awaitTermination(10, TimeUnit.SECONDS)) log.log(Level.WARNING, "Timeout elapsed waiting for termination"); } catch (InterruptedException e) { log.log(Level.WARNING, "Failure when waiting for termination: " + e.getMessage()); } run(); } }
class AbstractSpoolingLogger extends AbstractThreadedLogger implements Runnable { protected static final java.util.logging.Logger log = java.util.logging.Logger.getLogger(Spooler.class.getName()); private final ScheduledExecutorService executorService; protected final Spooler spooler; @SuppressWarnings("unused") public AbstractSpoolingLogger() { this(new Spooler(Clock.systemUTC())); } public void run() { try { spooler.switchFileIfNeeded(); spooler.processFiles(this::transport); } catch (Exception e) { log.log(Level.WARNING, "Exception when processing files: " + e.getMessage()); } } @Override public boolean send(LoggerEntry entry) { log.log(Level.FINE, "Sending entry " + entry + " to spooler"); try { executor.execute(() -> spooler.write(entry)); } catch (RejectedExecutionException e) { return false; } return true; } @Deprecated /* @deprecated use {@link */ public void shutdown() { deconstruct(); } @Override public void deconstruct() { super.deconstruct(); executorService.shutdown(); try { if ( ! executorService.awaitTermination(10, TimeUnit.SECONDS)) log.log(Level.WARNING, "Timeout elapsed waiting for termination"); } catch (InterruptedException e) { log.log(Level.WARNING, "Failure when waiting for termination: " + e.getMessage()); } run(); } }
Yes, that was my alternative solution, but customers subclass our default implementation, so I need to think of some way of making it work for both an implementation and a subclass of that again.
public AbstractSpoolingLogger(Spooler spooler) { this.spooler = spooler; this.executorService = new ScheduledThreadPoolExecutor(1, new DaemonThreadFactory("AbstractSpoolingLogger-send-")); this.executorService.scheduleWithFixedDelay(this, 5, 1L, TimeUnit.SECONDS); }
this.executorService.scheduleWithFixedDelay(this, 5, 1L, TimeUnit.SECONDS);
public AbstractSpoolingLogger(Spooler spooler) { this.spooler = spooler; this.executorService = new ScheduledThreadPoolExecutor(1, new DaemonThreadFactory("AbstractSpoolingLogger-send-")); this.executorService.scheduleWithFixedDelay(this, 5, 1L, TimeUnit.SECONDS); }
class AbstractSpoolingLogger extends AbstractThreadedLogger implements Runnable { protected static final java.util.logging.Logger log = java.util.logging.Logger.getLogger(Spooler.class.getName()); private final ScheduledExecutorService executorService; protected final Spooler spooler; @SuppressWarnings("unused") public AbstractSpoolingLogger() { this(new Spooler(Clock.systemUTC())); } public void run() { try { spooler.switchFileIfNeeded(); spooler.processFiles(this::transport); } catch (Exception e) { log.log(Level.WARNING, "Exception when processing files: " + e.getMessage()); } } @Override public boolean send(LoggerEntry entry) { log.log(Level.FINE, "Sending entry " + entry + " to spooler"); try { executor.execute(() -> spooler.write(entry)); } catch (RejectedExecutionException e) { return false; } return true; } @Deprecated /* @deprecated use {@link */ public void shutdown() { deconstruct(); } @Override public void deconstruct() { super.deconstruct(); executorService.shutdown(); try { if ( ! executorService.awaitTermination(10, TimeUnit.SECONDS)) log.log(Level.WARNING, "Timeout elapsed waiting for termination"); } catch (InterruptedException e) { log.log(Level.WARNING, "Failure when waiting for termination: " + e.getMessage()); } run(); } }
class AbstractSpoolingLogger extends AbstractThreadedLogger implements Runnable { protected static final java.util.logging.Logger log = java.util.logging.Logger.getLogger(Spooler.class.getName()); private final ScheduledExecutorService executorService; protected final Spooler spooler; @SuppressWarnings("unused") public AbstractSpoolingLogger() { this(new Spooler(Clock.systemUTC())); } public void run() { try { spooler.switchFileIfNeeded(); spooler.processFiles(this::transport); } catch (Exception e) { log.log(Level.WARNING, "Exception when processing files: " + e.getMessage()); } } @Override public boolean send(LoggerEntry entry) { log.log(Level.FINE, "Sending entry " + entry + " to spooler"); try { executor.execute(() -> spooler.write(entry)); } catch (RejectedExecutionException e) { return false; } return true; } @Deprecated /* @deprecated use {@link */ public void shutdown() { deconstruct(); } @Override public void deconstruct() { super.deconstruct(); executorService.shutdown(); try { if ( ! executorService.awaitTermination(10, TimeUnit.SECONDS)) log.log(Level.WARNING, "Timeout elapsed waiting for termination"); } catch (InterruptedException e) { log.log(Level.WARNING, "Failure when waiting for termination: " + e.getMessage()); } run(); } }
We should probably rely only on this?
private boolean clusterIsStable(Node node, NodeList applicationNodes, ListMap<String, Double> nodeValues) { if (Metric.redistributing.from(nodeValues) > 0) { return false; } ClusterSpec cluster = node.allocation().get().membership().cluster(); return applicationNodes.cluster(cluster.id()).retired().isEmpty(); }
if (Metric.redistributing.from(nodeValues) > 0) {
private boolean clusterIsStable(Node node, NodeList applicationNodes, ListMap<String, Double> nodeValues) { if (Metric.redistributing.from(nodeValues) > 0) { return false; } ClusterSpec cluster = node.allocation().get().membership().cluster(); return applicationNodes.cluster(cluster.id()).retired().isEmpty(); }
class MetricsResponse { /** Node level metrics */ private final Collection<Pair<String, NodeMetricSnapshot>> nodeMetrics; /** * Cluster level metrics. * Must be aggregated at fetch time to avoid issues with nodes and nodes joining/leaving the cluster over time. */ private final Map<ClusterSpec.Id, ClusterMetricSnapshot> clusterMetrics = new HashMap<>(); /** Creates this from a metrics/V2 response */ public MetricsResponse(String response, NodeList applicationNodes) { this(SlimeUtils.jsonToSlime(response), applicationNodes); } public MetricsResponse(Collection<Pair<String, NodeMetricSnapshot>> metrics) { this.nodeMetrics = metrics; } private MetricsResponse(Slime response, NodeList applicationNodes) { nodeMetrics = new ArrayList<>(); Inspector root = response.get(); Inspector nodes = root.field("nodes"); nodes.traverse((ArrayTraverser)(__, node) -> consumeNode(node, applicationNodes)); } public Collection<Pair<String, NodeMetricSnapshot>> nodeMetrics() { return nodeMetrics; } public Map<ClusterSpec.Id, ClusterMetricSnapshot> clusterMetrics() { return clusterMetrics; } private void consumeNode(Inspector nodeObject, NodeList applicationNodes) { String hostname = nodeObject.field("hostname").asString(); Optional<Node> node = applicationNodes.node(hostname); if (node.isEmpty()) return; ListMap<String, Double> nodeValues = new ListMap<>(); Instant at = consumeNodeMetrics(nodeObject.field("node"), nodeValues); consumeServiceMetrics(nodeObject.field("services"), nodeValues); nodeMetrics.add(new Pair<>(hostname, new NodeMetricSnapshot(at, new Load(Metric.cpu.from(nodeValues), Metric.memory.from(nodeValues), Metric.disk.from(nodeValues)), (long)Metric.generation.from(nodeValues), Metric.inService.from(nodeValues) > 0, clusterIsStable(node.get(), applicationNodes, nodeValues), Metric.queryRate.from(nodeValues)))); var cluster = node.get().allocation().get().membership().cluster().id(); var metrics = clusterMetrics.getOrDefault(cluster, ClusterMetricSnapshot.empty(at)); 
metrics = metrics.withQueryRate(metrics.queryRate() + Metric.queryRate.from(nodeValues)); metrics = metrics.withWriteRate(metrics.queryRate() + Metric.writeRate.from(nodeValues)); clusterMetrics.put(cluster, metrics); } private Instant consumeNodeMetrics(Inspector nodeObject, ListMap<String, Double> nodeValues) { long timestampSecond = nodeObject.field("timestamp").asLong(); Instant at = Instant.ofEpochMilli(timestampSecond * 1000); nodeObject.field("metrics").traverse((ArrayTraverser) (__, item) -> consumeMetricsItem(item, nodeValues)); return at; } private void consumeServiceMetrics(Inspector servicesObject, ListMap<String, Double> nodeValues) { servicesObject.traverse((ArrayTraverser) (__, item) -> consumeServiceItem(item, nodeValues)); } private void consumeServiceItem(Inspector serviceObject, ListMap<String, Double> nodeValues) { serviceObject.field("metrics").traverse((ArrayTraverser) (__, item) -> consumeMetricsItem(item, nodeValues)); } private void consumeMetricsItem(Inspector item, ListMap<String, Double> values) { item.field("values").traverse((ObjectTraverser)(name, value) -> values.put(name, value.asDouble())); } public static MetricsResponse empty() { return new MetricsResponse(List.of()); } /** The metrics this can read */ private enum Metric { cpu { @Override public List<String> metricResponseNames() { return List.of(HostedNodeAdminMetrics.CPU_UTIL.baseName(), HostedNodeAdminMetrics.GPU_UTIL.baseName()); } @Override double computeFinal(ListMap<String, Double> values) { return values.values().stream().flatMap(List::stream).mapToDouble(v -> v).max().orElse(0) / 100; } }, memory { @Override public List<String> metricResponseNames() { return List.of(HostedNodeAdminMetrics.MEM_UTIL.baseName(), SearchNodeMetrics.CONTENT_PROTON_RESOURCE_USAGE_MEMORY.average(), HostedNodeAdminMetrics.GPU_MEM_USED.baseName(), HostedNodeAdminMetrics.GPU_MEM_TOTAL.baseName()); } @Override double computeFinal(ListMap<String, Double> values) { return Math.max(gpuMemUtil(values), 
cpuMemUtil(values)); } private double cpuMemUtil(ListMap<String, Double> values) { var valueList = values.get(SearchNodeMetrics.CONTENT_PROTON_RESOURCE_USAGE_MEMORY.average()); if ( ! valueList.isEmpty()) return valueList.get(0); valueList = values.get(HostedNodeAdminMetrics.MEM_UTIL.baseName()); if ( ! valueList.isEmpty()) return valueList.get(0) / 100; return 0; } private double gpuMemUtil(ListMap<String, Double> values) { var usedGpuMemory = values.get(HostedNodeAdminMetrics.GPU_MEM_USED.baseName()).stream().mapToDouble(v -> v).sum(); var totalGpuMemory = values.get(HostedNodeAdminMetrics.GPU_MEM_TOTAL.baseName()).stream().mapToDouble(v -> v).sum(); return totalGpuMemory > 0 ? usedGpuMemory / totalGpuMemory : 0; } }, disk { @Override public List<String> metricResponseNames() { return List.of(HostedNodeAdminMetrics.DISK_UTIL.baseName(), SearchNodeMetrics.CONTENT_PROTON_RESOURCE_USAGE_DISK.average()); } @Override double computeFinal(ListMap<String, Double> values) { var valueList = values.get(SearchNodeMetrics.CONTENT_PROTON_RESOURCE_USAGE_DISK.average()); if ( ! valueList.isEmpty()) return valueList.get(0); valueList = values.get(HostedNodeAdminMetrics.DISK_UTIL.baseName()); if ( ! valueList.isEmpty()) return valueList.get(0) / 100; return 0; } }, generation { @Override public List<String> metricResponseNames() { return List.of(APPLICATION_GENERATION.last(), SearchNodeMetrics.CONTENT_PROTON_CONFIG_GENERATION.last()); } @Override double computeFinal(ListMap<String, Double> values) { return values.values().stream().flatMap(List::stream).mapToDouble(v -> v).min().orElse(-1); } }, inService { @Override public List<String> metricResponseNames() { return List.of(IN_SERVICE.last()); } @Override double computeFinal(ListMap<String, Double> values) { return values.values().stream().flatMap(List::stream).anyMatch(v -> v == 0) ? 
0 : 1; } }, redistributing { @Override public List<String> metricResponseNames() { return List.of(DistributorMetrics.VDS_IDEALSTATE_MERGE_BUCKET_PENDING.last()); } @Override double computeFinal(ListMap<String, Double> values) { return values.values().stream().flatMap(List::stream).anyMatch(v -> v > 0) ? 1 : 0; } }, queryRate { @Override public List<String> metricResponseNames() { return List.of(ContainerMetrics.QUERIES.rate(), SearchNodeMetrics.CONTENT_PROTON_DOCUMENTDB_MATCHING_QUERIES.rate()); } }, writeRate { @Override public List<String> metricResponseNames() { return List.of(ContainerMetrics.FEED_HTTP_REQUESTS.rate(), StorageMetrics.VDS_FILESTOR_ALLTHREADS_PUT_COUNT.rate(), StorageMetrics.VDS_FILESTOR_ALLTHREADS_REMOVE_COUNT.rate(), StorageMetrics.VDS_FILESTOR_ALLTHREADS_UPDATE_COUNT.rate()); } }; /** * The names of this metric as emitted from its source. * A map of the values of these names which were present in the response will * be provided to computeFinal to decide on a single value. */ public abstract List<String> metricResponseNames(); /** Computes the final metric value */ double computeFinal(ListMap<String, Double> values) { return values.values().stream().flatMap(List::stream).mapToDouble(v -> v).sum(); } public double from(ListMap<String, Double> metricValues) { ListMap<String, Double> values = new ListMap<>(metricValues); values.keySet().retainAll(metricResponseNames()); return computeFinal(values); } } }
class MetricsResponse { /** Node level metrics */ private final Collection<Pair<String, NodeMetricSnapshot>> nodeMetrics; /** * Cluster level metrics. * Must be aggregated at fetch time to avoid issues with nodes and nodes joining/leaving the cluster over time. */ private final Map<ClusterSpec.Id, ClusterMetricSnapshot> clusterMetrics = new HashMap<>(); /** Creates this from a metrics/V2 response */ public MetricsResponse(String response, NodeList applicationNodes) { this(SlimeUtils.jsonToSlime(response), applicationNodes); } public MetricsResponse(Collection<Pair<String, NodeMetricSnapshot>> metrics) { this.nodeMetrics = metrics; } private MetricsResponse(Slime response, NodeList applicationNodes) { nodeMetrics = new ArrayList<>(); Inspector root = response.get(); Inspector nodes = root.field("nodes"); nodes.traverse((ArrayTraverser)(__, node) -> consumeNode(node, applicationNodes)); } public Collection<Pair<String, NodeMetricSnapshot>> nodeMetrics() { return nodeMetrics; } public Map<ClusterSpec.Id, ClusterMetricSnapshot> clusterMetrics() { return clusterMetrics; } private void consumeNode(Inspector nodeObject, NodeList applicationNodes) { String hostname = nodeObject.field("hostname").asString(); Optional<Node> node = applicationNodes.node(hostname); if (node.isEmpty()) return; ListMap<String, Double> nodeValues = new ListMap<>(); Instant at = consumeNodeMetrics(nodeObject.field("node"), nodeValues); consumeServiceMetrics(nodeObject.field("services"), nodeValues); nodeMetrics.add(new Pair<>(hostname, new NodeMetricSnapshot(at, new Load(Metric.cpu.from(nodeValues), Metric.memory.from(nodeValues), Metric.disk.from(nodeValues)), (long)Metric.generation.from(nodeValues), Metric.inService.from(nodeValues) > 0, clusterIsStable(node.get(), applicationNodes, nodeValues), Metric.queryRate.from(nodeValues)))); var cluster = node.get().allocation().get().membership().cluster().id(); var metrics = clusterMetrics.getOrDefault(cluster, ClusterMetricSnapshot.empty(at)); 
metrics = metrics.withQueryRate(metrics.queryRate() + Metric.queryRate.from(nodeValues)); metrics = metrics.withWriteRate(metrics.queryRate() + Metric.writeRate.from(nodeValues)); clusterMetrics.put(cluster, metrics); } private Instant consumeNodeMetrics(Inspector nodeObject, ListMap<String, Double> nodeValues) { long timestampSecond = nodeObject.field("timestamp").asLong(); Instant at = Instant.ofEpochMilli(timestampSecond * 1000); nodeObject.field("metrics").traverse((ArrayTraverser) (__, item) -> consumeMetricsItem(item, nodeValues)); return at; } private void consumeServiceMetrics(Inspector servicesObject, ListMap<String, Double> nodeValues) { servicesObject.traverse((ArrayTraverser) (__, item) -> consumeServiceItem(item, nodeValues)); } private void consumeServiceItem(Inspector serviceObject, ListMap<String, Double> nodeValues) { serviceObject.field("metrics").traverse((ArrayTraverser) (__, item) -> consumeMetricsItem(item, nodeValues)); } private void consumeMetricsItem(Inspector item, ListMap<String, Double> values) { item.field("values").traverse((ObjectTraverser)(name, value) -> values.put(name, value.asDouble())); } public static MetricsResponse empty() { return new MetricsResponse(List.of()); } /** The metrics this can read */ private enum Metric { cpu { @Override public List<String> metricResponseNames() { return List.of(HostedNodeAdminMetrics.CPU_UTIL.baseName(), HostedNodeAdminMetrics.GPU_UTIL.baseName()); } @Override double computeFinal(ListMap<String, Double> values) { return values.values().stream().flatMap(List::stream).mapToDouble(v -> v).max().orElse(0) / 100; } }, memory { @Override public List<String> metricResponseNames() { return List.of(HostedNodeAdminMetrics.MEM_UTIL.baseName(), SearchNodeMetrics.CONTENT_PROTON_RESOURCE_USAGE_MEMORY.average(), HostedNodeAdminMetrics.GPU_MEM_USED.baseName(), HostedNodeAdminMetrics.GPU_MEM_TOTAL.baseName()); } @Override double computeFinal(ListMap<String, Double> values) { return Math.max(gpuMemUtil(values), 
cpuMemUtil(values)); } private double cpuMemUtil(ListMap<String, Double> values) { var valueList = values.get(SearchNodeMetrics.CONTENT_PROTON_RESOURCE_USAGE_MEMORY.average()); if ( ! valueList.isEmpty()) return valueList.get(0); valueList = values.get(HostedNodeAdminMetrics.MEM_UTIL.baseName()); if ( ! valueList.isEmpty()) return valueList.get(0) / 100; return 0; } private double gpuMemUtil(ListMap<String, Double> values) { var usedGpuMemory = values.get(HostedNodeAdminMetrics.GPU_MEM_USED.baseName()).stream().mapToDouble(v -> v).sum(); var totalGpuMemory = values.get(HostedNodeAdminMetrics.GPU_MEM_TOTAL.baseName()).stream().mapToDouble(v -> v).sum(); return totalGpuMemory > 0 ? usedGpuMemory / totalGpuMemory : 0; } }, disk { @Override public List<String> metricResponseNames() { return List.of(HostedNodeAdminMetrics.DISK_UTIL.baseName(), SearchNodeMetrics.CONTENT_PROTON_RESOURCE_USAGE_DISK.average()); } @Override double computeFinal(ListMap<String, Double> values) { var valueList = values.get(SearchNodeMetrics.CONTENT_PROTON_RESOURCE_USAGE_DISK.average()); if ( ! valueList.isEmpty()) return valueList.get(0); valueList = values.get(HostedNodeAdminMetrics.DISK_UTIL.baseName()); if ( ! valueList.isEmpty()) return valueList.get(0) / 100; return 0; } }, generation { @Override public List<String> metricResponseNames() { return List.of(APPLICATION_GENERATION.last(), SearchNodeMetrics.CONTENT_PROTON_CONFIG_GENERATION.last()); } @Override double computeFinal(ListMap<String, Double> values) { return values.values().stream().flatMap(List::stream).mapToDouble(v -> v).min().orElse(-1); } }, inService { @Override public List<String> metricResponseNames() { return List.of(IN_SERVICE.last()); } @Override double computeFinal(ListMap<String, Double> values) { return values.values().stream().flatMap(List::stream).anyMatch(v -> v == 0) ? 
0 : 1; } }, redistributing { @Override public List<String> metricResponseNames() { return List.of(DistributorMetrics.VDS_IDEALSTATE_MERGE_BUCKET_PENDING.last()); } @Override double computeFinal(ListMap<String, Double> values) { return values.values().stream().flatMap(List::stream).anyMatch(v -> v > 0) ? 1 : 0; } }, queryRate { @Override public List<String> metricResponseNames() { return List.of(ContainerMetrics.QUERIES.rate(), SearchNodeMetrics.CONTENT_PROTON_DOCUMENTDB_MATCHING_QUERIES.rate()); } }, writeRate { @Override public List<String> metricResponseNames() { return List.of(ContainerMetrics.FEED_HTTP_REQUESTS.rate(), StorageMetrics.VDS_FILESTOR_ALLTHREADS_PUT_COUNT.rate(), StorageMetrics.VDS_FILESTOR_ALLTHREADS_REMOVE_COUNT.rate(), StorageMetrics.VDS_FILESTOR_ALLTHREADS_UPDATE_COUNT.rate()); } }; /** * The names of this metric as emitted from its source. * A map of the values of these names which were present in the response will * be provided to computeFinal to decide on a single value. */ public abstract List<String> metricResponseNames(); /** Computes the final metric value */ double computeFinal(ListMap<String, Double> values) { return values.values().stream().flatMap(List::stream).mapToDouble(v -> v).sum(); } public double from(ListMap<String, Double> metricValues) { ListMap<String, Double> values = new ListMap<>(metricValues); values.keySet().retainAll(metricResponseNames()); return computeFinal(values); } } }
Yes, but I don't think the merge pending metric will be exposed in the "autoscale" set until the application upgrades?
private boolean clusterIsStable(Node node, NodeList applicationNodes, ListMap<String, Double> nodeValues) { if (Metric.redistributing.from(nodeValues) > 0) { return false; } ClusterSpec cluster = node.allocation().get().membership().cluster(); return applicationNodes.cluster(cluster.id()).retired().isEmpty(); }
if (Metric.redistributing.from(nodeValues) > 0) {
private boolean clusterIsStable(Node node, NodeList applicationNodes, ListMap<String, Double> nodeValues) { if (Metric.redistributing.from(nodeValues) > 0) { return false; } ClusterSpec cluster = node.allocation().get().membership().cluster(); return applicationNodes.cluster(cluster.id()).retired().isEmpty(); }
class MetricsResponse { /** Node level metrics */ private final Collection<Pair<String, NodeMetricSnapshot>> nodeMetrics; /** * Cluster level metrics. * Must be aggregated at fetch time to avoid issues with nodes and nodes joining/leaving the cluster over time. */ private final Map<ClusterSpec.Id, ClusterMetricSnapshot> clusterMetrics = new HashMap<>(); /** Creates this from a metrics/V2 response */ public MetricsResponse(String response, NodeList applicationNodes) { this(SlimeUtils.jsonToSlime(response), applicationNodes); } public MetricsResponse(Collection<Pair<String, NodeMetricSnapshot>> metrics) { this.nodeMetrics = metrics; } private MetricsResponse(Slime response, NodeList applicationNodes) { nodeMetrics = new ArrayList<>(); Inspector root = response.get(); Inspector nodes = root.field("nodes"); nodes.traverse((ArrayTraverser)(__, node) -> consumeNode(node, applicationNodes)); } public Collection<Pair<String, NodeMetricSnapshot>> nodeMetrics() { return nodeMetrics; } public Map<ClusterSpec.Id, ClusterMetricSnapshot> clusterMetrics() { return clusterMetrics; } private void consumeNode(Inspector nodeObject, NodeList applicationNodes) { String hostname = nodeObject.field("hostname").asString(); Optional<Node> node = applicationNodes.node(hostname); if (node.isEmpty()) return; ListMap<String, Double> nodeValues = new ListMap<>(); Instant at = consumeNodeMetrics(nodeObject.field("node"), nodeValues); consumeServiceMetrics(nodeObject.field("services"), nodeValues); nodeMetrics.add(new Pair<>(hostname, new NodeMetricSnapshot(at, new Load(Metric.cpu.from(nodeValues), Metric.memory.from(nodeValues), Metric.disk.from(nodeValues)), (long)Metric.generation.from(nodeValues), Metric.inService.from(nodeValues) > 0, clusterIsStable(node.get(), applicationNodes, nodeValues), Metric.queryRate.from(nodeValues)))); var cluster = node.get().allocation().get().membership().cluster().id(); var metrics = clusterMetrics.getOrDefault(cluster, ClusterMetricSnapshot.empty(at)); 
metrics = metrics.withQueryRate(metrics.queryRate() + Metric.queryRate.from(nodeValues)); metrics = metrics.withWriteRate(metrics.queryRate() + Metric.writeRate.from(nodeValues)); clusterMetrics.put(cluster, metrics); } private Instant consumeNodeMetrics(Inspector nodeObject, ListMap<String, Double> nodeValues) { long timestampSecond = nodeObject.field("timestamp").asLong(); Instant at = Instant.ofEpochMilli(timestampSecond * 1000); nodeObject.field("metrics").traverse((ArrayTraverser) (__, item) -> consumeMetricsItem(item, nodeValues)); return at; } private void consumeServiceMetrics(Inspector servicesObject, ListMap<String, Double> nodeValues) { servicesObject.traverse((ArrayTraverser) (__, item) -> consumeServiceItem(item, nodeValues)); } private void consumeServiceItem(Inspector serviceObject, ListMap<String, Double> nodeValues) { serviceObject.field("metrics").traverse((ArrayTraverser) (__, item) -> consumeMetricsItem(item, nodeValues)); } private void consumeMetricsItem(Inspector item, ListMap<String, Double> values) { item.field("values").traverse((ObjectTraverser)(name, value) -> values.put(name, value.asDouble())); } public static MetricsResponse empty() { return new MetricsResponse(List.of()); } /** The metrics this can read */ private enum Metric { cpu { @Override public List<String> metricResponseNames() { return List.of(HostedNodeAdminMetrics.CPU_UTIL.baseName(), HostedNodeAdminMetrics.GPU_UTIL.baseName()); } @Override double computeFinal(ListMap<String, Double> values) { return values.values().stream().flatMap(List::stream).mapToDouble(v -> v).max().orElse(0) / 100; } }, memory { @Override public List<String> metricResponseNames() { return List.of(HostedNodeAdminMetrics.MEM_UTIL.baseName(), SearchNodeMetrics.CONTENT_PROTON_RESOURCE_USAGE_MEMORY.average(), HostedNodeAdminMetrics.GPU_MEM_USED.baseName(), HostedNodeAdminMetrics.GPU_MEM_TOTAL.baseName()); } @Override double computeFinal(ListMap<String, Double> values) { return Math.max(gpuMemUtil(values), 
cpuMemUtil(values)); } private double cpuMemUtil(ListMap<String, Double> values) { var valueList = values.get(SearchNodeMetrics.CONTENT_PROTON_RESOURCE_USAGE_MEMORY.average()); if ( ! valueList.isEmpty()) return valueList.get(0); valueList = values.get(HostedNodeAdminMetrics.MEM_UTIL.baseName()); if ( ! valueList.isEmpty()) return valueList.get(0) / 100; return 0; } private double gpuMemUtil(ListMap<String, Double> values) { var usedGpuMemory = values.get(HostedNodeAdminMetrics.GPU_MEM_USED.baseName()).stream().mapToDouble(v -> v).sum(); var totalGpuMemory = values.get(HostedNodeAdminMetrics.GPU_MEM_TOTAL.baseName()).stream().mapToDouble(v -> v).sum(); return totalGpuMemory > 0 ? usedGpuMemory / totalGpuMemory : 0; } }, disk { @Override public List<String> metricResponseNames() { return List.of(HostedNodeAdminMetrics.DISK_UTIL.baseName(), SearchNodeMetrics.CONTENT_PROTON_RESOURCE_USAGE_DISK.average()); } @Override double computeFinal(ListMap<String, Double> values) { var valueList = values.get(SearchNodeMetrics.CONTENT_PROTON_RESOURCE_USAGE_DISK.average()); if ( ! valueList.isEmpty()) return valueList.get(0); valueList = values.get(HostedNodeAdminMetrics.DISK_UTIL.baseName()); if ( ! valueList.isEmpty()) return valueList.get(0) / 100; return 0; } }, generation { @Override public List<String> metricResponseNames() { return List.of(APPLICATION_GENERATION.last(), SearchNodeMetrics.CONTENT_PROTON_CONFIG_GENERATION.last()); } @Override double computeFinal(ListMap<String, Double> values) { return values.values().stream().flatMap(List::stream).mapToDouble(v -> v).min().orElse(-1); } }, inService { @Override public List<String> metricResponseNames() { return List.of(IN_SERVICE.last()); } @Override double computeFinal(ListMap<String, Double> values) { return values.values().stream().flatMap(List::stream).anyMatch(v -> v == 0) ? 
0 : 1; } }, redistributing { @Override public List<String> metricResponseNames() { return List.of(DistributorMetrics.VDS_IDEALSTATE_MERGE_BUCKET_PENDING.last()); } @Override double computeFinal(ListMap<String, Double> values) { return values.values().stream().flatMap(List::stream).anyMatch(v -> v > 0) ? 1 : 0; } }, queryRate { @Override public List<String> metricResponseNames() { return List.of(ContainerMetrics.QUERIES.rate(), SearchNodeMetrics.CONTENT_PROTON_DOCUMENTDB_MATCHING_QUERIES.rate()); } }, writeRate { @Override public List<String> metricResponseNames() { return List.of(ContainerMetrics.FEED_HTTP_REQUESTS.rate(), StorageMetrics.VDS_FILESTOR_ALLTHREADS_PUT_COUNT.rate(), StorageMetrics.VDS_FILESTOR_ALLTHREADS_REMOVE_COUNT.rate(), StorageMetrics.VDS_FILESTOR_ALLTHREADS_UPDATE_COUNT.rate()); } }; /** * The names of this metric as emitted from its source. * A map of the values of these names which were present in the response will * be provided to computeFinal to decide on a single value. */ public abstract List<String> metricResponseNames(); /** Computes the final metric value */ double computeFinal(ListMap<String, Double> values) { return values.values().stream().flatMap(List::stream).mapToDouble(v -> v).sum(); } public double from(ListMap<String, Double> metricValues) { ListMap<String, Double> values = new ListMap<>(metricValues); values.keySet().retainAll(metricResponseNames()); return computeFinal(values); } } }
class MetricsResponse { /** Node level metrics */ private final Collection<Pair<String, NodeMetricSnapshot>> nodeMetrics; /** * Cluster level metrics. * Must be aggregated at fetch time to avoid issues with nodes and nodes joining/leaving the cluster over time. */ private final Map<ClusterSpec.Id, ClusterMetricSnapshot> clusterMetrics = new HashMap<>(); /** Creates this from a metrics/V2 response */ public MetricsResponse(String response, NodeList applicationNodes) { this(SlimeUtils.jsonToSlime(response), applicationNodes); } public MetricsResponse(Collection<Pair<String, NodeMetricSnapshot>> metrics) { this.nodeMetrics = metrics; } private MetricsResponse(Slime response, NodeList applicationNodes) { nodeMetrics = new ArrayList<>(); Inspector root = response.get(); Inspector nodes = root.field("nodes"); nodes.traverse((ArrayTraverser)(__, node) -> consumeNode(node, applicationNodes)); } public Collection<Pair<String, NodeMetricSnapshot>> nodeMetrics() { return nodeMetrics; } public Map<ClusterSpec.Id, ClusterMetricSnapshot> clusterMetrics() { return clusterMetrics; } private void consumeNode(Inspector nodeObject, NodeList applicationNodes) { String hostname = nodeObject.field("hostname").asString(); Optional<Node> node = applicationNodes.node(hostname); if (node.isEmpty()) return; ListMap<String, Double> nodeValues = new ListMap<>(); Instant at = consumeNodeMetrics(nodeObject.field("node"), nodeValues); consumeServiceMetrics(nodeObject.field("services"), nodeValues); nodeMetrics.add(new Pair<>(hostname, new NodeMetricSnapshot(at, new Load(Metric.cpu.from(nodeValues), Metric.memory.from(nodeValues), Metric.disk.from(nodeValues)), (long)Metric.generation.from(nodeValues), Metric.inService.from(nodeValues) > 0, clusterIsStable(node.get(), applicationNodes, nodeValues), Metric.queryRate.from(nodeValues)))); var cluster = node.get().allocation().get().membership().cluster().id(); var metrics = clusterMetrics.getOrDefault(cluster, ClusterMetricSnapshot.empty(at)); 
metrics = metrics.withQueryRate(metrics.queryRate() + Metric.queryRate.from(nodeValues)); metrics = metrics.withWriteRate(metrics.queryRate() + Metric.writeRate.from(nodeValues)); clusterMetrics.put(cluster, metrics); } private Instant consumeNodeMetrics(Inspector nodeObject, ListMap<String, Double> nodeValues) { long timestampSecond = nodeObject.field("timestamp").asLong(); Instant at = Instant.ofEpochMilli(timestampSecond * 1000); nodeObject.field("metrics").traverse((ArrayTraverser) (__, item) -> consumeMetricsItem(item, nodeValues)); return at; } private void consumeServiceMetrics(Inspector servicesObject, ListMap<String, Double> nodeValues) { servicesObject.traverse((ArrayTraverser) (__, item) -> consumeServiceItem(item, nodeValues)); } private void consumeServiceItem(Inspector serviceObject, ListMap<String, Double> nodeValues) { serviceObject.field("metrics").traverse((ArrayTraverser) (__, item) -> consumeMetricsItem(item, nodeValues)); } private void consumeMetricsItem(Inspector item, ListMap<String, Double> values) { item.field("values").traverse((ObjectTraverser)(name, value) -> values.put(name, value.asDouble())); } public static MetricsResponse empty() { return new MetricsResponse(List.of()); } /** The metrics this can read */ private enum Metric { cpu { @Override public List<String> metricResponseNames() { return List.of(HostedNodeAdminMetrics.CPU_UTIL.baseName(), HostedNodeAdminMetrics.GPU_UTIL.baseName()); } @Override double computeFinal(ListMap<String, Double> values) { return values.values().stream().flatMap(List::stream).mapToDouble(v -> v).max().orElse(0) / 100; } }, memory { @Override public List<String> metricResponseNames() { return List.of(HostedNodeAdminMetrics.MEM_UTIL.baseName(), SearchNodeMetrics.CONTENT_PROTON_RESOURCE_USAGE_MEMORY.average(), HostedNodeAdminMetrics.GPU_MEM_USED.baseName(), HostedNodeAdminMetrics.GPU_MEM_TOTAL.baseName()); } @Override double computeFinal(ListMap<String, Double> values) { return Math.max(gpuMemUtil(values), 
cpuMemUtil(values)); } private double cpuMemUtil(ListMap<String, Double> values) { var valueList = values.get(SearchNodeMetrics.CONTENT_PROTON_RESOURCE_USAGE_MEMORY.average()); if ( ! valueList.isEmpty()) return valueList.get(0); valueList = values.get(HostedNodeAdminMetrics.MEM_UTIL.baseName()); if ( ! valueList.isEmpty()) return valueList.get(0) / 100; return 0; } private double gpuMemUtil(ListMap<String, Double> values) { var usedGpuMemory = values.get(HostedNodeAdminMetrics.GPU_MEM_USED.baseName()).stream().mapToDouble(v -> v).sum(); var totalGpuMemory = values.get(HostedNodeAdminMetrics.GPU_MEM_TOTAL.baseName()).stream().mapToDouble(v -> v).sum(); return totalGpuMemory > 0 ? usedGpuMemory / totalGpuMemory : 0; } }, disk { @Override public List<String> metricResponseNames() { return List.of(HostedNodeAdminMetrics.DISK_UTIL.baseName(), SearchNodeMetrics.CONTENT_PROTON_RESOURCE_USAGE_DISK.average()); } @Override double computeFinal(ListMap<String, Double> values) { var valueList = values.get(SearchNodeMetrics.CONTENT_PROTON_RESOURCE_USAGE_DISK.average()); if ( ! valueList.isEmpty()) return valueList.get(0); valueList = values.get(HostedNodeAdminMetrics.DISK_UTIL.baseName()); if ( ! valueList.isEmpty()) return valueList.get(0) / 100; return 0; } }, generation { @Override public List<String> metricResponseNames() { return List.of(APPLICATION_GENERATION.last(), SearchNodeMetrics.CONTENT_PROTON_CONFIG_GENERATION.last()); } @Override double computeFinal(ListMap<String, Double> values) { return values.values().stream().flatMap(List::stream).mapToDouble(v -> v).min().orElse(-1); } }, inService { @Override public List<String> metricResponseNames() { return List.of(IN_SERVICE.last()); } @Override double computeFinal(ListMap<String, Double> values) { return values.values().stream().flatMap(List::stream).anyMatch(v -> v == 0) ? 
0 : 1; } }, redistributing { @Override public List<String> metricResponseNames() { return List.of(DistributorMetrics.VDS_IDEALSTATE_MERGE_BUCKET_PENDING.last()); } @Override double computeFinal(ListMap<String, Double> values) { return values.values().stream().flatMap(List::stream).anyMatch(v -> v > 0) ? 1 : 0; } }, queryRate { @Override public List<String> metricResponseNames() { return List.of(ContainerMetrics.QUERIES.rate(), SearchNodeMetrics.CONTENT_PROTON_DOCUMENTDB_MATCHING_QUERIES.rate()); } }, writeRate { @Override public List<String> metricResponseNames() { return List.of(ContainerMetrics.FEED_HTTP_REQUESTS.rate(), StorageMetrics.VDS_FILESTOR_ALLTHREADS_PUT_COUNT.rate(), StorageMetrics.VDS_FILESTOR_ALLTHREADS_REMOVE_COUNT.rate(), StorageMetrics.VDS_FILESTOR_ALLTHREADS_UPDATE_COUNT.rate()); } }; /** * The names of this metric as emitted from its source. * A map of the values of these names which were present in the response will * be provided to computeFinal to decide on a single value. */ public abstract List<String> metricResponseNames(); /** Computes the final metric value */ double computeFinal(ListMap<String, Double> values) { return values.values().stream().flatMap(List::stream).mapToDouble(v -> v).sum(); } public double from(ListMap<String, Double> metricValues) { ListMap<String, Double> values = new ListMap<>(metricValues); values.keySet().retainAll(metricResponseNames()); return computeFinal(values); } } }
Good point!
private boolean clusterIsStable(Node node, NodeList applicationNodes, ListMap<String, Double> nodeValues) { if (Metric.redistributing.from(nodeValues) > 0) { return false; } ClusterSpec cluster = node.allocation().get().membership().cluster(); return applicationNodes.cluster(cluster.id()).retired().isEmpty(); }
if (Metric.redistributing.from(nodeValues) > 0) {
private boolean clusterIsStable(Node node, NodeList applicationNodes, ListMap<String, Double> nodeValues) { if (Metric.redistributing.from(nodeValues) > 0) { return false; } ClusterSpec cluster = node.allocation().get().membership().cluster(); return applicationNodes.cluster(cluster.id()).retired().isEmpty(); }
class MetricsResponse { /** Node level metrics */ private final Collection<Pair<String, NodeMetricSnapshot>> nodeMetrics; /** * Cluster level metrics. * Must be aggregated at fetch time to avoid issues with nodes and nodes joining/leaving the cluster over time. */ private final Map<ClusterSpec.Id, ClusterMetricSnapshot> clusterMetrics = new HashMap<>(); /** Creates this from a metrics/V2 response */ public MetricsResponse(String response, NodeList applicationNodes) { this(SlimeUtils.jsonToSlime(response), applicationNodes); } public MetricsResponse(Collection<Pair<String, NodeMetricSnapshot>> metrics) { this.nodeMetrics = metrics; } private MetricsResponse(Slime response, NodeList applicationNodes) { nodeMetrics = new ArrayList<>(); Inspector root = response.get(); Inspector nodes = root.field("nodes"); nodes.traverse((ArrayTraverser)(__, node) -> consumeNode(node, applicationNodes)); } public Collection<Pair<String, NodeMetricSnapshot>> nodeMetrics() { return nodeMetrics; } public Map<ClusterSpec.Id, ClusterMetricSnapshot> clusterMetrics() { return clusterMetrics; } private void consumeNode(Inspector nodeObject, NodeList applicationNodes) { String hostname = nodeObject.field("hostname").asString(); Optional<Node> node = applicationNodes.node(hostname); if (node.isEmpty()) return; ListMap<String, Double> nodeValues = new ListMap<>(); Instant at = consumeNodeMetrics(nodeObject.field("node"), nodeValues); consumeServiceMetrics(nodeObject.field("services"), nodeValues); nodeMetrics.add(new Pair<>(hostname, new NodeMetricSnapshot(at, new Load(Metric.cpu.from(nodeValues), Metric.memory.from(nodeValues), Metric.disk.from(nodeValues)), (long)Metric.generation.from(nodeValues), Metric.inService.from(nodeValues) > 0, clusterIsStable(node.get(), applicationNodes, nodeValues), Metric.queryRate.from(nodeValues)))); var cluster = node.get().allocation().get().membership().cluster().id(); var metrics = clusterMetrics.getOrDefault(cluster, ClusterMetricSnapshot.empty(at)); 
metrics = metrics.withQueryRate(metrics.queryRate() + Metric.queryRate.from(nodeValues)); metrics = metrics.withWriteRate(metrics.queryRate() + Metric.writeRate.from(nodeValues)); clusterMetrics.put(cluster, metrics); } private Instant consumeNodeMetrics(Inspector nodeObject, ListMap<String, Double> nodeValues) { long timestampSecond = nodeObject.field("timestamp").asLong(); Instant at = Instant.ofEpochMilli(timestampSecond * 1000); nodeObject.field("metrics").traverse((ArrayTraverser) (__, item) -> consumeMetricsItem(item, nodeValues)); return at; } private void consumeServiceMetrics(Inspector servicesObject, ListMap<String, Double> nodeValues) { servicesObject.traverse((ArrayTraverser) (__, item) -> consumeServiceItem(item, nodeValues)); } private void consumeServiceItem(Inspector serviceObject, ListMap<String, Double> nodeValues) { serviceObject.field("metrics").traverse((ArrayTraverser) (__, item) -> consumeMetricsItem(item, nodeValues)); } private void consumeMetricsItem(Inspector item, ListMap<String, Double> values) { item.field("values").traverse((ObjectTraverser)(name, value) -> values.put(name, value.asDouble())); } public static MetricsResponse empty() { return new MetricsResponse(List.of()); } /** The metrics this can read */ private enum Metric { cpu { @Override public List<String> metricResponseNames() { return List.of(HostedNodeAdminMetrics.CPU_UTIL.baseName(), HostedNodeAdminMetrics.GPU_UTIL.baseName()); } @Override double computeFinal(ListMap<String, Double> values) { return values.values().stream().flatMap(List::stream).mapToDouble(v -> v).max().orElse(0) / 100; } }, memory { @Override public List<String> metricResponseNames() { return List.of(HostedNodeAdminMetrics.MEM_UTIL.baseName(), SearchNodeMetrics.CONTENT_PROTON_RESOURCE_USAGE_MEMORY.average(), HostedNodeAdminMetrics.GPU_MEM_USED.baseName(), HostedNodeAdminMetrics.GPU_MEM_TOTAL.baseName()); } @Override double computeFinal(ListMap<String, Double> values) { return Math.max(gpuMemUtil(values), 
cpuMemUtil(values)); } private double cpuMemUtil(ListMap<String, Double> values) { var valueList = values.get(SearchNodeMetrics.CONTENT_PROTON_RESOURCE_USAGE_MEMORY.average()); if ( ! valueList.isEmpty()) return valueList.get(0); valueList = values.get(HostedNodeAdminMetrics.MEM_UTIL.baseName()); if ( ! valueList.isEmpty()) return valueList.get(0) / 100; return 0; } private double gpuMemUtil(ListMap<String, Double> values) { var usedGpuMemory = values.get(HostedNodeAdminMetrics.GPU_MEM_USED.baseName()).stream().mapToDouble(v -> v).sum(); var totalGpuMemory = values.get(HostedNodeAdminMetrics.GPU_MEM_TOTAL.baseName()).stream().mapToDouble(v -> v).sum(); return totalGpuMemory > 0 ? usedGpuMemory / totalGpuMemory : 0; } }, disk { @Override public List<String> metricResponseNames() { return List.of(HostedNodeAdminMetrics.DISK_UTIL.baseName(), SearchNodeMetrics.CONTENT_PROTON_RESOURCE_USAGE_DISK.average()); } @Override double computeFinal(ListMap<String, Double> values) { var valueList = values.get(SearchNodeMetrics.CONTENT_PROTON_RESOURCE_USAGE_DISK.average()); if ( ! valueList.isEmpty()) return valueList.get(0); valueList = values.get(HostedNodeAdminMetrics.DISK_UTIL.baseName()); if ( ! valueList.isEmpty()) return valueList.get(0) / 100; return 0; } }, generation { @Override public List<String> metricResponseNames() { return List.of(APPLICATION_GENERATION.last(), SearchNodeMetrics.CONTENT_PROTON_CONFIG_GENERATION.last()); } @Override double computeFinal(ListMap<String, Double> values) { return values.values().stream().flatMap(List::stream).mapToDouble(v -> v).min().orElse(-1); } }, inService { @Override public List<String> metricResponseNames() { return List.of(IN_SERVICE.last()); } @Override double computeFinal(ListMap<String, Double> values) { return values.values().stream().flatMap(List::stream).anyMatch(v -> v == 0) ? 
0 : 1; } }, redistributing { @Override public List<String> metricResponseNames() { return List.of(DistributorMetrics.VDS_IDEALSTATE_MERGE_BUCKET_PENDING.last()); } @Override double computeFinal(ListMap<String, Double> values) { return values.values().stream().flatMap(List::stream).anyMatch(v -> v > 0) ? 1 : 0; } }, queryRate { @Override public List<String> metricResponseNames() { return List.of(ContainerMetrics.QUERIES.rate(), SearchNodeMetrics.CONTENT_PROTON_DOCUMENTDB_MATCHING_QUERIES.rate()); } }, writeRate { @Override public List<String> metricResponseNames() { return List.of(ContainerMetrics.FEED_HTTP_REQUESTS.rate(), StorageMetrics.VDS_FILESTOR_ALLTHREADS_PUT_COUNT.rate(), StorageMetrics.VDS_FILESTOR_ALLTHREADS_REMOVE_COUNT.rate(), StorageMetrics.VDS_FILESTOR_ALLTHREADS_UPDATE_COUNT.rate()); } }; /** * The names of this metric as emitted from its source. * A map of the values of these names which were present in the response will * be provided to computeFinal to decide on a single value. */ public abstract List<String> metricResponseNames(); /** Computes the final metric value */ double computeFinal(ListMap<String, Double> values) { return values.values().stream().flatMap(List::stream).mapToDouble(v -> v).sum(); } public double from(ListMap<String, Double> metricValues) { ListMap<String, Double> values = new ListMap<>(metricValues); values.keySet().retainAll(metricResponseNames()); return computeFinal(values); } } }
class MetricsResponse { /** Node level metrics */ private final Collection<Pair<String, NodeMetricSnapshot>> nodeMetrics; /** * Cluster level metrics. * Must be aggregated at fetch time to avoid issues with nodes and nodes joining/leaving the cluster over time. */ private final Map<ClusterSpec.Id, ClusterMetricSnapshot> clusterMetrics = new HashMap<>(); /** Creates this from a metrics/V2 response */ public MetricsResponse(String response, NodeList applicationNodes) { this(SlimeUtils.jsonToSlime(response), applicationNodes); } public MetricsResponse(Collection<Pair<String, NodeMetricSnapshot>> metrics) { this.nodeMetrics = metrics; } private MetricsResponse(Slime response, NodeList applicationNodes) { nodeMetrics = new ArrayList<>(); Inspector root = response.get(); Inspector nodes = root.field("nodes"); nodes.traverse((ArrayTraverser)(__, node) -> consumeNode(node, applicationNodes)); } public Collection<Pair<String, NodeMetricSnapshot>> nodeMetrics() { return nodeMetrics; } public Map<ClusterSpec.Id, ClusterMetricSnapshot> clusterMetrics() { return clusterMetrics; } private void consumeNode(Inspector nodeObject, NodeList applicationNodes) { String hostname = nodeObject.field("hostname").asString(); Optional<Node> node = applicationNodes.node(hostname); if (node.isEmpty()) return; ListMap<String, Double> nodeValues = new ListMap<>(); Instant at = consumeNodeMetrics(nodeObject.field("node"), nodeValues); consumeServiceMetrics(nodeObject.field("services"), nodeValues); nodeMetrics.add(new Pair<>(hostname, new NodeMetricSnapshot(at, new Load(Metric.cpu.from(nodeValues), Metric.memory.from(nodeValues), Metric.disk.from(nodeValues)), (long)Metric.generation.from(nodeValues), Metric.inService.from(nodeValues) > 0, clusterIsStable(node.get(), applicationNodes, nodeValues), Metric.queryRate.from(nodeValues)))); var cluster = node.get().allocation().get().membership().cluster().id(); var metrics = clusterMetrics.getOrDefault(cluster, ClusterMetricSnapshot.empty(at)); 
metrics = metrics.withQueryRate(metrics.queryRate() + Metric.queryRate.from(nodeValues)); metrics = metrics.withWriteRate(metrics.queryRate() + Metric.writeRate.from(nodeValues)); clusterMetrics.put(cluster, metrics); } private Instant consumeNodeMetrics(Inspector nodeObject, ListMap<String, Double> nodeValues) { long timestampSecond = nodeObject.field("timestamp").asLong(); Instant at = Instant.ofEpochMilli(timestampSecond * 1000); nodeObject.field("metrics").traverse((ArrayTraverser) (__, item) -> consumeMetricsItem(item, nodeValues)); return at; } private void consumeServiceMetrics(Inspector servicesObject, ListMap<String, Double> nodeValues) { servicesObject.traverse((ArrayTraverser) (__, item) -> consumeServiceItem(item, nodeValues)); } private void consumeServiceItem(Inspector serviceObject, ListMap<String, Double> nodeValues) { serviceObject.field("metrics").traverse((ArrayTraverser) (__, item) -> consumeMetricsItem(item, nodeValues)); } private void consumeMetricsItem(Inspector item, ListMap<String, Double> values) { item.field("values").traverse((ObjectTraverser)(name, value) -> values.put(name, value.asDouble())); } public static MetricsResponse empty() { return new MetricsResponse(List.of()); } /** The metrics this can read */ private enum Metric { cpu { @Override public List<String> metricResponseNames() { return List.of(HostedNodeAdminMetrics.CPU_UTIL.baseName(), HostedNodeAdminMetrics.GPU_UTIL.baseName()); } @Override double computeFinal(ListMap<String, Double> values) { return values.values().stream().flatMap(List::stream).mapToDouble(v -> v).max().orElse(0) / 100; } }, memory { @Override public List<String> metricResponseNames() { return List.of(HostedNodeAdminMetrics.MEM_UTIL.baseName(), SearchNodeMetrics.CONTENT_PROTON_RESOURCE_USAGE_MEMORY.average(), HostedNodeAdminMetrics.GPU_MEM_USED.baseName(), HostedNodeAdminMetrics.GPU_MEM_TOTAL.baseName()); } @Override double computeFinal(ListMap<String, Double> values) { return Math.max(gpuMemUtil(values), 
cpuMemUtil(values)); } private double cpuMemUtil(ListMap<String, Double> values) { var valueList = values.get(SearchNodeMetrics.CONTENT_PROTON_RESOURCE_USAGE_MEMORY.average()); if ( ! valueList.isEmpty()) return valueList.get(0); valueList = values.get(HostedNodeAdminMetrics.MEM_UTIL.baseName()); if ( ! valueList.isEmpty()) return valueList.get(0) / 100; return 0; } private double gpuMemUtil(ListMap<String, Double> values) { var usedGpuMemory = values.get(HostedNodeAdminMetrics.GPU_MEM_USED.baseName()).stream().mapToDouble(v -> v).sum(); var totalGpuMemory = values.get(HostedNodeAdminMetrics.GPU_MEM_TOTAL.baseName()).stream().mapToDouble(v -> v).sum(); return totalGpuMemory > 0 ? usedGpuMemory / totalGpuMemory : 0; } }, disk { @Override public List<String> metricResponseNames() { return List.of(HostedNodeAdminMetrics.DISK_UTIL.baseName(), SearchNodeMetrics.CONTENT_PROTON_RESOURCE_USAGE_DISK.average()); } @Override double computeFinal(ListMap<String, Double> values) { var valueList = values.get(SearchNodeMetrics.CONTENT_PROTON_RESOURCE_USAGE_DISK.average()); if ( ! valueList.isEmpty()) return valueList.get(0); valueList = values.get(HostedNodeAdminMetrics.DISK_UTIL.baseName()); if ( ! valueList.isEmpty()) return valueList.get(0) / 100; return 0; } }, generation { @Override public List<String> metricResponseNames() { return List.of(APPLICATION_GENERATION.last(), SearchNodeMetrics.CONTENT_PROTON_CONFIG_GENERATION.last()); } @Override double computeFinal(ListMap<String, Double> values) { return values.values().stream().flatMap(List::stream).mapToDouble(v -> v).min().orElse(-1); } }, inService { @Override public List<String> metricResponseNames() { return List.of(IN_SERVICE.last()); } @Override double computeFinal(ListMap<String, Double> values) { return values.values().stream().flatMap(List::stream).anyMatch(v -> v == 0) ? 
0 : 1; } }, redistributing { @Override public List<String> metricResponseNames() { return List.of(DistributorMetrics.VDS_IDEALSTATE_MERGE_BUCKET_PENDING.last()); } @Override double computeFinal(ListMap<String, Double> values) { return values.values().stream().flatMap(List::stream).anyMatch(v -> v > 0) ? 1 : 0; } }, queryRate { @Override public List<String> metricResponseNames() { return List.of(ContainerMetrics.QUERIES.rate(), SearchNodeMetrics.CONTENT_PROTON_DOCUMENTDB_MATCHING_QUERIES.rate()); } }, writeRate { @Override public List<String> metricResponseNames() { return List.of(ContainerMetrics.FEED_HTTP_REQUESTS.rate(), StorageMetrics.VDS_FILESTOR_ALLTHREADS_PUT_COUNT.rate(), StorageMetrics.VDS_FILESTOR_ALLTHREADS_REMOVE_COUNT.rate(), StorageMetrics.VDS_FILESTOR_ALLTHREADS_UPDATE_COUNT.rate()); } }; /** * The names of this metric as emitted from its source. * A map of the values of these names which were present in the response will * be provided to computeFinal to decide on a single value. */ public abstract List<String> metricResponseNames(); /** Computes the final metric value */ double computeFinal(ListMap<String, Double> values) { return values.values().stream().flatMap(List::stream).mapToDouble(v -> v).sum(); } public double from(ListMap<String, Double> metricValues) { ListMap<String, Double> values = new ListMap<>(metricValues); values.keySet().retainAll(metricResponseNames()); return computeFinal(values); } } }
So only if both exist, can they be different.. ?
private Optional<String> modelChanged(OnnxModelCost.ModelInfo a, OnnxModelCost.ModelInfo b) { if (a.estimatedCost() != b.estimatedCost()) return Optional.of("estimated cost"); if (a.hash() != b.hash()) return Optional.of("model hash"); if (a.onnxModelOptions().isPresent() && b.onnxModelOptions().isPresent() && ! a.onnxModelOptions().get().equals(b.onnxModelOptions().get())) return Optional.of("model option(s)"); return Optional.empty(); }
if (a.onnxModelOptions().isPresent() && b.onnxModelOptions().isPresent()
private Optional<String> modelChanged(OnnxModelCost.ModelInfo a, OnnxModelCost.ModelInfo b) { if (a.estimatedCost() != b.estimatedCost()) return Optional.of("estimated cost"); if (a.hash() != b.hash()) return Optional.of("model hash"); if (a.onnxModelOptions().isPresent() && b.onnxModelOptions().isEmpty()) return Optional.of("model option(s)"); if (a.onnxModelOptions().isEmpty() && b.onnxModelOptions().isPresent()) return Optional.of("model option(s)"); if (a.onnxModelOptions().isPresent() && ! a.onnxModelOptions().get().equals(b.onnxModelOptions().get())) return Optional.of("model option(s)"); return Optional.empty(); }
class RestartOnDeployForOnnxModelChangesValidator implements ChangeValidator { private static final Logger log = Logger.getLogger(RestartOnDeployForOnnxModelChangesValidator.class.getName()); @Override public List<ConfigChangeAction> validate(VespaModel currentModel, VespaModel nextModel, DeployState deployState) { if ( ! deployState.featureFlags().restartOnDeployWhenOnnxModelChanges()) return List.of(); List<ConfigChangeAction> actions = new ArrayList<>(); for (var cluster : nextModel.getContainerClusters().values()) { var clusterInCurrentModel = currentModel.getContainerClusters().get(cluster.getName()); if (clusterInCurrentModel == null) continue; log.log(FINE, "Validating cluster '" + cluster.name() + "'"); var currentModels = clusterInCurrentModel.onnxModelCostCalculator().models(); var nextModels = cluster.onnxModelCostCalculator().models(); log.log(FINE, "current models=" + currentModels + ", next models=" + nextModels); for (var nextModelInfo : nextModels.values()) { if (!currentModels.containsKey(nextModelInfo.modelId())) continue; log.log(FINE, "Checking if " + nextModelInfo + " has changed"); modelChanged(nextModelInfo, currentModels.get(nextModelInfo.modelId())).ifPresent(change -> { String message = "Onnx model '%s' has changed (%s), need to restart services in container cluster '%s'" .formatted(nextModelInfo.modelId(), change, cluster.name()); cluster.onnxModelCostCalculator().setRestartOnDeploy(); actions.add(new VespaRestartAction(cluster.id(), message)); }); } } return actions; } }
class RestartOnDeployForOnnxModelChangesValidator implements ChangeValidator { private static final Logger log = Logger.getLogger(RestartOnDeployForOnnxModelChangesValidator.class.getName()); @Override public List<ConfigChangeAction> validate(VespaModel currentModel, VespaModel nextModel, DeployState deployState) { if ( ! deployState.featureFlags().restartOnDeployWhenOnnxModelChanges()) return List.of(); List<ConfigChangeAction> actions = new ArrayList<>(); for (var cluster : nextModel.getContainerClusters().values()) { var clusterInCurrentModel = currentModel.getContainerClusters().get(cluster.getName()); if (clusterInCurrentModel == null) continue; log.log(FINE, "Validating cluster '" + cluster.name() + "'"); var currentModels = clusterInCurrentModel.onnxModelCostCalculator().models(); var nextModels = cluster.onnxModelCostCalculator().models(); log.log(FINE, "current models=" + currentModels + ", next models=" + nextModels); for (var nextModelInfo : nextModels.values()) { if (!currentModels.containsKey(nextModelInfo.modelId())) continue; log.log(FINE, "Checking if " + nextModelInfo + " has changed"); modelChanged(nextModelInfo, currentModels.get(nextModelInfo.modelId())).ifPresent(change -> { String message = "Onnx model '%s' has changed (%s), need to restart services in container cluster '%s'" .formatted(nextModelInfo.modelId(), change, cluster.name()); cluster.onnxModelCostCalculator().setRestartOnDeploy(); actions.add(new VespaRestartAction(cluster.id(), message)); }); } } return actions; } }
We only care about them if they are both present, really. Not having options is legacy (apps on newer versions always have them, will be simplified when all apps have upgraded to a newer version). But I will fix it
private Optional<String> modelChanged(OnnxModelCost.ModelInfo a, OnnxModelCost.ModelInfo b) { if (a.estimatedCost() != b.estimatedCost()) return Optional.of("estimated cost"); if (a.hash() != b.hash()) return Optional.of("model hash"); if (a.onnxModelOptions().isPresent() && b.onnxModelOptions().isPresent() && ! a.onnxModelOptions().get().equals(b.onnxModelOptions().get())) return Optional.of("model option(s)"); return Optional.empty(); }
if (a.onnxModelOptions().isPresent() && b.onnxModelOptions().isPresent()
private Optional<String> modelChanged(OnnxModelCost.ModelInfo a, OnnxModelCost.ModelInfo b) { if (a.estimatedCost() != b.estimatedCost()) return Optional.of("estimated cost"); if (a.hash() != b.hash()) return Optional.of("model hash"); if (a.onnxModelOptions().isPresent() && b.onnxModelOptions().isEmpty()) return Optional.of("model option(s)"); if (a.onnxModelOptions().isEmpty() && b.onnxModelOptions().isPresent()) return Optional.of("model option(s)"); if (a.onnxModelOptions().isPresent() && ! a.onnxModelOptions().get().equals(b.onnxModelOptions().get())) return Optional.of("model option(s)"); return Optional.empty(); }
class RestartOnDeployForOnnxModelChangesValidator implements ChangeValidator { private static final Logger log = Logger.getLogger(RestartOnDeployForOnnxModelChangesValidator.class.getName()); @Override public List<ConfigChangeAction> validate(VespaModel currentModel, VespaModel nextModel, DeployState deployState) { if ( ! deployState.featureFlags().restartOnDeployWhenOnnxModelChanges()) return List.of(); List<ConfigChangeAction> actions = new ArrayList<>(); for (var cluster : nextModel.getContainerClusters().values()) { var clusterInCurrentModel = currentModel.getContainerClusters().get(cluster.getName()); if (clusterInCurrentModel == null) continue; log.log(FINE, "Validating cluster '" + cluster.name() + "'"); var currentModels = clusterInCurrentModel.onnxModelCostCalculator().models(); var nextModels = cluster.onnxModelCostCalculator().models(); log.log(FINE, "current models=" + currentModels + ", next models=" + nextModels); for (var nextModelInfo : nextModels.values()) { if (!currentModels.containsKey(nextModelInfo.modelId())) continue; log.log(FINE, "Checking if " + nextModelInfo + " has changed"); modelChanged(nextModelInfo, currentModels.get(nextModelInfo.modelId())).ifPresent(change -> { String message = "Onnx model '%s' has changed (%s), need to restart services in container cluster '%s'" .formatted(nextModelInfo.modelId(), change, cluster.name()); cluster.onnxModelCostCalculator().setRestartOnDeploy(); actions.add(new VespaRestartAction(cluster.id(), message)); }); } } return actions; } }
class RestartOnDeployForOnnxModelChangesValidator implements ChangeValidator { private static final Logger log = Logger.getLogger(RestartOnDeployForOnnxModelChangesValidator.class.getName()); @Override public List<ConfigChangeAction> validate(VespaModel currentModel, VespaModel nextModel, DeployState deployState) { if ( ! deployState.featureFlags().restartOnDeployWhenOnnxModelChanges()) return List.of(); List<ConfigChangeAction> actions = new ArrayList<>(); for (var cluster : nextModel.getContainerClusters().values()) { var clusterInCurrentModel = currentModel.getContainerClusters().get(cluster.getName()); if (clusterInCurrentModel == null) continue; log.log(FINE, "Validating cluster '" + cluster.name() + "'"); var currentModels = clusterInCurrentModel.onnxModelCostCalculator().models(); var nextModels = cluster.onnxModelCostCalculator().models(); log.log(FINE, "current models=" + currentModels + ", next models=" + nextModels); for (var nextModelInfo : nextModels.values()) { if (!currentModels.containsKey(nextModelInfo.modelId())) continue; log.log(FINE, "Checking if " + nextModelInfo + " has changed"); modelChanged(nextModelInfo, currentModels.get(nextModelInfo.modelId())).ifPresent(change -> { String message = "Onnx model '%s' has changed (%s), need to restart services in container cluster '%s'" .formatted(nextModelInfo.modelId(), change, cluster.name()); cluster.onnxModelCostCalculator().setRestartOnDeploy(); actions.add(new VespaRestartAction(cluster.id(), message)); }); } } return actions; } }
Indentation is off.
public record IndexCommand(String index, String command) { /** * Returns true if this is the null command (do nothing) */ public boolean isNull() { return command.isEmpty(); } public boolean equals(Object object) { if (!(object instanceof IndexCommand other)) { return false; } return other.index.equals(this.index) && other.command.equals(this.command); } public String toString() { return "index command " + command + " on index " + index; } }
if (!(object instanceof IndexCommand other)) {
public record IndexCommand(String index, String command) { /** * Returns true if this is the null command (do nothing) */ public boolean isNull() { return command.isEmpty(); } public boolean equals(Object object) { if (!(object instanceof IndexCommand other)) { return false; } return other.index.equals(this.index) && other.command.equals(this.command); } public String toString() { return "index command " + command + " on index " + index; } }
class IndexInfo extends Derived implements IndexInfoConfig.Producer { private static final String CMD_ATTRIBUTE = "attribute"; private static final String CMD_DEFAULT_POSITION = "default-position"; private static final String CMD_DYNTEASER = "dynteaser"; private static final String CMD_FULLURL = "fullurl"; private static final String CMD_HIGHLIGHT = "highlight"; private static final String CMD_INDEX = "index"; private static final String CMD_LOWERCASE = "lowercase"; private static final String CMD_NORMALIZE = "normalize"; private static final String CMD_STEM = "stem"; private static final String CMD_URLHOST = "urlhost"; private static final String CMD_WORD = "word"; private static final String CMD_PLAIN_TOKENS = "plain-tokens"; private static final String CMD_MULTIVALUE = "multivalue"; private static final String CMD_FAST_SEARCH = "fast-search"; private static final String CMD_PREDICATE = "predicate"; private static final String CMD_PREDICATE_BOUNDS = "predicate-bounds"; private static final String CMD_NUMERICAL = "numerical"; private static final String CMD_INTEGER = "integer"; private static final String CMD_STRING = "string"; private static final String CMD_PHRASE_SEGMENTING = "phrase-segmenting"; private final boolean isStreaming; private final Set<IndexCommand> commands = new java.util.LinkedHashSet<>(); private final Map<String, String> aliases = new java.util.LinkedHashMap<>(); private final Map<String, FieldSet> fieldSets; private Schema schema; public IndexInfo(Schema schema, boolean isStreaming) { this.isStreaming = isStreaming; this.fieldSets = schema.fieldSets().userFieldSets(); addIndexCommand("sddocname", CMD_INDEX); addIndexCommand("sddocname", CMD_WORD); derive(schema); } @Override protected void derive(Schema schema) { super.derive(schema); this.schema = schema; for (FieldSet fs : fieldSets.values()) { for (String fieldName : fs.getFieldNames()) { fs.fields().add(schema.getField(fieldName)); } } for (Index index : schema.getExplicitIndices()) { 
derive(index, schema); } for (SummaryField summaryField : schema.getUniqueNamedSummaryFields().values()) { if (summaryField.getTransform().isTeaser()) { addIndexCommand(summaryField.getName(), CMD_DYNTEASER); } if (summaryField.getTransform().isBolded()) { addIndexCommand(summaryField.getName(), CMD_HIGHLIGHT); } var sourceField = schema.getField(summaryField.getSourceField()); if (sourceField != null && sourceField.getMatching().getType().equals(MatchType.GRAM)) { addIndexCommand(summaryField.getName(), "ngram " + (sourceField.getMatching().getGramSize().orElse(NGramMatch.DEFAULT_GRAM_SIZE))); } } } private static boolean isPositionField(ImmutableSDField field) { return GeoPos.isAnyPos(field); } @Override protected void derive(ImmutableSDField field, Schema schema) { derive(field, schema, false); } protected void derive(ImmutableSDField field, Schema schema, boolean inPosition) { if (field.getDataType().equals(DataType.PREDICATE)) { addIndexCommand(field, CMD_PREDICATE); Index index = field.getIndex(field.getName()); if (index != null) { BooleanIndexDefinition options = index.getBooleanIndexDefiniton(); if (options.hasLowerBound() || options.hasUpperBound()) { addIndexCommand(field.getName(), CMD_PREDICATE_BOUNDS + " [" + (options.hasLowerBound() ? Long.toString(options.getLowerBound()) : "") + ".." + (options.hasUpperBound() ? 
Long.toString(options.getUpperBound()) : "") + "]"); } } } for (Map.Entry<String, String> e : field.getAliasToName().entrySet()) { String alias = e.getKey(); String name = e.getValue(); addIndexAlias(alias, name); } boolean isPosition = isPositionField(field); if (field.usesStructOrMap()) { for (ImmutableSDField structField : field.getStructFields()) { derive(structField, schema, isPosition); } } if (isPosition) { addIndexCommand(field.getName(), CMD_DEFAULT_POSITION); } for (var index : field.getIndices().values()) { addIndexCommand(index.getName(), CMD_INDEX); } if (needLowerCase(field)) { addIndexCommand(field, CMD_LOWERCASE); } if (field.getDataType().isMultivalue()) { addIndexCommand(field, CMD_MULTIVALUE); } Attribute attribute = field.getAttribute(); if ((field.doesAttributing() || (attribute != null && !inPosition)) && !field.doesIndexing()) { addIndexCommand(field.getName(), CMD_ATTRIBUTE); if (attribute != null && attribute.isFastSearch()) addIndexCommand(field.getName(), CMD_FAST_SEARCH); } else if (field.doesIndexing()) { if (stemSomehow(field, schema)) { addIndexCommand(field, stemCmd(field, schema), new StemmingOverrider(this, schema)); } if (normalizeAccents(field)) { addIndexCommand(field, CMD_NORMALIZE); } if (field.getMatching() == null || field.getMatching().getType().equals(MatchType.TEXT)) { addIndexCommand(field, CMD_PLAIN_TOKENS); } } if (isUriField(field)) { addUriIndexCommands(field); } if (field.getDataType().getPrimitiveType() instanceof NumericDataType) { addIndexCommand(field, CMD_NUMERICAL); if (isTypeOrNested(field, DataType.INT) || isTypeOrNested(field, DataType.LONG) || isTypeOrNested(field, DataType.BYTE)) { addIndexCommand(field, CMD_INTEGER); } } if (isTypeOrNested(field, DataType.STRING)) { addIndexCommand(field, CMD_STRING); } for (String command : field.getQueryCommands()) { addIndexCommand(field, command); } } private static boolean isAnyChildString(DataType dataType) { PrimitiveDataType primitive = 
dataType.getPrimitiveType(); if (primitive == PrimitiveDataType.STRING) return true; if (primitive != null) return false; if (dataType instanceof StructuredDataType structured) { for (Field field : structured.getFields()) { if (isAnyChildString(field.getDataType())) return true; } } else if (dataType instanceof MapDataType mapType) { return isAnyChildString(mapType.getKeyType()) || isAnyChildString(mapType.getValueType()); } return false; } private static boolean needLowerCase(ImmutableSDField field) { return ( field.doesIndexing() && field.getMatching().getCase() != Case.CASED) || field.doesLowerCasing() || ((field.doesAttributing() || (field.getAttribute() != null)) && isAnyChildString(field.getDataType()) && field.getMatching().getCase().equals(Case.UNCASED)); } static String stemCmd(ImmutableSDField field, Schema schema) { return CMD_STEM + ":" + field.getStemming(schema).toStemMode(); } private boolean stemSomehow(ImmutableSDField field, Schema schema) { if (field.getStemming(schema).equals(Stemming.NONE)) return false; return isTypeOrNested(field, DataType.STRING); } private boolean normalizeAccents(ImmutableSDField field) { return !isStreaming && field.getNormalizing().doRemoveAccents() && isTypeOrNested(field, DataType.STRING); } private boolean isTypeOrNested(ImmutableSDField field, DataType type) { return field.getDataType().equals(type) || field.getDataType().equals(DataType.getArray(type)) || field.getDataType().equals(DataType.getWeightedSet(type)); } private boolean isUriField(ImmutableSDField field) { DataType fieldType = field.getDataType(); if (DataType.URI.equals(fieldType)) { return true; } if (fieldType instanceof CollectionDataType && DataType.URI.equals(((CollectionDataType)fieldType).getNestedType())) { return true; } return false; } private void addUriIndexCommands(ImmutableSDField field) { String fieldName = field.getName(); addIndexCommand(fieldName, CMD_FULLURL); addIndexCommand(fieldName, CMD_LOWERCASE); addIndexCommand(fieldName + "." 
+ fieldName, CMD_FULLURL); addIndexCommand(fieldName + "." + fieldName, CMD_LOWERCASE); addIndexCommand(fieldName + ".path", CMD_FULLURL); addIndexCommand(fieldName + ".path", CMD_LOWERCASE); addIndexCommand(fieldName + ".query", CMD_FULLURL); addIndexCommand(fieldName + ".query", CMD_LOWERCASE); addIndexCommand(fieldName + ".hostname", CMD_URLHOST); addIndexCommand(fieldName + ".hostname", CMD_LOWERCASE); Index index = field.getIndex("hostname"); if (index != null) { addIndexCommand(index, CMD_URLHOST); } } /** * Sets a command for all indices of a field */ private void addIndexCommand(Index index, String command) { addIndexCommand(index.getName(), command); } /** * Sets a command for all indices of a field */ private void addIndexCommand(ImmutableSDField field, String command) { addIndexCommand(field, command, null); } /** * Sets a command for all indices of a field */ private void addIndexCommand(ImmutableSDField field, String command, IndexOverrider overrider) { if (overrider == null || !overrider.override(field.getName(), command, field)) { addIndexCommand(field.getName(), command); } } private void addIndexCommand(String indexName, String command) { commands.add(new IndexCommand(indexName, command)); } private static void addIndexCommand(IndexInfoConfig.Indexinfo.Builder iiB, String indexName, String command) { iiB.command(new IndexInfoConfig.Indexinfo.Command.Builder().indexname(indexName).command(command)); } private void addIndexAlias(String alias, String indexName) { aliases.put(alias, indexName); } /** * Returns whether a particular command is prsent in this index info */ public boolean hasCommand(String indexName, String command) { return commands.contains(new IndexCommand(indexName, command)); } private boolean notInCommands(String index) { for (IndexCommand command : commands) { if (command.index().equals(index)) { return false; } } return true; } @Override public void getConfig(IndexInfoConfig.Builder builder) { IndexInfoConfig.Indexinfo.Builder iiB 
= new IndexInfoConfig.Indexinfo.Builder(); iiB.name(getName()); for (IndexCommand command : commands) { iiB.command( new IndexInfoConfig.Indexinfo.Command.Builder() .indexname(command.index()) .command(command.command())); } for (FieldSet fieldSet : fieldSets.values()) { if (notInCommands(fieldSet.getName())) { addFieldSetCommands(iiB, fieldSet); } } for (Map.Entry<String, String> e : aliases.entrySet()) { iiB.alias( new IndexInfoConfig.Indexinfo.Alias.Builder() .alias(e.getKey()) .indexname(e.getValue())); } builder.indexinfo(iiB); } private void addFieldSetCommands(IndexInfoConfig.Indexinfo.Builder iiB, FieldSet fieldSet) { for (String qc : fieldSet.queryCommands()) iiB.command(new IndexInfoConfig.Indexinfo.Command.Builder().indexname(fieldSet.getName()).command(qc)); boolean anyIndexing = false; boolean anyAttributing = false; boolean anyLowerCasing = false; boolean anyStemming = false; boolean anyNormalizing = false; boolean anyString = false; boolean anyInteger = false; String phraseSegmentingCommand = null; String stemmingCommand = null; Matching fieldSetMatching = fieldSet.getMatching(); for (ImmutableSDField field : fieldSet.fields()) { if (field.doesIndexing()) { anyIndexing = true; } if (field.doesAttributing()) { anyAttributing = true; } if (needLowerCase(field)) { anyLowerCasing = true; } if (stemming(field)) { anyStemming = true; stemmingCommand = CMD_STEM + ":" + getEffectiveStemming(field).toStemMode(); } if (normalizeAccents(field)) { anyNormalizing = true; } if (isTypeOrNested(field, DataType.STRING)) { anyString = true; } if (fieldSetMatching == null && field.getMatching().getType() != Matching.defaultType) { fieldSetMatching = field.getMatching(); } Optional<String> explicitPhraseSegmentingCommand = field.getQueryCommands().stream().filter(c -> c.startsWith(CMD_PHRASE_SEGMENTING)).findFirst(); if (explicitPhraseSegmentingCommand.isPresent()) { phraseSegmentingCommand = explicitPhraseSegmentingCommand.get(); } if (isTypeOrNested(field, 
DataType.INT) || isTypeOrNested(field, DataType.LONG) || isTypeOrNested(field, DataType.BYTE)) { anyInteger = true; } } if (anyIndexing && anyAttributing && fieldSet.getMatching() == null) { fieldSetMatching = new Matching(); } if (anyLowerCasing) { iiB.command( new IndexInfoConfig.Indexinfo.Command.Builder() .indexname(fieldSet.getName()) .command(CMD_LOWERCASE)); } if (hasMultiValueField(fieldSet)) { iiB.command( new IndexInfoConfig.Indexinfo.Command.Builder() .indexname(fieldSet.getName()) .command(CMD_MULTIVALUE)); } if (anyIndexing) { iiB.command( new IndexInfoConfig.Indexinfo.Command.Builder() .indexname(fieldSet.getName()) .command(CMD_INDEX)); if ( ! isExactMatch(fieldSetMatching)) { if (fieldSetMatching == null || fieldSetMatching.getType().equals(MatchType.TEXT)) { iiB.command( new IndexInfoConfig.Indexinfo.Command.Builder() .indexname(fieldSet.getName()) .command(CMD_PLAIN_TOKENS)); } if (anyStemming) { iiB.command( new IndexInfoConfig.Indexinfo.Command.Builder() .indexname(fieldSet.getName()) .command(stemmingCommand)); } if (anyNormalizing) iiB.command( new IndexInfoConfig.Indexinfo.Command.Builder() .indexname(fieldSet.getName()) .command(CMD_NORMALIZE)); if (phraseSegmentingCommand != null) iiB.command( new IndexInfoConfig.Indexinfo.Command.Builder() .indexname(fieldSet.getName()) .command(phraseSegmentingCommand)); } } else { iiB .command( new IndexInfoConfig.Indexinfo.Command.Builder() .indexname(fieldSet.getName()) .command(CMD_ATTRIBUTE)) .command( new IndexInfoConfig.Indexinfo.Command.Builder() .indexname(fieldSet.getName()) .command(CMD_INDEX)); } if (anyString) { addIndexCommand(iiB, fieldSet.getName(), CMD_STRING); } if (anyInteger) { addIndexCommand(iiB, fieldSet.getName(), CMD_INTEGER); } if (fieldSetMatching != null) { if (fieldSetMatching.getType().equals(MatchType.EXACT)) { String term = fieldSetMatching.getExactMatchTerminator(); if (term==null) term=ExactMatch.DEFAULT_EXACT_TERMINATOR; iiB.command( new 
IndexInfoConfig.Indexinfo.Command.Builder() .indexname(fieldSet.getName()) .command("exact "+term)); } else if (fieldSetMatching.getType().equals(MatchType.WORD)) { iiB.command( new IndexInfoConfig.Indexinfo.Command.Builder() .indexname(fieldSet.getName()) .command(CMD_WORD)); } else if (fieldSetMatching.getType().equals(MatchType.GRAM)) { iiB.command( new IndexInfoConfig.Indexinfo.Command.Builder() .indexname(fieldSet.getName()) .command("ngram " + fieldSetMatching.getGramSize().orElse(NGramMatch.DEFAULT_GRAM_SIZE))); } else if (fieldSetMatching.getType().equals(MatchType.TEXT)) { } } } private boolean hasMultiValueField(FieldSet fieldSet) { for (ImmutableSDField field : fieldSet.fields()) { if (field.getDataType().isMultivalue()) return true; } return false; } private Stemming getEffectiveStemming(ImmutableSDField field) { Stemming active = field.getStemming(schema); if (field.getIndex(field.getName()) != null) { if (field.getIndex(field.getName()).getStemming()!=null) { active = field.getIndex(field.getName()).getStemming(); } } if (active != null) { return active; } return Stemming.BEST; } private boolean stemming(ImmutableSDField field) { if (field.getStemming() != null) { return !field.getStemming().equals(Stemming.NONE); } if (schema.getStemming() == Stemming.NONE) return false; if (field.isImportedField()) return false; if (field.getIndex(field.getName())==null) return true; if (field.getIndex(field.getName()).getStemming()==null) return true; return !(field.getIndex(field.getName()).getStemming().equals(Stemming.NONE)); } private boolean isExactMatch(Matching m) { if (m == null) return false; if (m.getType().equals(MatchType.EXACT)) return true; if (m.getType().equals(MatchType.WORD)) return true; return false; } @Override protected String getDerivedName() { return "index-info"; } /** * An index command. Null commands are also represented, to detect consistency issues. This is an (immutable) value * object. 
*/ /** * A command which may override the command setting of a field for a particular index */ private static abstract class IndexOverrider { protected final IndexInfo owner; public IndexOverrider(IndexInfo owner) { this.owner = owner; } /** * Override the setting of this index for this field, returns true if overriden, false if this index should be * set according to the field */ public abstract boolean override(String indexName, String command, ImmutableSDField field); } private static class StemmingOverrider extends IndexOverrider { private final Schema schema; public StemmingOverrider(IndexInfo owner, Schema schema) { super(owner); this.schema = schema; } public boolean override(String indexName, String command, ImmutableSDField field) { if (schema == null) { return false; } Index index = schema.getIndex(indexName); if (index == null) { return false; } Stemming indexStemming = index.getStemming(); if (indexStemming == null) { return false; } if (Stemming.NONE.equals(indexStemming)) { } else { owner.addIndexCommand(indexName, CMD_STEM + ":" + indexStemming.toStemMode()); } return true; } } }
class IndexInfo extends Derived implements IndexInfoConfig.Producer { private static final String CMD_ATTRIBUTE = "attribute"; private static final String CMD_DEFAULT_POSITION = "default-position"; private static final String CMD_DYNTEASER = "dynteaser"; private static final String CMD_FULLURL = "fullurl"; private static final String CMD_HIGHLIGHT = "highlight"; private static final String CMD_INDEX = "index"; private static final String CMD_LOWERCASE = "lowercase"; private static final String CMD_NORMALIZE = "normalize"; private static final String CMD_STEM = "stem"; private static final String CMD_URLHOST = "urlhost"; private static final String CMD_WORD = "word"; private static final String CMD_PLAIN_TOKENS = "plain-tokens"; private static final String CMD_MULTIVALUE = "multivalue"; private static final String CMD_FAST_SEARCH = "fast-search"; private static final String CMD_PREDICATE = "predicate"; private static final String CMD_PREDICATE_BOUNDS = "predicate-bounds"; private static final String CMD_NUMERICAL = "numerical"; private static final String CMD_INTEGER = "integer"; private static final String CMD_STRING = "string"; private static final String CMD_PHRASE_SEGMENTING = "phrase-segmenting"; private final boolean isStreaming; private final Set<IndexCommand> commands = new java.util.LinkedHashSet<>(); private final Map<String, String> aliases = new java.util.LinkedHashMap<>(); private final Map<String, FieldSet> fieldSets; private Schema schema; public IndexInfo(Schema schema, boolean isStreaming) { this.isStreaming = isStreaming; this.fieldSets = schema.fieldSets().userFieldSets(); addIndexCommand("sddocname", CMD_INDEX); addIndexCommand("sddocname", CMD_WORD); derive(schema); } @Override protected void derive(Schema schema) { super.derive(schema); this.schema = schema; for (FieldSet fs : fieldSets.values()) { for (String fieldName : fs.getFieldNames()) { fs.fields().add(schema.getField(fieldName)); } } for (Index index : schema.getExplicitIndices()) { 
derive(index, schema); } for (SummaryField summaryField : schema.getUniqueNamedSummaryFields().values()) { if (summaryField.getTransform().isTeaser()) { addIndexCommand(summaryField.getName(), CMD_DYNTEASER); } if (summaryField.getTransform().isBolded()) { addIndexCommand(summaryField.getName(), CMD_HIGHLIGHT); } var sourceField = schema.getField(summaryField.getSourceField()); if (sourceField != null && sourceField.getMatching().getType().equals(MatchType.GRAM)) { addIndexCommand(summaryField.getName(), "ngram " + (sourceField.getMatching().getGramSize().orElse(NGramMatch.DEFAULT_GRAM_SIZE))); } } } private static boolean isPositionField(ImmutableSDField field) { return GeoPos.isAnyPos(field); } @Override protected void derive(ImmutableSDField field, Schema schema) { derive(field, schema, false); } protected void derive(ImmutableSDField field, Schema schema, boolean inPosition) { if (field.getDataType().equals(DataType.PREDICATE)) { addIndexCommand(field, CMD_PREDICATE); Index index = field.getIndex(field.getName()); if (index != null) { BooleanIndexDefinition options = index.getBooleanIndexDefiniton(); if (options.hasLowerBound() || options.hasUpperBound()) { addIndexCommand(field.getName(), CMD_PREDICATE_BOUNDS + " [" + (options.hasLowerBound() ? Long.toString(options.getLowerBound()) : "") + ".." + (options.hasUpperBound() ? 
Long.toString(options.getUpperBound()) : "") + "]"); } } } for (Map.Entry<String, String> e : field.getAliasToName().entrySet()) { String alias = e.getKey(); String name = e.getValue(); addIndexAlias(alias, name); } boolean isPosition = isPositionField(field); if (field.usesStructOrMap()) { for (ImmutableSDField structField : field.getStructFields()) { derive(structField, schema, isPosition); } } if (isPosition) { addIndexCommand(field.getName(), CMD_DEFAULT_POSITION); } for (var index : field.getIndices().values()) { addIndexCommand(index.getName(), CMD_INDEX); } if (needLowerCase(field)) { addIndexCommand(field, CMD_LOWERCASE); } if (field.getDataType().isMultivalue()) { addIndexCommand(field, CMD_MULTIVALUE); } Attribute attribute = field.getAttribute(); if ((field.doesAttributing() || (attribute != null && !inPosition)) && !field.doesIndexing()) { addIndexCommand(field.getName(), CMD_ATTRIBUTE); if (attribute != null && attribute.isFastSearch()) addIndexCommand(field.getName(), CMD_FAST_SEARCH); } else if (field.doesIndexing()) { if (stemSomehow(field, schema)) { addIndexCommand(field, stemCmd(field, schema), new StemmingOverrider(this, schema)); } if (normalizeAccents(field)) { addIndexCommand(field, CMD_NORMALIZE); } if (field.getMatching() == null || field.getMatching().getType().equals(MatchType.TEXT)) { addIndexCommand(field, CMD_PLAIN_TOKENS); } } if (isUriField(field)) { addUriIndexCommands(field); } if (field.getDataType().getPrimitiveType() instanceof NumericDataType) { addIndexCommand(field, CMD_NUMERICAL); if (isTypeOrNested(field, DataType.INT) || isTypeOrNested(field, DataType.LONG) || isTypeOrNested(field, DataType.BYTE)) { addIndexCommand(field, CMD_INTEGER); } } if (isTypeOrNested(field, DataType.STRING)) { addIndexCommand(field, CMD_STRING); } for (String command : field.getQueryCommands()) { addIndexCommand(field, command); } } private static boolean isAnyChildString(DataType dataType) { PrimitiveDataType primitive = 
dataType.getPrimitiveType(); if (primitive == PrimitiveDataType.STRING) return true; if (primitive != null) return false; if (dataType instanceof StructuredDataType structured) { for (Field field : structured.getFields()) { if (isAnyChildString(field.getDataType())) return true; } } else if (dataType instanceof MapDataType mapType) { return isAnyChildString(mapType.getKeyType()) || isAnyChildString(mapType.getValueType()); } return false; } private static boolean needLowerCase(ImmutableSDField field) { return ( field.doesIndexing() && field.getMatching().getCase() != Case.CASED) || field.doesLowerCasing() || ((field.doesAttributing() || (field.getAttribute() != null)) && isAnyChildString(field.getDataType()) && field.getMatching().getCase().equals(Case.UNCASED)); } static String stemCmd(ImmutableSDField field, Schema schema) { return CMD_STEM + ":" + field.getStemming(schema).toStemMode(); } private boolean stemSomehow(ImmutableSDField field, Schema schema) { if (field.getStemming(schema).equals(Stemming.NONE)) return false; return isTypeOrNested(field, DataType.STRING); } private boolean normalizeAccents(ImmutableSDField field) { return !isStreaming && field.getNormalizing().doRemoveAccents() && isTypeOrNested(field, DataType.STRING); } private boolean isTypeOrNested(ImmutableSDField field, DataType type) { return field.getDataType().equals(type) || field.getDataType().equals(DataType.getArray(type)) || field.getDataType().equals(DataType.getWeightedSet(type)); } private boolean isUriField(ImmutableSDField field) { DataType fieldType = field.getDataType(); if (DataType.URI.equals(fieldType)) { return true; } return (fieldType instanceof CollectionDataType collectionFieldType) && DataType.URI.equals(collectionFieldType.getNestedType()); } private void addUriIndexCommands(ImmutableSDField field) { String fieldName = field.getName(); addIndexCommand(fieldName, CMD_FULLURL); addIndexCommand(fieldName, CMD_LOWERCASE); addIndexCommand(fieldName + "." 
+ fieldName, CMD_FULLURL); addIndexCommand(fieldName + "." + fieldName, CMD_LOWERCASE); addIndexCommand(fieldName + ".path", CMD_FULLURL); addIndexCommand(fieldName + ".path", CMD_LOWERCASE); addIndexCommand(fieldName + ".query", CMD_FULLURL); addIndexCommand(fieldName + ".query", CMD_LOWERCASE); addIndexCommand(fieldName + ".hostname", CMD_URLHOST); addIndexCommand(fieldName + ".hostname", CMD_LOWERCASE); Index index = field.getIndex("hostname"); if (index != null) { addIndexCommand(index, CMD_URLHOST); } } /** * Sets a command for all indices of a field */ private void addIndexCommand(Index index, String command) { addIndexCommand(index.getName(), command); } /** * Sets a command for all indices of a field */ private void addIndexCommand(ImmutableSDField field, String command) { addIndexCommand(field, command, null); } /** * Sets a command for all indices of a field */ private void addIndexCommand(ImmutableSDField field, String command, IndexOverrider overrider) { if (overrider == null || !overrider.override(field.getName(), command, field)) { addIndexCommand(field.getName(), command); } } private void addIndexCommand(String indexName, String command) { commands.add(new IndexCommand(indexName, command)); } private static void addIndexCommand(IndexInfoConfig.Indexinfo.Builder iiB, String indexName, String command) { iiB.command(new IndexInfoConfig.Indexinfo.Command.Builder().indexname(indexName).command(command)); } private void addIndexAlias(String alias, String indexName) { aliases.put(alias, indexName); } /** * Returns whether a particular command is prsent in this index info */ public boolean hasCommand(String indexName, String command) { return commands.contains(new IndexCommand(indexName, command)); } private boolean notInCommands(String index) { for (IndexCommand command : commands) { if (command.index().equals(index)) { return false; } } return true; } @Override public void getConfig(IndexInfoConfig.Builder builder) { IndexInfoConfig.Indexinfo.Builder iiB 
= new IndexInfoConfig.Indexinfo.Builder(); iiB.name(getName()); for (IndexCommand command : commands) { addIndexCommand(iiB, command.index(), command.command()); } for (FieldSet fieldSet : fieldSets.values()) { if (notInCommands(fieldSet.getName())) { addFieldSetCommands(iiB, fieldSet); } } for (Map.Entry<String, String> e : aliases.entrySet()) { iiB.alias(new IndexInfoConfig.Indexinfo.Alias.Builder().alias(e.getKey()).indexname(e.getValue())); } builder.indexinfo(iiB); } private void addFieldSetCommands(IndexInfoConfig.Indexinfo.Builder iiB, FieldSet fieldSet) { for (String qc : fieldSet.queryCommands()) { addIndexCommand(iiB, fieldSet.getName(), qc); } boolean anyIndexing = false; boolean anyAttributing = false; boolean anyLowerCasing = false; boolean anyStemming = false; boolean anyNormalizing = false; boolean anyString = false; boolean anyInteger = false; String phraseSegmentingCommand = null; String stemmingCommand = null; Matching fieldSetMatching = fieldSet.getMatching(); for (ImmutableSDField field : fieldSet.fields()) { if (field.doesIndexing()) { anyIndexing = true; } if (field.doesAttributing()) { anyAttributing = true; } if (needLowerCase(field)) { anyLowerCasing = true; } if (stemming(field)) { anyStemming = true; stemmingCommand = CMD_STEM + ":" + getEffectiveStemming(field).toStemMode(); } if (normalizeAccents(field)) { anyNormalizing = true; } if (isTypeOrNested(field, DataType.STRING)) { anyString = true; } if (fieldSetMatching == null && field.getMatching().getType() != Matching.defaultType) { fieldSetMatching = field.getMatching(); } Optional<String> explicitPhraseSegmentingCommand = field.getQueryCommands().stream().filter(c -> c.startsWith(CMD_PHRASE_SEGMENTING)).findFirst(); if (explicitPhraseSegmentingCommand.isPresent()) { phraseSegmentingCommand = explicitPhraseSegmentingCommand.get(); } if (isTypeOrNested(field, DataType.INT) || isTypeOrNested(field, DataType.LONG) || isTypeOrNested(field, DataType.BYTE)) { anyInteger = true; } } if 
(anyIndexing && anyAttributing && fieldSet.getMatching() == null) { fieldSetMatching = new Matching(); } if (anyLowerCasing) { addIndexCommand(iiB, fieldSet.getName(), CMD_LOWERCASE); } if (hasMultiValueField(fieldSet)) { addIndexCommand(iiB, fieldSet.getName(), CMD_MULTIVALUE); } if (anyIndexing) { addIndexCommand(iiB, fieldSet.getName(), CMD_INDEX); if ( ! isExactMatch(fieldSetMatching)) { if (fieldSetMatching == null || fieldSetMatching.getType().equals(MatchType.TEXT)) { addIndexCommand(iiB, fieldSet.getName(), CMD_PLAIN_TOKENS); } if (anyStemming) { addIndexCommand(iiB, fieldSet.getName(), stemmingCommand); } if (anyNormalizing) addIndexCommand(iiB, fieldSet.getName(), CMD_NORMALIZE); if (phraseSegmentingCommand != null) addIndexCommand(iiB, fieldSet.getName(), phraseSegmentingCommand); } } else { addIndexCommand(iiB, fieldSet.getName(), CMD_ATTRIBUTE); addIndexCommand(iiB, fieldSet.getName(), CMD_INDEX); } if (anyString) { addIndexCommand(iiB, fieldSet.getName(), CMD_STRING); } if (anyInteger) { addIndexCommand(iiB, fieldSet.getName(), CMD_INTEGER); } if (fieldSetMatching != null) { if (fieldSetMatching.getType().equals(MatchType.EXACT)) { String term = fieldSetMatching.getExactMatchTerminator(); if (term==null) term=ExactMatch.DEFAULT_EXACT_TERMINATOR; addIndexCommand(iiB, fieldSet.getName(), "exact "+term); } else if (fieldSetMatching.getType().equals(MatchType.WORD)) { addIndexCommand(iiB, fieldSet.getName(), CMD_WORD); } else if (fieldSetMatching.getType().equals(MatchType.GRAM)) { addIndexCommand(iiB, fieldSet.getName(), "ngram " + fieldSetMatching.getGramSize().orElse(NGramMatch.DEFAULT_GRAM_SIZE)); } else if (fieldSetMatching.getType().equals(MatchType.TEXT)) { } } } private boolean hasMultiValueField(FieldSet fieldSet) { for (ImmutableSDField field : fieldSet.fields()) { if (field.getDataType().isMultivalue()) return true; } return false; } private Stemming getEffectiveStemming(ImmutableSDField field) { Stemming active = field.getStemming(schema); if 
(field.getIndex(field.getName()) != null) { if (field.getIndex(field.getName()).getStemming()!=null) { active = field.getIndex(field.getName()).getStemming(); } } return Objects.requireNonNullElse(active, Stemming.BEST); } private boolean stemming(ImmutableSDField field) { if (field.getStemming() != null) { return !field.getStemming().equals(Stemming.NONE); } if (schema.getStemming() == Stemming.NONE) return false; if (field.isImportedField()) return false; if (field.getIndex(field.getName())==null) return true; if (field.getIndex(field.getName()).getStemming()==null) return true; return !(field.getIndex(field.getName()).getStemming().equals(Stemming.NONE)); } private boolean isExactMatch(Matching m) { if (m == null) return false; return m.getType().equals(MatchType.EXACT) || m.getType().equals(MatchType.WORD); } @Override protected String getDerivedName() { return "index-info"; } /** * An index command. Null commands are also represented, to detect consistency issues. This is an (immutable) value * object. */ /** * A command which may override the command setting of a field for a particular index */ private static abstract class IndexOverrider { protected final IndexInfo owner; public IndexOverrider(IndexInfo owner) { this.owner = owner; } /** * Override the setting of this index for this field, returns true if overriden, false if this index should be * set according to the field */ public abstract boolean override(String indexName, String command, ImmutableSDField field); } private static class StemmingOverrider extends IndexOverrider { private final Schema schema; public StemmingOverrider(IndexInfo owner, Schema schema) { super(owner); this.schema = schema; } public boolean override(String indexName, String command, ImmutableSDField field) { if (schema == null) { return false; } Index index = schema.getIndex(indexName); if (index == null) { return false; } Stemming indexStemming = index.getStemming(); if (indexStemming == null) { return false; } if ( ! 
Stemming.NONE.equals(indexStemming)) { owner.addIndexCommand(indexName, CMD_STEM + ":" + indexStemming.toStemMode()); } return true; } } }
Indentation is off.
/**
 * An index command: an index name paired with the command string applied to it.
 * Null commands (empty command string) are also represented, to detect consistency issues.
 * This is an (immutable) value object.
 *
 * <p>The record's generated component-wise {@code equals}/{@code hashCode} is used;
 * the previous hand-written {@code equals} duplicated it (while NPE-ing on null
 * components and lacking a matching explicit {@code hashCode}), so it was removed.</p>
 */
public record IndexCommand(String index, String command) {

    /** Returns true if this is the null command (do nothing). */
    public boolean isNull() {
        return command.isEmpty();
    }

    @Override
    public String toString() {
        return "index command " + command + " on index " + index;
    }

}
public String toString() {
/**
 * An index command. Null commands are also represented, to detect consistency
 * issues. This is an (immutable) value object.
 */
public record IndexCommand(String index, String command) {

    /** Returns true if this is the null command (do nothing). */
    public boolean isNull() {
        return command.isEmpty();
    }

    @Override
    public boolean equals(Object object) {
        return (object instanceof IndexCommand other)
               && other.index.equals(index)
               && other.command.equals(command);
    }

    @Override
    public String toString() {
        return "index command " + command + " on index " + index;
    }

}
class IndexInfo extends Derived implements IndexInfoConfig.Producer { private static final String CMD_ATTRIBUTE = "attribute"; private static final String CMD_DEFAULT_POSITION = "default-position"; private static final String CMD_DYNTEASER = "dynteaser"; private static final String CMD_FULLURL = "fullurl"; private static final String CMD_HIGHLIGHT = "highlight"; private static final String CMD_INDEX = "index"; private static final String CMD_LOWERCASE = "lowercase"; private static final String CMD_NORMALIZE = "normalize"; private static final String CMD_STEM = "stem"; private static final String CMD_URLHOST = "urlhost"; private static final String CMD_WORD = "word"; private static final String CMD_PLAIN_TOKENS = "plain-tokens"; private static final String CMD_MULTIVALUE = "multivalue"; private static final String CMD_FAST_SEARCH = "fast-search"; private static final String CMD_PREDICATE = "predicate"; private static final String CMD_PREDICATE_BOUNDS = "predicate-bounds"; private static final String CMD_NUMERICAL = "numerical"; private static final String CMD_INTEGER = "integer"; private static final String CMD_STRING = "string"; private static final String CMD_PHRASE_SEGMENTING = "phrase-segmenting"; private final boolean isStreaming; private final Set<IndexCommand> commands = new java.util.LinkedHashSet<>(); private final Map<String, String> aliases = new java.util.LinkedHashMap<>(); private final Map<String, FieldSet> fieldSets; private Schema schema; public IndexInfo(Schema schema, boolean isStreaming) { this.isStreaming = isStreaming; this.fieldSets = schema.fieldSets().userFieldSets(); addIndexCommand("sddocname", CMD_INDEX); addIndexCommand("sddocname", CMD_WORD); derive(schema); } @Override protected void derive(Schema schema) { super.derive(schema); this.schema = schema; for (FieldSet fs : fieldSets.values()) { for (String fieldName : fs.getFieldNames()) { fs.fields().add(schema.getField(fieldName)); } } for (Index index : schema.getExplicitIndices()) { 
derive(index, schema); } for (SummaryField summaryField : schema.getUniqueNamedSummaryFields().values()) { if (summaryField.getTransform().isTeaser()) { addIndexCommand(summaryField.getName(), CMD_DYNTEASER); } if (summaryField.getTransform().isBolded()) { addIndexCommand(summaryField.getName(), CMD_HIGHLIGHT); } var sourceField = schema.getField(summaryField.getSourceField()); if (sourceField != null && sourceField.getMatching().getType().equals(MatchType.GRAM)) { addIndexCommand(summaryField.getName(), "ngram " + (sourceField.getMatching().getGramSize().orElse(NGramMatch.DEFAULT_GRAM_SIZE))); } } } private static boolean isPositionField(ImmutableSDField field) { return GeoPos.isAnyPos(field); } @Override protected void derive(ImmutableSDField field, Schema schema) { derive(field, schema, false); } protected void derive(ImmutableSDField field, Schema schema, boolean inPosition) { if (field.getDataType().equals(DataType.PREDICATE)) { addIndexCommand(field, CMD_PREDICATE); Index index = field.getIndex(field.getName()); if (index != null) { BooleanIndexDefinition options = index.getBooleanIndexDefiniton(); if (options.hasLowerBound() || options.hasUpperBound()) { addIndexCommand(field.getName(), CMD_PREDICATE_BOUNDS + " [" + (options.hasLowerBound() ? Long.toString(options.getLowerBound()) : "") + ".." + (options.hasUpperBound() ? 
Long.toString(options.getUpperBound()) : "") + "]"); } } } for (Map.Entry<String, String> e : field.getAliasToName().entrySet()) { String alias = e.getKey(); String name = e.getValue(); addIndexAlias(alias, name); } boolean isPosition = isPositionField(field); if (field.usesStructOrMap()) { for (ImmutableSDField structField : field.getStructFields()) { derive(structField, schema, isPosition); } } if (isPosition) { addIndexCommand(field.getName(), CMD_DEFAULT_POSITION); } for (var index : field.getIndices().values()) { addIndexCommand(index.getName(), CMD_INDEX); } if (needLowerCase(field)) { addIndexCommand(field, CMD_LOWERCASE); } if (field.getDataType().isMultivalue()) { addIndexCommand(field, CMD_MULTIVALUE); } Attribute attribute = field.getAttribute(); if ((field.doesAttributing() || (attribute != null && !inPosition)) && !field.doesIndexing()) { addIndexCommand(field.getName(), CMD_ATTRIBUTE); if (attribute != null && attribute.isFastSearch()) addIndexCommand(field.getName(), CMD_FAST_SEARCH); } else if (field.doesIndexing()) { if (stemSomehow(field, schema)) { addIndexCommand(field, stemCmd(field, schema), new StemmingOverrider(this, schema)); } if (normalizeAccents(field)) { addIndexCommand(field, CMD_NORMALIZE); } if (field.getMatching() == null || field.getMatching().getType().equals(MatchType.TEXT)) { addIndexCommand(field, CMD_PLAIN_TOKENS); } } if (isUriField(field)) { addUriIndexCommands(field); } if (field.getDataType().getPrimitiveType() instanceof NumericDataType) { addIndexCommand(field, CMD_NUMERICAL); if (isTypeOrNested(field, DataType.INT) || isTypeOrNested(field, DataType.LONG) || isTypeOrNested(field, DataType.BYTE)) { addIndexCommand(field, CMD_INTEGER); } } if (isTypeOrNested(field, DataType.STRING)) { addIndexCommand(field, CMD_STRING); } for (String command : field.getQueryCommands()) { addIndexCommand(field, command); } } private static boolean isAnyChildString(DataType dataType) { PrimitiveDataType primitive = 
dataType.getPrimitiveType(); if (primitive == PrimitiveDataType.STRING) return true; if (primitive != null) return false; if (dataType instanceof StructuredDataType structured) { for (Field field : structured.getFields()) { if (isAnyChildString(field.getDataType())) return true; } } else if (dataType instanceof MapDataType mapType) { return isAnyChildString(mapType.getKeyType()) || isAnyChildString(mapType.getValueType()); } return false; } private static boolean needLowerCase(ImmutableSDField field) { return ( field.doesIndexing() && field.getMatching().getCase() != Case.CASED) || field.doesLowerCasing() || ((field.doesAttributing() || (field.getAttribute() != null)) && isAnyChildString(field.getDataType()) && field.getMatching().getCase().equals(Case.UNCASED)); } static String stemCmd(ImmutableSDField field, Schema schema) { return CMD_STEM + ":" + field.getStemming(schema).toStemMode(); } private boolean stemSomehow(ImmutableSDField field, Schema schema) { if (field.getStemming(schema).equals(Stemming.NONE)) return false; return isTypeOrNested(field, DataType.STRING); } private boolean normalizeAccents(ImmutableSDField field) { return !isStreaming && field.getNormalizing().doRemoveAccents() && isTypeOrNested(field, DataType.STRING); } private boolean isTypeOrNested(ImmutableSDField field, DataType type) { return field.getDataType().equals(type) || field.getDataType().equals(DataType.getArray(type)) || field.getDataType().equals(DataType.getWeightedSet(type)); } private boolean isUriField(ImmutableSDField field) { DataType fieldType = field.getDataType(); if (DataType.URI.equals(fieldType)) { return true; } if (fieldType instanceof CollectionDataType && DataType.URI.equals(((CollectionDataType)fieldType).getNestedType())) { return true; } return false; } private void addUriIndexCommands(ImmutableSDField field) { String fieldName = field.getName(); addIndexCommand(fieldName, CMD_FULLURL); addIndexCommand(fieldName, CMD_LOWERCASE); addIndexCommand(fieldName + "." 
+ fieldName, CMD_FULLURL); addIndexCommand(fieldName + "." + fieldName, CMD_LOWERCASE); addIndexCommand(fieldName + ".path", CMD_FULLURL); addIndexCommand(fieldName + ".path", CMD_LOWERCASE); addIndexCommand(fieldName + ".query", CMD_FULLURL); addIndexCommand(fieldName + ".query", CMD_LOWERCASE); addIndexCommand(fieldName + ".hostname", CMD_URLHOST); addIndexCommand(fieldName + ".hostname", CMD_LOWERCASE); Index index = field.getIndex("hostname"); if (index != null) { addIndexCommand(index, CMD_URLHOST); } } /** * Sets a command for all indices of a field */ private void addIndexCommand(Index index, String command) { addIndexCommand(index.getName(), command); } /** * Sets a command for all indices of a field */ private void addIndexCommand(ImmutableSDField field, String command) { addIndexCommand(field, command, null); } /** * Sets a command for all indices of a field */ private void addIndexCommand(ImmutableSDField field, String command, IndexOverrider overrider) { if (overrider == null || !overrider.override(field.getName(), command, field)) { addIndexCommand(field.getName(), command); } } private void addIndexCommand(String indexName, String command) { commands.add(new IndexCommand(indexName, command)); } private static void addIndexCommand(IndexInfoConfig.Indexinfo.Builder iiB, String indexName, String command) { iiB.command(new IndexInfoConfig.Indexinfo.Command.Builder().indexname(indexName).command(command)); } private void addIndexAlias(String alias, String indexName) { aliases.put(alias, indexName); } /** * Returns whether a particular command is prsent in this index info */ public boolean hasCommand(String indexName, String command) { return commands.contains(new IndexCommand(indexName, command)); } private boolean notInCommands(String index) { for (IndexCommand command : commands) { if (command.index().equals(index)) { return false; } } return true; } @Override public void getConfig(IndexInfoConfig.Builder builder) { IndexInfoConfig.Indexinfo.Builder iiB 
= new IndexInfoConfig.Indexinfo.Builder(); iiB.name(getName()); for (IndexCommand command : commands) { iiB.command( new IndexInfoConfig.Indexinfo.Command.Builder() .indexname(command.index()) .command(command.command())); } for (FieldSet fieldSet : fieldSets.values()) { if (notInCommands(fieldSet.getName())) { addFieldSetCommands(iiB, fieldSet); } } for (Map.Entry<String, String> e : aliases.entrySet()) { iiB.alias( new IndexInfoConfig.Indexinfo.Alias.Builder() .alias(e.getKey()) .indexname(e.getValue())); } builder.indexinfo(iiB); } private void addFieldSetCommands(IndexInfoConfig.Indexinfo.Builder iiB, FieldSet fieldSet) { for (String qc : fieldSet.queryCommands()) iiB.command(new IndexInfoConfig.Indexinfo.Command.Builder().indexname(fieldSet.getName()).command(qc)); boolean anyIndexing = false; boolean anyAttributing = false; boolean anyLowerCasing = false; boolean anyStemming = false; boolean anyNormalizing = false; boolean anyString = false; boolean anyInteger = false; String phraseSegmentingCommand = null; String stemmingCommand = null; Matching fieldSetMatching = fieldSet.getMatching(); for (ImmutableSDField field : fieldSet.fields()) { if (field.doesIndexing()) { anyIndexing = true; } if (field.doesAttributing()) { anyAttributing = true; } if (needLowerCase(field)) { anyLowerCasing = true; } if (stemming(field)) { anyStemming = true; stemmingCommand = CMD_STEM + ":" + getEffectiveStemming(field).toStemMode(); } if (normalizeAccents(field)) { anyNormalizing = true; } if (isTypeOrNested(field, DataType.STRING)) { anyString = true; } if (fieldSetMatching == null && field.getMatching().getType() != Matching.defaultType) { fieldSetMatching = field.getMatching(); } Optional<String> explicitPhraseSegmentingCommand = field.getQueryCommands().stream().filter(c -> c.startsWith(CMD_PHRASE_SEGMENTING)).findFirst(); if (explicitPhraseSegmentingCommand.isPresent()) { phraseSegmentingCommand = explicitPhraseSegmentingCommand.get(); } if (isTypeOrNested(field, 
DataType.INT) || isTypeOrNested(field, DataType.LONG) || isTypeOrNested(field, DataType.BYTE)) { anyInteger = true; } } if (anyIndexing && anyAttributing && fieldSet.getMatching() == null) { fieldSetMatching = new Matching(); } if (anyLowerCasing) { iiB.command( new IndexInfoConfig.Indexinfo.Command.Builder() .indexname(fieldSet.getName()) .command(CMD_LOWERCASE)); } if (hasMultiValueField(fieldSet)) { iiB.command( new IndexInfoConfig.Indexinfo.Command.Builder() .indexname(fieldSet.getName()) .command(CMD_MULTIVALUE)); } if (anyIndexing) { iiB.command( new IndexInfoConfig.Indexinfo.Command.Builder() .indexname(fieldSet.getName()) .command(CMD_INDEX)); if ( ! isExactMatch(fieldSetMatching)) { if (fieldSetMatching == null || fieldSetMatching.getType().equals(MatchType.TEXT)) { iiB.command( new IndexInfoConfig.Indexinfo.Command.Builder() .indexname(fieldSet.getName()) .command(CMD_PLAIN_TOKENS)); } if (anyStemming) { iiB.command( new IndexInfoConfig.Indexinfo.Command.Builder() .indexname(fieldSet.getName()) .command(stemmingCommand)); } if (anyNormalizing) iiB.command( new IndexInfoConfig.Indexinfo.Command.Builder() .indexname(fieldSet.getName()) .command(CMD_NORMALIZE)); if (phraseSegmentingCommand != null) iiB.command( new IndexInfoConfig.Indexinfo.Command.Builder() .indexname(fieldSet.getName()) .command(phraseSegmentingCommand)); } } else { iiB .command( new IndexInfoConfig.Indexinfo.Command.Builder() .indexname(fieldSet.getName()) .command(CMD_ATTRIBUTE)) .command( new IndexInfoConfig.Indexinfo.Command.Builder() .indexname(fieldSet.getName()) .command(CMD_INDEX)); } if (anyString) { addIndexCommand(iiB, fieldSet.getName(), CMD_STRING); } if (anyInteger) { addIndexCommand(iiB, fieldSet.getName(), CMD_INTEGER); } if (fieldSetMatching != null) { if (fieldSetMatching.getType().equals(MatchType.EXACT)) { String term = fieldSetMatching.getExactMatchTerminator(); if (term==null) term=ExactMatch.DEFAULT_EXACT_TERMINATOR; iiB.command( new 
IndexInfoConfig.Indexinfo.Command.Builder() .indexname(fieldSet.getName()) .command("exact "+term)); } else if (fieldSetMatching.getType().equals(MatchType.WORD)) { iiB.command( new IndexInfoConfig.Indexinfo.Command.Builder() .indexname(fieldSet.getName()) .command(CMD_WORD)); } else if (fieldSetMatching.getType().equals(MatchType.GRAM)) { iiB.command( new IndexInfoConfig.Indexinfo.Command.Builder() .indexname(fieldSet.getName()) .command("ngram " + fieldSetMatching.getGramSize().orElse(NGramMatch.DEFAULT_GRAM_SIZE))); } else if (fieldSetMatching.getType().equals(MatchType.TEXT)) { } } } private boolean hasMultiValueField(FieldSet fieldSet) { for (ImmutableSDField field : fieldSet.fields()) { if (field.getDataType().isMultivalue()) return true; } return false; } private Stemming getEffectiveStemming(ImmutableSDField field) { Stemming active = field.getStemming(schema); if (field.getIndex(field.getName()) != null) { if (field.getIndex(field.getName()).getStemming()!=null) { active = field.getIndex(field.getName()).getStemming(); } } if (active != null) { return active; } return Stemming.BEST; } private boolean stemming(ImmutableSDField field) { if (field.getStemming() != null) { return !field.getStemming().equals(Stemming.NONE); } if (schema.getStemming() == Stemming.NONE) return false; if (field.isImportedField()) return false; if (field.getIndex(field.getName())==null) return true; if (field.getIndex(field.getName()).getStemming()==null) return true; return !(field.getIndex(field.getName()).getStemming().equals(Stemming.NONE)); } private boolean isExactMatch(Matching m) { if (m == null) return false; if (m.getType().equals(MatchType.EXACT)) return true; if (m.getType().equals(MatchType.WORD)) return true; return false; } @Override protected String getDerivedName() { return "index-info"; } /** * An index command. Null commands are also represented, to detect consistency issues. This is an (immutable) value * object. 
*/ /** * A command which may override the command setting of a field for a particular index */ private static abstract class IndexOverrider { protected final IndexInfo owner; public IndexOverrider(IndexInfo owner) { this.owner = owner; } /** * Override the setting of this index for this field, returns true if overriden, false if this index should be * set according to the field */ public abstract boolean override(String indexName, String command, ImmutableSDField field); } private static class StemmingOverrider extends IndexOverrider { private final Schema schema; public StemmingOverrider(IndexInfo owner, Schema schema) { super(owner); this.schema = schema; } public boolean override(String indexName, String command, ImmutableSDField field) { if (schema == null) { return false; } Index index = schema.getIndex(indexName); if (index == null) { return false; } Stemming indexStemming = index.getStemming(); if (indexStemming == null) { return false; } if (Stemming.NONE.equals(indexStemming)) { } else { owner.addIndexCommand(indexName, CMD_STEM + ":" + indexStemming.toStemMode()); } return true; } } }
class IndexInfo extends Derived implements IndexInfoConfig.Producer { private static final String CMD_ATTRIBUTE = "attribute"; private static final String CMD_DEFAULT_POSITION = "default-position"; private static final String CMD_DYNTEASER = "dynteaser"; private static final String CMD_FULLURL = "fullurl"; private static final String CMD_HIGHLIGHT = "highlight"; private static final String CMD_INDEX = "index"; private static final String CMD_LOWERCASE = "lowercase"; private static final String CMD_NORMALIZE = "normalize"; private static final String CMD_STEM = "stem"; private static final String CMD_URLHOST = "urlhost"; private static final String CMD_WORD = "word"; private static final String CMD_PLAIN_TOKENS = "plain-tokens"; private static final String CMD_MULTIVALUE = "multivalue"; private static final String CMD_FAST_SEARCH = "fast-search"; private static final String CMD_PREDICATE = "predicate"; private static final String CMD_PREDICATE_BOUNDS = "predicate-bounds"; private static final String CMD_NUMERICAL = "numerical"; private static final String CMD_INTEGER = "integer"; private static final String CMD_STRING = "string"; private static final String CMD_PHRASE_SEGMENTING = "phrase-segmenting"; private final boolean isStreaming; private final Set<IndexCommand> commands = new java.util.LinkedHashSet<>(); private final Map<String, String> aliases = new java.util.LinkedHashMap<>(); private final Map<String, FieldSet> fieldSets; private Schema schema; public IndexInfo(Schema schema, boolean isStreaming) { this.isStreaming = isStreaming; this.fieldSets = schema.fieldSets().userFieldSets(); addIndexCommand("sddocname", CMD_INDEX); addIndexCommand("sddocname", CMD_WORD); derive(schema); } @Override protected void derive(Schema schema) { super.derive(schema); this.schema = schema; for (FieldSet fs : fieldSets.values()) { for (String fieldName : fs.getFieldNames()) { fs.fields().add(schema.getField(fieldName)); } } for (Index index : schema.getExplicitIndices()) { 
derive(index, schema); } for (SummaryField summaryField : schema.getUniqueNamedSummaryFields().values()) { if (summaryField.getTransform().isTeaser()) { addIndexCommand(summaryField.getName(), CMD_DYNTEASER); } if (summaryField.getTransform().isBolded()) { addIndexCommand(summaryField.getName(), CMD_HIGHLIGHT); } var sourceField = schema.getField(summaryField.getSourceField()); if (sourceField != null && sourceField.getMatching().getType().equals(MatchType.GRAM)) { addIndexCommand(summaryField.getName(), "ngram " + (sourceField.getMatching().getGramSize().orElse(NGramMatch.DEFAULT_GRAM_SIZE))); } } } private static boolean isPositionField(ImmutableSDField field) { return GeoPos.isAnyPos(field); } @Override protected void derive(ImmutableSDField field, Schema schema) { derive(field, schema, false); } protected void derive(ImmutableSDField field, Schema schema, boolean inPosition) { if (field.getDataType().equals(DataType.PREDICATE)) { addIndexCommand(field, CMD_PREDICATE); Index index = field.getIndex(field.getName()); if (index != null) { BooleanIndexDefinition options = index.getBooleanIndexDefiniton(); if (options.hasLowerBound() || options.hasUpperBound()) { addIndexCommand(field.getName(), CMD_PREDICATE_BOUNDS + " [" + (options.hasLowerBound() ? Long.toString(options.getLowerBound()) : "") + ".." + (options.hasUpperBound() ? 
Long.toString(options.getUpperBound()) : "") + "]"); } } } for (Map.Entry<String, String> e : field.getAliasToName().entrySet()) { String alias = e.getKey(); String name = e.getValue(); addIndexAlias(alias, name); } boolean isPosition = isPositionField(field); if (field.usesStructOrMap()) { for (ImmutableSDField structField : field.getStructFields()) { derive(structField, schema, isPosition); } } if (isPosition) { addIndexCommand(field.getName(), CMD_DEFAULT_POSITION); } for (var index : field.getIndices().values()) { addIndexCommand(index.getName(), CMD_INDEX); } if (needLowerCase(field)) { addIndexCommand(field, CMD_LOWERCASE); } if (field.getDataType().isMultivalue()) { addIndexCommand(field, CMD_MULTIVALUE); } Attribute attribute = field.getAttribute(); if ((field.doesAttributing() || (attribute != null && !inPosition)) && !field.doesIndexing()) { addIndexCommand(field.getName(), CMD_ATTRIBUTE); if (attribute != null && attribute.isFastSearch()) addIndexCommand(field.getName(), CMD_FAST_SEARCH); } else if (field.doesIndexing()) { if (stemSomehow(field, schema)) { addIndexCommand(field, stemCmd(field, schema), new StemmingOverrider(this, schema)); } if (normalizeAccents(field)) { addIndexCommand(field, CMD_NORMALIZE); } if (field.getMatching() == null || field.getMatching().getType().equals(MatchType.TEXT)) { addIndexCommand(field, CMD_PLAIN_TOKENS); } } if (isUriField(field)) { addUriIndexCommands(field); } if (field.getDataType().getPrimitiveType() instanceof NumericDataType) { addIndexCommand(field, CMD_NUMERICAL); if (isTypeOrNested(field, DataType.INT) || isTypeOrNested(field, DataType.LONG) || isTypeOrNested(field, DataType.BYTE)) { addIndexCommand(field, CMD_INTEGER); } } if (isTypeOrNested(field, DataType.STRING)) { addIndexCommand(field, CMD_STRING); } for (String command : field.getQueryCommands()) { addIndexCommand(field, command); } } private static boolean isAnyChildString(DataType dataType) { PrimitiveDataType primitive = 
dataType.getPrimitiveType(); if (primitive == PrimitiveDataType.STRING) return true; if (primitive != null) return false; if (dataType instanceof StructuredDataType structured) { for (Field field : structured.getFields()) { if (isAnyChildString(field.getDataType())) return true; } } else if (dataType instanceof MapDataType mapType) { return isAnyChildString(mapType.getKeyType()) || isAnyChildString(mapType.getValueType()); } return false; } private static boolean needLowerCase(ImmutableSDField field) { return ( field.doesIndexing() && field.getMatching().getCase() != Case.CASED) || field.doesLowerCasing() || ((field.doesAttributing() || (field.getAttribute() != null)) && isAnyChildString(field.getDataType()) && field.getMatching().getCase().equals(Case.UNCASED)); } static String stemCmd(ImmutableSDField field, Schema schema) { return CMD_STEM + ":" + field.getStemming(schema).toStemMode(); } private boolean stemSomehow(ImmutableSDField field, Schema schema) { if (field.getStemming(schema).equals(Stemming.NONE)) return false; return isTypeOrNested(field, DataType.STRING); } private boolean normalizeAccents(ImmutableSDField field) { return !isStreaming && field.getNormalizing().doRemoveAccents() && isTypeOrNested(field, DataType.STRING); } private boolean isTypeOrNested(ImmutableSDField field, DataType type) { return field.getDataType().equals(type) || field.getDataType().equals(DataType.getArray(type)) || field.getDataType().equals(DataType.getWeightedSet(type)); } private boolean isUriField(ImmutableSDField field) { DataType fieldType = field.getDataType(); if (DataType.URI.equals(fieldType)) { return true; } return (fieldType instanceof CollectionDataType collectionFieldType) && DataType.URI.equals(collectionFieldType.getNestedType()); } private void addUriIndexCommands(ImmutableSDField field) { String fieldName = field.getName(); addIndexCommand(fieldName, CMD_FULLURL); addIndexCommand(fieldName, CMD_LOWERCASE); addIndexCommand(fieldName + "." 
+ fieldName, CMD_FULLURL); addIndexCommand(fieldName + "." + fieldName, CMD_LOWERCASE); addIndexCommand(fieldName + ".path", CMD_FULLURL); addIndexCommand(fieldName + ".path", CMD_LOWERCASE); addIndexCommand(fieldName + ".query", CMD_FULLURL); addIndexCommand(fieldName + ".query", CMD_LOWERCASE); addIndexCommand(fieldName + ".hostname", CMD_URLHOST); addIndexCommand(fieldName + ".hostname", CMD_LOWERCASE); Index index = field.getIndex("hostname"); if (index != null) { addIndexCommand(index, CMD_URLHOST); } } /** * Sets a command for all indices of a field */ private void addIndexCommand(Index index, String command) { addIndexCommand(index.getName(), command); } /** * Sets a command for all indices of a field */ private void addIndexCommand(ImmutableSDField field, String command) { addIndexCommand(field, command, null); } /** * Sets a command for all indices of a field */ private void addIndexCommand(ImmutableSDField field, String command, IndexOverrider overrider) { if (overrider == null || !overrider.override(field.getName(), command, field)) { addIndexCommand(field.getName(), command); } } private void addIndexCommand(String indexName, String command) { commands.add(new IndexCommand(indexName, command)); } private static void addIndexCommand(IndexInfoConfig.Indexinfo.Builder iiB, String indexName, String command) { iiB.command(new IndexInfoConfig.Indexinfo.Command.Builder().indexname(indexName).command(command)); } private void addIndexAlias(String alias, String indexName) { aliases.put(alias, indexName); } /** * Returns whether a particular command is prsent in this index info */ public boolean hasCommand(String indexName, String command) { return commands.contains(new IndexCommand(indexName, command)); } private boolean notInCommands(String index) { for (IndexCommand command : commands) { if (command.index().equals(index)) { return false; } } return true; } @Override public void getConfig(IndexInfoConfig.Builder builder) { IndexInfoConfig.Indexinfo.Builder iiB 
= new IndexInfoConfig.Indexinfo.Builder(); iiB.name(getName()); for (IndexCommand command : commands) { addIndexCommand(iiB, command.index(), command.command()); } for (FieldSet fieldSet : fieldSets.values()) { if (notInCommands(fieldSet.getName())) { addFieldSetCommands(iiB, fieldSet); } } for (Map.Entry<String, String> e : aliases.entrySet()) { iiB.alias(new IndexInfoConfig.Indexinfo.Alias.Builder().alias(e.getKey()).indexname(e.getValue())); } builder.indexinfo(iiB); } private void addFieldSetCommands(IndexInfoConfig.Indexinfo.Builder iiB, FieldSet fieldSet) { for (String qc : fieldSet.queryCommands()) { addIndexCommand(iiB, fieldSet.getName(), qc); } boolean anyIndexing = false; boolean anyAttributing = false; boolean anyLowerCasing = false; boolean anyStemming = false; boolean anyNormalizing = false; boolean anyString = false; boolean anyInteger = false; String phraseSegmentingCommand = null; String stemmingCommand = null; Matching fieldSetMatching = fieldSet.getMatching(); for (ImmutableSDField field : fieldSet.fields()) { if (field.doesIndexing()) { anyIndexing = true; } if (field.doesAttributing()) { anyAttributing = true; } if (needLowerCase(field)) { anyLowerCasing = true; } if (stemming(field)) { anyStemming = true; stemmingCommand = CMD_STEM + ":" + getEffectiveStemming(field).toStemMode(); } if (normalizeAccents(field)) { anyNormalizing = true; } if (isTypeOrNested(field, DataType.STRING)) { anyString = true; } if (fieldSetMatching == null && field.getMatching().getType() != Matching.defaultType) { fieldSetMatching = field.getMatching(); } Optional<String> explicitPhraseSegmentingCommand = field.getQueryCommands().stream().filter(c -> c.startsWith(CMD_PHRASE_SEGMENTING)).findFirst(); if (explicitPhraseSegmentingCommand.isPresent()) { phraseSegmentingCommand = explicitPhraseSegmentingCommand.get(); } if (isTypeOrNested(field, DataType.INT) || isTypeOrNested(field, DataType.LONG) || isTypeOrNested(field, DataType.BYTE)) { anyInteger = true; } } if 
(anyIndexing && anyAttributing && fieldSet.getMatching() == null) { fieldSetMatching = new Matching(); } if (anyLowerCasing) { addIndexCommand(iiB, fieldSet.getName(), CMD_LOWERCASE); } if (hasMultiValueField(fieldSet)) { addIndexCommand(iiB, fieldSet.getName(), CMD_MULTIVALUE); } if (anyIndexing) { addIndexCommand(iiB, fieldSet.getName(), CMD_INDEX); if ( ! isExactMatch(fieldSetMatching)) { if (fieldSetMatching == null || fieldSetMatching.getType().equals(MatchType.TEXT)) { addIndexCommand(iiB, fieldSet.getName(), CMD_PLAIN_TOKENS); } if (anyStemming) { addIndexCommand(iiB, fieldSet.getName(), stemmingCommand); } if (anyNormalizing) addIndexCommand(iiB, fieldSet.getName(), CMD_NORMALIZE); if (phraseSegmentingCommand != null) addIndexCommand(iiB, fieldSet.getName(), phraseSegmentingCommand); } } else { addIndexCommand(iiB, fieldSet.getName(), CMD_ATTRIBUTE); addIndexCommand(iiB, fieldSet.getName(), CMD_INDEX); } if (anyString) { addIndexCommand(iiB, fieldSet.getName(), CMD_STRING); } if (anyInteger) { addIndexCommand(iiB, fieldSet.getName(), CMD_INTEGER); } if (fieldSetMatching != null) { if (fieldSetMatching.getType().equals(MatchType.EXACT)) { String term = fieldSetMatching.getExactMatchTerminator(); if (term==null) term=ExactMatch.DEFAULT_EXACT_TERMINATOR; addIndexCommand(iiB, fieldSet.getName(), "exact "+term); } else if (fieldSetMatching.getType().equals(MatchType.WORD)) { addIndexCommand(iiB, fieldSet.getName(), CMD_WORD); } else if (fieldSetMatching.getType().equals(MatchType.GRAM)) { addIndexCommand(iiB, fieldSet.getName(), "ngram " + fieldSetMatching.getGramSize().orElse(NGramMatch.DEFAULT_GRAM_SIZE)); } else if (fieldSetMatching.getType().equals(MatchType.TEXT)) { } } } private boolean hasMultiValueField(FieldSet fieldSet) { for (ImmutableSDField field : fieldSet.fields()) { if (field.getDataType().isMultivalue()) return true; } return false; } private Stemming getEffectiveStemming(ImmutableSDField field) { Stemming active = field.getStemming(schema); if 
(field.getIndex(field.getName()) != null) { if (field.getIndex(field.getName()).getStemming()!=null) { active = field.getIndex(field.getName()).getStemming(); } } return Objects.requireNonNullElse(active, Stemming.BEST); } private boolean stemming(ImmutableSDField field) { if (field.getStemming() != null) { return !field.getStemming().equals(Stemming.NONE); } if (schema.getStemming() == Stemming.NONE) return false; if (field.isImportedField()) return false; if (field.getIndex(field.getName())==null) return true; if (field.getIndex(field.getName()).getStemming()==null) return true; return !(field.getIndex(field.getName()).getStemming().equals(Stemming.NONE)); } private boolean isExactMatch(Matching m) { if (m == null) return false; return m.getType().equals(MatchType.EXACT) || m.getType().equals(MatchType.WORD); } @Override protected String getDerivedName() { return "index-info"; } /** * An index command. Null commands are also represented, to detect consistency issues. This is an (immutable) value * object. */ /** * A command which may override the command setting of a field for a particular index */ private static abstract class IndexOverrider { protected final IndexInfo owner; public IndexOverrider(IndexInfo owner) { this.owner = owner; } /** * Override the setting of this index for this field, returns true if overriden, false if this index should be * set according to the field */ public abstract boolean override(String indexName, String command, ImmutableSDField field); } private static class StemmingOverrider extends IndexOverrider { private final Schema schema; public StemmingOverrider(IndexInfo owner, Schema schema) { super(owner); this.schema = schema; } public boolean override(String indexName, String command, ImmutableSDField field) { if (schema == null) { return false; } Index index = schema.getIndex(indexName); if (index == null) { return false; } Stemming indexStemming = index.getStemming(); if (indexStemming == null) { return false; } if ( ! 
Stemming.NONE.equals(indexStemming)) { owner.addIndexCommand(indexName, CMD_STEM + ":" + indexStemming.toStemMode()); } return true; } } }
Consider adding a helper function for test_exact_string(), test_string(), and test_cased_string() to reduce code duplication. The only differences are the Matching config and the expected normalize setting, and arg1 in VsmfieldsConfig.
void test_exact_string() { Schema schema = createSchema(); SDField field = new TemporarySDField(schema.getDocument(), "f", DataType.STRING); field.parseIndexingScript("{ index }"); field.setMatching(new Matching(MatchType.EXACT).setCase(Case.CASED)); schema.getDocument().addField(field); VsmfieldsConfig cfg = vsmfieldsConfig(schema); VsmfieldsConfig.Fieldspec fieldSpec = cfg.fieldspec().get(0); assertEquals("f", fieldSpec.name()); assertEquals(VsmfieldsConfig.Fieldspec.Searchmethod.AUTOUTF8, fieldSpec.searchmethod()); assertEquals(VsmfieldsConfig.Fieldspec.Normalize.LOWERCASE, fieldSpec.normalize()); assertEquals("exact", fieldSpec.arg1()); }
Schema schema = createSchema();
void test_exact_string() { testIndexMatching(new Matching(MatchType.TEXT), VsmfieldsConfig.Fieldspec.Normalize.LOWERCASE_AND_FOLD, ""); testIndexMatching(new Matching(MatchType.TEXT).setCase(Case.CASED), VsmfieldsConfig.Fieldspec.Normalize.NONE, ""); testIndexMatching(new Matching(MatchType.EXACT).setCase(Case.CASED), VsmfieldsConfig.Fieldspec.Normalize.LOWERCASE, "exact"); }
class VsmFieldsTestCase { private static Schema createSchema() { Schema schema = new Schema("test", MockApplicationPackage.createEmpty(), new MockFileRegistry(), new TestableDeployLogger(), new TestProperties()); var sdoc = new SDDocumentType("test"); schema.addDocument(sdoc); return schema; } private static VsmfieldsConfig vsmfieldsConfig(Schema schema) { VsmFields vsmFields = new VsmFields(schema); VsmfieldsConfig.Builder cfgBuilder = new VsmfieldsConfig.Builder(); vsmFields.getConfig(cfgBuilder); return cfgBuilder.build(); } @Test void reference_type_field_is_unsearchable() { Schema schema = createSchema(); SDField field = new TemporarySDField(schema.getDocument(), "ref_field", NewDocumentReferenceDataType.forDocumentName("parent_type")); field.parseIndexingScript("{ summary }"); schema.getDocument().addField(field); VsmfieldsConfig cfg = vsmfieldsConfig(schema); assertEquals(1, cfg.fieldspec().size()); VsmfieldsConfig.Fieldspec fieldSpec = cfg.fieldspec().get(0); assertEquals("ref_field", fieldSpec.name()); assertEquals(VsmfieldsConfig.Fieldspec.Searchmethod.NONE, fieldSpec.searchmethod()); } @Test @Test void test_string() { Schema schema = createSchema(); SDField field = new TemporarySDField(schema.getDocument(), "f", DataType.STRING); field.parseIndexingScript("{ index }"); field.setMatching(new Matching(MatchType.TEXT)); schema.getDocument().addField(field); VsmfieldsConfig cfg = vsmfieldsConfig(schema); VsmfieldsConfig.Fieldspec fieldSpec = cfg.fieldspec().get(0); assertEquals("f", fieldSpec.name()); assertEquals(VsmfieldsConfig.Fieldspec.Searchmethod.AUTOUTF8, fieldSpec.searchmethod()); assertEquals(VsmfieldsConfig.Fieldspec.Normalize.LOWERCASE_AND_FOLD, fieldSpec.normalize()); assertEquals("", fieldSpec.arg1()); } @Test void test_cased_string() { Schema schema = createSchema(); SDField field = new TemporarySDField(schema.getDocument(), "f", DataType.STRING); field.parseIndexingScript("{ index }"); field.setMatching(new 
Matching(MatchType.TEXT).setCase(Case.CASED)); schema.getDocument().addField(field); VsmfieldsConfig cfg = vsmfieldsConfig(schema); VsmfieldsConfig.Fieldspec fieldSpec = cfg.fieldspec().get(0); assertEquals("f", fieldSpec.name()); assertEquals(VsmfieldsConfig.Fieldspec.Searchmethod.AUTOUTF8, fieldSpec.searchmethod()); assertEquals(VsmfieldsConfig.Fieldspec.Normalize.NONE, fieldSpec.normalize()); assertEquals("", fieldSpec.arg1()); } }
class VsmFieldsTestCase { private static Schema createSchema() { Schema schema = new Schema("test", MockApplicationPackage.createEmpty(), new MockFileRegistry(), new TestableDeployLogger(), new TestProperties()); var sdoc = new SDDocumentType("test"); schema.addDocument(sdoc); return schema; } private static VsmfieldsConfig vsmfieldsConfig(Schema schema) { VsmFields vsmFields = new VsmFields(schema); VsmfieldsConfig.Builder cfgBuilder = new VsmfieldsConfig.Builder(); vsmFields.getConfig(cfgBuilder); return cfgBuilder.build(); } @Test void reference_type_field_is_unsearchable() { Schema schema = createSchema(); SDField field = new TemporarySDField(schema.getDocument(), "ref_field", NewDocumentReferenceDataType.forDocumentName("parent_type")); field.parseIndexingScript("{ summary }"); schema.getDocument().addField(field); VsmfieldsConfig cfg = vsmfieldsConfig(schema); assertEquals(1, cfg.fieldspec().size()); VsmfieldsConfig.Fieldspec fieldSpec = cfg.fieldspec().get(0); assertEquals("ref_field", fieldSpec.name()); assertEquals(VsmfieldsConfig.Fieldspec.Searchmethod.NONE, fieldSpec.searchmethod()); } private void testIndexMatching(Matching matching, VsmfieldsConfig.Fieldspec.Normalize.Enum normalize, String arg1) { Schema schema = createSchema(); SDField field = new TemporarySDField(schema.getDocument(), "f", DataType.STRING); field.parseIndexingScript("{ index }"); field.setMatching(matching); schema.getDocument().addField(field); VsmfieldsConfig cfg = vsmfieldsConfig(schema); VsmfieldsConfig.Fieldspec fieldSpec = cfg.fieldspec().get(0); assertEquals("f", fieldSpec.name()); assertEquals(VsmfieldsConfig.Fieldspec.Searchmethod.AUTOUTF8, fieldSpec.searchmethod()); assertEquals(normalize, fieldSpec.normalize()); assertEquals(arg1, fieldSpec.arg1()); } @Test }
Fixed
public record IndexCommand(String index, String command) { /** * Returns true if this is the null command (do nothing) */ public boolean isNull() { return command.isEmpty(); } public boolean equals(Object object) { if (!(object instanceof IndexCommand other)) { return false; } return other.index.equals(this.index) && other.command.equals(this.command); } public String toString() { return "index command " + command + " on index " + index; } }
if (!(object instanceof IndexCommand other)) {
public record IndexCommand(String index, String command) { /** * Returns true if this is the null command (do nothing) */ public boolean isNull() { return command.isEmpty(); } public boolean equals(Object object) { if (!(object instanceof IndexCommand other)) { return false; } return other.index.equals(this.index) && other.command.equals(this.command); } public String toString() { return "index command " + command + " on index " + index; } }
class IndexInfo extends Derived implements IndexInfoConfig.Producer { private static final String CMD_ATTRIBUTE = "attribute"; private static final String CMD_DEFAULT_POSITION = "default-position"; private static final String CMD_DYNTEASER = "dynteaser"; private static final String CMD_FULLURL = "fullurl"; private static final String CMD_HIGHLIGHT = "highlight"; private static final String CMD_INDEX = "index"; private static final String CMD_LOWERCASE = "lowercase"; private static final String CMD_NORMALIZE = "normalize"; private static final String CMD_STEM = "stem"; private static final String CMD_URLHOST = "urlhost"; private static final String CMD_WORD = "word"; private static final String CMD_PLAIN_TOKENS = "plain-tokens"; private static final String CMD_MULTIVALUE = "multivalue"; private static final String CMD_FAST_SEARCH = "fast-search"; private static final String CMD_PREDICATE = "predicate"; private static final String CMD_PREDICATE_BOUNDS = "predicate-bounds"; private static final String CMD_NUMERICAL = "numerical"; private static final String CMD_INTEGER = "integer"; private static final String CMD_STRING = "string"; private static final String CMD_PHRASE_SEGMENTING = "phrase-segmenting"; private final boolean isStreaming; private final Set<IndexCommand> commands = new java.util.LinkedHashSet<>(); private final Map<String, String> aliases = new java.util.LinkedHashMap<>(); private final Map<String, FieldSet> fieldSets; private Schema schema; public IndexInfo(Schema schema, boolean isStreaming) { this.isStreaming = isStreaming; this.fieldSets = schema.fieldSets().userFieldSets(); addIndexCommand("sddocname", CMD_INDEX); addIndexCommand("sddocname", CMD_WORD); derive(schema); } @Override protected void derive(Schema schema) { super.derive(schema); this.schema = schema; for (FieldSet fs : fieldSets.values()) { for (String fieldName : fs.getFieldNames()) { fs.fields().add(schema.getField(fieldName)); } } for (Index index : schema.getExplicitIndices()) { 
derive(index, schema); } for (SummaryField summaryField : schema.getUniqueNamedSummaryFields().values()) { if (summaryField.getTransform().isTeaser()) { addIndexCommand(summaryField.getName(), CMD_DYNTEASER); } if (summaryField.getTransform().isBolded()) { addIndexCommand(summaryField.getName(), CMD_HIGHLIGHT); } var sourceField = schema.getField(summaryField.getSourceField()); if (sourceField != null && sourceField.getMatching().getType().equals(MatchType.GRAM)) { addIndexCommand(summaryField.getName(), "ngram " + (sourceField.getMatching().getGramSize().orElse(NGramMatch.DEFAULT_GRAM_SIZE))); } } } private static boolean isPositionField(ImmutableSDField field) { return GeoPos.isAnyPos(field); } @Override protected void derive(ImmutableSDField field, Schema schema) { derive(field, schema, false); } protected void derive(ImmutableSDField field, Schema schema, boolean inPosition) { if (field.getDataType().equals(DataType.PREDICATE)) { addIndexCommand(field, CMD_PREDICATE); Index index = field.getIndex(field.getName()); if (index != null) { BooleanIndexDefinition options = index.getBooleanIndexDefiniton(); if (options.hasLowerBound() || options.hasUpperBound()) { addIndexCommand(field.getName(), CMD_PREDICATE_BOUNDS + " [" + (options.hasLowerBound() ? Long.toString(options.getLowerBound()) : "") + ".." + (options.hasUpperBound() ? 
Long.toString(options.getUpperBound()) : "") + "]"); } } } for (Map.Entry<String, String> e : field.getAliasToName().entrySet()) { String alias = e.getKey(); String name = e.getValue(); addIndexAlias(alias, name); } boolean isPosition = isPositionField(field); if (field.usesStructOrMap()) { for (ImmutableSDField structField : field.getStructFields()) { derive(structField, schema, isPosition); } } if (isPosition) { addIndexCommand(field.getName(), CMD_DEFAULT_POSITION); } for (var index : field.getIndices().values()) { addIndexCommand(index.getName(), CMD_INDEX); } if (needLowerCase(field)) { addIndexCommand(field, CMD_LOWERCASE); } if (field.getDataType().isMultivalue()) { addIndexCommand(field, CMD_MULTIVALUE); } Attribute attribute = field.getAttribute(); if ((field.doesAttributing() || (attribute != null && !inPosition)) && !field.doesIndexing()) { addIndexCommand(field.getName(), CMD_ATTRIBUTE); if (attribute != null && attribute.isFastSearch()) addIndexCommand(field.getName(), CMD_FAST_SEARCH); } else if (field.doesIndexing()) { if (stemSomehow(field, schema)) { addIndexCommand(field, stemCmd(field, schema), new StemmingOverrider(this, schema)); } if (normalizeAccents(field)) { addIndexCommand(field, CMD_NORMALIZE); } if (field.getMatching() == null || field.getMatching().getType().equals(MatchType.TEXT)) { addIndexCommand(field, CMD_PLAIN_TOKENS); } } if (isUriField(field)) { addUriIndexCommands(field); } if (field.getDataType().getPrimitiveType() instanceof NumericDataType) { addIndexCommand(field, CMD_NUMERICAL); if (isTypeOrNested(field, DataType.INT) || isTypeOrNested(field, DataType.LONG) || isTypeOrNested(field, DataType.BYTE)) { addIndexCommand(field, CMD_INTEGER); } } if (isTypeOrNested(field, DataType.STRING)) { addIndexCommand(field, CMD_STRING); } for (String command : field.getQueryCommands()) { addIndexCommand(field, command); } } private static boolean isAnyChildString(DataType dataType) { PrimitiveDataType primitive = 
dataType.getPrimitiveType(); if (primitive == PrimitiveDataType.STRING) return true; if (primitive != null) return false; if (dataType instanceof StructuredDataType structured) { for (Field field : structured.getFields()) { if (isAnyChildString(field.getDataType())) return true; } } else if (dataType instanceof MapDataType mapType) { return isAnyChildString(mapType.getKeyType()) || isAnyChildString(mapType.getValueType()); } return false; } private static boolean needLowerCase(ImmutableSDField field) { return ( field.doesIndexing() && field.getMatching().getCase() != Case.CASED) || field.doesLowerCasing() || ((field.doesAttributing() || (field.getAttribute() != null)) && isAnyChildString(field.getDataType()) && field.getMatching().getCase().equals(Case.UNCASED)); } static String stemCmd(ImmutableSDField field, Schema schema) { return CMD_STEM + ":" + field.getStemming(schema).toStemMode(); } private boolean stemSomehow(ImmutableSDField field, Schema schema) { if (field.getStemming(schema).equals(Stemming.NONE)) return false; return isTypeOrNested(field, DataType.STRING); } private boolean normalizeAccents(ImmutableSDField field) { return !isStreaming && field.getNormalizing().doRemoveAccents() && isTypeOrNested(field, DataType.STRING); } private boolean isTypeOrNested(ImmutableSDField field, DataType type) { return field.getDataType().equals(type) || field.getDataType().equals(DataType.getArray(type)) || field.getDataType().equals(DataType.getWeightedSet(type)); } private boolean isUriField(ImmutableSDField field) { DataType fieldType = field.getDataType(); if (DataType.URI.equals(fieldType)) { return true; } if (fieldType instanceof CollectionDataType && DataType.URI.equals(((CollectionDataType)fieldType).getNestedType())) { return true; } return false; } private void addUriIndexCommands(ImmutableSDField field) { String fieldName = field.getName(); addIndexCommand(fieldName, CMD_FULLURL); addIndexCommand(fieldName, CMD_LOWERCASE); addIndexCommand(fieldName + "." 
+ fieldName, CMD_FULLURL); addIndexCommand(fieldName + "." + fieldName, CMD_LOWERCASE); addIndexCommand(fieldName + ".path", CMD_FULLURL); addIndexCommand(fieldName + ".path", CMD_LOWERCASE); addIndexCommand(fieldName + ".query", CMD_FULLURL); addIndexCommand(fieldName + ".query", CMD_LOWERCASE); addIndexCommand(fieldName + ".hostname", CMD_URLHOST); addIndexCommand(fieldName + ".hostname", CMD_LOWERCASE); Index index = field.getIndex("hostname"); if (index != null) { addIndexCommand(index, CMD_URLHOST); } } /** * Sets a command for all indices of a field */ private void addIndexCommand(Index index, String command) { addIndexCommand(index.getName(), command); } /** * Sets a command for all indices of a field */ private void addIndexCommand(ImmutableSDField field, String command) { addIndexCommand(field, command, null); } /** * Sets a command for all indices of a field */ private void addIndexCommand(ImmutableSDField field, String command, IndexOverrider overrider) { if (overrider == null || !overrider.override(field.getName(), command, field)) { addIndexCommand(field.getName(), command); } } private void addIndexCommand(String indexName, String command) { commands.add(new IndexCommand(indexName, command)); } private static void addIndexCommand(IndexInfoConfig.Indexinfo.Builder iiB, String indexName, String command) { iiB.command(new IndexInfoConfig.Indexinfo.Command.Builder().indexname(indexName).command(command)); } private void addIndexAlias(String alias, String indexName) { aliases.put(alias, indexName); } /** * Returns whether a particular command is prsent in this index info */ public boolean hasCommand(String indexName, String command) { return commands.contains(new IndexCommand(indexName, command)); } private boolean notInCommands(String index) { for (IndexCommand command : commands) { if (command.index().equals(index)) { return false; } } return true; } @Override public void getConfig(IndexInfoConfig.Builder builder) { IndexInfoConfig.Indexinfo.Builder iiB 
= new IndexInfoConfig.Indexinfo.Builder(); iiB.name(getName()); for (IndexCommand command : commands) { iiB.command( new IndexInfoConfig.Indexinfo.Command.Builder() .indexname(command.index()) .command(command.command())); } for (FieldSet fieldSet : fieldSets.values()) { if (notInCommands(fieldSet.getName())) { addFieldSetCommands(iiB, fieldSet); } } for (Map.Entry<String, String> e : aliases.entrySet()) { iiB.alias( new IndexInfoConfig.Indexinfo.Alias.Builder() .alias(e.getKey()) .indexname(e.getValue())); } builder.indexinfo(iiB); } private void addFieldSetCommands(IndexInfoConfig.Indexinfo.Builder iiB, FieldSet fieldSet) { for (String qc : fieldSet.queryCommands()) iiB.command(new IndexInfoConfig.Indexinfo.Command.Builder().indexname(fieldSet.getName()).command(qc)); boolean anyIndexing = false; boolean anyAttributing = false; boolean anyLowerCasing = false; boolean anyStemming = false; boolean anyNormalizing = false; boolean anyString = false; boolean anyInteger = false; String phraseSegmentingCommand = null; String stemmingCommand = null; Matching fieldSetMatching = fieldSet.getMatching(); for (ImmutableSDField field : fieldSet.fields()) { if (field.doesIndexing()) { anyIndexing = true; } if (field.doesAttributing()) { anyAttributing = true; } if (needLowerCase(field)) { anyLowerCasing = true; } if (stemming(field)) { anyStemming = true; stemmingCommand = CMD_STEM + ":" + getEffectiveStemming(field).toStemMode(); } if (normalizeAccents(field)) { anyNormalizing = true; } if (isTypeOrNested(field, DataType.STRING)) { anyString = true; } if (fieldSetMatching == null && field.getMatching().getType() != Matching.defaultType) { fieldSetMatching = field.getMatching(); } Optional<String> explicitPhraseSegmentingCommand = field.getQueryCommands().stream().filter(c -> c.startsWith(CMD_PHRASE_SEGMENTING)).findFirst(); if (explicitPhraseSegmentingCommand.isPresent()) { phraseSegmentingCommand = explicitPhraseSegmentingCommand.get(); } if (isTypeOrNested(field, 
DataType.INT) || isTypeOrNested(field, DataType.LONG) || isTypeOrNested(field, DataType.BYTE)) { anyInteger = true; } } if (anyIndexing && anyAttributing && fieldSet.getMatching() == null) { fieldSetMatching = new Matching(); } if (anyLowerCasing) { iiB.command( new IndexInfoConfig.Indexinfo.Command.Builder() .indexname(fieldSet.getName()) .command(CMD_LOWERCASE)); } if (hasMultiValueField(fieldSet)) { iiB.command( new IndexInfoConfig.Indexinfo.Command.Builder() .indexname(fieldSet.getName()) .command(CMD_MULTIVALUE)); } if (anyIndexing) { iiB.command( new IndexInfoConfig.Indexinfo.Command.Builder() .indexname(fieldSet.getName()) .command(CMD_INDEX)); if ( ! isExactMatch(fieldSetMatching)) { if (fieldSetMatching == null || fieldSetMatching.getType().equals(MatchType.TEXT)) { iiB.command( new IndexInfoConfig.Indexinfo.Command.Builder() .indexname(fieldSet.getName()) .command(CMD_PLAIN_TOKENS)); } if (anyStemming) { iiB.command( new IndexInfoConfig.Indexinfo.Command.Builder() .indexname(fieldSet.getName()) .command(stemmingCommand)); } if (anyNormalizing) iiB.command( new IndexInfoConfig.Indexinfo.Command.Builder() .indexname(fieldSet.getName()) .command(CMD_NORMALIZE)); if (phraseSegmentingCommand != null) iiB.command( new IndexInfoConfig.Indexinfo.Command.Builder() .indexname(fieldSet.getName()) .command(phraseSegmentingCommand)); } } else { iiB .command( new IndexInfoConfig.Indexinfo.Command.Builder() .indexname(fieldSet.getName()) .command(CMD_ATTRIBUTE)) .command( new IndexInfoConfig.Indexinfo.Command.Builder() .indexname(fieldSet.getName()) .command(CMD_INDEX)); } if (anyString) { addIndexCommand(iiB, fieldSet.getName(), CMD_STRING); } if (anyInteger) { addIndexCommand(iiB, fieldSet.getName(), CMD_INTEGER); } if (fieldSetMatching != null) { if (fieldSetMatching.getType().equals(MatchType.EXACT)) { String term = fieldSetMatching.getExactMatchTerminator(); if (term==null) term=ExactMatch.DEFAULT_EXACT_TERMINATOR; iiB.command( new 
IndexInfoConfig.Indexinfo.Command.Builder() .indexname(fieldSet.getName()) .command("exact "+term)); } else if (fieldSetMatching.getType().equals(MatchType.WORD)) { iiB.command( new IndexInfoConfig.Indexinfo.Command.Builder() .indexname(fieldSet.getName()) .command(CMD_WORD)); } else if (fieldSetMatching.getType().equals(MatchType.GRAM)) { iiB.command( new IndexInfoConfig.Indexinfo.Command.Builder() .indexname(fieldSet.getName()) .command("ngram " + fieldSetMatching.getGramSize().orElse(NGramMatch.DEFAULT_GRAM_SIZE))); } else if (fieldSetMatching.getType().equals(MatchType.TEXT)) { } } } private boolean hasMultiValueField(FieldSet fieldSet) { for (ImmutableSDField field : fieldSet.fields()) { if (field.getDataType().isMultivalue()) return true; } return false; } private Stemming getEffectiveStemming(ImmutableSDField field) { Stemming active = field.getStemming(schema); if (field.getIndex(field.getName()) != null) { if (field.getIndex(field.getName()).getStemming()!=null) { active = field.getIndex(field.getName()).getStemming(); } } if (active != null) { return active; } return Stemming.BEST; } private boolean stemming(ImmutableSDField field) { if (field.getStemming() != null) { return !field.getStemming().equals(Stemming.NONE); } if (schema.getStemming() == Stemming.NONE) return false; if (field.isImportedField()) return false; if (field.getIndex(field.getName())==null) return true; if (field.getIndex(field.getName()).getStemming()==null) return true; return !(field.getIndex(field.getName()).getStemming().equals(Stemming.NONE)); } private boolean isExactMatch(Matching m) { if (m == null) return false; if (m.getType().equals(MatchType.EXACT)) return true; if (m.getType().equals(MatchType.WORD)) return true; return false; } @Override protected String getDerivedName() { return "index-info"; } /** * An index command. Null commands are also represented, to detect consistency issues. This is an (immutable) value * object. 
*/ /** * A command which may override the command setting of a field for a particular index */ private static abstract class IndexOverrider { protected final IndexInfo owner; public IndexOverrider(IndexInfo owner) { this.owner = owner; } /** * Override the setting of this index for this field, returns true if overriden, false if this index should be * set according to the field */ public abstract boolean override(String indexName, String command, ImmutableSDField field); } private static class StemmingOverrider extends IndexOverrider { private final Schema schema; public StemmingOverrider(IndexInfo owner, Schema schema) { super(owner); this.schema = schema; } public boolean override(String indexName, String command, ImmutableSDField field) { if (schema == null) { return false; } Index index = schema.getIndex(indexName); if (index == null) { return false; } Stemming indexStemming = index.getStemming(); if (indexStemming == null) { return false; } if (Stemming.NONE.equals(indexStemming)) { } else { owner.addIndexCommand(indexName, CMD_STEM + ":" + indexStemming.toStemMode()); } return true; } } }
/**
 * Deriver producing the "index-info" config from a schema: for every index, attribute,
 * URI subfield and field set it collects the query-time command strings (lowercasing,
 * stemming, accent normalizing, tokenization hints, type markers, ...) plus index aliases.
 */
class IndexInfo extends Derived implements IndexInfoConfig.Producer {

    // Command strings emitted into the index-info config.
    private static final String CMD_ATTRIBUTE = "attribute";
    private static final String CMD_DEFAULT_POSITION = "default-position";
    private static final String CMD_DYNTEASER = "dynteaser";
    private static final String CMD_FULLURL = "fullurl";
    private static final String CMD_HIGHLIGHT = "highlight";
    private static final String CMD_INDEX = "index";
    private static final String CMD_LOWERCASE = "lowercase";
    private static final String CMD_NORMALIZE = "normalize";
    private static final String CMD_STEM = "stem";
    private static final String CMD_URLHOST = "urlhost";
    private static final String CMD_WORD = "word";
    private static final String CMD_PLAIN_TOKENS = "plain-tokens";
    private static final String CMD_MULTIVALUE = "multivalue";
    private static final String CMD_FAST_SEARCH = "fast-search";
    private static final String CMD_PREDICATE = "predicate";
    private static final String CMD_PREDICATE_BOUNDS = "predicate-bounds";
    private static final String CMD_NUMERICAL = "numerical";
    private static final String CMD_INTEGER = "integer";
    private static final String CMD_STRING = "string";
    private static final String CMD_PHRASE_SEGMENTING = "phrase-segmenting";

    private final boolean isStreaming;
    // Insertion-ordered so the emitted config is deterministic across runs.
    private final Set<IndexCommand> commands = new java.util.LinkedHashSet<>();
    private final Map<String, String> aliases = new java.util.LinkedHashMap<>();
    private final Map<String, FieldSet> fieldSets;
    private Schema schema;

    public IndexInfo(Schema schema, boolean isStreaming) {
        this.isStreaming = isStreaming;
        this.fieldSets = schema.fieldSets().userFieldSets();
        // Every document exposes the synthetic 'sddocname' index.
        addIndexCommand("sddocname", CMD_INDEX);
        addIndexCommand("sddocname", CMD_WORD);
        derive(schema);
    }

    @Override
    protected void derive(Schema schema) {
        super.derive(schema);
        this.schema = schema;
        // Resolve field-set member names into their actual field objects.
        for (FieldSet fs : fieldSets.values()) {
            for (String fieldName : fs.getFieldNames()) {
                fs.fields().add(schema.getField(fieldName));
            }
        }
        for (Index index : schema.getExplicitIndices()) {
            derive(index, schema);
        }
        // Summary fields requiring a query-time transform get their own commands.
        for (SummaryField summaryField : schema.getUniqueNamedSummaryFields().values()) {
            if (summaryField.getTransform().isTeaser()) {
                addIndexCommand(summaryField.getName(), CMD_DYNTEASER);
            }
            if (summaryField.getTransform().isBolded()) {
                addIndexCommand(summaryField.getName(), CMD_HIGHLIGHT);
            }
            var sourceField = schema.getField(summaryField.getSourceField());
            if (sourceField != null && sourceField.getMatching().getType().equals(MatchType.GRAM)) {
                addIndexCommand(summaryField.getName(),
                                "ngram " + (sourceField.getMatching().getGramSize().orElse(NGramMatch.DEFAULT_GRAM_SIZE)));
            }
        }
    }

    private static boolean isPositionField(ImmutableSDField field) {
        return GeoPos.isAnyPos(field);
    }

    @Override
    protected void derive(ImmutableSDField field, Schema schema) {
        derive(field, schema, false);
    }

    /**
     * Derives the commands of one field, recursing into struct subfields.
     *
     * @param inPosition whether this field is nested inside a position field
     */
    protected void derive(ImmutableSDField field, Schema schema, boolean inPosition) {
        if (field.getDataType().equals(DataType.PREDICATE)) {
            addIndexCommand(field, CMD_PREDICATE);
            Index index = field.getIndex(field.getName());
            if (index != null) {
                BooleanIndexDefinition options = index.getBooleanIndexDefiniton();
                if (options.hasLowerBound() || options.hasUpperBound()) {
                    // Open-ended bounds are encoded as an empty string on that side.
                    addIndexCommand(field.getName(), CMD_PREDICATE_BOUNDS + " [" +
                                    (options.hasLowerBound() ? Long.toString(options.getLowerBound()) : "") + ".." +
                                    (options.hasUpperBound() ? Long.toString(options.getUpperBound()) : "") + "]");
                }
            }
        }

        for (Map.Entry<String, String> e : field.getAliasToName().entrySet()) {
            String alias = e.getKey();
            String name = e.getValue();
            addIndexAlias(alias, name);
        }

        boolean isPosition = isPositionField(field);
        if (field.usesStructOrMap()) {
            // Struct subfields inherit the position context of their parent.
            for (ImmutableSDField structField : field.getStructFields()) {
                derive(structField, schema, isPosition);
            }
        }
        if (isPosition) {
            addIndexCommand(field.getName(), CMD_DEFAULT_POSITION);
        }

        for (var index : field.getIndices().values()) {
            addIndexCommand(index.getName(), CMD_INDEX);
        }
        if (needLowerCase(field)) {
            addIndexCommand(field, CMD_LOWERCASE);
        }
        if (field.getDataType().isMultivalue()) {
            addIndexCommand(field, CMD_MULTIVALUE);
        }

        Attribute attribute = field.getAttribute();
        if ((field.doesAttributing() || (attribute != null && !inPosition)) && !field.doesIndexing()) {
            addIndexCommand(field.getName(), CMD_ATTRIBUTE);
            if (attribute != null && attribute.isFastSearch())
                addIndexCommand(field.getName(), CMD_FAST_SEARCH);
        } else if (field.doesIndexing()) {
            if (stemSomehow(field, schema)) {
                // The stem command may be replaced per explicit index, see StemmingOverrider.
                addIndexCommand(field, stemCmd(field, schema), new StemmingOverrider(this, schema));
            }
            if (normalizeAccents(field)) {
                addIndexCommand(field, CMD_NORMALIZE);
            }
            if (field.getMatching() == null || field.getMatching().getType().equals(MatchType.TEXT)) {
                addIndexCommand(field, CMD_PLAIN_TOKENS);
            }
        }

        if (isUriField(field)) {
            addUriIndexCommands(field);
        }

        if (field.getDataType().getPrimitiveType() instanceof NumericDataType) {
            addIndexCommand(field, CMD_NUMERICAL);
            if (isTypeOrNested(field, DataType.INT) || isTypeOrNested(field, DataType.LONG) ||
                isTypeOrNested(field, DataType.BYTE)) {
                addIndexCommand(field, CMD_INTEGER);
            }
        }
        if (isTypeOrNested(field, DataType.STRING)) {
            addIndexCommand(field, CMD_STRING);
        }

        for (String command : field.getQueryCommands()) {
            addIndexCommand(field, command);
        }
    }

    /** Returns whether this type is, or (recursively) contains, a string type. */
    private static boolean isAnyChildString(DataType dataType) {
        PrimitiveDataType primitive = dataType.getPrimitiveType();
        if (primitive == PrimitiveDataType.STRING) return true;
        if (primitive != null) return false;
        if (dataType instanceof StructuredDataType structured) {
            for (Field field : structured.getFields()) {
                if (isAnyChildString(field.getDataType())) return true;
            }
        } else if (dataType instanceof MapDataType mapType) {
            return isAnyChildString(mapType.getKeyType()) || isAnyChildString(mapType.getValueType());
        }
        return false;
    }

    private static boolean needLowerCase(ImmutableSDField field) {
        return ( field.doesIndexing() && field.getMatching().getCase() != Case.CASED)
               || field.doesLowerCasing()
               || ((field.doesAttributing() || (field.getAttribute() != null))
                   && isAnyChildString(field.getDataType())
                   && field.getMatching().getCase().equals(Case.UNCASED));
    }

    static String stemCmd(ImmutableSDField field, Schema schema) {
        return CMD_STEM + ":" + field.getStemming(schema).toStemMode();
    }

    private boolean stemSomehow(ImmutableSDField field, Schema schema) {
        if (field.getStemming(schema).equals(Stemming.NONE)) return false;
        return isTypeOrNested(field, DataType.STRING);
    }

    private boolean normalizeAccents(ImmutableSDField field) {
        // Only string fields are normalized, and never in streaming mode.
        return !isStreaming && field.getNormalizing().doRemoveAccents() && isTypeOrNested(field, DataType.STRING);
    }

    /** Returns whether the field is of the given type, or an array or weighted set of it. */
    private boolean isTypeOrNested(ImmutableSDField field, DataType type) {
        return field.getDataType().equals(type) || field.getDataType().equals(DataType.getArray(type)) ||
               field.getDataType().equals(DataType.getWeightedSet(type));
    }

    private boolean isUriField(ImmutableSDField field) {
        DataType fieldType = field.getDataType();
        if (DataType.URI.equals(fieldType)) {
            return true;
        }
        return (fieldType instanceof CollectionDataType collectionFieldType) &&
               DataType.URI.equals(collectionFieldType.getNestedType());
    }

    /** Adds the commands of the synthetic subfields of a URI field. */
    private void addUriIndexCommands(ImmutableSDField field) {
        String fieldName = field.getName();
        addIndexCommand(fieldName, CMD_FULLURL);
        addIndexCommand(fieldName, CMD_LOWERCASE);
        addIndexCommand(fieldName + "." + fieldName, CMD_FULLURL);
        addIndexCommand(fieldName + "." + fieldName, CMD_LOWERCASE);
        addIndexCommand(fieldName + ".path", CMD_FULLURL);
        addIndexCommand(fieldName + ".path", CMD_LOWERCASE);
        addIndexCommand(fieldName + ".query", CMD_FULLURL);
        addIndexCommand(fieldName + ".query", CMD_LOWERCASE);
        addIndexCommand(fieldName + ".hostname", CMD_URLHOST);
        addIndexCommand(fieldName + ".hostname", CMD_LOWERCASE);

        // An explicitly declared hostname index also gets the urlhost command.
        Index index = field.getIndex("hostname");
        if (index != null) {
            addIndexCommand(index, CMD_URLHOST);
        }
    }

    /** Sets a command for the given index. */
    private void addIndexCommand(Index index, String command) {
        addIndexCommand(index.getName(), command);
    }

    /** Sets a command for all indices of a field. */
    private void addIndexCommand(ImmutableSDField field, String command) {
        addIndexCommand(field, command, null);
    }

    /** Sets a command for all indices of a field, unless the overrider handles it itself. */
    private void addIndexCommand(ImmutableSDField field, String command, IndexOverrider overrider) {
        if (overrider == null || !overrider.override(field.getName(), command, field)) {
            addIndexCommand(field.getName(), command);
        }
    }

    private void addIndexCommand(String indexName, String command) {
        commands.add(new IndexCommand(indexName, command));
    }

    /** Appends one (indexname, command) entry to the config builder. */
    private static void addIndexCommand(IndexInfoConfig.Indexinfo.Builder iiB, String indexName, String command) {
        iiB.command(new IndexInfoConfig.Indexinfo.Command.Builder().indexname(indexName).command(command));
    }

    private void addIndexAlias(String alias, String indexName) {
        aliases.put(alias, indexName);
    }

    /** Returns whether a particular command is present in this index info. */
    public boolean hasCommand(String indexName, String command) {
        return commands.contains(new IndexCommand(indexName, command));
    }

    private boolean notInCommands(String index) {
        for (IndexCommand command : commands) {
            if (command.index().equals(index)) {
                return false;
            }
        }
        return true;
    }

    @Override
    public void getConfig(IndexInfoConfig.Builder builder) {
        IndexInfoConfig.Indexinfo.Builder iiB = new IndexInfoConfig.Indexinfo.Builder();
        iiB.name(getName());
        for (IndexCommand command : commands) {
            addIndexCommand(iiB, command.index(), command.command());
        }
        // Field sets without explicit commands derive theirs from their member fields.
        for (FieldSet fieldSet : fieldSets.values()) {
            if (notInCommands(fieldSet.getName())) {
                addFieldSetCommands(iiB, fieldSet);
            }
        }
        for (Map.Entry<String, String> e : aliases.entrySet()) {
            iiB.alias(new IndexInfoConfig.Indexinfo.Alias.Builder().alias(e.getKey()).indexname(e.getValue()));
        }
        builder.indexinfo(iiB);
    }

    /**
     * Derives the commands of a field set from its member fields: a command is added
     * to the set when at least one member field requires it.
     */
    private void addFieldSetCommands(IndexInfoConfig.Indexinfo.Builder iiB, FieldSet fieldSet) {
        for (String qc : fieldSet.queryCommands()) {
            addIndexCommand(iiB, fieldSet.getName(), qc);
        }

        // Scan all member fields, recording which per-field properties occur at all.
        boolean anyIndexing = false;
        boolean anyAttributing = false;
        boolean anyLowerCasing = false;
        boolean anyStemming = false;
        boolean anyNormalizing = false;
        boolean anyString = false;
        boolean anyInteger = false;
        String phraseSegmentingCommand = null;
        String stemmingCommand = null;
        Matching fieldSetMatching = fieldSet.getMatching();
        for (ImmutableSDField field : fieldSet.fields()) {
            if (field.doesIndexing()) {
                anyIndexing = true;
            }
            if (field.doesAttributing()) {
                anyAttributing = true;
            }
            if (needLowerCase(field)) {
                anyLowerCasing = true;
            }
            if (stemming(field)) {
                anyStemming = true;
                stemmingCommand = CMD_STEM + ":" + getEffectiveStemming(field).toStemMode();
            }
            if (normalizeAccents(field)) {
                anyNormalizing = true;
            }
            if (isTypeOrNested(field, DataType.STRING)) {
                anyString = true;
            }
            // First member with non-default matching decides the set's matching.
            if (fieldSetMatching == null && field.getMatching().getType() != Matching.defaultType) {
                fieldSetMatching = field.getMatching();
            }
            Optional<String> explicitPhraseSegmentingCommand =
                    field.getQueryCommands().stream().filter(c -> c.startsWith(CMD_PHRASE_SEGMENTING)).findFirst();
            if (explicitPhraseSegmentingCommand.isPresent()) {
                phraseSegmentingCommand = explicitPhraseSegmentingCommand.get();
            }
            if (isTypeOrNested(field, DataType.INT) || isTypeOrNested(field, DataType.LONG) ||
                isTypeOrNested(field, DataType.BYTE)) {
                anyInteger = true;
            }
        }

        // A mix of index and attribute fields without explicit matching gets default matching.
        if (anyIndexing && anyAttributing && fieldSet.getMatching() == null) {
            fieldSetMatching = new Matching();
        }

        if (anyLowerCasing) {
            addIndexCommand(iiB, fieldSet.getName(), CMD_LOWERCASE);
        }
        if (hasMultiValueField(fieldSet)) {
            addIndexCommand(iiB, fieldSet.getName(), CMD_MULTIVALUE);
        }

        if (anyIndexing) {
            addIndexCommand(iiB, fieldSet.getName(), CMD_INDEX);
            if ( ! isExactMatch(fieldSetMatching)) {
                if (fieldSetMatching == null || fieldSetMatching.getType().equals(MatchType.TEXT)) {
                    addIndexCommand(iiB, fieldSet.getName(), CMD_PLAIN_TOKENS);
                }
                if (anyStemming) {
                    addIndexCommand(iiB, fieldSet.getName(), stemmingCommand);
                }
                if (anyNormalizing)
                    addIndexCommand(iiB, fieldSet.getName(), CMD_NORMALIZE);
                if (phraseSegmentingCommand != null)
                    addIndexCommand(iiB, fieldSet.getName(), phraseSegmentingCommand);
            }
        } else {
            // Attribute-only field set.
            addIndexCommand(iiB, fieldSet.getName(), CMD_ATTRIBUTE);
            addIndexCommand(iiB, fieldSet.getName(), CMD_INDEX);
        }

        if (anyString) {
            addIndexCommand(iiB, fieldSet.getName(), CMD_STRING);
        }
        if (anyInteger) {
            addIndexCommand(iiB, fieldSet.getName(), CMD_INTEGER);
        }

        if (fieldSetMatching != null) {
            if (fieldSetMatching.getType().equals(MatchType.EXACT)) {
                String term = fieldSetMatching.getExactMatchTerminator();
                if (term==null) term=ExactMatch.DEFAULT_EXACT_TERMINATOR;
                addIndexCommand(iiB, fieldSet.getName(), "exact "+term);
            } else if (fieldSetMatching.getType().equals(MatchType.WORD)) {
                addIndexCommand(iiB, fieldSet.getName(), CMD_WORD);
            } else if (fieldSetMatching.getType().equals(MatchType.GRAM)) {
                addIndexCommand(iiB, fieldSet.getName(),
                                "ngram " + fieldSetMatching.getGramSize().orElse(NGramMatch.DEFAULT_GRAM_SIZE));
            } else if (fieldSetMatching.getType().equals(MatchType.TEXT)) {
                // TEXT matching needs no extra command.
            }
        }
    }

    private boolean hasMultiValueField(FieldSet fieldSet) {
        for (ImmutableSDField field : fieldSet.fields()) {
            if (field.getDataType().isMultivalue()) return true;
        }
        return false;
    }

    /** Returns the stemming in effect for a field: an index-level override wins, BEST is the fallback. */
    private Stemming getEffectiveStemming(ImmutableSDField field) {
        Stemming active = field.getStemming(schema);
        if (field.getIndex(field.getName()) != null) {
            if (field.getIndex(field.getName()).getStemming()!=null) {
                active = field.getIndex(field.getName()).getStemming();
            }
        }
        return Objects.requireNonNullElse(active, Stemming.BEST);
    }

    /** Returns whether this field should be stemmed at query time. */
    private boolean stemming(ImmutableSDField field) {
        if (field.getStemming() != null) {
            return !field.getStemming().equals(Stemming.NONE);
        }
        if (schema.getStemming() == Stemming.NONE) return false;
        if (field.isImportedField()) return false;
        if (field.getIndex(field.getName())==null) return true;
        if (field.getIndex(field.getName()).getStemming()==null) return true;
        return !(field.getIndex(field.getName()).getStemming().equals(Stemming.NONE));
    }

    private boolean isExactMatch(Matching m) {
        if (m == null) return false;
        return m.getType().equals(MatchType.EXACT) || m.getType().equals(MatchType.WORD);
    }

    @Override
    protected String getDerivedName() { return "index-info"; }

    /**
     * An index command. Null commands are also represented, to detect consistency issues. This is an (immutable) value
     * object.
     */

    /** A command which may override the command setting of a field for a particular index. */
    private static abstract class IndexOverrider {

        protected final IndexInfo owner;

        public IndexOverrider(IndexInfo owner) {
            this.owner = owner;
        }

        /**
         * Override the setting of this index for this field, returns true if overriden, false if this index should be
         * set according to the field
         */
        public abstract boolean override(String indexName, String command, ImmutableSDField field);

    }

    /** Replaces a field's stem command when its explicit index declares its own stemming setting. */
    private static class StemmingOverrider extends IndexOverrider {

        private final Schema schema;

        public StemmingOverrider(IndexInfo owner, Schema schema) {
            super(owner);
            this.schema = schema;
        }

        public boolean override(String indexName, String command, ImmutableSDField field) {
            if (schema == null) {
                return false;
            }
            Index index = schema.getIndex(indexName);
            if (index == null) {
                return false;
            }
            Stemming indexStemming = index.getStemming();
            if (indexStemming == null) {
                return false;
            }
            // Stemming.NONE suppresses the command entirely; any other setting replaces it.
            if ( ! Stemming.NONE.equals(indexStemming)) {
                owner.addIndexCommand(indexName, CMD_STEM + ":" + indexStemming.toStemMode());
            }
            return true;
        }

    }

}
// Fixed
/**
 * An index command: an immutable (index name, command string) value pair.
 * Null commands are also represented, to detect consistency issues.
 *
 * <p>Equality and hash code are the record-generated component-wise implementations;
 * the previous hand-written {@code equals} duplicated them and has been removed.</p>
 */
public record IndexCommand(String index, String command) {

    /** Returns true if this is the null command (do nothing). */
    public boolean isNull() {
        return command.isEmpty();
    }

    @Override
    public String toString() {
        return "index command " + command + " on index " + index;
    }

}
public String toString() {
/**
 * An index command: an immutable (index name, command string) value pair.
 * Null commands are also represented, to detect consistency issues.
 */
public record IndexCommand(String index, String command) {

    /** Returns true if this is the null command (do nothing). */
    public boolean isNull() {
        return command.isEmpty();
    }

    @Override
    public boolean equals(Object object) {
        if (object instanceof IndexCommand other) {
            return index.equals(other.index) && command.equals(other.command);
        }
        return false;
    }

    @Override
    public String toString() {
        return "index command " + command + " on index " + index;
    }

}
class IndexInfo extends Derived implements IndexInfoConfig.Producer { private static final String CMD_ATTRIBUTE = "attribute"; private static final String CMD_DEFAULT_POSITION = "default-position"; private static final String CMD_DYNTEASER = "dynteaser"; private static final String CMD_FULLURL = "fullurl"; private static final String CMD_HIGHLIGHT = "highlight"; private static final String CMD_INDEX = "index"; private static final String CMD_LOWERCASE = "lowercase"; private static final String CMD_NORMALIZE = "normalize"; private static final String CMD_STEM = "stem"; private static final String CMD_URLHOST = "urlhost"; private static final String CMD_WORD = "word"; private static final String CMD_PLAIN_TOKENS = "plain-tokens"; private static final String CMD_MULTIVALUE = "multivalue"; private static final String CMD_FAST_SEARCH = "fast-search"; private static final String CMD_PREDICATE = "predicate"; private static final String CMD_PREDICATE_BOUNDS = "predicate-bounds"; private static final String CMD_NUMERICAL = "numerical"; private static final String CMD_INTEGER = "integer"; private static final String CMD_STRING = "string"; private static final String CMD_PHRASE_SEGMENTING = "phrase-segmenting"; private final boolean isStreaming; private final Set<IndexCommand> commands = new java.util.LinkedHashSet<>(); private final Map<String, String> aliases = new java.util.LinkedHashMap<>(); private final Map<String, FieldSet> fieldSets; private Schema schema; public IndexInfo(Schema schema, boolean isStreaming) { this.isStreaming = isStreaming; this.fieldSets = schema.fieldSets().userFieldSets(); addIndexCommand("sddocname", CMD_INDEX); addIndexCommand("sddocname", CMD_WORD); derive(schema); } @Override protected void derive(Schema schema) { super.derive(schema); this.schema = schema; for (FieldSet fs : fieldSets.values()) { for (String fieldName : fs.getFieldNames()) { fs.fields().add(schema.getField(fieldName)); } } for (Index index : schema.getExplicitIndices()) { 
derive(index, schema); } for (SummaryField summaryField : schema.getUniqueNamedSummaryFields().values()) { if (summaryField.getTransform().isTeaser()) { addIndexCommand(summaryField.getName(), CMD_DYNTEASER); } if (summaryField.getTransform().isBolded()) { addIndexCommand(summaryField.getName(), CMD_HIGHLIGHT); } var sourceField = schema.getField(summaryField.getSourceField()); if (sourceField != null && sourceField.getMatching().getType().equals(MatchType.GRAM)) { addIndexCommand(summaryField.getName(), "ngram " + (sourceField.getMatching().getGramSize().orElse(NGramMatch.DEFAULT_GRAM_SIZE))); } } } private static boolean isPositionField(ImmutableSDField field) { return GeoPos.isAnyPos(field); } @Override protected void derive(ImmutableSDField field, Schema schema) { derive(field, schema, false); } protected void derive(ImmutableSDField field, Schema schema, boolean inPosition) { if (field.getDataType().equals(DataType.PREDICATE)) { addIndexCommand(field, CMD_PREDICATE); Index index = field.getIndex(field.getName()); if (index != null) { BooleanIndexDefinition options = index.getBooleanIndexDefiniton(); if (options.hasLowerBound() || options.hasUpperBound()) { addIndexCommand(field.getName(), CMD_PREDICATE_BOUNDS + " [" + (options.hasLowerBound() ? Long.toString(options.getLowerBound()) : "") + ".." + (options.hasUpperBound() ? 
Long.toString(options.getUpperBound()) : "") + "]"); } } } for (Map.Entry<String, String> e : field.getAliasToName().entrySet()) { String alias = e.getKey(); String name = e.getValue(); addIndexAlias(alias, name); } boolean isPosition = isPositionField(field); if (field.usesStructOrMap()) { for (ImmutableSDField structField : field.getStructFields()) { derive(structField, schema, isPosition); } } if (isPosition) { addIndexCommand(field.getName(), CMD_DEFAULT_POSITION); } for (var index : field.getIndices().values()) { addIndexCommand(index.getName(), CMD_INDEX); } if (needLowerCase(field)) { addIndexCommand(field, CMD_LOWERCASE); } if (field.getDataType().isMultivalue()) { addIndexCommand(field, CMD_MULTIVALUE); } Attribute attribute = field.getAttribute(); if ((field.doesAttributing() || (attribute != null && !inPosition)) && !field.doesIndexing()) { addIndexCommand(field.getName(), CMD_ATTRIBUTE); if (attribute != null && attribute.isFastSearch()) addIndexCommand(field.getName(), CMD_FAST_SEARCH); } else if (field.doesIndexing()) { if (stemSomehow(field, schema)) { addIndexCommand(field, stemCmd(field, schema), new StemmingOverrider(this, schema)); } if (normalizeAccents(field)) { addIndexCommand(field, CMD_NORMALIZE); } if (field.getMatching() == null || field.getMatching().getType().equals(MatchType.TEXT)) { addIndexCommand(field, CMD_PLAIN_TOKENS); } } if (isUriField(field)) { addUriIndexCommands(field); } if (field.getDataType().getPrimitiveType() instanceof NumericDataType) { addIndexCommand(field, CMD_NUMERICAL); if (isTypeOrNested(field, DataType.INT) || isTypeOrNested(field, DataType.LONG) || isTypeOrNested(field, DataType.BYTE)) { addIndexCommand(field, CMD_INTEGER); } } if (isTypeOrNested(field, DataType.STRING)) { addIndexCommand(field, CMD_STRING); } for (String command : field.getQueryCommands()) { addIndexCommand(field, command); } } private static boolean isAnyChildString(DataType dataType) { PrimitiveDataType primitive = 
dataType.getPrimitiveType(); if (primitive == PrimitiveDataType.STRING) return true; if (primitive != null) return false; if (dataType instanceof StructuredDataType structured) { for (Field field : structured.getFields()) { if (isAnyChildString(field.getDataType())) return true; } } else if (dataType instanceof MapDataType mapType) { return isAnyChildString(mapType.getKeyType()) || isAnyChildString(mapType.getValueType()); } return false; } private static boolean needLowerCase(ImmutableSDField field) { return ( field.doesIndexing() && field.getMatching().getCase() != Case.CASED) || field.doesLowerCasing() || ((field.doesAttributing() || (field.getAttribute() != null)) && isAnyChildString(field.getDataType()) && field.getMatching().getCase().equals(Case.UNCASED)); } static String stemCmd(ImmutableSDField field, Schema schema) { return CMD_STEM + ":" + field.getStemming(schema).toStemMode(); } private boolean stemSomehow(ImmutableSDField field, Schema schema) { if (field.getStemming(schema).equals(Stemming.NONE)) return false; return isTypeOrNested(field, DataType.STRING); } private boolean normalizeAccents(ImmutableSDField field) { return !isStreaming && field.getNormalizing().doRemoveAccents() && isTypeOrNested(field, DataType.STRING); } private boolean isTypeOrNested(ImmutableSDField field, DataType type) { return field.getDataType().equals(type) || field.getDataType().equals(DataType.getArray(type)) || field.getDataType().equals(DataType.getWeightedSet(type)); } private boolean isUriField(ImmutableSDField field) { DataType fieldType = field.getDataType(); if (DataType.URI.equals(fieldType)) { return true; } if (fieldType instanceof CollectionDataType && DataType.URI.equals(((CollectionDataType)fieldType).getNestedType())) { return true; } return false; } private void addUriIndexCommands(ImmutableSDField field) { String fieldName = field.getName(); addIndexCommand(fieldName, CMD_FULLURL); addIndexCommand(fieldName, CMD_LOWERCASE); addIndexCommand(fieldName + "." 
+ fieldName, CMD_FULLURL); addIndexCommand(fieldName + "." + fieldName, CMD_LOWERCASE); addIndexCommand(fieldName + ".path", CMD_FULLURL); addIndexCommand(fieldName + ".path", CMD_LOWERCASE); addIndexCommand(fieldName + ".query", CMD_FULLURL); addIndexCommand(fieldName + ".query", CMD_LOWERCASE); addIndexCommand(fieldName + ".hostname", CMD_URLHOST); addIndexCommand(fieldName + ".hostname", CMD_LOWERCASE); Index index = field.getIndex("hostname"); if (index != null) { addIndexCommand(index, CMD_URLHOST); } } /** * Sets a command for all indices of a field */ private void addIndexCommand(Index index, String command) { addIndexCommand(index.getName(), command); } /** * Sets a command for all indices of a field */ private void addIndexCommand(ImmutableSDField field, String command) { addIndexCommand(field, command, null); } /** * Sets a command for all indices of a field */ private void addIndexCommand(ImmutableSDField field, String command, IndexOverrider overrider) { if (overrider == null || !overrider.override(field.getName(), command, field)) { addIndexCommand(field.getName(), command); } } private void addIndexCommand(String indexName, String command) { commands.add(new IndexCommand(indexName, command)); } private static void addIndexCommand(IndexInfoConfig.Indexinfo.Builder iiB, String indexName, String command) { iiB.command(new IndexInfoConfig.Indexinfo.Command.Builder().indexname(indexName).command(command)); } private void addIndexAlias(String alias, String indexName) { aliases.put(alias, indexName); } /** * Returns whether a particular command is prsent in this index info */ public boolean hasCommand(String indexName, String command) { return commands.contains(new IndexCommand(indexName, command)); } private boolean notInCommands(String index) { for (IndexCommand command : commands) { if (command.index().equals(index)) { return false; } } return true; } @Override public void getConfig(IndexInfoConfig.Builder builder) { IndexInfoConfig.Indexinfo.Builder iiB 
= new IndexInfoConfig.Indexinfo.Builder(); iiB.name(getName()); for (IndexCommand command : commands) { iiB.command( new IndexInfoConfig.Indexinfo.Command.Builder() .indexname(command.index()) .command(command.command())); } for (FieldSet fieldSet : fieldSets.values()) { if (notInCommands(fieldSet.getName())) { addFieldSetCommands(iiB, fieldSet); } } for (Map.Entry<String, String> e : aliases.entrySet()) { iiB.alias( new IndexInfoConfig.Indexinfo.Alias.Builder() .alias(e.getKey()) .indexname(e.getValue())); } builder.indexinfo(iiB); } private void addFieldSetCommands(IndexInfoConfig.Indexinfo.Builder iiB, FieldSet fieldSet) { for (String qc : fieldSet.queryCommands()) iiB.command(new IndexInfoConfig.Indexinfo.Command.Builder().indexname(fieldSet.getName()).command(qc)); boolean anyIndexing = false; boolean anyAttributing = false; boolean anyLowerCasing = false; boolean anyStemming = false; boolean anyNormalizing = false; boolean anyString = false; boolean anyInteger = false; String phraseSegmentingCommand = null; String stemmingCommand = null; Matching fieldSetMatching = fieldSet.getMatching(); for (ImmutableSDField field : fieldSet.fields()) { if (field.doesIndexing()) { anyIndexing = true; } if (field.doesAttributing()) { anyAttributing = true; } if (needLowerCase(field)) { anyLowerCasing = true; } if (stemming(field)) { anyStemming = true; stemmingCommand = CMD_STEM + ":" + getEffectiveStemming(field).toStemMode(); } if (normalizeAccents(field)) { anyNormalizing = true; } if (isTypeOrNested(field, DataType.STRING)) { anyString = true; } if (fieldSetMatching == null && field.getMatching().getType() != Matching.defaultType) { fieldSetMatching = field.getMatching(); } Optional<String> explicitPhraseSegmentingCommand = field.getQueryCommands().stream().filter(c -> c.startsWith(CMD_PHRASE_SEGMENTING)).findFirst(); if (explicitPhraseSegmentingCommand.isPresent()) { phraseSegmentingCommand = explicitPhraseSegmentingCommand.get(); } if (isTypeOrNested(field, 
DataType.INT) || isTypeOrNested(field, DataType.LONG) || isTypeOrNested(field, DataType.BYTE)) { anyInteger = true; } } if (anyIndexing && anyAttributing && fieldSet.getMatching() == null) { fieldSetMatching = new Matching(); } if (anyLowerCasing) { iiB.command( new IndexInfoConfig.Indexinfo.Command.Builder() .indexname(fieldSet.getName()) .command(CMD_LOWERCASE)); } if (hasMultiValueField(fieldSet)) { iiB.command( new IndexInfoConfig.Indexinfo.Command.Builder() .indexname(fieldSet.getName()) .command(CMD_MULTIVALUE)); } if (anyIndexing) { iiB.command( new IndexInfoConfig.Indexinfo.Command.Builder() .indexname(fieldSet.getName()) .command(CMD_INDEX)); if ( ! isExactMatch(fieldSetMatching)) { if (fieldSetMatching == null || fieldSetMatching.getType().equals(MatchType.TEXT)) { iiB.command( new IndexInfoConfig.Indexinfo.Command.Builder() .indexname(fieldSet.getName()) .command(CMD_PLAIN_TOKENS)); } if (anyStemming) { iiB.command( new IndexInfoConfig.Indexinfo.Command.Builder() .indexname(fieldSet.getName()) .command(stemmingCommand)); } if (anyNormalizing) iiB.command( new IndexInfoConfig.Indexinfo.Command.Builder() .indexname(fieldSet.getName()) .command(CMD_NORMALIZE)); if (phraseSegmentingCommand != null) iiB.command( new IndexInfoConfig.Indexinfo.Command.Builder() .indexname(fieldSet.getName()) .command(phraseSegmentingCommand)); } } else { iiB .command( new IndexInfoConfig.Indexinfo.Command.Builder() .indexname(fieldSet.getName()) .command(CMD_ATTRIBUTE)) .command( new IndexInfoConfig.Indexinfo.Command.Builder() .indexname(fieldSet.getName()) .command(CMD_INDEX)); } if (anyString) { addIndexCommand(iiB, fieldSet.getName(), CMD_STRING); } if (anyInteger) { addIndexCommand(iiB, fieldSet.getName(), CMD_INTEGER); } if (fieldSetMatching != null) { if (fieldSetMatching.getType().equals(MatchType.EXACT)) { String term = fieldSetMatching.getExactMatchTerminator(); if (term==null) term=ExactMatch.DEFAULT_EXACT_TERMINATOR; iiB.command( new 
IndexInfoConfig.Indexinfo.Command.Builder() .indexname(fieldSet.getName()) .command("exact "+term)); } else if (fieldSetMatching.getType().equals(MatchType.WORD)) { iiB.command( new IndexInfoConfig.Indexinfo.Command.Builder() .indexname(fieldSet.getName()) .command(CMD_WORD)); } else if (fieldSetMatching.getType().equals(MatchType.GRAM)) { iiB.command( new IndexInfoConfig.Indexinfo.Command.Builder() .indexname(fieldSet.getName()) .command("ngram " + fieldSetMatching.getGramSize().orElse(NGramMatch.DEFAULT_GRAM_SIZE))); } else if (fieldSetMatching.getType().equals(MatchType.TEXT)) { } } } private boolean hasMultiValueField(FieldSet fieldSet) { for (ImmutableSDField field : fieldSet.fields()) { if (field.getDataType().isMultivalue()) return true; } return false; } private Stemming getEffectiveStemming(ImmutableSDField field) { Stemming active = field.getStemming(schema); if (field.getIndex(field.getName()) != null) { if (field.getIndex(field.getName()).getStemming()!=null) { active = field.getIndex(field.getName()).getStemming(); } } if (active != null) { return active; } return Stemming.BEST; } private boolean stemming(ImmutableSDField field) { if (field.getStemming() != null) { return !field.getStemming().equals(Stemming.NONE); } if (schema.getStemming() == Stemming.NONE) return false; if (field.isImportedField()) return false; if (field.getIndex(field.getName())==null) return true; if (field.getIndex(field.getName()).getStemming()==null) return true; return !(field.getIndex(field.getName()).getStemming().equals(Stemming.NONE)); } private boolean isExactMatch(Matching m) { if (m == null) return false; if (m.getType().equals(MatchType.EXACT)) return true; if (m.getType().equals(MatchType.WORD)) return true; return false; } @Override protected String getDerivedName() { return "index-info"; } /** * An index command. Null commands are also represented, to detect consistency issues. This is an (immutable) value * object. 
*/ /** * A command which may override the command setting of a field for a particular index */ private static abstract class IndexOverrider { protected final IndexInfo owner; public IndexOverrider(IndexInfo owner) { this.owner = owner; } /** * Override the setting of this index for this field, returns true if overriden, false if this index should be * set according to the field */ public abstract boolean override(String indexName, String command, ImmutableSDField field); } private static class StemmingOverrider extends IndexOverrider { private final Schema schema; public StemmingOverrider(IndexInfo owner, Schema schema) { super(owner); this.schema = schema; } public boolean override(String indexName, String command, ImmutableSDField field) { if (schema == null) { return false; } Index index = schema.getIndex(indexName); if (index == null) { return false; } Stemming indexStemming = index.getStemming(); if (indexStemming == null) { return false; } if (Stemming.NONE.equals(indexStemming)) { } else { owner.addIndexCommand(indexName, CMD_STEM + ":" + indexStemming.toStemMode()); } return true; } } }
/**
 * Derives the "index-info" configuration for a schema: while walking the schema's fields,
 * explicit indices, summary fields and user-defined field sets, it accumulates per-index-name
 * commands (e.g. "index", "attribute", "lowercase", "stem:...") and aliases, and emits them as
 * {@link IndexInfoConfig} via {@code getConfig}. Commands are kept in insertion order
 * (LinkedHashSet/LinkedHashMap) so the produced config is deterministic.
 * NOTE(review): this block is reflowed source; only comments were added/corrected here.
 */
class IndexInfo extends Derived implements IndexInfoConfig.Producer { private static final String CMD_ATTRIBUTE = "attribute"; private static final String CMD_DEFAULT_POSITION = "default-position"; private static final String CMD_DYNTEASER = "dynteaser"; private static final String CMD_FULLURL = "fullurl"; private static final String CMD_HIGHLIGHT = "highlight"; private static final String CMD_INDEX = "index"; private static final String CMD_LOWERCASE = "lowercase"; private static final String CMD_NORMALIZE = "normalize"; private static final String CMD_STEM = "stem"; private static final String CMD_URLHOST = "urlhost"; private static final String CMD_WORD = "word"; private static final String CMD_PLAIN_TOKENS = "plain-tokens"; private static final String CMD_MULTIVALUE = "multivalue"; private static final String CMD_FAST_SEARCH = "fast-search"; private static final String CMD_PREDICATE = "predicate"; private static final String CMD_PREDICATE_BOUNDS = "predicate-bounds"; private static final String CMD_NUMERICAL = "numerical"; private static final String CMD_INTEGER = "integer"; private static final String CMD_STRING = "string"; private static final String CMD_PHRASE_SEGMENTING = "phrase-segmenting"; private final boolean isStreaming; private final Set<IndexCommand> commands = new java.util.LinkedHashSet<>(); private final Map<String, String> aliases = new java.util.LinkedHashMap<>(); private final Map<String, FieldSet> fieldSets; private Schema schema; public IndexInfo(Schema schema, boolean isStreaming) { this.isStreaming = isStreaming; this.fieldSets = schema.fieldSets().userFieldSets(); addIndexCommand("sddocname", CMD_INDEX); addIndexCommand("sddocname", CMD_WORD); derive(schema); } @Override protected void derive(Schema schema) { super.derive(schema); this.schema = schema; for (FieldSet fs : fieldSets.values()) { for (String fieldName : fs.getFieldNames()) { fs.fields().add(schema.getField(fieldName)); } } for (Index index : schema.getExplicitIndices()) { 
derive(index, schema); } for (SummaryField summaryField : schema.getUniqueNamedSummaryFields().values()) { if (summaryField.getTransform().isTeaser()) { addIndexCommand(summaryField.getName(), CMD_DYNTEASER); } if (summaryField.getTransform().isBolded()) { addIndexCommand(summaryField.getName(), CMD_HIGHLIGHT); } var sourceField = schema.getField(summaryField.getSourceField()); if (sourceField != null && sourceField.getMatching().getType().equals(MatchType.GRAM)) { addIndexCommand(summaryField.getName(), "ngram " + (sourceField.getMatching().getGramSize().orElse(NGramMatch.DEFAULT_GRAM_SIZE))); } } } private static boolean isPositionField(ImmutableSDField field) { return GeoPos.isAnyPos(field); } @Override protected void derive(ImmutableSDField field, Schema schema) { derive(field, schema, false); } protected void derive(ImmutableSDField field, Schema schema, boolean inPosition) { if (field.getDataType().equals(DataType.PREDICATE)) { addIndexCommand(field, CMD_PREDICATE); Index index = field.getIndex(field.getName()); if (index != null) { BooleanIndexDefinition options = index.getBooleanIndexDefiniton(); if (options.hasLowerBound() || options.hasUpperBound()) { addIndexCommand(field.getName(), CMD_PREDICATE_BOUNDS + " [" + (options.hasLowerBound() ? Long.toString(options.getLowerBound()) : "") + ".." + (options.hasUpperBound() ? 
Long.toString(options.getUpperBound()) : "") + "]"); } } } for (Map.Entry<String, String> e : field.getAliasToName().entrySet()) { String alias = e.getKey(); String name = e.getValue(); addIndexAlias(alias, name); } boolean isPosition = isPositionField(field); if (field.usesStructOrMap()) { for (ImmutableSDField structField : field.getStructFields()) { derive(structField, schema, isPosition); } } if (isPosition) { addIndexCommand(field.getName(), CMD_DEFAULT_POSITION); } for (var index : field.getIndices().values()) { addIndexCommand(index.getName(), CMD_INDEX); } if (needLowerCase(field)) { addIndexCommand(field, CMD_LOWERCASE); } if (field.getDataType().isMultivalue()) { addIndexCommand(field, CMD_MULTIVALUE); } Attribute attribute = field.getAttribute(); if ((field.doesAttributing() || (attribute != null && !inPosition)) && !field.doesIndexing()) { addIndexCommand(field.getName(), CMD_ATTRIBUTE); if (attribute != null && attribute.isFastSearch()) addIndexCommand(field.getName(), CMD_FAST_SEARCH); } else if (field.doesIndexing()) { if (stemSomehow(field, schema)) { addIndexCommand(field, stemCmd(field, schema), new StemmingOverrider(this, schema)); } if (normalizeAccents(field)) { addIndexCommand(field, CMD_NORMALIZE); } if (field.getMatching() == null || field.getMatching().getType().equals(MatchType.TEXT)) { addIndexCommand(field, CMD_PLAIN_TOKENS); } } if (isUriField(field)) { addUriIndexCommands(field); } if (field.getDataType().getPrimitiveType() instanceof NumericDataType) { addIndexCommand(field, CMD_NUMERICAL); if (isTypeOrNested(field, DataType.INT) || isTypeOrNested(field, DataType.LONG) || isTypeOrNested(field, DataType.BYTE)) { addIndexCommand(field, CMD_INTEGER); } } if (isTypeOrNested(field, DataType.STRING)) { addIndexCommand(field, CMD_STRING); } for (String command : field.getQueryCommands()) { addIndexCommand(field, command); } } private static boolean isAnyChildString(DataType dataType) { PrimitiveDataType primitive = 
dataType.getPrimitiveType(); if (primitive == PrimitiveDataType.STRING) return true; if (primitive != null) return false; if (dataType instanceof StructuredDataType structured) { for (Field field : structured.getFields()) { if (isAnyChildString(field.getDataType())) return true; } } else if (dataType instanceof MapDataType mapType) { return isAnyChildString(mapType.getKeyType()) || isAnyChildString(mapType.getValueType()); } return false; } private static boolean needLowerCase(ImmutableSDField field) { return ( field.doesIndexing() && field.getMatching().getCase() != Case.CASED) || field.doesLowerCasing() || ((field.doesAttributing() || (field.getAttribute() != null)) && isAnyChildString(field.getDataType()) && field.getMatching().getCase().equals(Case.UNCASED)); } static String stemCmd(ImmutableSDField field, Schema schema) { return CMD_STEM + ":" + field.getStemming(schema).toStemMode(); } private boolean stemSomehow(ImmutableSDField field, Schema schema) { if (field.getStemming(schema).equals(Stemming.NONE)) return false; return isTypeOrNested(field, DataType.STRING); } private boolean normalizeAccents(ImmutableSDField field) { return !isStreaming && field.getNormalizing().doRemoveAccents() && isTypeOrNested(field, DataType.STRING); } private boolean isTypeOrNested(ImmutableSDField field, DataType type) { return field.getDataType().equals(type) || field.getDataType().equals(DataType.getArray(type)) || field.getDataType().equals(DataType.getWeightedSet(type)); } private boolean isUriField(ImmutableSDField field) { DataType fieldType = field.getDataType(); if (DataType.URI.equals(fieldType)) { return true; } return (fieldType instanceof CollectionDataType collectionFieldType) && DataType.URI.equals(collectionFieldType.getNestedType()); } private void addUriIndexCommands(ImmutableSDField field) { String fieldName = field.getName(); addIndexCommand(fieldName, CMD_FULLURL); addIndexCommand(fieldName, CMD_LOWERCASE); addIndexCommand(fieldName + "." 
+ fieldName, CMD_FULLURL); addIndexCommand(fieldName + "." + fieldName, CMD_LOWERCASE); addIndexCommand(fieldName + ".path", CMD_FULLURL); addIndexCommand(fieldName + ".path", CMD_LOWERCASE); addIndexCommand(fieldName + ".query", CMD_FULLURL); addIndexCommand(fieldName + ".query", CMD_LOWERCASE); addIndexCommand(fieldName + ".hostname", CMD_URLHOST); addIndexCommand(fieldName + ".hostname", CMD_LOWERCASE); Index index = field.getIndex("hostname"); if (index != null) { addIndexCommand(index, CMD_URLHOST); } } /** * Sets a command for all indices of a field */ private void addIndexCommand(Index index, String command) { addIndexCommand(index.getName(), command); } /** * Sets a command for all indices of a field */ private void addIndexCommand(ImmutableSDField field, String command) { addIndexCommand(field, command, null); } /** * Sets a command for all indices of a field */ private void addIndexCommand(ImmutableSDField field, String command, IndexOverrider overrider) { if (overrider == null || !overrider.override(field.getName(), command, field)) { addIndexCommand(field.getName(), command); } } private void addIndexCommand(String indexName, String command) { commands.add(new IndexCommand(indexName, command)); } private static void addIndexCommand(IndexInfoConfig.Indexinfo.Builder iiB, String indexName, String command) { iiB.command(new IndexInfoConfig.Indexinfo.Command.Builder().indexname(indexName).command(command)); } private void addIndexAlias(String alias, String indexName) { aliases.put(alias, indexName); } /** * Returns whether a particular command is present in this index info */ public boolean hasCommand(String indexName, String command) { return commands.contains(new IndexCommand(indexName, command)); } private boolean notInCommands(String index) { for (IndexCommand command : commands) { if (command.index().equals(index)) { return false; } } return true; } @Override public void getConfig(IndexInfoConfig.Builder builder) { IndexInfoConfig.Indexinfo.Builder iiB 
= new IndexInfoConfig.Indexinfo.Builder(); iiB.name(getName()); for (IndexCommand command : commands) { addIndexCommand(iiB, command.index(), command.command()); } for (FieldSet fieldSet : fieldSets.values()) { if (notInCommands(fieldSet.getName())) { addFieldSetCommands(iiB, fieldSet); } } for (Map.Entry<String, String> e : aliases.entrySet()) { iiB.alias(new IndexInfoConfig.Indexinfo.Alias.Builder().alias(e.getKey()).indexname(e.getValue())); } builder.indexinfo(iiB); } private void addFieldSetCommands(IndexInfoConfig.Indexinfo.Builder iiB, FieldSet fieldSet) { for (String qc : fieldSet.queryCommands()) { addIndexCommand(iiB, fieldSet.getName(), qc); } boolean anyIndexing = false; boolean anyAttributing = false; boolean anyLowerCasing = false; boolean anyStemming = false; boolean anyNormalizing = false; boolean anyString = false; boolean anyInteger = false; String phraseSegmentingCommand = null; String stemmingCommand = null; Matching fieldSetMatching = fieldSet.getMatching(); for (ImmutableSDField field : fieldSet.fields()) { if (field.doesIndexing()) { anyIndexing = true; } if (field.doesAttributing()) { anyAttributing = true; } if (needLowerCase(field)) { anyLowerCasing = true; } if (stemming(field)) { anyStemming = true; stemmingCommand = CMD_STEM + ":" + getEffectiveStemming(field).toStemMode(); } if (normalizeAccents(field)) { anyNormalizing = true; } if (isTypeOrNested(field, DataType.STRING)) { anyString = true; } if (fieldSetMatching == null && field.getMatching().getType() != Matching.defaultType) { fieldSetMatching = field.getMatching(); } Optional<String> explicitPhraseSegmentingCommand = field.getQueryCommands().stream().filter(c -> c.startsWith(CMD_PHRASE_SEGMENTING)).findFirst(); if (explicitPhraseSegmentingCommand.isPresent()) { phraseSegmentingCommand = explicitPhraseSegmentingCommand.get(); } if (isTypeOrNested(field, DataType.INT) || isTypeOrNested(field, DataType.LONG) || isTypeOrNested(field, DataType.BYTE)) { anyInteger = true; } } if 
(anyIndexing && anyAttributing && fieldSet.getMatching() == null) { fieldSetMatching = new Matching(); } if (anyLowerCasing) { addIndexCommand(iiB, fieldSet.getName(), CMD_LOWERCASE); } if (hasMultiValueField(fieldSet)) { addIndexCommand(iiB, fieldSet.getName(), CMD_MULTIVALUE); } if (anyIndexing) { addIndexCommand(iiB, fieldSet.getName(), CMD_INDEX); if ( ! isExactMatch(fieldSetMatching)) { if (fieldSetMatching == null || fieldSetMatching.getType().equals(MatchType.TEXT)) { addIndexCommand(iiB, fieldSet.getName(), CMD_PLAIN_TOKENS); } if (anyStemming) { addIndexCommand(iiB, fieldSet.getName(), stemmingCommand); } if (anyNormalizing) addIndexCommand(iiB, fieldSet.getName(), CMD_NORMALIZE); if (phraseSegmentingCommand != null) addIndexCommand(iiB, fieldSet.getName(), phraseSegmentingCommand); } } else { addIndexCommand(iiB, fieldSet.getName(), CMD_ATTRIBUTE); addIndexCommand(iiB, fieldSet.getName(), CMD_INDEX); } if (anyString) { addIndexCommand(iiB, fieldSet.getName(), CMD_STRING); } if (anyInteger) { addIndexCommand(iiB, fieldSet.getName(), CMD_INTEGER); } if (fieldSetMatching != null) { if (fieldSetMatching.getType().equals(MatchType.EXACT)) { String term = fieldSetMatching.getExactMatchTerminator(); if (term==null) term=ExactMatch.DEFAULT_EXACT_TERMINATOR; addIndexCommand(iiB, fieldSet.getName(), "exact "+term); } else if (fieldSetMatching.getType().equals(MatchType.WORD)) { addIndexCommand(iiB, fieldSet.getName(), CMD_WORD); } else if (fieldSetMatching.getType().equals(MatchType.GRAM)) { addIndexCommand(iiB, fieldSet.getName(), "ngram " + fieldSetMatching.getGramSize().orElse(NGramMatch.DEFAULT_GRAM_SIZE)); } else if (fieldSetMatching.getType().equals(MatchType.TEXT)) { } } } private boolean hasMultiValueField(FieldSet fieldSet) { for (ImmutableSDField field : fieldSet.fields()) { if (field.getDataType().isMultivalue()) return true; } return false; } private Stemming getEffectiveStemming(ImmutableSDField field) { Stemming active = field.getStemming(schema); if 
(field.getIndex(field.getName()) != null) { if (field.getIndex(field.getName()).getStemming()!=null) { active = field.getIndex(field.getName()).getStemming(); } } return Objects.requireNonNullElse(active, Stemming.BEST); } private boolean stemming(ImmutableSDField field) { if (field.getStemming() != null) { return !field.getStemming().equals(Stemming.NONE); } if (schema.getStemming() == Stemming.NONE) return false; if (field.isImportedField()) return false; if (field.getIndex(field.getName())==null) return true; if (field.getIndex(field.getName()).getStemming()==null) return true; return !(field.getIndex(field.getName()).getStemming().equals(Stemming.NONE)); } private boolean isExactMatch(Matching m) { if (m == null) return false; return m.getType().equals(MatchType.EXACT) || m.getType().equals(MatchType.WORD); } @Override protected String getDerivedName() { return "index-info"; } /** * An index command. Null commands are also represented, to detect consistency issues. This is an (immutable) value * object. */ /** * A command which may override the command setting of a field for a particular index */ private static abstract class IndexOverrider { protected final IndexInfo owner; public IndexOverrider(IndexInfo owner) { this.owner = owner; } /** * Override the setting of this index for this field, returns true if overriden, false if this index should be * set according to the field */ public abstract boolean override(String indexName, String command, ImmutableSDField field); } private static class StemmingOverrider extends IndexOverrider { private final Schema schema; public StemmingOverrider(IndexInfo owner, Schema schema) { super(owner); this.schema = schema; } public boolean override(String indexName, String command, ImmutableSDField field) { if (schema == null) { return false; } Index index = schema.getIndex(indexName); if (index == null) { return false; } Stemming indexStemming = index.getStemming(); if (indexStemming == null) { return false; } if ( ! 
Stemming.NONE.equals(indexStemming)) { owner.addIndexCommand(indexName, CMD_STEM + ":" + indexStemming.toStemMode()); } return true; } } }
Fixed
// Verifies that a string field indexed with cased EXACT matching produces a spec that is
// still lowercased and carries "exact" as its extra argument.
void test_exact_string() {
    Schema testSchema = createSchema();
    SDField stringField = new TemporarySDField(testSchema.getDocument(), "f", DataType.STRING);
    stringField.parseIndexingScript("{ index }");
    stringField.setMatching(new Matching(MatchType.EXACT).setCase(Case.CASED));
    testSchema.getDocument().addField(stringField);
    VsmfieldsConfig config = vsmfieldsConfig(testSchema);
    VsmfieldsConfig.Fieldspec spec = config.fieldspec().get(0);
    assertEquals("f", spec.name());
    assertEquals(VsmfieldsConfig.Fieldspec.Searchmethod.AUTOUTF8, spec.searchmethod());
    assertEquals(VsmfieldsConfig.Fieldspec.Normalize.LOWERCASE, spec.normalize());
    assertEquals("exact", spec.arg1());
}
Schema schema = createSchema();
// Exercises three matching configurations and the normalization / extra-argument each
// one is expected to yield: uncased text, cased text, and cased exact matching.
void test_exact_string() {
    testIndexMatching(new Matching(MatchType.TEXT),
                      VsmfieldsConfig.Fieldspec.Normalize.LOWERCASE_AND_FOLD, "");
    testIndexMatching(new Matching(MatchType.TEXT).setCase(Case.CASED),
                      VsmfieldsConfig.Fieldspec.Normalize.NONE, "");
    testIndexMatching(new Matching(MatchType.EXACT).setCase(Case.CASED),
                      VsmfieldsConfig.Fieldspec.Normalize.LOWERCASE, "exact");
}
// Unit tests for VsmFields config generation (streaming-search field specs).
// NOTE(review): this is surrounding context with one test method elided; the duplicated
// consecutive "@Test" annotation below marks where the elided method belongs — confirm
// against the full file before treating this as compilable source.
class VsmFieldsTestCase { private static Schema createSchema() { Schema schema = new Schema("test", MockApplicationPackage.createEmpty(), new MockFileRegistry(), new TestableDeployLogger(), new TestProperties()); var sdoc = new SDDocumentType("test"); schema.addDocument(sdoc); return schema; } private static VsmfieldsConfig vsmfieldsConfig(Schema schema) { VsmFields vsmFields = new VsmFields(schema); VsmfieldsConfig.Builder cfgBuilder = new VsmfieldsConfig.Builder(); vsmFields.getConfig(cfgBuilder); return cfgBuilder.build(); } @Test void reference_type_field_is_unsearchable() { Schema schema = createSchema(); SDField field = new TemporarySDField(schema.getDocument(), "ref_field", NewDocumentReferenceDataType.forDocumentName("parent_type")); field.parseIndexingScript("{ summary }"); schema.getDocument().addField(field); VsmfieldsConfig cfg = vsmfieldsConfig(schema); assertEquals(1, cfg.fieldspec().size()); VsmfieldsConfig.Fieldspec fieldSpec = cfg.fieldspec().get(0); assertEquals("ref_field", fieldSpec.name()); assertEquals(VsmfieldsConfig.Fieldspec.Searchmethod.NONE, fieldSpec.searchmethod()); } @Test @Test void test_string() { Schema schema = createSchema(); SDField field = new TemporarySDField(schema.getDocument(), "f", DataType.STRING); field.parseIndexingScript("{ index }"); field.setMatching(new Matching(MatchType.TEXT)); schema.getDocument().addField(field); VsmfieldsConfig cfg = vsmfieldsConfig(schema); VsmfieldsConfig.Fieldspec fieldSpec = cfg.fieldspec().get(0); assertEquals("f", fieldSpec.name()); assertEquals(VsmfieldsConfig.Fieldspec.Searchmethod.AUTOUTF8, fieldSpec.searchmethod()); assertEquals(VsmfieldsConfig.Fieldspec.Normalize.LOWERCASE_AND_FOLD, fieldSpec.normalize()); assertEquals("", fieldSpec.arg1()); } @Test void test_cased_string() { Schema schema = createSchema(); SDField field = new TemporarySDField(schema.getDocument(), "f", DataType.STRING); field.parseIndexingScript("{ index }"); field.setMatching(new 
Matching(MatchType.TEXT).setCase(Case.CASED)); schema.getDocument().addField(field); VsmfieldsConfig cfg = vsmfieldsConfig(schema); VsmfieldsConfig.Fieldspec fieldSpec = cfg.fieldspec().get(0); assertEquals("f", fieldSpec.name()); assertEquals(VsmfieldsConfig.Fieldspec.Searchmethod.AUTOUTF8, fieldSpec.searchmethod()); assertEquals(VsmfieldsConfig.Fieldspec.Normalize.NONE, fieldSpec.normalize()); assertEquals("", fieldSpec.arg1()); } }
// Unit tests for VsmFields config generation, refactored around the shared
// testIndexMatching helper (builds a schema with one string index field using the given
// matching, then asserts the produced fieldspec's normalization and extra argument).
// NOTE(review): context with one test method elided; the stray trailing "@Test" before the
// closing brace marks where the elided method belongs — confirm against the full file.
class VsmFieldsTestCase { private static Schema createSchema() { Schema schema = new Schema("test", MockApplicationPackage.createEmpty(), new MockFileRegistry(), new TestableDeployLogger(), new TestProperties()); var sdoc = new SDDocumentType("test"); schema.addDocument(sdoc); return schema; } private static VsmfieldsConfig vsmfieldsConfig(Schema schema) { VsmFields vsmFields = new VsmFields(schema); VsmfieldsConfig.Builder cfgBuilder = new VsmfieldsConfig.Builder(); vsmFields.getConfig(cfgBuilder); return cfgBuilder.build(); } @Test void reference_type_field_is_unsearchable() { Schema schema = createSchema(); SDField field = new TemporarySDField(schema.getDocument(), "ref_field", NewDocumentReferenceDataType.forDocumentName("parent_type")); field.parseIndexingScript("{ summary }"); schema.getDocument().addField(field); VsmfieldsConfig cfg = vsmfieldsConfig(schema); assertEquals(1, cfg.fieldspec().size()); VsmfieldsConfig.Fieldspec fieldSpec = cfg.fieldspec().get(0); assertEquals("ref_field", fieldSpec.name()); assertEquals(VsmfieldsConfig.Fieldspec.Searchmethod.NONE, fieldSpec.searchmethod()); } private void testIndexMatching(Matching matching, VsmfieldsConfig.Fieldspec.Normalize.Enum normalize, String arg1) { Schema schema = createSchema(); SDField field = new TemporarySDField(schema.getDocument(), "f", DataType.STRING); field.parseIndexingScript("{ index }"); field.setMatching(matching); schema.getDocument().addField(field); VsmfieldsConfig cfg = vsmfieldsConfig(schema); VsmfieldsConfig.Fieldspec fieldSpec = cfg.fieldspec().get(0); assertEquals("f", fieldSpec.name()); assertEquals(VsmfieldsConfig.Fieldspec.Searchmethod.AUTOUTF8, fieldSpec.searchmethod()); assertEquals(normalize, fieldSpec.normalize()); assertEquals(arg1, fieldSpec.arg1()); } @Test }
This `finally` block can itself throw, which will hide any exception thrown in the `try { }` block. Consider re-throwing the original exception instead, attaching any additional exceptions raised here via `addSuppressed`.
/**
 * Attempts to render this list level. Bails out unless this listener is at the top of the
 * listener stack and the list is ready (frozen in before-handover mode, otherwise blocks on
 * completeFuture). Renders available data; when the list is complete the level is ended,
 * otherwise the stream is flushed. The finally block then drains and runs all queued sync
 * tasks, collecting RuntimeExceptions (first one primary, the rest attached as suppressed).
 * NOTE(review): if the try block throws AND a sync task also throws, the `throw exception`
 * in the finally block replaces the original exception from the try block — consider
 * attaching task failures to the in-flight exception as suppressed instead.
 */
protected void render() throws IOException, InterruptedException, ExecutionException { try { if (dataListListenerStack.peekFirst() != this) return; if (beforeHandoverMode && ! list.isFrozen()) return; if ( ! beforeHandoverMode) list.completeFuture().get(); boolean startedRendering = renderData(); if ( ! startedRendering || uncompletedChildren > 0) return; if (list.completeFuture().isDone()) endListLevel(); else stream.flush(); } finally { RuntimeException exception = null; while (!syncTasks.isEmpty()) { try { syncTasks.poll().run(); } catch (RuntimeException e) { if (exception == null) exception = e; else exception.addSuppressed(e); } } if (exception != null) throw exception; } }
if (exception != null) throw exception;
/**
 * Attempts to render this list level. Bails out unless this listener is at the top of the
 * listener stack and the list is ready (frozen in before-handover mode, otherwise blocks on
 * completeFuture). Renders available data; when the list is complete the level is ended,
 * otherwise the stream is flushed. Queued sync tasks are always drained and run afterwards.
 *
 * Fix: previously a RuntimeException from a sync task was thrown from the finally block,
 * replacing (and thus hiding) any exception propagating out of the try block. Now, when the
 * try block throws, task failures are attached to that exception as suppressed; a task
 * failure is only thrown directly when the try block completed normally.
 *
 * @throws IOException if writing to the stream fails
 * @throws InterruptedException if waiting for list completion is interrupted
 * @throws ExecutionException if list completion failed
 */
protected void render() throws IOException, InterruptedException, ExecutionException {
    Exception primaryException = null; // the exception (if any) propagating out of the try block
    try {
        if (dataListListenerStack.peekFirst() != this) return;
        if (beforeHandoverMode && ! list.isFrozen()) return;
        if ( ! beforeHandoverMode) list.completeFuture().get();
        boolean startedRendering = renderData();
        if ( ! startedRendering || uncompletedChildren > 0) return;
        if (list.completeFuture().isDone())
            endListLevel();
        else
            stream.flush();
    }
    catch (Exception e) { // precise rethrow: only the declared checked types and RuntimeException
        primaryException = e;
        throw e;
    }
    finally {
        RuntimeException taskException = null;
        while ( ! syncTasks.isEmpty()) {
            try {
                syncTasks.poll().run();
            }
            catch (RuntimeException e) {
                if (primaryException != null) primaryException.addSuppressed(e);
                else if (taskException == null) taskException = e;
                else taskException.addSuppressed(e);
            }
        }
        // Only reached with a pending throw when the try block completed normally,
        // so this no longer masks an in-flight exception.
        if (taskException != null) throw taskException;
    }
}
// Listens to one DataList during asynchronous response rendering: renders the list's items in
// order, recursing into sublists via nested DataListListeners pushed on a listener stack, and —
// for the root list (ParentOfTopLevel) — ends the response and closes the stream/channel when done.
// NOTE(review): this is a context dump with one method elided; the stray "@Override" directly
// before endListLevel() appears to belong to the elided method (endListLevel itself is private
// and cannot override anything) — confirm against the full file.
class DataListListener extends RendererListener { /** The index of the next data item where rendering should be initiated in this list */ private int currentIndex = 0; /** Children of this which has started rendering but not yet completed */ private int uncompletedChildren = 0; private boolean listStartIsRendered = false; /** The list which this is listening to */ private final DataList list; /** The listener to the parent of this list, or null if this is the root */ private final DataListListener parent; /** Queue of rendering tasks that can be executed immediately without dispatching to executor and incuring a context switch */ private final Queue<Runnable> syncTasks = new LinkedList<>(); public DataListListener(DataList list, DataListListener parent) { this.list = list; this.parent = parent; } @Override private void endListLevel() throws IOException { endRenderLevel(list); stream.flush(); dataListListenerStack.removeFirst(); if (parent != null) parent.childCompleted(); list.close(); } /** Called each time a direct child of this completed. */ private void childCompleted() { uncompletedChildren--; if (uncompletedChildren > 0) return; if (list.incoming().isComplete()) run(); } /** * Resumes rendering data from the current position. * Called both on completion (by this), and when new data is available (from the new data listener). * * @return whether this started rendering */ @SuppressWarnings("unchecked") private boolean renderData() throws IOException { if (dataListListenerStack.peekFirst() != this) return false; renderDataListStart(); for (Object data : list.incoming().drain()) list.add((Data) data); renderDataList(list); return true; } void renderDataListStart() throws IOException { if ( ! listStartIsRendered) { if (list instanceof ParentOfTopLevel) beginResponse(stream); else beginList(list); listStartIsRendered = true; } } /** Renders a list. 
*/ private void renderDataList(DataList list) throws IOException { boolean ordered = isOrdered(list); if (list.asList() == null) { logger.log(Level.WARNING, "DataList.asList() returned null, indicating it is closed. " + "This is likely caused by adding the same list multiple " + "times in the response."); return; } while (currentIndex < list.asList().size()) { Data data = list.get(currentIndex++); if (data instanceof DataList) { listenTo((DataList)data, ordered && isStreamed((DataList)data)); uncompletedChildren++; if (ordered) return; } else { data(data); } } } private void listenTo(DataList<?> subList, boolean listenToNewDataAdded) throws IOException { DataListListener listListener = new DataListListener(subList,this); dataListListenerStack.addFirst(listListener); if (listenToNewDataAdded) subList.incoming().addNewDataListener(new DataListener(listListener), getExecutor()); flushIfLikelyToSuspend(subList); if (subList.isFrozen()) syncTasks.add(listListener); else subList.addFreezeListener(listListener, getExecutor()); if (subList.completeFuture().isDone()) syncTasks.add(listListener); else subList.completeFuture().whenCompleteAsync((__, ___) -> listListener.run(), getExecutor()); if (subList.incoming().completedFuture().isDone()) syncTasks.add(listListener); else subList.incoming().completedFuture().whenCompleteAsync((__, ___) -> listListener.run(), getExecutor()); } private boolean isOrdered(DataList dataList) { if (! (dataList instanceof Ordered)) return true; return ((Ordered)dataList).isOrdered(); } private boolean isStreamed(DataList dataList) { if (! 
(dataList instanceof Streamed)) return true; return ((Streamed)dataList).isStreamed(); } private void endRenderLevel(DataList<?> current) throws IOException { if (current instanceof ParentOfTopLevel) { endResponse(); closeIO(null); } else { endList(current); } } private void closeIO(Exception failed) { IOException closeException = null; try { stream.close(); } catch (IOException e) { closeException = e; logger.log(Level.WARNING, "Exception caught while closing stream to client.", e); } finally { if (failed != null) { success.completeExceptionally(failed); } else if (closeException != null) { success.completeExceptionally(closeException); } else { success.complete(true); } if (channel != null) { channel.close(completionHandler); } } } @Override public String toString() { return "listener to " + list; } }
class DataListListener extends RendererListener { /** The index of the next data item where rendering should be initiated in this list */ private int currentIndex = 0; /** Children of this which has started rendering but not yet completed */ private int uncompletedChildren = 0; private boolean listStartIsRendered = false; /** The list which this is listening to */ private final DataList list; /** The listener to the parent of this list, or null if this is the root */ private final DataListListener parent; /** Queue of rendering tasks that can be executed immediately without dispatching to executor and incuring a context switch */ private final Queue<Runnable> syncTasks = new LinkedList<>(); public DataListListener(DataList list, DataListListener parent) { this.list = list; this.parent = parent; } @Override private void endListLevel() throws IOException { endRenderLevel(list); stream.flush(); dataListListenerStack.removeFirst(); if (parent != null) parent.childCompleted(); list.close(); } /** Called each time a direct child of this completed. */ private void childCompleted() { uncompletedChildren--; if (uncompletedChildren > 0) return; if (list.incoming().isComplete()) run(); } /** * Resumes rendering data from the current position. * Called both on completion (by this), and when new data is available (from the new data listener). * * @return whether this started rendering */ @SuppressWarnings("unchecked") private boolean renderData() throws IOException { if (dataListListenerStack.peekFirst() != this) return false; renderDataListStart(); for (Object data : list.incoming().drain()) list.add((Data) data); renderDataList(list); return true; } void renderDataListStart() throws IOException { if ( ! listStartIsRendered) { if (list instanceof ParentOfTopLevel) beginResponse(stream); else beginList(list); listStartIsRendered = true; } } /** Renders a list. 
*/ private void renderDataList(DataList list) throws IOException { boolean ordered = isOrdered(list); if (list.asList() == null) { logger.log(Level.WARNING, "DataList.asList() returned null, indicating it is closed. " + "This is likely caused by adding the same list multiple " + "times in the response."); return; } while (currentIndex < list.asList().size()) { Data data = list.get(currentIndex++); if (data instanceof DataList) { listenTo((DataList)data, ordered && isStreamed((DataList)data)); uncompletedChildren++; if (ordered) return; } else { data(data); } } } private void listenTo(DataList<?> subList, boolean listenToNewDataAdded) throws IOException { DataListListener listListener = new DataListListener(subList,this); dataListListenerStack.addFirst(listListener); if (listenToNewDataAdded) subList.incoming().addNewDataListener(new DataListener(listListener), getExecutor()); flushIfLikelyToSuspend(subList); if (subList.isFrozen()) syncTasks.add(listListener); else subList.addFreezeListener(listListener, getExecutor()); if (subList.completeFuture().isDone()) syncTasks.add(listListener); else subList.completeFuture().whenCompleteAsync((__, ___) -> listListener.run(), getExecutor()); if (subList.incoming().completedFuture().isDone()) syncTasks.add(listListener); else subList.incoming().completedFuture().whenCompleteAsync((__, ___) -> listListener.run(), getExecutor()); } private boolean isOrdered(DataList dataList) { if (! (dataList instanceof Ordered)) return true; return ((Ordered)dataList).isOrdered(); } private boolean isStreamed(DataList dataList) { if (! 
(dataList instanceof Streamed)) return true; return ((Streamed)dataList).isStreamed(); } private void endRenderLevel(DataList<?> current) throws IOException { if (current instanceof ParentOfTopLevel) { endResponse(); closeIO(null); } else { endList(current); } } private void closeIO(Exception failed) { IOException closeException = null; try { stream.close(); } catch (IOException e) { closeException = e; logger.log(Level.WARNING, "Exception caught while closing stream to client.", e); } finally { if (failed != null) { success.completeExceptionally(failed); } else if (closeException != null) { success.completeExceptionally(closeException); } else { success.complete(true); } if (channel != null) { channel.close(completionHandler); } } } @Override public String toString() { return "listener to " + list; } }
The exception will only be hidden if `finally` throws a new exception right? That is acceptable imho.
protected void render() throws IOException, InterruptedException, ExecutionException { try { if (dataListListenerStack.peekFirst() != this) return; if (beforeHandoverMode && ! list.isFrozen()) return; if ( ! beforeHandoverMode) list.completeFuture().get(); boolean startedRendering = renderData(); if ( ! startedRendering || uncompletedChildren > 0) return; if (list.completeFuture().isDone()) endListLevel(); else stream.flush(); } finally { RuntimeException exception = null; while (!syncTasks.isEmpty()) { try { syncTasks.poll().run(); } catch (RuntimeException e) { if (exception == null) exception = e; else exception.addSuppressed(e); } } if (exception != null) throw exception; } }
if (exception != null) throw exception;
protected void render() throws IOException, InterruptedException, ExecutionException { try { if (dataListListenerStack.peekFirst() != this) return; if (beforeHandoverMode && ! list.isFrozen()) return; if ( ! beforeHandoverMode) list.completeFuture().get(); boolean startedRendering = renderData(); if ( ! startedRendering || uncompletedChildren > 0) return; if (list.completeFuture().isDone()) endListLevel(); else stream.flush(); } finally { RuntimeException exception = null; while (!syncTasks.isEmpty()) { try { syncTasks.poll().run(); } catch (RuntimeException e) { if (exception == null) exception = e; else exception.addSuppressed(e); } } if (exception != null) throw exception; } }
class DataListListener extends RendererListener { /** The index of the next data item where rendering should be initiated in this list */ private int currentIndex = 0; /** Children of this which has started rendering but not yet completed */ private int uncompletedChildren = 0; private boolean listStartIsRendered = false; /** The list which this is listening to */ private final DataList list; /** The listener to the parent of this list, or null if this is the root */ private final DataListListener parent; /** Queue of rendering tasks that can be executed immediately without dispatching to executor and incuring a context switch */ private final Queue<Runnable> syncTasks = new LinkedList<>(); public DataListListener(DataList list, DataListListener parent) { this.list = list; this.parent = parent; } @Override private void endListLevel() throws IOException { endRenderLevel(list); stream.flush(); dataListListenerStack.removeFirst(); if (parent != null) parent.childCompleted(); list.close(); } /** Called each time a direct child of this completed. */ private void childCompleted() { uncompletedChildren--; if (uncompletedChildren > 0) return; if (list.incoming().isComplete()) run(); } /** * Resumes rendering data from the current position. * Called both on completion (by this), and when new data is available (from the new data listener). * * @return whether this started rendering */ @SuppressWarnings("unchecked") private boolean renderData() throws IOException { if (dataListListenerStack.peekFirst() != this) return false; renderDataListStart(); for (Object data : list.incoming().drain()) list.add((Data) data); renderDataList(list); return true; } void renderDataListStart() throws IOException { if ( ! listStartIsRendered) { if (list instanceof ParentOfTopLevel) beginResponse(stream); else beginList(list); listStartIsRendered = true; } } /** Renders a list. 
*/ private void renderDataList(DataList list) throws IOException { boolean ordered = isOrdered(list); if (list.asList() == null) { logger.log(Level.WARNING, "DataList.asList() returned null, indicating it is closed. " + "This is likely caused by adding the same list multiple " + "times in the response."); return; } while (currentIndex < list.asList().size()) { Data data = list.get(currentIndex++); if (data instanceof DataList) { listenTo((DataList)data, ordered && isStreamed((DataList)data)); uncompletedChildren++; if (ordered) return; } else { data(data); } } } private void listenTo(DataList<?> subList, boolean listenToNewDataAdded) throws IOException { DataListListener listListener = new DataListListener(subList,this); dataListListenerStack.addFirst(listListener); if (listenToNewDataAdded) subList.incoming().addNewDataListener(new DataListener(listListener), getExecutor()); flushIfLikelyToSuspend(subList); if (subList.isFrozen()) syncTasks.add(listListener); else subList.addFreezeListener(listListener, getExecutor()); if (subList.completeFuture().isDone()) syncTasks.add(listListener); else subList.completeFuture().whenCompleteAsync((__, ___) -> listListener.run(), getExecutor()); if (subList.incoming().completedFuture().isDone()) syncTasks.add(listListener); else subList.incoming().completedFuture().whenCompleteAsync((__, ___) -> listListener.run(), getExecutor()); } private boolean isOrdered(DataList dataList) { if (! (dataList instanceof Ordered)) return true; return ((Ordered)dataList).isOrdered(); } private boolean isStreamed(DataList dataList) { if (! 
(dataList instanceof Streamed)) return true; return ((Streamed)dataList).isStreamed(); } private void endRenderLevel(DataList<?> current) throws IOException { if (current instanceof ParentOfTopLevel) { endResponse(); closeIO(null); } else { endList(current); } } private void closeIO(Exception failed) { IOException closeException = null; try { stream.close(); } catch (IOException e) { closeException = e; logger.log(Level.WARNING, "Exception caught while closing stream to client.", e); } finally { if (failed != null) { success.completeExceptionally(failed); } else if (closeException != null) { success.completeExceptionally(closeException); } else { success.complete(true); } if (channel != null) { channel.close(completionHandler); } } } @Override public String toString() { return "listener to " + list; } }
class DataListListener extends RendererListener { /** The index of the next data item where rendering should be initiated in this list */ private int currentIndex = 0; /** Children of this which has started rendering but not yet completed */ private int uncompletedChildren = 0; private boolean listStartIsRendered = false; /** The list which this is listening to */ private final DataList list; /** The listener to the parent of this list, or null if this is the root */ private final DataListListener parent; /** Queue of rendering tasks that can be executed immediately without dispatching to executor and incuring a context switch */ private final Queue<Runnable> syncTasks = new LinkedList<>(); public DataListListener(DataList list, DataListListener parent) { this.list = list; this.parent = parent; } @Override private void endListLevel() throws IOException { endRenderLevel(list); stream.flush(); dataListListenerStack.removeFirst(); if (parent != null) parent.childCompleted(); list.close(); } /** Called each time a direct child of this completed. */ private void childCompleted() { uncompletedChildren--; if (uncompletedChildren > 0) return; if (list.incoming().isComplete()) run(); } /** * Resumes rendering data from the current position. * Called both on completion (by this), and when new data is available (from the new data listener). * * @return whether this started rendering */ @SuppressWarnings("unchecked") private boolean renderData() throws IOException { if (dataListListenerStack.peekFirst() != this) return false; renderDataListStart(); for (Object data : list.incoming().drain()) list.add((Data) data); renderDataList(list); return true; } void renderDataListStart() throws IOException { if ( ! listStartIsRendered) { if (list instanceof ParentOfTopLevel) beginResponse(stream); else beginList(list); listStartIsRendered = true; } } /** Renders a list. 
*/ private void renderDataList(DataList list) throws IOException { boolean ordered = isOrdered(list); if (list.asList() == null) { logger.log(Level.WARNING, "DataList.asList() returned null, indicating it is closed. " + "This is likely caused by adding the same list multiple " + "times in the response."); return; } while (currentIndex < list.asList().size()) { Data data = list.get(currentIndex++); if (data instanceof DataList) { listenTo((DataList)data, ordered && isStreamed((DataList)data)); uncompletedChildren++; if (ordered) return; } else { data(data); } } } private void listenTo(DataList<?> subList, boolean listenToNewDataAdded) throws IOException { DataListListener listListener = new DataListListener(subList,this); dataListListenerStack.addFirst(listListener); if (listenToNewDataAdded) subList.incoming().addNewDataListener(new DataListener(listListener), getExecutor()); flushIfLikelyToSuspend(subList); if (subList.isFrozen()) syncTasks.add(listListener); else subList.addFreezeListener(listListener, getExecutor()); if (subList.completeFuture().isDone()) syncTasks.add(listListener); else subList.completeFuture().whenCompleteAsync((__, ___) -> listListener.run(), getExecutor()); if (subList.incoming().completedFuture().isDone()) syncTasks.add(listListener); else subList.incoming().completedFuture().whenCompleteAsync((__, ___) -> listListener.run(), getExecutor()); } private boolean isOrdered(DataList dataList) { if (! (dataList instanceof Ordered)) return true; return ((Ordered)dataList).isOrdered(); } private boolean isStreamed(DataList dataList) { if (! 
(dataList instanceof Streamed)) return true; return ((Streamed)dataList).isStreamed(); } private void endRenderLevel(DataList<?> current) throws IOException { if (current instanceof ParentOfTopLevel) { endResponse(); closeIO(null); } else { endList(current); } } private void closeIO(Exception failed) { IOException closeException = null; try { stream.close(); } catch (IOException e) { closeException = e; logger.log(Level.WARNING, "Exception caught while closing stream to client.", e); } finally { if (failed != null) { success.completeExceptionally(failed); } else if (closeException != null) { success.completeExceptionally(closeException); } else { success.complete(true); } if (channel != null) { channel.close(completionHandler); } } } @Override public String toString() { return "listener to " + list; } }
Well, not if `finally` is very likely to throw, due to the previous exception. I can't tell if that's the case here, though.
protected void render() throws IOException, InterruptedException, ExecutionException { try { if (dataListListenerStack.peekFirst() != this) return; if (beforeHandoverMode && ! list.isFrozen()) return; if ( ! beforeHandoverMode) list.completeFuture().get(); boolean startedRendering = renderData(); if ( ! startedRendering || uncompletedChildren > 0) return; if (list.completeFuture().isDone()) endListLevel(); else stream.flush(); } finally { RuntimeException exception = null; while (!syncTasks.isEmpty()) { try { syncTasks.poll().run(); } catch (RuntimeException e) { if (exception == null) exception = e; else exception.addSuppressed(e); } } if (exception != null) throw exception; } }
if (exception != null) throw exception;
protected void render() throws IOException, InterruptedException, ExecutionException { try { if (dataListListenerStack.peekFirst() != this) return; if (beforeHandoverMode && ! list.isFrozen()) return; if ( ! beforeHandoverMode) list.completeFuture().get(); boolean startedRendering = renderData(); if ( ! startedRendering || uncompletedChildren > 0) return; if (list.completeFuture().isDone()) endListLevel(); else stream.flush(); } finally { RuntimeException exception = null; while (!syncTasks.isEmpty()) { try { syncTasks.poll().run(); } catch (RuntimeException e) { if (exception == null) exception = e; else exception.addSuppressed(e); } } if (exception != null) throw exception; } }
class DataListListener extends RendererListener { /** The index of the next data item where rendering should be initiated in this list */ private int currentIndex = 0; /** Children of this which has started rendering but not yet completed */ private int uncompletedChildren = 0; private boolean listStartIsRendered = false; /** The list which this is listening to */ private final DataList list; /** The listener to the parent of this list, or null if this is the root */ private final DataListListener parent; /** Queue of rendering tasks that can be executed immediately without dispatching to executor and incuring a context switch */ private final Queue<Runnable> syncTasks = new LinkedList<>(); public DataListListener(DataList list, DataListListener parent) { this.list = list; this.parent = parent; } @Override private void endListLevel() throws IOException { endRenderLevel(list); stream.flush(); dataListListenerStack.removeFirst(); if (parent != null) parent.childCompleted(); list.close(); } /** Called each time a direct child of this completed. */ private void childCompleted() { uncompletedChildren--; if (uncompletedChildren > 0) return; if (list.incoming().isComplete()) run(); } /** * Resumes rendering data from the current position. * Called both on completion (by this), and when new data is available (from the new data listener). * * @return whether this started rendering */ @SuppressWarnings("unchecked") private boolean renderData() throws IOException { if (dataListListenerStack.peekFirst() != this) return false; renderDataListStart(); for (Object data : list.incoming().drain()) list.add((Data) data); renderDataList(list); return true; } void renderDataListStart() throws IOException { if ( ! listStartIsRendered) { if (list instanceof ParentOfTopLevel) beginResponse(stream); else beginList(list); listStartIsRendered = true; } } /** Renders a list. 
*/ private void renderDataList(DataList list) throws IOException { boolean ordered = isOrdered(list); if (list.asList() == null) { logger.log(Level.WARNING, "DataList.asList() returned null, indicating it is closed. " + "This is likely caused by adding the same list multiple " + "times in the response."); return; } while (currentIndex < list.asList().size()) { Data data = list.get(currentIndex++); if (data instanceof DataList) { listenTo((DataList)data, ordered && isStreamed((DataList)data)); uncompletedChildren++; if (ordered) return; } else { data(data); } } } private void listenTo(DataList<?> subList, boolean listenToNewDataAdded) throws IOException { DataListListener listListener = new DataListListener(subList,this); dataListListenerStack.addFirst(listListener); if (listenToNewDataAdded) subList.incoming().addNewDataListener(new DataListener(listListener), getExecutor()); flushIfLikelyToSuspend(subList); if (subList.isFrozen()) syncTasks.add(listListener); else subList.addFreezeListener(listListener, getExecutor()); if (subList.completeFuture().isDone()) syncTasks.add(listListener); else subList.completeFuture().whenCompleteAsync((__, ___) -> listListener.run(), getExecutor()); if (subList.incoming().completedFuture().isDone()) syncTasks.add(listListener); else subList.incoming().completedFuture().whenCompleteAsync((__, ___) -> listListener.run(), getExecutor()); } private boolean isOrdered(DataList dataList) { if (! (dataList instanceof Ordered)) return true; return ((Ordered)dataList).isOrdered(); } private boolean isStreamed(DataList dataList) { if (! 
(dataList instanceof Streamed)) return true; return ((Streamed)dataList).isStreamed(); } private void endRenderLevel(DataList<?> current) throws IOException { if (current instanceof ParentOfTopLevel) { endResponse(); closeIO(null); } else { endList(current); } } private void closeIO(Exception failed) { IOException closeException = null; try { stream.close(); } catch (IOException e) { closeException = e; logger.log(Level.WARNING, "Exception caught while closing stream to client.", e); } finally { if (failed != null) { success.completeExceptionally(failed); } else if (closeException != null) { success.completeExceptionally(closeException); } else { success.complete(true); } if (channel != null) { channel.close(completionHandler); } } } @Override public String toString() { return "listener to " + list; } }
class DataListListener extends RendererListener { /** The index of the next data item where rendering should be initiated in this list */ private int currentIndex = 0; /** Children of this which has started rendering but not yet completed */ private int uncompletedChildren = 0; private boolean listStartIsRendered = false; /** The list which this is listening to */ private final DataList list; /** The listener to the parent of this list, or null if this is the root */ private final DataListListener parent; /** Queue of rendering tasks that can be executed immediately without dispatching to executor and incuring a context switch */ private final Queue<Runnable> syncTasks = new LinkedList<>(); public DataListListener(DataList list, DataListListener parent) { this.list = list; this.parent = parent; } @Override private void endListLevel() throws IOException { endRenderLevel(list); stream.flush(); dataListListenerStack.removeFirst(); if (parent != null) parent.childCompleted(); list.close(); } /** Called each time a direct child of this completed. */ private void childCompleted() { uncompletedChildren--; if (uncompletedChildren > 0) return; if (list.incoming().isComplete()) run(); } /** * Resumes rendering data from the current position. * Called both on completion (by this), and when new data is available (from the new data listener). * * @return whether this started rendering */ @SuppressWarnings("unchecked") private boolean renderData() throws IOException { if (dataListListenerStack.peekFirst() != this) return false; renderDataListStart(); for (Object data : list.incoming().drain()) list.add((Data) data); renderDataList(list); return true; } void renderDataListStart() throws IOException { if ( ! listStartIsRendered) { if (list instanceof ParentOfTopLevel) beginResponse(stream); else beginList(list); listStartIsRendered = true; } } /** Renders a list. 
*/ private void renderDataList(DataList list) throws IOException { boolean ordered = isOrdered(list); if (list.asList() == null) { logger.log(Level.WARNING, "DataList.asList() returned null, indicating it is closed. " + "This is likely caused by adding the same list multiple " + "times in the response."); return; } while (currentIndex < list.asList().size()) { Data data = list.get(currentIndex++); if (data instanceof DataList) { listenTo((DataList)data, ordered && isStreamed((DataList)data)); uncompletedChildren++; if (ordered) return; } else { data(data); } } } private void listenTo(DataList<?> subList, boolean listenToNewDataAdded) throws IOException { DataListListener listListener = new DataListListener(subList,this); dataListListenerStack.addFirst(listListener); if (listenToNewDataAdded) subList.incoming().addNewDataListener(new DataListener(listListener), getExecutor()); flushIfLikelyToSuspend(subList); if (subList.isFrozen()) syncTasks.add(listListener); else subList.addFreezeListener(listListener, getExecutor()); if (subList.completeFuture().isDone()) syncTasks.add(listListener); else subList.completeFuture().whenCompleteAsync((__, ___) -> listListener.run(), getExecutor()); if (subList.incoming().completedFuture().isDone()) syncTasks.add(listListener); else subList.incoming().completedFuture().whenCompleteAsync((__, ___) -> listListener.run(), getExecutor()); } private boolean isOrdered(DataList dataList) { if (! (dataList instanceof Ordered)) return true; return ((Ordered)dataList).isOrdered(); } private boolean isStreamed(DataList dataList) { if (! 
(dataList instanceof Streamed)) return true; return ((Streamed)dataList).isStreamed(); } private void endRenderLevel(DataList<?> current) throws IOException { if (current instanceof ParentOfTopLevel) { endResponse(); closeIO(null); } else { endList(current); } } private void closeIO(Exception failed) { IOException closeException = null; try { stream.close(); } catch (IOException e) { closeException = e; logger.log(Level.WARNING, "Exception caught while closing stream to client.", e); } finally { if (failed != null) { success.completeExceptionally(failed); } else if (closeException != null) { success.completeExceptionally(closeException); } else { success.complete(true); } if (channel != null) { channel.close(completionHandler); } } } @Override public String toString() { return "listener to " + list; } }
Discussed offline, added some TODOs
public ApplicationFile delete() { if (file.isDirectory()) { if (!listFiles().isEmpty()) throw new RuntimeException("Can't delete, directory not empty: " + this + "(" + listFiles() + ")." + listFiles().size()); var files = file.listFiles(); if (files != null) { for (File f : files) { deleteFile(f); } } } if (!file.delete()) throw new IllegalStateException("Unable to delete: " + this); uncheck(() -> writeMetaFile("", ContentStatusDeleted)); return this; }
if (!listFiles().isEmpty())
public ApplicationFile delete() { if (file.isDirectory()) { if (!listFiles().isEmpty()) throw new RuntimeException("Can't delete, directory not empty: " + this + "(" + listFiles() + ")." + listFiles().size()); var files = file.listFiles(); if (files != null) { for (File f : files) { deleteFile(f); } } } if (!file.delete()) throw new IllegalStateException("Unable to delete: " + this); uncheck(() -> writeMetaFile("", ContentStatusDeleted)); return this; }
class FilesApplicationFile extends ApplicationFile { private static final Logger log = Logger.getLogger("FilesApplicationFile"); private final File file; private final ObjectMapper mapper = new ObjectMapper(); public FilesApplicationFile(Path path, File file) { super(path); this.file = file; } @Override public boolean isDirectory() { return file.isDirectory(); } @Override public boolean exists() { return file.exists(); } @Override public static boolean deleteFile(File path) { if (path.exists() && path.isDirectory()) { File[] files = path.listFiles(); for (File value : files) { if (value.isDirectory()) deleteFile(value); else value.delete(); } } return(path.delete()); } @Override public Reader createReader() throws FileNotFoundException { return new FileReader(file); } @Override public InputStream createInputStream() throws FileNotFoundException { return new FileInputStream(file); } @Override public ApplicationFile createDirectory() { if (file.isDirectory()) return this; if (file.exists()) { throw new IllegalArgumentException("Unable to create directory, file exists: "+file); } if (!file.mkdirs()) { throw new IllegalArgumentException("Unable to create directory: "+file); } try { writeMetaFile("", ContentStatusNew); } catch (IOException e) { throw new RuntimeException(e); } return this; } @Override public ApplicationFile writeFile(Reader input) { return uncheck(() -> writeFile(Utf8.toBytes(IOUtils.readAll(input)))); } @Override public ApplicationFile writeFile(InputStream input) { return uncheck(() -> writeFile(input.readAllBytes())); } private ApplicationFile writeFile(byte[] data) { if (file.getParentFile() != null) file.getParentFile().mkdirs(); String status = file.exists() ? 
ApplicationFile.ContentStatusChanged : ApplicationFile.ContentStatusNew; uncheck(() -> Files.write(file.toPath(), data)); uncheck(() -> writeMetaFile(data, status)); return this; } @Override public ApplicationFile appendFile(String value) { if (file.getParentFile() != null) { file.getParentFile().mkdirs(); } try { String status = file.exists() ? ContentStatusChanged : ContentStatusNew; IOUtils.writeFile(file, value, true); writeMetaFile(value, status); } catch (IOException e) { throw new RuntimeException(e); } return this; } @Override public List<ApplicationFile> listFiles(final PathFilter filter) { List<ApplicationFile> files = new ArrayList<>(); if (!file.isDirectory()) return files; FileFilter fileFilter = pathname -> filter.accept(path.append(pathname.getName())); File[] list = file.listFiles(fileFilter); if (list == null) return files; for (File child : list) { if (!child.getName().startsWith(".")) { files.add(new FilesApplicationFile(path.append(child.getName()), child)); } } return files; } private void writeMetaFile(String data, String status) throws IOException { writeMetaFile(Utf8.toBytes(data), status); } private void writeMetaFile(byte[] data, String status) throws IOException { File metaDir = createMetaDir(); log.log(Level.FINE, () -> "meta dir=" + metaDir); File metaFile = new File(metaDir + "/" + getPath().getName()); if (status == null) status = metaFile.exists() ? ContentStatusChanged : ContentStatusNew; String hash = (file.isDirectory() || status.equals(ContentStatusDeleted)) ? 
"" : ConfigUtils.getMd5(data); mapper.writeValue(metaFile, new MetaData(status, hash)); } private File createMetaDir() { File metaDir = getMetaDir(); if (!metaDir.exists()) { log.log(Level.FINE, () -> "Creating meta dir " + metaDir); metaDir.mkdirs(); } return metaDir; } private File getMetaDir() { String substring = file.getAbsolutePath().substring(0, file.getAbsolutePath().lastIndexOf("/") + 1); return new File(substring + Path.fromString(".meta/")); } public MetaData getMetaData() { File metaDir = getMetaDir(); File metaFile = new File(metaDir + "/" + getPath().getName()); log.log(Level.FINE, () -> "Getting metadata for " + metaFile); if (metaFile.exists()) { try { return mapper.readValue(metaFile, MetaData.class); } catch (IOException e) { System.out.println("whot:" + Exceptions.toMessageString(e)); } } try { if (file.isDirectory()) { return new MetaData(ContentStatusNew, ""); } else { return new MetaData(ContentStatusNew, ConfigUtils.getMd5(IOUtils.readAll(createReader()))); } } catch (IOException | IllegalArgumentException e) { return null; } } @Override public long getSize() { return file.length(); } @Override public int compareTo(ApplicationFile other) { if (other == this) return 0; return this.getPath().getName().compareTo((other).getPath().getName()); } }
class FilesApplicationFile extends ApplicationFile { private static final Logger log = Logger.getLogger("FilesApplicationFile"); private final File file; private final ObjectMapper mapper = new ObjectMapper(); public FilesApplicationFile(Path path, File file) { super(path); this.file = file; } @Override public boolean isDirectory() { return file.isDirectory(); } @Override public boolean exists() { return file.exists(); } @Override public static boolean deleteFile(File path) { if (path.isDirectory()) { File[] files = path.listFiles(); for (File value : files) { if (value.isDirectory()) deleteFile(value); else value.delete(); } } return(path.delete()); } @Override public Reader createReader() throws FileNotFoundException { return new FileReader(file); } @Override public InputStream createInputStream() throws FileNotFoundException { return new FileInputStream(file); } @Override public ApplicationFile createDirectory() { if (file.isDirectory()) return this; if (file.exists()) { throw new IllegalArgumentException("Unable to create directory, file exists: "+file); } if (!file.mkdirs()) { throw new IllegalArgumentException("Unable to create directory: "+file); } try { writeMetaFile("", ContentStatusNew); } catch (IOException e) { throw new RuntimeException(e); } return this; } @Override public ApplicationFile writeFile(Reader input) { return uncheck(() -> writeFile(Utf8.toBytes(IOUtils.readAll(input)))); } @Override public ApplicationFile writeFile(InputStream input) { return uncheck(() -> writeFile(input.readAllBytes())); } private ApplicationFile writeFile(byte[] data) { if (file.getParentFile() != null) file.getParentFile().mkdirs(); String status = file.exists() ? 
ApplicationFile.ContentStatusChanged : ApplicationFile.ContentStatusNew; uncheck(() -> Files.write(file.toPath(), data)); uncheck(() -> writeMetaFile(data, status)); return this; } @Override public ApplicationFile appendFile(String value) { if (file.getParentFile() != null) { file.getParentFile().mkdirs(); } try { String status = file.exists() ? ContentStatusChanged : ContentStatusNew; IOUtils.writeFile(file, value, true); writeMetaFile(value, status); } catch (IOException e) { throw new RuntimeException(e); } return this; } @Override public List<ApplicationFile> listFiles(final PathFilter filter) { List<ApplicationFile> files = new ArrayList<>(); if (!file.isDirectory()) return files; FileFilter fileFilter = pathname -> filter.accept(path.append(pathname.getName())); File[] list = file.listFiles(fileFilter); if (list == null) return files; for (File child : list) { if (!child.getName().startsWith(".")) { files.add(new FilesApplicationFile(path.append(child.getName()), child)); } } return files; } private void writeMetaFile(String data, String status) throws IOException { writeMetaFile(Utf8.toBytes(data), status); } private void writeMetaFile(byte[] data, String status) throws IOException { File metaDir = createMetaDir(); log.log(Level.FINE, () -> "meta dir=" + metaDir); File metaFile = new File(metaDir + "/" + getPath().getName()); if (status == null) status = metaFile.exists() ? ContentStatusChanged : ContentStatusNew; String hash = (file.isDirectory() || status.equals(ContentStatusDeleted)) ? 
"" : ConfigUtils.getMd5(data); mapper.writeValue(metaFile, new MetaData(status, hash)); } private File createMetaDir() { File metaDir = getMetaDir(); if (!metaDir.exists()) { log.log(Level.FINE, () -> "Creating meta dir " + metaDir); metaDir.mkdirs(); } return metaDir; } private File getMetaDir() { String substring = file.getAbsolutePath().substring(0, file.getAbsolutePath().lastIndexOf("/") + 1); return new File(substring + Path.fromString(".meta/")); } public MetaData getMetaData() { File metaDir = getMetaDir(); File metaFile = new File(metaDir + "/" + getPath().getName()); log.log(Level.FINE, () -> "Getting metadata for " + metaFile); if (metaFile.exists()) { try { return mapper.readValue(metaFile, MetaData.class); } catch (IOException e) { System.out.println("whot:" + Exceptions.toMessageString(e)); } } try { if (file.isDirectory()) { return new MetaData(ContentStatusNew, ""); } else { return new MetaData(ContentStatusNew, ConfigUtils.getMd5(IOUtils.readAll(createReader()))); } } catch (IOException | IllegalArgumentException e) { return null; } } @Override public long getSize() { return file.length(); } @Override public int compareTo(ApplicationFile other) { if (other == this) return 0; return this.getPath().getName().compareTo((other).getPath().getName()); } }
`try-with-resources` handles this automatically, by the way: when the `close()` it performs throws, that exception is added as a suppressed exception to whatever was thrown inside the `try { }` block — or it is thrown as the main exception if nothing else was thrown.
/**
 * Renders this listener's list if it is in a renderable state, then drains and
 * runs any queued synchronous tasks, rethrowing the first RuntimeException any
 * of them threw (with later failures attached as suppressed exceptions).
 */
protected void render() throws IOException, InterruptedException, ExecutionException {
    try {
        // Only the listener currently at the top of the stack may render.
        if (dataListListenerStack.peekFirst() != this) return;
        // Before handover we wait for the list to freeze; after handover we wait for completion.
        if (beforeHandoverMode && ! list.isFrozen()) return;
        if ( ! beforeHandoverMode) list.completeFuture().get();
        boolean startedRendering = renderData();
        if ( ! startedRendering || uncompletedChildren > 0) return;
        if (list.completeFuture().isDone())
            endListLevel();
        else
            stream.flush(); // More data may still arrive; push what we have so far.
    }
    finally {
        // Always drain the sync-task queue, even if rendering above threw.
        RuntimeException exception = null;
        while (!syncTasks.isEmpty()) {
            try {
                syncTasks.poll().run();
            }
            catch (RuntimeException e) {
                // Keep the first failure as primary; suppress the rest.
                if (exception == null) exception = e;
                else exception.addSuppressed(e);
            }
        }
        if (exception != null) throw exception;
    }
}
if (exception != null) throw exception;
/**
 * Renders this listener's list when it is ready, then runs all queued
 * synchronous tasks; the first task failure is rethrown with subsequent
 * failures attached as suppressed exceptions.
 */
protected void render() throws IOException, InterruptedException, ExecutionException {
    try {
        if (dataListListenerStack.peekFirst() != this) return;
        if (beforeHandoverMode && ! list.isFrozen()) return;
        if ( ! beforeHandoverMode) list.completeFuture().get();

        boolean rendered = renderData();
        if (rendered && uncompletedChildren == 0) {
            if (list.completeFuture().isDone())
                endListLevel();
            else
                stream.flush();
        }
    }
    finally {
        // Drain the sync-task queue unconditionally, collecting failures.
        RuntimeException firstFailure = null;
        for (Runnable task = syncTasks.poll(); task != null; task = syncTasks.poll()) {
            try {
                task.run();
            }
            catch (RuntimeException e) {
                if (firstFailure == null) firstFailure = e;
                else firstFailure.addSuppressed(e);
            }
        }
        if (firstFailure != null) throw firstFailure;
    }
}
class DataListListener extends RendererListener { /** The index of the next data item where rendering should be initiated in this list */ private int currentIndex = 0; /** Children of this which has started rendering but not yet completed */ private int uncompletedChildren = 0; private boolean listStartIsRendered = false; /** The list which this is listening to */ private final DataList list; /** The listener to the parent of this list, or null if this is the root */ private final DataListListener parent; /** Queue of rendering tasks that can be executed immediately without dispatching to executor and incuring a context switch */ private final Queue<Runnable> syncTasks = new LinkedList<>(); public DataListListener(DataList list, DataListListener parent) { this.list = list; this.parent = parent; } @Override private void endListLevel() throws IOException { endRenderLevel(list); stream.flush(); dataListListenerStack.removeFirst(); if (parent != null) parent.childCompleted(); list.close(); } /** Called each time a direct child of this completed. */ private void childCompleted() { uncompletedChildren--; if (uncompletedChildren > 0) return; if (list.incoming().isComplete()) run(); } /** * Resumes rendering data from the current position. * Called both on completion (by this), and when new data is available (from the new data listener). * * @return whether this started rendering */ @SuppressWarnings("unchecked") private boolean renderData() throws IOException { if (dataListListenerStack.peekFirst() != this) return false; renderDataListStart(); for (Object data : list.incoming().drain()) list.add((Data) data); renderDataList(list); return true; } void renderDataListStart() throws IOException { if ( ! listStartIsRendered) { if (list instanceof ParentOfTopLevel) beginResponse(stream); else beginList(list); listStartIsRendered = true; } } /** Renders a list. 
*/ private void renderDataList(DataList list) throws IOException { boolean ordered = isOrdered(list); if (list.asList() == null) { logger.log(Level.WARNING, "DataList.asList() returned null, indicating it is closed. " + "This is likely caused by adding the same list multiple " + "times in the response."); return; } while (currentIndex < list.asList().size()) { Data data = list.get(currentIndex++); if (data instanceof DataList) { listenTo((DataList)data, ordered && isStreamed((DataList)data)); uncompletedChildren++; if (ordered) return; } else { data(data); } } } private void listenTo(DataList<?> subList, boolean listenToNewDataAdded) throws IOException { DataListListener listListener = new DataListListener(subList,this); dataListListenerStack.addFirst(listListener); if (listenToNewDataAdded) subList.incoming().addNewDataListener(new DataListener(listListener), getExecutor()); flushIfLikelyToSuspend(subList); if (subList.isFrozen()) syncTasks.add(listListener); else subList.addFreezeListener(listListener, getExecutor()); if (subList.completeFuture().isDone()) syncTasks.add(listListener); else subList.completeFuture().whenCompleteAsync((__, ___) -> listListener.run(), getExecutor()); if (subList.incoming().completedFuture().isDone()) syncTasks.add(listListener); else subList.incoming().completedFuture().whenCompleteAsync((__, ___) -> listListener.run(), getExecutor()); } private boolean isOrdered(DataList dataList) { if (! (dataList instanceof Ordered)) return true; return ((Ordered)dataList).isOrdered(); } private boolean isStreamed(DataList dataList) { if (! 
(dataList instanceof Streamed)) return true; return ((Streamed)dataList).isStreamed(); } private void endRenderLevel(DataList<?> current) throws IOException { if (current instanceof ParentOfTopLevel) { endResponse(); closeIO(null); } else { endList(current); } } private void closeIO(Exception failed) { IOException closeException = null; try { stream.close(); } catch (IOException e) { closeException = e; logger.log(Level.WARNING, "Exception caught while closing stream to client.", e); } finally { if (failed != null) { success.completeExceptionally(failed); } else if (closeException != null) { success.completeExceptionally(closeException); } else { success.complete(true); } if (channel != null) { channel.close(completionHandler); } } } @Override public String toString() { return "listener to " + list; } }
class DataListListener extends RendererListener { /** The index of the next data item where rendering should be initiated in this list */ private int currentIndex = 0; /** Children of this which has started rendering but not yet completed */ private int uncompletedChildren = 0; private boolean listStartIsRendered = false; /** The list which this is listening to */ private final DataList list; /** The listener to the parent of this list, or null if this is the root */ private final DataListListener parent; /** Queue of rendering tasks that can be executed immediately without dispatching to executor and incuring a context switch */ private final Queue<Runnable> syncTasks = new LinkedList<>(); public DataListListener(DataList list, DataListListener parent) { this.list = list; this.parent = parent; } @Override private void endListLevel() throws IOException { endRenderLevel(list); stream.flush(); dataListListenerStack.removeFirst(); if (parent != null) parent.childCompleted(); list.close(); } /** Called each time a direct child of this completed. */ private void childCompleted() { uncompletedChildren--; if (uncompletedChildren > 0) return; if (list.incoming().isComplete()) run(); } /** * Resumes rendering data from the current position. * Called both on completion (by this), and when new data is available (from the new data listener). * * @return whether this started rendering */ @SuppressWarnings("unchecked") private boolean renderData() throws IOException { if (dataListListenerStack.peekFirst() != this) return false; renderDataListStart(); for (Object data : list.incoming().drain()) list.add((Data) data); renderDataList(list); return true; } void renderDataListStart() throws IOException { if ( ! listStartIsRendered) { if (list instanceof ParentOfTopLevel) beginResponse(stream); else beginList(list); listStartIsRendered = true; } } /** Renders a list. 
*/ private void renderDataList(DataList list) throws IOException { boolean ordered = isOrdered(list); if (list.asList() == null) { logger.log(Level.WARNING, "DataList.asList() returned null, indicating it is closed. " + "This is likely caused by adding the same list multiple " + "times in the response."); return; } while (currentIndex < list.asList().size()) { Data data = list.get(currentIndex++); if (data instanceof DataList) { listenTo((DataList)data, ordered && isStreamed((DataList)data)); uncompletedChildren++; if (ordered) return; } else { data(data); } } } private void listenTo(DataList<?> subList, boolean listenToNewDataAdded) throws IOException { DataListListener listListener = new DataListListener(subList,this); dataListListenerStack.addFirst(listListener); if (listenToNewDataAdded) subList.incoming().addNewDataListener(new DataListener(listListener), getExecutor()); flushIfLikelyToSuspend(subList); if (subList.isFrozen()) syncTasks.add(listListener); else subList.addFreezeListener(listListener, getExecutor()); if (subList.completeFuture().isDone()) syncTasks.add(listListener); else subList.completeFuture().whenCompleteAsync((__, ___) -> listListener.run(), getExecutor()); if (subList.incoming().completedFuture().isDone()) syncTasks.add(listListener); else subList.incoming().completedFuture().whenCompleteAsync((__, ___) -> listListener.run(), getExecutor()); } private boolean isOrdered(DataList dataList) { if (! (dataList instanceof Ordered)) return true; return ((Ordered)dataList).isOrdered(); } private boolean isStreamed(DataList dataList) { if (! 
(dataList instanceof Streamed)) return true; return ((Streamed)dataList).isStreamed(); } private void endRenderLevel(DataList<?> current) throws IOException { if (current instanceof ParentOfTopLevel) { endResponse(); closeIO(null); } else { endList(current); } } private void closeIO(Exception failed) { IOException closeException = null; try { stream.close(); } catch (IOException e) { closeException = e; logger.log(Level.WARNING, "Exception caught while closing stream to client.", e); } finally { if (failed != null) { success.completeExceptionally(failed); } else if (closeException != null) { success.completeExceptionally(closeException); } else { success.complete(true); } if (channel != null) { channel.close(completionHandler); } } } @Override public String toString() { return "listener to " + list; } }
JUnit seems to now do a stack trim by default, but a really poor one, leaving a tail of stack frames that we don't want, but which also doesn't contain the info we expected in _our_ trim code, so that also breaks 😢 This is a best-effort attempt to remedy that. But, hmm ... maybe `java.lang.Thread` should be exempt here. That's actually a valid bottom.
/**
 * Best-effort trimming of a failure's stack trace down to the frames that
 * belong to the user's test code, recursing into causes and suppressed
 * exceptions. Mutates the given throwable in place.
 */
static void trimStackTraces(Throwable thrown) {
    if (thrown == null)
        return;

    StackTraceElement[] stack = thrown.getStackTrace();
    int i = 0;
    int firstReflectFrame = -1;
    int cutoff = 0;
    boolean rootedInTestFramework = false;
    while (++i < stack.length) { // NOTE(review): frame 0 is never inspected — presumably intentional; verify
        // The trace is "rooted" once a frame from a known test-framework entry class appears.
        rootedInTestFramework |= testFrameworkRootClasses.contains(stack[i].getClassName());
        if (firstReflectFrame == -1 && (stack[i].getClassName().startsWith("jdk.internal.reflect.") || stack[i].getClassName().startsWith("java.lang.reflect.")))
            firstReflectFrame = i;
        if (rootedInTestFramework && firstReflectFrame > 0) {
            // Cut at the reflective invocation of the test method.
            cutoff = firstReflectFrame;
            break;
        }
        boolean isDynamicTestInvocation = "org.junit.jupiter.engine.descriptor.DynamicTestTestDescriptor".equals(stack[i].getClassName());
        if (isDynamicTestInvocation) {
            cutoff = i;
            break;
        }
    }
    if (cutoff == 0) {
        // Fallback: trim trailing native, reflection and java.util frames from the bottom.
        // Fixed: was startsWith("java.lang."), which also trimmed valid bottom
        // frames such as java.lang.Thread; only reflection frames are trimmed now.
        while (--i >= 0 && (   stack[i].isNativeMethod()
                            || stack[i].getClassName().startsWith("java.lang.reflect.")
                            || stack[i].getClassName().startsWith("java.util.")));
        cutoff = i + 1;
    }
    thrown.setStackTrace(copyOf(stack, cutoff));
    for (Throwable suppressed : thrown.getSuppressed())
        trimStackTraces(suppressed);
    trimStackTraces(thrown.getCause());
}
cutoff = i + 1;
/**
 * Best-effort trimming of a failure's stack trace down to the frames that
 * belong to the user's test code, recursing into causes and suppressed
 * exceptions. Mutates the given throwable in place.
 */
static void trimStackTraces(Throwable thrown) {
    if (thrown == null)
        return;

    StackTraceElement[] stack = thrown.getStackTrace();
    int i = 0;
    int firstReflectFrame = -1;
    int cutoff = 0;
    boolean rootedInTestFramework = false;
    while (++i < stack.length) { // NOTE(review): frame 0 is never inspected — presumably intentional; verify
        // The trace is "rooted" once a frame from a known test-framework entry class appears.
        rootedInTestFramework |= testFrameworkRootClasses.contains(stack[i].getClassName());
        if (firstReflectFrame == -1 && (stack[i].getClassName().startsWith("jdk.internal.reflect.") || stack[i].getClassName().startsWith("java.lang.reflect.")))
            firstReflectFrame = i;
        if (rootedInTestFramework && firstReflectFrame > 0) {
            // Cut at the reflective invocation of the test method.
            cutoff = firstReflectFrame;
            break;
        }
        boolean isDynamicTestInvocation = "org.junit.jupiter.engine.descriptor.DynamicTestTestDescriptor".equals(stack[i].getClassName());
        if (isDynamicTestInvocation) {
            cutoff = i;
            break;
        }
    }
    if (cutoff == 0) {
        // Fallback: trim trailing native, reflection and java.util frames from the bottom;
        // java.lang frames (e.g. java.lang.Thread) are kept as a valid stack bottom.
        while (--i >= 0 && (   stack[i].isNativeMethod()
                            || stack[i].getClassName().startsWith("java.lang.reflect.")
                            || stack[i].getClassName().startsWith("java.util.")));
        cutoff = i + 1;
    }
    thrown.setStackTrace(copyOf(stack, cutoff));
    for (Throwable suppressed : thrown.getSuppressed())
        trimStackTraces(suppressed);
    trimStackTraces(thrown.getCause());
}
class FailureNode extends NamedNode { private final Throwable thrown; private final Suite suite; FailureNode(NamedNode parent, Instant now, Throwable thrown, Suite suite) { super(parent, null, thrown.toString(), now); trimStackTraces(thrown); this.thrown = thrown; this.suite = suite; LogRecord record = new LogRecord(levelOf(status()), null); record.setThrown(thrown); record.setInstant(now); OutputNode child = new OutputNode(this); child.log.add(record); children.add(child); } public Throwable thrown() { return thrown; } @Override public Duration duration() { return Duration.ZERO; } @Override public Status status() { return suite == Suite.PRODUCTION_TEST && thrown instanceof InconclusiveTestException ? Status.inconclusive : thrown instanceof AssertionError ? Status.failed : Status.error; } }
/**
 * A node representing a single test failure: wraps the throwable (with its
 * stack trace trimmed in place) and records it under an output child node.
 */
class FailureNode extends NamedNode {

    private final Throwable thrown;
    private final Suite suite;

    FailureNode(NamedNode parent, Instant now, Throwable thrown, Suite suite) {
        super(parent, null, thrown.toString(), now);
        trimStackTraces(thrown); // Mutates the throwable's stack trace in place.
        this.thrown = thrown;
        this.suite = suite;
        LogRecord record = new LogRecord(levelOf(status()), null);
        record.setThrown(thrown);
        record.setInstant(now);
        OutputNode child = new OutputNode(this);
        child.log.add(record);
        children.add(child);
    }

    public Throwable thrown() {
        return thrown;
    }

    @Override
    public Duration duration() {
        return Duration.ZERO; // A failure carries no duration of its own.
    }

    @Override
    public Status status() {
        // Inconclusive only applies to production tests; assertion failures are
        // "failed", anything else is an "error".
        return suite == Suite.PRODUCTION_TEST && thrown instanceof InconclusiveTestException ? Status.inconclusive
             : thrown instanceof AssertionError ? Status.failed
             : Status.error;
    }

}
I expect this code will have to be amended many times when we upgrade JUnit in the future. I also haven't found a better way to trim these traces. I don't think we have easy access to the test class name we expect to be inside, either. I had a look for that now, but at least couldn't find anything obvious.
/**
 * Best-effort trimming of a failure's stack trace down to the frames that
 * belong to the user's test code, recursing into causes and suppressed
 * exceptions. Mutates the given throwable in place.
 */
static void trimStackTraces(Throwable thrown) {
    if (thrown == null)
        return;

    StackTraceElement[] stack = thrown.getStackTrace();
    int i = 0;
    int firstReflectFrame = -1;
    int cutoff = 0;
    boolean rootedInTestFramework = false;
    while (++i < stack.length) { // NOTE(review): frame 0 is never inspected — presumably intentional; verify
        // The trace is "rooted" once a frame from a known test-framework entry class appears.
        rootedInTestFramework |= testFrameworkRootClasses.contains(stack[i].getClassName());
        if (firstReflectFrame == -1 && (stack[i].getClassName().startsWith("jdk.internal.reflect.") || stack[i].getClassName().startsWith("java.lang.reflect.")))
            firstReflectFrame = i;
        if (rootedInTestFramework && firstReflectFrame > 0) {
            // Cut at the reflective invocation of the test method.
            cutoff = firstReflectFrame;
            break;
        }
        boolean isDynamicTestInvocation = "org.junit.jupiter.engine.descriptor.DynamicTestTestDescriptor".equals(stack[i].getClassName());
        if (isDynamicTestInvocation) {
            cutoff = i;
            break;
        }
    }
    if (cutoff == 0) {
        // Fallback: trim trailing native, reflection and java.util frames from the bottom.
        // Fixed: was startsWith("java.lang."), which also trimmed valid bottom
        // frames such as java.lang.Thread; only reflection frames are trimmed now.
        while (--i >= 0 && (   stack[i].isNativeMethod()
                            || stack[i].getClassName().startsWith("java.lang.reflect.")
                            || stack[i].getClassName().startsWith("java.util.")));
        cutoff = i + 1;
    }
    thrown.setStackTrace(copyOf(stack, cutoff));
    for (Throwable suppressed : thrown.getSuppressed())
        trimStackTraces(suppressed);
    trimStackTraces(thrown.getCause());
}
cutoff = i + 1;
/**
 * Best-effort trimming of a failure's stack trace down to the frames that
 * belong to the user's test code, recursing into causes and suppressed
 * exceptions. Mutates the given throwable in place.
 */
static void trimStackTraces(Throwable thrown) {
    if (thrown == null)
        return;

    StackTraceElement[] stack = thrown.getStackTrace();
    int i = 0;
    int firstReflectFrame = -1;
    int cutoff = 0;
    boolean rootedInTestFramework = false;
    while (++i < stack.length) { // NOTE(review): frame 0 is never inspected — presumably intentional; verify
        // The trace is "rooted" once a frame from a known test-framework entry class appears.
        rootedInTestFramework |= testFrameworkRootClasses.contains(stack[i].getClassName());
        if (firstReflectFrame == -1 && (stack[i].getClassName().startsWith("jdk.internal.reflect.") || stack[i].getClassName().startsWith("java.lang.reflect.")))
            firstReflectFrame = i;
        if (rootedInTestFramework && firstReflectFrame > 0) {
            // Cut at the reflective invocation of the test method.
            cutoff = firstReflectFrame;
            break;
        }
        boolean isDynamicTestInvocation = "org.junit.jupiter.engine.descriptor.DynamicTestTestDescriptor".equals(stack[i].getClassName());
        if (isDynamicTestInvocation) {
            cutoff = i;
            break;
        }
    }
    if (cutoff == 0) {
        // Fallback: trim trailing native, reflection and java.util frames from the bottom;
        // java.lang frames (e.g. java.lang.Thread) are kept as a valid stack bottom.
        while (--i >= 0 && (   stack[i].isNativeMethod()
                            || stack[i].getClassName().startsWith("java.lang.reflect.")
                            || stack[i].getClassName().startsWith("java.util.")));
        cutoff = i + 1;
    }
    thrown.setStackTrace(copyOf(stack, cutoff));
    for (Throwable suppressed : thrown.getSuppressed())
        trimStackTraces(suppressed);
    trimStackTraces(thrown.getCause());
}
/**
 * A node representing a single test failure: wraps the throwable (with its
 * stack trace trimmed in place) and records it under an output child node.
 */
class FailureNode extends NamedNode {

    private final Throwable thrown;
    private final Suite suite;

    FailureNode(NamedNode parent, Instant now, Throwable thrown, Suite suite) {
        super(parent, null, thrown.toString(), now);
        trimStackTraces(thrown); // Mutates the throwable's stack trace in place.
        this.thrown = thrown;
        this.suite = suite;
        LogRecord record = new LogRecord(levelOf(status()), null);
        record.setThrown(thrown);
        record.setInstant(now);
        OutputNode child = new OutputNode(this);
        child.log.add(record);
        children.add(child);
    }

    public Throwable thrown() {
        return thrown;
    }

    @Override
    public Duration duration() {
        return Duration.ZERO; // A failure carries no duration of its own.
    }

    @Override
    public Status status() {
        // Inconclusive only applies to production tests; assertion failures are
        // "failed", anything else is an "error".
        return suite == Suite.PRODUCTION_TEST && thrown instanceof InconclusiveTestException ? Status.inconclusive
             : thrown instanceof AssertionError ? Status.failed
             : Status.error;
    }

}
/**
 * A node representing a single test failure: wraps the throwable (with its
 * stack trace trimmed in place) and records it under an output child node.
 */
class FailureNode extends NamedNode {

    private final Throwable thrown;
    private final Suite suite;

    FailureNode(NamedNode parent, Instant now, Throwable thrown, Suite suite) {
        super(parent, null, thrown.toString(), now);
        trimStackTraces(thrown); // Mutates the throwable's stack trace in place.
        this.thrown = thrown;
        this.suite = suite;
        LogRecord record = new LogRecord(levelOf(status()), null);
        record.setThrown(thrown);
        record.setInstant(now);
        OutputNode child = new OutputNode(this);
        child.log.add(record);
        children.add(child);
    }

    public Throwable thrown() {
        return thrown;
    }

    @Override
    public Duration duration() {
        return Duration.ZERO; // A failure carries no duration of its own.
    }

    @Override
    public Status status() {
        // Inconclusive only applies to production tests; assertion failures are
        // "failed", anything else is an "error".
        return suite == Suite.PRODUCTION_TEST && thrown instanceof InconclusiveTestException ? Status.inconclusive
             : thrown instanceof AssertionError ? Status.failed
             : Status.error;
    }

}
🤞
/**
 * Best-effort trimming of a failure's stack trace down to the frames that
 * belong to the user's test code, recursing into causes and suppressed
 * exceptions. Mutates the given throwable in place.
 */
static void trimStackTraces(Throwable thrown) {
    if (thrown == null)
        return;

    StackTraceElement[] stack = thrown.getStackTrace();
    int i = 0;
    int firstReflectFrame = -1;
    int cutoff = 0;
    boolean rootedInTestFramework = false;
    while (++i < stack.length) { // NOTE(review): frame 0 is never inspected — presumably intentional; verify
        // The trace is "rooted" once a frame from a known test-framework entry class appears.
        rootedInTestFramework |= testFrameworkRootClasses.contains(stack[i].getClassName());
        if (firstReflectFrame == -1 && (stack[i].getClassName().startsWith("jdk.internal.reflect.") || stack[i].getClassName().startsWith("java.lang.reflect.")))
            firstReflectFrame = i;
        if (rootedInTestFramework && firstReflectFrame > 0) {
            // Cut at the reflective invocation of the test method.
            cutoff = firstReflectFrame;
            break;
        }
        boolean isDynamicTestInvocation = "org.junit.jupiter.engine.descriptor.DynamicTestTestDescriptor".equals(stack[i].getClassName());
        if (isDynamicTestInvocation) {
            cutoff = i;
            break;
        }
    }
    if (cutoff == 0) {
        // Fallback: trim trailing native, reflection and java.util frames from the bottom.
        // Fixed: was startsWith("java.lang."), which also trimmed valid bottom
        // frames such as java.lang.Thread; only reflection frames are trimmed now.
        while (--i >= 0 && (   stack[i].isNativeMethod()
                            || stack[i].getClassName().startsWith("java.lang.reflect.")
                            || stack[i].getClassName().startsWith("java.util.")));
        cutoff = i + 1;
    }
    thrown.setStackTrace(copyOf(stack, cutoff));
    for (Throwable suppressed : thrown.getSuppressed())
        trimStackTraces(suppressed);
    trimStackTraces(thrown.getCause());
}
cutoff = i + 1;
/**
 * Best-effort trimming of a failure's stack trace down to the frames that
 * belong to the user's test code, recursing into causes and suppressed
 * exceptions. Mutates the given throwable in place.
 */
static void trimStackTraces(Throwable thrown) {
    if (thrown == null)
        return;

    StackTraceElement[] stack = thrown.getStackTrace();
    int i = 0;
    int firstReflectFrame = -1;
    int cutoff = 0;
    boolean rootedInTestFramework = false;
    while (++i < stack.length) { // NOTE(review): frame 0 is never inspected — presumably intentional; verify
        // The trace is "rooted" once a frame from a known test-framework entry class appears.
        rootedInTestFramework |= testFrameworkRootClasses.contains(stack[i].getClassName());
        if (firstReflectFrame == -1 && (stack[i].getClassName().startsWith("jdk.internal.reflect.") || stack[i].getClassName().startsWith("java.lang.reflect.")))
            firstReflectFrame = i;
        if (rootedInTestFramework && firstReflectFrame > 0) {
            // Cut at the reflective invocation of the test method.
            cutoff = firstReflectFrame;
            break;
        }
        boolean isDynamicTestInvocation = "org.junit.jupiter.engine.descriptor.DynamicTestTestDescriptor".equals(stack[i].getClassName());
        if (isDynamicTestInvocation) {
            cutoff = i;
            break;
        }
    }
    if (cutoff == 0) {
        // Fallback: trim trailing native, reflection and java.util frames from the bottom;
        // java.lang frames (e.g. java.lang.Thread) are kept as a valid stack bottom.
        while (--i >= 0 && (   stack[i].isNativeMethod()
                            || stack[i].getClassName().startsWith("java.lang.reflect.")
                            || stack[i].getClassName().startsWith("java.util.")));
        cutoff = i + 1;
    }
    thrown.setStackTrace(copyOf(stack, cutoff));
    for (Throwable suppressed : thrown.getSuppressed())
        trimStackTraces(suppressed);
    trimStackTraces(thrown.getCause());
}
/**
 * A node representing a single test failure: wraps the throwable (with its
 * stack trace trimmed in place) and records it under an output child node.
 */
class FailureNode extends NamedNode {

    private final Throwable thrown;
    private final Suite suite;

    FailureNode(NamedNode parent, Instant now, Throwable thrown, Suite suite) {
        super(parent, null, thrown.toString(), now);
        trimStackTraces(thrown); // Mutates the throwable's stack trace in place.
        this.thrown = thrown;
        this.suite = suite;
        LogRecord record = new LogRecord(levelOf(status()), null);
        record.setThrown(thrown);
        record.setInstant(now);
        OutputNode child = new OutputNode(this);
        child.log.add(record);
        children.add(child);
    }

    public Throwable thrown() {
        return thrown;
    }

    @Override
    public Duration duration() {
        return Duration.ZERO; // A failure carries no duration of its own.
    }

    @Override
    public Status status() {
        // Inconclusive only applies to production tests; assertion failures are
        // "failed", anything else is an "error".
        return suite == Suite.PRODUCTION_TEST && thrown instanceof InconclusiveTestException ? Status.inconclusive
             : thrown instanceof AssertionError ? Status.failed
             : Status.error;
    }

}
/**
 * A node representing a single test failure: wraps the throwable (with its
 * stack trace trimmed in place) and records it under an output child node.
 */
class FailureNode extends NamedNode {

    private final Throwable thrown;
    private final Suite suite;

    FailureNode(NamedNode parent, Instant now, Throwable thrown, Suite suite) {
        super(parent, null, thrown.toString(), now);
        trimStackTraces(thrown); // Mutates the throwable's stack trace in place.
        this.thrown = thrown;
        this.suite = suite;
        LogRecord record = new LogRecord(levelOf(status()), null);
        record.setThrown(thrown);
        record.setInstant(now);
        OutputNode child = new OutputNode(this);
        child.log.add(record);
        children.add(child);
    }

    public Throwable thrown() {
        return thrown;
    }

    @Override
    public Duration duration() {
        return Duration.ZERO; // A failure carries no duration of its own.
    }

    @Override
    public Status status() {
        // Inconclusive only applies to production tests; assertion failures are
        // "failed", anything else is an "error".
        return suite == Suite.PRODUCTION_TEST && thrown instanceof InconclusiveTestException ? Status.inconclusive
             : thrown instanceof AssertionError ? Status.failed
             : Status.error;
    }

}
done
/**
 * Returns an adapter exposing this manager's leader-transfer operations
 * through the {@link StateChangeExecution} interface.
 */
public StateChangeExecution getStateChangeExecution() {
    return new StateChangeExecution() {
        @Override
        public void transferToLeader(FrontendNodeType newType) {
            // Qualified-this reaches the enclosing manager from the anonymous class.
            GlobalStateMgr.this.transferToLeader(newType);
        }

        @Override
        public void transferToNonLeader(FrontendNodeType newType) {
            GlobalStateMgr.this.transferToNonLeader(newType);
        }
    };
}
};
/** Returns the state-change execution adapter for this state manager. */
public StateChangeExecution getStateChangeExecution() {
    // presumably `execution` is a field initialized elsewhere in this class — verify against the class definition
    return execution;
}
/**
 * Initialization-on-demand holder: INSTANCE is created lazily on first access
 * of this class, and JVM class initialization guarantees thread safety.
 */
class SingletonHolder {
    private static final GlobalStateMgr INSTANCE = new GlobalStateMgr();
}
/**
 * Initialization-on-demand holder: INSTANCE is created lazily on first access
 * of this class, and JVM class initialization guarantees thread safety.
 */
class SingletonHolder {
    private static final GlobalStateMgr INSTANCE = new GlobalStateMgr();
}
Why `required="true"` all of a sudden?
/** Verifies the services.xml override result for instance beta1 in prod aws-us-west-2a on GCP. */
public void testProdBetaUsWest2a() throws TransformerException {
    String expected = """
            <?xml version="1.0" encoding="UTF-8" standalone="no"?>
            <!-- Copyright Vespa.ai. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root. --><services xmlns:deploy="vespa" xmlns:preprocess="properties" version="1.0">
                <container id="docsgateway" version="1.0">
                    <nodes count="3">
                        <resources disk="1800Gb" disk-speed="fast" memory="96Gb" storage-type="local" vcpu="48"/>
                    </nodes>
                </container>
                <container id="qrs" version="1.0">
                    <nodes count="3" required="true">
                        <resources disk="64Gb" memory="32Gb" vcpu="16"/>
                    </nodes>
                    <search/>
                </container>
                <container id="visitor" version="1.0">
                    <nodes count="2">
                        <resources disk="32Gb" memory="16Gb" vcpu="8"/>
                    </nodes>
                    <search/>
                </container>
                <content id="all" version="1.0">
                    <nodes count="3" groups="3" required="true">
                        <resources disk="1800Gb" disk-speed="fast" memory="96Gb" storage-type="local" vcpu="48"/>
                    </nodes>
                    <redundancy>1</redundancy>
                </content>
                <content id="filedocument" version="1.0">
                    <nodes count="2" groups="2" required="true">
                        <resources disk="37Gb" memory="9Gb" vcpu="3"/>
                    </nodes>
                    <redundancy>1</redundancy>
                </content>
            </services>
            """;
    assertOverride(InstanceName.from("beta1"), Environment.prod, RegionName.from("aws-us-west-2a"), CloudName.GCP, expected);
}
<nodes count="2" groups="2" required="true">
/** Verifies the services.xml override result for instance beta1 in prod aws-us-west-2a on GCP. */
public void testProdBetaUsWest2a() throws TransformerException {
    String expected = """
            <?xml version="1.0" encoding="UTF-8" standalone="no"?>
            <!-- Copyright Vespa.ai. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root. --><services xmlns:deploy="vespa" xmlns:preprocess="properties" version="1.0">
                <container id="docsgateway" version="1.0">
                    <nodes count="3">
                        <resources disk="1800Gb" disk-speed="fast" memory="96Gb" storage-type="local" vcpu="48"/>
                    </nodes>
                </container>
                <container id="qrs" version="1.0">
                    <nodes count="3" required="true">
                        <resources disk="64Gb" memory="32Gb" vcpu="16"/>
                    </nodes>
                    <search/>
                </container>
                <container id="visitor" version="1.0">
                    <nodes count="2">
                        <resources disk="32Gb" memory="16Gb" vcpu="8"/>
                    </nodes>
                    <search/>
                </container>
                <content id="all" version="1.0">
                    <nodes count="3" groups="3" required="true">
                        <resources disk="1800Gb" disk-speed="fast" memory="96Gb" storage-type="local" vcpu="48"/>
                    </nodes>
                    <redundancy>1</redundancy>
                </content>
                <content id="filedocument" version="1.0">
                    <nodes count="2" groups="2" required="true">
                        <resources disk="37Gb" memory="9Gb" vcpu="3"/>
                    </nodes>
                    <redundancy>1</redundancy>
                </content>
            </services>
            """;
    assertOverride(InstanceName.from("beta1"), Environment.prod, RegionName.from("aws-us-west-2a"), CloudName.GCP, expected);
}
class HostedOverrideProcessorComplexTest { private static final String servicesFile = "src/test/resources/complex-app/services.xml"; @Test @Test public void testProdBetaUsEast1b() throws TransformerException { String expected = """ <?xml version="1.0" encoding="UTF-8" standalone="no"?> <!-- Copyright Vespa.ai. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root. --><services xmlns:deploy="vespa" xmlns:preprocess="properties" version="1.0"> <container id="docsgateway" version="1.0"> <nodes count="3"> <resources disk="1800Gb" disk-speed="fast" memory="96Gb" storage-type="local" vcpu="48"/> </nodes> </container> <container id="qrs" version="1.0"> <nodes count="5" required="true"> <resources disk="64Gb" memory="32Gb" vcpu="16"/> </nodes> <search/> </container> <container id="visitor" version="1.0"> <nodes count="2"> <resources disk="32Gb" memory="16Gb" vcpu="8"/> </nodes> <search/> </container> <content id="all" version="1.0"> <nodes count="10" groups="10" required="true"> <resources disk="1800Gb" disk-speed="fast" memory="96Gb" storage-type="local" vcpu="48"/> </nodes> <redundancy>1</redundancy> </content> <content id="filedocument" version="1.0"> <nodes count="2" groups="2" required="true"> <resources disk="32Gb" memory="8Gb" vcpu="4"/> </nodes> <redundancy>1</redundancy> </content> </services> """; assertOverride(InstanceName.from("beta1"), Environment.prod, RegionName.from("aws-us-east-1b"), CloudName.AWS, expected); } private void assertOverride(InstanceName instance, Environment environment, RegionName region, CloudName cloud, String expected) throws TransformerException { ApplicationPackage app = FilesApplicationPackage.fromFile(new File(servicesFile).getParentFile()); Document inputDoc = Xml.getDocument(app.getServices()); Tags tags = app.getDeploymentSpec().instance(instance).map(DeploymentInstanceSpec::tags).orElse(Tags.empty()); Document newDoc = new OverrideProcessor(instance, environment, region, cloud, 
tags).process(inputDoc); assertEquals(expected, Xml.documentAsString(newDoc, true)); } }
class HostedOverrideProcessorComplexTest { private static final String servicesFile = "src/test/resources/complex-app/services.xml"; @Test @Test public void testProdBetaUsEast1b() throws TransformerException { String expected = """ <?xml version="1.0" encoding="UTF-8" standalone="no"?> <!-- Copyright Vespa.ai. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root. --><services xmlns:deploy="vespa" xmlns:preprocess="properties" version="1.0"> <container id="docsgateway" version="1.0"> <nodes count="3"> <resources disk="1800Gb" disk-speed="fast" memory="96Gb" storage-type="local" vcpu="48"/> </nodes> </container> <container id="qrs" version="1.0"> <nodes count="5" required="true"> <resources disk="64Gb" memory="32Gb" vcpu="16"/> </nodes> <search/> </container> <container id="visitor" version="1.0"> <nodes count="2"> <resources disk="32Gb" memory="16Gb" vcpu="8"/> </nodes> <search/> </container> <content id="all" version="1.0"> <nodes count="10" groups="10" required="true"> <resources disk="1800Gb" disk-speed="fast" memory="96Gb" storage-type="local" vcpu="48"/> </nodes> <redundancy>1</redundancy> </content> <content id="filedocument" version="1.0"> <nodes count="2" groups="2" required="true"> <resources disk="32Gb" memory="8Gb" vcpu="4"/> </nodes> <redundancy>1</redundancy> </content> </services> """; assertOverride(InstanceName.from("beta1"), Environment.prod, RegionName.from("aws-us-east-1b"), CloudName.AWS, expected); } private void assertOverride(InstanceName instance, Environment environment, RegionName region, CloudName cloud, String expected) throws TransformerException { ApplicationPackage app = FilesApplicationPackage.fromFile(new File(servicesFile).getParentFile()); Document inputDoc = Xml.getDocument(app.getServices()); Tags tags = app.getDeploymentSpec().instance(instance).map(DeploymentInstanceSpec::tags).orElse(Tags.empty()); Document newDoc = new OverrideProcessor(instance, environment, region, cloud, 
tags).process(inputDoc); assertEquals(expected, Xml.documentAsString(newDoc, true)); } }
This is special-cased for the `nodes` element in https://github.com/vespa-engine/vespa/blob/master/config-application-package/src/main/java/com/yahoo/config/application/OverrideProcessor.java#L230-L238. I don't know exactly why that is done, but it would not have happened if I had tested the override on another element, and I'm fairly confident this PR doesn't change the behaviour — it only makes that behaviour visible in the unit test.
public void testProdBetaUsWest2a() throws TransformerException { String expected = """ <?xml version="1.0" encoding="UTF-8" standalone="no"?> <!-- Copyright Vespa.ai. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root. --><services xmlns:deploy="vespa" xmlns:preprocess="properties" version="1.0"> <container id="docsgateway" version="1.0"> <nodes count="3"> <resources disk="1800Gb" disk-speed="fast" memory="96Gb" storage-type="local" vcpu="48"/> </nodes> </container> <container id="qrs" version="1.0"> <nodes count="3" required="true"> <resources disk="64Gb" memory="32Gb" vcpu="16"/> </nodes> <search/> </container> <container id="visitor" version="1.0"> <nodes count="2"> <resources disk="32Gb" memory="16Gb" vcpu="8"/> </nodes> <search/> </container> <content id="all" version="1.0"> <nodes count="3" groups="3" required="true"> <resources disk="1800Gb" disk-speed="fast" memory="96Gb" storage-type="local" vcpu="48"/> </nodes> <redundancy>1</redundancy> </content> <content id="filedocument" version="1.0"> <nodes count="2" groups="2" required="true"> <resources disk="37Gb" memory="9Gb" vcpu="3"/> </nodes> <redundancy>1</redundancy> </content> </services> """; assertOverride(InstanceName.from("beta1"), Environment.prod, RegionName.from("aws-us-west-2a"), CloudName.GCP, expected); }
<nodes count="2" groups="2" required="true">
public void testProdBetaUsWest2a() throws TransformerException { String expected = """ <?xml version="1.0" encoding="UTF-8" standalone="no"?> <!-- Copyright Vespa.ai. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root. --><services xmlns:deploy="vespa" xmlns:preprocess="properties" version="1.0"> <container id="docsgateway" version="1.0"> <nodes count="3"> <resources disk="1800Gb" disk-speed="fast" memory="96Gb" storage-type="local" vcpu="48"/> </nodes> </container> <container id="qrs" version="1.0"> <nodes count="3" required="true"> <resources disk="64Gb" memory="32Gb" vcpu="16"/> </nodes> <search/> </container> <container id="visitor" version="1.0"> <nodes count="2"> <resources disk="32Gb" memory="16Gb" vcpu="8"/> </nodes> <search/> </container> <content id="all" version="1.0"> <nodes count="3" groups="3" required="true"> <resources disk="1800Gb" disk-speed="fast" memory="96Gb" storage-type="local" vcpu="48"/> </nodes> <redundancy>1</redundancy> </content> <content id="filedocument" version="1.0"> <nodes count="2" groups="2" required="true"> <resources disk="37Gb" memory="9Gb" vcpu="3"/> </nodes> <redundancy>1</redundancy> </content> </services> """; assertOverride(InstanceName.from("beta1"), Environment.prod, RegionName.from("aws-us-west-2a"), CloudName.GCP, expected); }
class HostedOverrideProcessorComplexTest { private static final String servicesFile = "src/test/resources/complex-app/services.xml"; @Test @Test public void testProdBetaUsEast1b() throws TransformerException { String expected = """ <?xml version="1.0" encoding="UTF-8" standalone="no"?> <!-- Copyright Vespa.ai. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root. --><services xmlns:deploy="vespa" xmlns:preprocess="properties" version="1.0"> <container id="docsgateway" version="1.0"> <nodes count="3"> <resources disk="1800Gb" disk-speed="fast" memory="96Gb" storage-type="local" vcpu="48"/> </nodes> </container> <container id="qrs" version="1.0"> <nodes count="5" required="true"> <resources disk="64Gb" memory="32Gb" vcpu="16"/> </nodes> <search/> </container> <container id="visitor" version="1.0"> <nodes count="2"> <resources disk="32Gb" memory="16Gb" vcpu="8"/> </nodes> <search/> </container> <content id="all" version="1.0"> <nodes count="10" groups="10" required="true"> <resources disk="1800Gb" disk-speed="fast" memory="96Gb" storage-type="local" vcpu="48"/> </nodes> <redundancy>1</redundancy> </content> <content id="filedocument" version="1.0"> <nodes count="2" groups="2" required="true"> <resources disk="32Gb" memory="8Gb" vcpu="4"/> </nodes> <redundancy>1</redundancy> </content> </services> """; assertOverride(InstanceName.from("beta1"), Environment.prod, RegionName.from("aws-us-east-1b"), CloudName.AWS, expected); } private void assertOverride(InstanceName instance, Environment environment, RegionName region, CloudName cloud, String expected) throws TransformerException { ApplicationPackage app = FilesApplicationPackage.fromFile(new File(servicesFile).getParentFile()); Document inputDoc = Xml.getDocument(app.getServices()); Tags tags = app.getDeploymentSpec().instance(instance).map(DeploymentInstanceSpec::tags).orElse(Tags.empty()); Document newDoc = new OverrideProcessor(instance, environment, region, cloud, 
tags).process(inputDoc); assertEquals(expected, Xml.documentAsString(newDoc, true)); } }
class HostedOverrideProcessorComplexTest { private static final String servicesFile = "src/test/resources/complex-app/services.xml"; @Test @Test public void testProdBetaUsEast1b() throws TransformerException { String expected = """ <?xml version="1.0" encoding="UTF-8" standalone="no"?> <!-- Copyright Vespa.ai. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root. --><services xmlns:deploy="vespa" xmlns:preprocess="properties" version="1.0"> <container id="docsgateway" version="1.0"> <nodes count="3"> <resources disk="1800Gb" disk-speed="fast" memory="96Gb" storage-type="local" vcpu="48"/> </nodes> </container> <container id="qrs" version="1.0"> <nodes count="5" required="true"> <resources disk="64Gb" memory="32Gb" vcpu="16"/> </nodes> <search/> </container> <container id="visitor" version="1.0"> <nodes count="2"> <resources disk="32Gb" memory="16Gb" vcpu="8"/> </nodes> <search/> </container> <content id="all" version="1.0"> <nodes count="10" groups="10" required="true"> <resources disk="1800Gb" disk-speed="fast" memory="96Gb" storage-type="local" vcpu="48"/> </nodes> <redundancy>1</redundancy> </content> <content id="filedocument" version="1.0"> <nodes count="2" groups="2" required="true"> <resources disk="32Gb" memory="8Gb" vcpu="4"/> </nodes> <redundancy>1</redundancy> </content> </services> """; assertOverride(InstanceName.from("beta1"), Environment.prod, RegionName.from("aws-us-east-1b"), CloudName.AWS, expected); } private void assertOverride(InstanceName instance, Environment environment, RegionName region, CloudName cloud, String expected) throws TransformerException { ApplicationPackage app = FilesApplicationPackage.fromFile(new File(servicesFile).getParentFile()); Document inputDoc = Xml.getDocument(app.getServices()); Tags tags = app.getDeploymentSpec().instance(instance).map(DeploymentInstanceSpec::tags).orElse(Tags.empty()); Document newDoc = new OverrideProcessor(instance, environment, region, cloud, 
tags).process(inputDoc); assertEquals(expected, Xml.documentAsString(newDoc, true)); } }
We automatically downscale node resources in non-prod environments, but not when they are set specifically for that environment. When the region is resolved, this attribute is set to signal that the node resources were specified explicitly for this environment and are therefore "required".
public void testProdBetaUsWest2a() throws TransformerException { String expected = """ <?xml version="1.0" encoding="UTF-8" standalone="no"?> <!-- Copyright Vespa.ai. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root. --><services xmlns:deploy="vespa" xmlns:preprocess="properties" version="1.0"> <container id="docsgateway" version="1.0"> <nodes count="3"> <resources disk="1800Gb" disk-speed="fast" memory="96Gb" storage-type="local" vcpu="48"/> </nodes> </container> <container id="qrs" version="1.0"> <nodes count="3" required="true"> <resources disk="64Gb" memory="32Gb" vcpu="16"/> </nodes> <search/> </container> <container id="visitor" version="1.0"> <nodes count="2"> <resources disk="32Gb" memory="16Gb" vcpu="8"/> </nodes> <search/> </container> <content id="all" version="1.0"> <nodes count="3" groups="3" required="true"> <resources disk="1800Gb" disk-speed="fast" memory="96Gb" storage-type="local" vcpu="48"/> </nodes> <redundancy>1</redundancy> </content> <content id="filedocument" version="1.0"> <nodes count="2" groups="2" required="true"> <resources disk="37Gb" memory="9Gb" vcpu="3"/> </nodes> <redundancy>1</redundancy> </content> </services> """; assertOverride(InstanceName.from("beta1"), Environment.prod, RegionName.from("aws-us-west-2a"), CloudName.GCP, expected); }
<nodes count="2" groups="2" required="true">
public void testProdBetaUsWest2a() throws TransformerException { String expected = """ <?xml version="1.0" encoding="UTF-8" standalone="no"?> <!-- Copyright Vespa.ai. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root. --><services xmlns:deploy="vespa" xmlns:preprocess="properties" version="1.0"> <container id="docsgateway" version="1.0"> <nodes count="3"> <resources disk="1800Gb" disk-speed="fast" memory="96Gb" storage-type="local" vcpu="48"/> </nodes> </container> <container id="qrs" version="1.0"> <nodes count="3" required="true"> <resources disk="64Gb" memory="32Gb" vcpu="16"/> </nodes> <search/> </container> <container id="visitor" version="1.0"> <nodes count="2"> <resources disk="32Gb" memory="16Gb" vcpu="8"/> </nodes> <search/> </container> <content id="all" version="1.0"> <nodes count="3" groups="3" required="true"> <resources disk="1800Gb" disk-speed="fast" memory="96Gb" storage-type="local" vcpu="48"/> </nodes> <redundancy>1</redundancy> </content> <content id="filedocument" version="1.0"> <nodes count="2" groups="2" required="true"> <resources disk="37Gb" memory="9Gb" vcpu="3"/> </nodes> <redundancy>1</redundancy> </content> </services> """; assertOverride(InstanceName.from("beta1"), Environment.prod, RegionName.from("aws-us-west-2a"), CloudName.GCP, expected); }
class HostedOverrideProcessorComplexTest { private static final String servicesFile = "src/test/resources/complex-app/services.xml"; @Test @Test public void testProdBetaUsEast1b() throws TransformerException { String expected = """ <?xml version="1.0" encoding="UTF-8" standalone="no"?> <!-- Copyright Vespa.ai. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root. --><services xmlns:deploy="vespa" xmlns:preprocess="properties" version="1.0"> <container id="docsgateway" version="1.0"> <nodes count="3"> <resources disk="1800Gb" disk-speed="fast" memory="96Gb" storage-type="local" vcpu="48"/> </nodes> </container> <container id="qrs" version="1.0"> <nodes count="5" required="true"> <resources disk="64Gb" memory="32Gb" vcpu="16"/> </nodes> <search/> </container> <container id="visitor" version="1.0"> <nodes count="2"> <resources disk="32Gb" memory="16Gb" vcpu="8"/> </nodes> <search/> </container> <content id="all" version="1.0"> <nodes count="10" groups="10" required="true"> <resources disk="1800Gb" disk-speed="fast" memory="96Gb" storage-type="local" vcpu="48"/> </nodes> <redundancy>1</redundancy> </content> <content id="filedocument" version="1.0"> <nodes count="2" groups="2" required="true"> <resources disk="32Gb" memory="8Gb" vcpu="4"/> </nodes> <redundancy>1</redundancy> </content> </services> """; assertOverride(InstanceName.from("beta1"), Environment.prod, RegionName.from("aws-us-east-1b"), CloudName.AWS, expected); } private void assertOverride(InstanceName instance, Environment environment, RegionName region, CloudName cloud, String expected) throws TransformerException { ApplicationPackage app = FilesApplicationPackage.fromFile(new File(servicesFile).getParentFile()); Document inputDoc = Xml.getDocument(app.getServices()); Tags tags = app.getDeploymentSpec().instance(instance).map(DeploymentInstanceSpec::tags).orElse(Tags.empty()); Document newDoc = new OverrideProcessor(instance, environment, region, cloud, 
tags).process(inputDoc); assertEquals(expected, Xml.documentAsString(newDoc, true)); } }
class HostedOverrideProcessorComplexTest { private static final String servicesFile = "src/test/resources/complex-app/services.xml"; @Test @Test public void testProdBetaUsEast1b() throws TransformerException { String expected = """ <?xml version="1.0" encoding="UTF-8" standalone="no"?> <!-- Copyright Vespa.ai. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root. --><services xmlns:deploy="vespa" xmlns:preprocess="properties" version="1.0"> <container id="docsgateway" version="1.0"> <nodes count="3"> <resources disk="1800Gb" disk-speed="fast" memory="96Gb" storage-type="local" vcpu="48"/> </nodes> </container> <container id="qrs" version="1.0"> <nodes count="5" required="true"> <resources disk="64Gb" memory="32Gb" vcpu="16"/> </nodes> <search/> </container> <container id="visitor" version="1.0"> <nodes count="2"> <resources disk="32Gb" memory="16Gb" vcpu="8"/> </nodes> <search/> </container> <content id="all" version="1.0"> <nodes count="10" groups="10" required="true"> <resources disk="1800Gb" disk-speed="fast" memory="96Gb" storage-type="local" vcpu="48"/> </nodes> <redundancy>1</redundancy> </content> <content id="filedocument" version="1.0"> <nodes count="2" groups="2" required="true"> <resources disk="32Gb" memory="8Gb" vcpu="4"/> </nodes> <redundancy>1</redundancy> </content> </services> """; assertOverride(InstanceName.from("beta1"), Environment.prod, RegionName.from("aws-us-east-1b"), CloudName.AWS, expected); } private void assertOverride(InstanceName instance, Environment environment, RegionName region, CloudName cloud, String expected) throws TransformerException { ApplicationPackage app = FilesApplicationPackage.fromFile(new File(servicesFile).getParentFile()); Document inputDoc = Xml.getDocument(app.getServices()); Tags tags = app.getDeploymentSpec().instance(instance).map(DeploymentInstanceSpec::tags).orElse(Tags.empty()); Document newDoc = new OverrideProcessor(instance, environment, region, cloud, 
tags).process(inputDoc); assertEquals(expected, Xml.documentAsString(newDoc, true)); } }
Thanks, that makes perfect sense.
public void testProdBetaUsWest2a() throws TransformerException { String expected = """ <?xml version="1.0" encoding="UTF-8" standalone="no"?> <!-- Copyright Vespa.ai. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root. --><services xmlns:deploy="vespa" xmlns:preprocess="properties" version="1.0"> <container id="docsgateway" version="1.0"> <nodes count="3"> <resources disk="1800Gb" disk-speed="fast" memory="96Gb" storage-type="local" vcpu="48"/> </nodes> </container> <container id="qrs" version="1.0"> <nodes count="3" required="true"> <resources disk="64Gb" memory="32Gb" vcpu="16"/> </nodes> <search/> </container> <container id="visitor" version="1.0"> <nodes count="2"> <resources disk="32Gb" memory="16Gb" vcpu="8"/> </nodes> <search/> </container> <content id="all" version="1.0"> <nodes count="3" groups="3" required="true"> <resources disk="1800Gb" disk-speed="fast" memory="96Gb" storage-type="local" vcpu="48"/> </nodes> <redundancy>1</redundancy> </content> <content id="filedocument" version="1.0"> <nodes count="2" groups="2" required="true"> <resources disk="37Gb" memory="9Gb" vcpu="3"/> </nodes> <redundancy>1</redundancy> </content> </services> """; assertOverride(InstanceName.from("beta1"), Environment.prod, RegionName.from("aws-us-west-2a"), CloudName.GCP, expected); }
<nodes count="2" groups="2" required="true">
public void testProdBetaUsWest2a() throws TransformerException { String expected = """ <?xml version="1.0" encoding="UTF-8" standalone="no"?> <!-- Copyright Vespa.ai. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root. --><services xmlns:deploy="vespa" xmlns:preprocess="properties" version="1.0"> <container id="docsgateway" version="1.0"> <nodes count="3"> <resources disk="1800Gb" disk-speed="fast" memory="96Gb" storage-type="local" vcpu="48"/> </nodes> </container> <container id="qrs" version="1.0"> <nodes count="3" required="true"> <resources disk="64Gb" memory="32Gb" vcpu="16"/> </nodes> <search/> </container> <container id="visitor" version="1.0"> <nodes count="2"> <resources disk="32Gb" memory="16Gb" vcpu="8"/> </nodes> <search/> </container> <content id="all" version="1.0"> <nodes count="3" groups="3" required="true"> <resources disk="1800Gb" disk-speed="fast" memory="96Gb" storage-type="local" vcpu="48"/> </nodes> <redundancy>1</redundancy> </content> <content id="filedocument" version="1.0"> <nodes count="2" groups="2" required="true"> <resources disk="37Gb" memory="9Gb" vcpu="3"/> </nodes> <redundancy>1</redundancy> </content> </services> """; assertOverride(InstanceName.from("beta1"), Environment.prod, RegionName.from("aws-us-west-2a"), CloudName.GCP, expected); }
class HostedOverrideProcessorComplexTest { private static final String servicesFile = "src/test/resources/complex-app/services.xml"; @Test @Test public void testProdBetaUsEast1b() throws TransformerException { String expected = """ <?xml version="1.0" encoding="UTF-8" standalone="no"?> <!-- Copyright Vespa.ai. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root. --><services xmlns:deploy="vespa" xmlns:preprocess="properties" version="1.0"> <container id="docsgateway" version="1.0"> <nodes count="3"> <resources disk="1800Gb" disk-speed="fast" memory="96Gb" storage-type="local" vcpu="48"/> </nodes> </container> <container id="qrs" version="1.0"> <nodes count="5" required="true"> <resources disk="64Gb" memory="32Gb" vcpu="16"/> </nodes> <search/> </container> <container id="visitor" version="1.0"> <nodes count="2"> <resources disk="32Gb" memory="16Gb" vcpu="8"/> </nodes> <search/> </container> <content id="all" version="1.0"> <nodes count="10" groups="10" required="true"> <resources disk="1800Gb" disk-speed="fast" memory="96Gb" storage-type="local" vcpu="48"/> </nodes> <redundancy>1</redundancy> </content> <content id="filedocument" version="1.0"> <nodes count="2" groups="2" required="true"> <resources disk="32Gb" memory="8Gb" vcpu="4"/> </nodes> <redundancy>1</redundancy> </content> </services> """; assertOverride(InstanceName.from("beta1"), Environment.prod, RegionName.from("aws-us-east-1b"), CloudName.AWS, expected); } private void assertOverride(InstanceName instance, Environment environment, RegionName region, CloudName cloud, String expected) throws TransformerException { ApplicationPackage app = FilesApplicationPackage.fromFile(new File(servicesFile).getParentFile()); Document inputDoc = Xml.getDocument(app.getServices()); Tags tags = app.getDeploymentSpec().instance(instance).map(DeploymentInstanceSpec::tags).orElse(Tags.empty()); Document newDoc = new OverrideProcessor(instance, environment, region, cloud, 
tags).process(inputDoc); assertEquals(expected, Xml.documentAsString(newDoc, true)); } }
class HostedOverrideProcessorComplexTest { private static final String servicesFile = "src/test/resources/complex-app/services.xml"; @Test @Test public void testProdBetaUsEast1b() throws TransformerException { String expected = """ <?xml version="1.0" encoding="UTF-8" standalone="no"?> <!-- Copyright Vespa.ai. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root. --><services xmlns:deploy="vespa" xmlns:preprocess="properties" version="1.0"> <container id="docsgateway" version="1.0"> <nodes count="3"> <resources disk="1800Gb" disk-speed="fast" memory="96Gb" storage-type="local" vcpu="48"/> </nodes> </container> <container id="qrs" version="1.0"> <nodes count="5" required="true"> <resources disk="64Gb" memory="32Gb" vcpu="16"/> </nodes> <search/> </container> <container id="visitor" version="1.0"> <nodes count="2"> <resources disk="32Gb" memory="16Gb" vcpu="8"/> </nodes> <search/> </container> <content id="all" version="1.0"> <nodes count="10" groups="10" required="true"> <resources disk="1800Gb" disk-speed="fast" memory="96Gb" storage-type="local" vcpu="48"/> </nodes> <redundancy>1</redundancy> </content> <content id="filedocument" version="1.0"> <nodes count="2" groups="2" required="true"> <resources disk="32Gb" memory="8Gb" vcpu="4"/> </nodes> <redundancy>1</redundancy> </content> </services> """; assertOverride(InstanceName.from("beta1"), Environment.prod, RegionName.from("aws-us-east-1b"), CloudName.AWS, expected); } private void assertOverride(InstanceName instance, Environment environment, RegionName region, CloudName cloud, String expected) throws TransformerException { ApplicationPackage app = FilesApplicationPackage.fromFile(new File(servicesFile).getParentFile()); Document inputDoc = Xml.getDocument(app.getServices()); Tags tags = app.getDeploymentSpec().instance(instance).map(DeploymentInstanceSpec::tags).orElse(Tags.empty()); Document newDoc = new OverrideProcessor(instance, environment, region, cloud, 
tags).process(inputDoc); assertEquals(expected, Xml.documentAsString(newDoc, true)); } }
Looks like the same information is stored twice now, since `suggested` is simply the first element of `suggestions`. Consider adding a TODO for removing `suggested` from serialization (needs to happen after this rolls out).
private static void toSlime(Cluster cluster, Cursor clusterObject) { clusterObject.setBool(exclusiveKey, cluster.exclusive()); toSlime(cluster.minResources(), clusterObject.setObject(minResourcesKey)); toSlime(cluster.maxResources(), clusterObject.setObject(maxResourcesKey)); toSlime(cluster.groupSize(), clusterObject.setObject(groupSizeKey)); clusterObject.setBool(requiredKey, cluster.required()); toSlime(cluster.suggested(), clusterObject.setObject(suggestedKey)); toSlime(cluster.suggestions(), clusterObject.setArray(suggestionsKey)); toSlime(cluster.target(), clusterObject.setObject(targetKey)); if (! cluster.clusterInfo().isEmpty()) toSlime(cluster.clusterInfo(), clusterObject.setObject(clusterInfoKey)); if (! cluster.bcpGroupInfo().isEmpty()) toSlime(cluster.bcpGroupInfo(), clusterObject.setObject(bcpGroupInfoKey)); scalingEventsToSlime(cluster.scalingEvents(), clusterObject.setArray(scalingEventsKey)); }
toSlime(cluster.suggestions(), clusterObject.setArray(suggestionsKey));
private static void toSlime(Cluster cluster, Cursor clusterObject) { clusterObject.setBool(exclusiveKey, cluster.exclusive()); toSlime(cluster.minResources(), clusterObject.setObject(minResourcesKey)); toSlime(cluster.maxResources(), clusterObject.setObject(maxResourcesKey)); toSlime(cluster.groupSize(), clusterObject.setObject(groupSizeKey)); clusterObject.setBool(requiredKey, cluster.required()); toSlime(cluster.suggested(), clusterObject.setObject(suggestedKey)); toSlime(cluster.suggestions(), clusterObject.setArray(suggestionsKey)); toSlime(cluster.target(), clusterObject.setObject(targetKey)); if (! cluster.clusterInfo().isEmpty()) toSlime(cluster.clusterInfo(), clusterObject.setObject(clusterInfoKey)); if (! cluster.bcpGroupInfo().isEmpty()) toSlime(cluster.bcpGroupInfo(), clusterObject.setObject(bcpGroupInfoKey)); scalingEventsToSlime(cluster.scalingEvents(), clusterObject.setArray(scalingEventsKey)); }
class ApplicationSerializer { private static final String idKey = "id"; private static final String statusKey = "status"; private static final String currentReadShareKey = "currentReadShare"; private static final String maxReadShareKey = "maxReadShare"; private static final String clustersKey = "clusters"; private static final String exclusiveKey = "exclusive"; private static final String minResourcesKey = "min"; private static final String maxResourcesKey = "max"; private static final String groupSizeKey = "groupSize"; private static final String requiredKey = "required"; private static final String suggestedKey = "suggested"; private static final String suggestionsKey = "suggestionsKey"; private static final String clusterInfoKey = "clusterInfo"; private static final String bcpDeadlineKey = "bcpDeadline"; private static final String hostTTLKey = "hostTTL"; private static final String bcpGroupInfoKey = "bcpGroupInfo"; private static final String queryRateKey = "queryRateKey"; private static final String growthRateHeadroomKey = "growthRateHeadroomKey"; private static final String cpuCostPerQueryKey = "cpuCostPerQueryKey"; private static final String resourcesKey = "resources"; private static final String targetKey = "target"; private static final String nodesKey = "nodes"; private static final String groupsKey = "groups"; private static final String nodeResourcesKey = "resources"; private static final String scalingEventsKey = "scalingEvents"; private static final String descriptionKey = "description"; private static final String peakKey = "peak"; private static final String idealKey = "ideal"; private static final String metricsKey = "metrics"; private static final String cpuKey = "cpu"; private static final String memoryKey = "memory"; private static final String diskKey = "disk"; private static final String gpuKey = "gpu"; private static final String gpuMemory = "gpuMemory"; private static final String fromKey = "from"; private static final String toKey = "to"; 
private static final String generationKey = "generation"; private static final String atKey = "at"; private static final String completionKey = "completion"; public static byte[] toJson(Application application) { Slime slime = new Slime(); toSlime(application, slime.setObject()); try { return SlimeUtils.toJsonBytes(slime); } catch (IOException e) { throw new UncheckedIOException(e); } } public static Application fromJson(byte[] data) { return applicationFromSlime(SlimeUtils.jsonToSlime(data).get()); } private static void toSlime(Application application, Cursor object) { object.setString(idKey, application.id().serializedForm()); toSlime(application.status(), object.setObject(statusKey)); clustersToSlime(application.clusters().values(), object.setObject(clustersKey)); } private static Application applicationFromSlime(Inspector applicationObject) { ApplicationId id = ApplicationId.fromSerializedForm(applicationObject.field(idKey).asString()); return new Application(id, statusFromSlime(applicationObject.field(statusKey)), clustersFromSlime(applicationObject.field(clustersKey))); } private static void toSlime(Status status, Cursor statusObject) { statusObject.setDouble(currentReadShareKey, status.currentReadShare()); statusObject.setDouble(maxReadShareKey, status.maxReadShare()); } private static Status statusFromSlime(Inspector statusObject) { return new Status(statusObject.field(currentReadShareKey).asDouble(), statusObject.field(maxReadShareKey).asDouble()); } private static void clustersToSlime(Collection<Cluster> clusters, Cursor clustersObject) { clusters.forEach(cluster -> toSlime(cluster, clustersObject.setObject(cluster.id().value()))); } private static Collection<Cluster> clustersFromSlime(Inspector clustersObject) { List<Cluster> clusters = new ArrayList<>(); clustersObject.traverse((ObjectTraverser)(id, clusterObject) -> clusters.add(clusterFromSlime(id, clusterObject))); return clusters; } private static Cluster clusterFromSlime(String id, Inspector 
clusterObject) { return new Cluster(ClusterSpec.Id.from(id), clusterObject.field(exclusiveKey).asBool(), clusterResourcesFromSlime(clusterObject.field(minResourcesKey)), clusterResourcesFromSlime(clusterObject.field(maxResourcesKey)), intRangeFromSlime(clusterObject.field(groupSizeKey)), clusterObject.field(requiredKey).asBool(), autoscalingFromSlime(clusterObject.field(suggestedKey)), suggestionsFromSlime(clusterObject.field(suggestionsKey)), autoscalingFromSlime(clusterObject.field(targetKey)), clusterInfoFromSlime(clusterObject.field(clusterInfoKey)), bcpGroupInfoFromSlime(clusterObject.field(bcpGroupInfoKey)), scalingEventsFromSlime(clusterObject.field(scalingEventsKey))); } private static void toSlime(List<Autoscaling> suggestions, Cursor suggestionsArray) { suggestions.forEach(suggestion -> { var suggestionObject = suggestionsArray.addObject(); toSlime(suggestion, suggestionObject); }); } private static void toSlime(Autoscaling autoscaling, Cursor autoscalingObject) { autoscalingObject.setString(statusKey, toAutoscalingStatusCode(autoscaling.status())); autoscalingObject.setString(descriptionKey, autoscaling.description()); autoscaling.resources().ifPresent(resources -> toSlime(resources, autoscalingObject.setObject(resourcesKey))); autoscalingObject.setLong(atKey, autoscaling.at().toEpochMilli()); toSlime(autoscaling.peak(), autoscalingObject.setObject(peakKey)); toSlime(autoscaling.ideal(), autoscalingObject.setObject(idealKey)); toSlime(autoscaling.metrics(), autoscalingObject.setObject(metricsKey)); } private static void toSlime(ClusterResources resources, Cursor clusterResourcesObject) { clusterResourcesObject.setLong(nodesKey, resources.nodes()); clusterResourcesObject.setLong(groupsKey, resources.groups()); NodeResourcesSerializer.toSlime(resources.nodeResources(), clusterResourcesObject.setObject(nodeResourcesKey)); } private static Optional<ClusterResources> optionalClusterResourcesFromSlime(Inspector clusterResourcesObject) { if ( ! 
clusterResourcesObject.valid()) return Optional.empty(); return Optional.of(clusterResourcesFromSlime(clusterResourcesObject)); } private static ClusterResources clusterResourcesFromSlime(Inspector clusterResourcesObject) { return new ClusterResources((int)clusterResourcesObject.field(nodesKey).asLong(), (int)clusterResourcesObject.field(groupsKey).asLong(), NodeResourcesSerializer.resourcesFromSlime(clusterResourcesObject.field(nodeResourcesKey))); } private static void toSlime(IntRange range, Cursor rangeObject) { range.from().ifPresent(from -> rangeObject.setLong(fromKey, from)); range.to().ifPresent(from -> rangeObject.setLong(toKey, from)); } private static IntRange intRangeFromSlime(Inspector rangeObject) { if ( ! rangeObject.valid()) return IntRange.empty(); return new IntRange(optionalInt(rangeObject.field(fromKey)), optionalInt(rangeObject.field(toKey))); } private static void toSlime(Load load, Cursor loadObject) { loadObject.setDouble(cpuKey, load.cpu()); loadObject.setDouble(memoryKey, load.memory()); loadObject.setDouble(diskKey, load.disk()); loadObject.setDouble(gpuKey, load.gpu()); loadObject.setDouble(gpuMemory, load.gpuMemory()); } private static Load loadFromSlime(Inspector loadObject) { return new Load(loadObject.field(cpuKey).asDouble(), loadObject.field(memoryKey).asDouble(), loadObject.field(diskKey).asDouble(), loadObject.field(gpuKey).asDouble(), loadObject.field(gpuMemory).asDouble()); } private static void toSlime(Autoscaling.Metrics metrics, Cursor metricsObject) { metricsObject.setDouble(queryRateKey, metrics.queryRate()); metricsObject.setDouble(growthRateHeadroomKey, metrics.growthRateHeadroom()); metricsObject.setDouble(cpuCostPerQueryKey, metrics.cpuCostPerQuery()); } private static Autoscaling.Metrics metricsFromSlime(Inspector metricsObject) { return new Autoscaling.Metrics(metricsObject.field(queryRateKey).asDouble(), metricsObject.field(growthRateHeadroomKey).asDouble(), metricsObject.field(cpuCostPerQueryKey).asDouble()); } 
private static List<Autoscaling> suggestionsFromSlime(Inspector suggestionsObject) { var suggestions = new ArrayList<Autoscaling>(); if (!suggestionsObject.valid()) return suggestions; suggestionsObject.traverse((ArrayTraverser) (id, suggestion) -> suggestions.add(autoscalingFromSlime(suggestion))); return suggestions; } private static Autoscaling autoscalingFromSlime(Inspector autoscalingObject) { if ( ! autoscalingObject.valid()) return Autoscaling.empty(); return new Autoscaling(fromAutoscalingStatusCode(autoscalingObject.field(statusKey).asString()), autoscalingObject.field(descriptionKey).asString(), optionalClusterResourcesFromSlime(autoscalingObject.field(resourcesKey)), Instant.ofEpochMilli(autoscalingObject.field(atKey).asLong()), loadFromSlime(autoscalingObject.field(peakKey)), loadFromSlime(autoscalingObject.field(idealKey)), metricsFromSlime(autoscalingObject.field(metricsKey))); } private static void toSlime(ClusterInfo clusterInfo, Cursor clusterInfoObject) { clusterInfoObject.setLong(bcpDeadlineKey, clusterInfo.bcpDeadline().toMinutes()); if ( ! clusterInfo.hostTTL().isZero()) clusterInfoObject.setLong(hostTTLKey, clusterInfo.hostTTL().toMillis()); } private static ClusterInfo clusterInfoFromSlime(Inspector clusterInfoObject) { if ( ! clusterInfoObject.valid()) return ClusterInfo.empty(); ClusterInfo.Builder builder = new ClusterInfo.Builder(); builder.bcpDeadline(Duration.ofMinutes(clusterInfoObject.field(bcpDeadlineKey).asLong())); builder.hostTTL(Duration.ofMillis(clusterInfoObject.field(hostTTLKey).asLong())); return builder.build(); } private static void toSlime(BcpGroupInfo bcpGroupInfo, Cursor bcpGroupInfoObject) { bcpGroupInfoObject.setDouble(queryRateKey, bcpGroupInfo.queryRate()); bcpGroupInfoObject.setDouble(growthRateHeadroomKey, bcpGroupInfo.growthRateHeadroom()); bcpGroupInfoObject.setDouble(cpuCostPerQueryKey, bcpGroupInfo.cpuCostPerQuery()); } private static BcpGroupInfo bcpGroupInfoFromSlime(Inspector bcpGroupInfoObject) { if ( ! 
bcpGroupInfoObject.valid()) return BcpGroupInfo.empty(); return new BcpGroupInfo(bcpGroupInfoObject.field(queryRateKey).asDouble(), bcpGroupInfoObject.field(growthRateHeadroomKey).asDouble(), bcpGroupInfoObject.field(cpuCostPerQueryKey).asDouble()); } private static void scalingEventsToSlime(List<ScalingEvent> scalingEvents, Cursor eventArray) { scalingEvents.forEach(event -> toSlime(event, eventArray.addObject())); } private static List<ScalingEvent> scalingEventsFromSlime(Inspector eventArray) { return SlimeUtils.entriesStream(eventArray).map(item -> scalingEventFromSlime(item)).toList(); } private static void toSlime(ScalingEvent event, Cursor object) { toSlime(event.from(), object.setObject(fromKey)); toSlime(event.to(), object.setObject(toKey)); object.setLong(generationKey, event.generation()); object.setLong(atKey, event.at().toEpochMilli()); event.completion().ifPresent(completion -> object.setLong(completionKey, completion.toEpochMilli())); } private static ScalingEvent scalingEventFromSlime(Inspector inspector) { return new ScalingEvent(clusterResourcesFromSlime(inspector.field(fromKey)), clusterResourcesFromSlime(inspector.field(toKey)), inspector.field(generationKey).asLong(), Instant.ofEpochMilli(inspector.field(atKey).asLong()), optionalInstant(inspector.field(completionKey))); } private static String toAutoscalingStatusCode(Autoscaling.Status status) { return switch (status) { case unavailable -> "unavailable"; case waiting -> "waiting"; case ideal -> "ideal"; case insufficient -> "insufficient"; case rescaling -> "rescaling"; }; } private static Autoscaling.Status fromAutoscalingStatusCode(String code) { return switch (code) { case "" -> Autoscaling.Status.unavailable; case "unavailable" -> Autoscaling.Status.unavailable; case "waiting" -> Autoscaling.Status.waiting; case "ideal" -> Autoscaling.Status.ideal; case "insufficient" -> Autoscaling.Status.insufficient; case "rescaling" -> Autoscaling.Status.rescaling; default -> throw new 
IllegalArgumentException("Unknown autoscaling status '" + code + "'"); }; } private static Optional<Instant> optionalInstant(Inspector inspector) { return inspector.valid() ? Optional.of(Instant.ofEpochMilli(inspector.asLong())) : Optional.empty(); } private static OptionalInt optionalInt(Inspector inspector) { return inspector.valid() ? OptionalInt.of((int)inspector.asLong()) : OptionalInt.empty(); } }
/**
 * Serializes an autoscaling {@link Application} and its clusters to/from JSON (Slime).
 *
 * NOTE(review): this is a persistence format — the string values of the {@code *Key} constants
 * are stored keys and must never be changed, even where they look like slips (e.g.
 * "suggestionsKey", "queryRateKey", "growthRateHeadroomKey" and "cpuCostPerQueryKey" all
 * include the "Key" suffix in the stored value).
 */
class ApplicationSerializer {

    // Persisted JSON field names — do not rename the string values (wire/storage compatibility).
    private static final String idKey = "id";
    private static final String statusKey = "status";
    private static final String currentReadShareKey = "currentReadShare";
    private static final String maxReadShareKey = "maxReadShare";
    private static final String clustersKey = "clusters";
    private static final String exclusiveKey = "exclusive";
    private static final String minResourcesKey = "min";
    private static final String maxResourcesKey = "max";
    private static final String groupSizeKey = "groupSize";
    private static final String requiredKey = "required";
    private static final String suggestedKey = "suggested";
    private static final String suggestionsKey = "suggestionsKey"; // looks like a slip, but it IS the stored key
    private static final String clusterInfoKey = "clusterInfo";
    private static final String bcpDeadlineKey = "bcpDeadline";
    private static final String hostTTLKey = "hostTTL";
    private static final String bcpGroupInfoKey = "bcpGroupInfo";
    private static final String queryRateKey = "queryRateKey"; // stored value includes the "Key" suffix
    private static final String growthRateHeadroomKey = "growthRateHeadroomKey"; // stored value includes the "Key" suffix
    private static final String cpuCostPerQueryKey = "cpuCostPerQueryKey"; // stored value includes the "Key" suffix
    private static final String resourcesKey = "resources";
    private static final String targetKey = "target";
    private static final String nodesKey = "nodes";
    private static final String groupsKey = "groups";
    private static final String nodeResourcesKey = "resources"; // nested under a cluster-resources object, so no clash with resourcesKey
    private static final String scalingEventsKey = "scalingEvents";
    private static final String descriptionKey = "description";
    private static final String peakKey = "peak";
    private static final String idealKey = "ideal";
    private static final String metricsKey = "metrics";
    private static final String cpuKey = "cpu";
    private static final String memoryKey = "memory";
    private static final String diskKey = "disk";
    private static final String gpuKey = "gpu";
    private static final String gpuMemory = "gpuMemory";
    private static final String fromKey = "from";
    private static final String toKey = "to";
    private static final String generationKey = "generation";
    private static final String atKey = "at";
    private static final String completionKey = "completion";

    /** Serializes the given application to JSON bytes. */
    public static byte[] toJson(Application application) {
        Slime slime = new Slime();
        toSlime(application, slime.setObject());
        try {
            return SlimeUtils.toJsonBytes(slime);
        }
        catch (IOException e) {
            // In-memory serialization; an IOException here is a programming error, not an I/O condition
            throw new UncheckedIOException(e);
        }
    }

    /** Deserializes an application from JSON bytes produced by {@link #toJson}. */
    public static Application fromJson(byte[] data) {
        return applicationFromSlime(SlimeUtils.jsonToSlime(data).get());
    }

    // ------------------------------ Application

    private static void toSlime(Application application, Cursor object) {
        object.setString(idKey, application.id().serializedForm());
        toSlime(application.status(), object.setObject(statusKey));
        clustersToSlime(application.clusters().values(), object.setObject(clustersKey));
    }

    private static Application applicationFromSlime(Inspector applicationObject) {
        ApplicationId id = ApplicationId.fromSerializedForm(applicationObject.field(idKey).asString());
        return new Application(id,
                               statusFromSlime(applicationObject.field(statusKey)),
                               clustersFromSlime(applicationObject.field(clustersKey)));
    }

    // ------------------------------ Status

    private static void toSlime(Status status, Cursor statusObject) {
        statusObject.setDouble(currentReadShareKey, status.currentReadShare());
        statusObject.setDouble(maxReadShareKey, status.maxReadShare());
    }

    private static Status statusFromSlime(Inspector statusObject) {
        return new Status(statusObject.field(currentReadShareKey).asDouble(),
                          statusObject.field(maxReadShareKey).asDouble());
    }

    // ------------------------------ Clusters

    // NOTE(review): relies on a toSlime(Cluster, Cursor) overload not visible in this chunk — confirm it exists.
    private static void clustersToSlime(Collection<Cluster> clusters, Cursor clustersObject) {
        clusters.forEach(cluster -> toSlime(cluster, clustersObject.setObject(cluster.id().value())));
    }

    private static Collection<Cluster> clustersFromSlime(Inspector clustersObject) {
        List<Cluster> clusters = new ArrayList<>();
        clustersObject.traverse((ObjectTraverser)(id, clusterObject) -> clusters.add(clusterFromSlime(id, clusterObject)));
        return clusters;
    }

    private static Cluster clusterFromSlime(String id, Inspector clusterObject) {
        return new Cluster(ClusterSpec.Id.from(id),
                           clusterObject.field(exclusiveKey).asBool(),
                           clusterResourcesFromSlime(clusterObject.field(minResourcesKey)),
                           clusterResourcesFromSlime(clusterObject.field(maxResourcesKey)),
                           intRangeFromSlime(clusterObject.field(groupSizeKey)),
                           clusterObject.field(requiredKey).asBool(),
                           autoscalingFromSlime(clusterObject.field(suggestedKey)),
                           suggestionsFromSlime(clusterObject.field(suggestionsKey)),
                           autoscalingFromSlime(clusterObject.field(targetKey)),
                           clusterInfoFromSlime(clusterObject.field(clusterInfoKey)),
                           bcpGroupInfoFromSlime(clusterObject.field(bcpGroupInfoKey)),
                           scalingEventsFromSlime(clusterObject.field(scalingEventsKey)));
    }

    // ------------------------------ Autoscaling

    private static void toSlime(List<Autoscaling> suggestions, Cursor suggestionsArray) {
        suggestions.forEach(suggestion -> {
            var suggestionObject = suggestionsArray.addObject();
            toSlime(suggestion, suggestionObject);
        });
    }

    private static void toSlime(Autoscaling autoscaling, Cursor autoscalingObject) {
        autoscalingObject.setString(statusKey, toAutoscalingStatusCode(autoscaling.status()));
        autoscalingObject.setString(descriptionKey, autoscaling.description());
        // resources is only written when present; readers treat a missing field as empty
        autoscaling.resources().ifPresent(resources -> toSlime(resources, autoscalingObject.setObject(resourcesKey)));
        autoscalingObject.setLong(atKey, autoscaling.at().toEpochMilli());
        toSlime(autoscaling.peak(), autoscalingObject.setObject(peakKey));
        toSlime(autoscaling.ideal(), autoscalingObject.setObject(idealKey));
        toSlime(autoscaling.metrics(), autoscalingObject.setObject(metricsKey));
    }

    private static void toSlime(ClusterResources resources, Cursor clusterResourcesObject) {
        clusterResourcesObject.setLong(nodesKey, resources.nodes());
        clusterResourcesObject.setLong(groupsKey, resources.groups());
        NodeResourcesSerializer.toSlime(resources.nodeResources(), clusterResourcesObject.setObject(nodeResourcesKey));
    }

    private static Optional<ClusterResources> optionalClusterResourcesFromSlime(Inspector clusterResourcesObject) {
        if ( ! clusterResourcesObject.valid()) return Optional.empty();
        return Optional.of(clusterResourcesFromSlime(clusterResourcesObject));
    }

    private static ClusterResources clusterResourcesFromSlime(Inspector clusterResourcesObject) {
        return new ClusterResources((int)clusterResourcesObject.field(nodesKey).asLong(),
                                    (int)clusterResourcesObject.field(groupsKey).asLong(),
                                    NodeResourcesSerializer.resourcesFromSlime(clusterResourcesObject.field(nodeResourcesKey)));
    }

    private static void toSlime(IntRange range, Cursor rangeObject) {
        // Open range ends are simply omitted from the serialized form
        range.from().ifPresent(from -> rangeObject.setLong(fromKey, from));
        range.to().ifPresent(from -> rangeObject.setLong(toKey, from));
    }

    private static IntRange intRangeFromSlime(Inspector rangeObject) {
        if ( ! rangeObject.valid()) return IntRange.empty();
        return new IntRange(optionalInt(rangeObject.field(fromKey)), optionalInt(rangeObject.field(toKey)));
    }

    private static void toSlime(Load load, Cursor loadObject) {
        loadObject.setDouble(cpuKey, load.cpu());
        loadObject.setDouble(memoryKey, load.memory());
        loadObject.setDouble(diskKey, load.disk());
        loadObject.setDouble(gpuKey, load.gpu());
        loadObject.setDouble(gpuMemory, load.gpuMemory());
    }

    private static Load loadFromSlime(Inspector loadObject) {
        return new Load(loadObject.field(cpuKey).asDouble(),
                        loadObject.field(memoryKey).asDouble(),
                        loadObject.field(diskKey).asDouble(),
                        loadObject.field(gpuKey).asDouble(),
                        loadObject.field(gpuMemory).asDouble());
    }

    private static void toSlime(Autoscaling.Metrics metrics, Cursor metricsObject) {
        metricsObject.setDouble(queryRateKey, metrics.queryRate());
        metricsObject.setDouble(growthRateHeadroomKey, metrics.growthRateHeadroom());
        metricsObject.setDouble(cpuCostPerQueryKey, metrics.cpuCostPerQuery());
    }

    private static Autoscaling.Metrics metricsFromSlime(Inspector metricsObject) {
        return new Autoscaling.Metrics(metricsObject.field(queryRateKey).asDouble(),
                                       metricsObject.field(growthRateHeadroomKey).asDouble(),
                                       metricsObject.field(cpuCostPerQueryKey).asDouble());
    }

    /** Returns the serialized suggestions, or an empty (mutable) list when the field is absent. */
    private static List<Autoscaling> suggestionsFromSlime(Inspector suggestionsObject) {
        var suggestions = new ArrayList<Autoscaling>();
        if (!suggestionsObject.valid()) return suggestions;
        suggestionsObject.traverse((ArrayTraverser) (id, suggestion) -> suggestions.add(autoscalingFromSlime(suggestion)));
        return suggestions;
    }

    private static Autoscaling autoscalingFromSlime(Inspector autoscalingObject) {
        if ( ! autoscalingObject.valid()) return Autoscaling.empty();
        return new Autoscaling(fromAutoscalingStatusCode(autoscalingObject.field(statusKey).asString()),
                               autoscalingObject.field(descriptionKey).asString(),
                               optionalClusterResourcesFromSlime(autoscalingObject.field(resourcesKey)),
                               Instant.ofEpochMilli(autoscalingObject.field(atKey).asLong()),
                               loadFromSlime(autoscalingObject.field(peakKey)),
                               loadFromSlime(autoscalingObject.field(idealKey)),
                               metricsFromSlime(autoscalingObject.field(metricsKey)));
    }

    // ------------------------------ Cluster info

    private static void toSlime(ClusterInfo clusterInfo, Cursor clusterInfoObject) {
        clusterInfoObject.setLong(bcpDeadlineKey, clusterInfo.bcpDeadline().toMinutes());
        // hostTTL is written only when nonzero; the reader below presumably gets 0 from a
        // missing field's asLong() — TODO confirm against the Slime Inspector contract
        if ( ! clusterInfo.hostTTL().isZero())
            clusterInfoObject.setLong(hostTTLKey, clusterInfo.hostTTL().toMillis());
    }

    private static ClusterInfo clusterInfoFromSlime(Inspector clusterInfoObject) {
        if ( ! clusterInfoObject.valid()) return ClusterInfo.empty();
        ClusterInfo.Builder builder = new ClusterInfo.Builder();
        builder.bcpDeadline(Duration.ofMinutes(clusterInfoObject.field(bcpDeadlineKey).asLong()));
        builder.hostTTL(Duration.ofMillis(clusterInfoObject.field(hostTTLKey).asLong()));
        return builder.build();
    }

    private static void toSlime(BcpGroupInfo bcpGroupInfo, Cursor bcpGroupInfoObject) {
        bcpGroupInfoObject.setDouble(queryRateKey, bcpGroupInfo.queryRate());
        bcpGroupInfoObject.setDouble(growthRateHeadroomKey, bcpGroupInfo.growthRateHeadroom());
        bcpGroupInfoObject.setDouble(cpuCostPerQueryKey, bcpGroupInfo.cpuCostPerQuery());
    }

    private static BcpGroupInfo bcpGroupInfoFromSlime(Inspector bcpGroupInfoObject) {
        if ( ! bcpGroupInfoObject.valid()) return BcpGroupInfo.empty();
        return new BcpGroupInfo(bcpGroupInfoObject.field(queryRateKey).asDouble(),
                                bcpGroupInfoObject.field(growthRateHeadroomKey).asDouble(),
                                bcpGroupInfoObject.field(cpuCostPerQueryKey).asDouble());
    }

    // ------------------------------ Scaling events

    private static void scalingEventsToSlime(List<ScalingEvent> scalingEvents, Cursor eventArray) {
        scalingEvents.forEach(event -> toSlime(event, eventArray.addObject()));
    }

    private static List<ScalingEvent> scalingEventsFromSlime(Inspector eventArray) {
        return SlimeUtils.entriesStream(eventArray).map(item -> scalingEventFromSlime(item)).toList();
    }

    private static void toSlime(ScalingEvent event, Cursor object) {
        toSlime(event.from(), object.setObject(fromKey));
        toSlime(event.to(), object.setObject(toKey));
        object.setLong(generationKey, event.generation());
        object.setLong(atKey, event.at().toEpochMilli());
        // completion is written only for finished events
        event.completion().ifPresent(completion -> object.setLong(completionKey, completion.toEpochMilli()));
    }

    private static ScalingEvent scalingEventFromSlime(Inspector inspector) {
        return new ScalingEvent(clusterResourcesFromSlime(inspector.field(fromKey)),
                                clusterResourcesFromSlime(inspector.field(toKey)),
                                inspector.field(generationKey).asLong(),
                                Instant.ofEpochMilli(inspector.field(atKey).asLong()),
                                optionalInstant(inspector.field(completionKey)));
    }

    // ------------------------------ Status codes

    private static String toAutoscalingStatusCode(Autoscaling.Status status) {
        return switch (status) {
            case unavailable -> "unavailable";
            case waiting -> "waiting";
            case ideal -> "ideal";
            case insufficient -> "insufficient";
            case rescaling -> "rescaling";
        };
    }

    private static Autoscaling.Status fromAutoscalingStatusCode(String code) {
        return switch (code) {
            case "" -> Autoscaling.Status.unavailable; // a missing status field reads as "" — treat as unavailable
            case "unavailable" -> Autoscaling.Status.unavailable;
            case "waiting" -> Autoscaling.Status.waiting;
            case "ideal" -> Autoscaling.Status.ideal;
            case "insufficient" -> Autoscaling.Status.insufficient;
            case "rescaling" -> Autoscaling.Status.rescaling;
            default -> throw new IllegalArgumentException("Unknown autoscaling status '" + code + "'");
        };
    }

    // ------------------------------ Optional-field helpers

    private static Optional<Instant> optionalInstant(Inspector inspector) {
        return inspector.valid() ? Optional.of(Instant.ofEpochMilli(inspector.asLong())) : Optional.empty();
    }

    private static OptionalInt optionalInt(Inspector inspector) {
        return inspector.valid() ? OptionalInt.of((int)inspector.asLong()) : OptionalInt.empty();
    }

}
It looks like the return statement was accidentally merged onto the same line as the preceding call — should it be moved back to its own line?
/**
 * Builds the vsmfields config (streaming-search field specs) derived from the given schema.
 *
 * @param schema the schema to derive the configuration from
 * @return the built {@link VsmfieldsConfig}
 */
private static VsmfieldsConfig vsmfieldsConfig(Schema schema) {
    VsmFields vsmFields = new VsmFields(schema);
    VsmfieldsConfig.Builder cfgBuilder = new VsmfieldsConfig.Builder();
    vsmFields.getConfig(cfgBuilder);
    // Fix: the return was jammed onto the previous line ("...;return ..."), hiding it from review
    return cfgBuilder.build();
}
vsmFields.getConfig(cfgBuilder);return cfgBuilder.build();
/**
 * Builds the vsmfields config (streaming-search field specs) derived from the given schema.
 *
 * @param schema the schema to derive the configuration from
 * @return the built {@link VsmfieldsConfig}
 */
private static VsmfieldsConfig vsmfieldsConfig(Schema schema) {
    VsmFields vsmFields = new VsmFields(schema);
    VsmfieldsConfig.Builder cfgBuilder = new VsmfieldsConfig.Builder();
    vsmFields.getConfig(cfgBuilder);
    // Fix: the return was jammed onto the previous line ("...;return ..."), hiding it from review
    return cfgBuilder.build();
}
/** Tests the vsmfields config (streaming search field specs) produced from a schema. */
class VsmFieldsTestCase {

    /** Creates an empty test schema holding a single document type named "test". */
    static Schema createSchema() {
        Schema schema = new Schema("test", MockApplicationPackage.createEmpty(), new MockFileRegistry(),
                                   new TestableDeployLogger(), new TestProperties());
        var sdoc = new SDDocumentType("test");
        schema.addDocument(sdoc);
        return schema;
    }

    @Test
    void reference_type_field_is_unsearchable() {
        Schema schema = createSchema();
        SDField field = new TemporarySDField(schema.getDocument(), "ref_field",
                                             NewDocumentReferenceDataType.forDocumentName("parent_type"));
        field.parseIndexingScript("{ summary }");
        schema.getDocument().addField(field);
        VsmfieldsConfig cfg = vsmfieldsConfig(schema);
        assertEquals(1, cfg.fieldspec().size());
        VsmfieldsConfig.Fieldspec fieldSpec = cfg.fieldspec().get(0);
        assertEquals("ref_field", fieldSpec.name());
        // Reference fields get search method NONE, i.e. they are not searchable in streaming mode
        assertEquals(VsmfieldsConfig.Fieldspec.Searchmethod.NONE, fieldSpec.searchmethod());
    }

    /**
     * Adds an indexed string field with the given matching to a fresh schema and verifies
     * the resulting field spec's normalize mode and arg1 (match-method argument).
     */
    private void testIndexMatching(Matching matching, VsmfieldsConfig.Fieldspec.Normalize.Enum normalize, String arg1) {
        Schema schema = createSchema();
        SDField field = new TemporarySDField(schema.getDocument(), "f", DataType.STRING);
        field.parseIndexingScript("{ index }");
        field.setMatching(matching);
        schema.getDocument().addField(field);
        VsmfieldsConfig cfg = vsmfieldsConfig(schema);
        VsmfieldsConfig.Fieldspec fieldSpec = cfg.fieldspec().get(0);
        assertEquals("f", fieldSpec.name());
        assertEquals(VsmfieldsConfig.Fieldspec.Searchmethod.AUTOUTF8, fieldSpec.searchmethod());
        assertEquals(normalize, fieldSpec.normalize());
        assertEquals(arg1, fieldSpec.arg1());
    }

    @Test
    void test_exact_string() {
        // Match type and case sensitivity together determine normalization and the arg1 string
        testIndexMatching(new Matching(MatchType.TEXT), VsmfieldsConfig.Fieldspec.Normalize.LOWERCASE_AND_FOLD, "");
        testIndexMatching(new Matching(MatchType.TEXT).setCase(Case.CASED), VsmfieldsConfig.Fieldspec.Normalize.NONE, "");
        testIndexMatching(new Matching(MatchType.EXACT).setCase(Case.CASED), VsmfieldsConfig.Fieldspec.Normalize.LOWERCASE, "exact");
        testIndexMatching(new Matching(MatchType.WORD), VsmfieldsConfig.Fieldspec.Normalize.LOWERCASE, "word");
        testIndexMatching(new Matching(MatchType.WORD).setCase(Case.CASED), VsmfieldsConfig.Fieldspec.Normalize.NONE, "word");
    }
}
/** Tests the vsmfields config (streaming search field specs) produced from a schema. */
class VsmFieldsTestCase {

    /** Creates an empty test schema holding a single document type named "test". */
    static Schema createSchema() {
        Schema schema = new Schema("test", MockApplicationPackage.createEmpty(), new MockFileRegistry(),
                                   new TestableDeployLogger(), new TestProperties());
        var sdoc = new SDDocumentType("test");
        schema.addDocument(sdoc);
        return schema;
    }

    @Test
    void reference_type_field_is_unsearchable() {
        Schema schema = createSchema();
        SDField field = new TemporarySDField(schema.getDocument(), "ref_field",
                                             NewDocumentReferenceDataType.forDocumentName("parent_type"));
        field.parseIndexingScript("{ summary }");
        schema.getDocument().addField(field);
        VsmfieldsConfig cfg = vsmfieldsConfig(schema);
        assertEquals(1, cfg.fieldspec().size());
        VsmfieldsConfig.Fieldspec fieldSpec = cfg.fieldspec().get(0);
        assertEquals("ref_field", fieldSpec.name());
        // Reference fields get search method NONE, i.e. they are not searchable in streaming mode
        assertEquals(VsmfieldsConfig.Fieldspec.Searchmethod.NONE, fieldSpec.searchmethod());
    }

    /**
     * Adds an indexed string field with the given matching to a fresh schema and verifies
     * the resulting field spec's normalize mode and arg1 (match-method argument).
     */
    private void testIndexMatching(Matching matching, VsmfieldsConfig.Fieldspec.Normalize.Enum normalize, String arg1) {
        Schema schema = createSchema();
        SDField field = new TemporarySDField(schema.getDocument(), "f", DataType.STRING);
        field.parseIndexingScript("{ index }");
        field.setMatching(matching);
        schema.getDocument().addField(field);
        VsmfieldsConfig cfg = vsmfieldsConfig(schema);
        VsmfieldsConfig.Fieldspec fieldSpec = cfg.fieldspec().get(0);
        assertEquals("f", fieldSpec.name());
        assertEquals(VsmfieldsConfig.Fieldspec.Searchmethod.AUTOUTF8, fieldSpec.searchmethod());
        assertEquals(normalize, fieldSpec.normalize());
        assertEquals(arg1, fieldSpec.arg1());
    }

    @Test
    void test_exact_string() {
        // Match type and case sensitivity together determine normalization and the arg1 string
        testIndexMatching(new Matching(MatchType.TEXT), VsmfieldsConfig.Fieldspec.Normalize.LOWERCASE_AND_FOLD, "");
        testIndexMatching(new Matching(MatchType.TEXT).setCase(Case.CASED), VsmfieldsConfig.Fieldspec.Normalize.NONE, "");
        testIndexMatching(new Matching(MatchType.EXACT).setCase(Case.CASED), VsmfieldsConfig.Fieldspec.Normalize.LOWERCASE, "exact");
        testIndexMatching(new Matching(MatchType.WORD), VsmfieldsConfig.Fieldspec.Normalize.LOWERCASE, "word");
        testIndexMatching(new Matching(MatchType.WORD).setCase(Case.CASED), VsmfieldsConfig.Fieldspec.Normalize.NONE, "word");
    }
}
Falcon eye :) Fixed in https://github.com/vespa-engine/vespa/pull/29897/commits/5ee662b82653ab871608fae7b80d10e0cf63b076
/**
 * Builds the vsmfields config (streaming-search field specs) derived from the given schema.
 *
 * @param schema the schema to derive the configuration from
 * @return the built {@link VsmfieldsConfig}
 */
private static VsmfieldsConfig vsmfieldsConfig(Schema schema) {
    VsmFields vsmFields = new VsmFields(schema);
    VsmfieldsConfig.Builder cfgBuilder = new VsmfieldsConfig.Builder();
    vsmFields.getConfig(cfgBuilder);
    // Fix: the return was jammed onto the previous line ("...;return ..."), hiding it from review
    return cfgBuilder.build();
}
vsmFields.getConfig(cfgBuilder);return cfgBuilder.build();
/**
 * Builds the vsmfields config (streaming-search field specs) derived from the given schema.
 *
 * @param schema the schema to derive the configuration from
 * @return the built {@link VsmfieldsConfig}
 */
private static VsmfieldsConfig vsmfieldsConfig(Schema schema) {
    VsmFields vsmFields = new VsmFields(schema);
    VsmfieldsConfig.Builder cfgBuilder = new VsmfieldsConfig.Builder();
    vsmFields.getConfig(cfgBuilder);
    // Fix: the return was jammed onto the previous line ("...;return ..."), hiding it from review
    return cfgBuilder.build();
}
/** Tests the vsmfields config (streaming search field specs) produced from a schema. */
class VsmFieldsTestCase {

    /** Creates an empty test schema holding a single document type named "test". */
    static Schema createSchema() {
        Schema schema = new Schema("test", MockApplicationPackage.createEmpty(), new MockFileRegistry(),
                                   new TestableDeployLogger(), new TestProperties());
        var sdoc = new SDDocumentType("test");
        schema.addDocument(sdoc);
        return schema;
    }

    @Test
    void reference_type_field_is_unsearchable() {
        Schema schema = createSchema();
        SDField field = new TemporarySDField(schema.getDocument(), "ref_field",
                                             NewDocumentReferenceDataType.forDocumentName("parent_type"));
        field.parseIndexingScript("{ summary }");
        schema.getDocument().addField(field);
        VsmfieldsConfig cfg = vsmfieldsConfig(schema);
        assertEquals(1, cfg.fieldspec().size());
        VsmfieldsConfig.Fieldspec fieldSpec = cfg.fieldspec().get(0);
        assertEquals("ref_field", fieldSpec.name());
        // Reference fields get search method NONE, i.e. they are not searchable in streaming mode
        assertEquals(VsmfieldsConfig.Fieldspec.Searchmethod.NONE, fieldSpec.searchmethod());
    }

    /**
     * Adds an indexed string field with the given matching to a fresh schema and verifies
     * the resulting field spec's normalize mode and arg1 (match-method argument).
     */
    private void testIndexMatching(Matching matching, VsmfieldsConfig.Fieldspec.Normalize.Enum normalize, String arg1) {
        Schema schema = createSchema();
        SDField field = new TemporarySDField(schema.getDocument(), "f", DataType.STRING);
        field.parseIndexingScript("{ index }");
        field.setMatching(matching);
        schema.getDocument().addField(field);
        VsmfieldsConfig cfg = vsmfieldsConfig(schema);
        VsmfieldsConfig.Fieldspec fieldSpec = cfg.fieldspec().get(0);
        assertEquals("f", fieldSpec.name());
        assertEquals(VsmfieldsConfig.Fieldspec.Searchmethod.AUTOUTF8, fieldSpec.searchmethod());
        assertEquals(normalize, fieldSpec.normalize());
        assertEquals(arg1, fieldSpec.arg1());
    }

    @Test
    void test_exact_string() {
        // Match type and case sensitivity together determine normalization and the arg1 string
        testIndexMatching(new Matching(MatchType.TEXT), VsmfieldsConfig.Fieldspec.Normalize.LOWERCASE_AND_FOLD, "");
        testIndexMatching(new Matching(MatchType.TEXT).setCase(Case.CASED), VsmfieldsConfig.Fieldspec.Normalize.NONE, "");
        testIndexMatching(new Matching(MatchType.EXACT).setCase(Case.CASED), VsmfieldsConfig.Fieldspec.Normalize.LOWERCASE, "exact");
        testIndexMatching(new Matching(MatchType.WORD), VsmfieldsConfig.Fieldspec.Normalize.LOWERCASE, "word");
        testIndexMatching(new Matching(MatchType.WORD).setCase(Case.CASED), VsmfieldsConfig.Fieldspec.Normalize.NONE, "word");
    }
}
/** Tests the vsmfields config (streaming search field specs) produced from a schema. */
class VsmFieldsTestCase {

    /** Creates an empty test schema holding a single document type named "test". */
    static Schema createSchema() {
        Schema schema = new Schema("test", MockApplicationPackage.createEmpty(), new MockFileRegistry(),
                                   new TestableDeployLogger(), new TestProperties());
        var sdoc = new SDDocumentType("test");
        schema.addDocument(sdoc);
        return schema;
    }

    @Test
    void reference_type_field_is_unsearchable() {
        Schema schema = createSchema();
        SDField field = new TemporarySDField(schema.getDocument(), "ref_field",
                                             NewDocumentReferenceDataType.forDocumentName("parent_type"));
        field.parseIndexingScript("{ summary }");
        schema.getDocument().addField(field);
        VsmfieldsConfig cfg = vsmfieldsConfig(schema);
        assertEquals(1, cfg.fieldspec().size());
        VsmfieldsConfig.Fieldspec fieldSpec = cfg.fieldspec().get(0);
        assertEquals("ref_field", fieldSpec.name());
        // Reference fields get search method NONE, i.e. they are not searchable in streaming mode
        assertEquals(VsmfieldsConfig.Fieldspec.Searchmethod.NONE, fieldSpec.searchmethod());
    }

    /**
     * Adds an indexed string field with the given matching to a fresh schema and verifies
     * the resulting field spec's normalize mode and arg1 (match-method argument).
     */
    private void testIndexMatching(Matching matching, VsmfieldsConfig.Fieldspec.Normalize.Enum normalize, String arg1) {
        Schema schema = createSchema();
        SDField field = new TemporarySDField(schema.getDocument(), "f", DataType.STRING);
        field.parseIndexingScript("{ index }");
        field.setMatching(matching);
        schema.getDocument().addField(field);
        VsmfieldsConfig cfg = vsmfieldsConfig(schema);
        VsmfieldsConfig.Fieldspec fieldSpec = cfg.fieldspec().get(0);
        assertEquals("f", fieldSpec.name());
        assertEquals(VsmfieldsConfig.Fieldspec.Searchmethod.AUTOUTF8, fieldSpec.searchmethod());
        assertEquals(normalize, fieldSpec.normalize());
        assertEquals(arg1, fieldSpec.arg1());
    }

    @Test
    void test_exact_string() {
        // Match type and case sensitivity together determine normalization and the arg1 string
        testIndexMatching(new Matching(MatchType.TEXT), VsmfieldsConfig.Fieldspec.Normalize.LOWERCASE_AND_FOLD, "");
        testIndexMatching(new Matching(MatchType.TEXT).setCase(Case.CASED), VsmfieldsConfig.Fieldspec.Normalize.NONE, "");
        testIndexMatching(new Matching(MatchType.EXACT).setCase(Case.CASED), VsmfieldsConfig.Fieldspec.Normalize.LOWERCASE, "exact");
        testIndexMatching(new Matching(MatchType.WORD), VsmfieldsConfig.Fieldspec.Normalize.LOWERCASE, "word");
        testIndexMatching(new Matching(MatchType.WORD).setCase(Case.CASED), VsmfieldsConfig.Fieldspec.Normalize.NONE, "word");
    }
}
```suggestion int[] indexesToKeep = createIndexesToKeep(argument.type(), dimensions); ```
/**
 * Reduces the given tensor over the given dimensions with the given aggregator.
 *
 * @param argument   the tensor to reduce
 * @param dimensions the dimensions to reduce over; an empty list reduces over all dimensions
 * @param aggregator the aggregation function to apply
 * @return the reduced tensor (a scalar tensor when all dimensions are reduced)
 * @throws IllegalArgumentException if a listed dimension is not present in the argument
 */
static Tensor evaluate(Tensor argument, List<String> dimensions, Aggregator aggregator) {
    if ( ! dimensions.isEmpty() && ! argument.type().dimensionNames().containsAll(dimensions))
        throw new IllegalArgumentException("Cannot reduce " + argument + " over dimensions " +
                                           dimensions + ": Not all those dimensions are present in this tensor");

    // Reduce all dimensions, with fast paths for the empty and dense-vector cases
    if (dimensions.isEmpty() || dimensions.size() == argument.type().dimensions().size()) {
        if (argument.isEmpty())
            return Tensor.from(0.0);
        else if (argument.type().dimensions().size() == 1 && argument instanceof IndexedTensor)
            return reduceIndexedVector((IndexedTensor) argument, aggregator);
        else
            return reduceAllGeneral(argument, aggregator);
    }

    // Reduce a subset of the dimensions
    TensorType reducedType = outputType(argument.type(), dimensions);
    int[] indexesToKeep = createIndexesToKeep(argument.type(), dimensions); // style fix: int[] not int []

    // Aggregate each cell into the aggregator of its reduced address
    Map<TensorAddress, ValueAggregator> aggregatingCells = new HashMap<>((int) argument.size());
    for (Iterator<Tensor.Cell> i = argument.cellIterator(); i.hasNext(); ) {
        Map.Entry<TensorAddress, Double> cell = i.next();
        TensorAddress reducedAddress = reduceDimensions(indexesToKeep, cell.getKey());
        // computeIfAbsent only allocates an aggregator when none exists yet, unlike the
        // previous putIfAbsent pattern which allocated one per cell
        aggregatingCells.computeIfAbsent(reducedAddress, k -> ValueAggregator.ofType(aggregator))
                        .aggregate(cell.getValue());
    }
    Tensor.Builder reducedBuilder = Tensor.Builder.of(reducedType);
    for (Map.Entry<TensorAddress, ValueAggregator> aggregatingCell : aggregatingCells.entrySet())
        reducedBuilder.cell(aggregatingCell.getKey(), aggregatingCell.getValue().aggregatedValue());
    return reducedBuilder.build();
}
int [] indexesToKeep = createIndexesToKeep(argument.type(), dimensions);
/**
 * Reduces the given tensor over the given dimensions with the given aggregator.
 *
 * @param argument   the tensor to reduce
 * @param dimensions the dimensions to reduce over; an empty list reduces over all dimensions
 * @param aggregator the aggregation function to apply
 * @return the reduced tensor (a scalar tensor when all dimensions are reduced)
 * @throws IllegalArgumentException if a listed dimension is not present in the argument
 */
static Tensor evaluate(Tensor argument, List<String> dimensions, Aggregator aggregator) {
    if ( ! dimensions.isEmpty() && ! argument.type().dimensionNames().containsAll(dimensions))
        throw new IllegalArgumentException("Cannot reduce " + argument + " over dimensions " +
                                           dimensions + ": Not all those dimensions are present in this tensor");

    // Reduce all dimensions, with fast paths for the empty and dense-vector cases
    if (dimensions.isEmpty() || dimensions.size() == argument.type().dimensions().size()) {
        if (argument.isEmpty())
            return Tensor.from(0.0);
        else if (argument.type().dimensions().size() == 1 && argument instanceof IndexedTensor)
            return reduceIndexedVector((IndexedTensor) argument, aggregator);
        else
            return reduceAllGeneral(argument, aggregator);
    }

    // Reduce a subset of the dimensions
    TensorType reducedType = outputType(argument.type(), dimensions);
    int[] indexesToKeep = createIndexesToKeep(argument.type(), dimensions);

    // Aggregate each cell into the aggregator of its reduced address
    Map<TensorAddress, ValueAggregator> aggregatingCells = new HashMap<>((int) argument.size());
    for (Iterator<Tensor.Cell> i = argument.cellIterator(); i.hasNext(); ) {
        Map.Entry<TensorAddress, Double> cell = i.next();
        TensorAddress reducedAddress = reduceDimensions(indexesToKeep, cell.getKey());
        // computeIfAbsent only allocates an aggregator when none exists yet, unlike the
        // previous putIfAbsent pattern which allocated one per cell
        aggregatingCells.computeIfAbsent(reducedAddress, k -> ValueAggregator.ofType(aggregator))
                        .aggregate(cell.getValue());
    }
    Tensor.Builder reducedBuilder = Tensor.Builder.of(reducedType);
    for (Map.Entry<TensorAddress, ValueAggregator> aggregatingCell : aggregatingCells.entrySet())
        reducedBuilder.cell(aggregatingCell.getKey(), aggregatingCell.getValue().aggregatedValue());
    return reducedBuilder.build();
}
class Reduce<NAMETYPE extends Name> extends PrimitiveTensorFunction<NAMETYPE> { public enum Aggregator { avg, count, max, median, min, prod, sum ; } private final TensorFunction<NAMETYPE> argument; private final List<String> dimensions; private final Aggregator aggregator; /** Creates a reduce function reducing all dimensions */ public Reduce(TensorFunction<NAMETYPE> argument, Aggregator aggregator) { this(argument, aggregator, List.of()); } /** Creates a reduce function reducing a single dimension */ public Reduce(TensorFunction<NAMETYPE> argument, Aggregator aggregator, String dimension) { this(argument, aggregator, List.of(dimension)); } /** * Creates a reduce function. * * @param argument the tensor to reduce * @param aggregator the aggregator function to use * @param dimensions the list of dimensions to remove. If an empty list is given, all dimensions are reduced, * producing a dimensionless tensor (a scalar). * @throws IllegalArgumentException if any of the tensor dimensions are not present in the input tensor */ public Reduce(TensorFunction<NAMETYPE> argument, Aggregator aggregator, List<String> dimensions) { this.argument = Objects.requireNonNull(argument, "The argument tensor cannot be null"); this.aggregator = Objects.requireNonNull(aggregator, "The aggregator cannot be null"); this.dimensions = List.copyOf(dimensions); } public static TensorType outputType(TensorType inputType, List<String> reduceDimensions) { return TypeResolver.reduce(inputType, reduceDimensions); } public TensorFunction<NAMETYPE> argument() { return argument; } Aggregator aggregator() { return aggregator; } List<String> dimensions() { return dimensions; } @Override public List<TensorFunction<NAMETYPE>> arguments() { return List.of(argument); } @Override public TensorFunction<NAMETYPE> withArguments(List<TensorFunction<NAMETYPE>> arguments) { if ( arguments.size() != 1) throw new IllegalArgumentException("Reduce must have 1 argument, got " + arguments.size()); return new 
Reduce<>(arguments.get(0), aggregator, dimensions); } @Override public PrimitiveTensorFunction<NAMETYPE> toPrimitive() { return new Reduce<>(argument.toPrimitive(), aggregator, dimensions); } @Override public String toString(ToStringContext<NAMETYPE> context) { return "reduce(" + argument.toString(context) + ", " + aggregator + commaSeparated(dimensions) + ")"; } static String commaSeparated(List<String> list) { StringBuilder b = new StringBuilder(); for (String element : list) b.append(", ").append(element); return b.toString(); } @Override public TensorType type(TypeContext<NAMETYPE> context) { return outputType(argument.type(context), dimensions); } @Override public Tensor evaluate(EvaluationContext<NAMETYPE> context) { return evaluate(this.argument.evaluate(context), dimensions, aggregator); } @Override public int hashCode() { return Objects.hash("reduce", argument, dimensions, aggregator); } private static int [] createIndexesToKeep(TensorType argumentType, List<String> dimensions) { Set<Integer> indexesToRemove = new HashSet<>(dimensions.size()*2); for (String dimensionToRemove : dimensions) indexesToRemove.add(argumentType.indexOfDimension(dimensionToRemove).get()); int [] indexesToKeep = new int[argumentType.rank() - indexesToRemove.size()]; int toKeepIndex = 0; for (int i = 0;i < argumentType.rank(); i++) { if ( ! 
indexesToRemove.contains(i)) indexesToKeep[toKeepIndex++] = i; } return indexesToKeep; } private static TensorAddress reduceDimensions(int [] indexesToKeep, TensorAddress address) { String[] reducedLabels = new String[indexesToKeep.length]; int reducedLabelIndex = 0; for (int toKeep : indexesToKeep) reducedLabels[reducedLabelIndex++] = address.label(toKeep); return TensorAddress.of(reducedLabels); } private static Tensor reduceAllGeneral(Tensor argument, Aggregator aggregator) { ValueAggregator valueAggregator = ValueAggregator.ofType(aggregator); for (Iterator<Double> i = argument.valueIterator(); i.hasNext(); ) valueAggregator.aggregate(i.next()); return Tensor.Builder.of(TensorType.empty).cell((valueAggregator.aggregatedValue())).build(); } private static Tensor reduceIndexedVector(IndexedTensor argument, Aggregator aggregator) { ValueAggregator valueAggregator = ValueAggregator.ofType(aggregator); for (int i = 0; i < argument.dimensionSizes().size(0); i++) valueAggregator.aggregate(argument.get(i)); return Tensor.Builder.of(TensorType.empty).cell((valueAggregator.aggregatedValue())).build(); } static abstract class ValueAggregator { static ValueAggregator ofType(Aggregator aggregator) { return switch (aggregator) { case avg -> new AvgAggregator(); case count -> new CountAggregator(); case max -> new MaxAggregator(); case median -> new MedianAggregator(); case min -> new MinAggregator(); case prod -> new ProdAggregator(); case sum -> new SumAggregator(); default -> throw new UnsupportedOperationException("Aggregator " + aggregator + " is not implemented"); }; } /** Add a new value to those aggregated by this */ public abstract void aggregate(double value); /** Returns the value aggregated by this */ public abstract double aggregatedValue(); /** Resets the aggregator */ public abstract void reset(); /** Returns a hash of this aggregator which only depends on its identity */ @Override public abstract int hashCode(); } private static class AvgAggregator extends 
ValueAggregator { private int valueCount = 0; private double valueSum = 0.0; @Override public void aggregate(double value) { valueCount++; valueSum+= value; } @Override public double aggregatedValue() { return valueSum / valueCount; } @Override public void reset() { valueCount = 0; valueSum = 0.0; } @Override public int hashCode() { return "avgAggregator".hashCode(); } } private static class CountAggregator extends ValueAggregator { private int valueCount = 0; @Override public void aggregate(double value) { valueCount++; } @Override public double aggregatedValue() { return valueCount; } @Override public void reset() { valueCount = 0; } @Override public int hashCode() { return "countAggregator".hashCode(); } } private static class MaxAggregator extends ValueAggregator { private double maxValue = Double.NEGATIVE_INFINITY; @Override public void aggregate(double value) { if (value > maxValue) maxValue = value; } @Override public double aggregatedValue() { return maxValue; } @Override public void reset() { maxValue = Double.NEGATIVE_INFINITY; } @Override public int hashCode() { return "maxAggregator".hashCode(); } } private static class MedianAggregator extends ValueAggregator { /** If any NaN is added, the result should be NaN */ private boolean isNaN = false; private List<Double> values = new ArrayList<>(); @Override public void aggregate(double value) { if ( Double.isNaN(value)) isNaN = true; if ( ! 
isNaN) values.add(value); } @Override public double aggregatedValue() { if (isNaN || values.isEmpty()) return Double.NaN; Collections.sort(values); if (values.size() % 2 == 0) return ( values.get(values.size() / 2 - 1) + values.get(values.size() / 2) ) / 2; else return values.get((values.size() - 1)/ 2); } @Override public void reset() { isNaN = false; values = new ArrayList<>(); } @Override public int hashCode() { return "medianAggregator".hashCode(); } } private static class MinAggregator extends ValueAggregator { private double minValue = Double.POSITIVE_INFINITY; @Override public void aggregate(double value) { if (value < minValue) minValue = value; } @Override public double aggregatedValue() { return minValue; } @Override public void reset() { minValue = Double.POSITIVE_INFINITY; } @Override public int hashCode() { return "minAggregator".hashCode(); } } private static class ProdAggregator extends ValueAggregator { private double valueProd = 1.0; @Override public void aggregate(double value) { valueProd *= value; } @Override public double aggregatedValue() { return valueProd; } @Override public void reset() { valueProd = 1.0; } @Override public int hashCode() { return "prodAggregator".hashCode(); } } private static class SumAggregator extends ValueAggregator { private double valueSum = 0.0; @Override public void aggregate(double value) { valueSum += value; } @Override public double aggregatedValue() { return valueSum; } @Override public void reset() { valueSum = 0.0; } @Override public int hashCode() { return "sumAggregator".hashCode(); } } }
class Reduce<NAMETYPE extends Name> extends PrimitiveTensorFunction<NAMETYPE> { public enum Aggregator { avg, count, max, median, min, prod, sum ; } private final TensorFunction<NAMETYPE> argument; private final List<String> dimensions; private final Aggregator aggregator; /** Creates a reduce function reducing all dimensions */ public Reduce(TensorFunction<NAMETYPE> argument, Aggregator aggregator) { this(argument, aggregator, List.of()); } /** Creates a reduce function reducing a single dimension */ public Reduce(TensorFunction<NAMETYPE> argument, Aggregator aggregator, String dimension) { this(argument, aggregator, List.of(dimension)); } /** * Creates a reduce function. * * @param argument the tensor to reduce * @param aggregator the aggregator function to use * @param dimensions the list of dimensions to remove. If an empty list is given, all dimensions are reduced, * producing a dimensionless tensor (a scalar). * @throws IllegalArgumentException if any of the tensor dimensions are not present in the input tensor */ public Reduce(TensorFunction<NAMETYPE> argument, Aggregator aggregator, List<String> dimensions) { this.argument = Objects.requireNonNull(argument, "The argument tensor cannot be null"); this.aggregator = Objects.requireNonNull(aggregator, "The aggregator cannot be null"); this.dimensions = List.copyOf(dimensions); } public static TensorType outputType(TensorType inputType, List<String> reduceDimensions) { return TypeResolver.reduce(inputType, reduceDimensions); } public TensorFunction<NAMETYPE> argument() { return argument; } Aggregator aggregator() { return aggregator; } List<String> dimensions() { return dimensions; } @Override public List<TensorFunction<NAMETYPE>> arguments() { return List.of(argument); } @Override public TensorFunction<NAMETYPE> withArguments(List<TensorFunction<NAMETYPE>> arguments) { if ( arguments.size() != 1) throw new IllegalArgumentException("Reduce must have 1 argument, got " + arguments.size()); return new 
Reduce<>(arguments.get(0), aggregator, dimensions); } @Override public PrimitiveTensorFunction<NAMETYPE> toPrimitive() { return new Reduce<>(argument.toPrimitive(), aggregator, dimensions); } @Override public String toString(ToStringContext<NAMETYPE> context) { return "reduce(" + argument.toString(context) + ", " + aggregator + commaSeparated(dimensions) + ")"; } static String commaSeparated(List<String> list) { StringBuilder b = new StringBuilder(); for (String element : list) b.append(", ").append(element); return b.toString(); } @Override public TensorType type(TypeContext<NAMETYPE> context) { return outputType(argument.type(context), dimensions); } @Override public Tensor evaluate(EvaluationContext<NAMETYPE> context) { return evaluate(this.argument.evaluate(context), dimensions, aggregator); } @Override public int hashCode() { return Objects.hash("reduce", argument, dimensions, aggregator); } private static int[] createIndexesToKeep(TensorType argumentType, List<String> dimensions) { Set<Integer> indexesToRemove = new HashSet<>(dimensions.size()*2); for (String dimensionToRemove : dimensions) indexesToRemove.add(argumentType.indexOfDimension(dimensionToRemove).get()); int[] indexesToKeep = new int[argumentType.rank() - indexesToRemove.size()]; int toKeepIndex = 0; for (int i = 0; i < argumentType.rank(); i++) { if ( ! 
indexesToRemove.contains(i)) indexesToKeep[toKeepIndex++] = i; } return indexesToKeep; } private static TensorAddress reduceDimensions(int[] indexesToKeep, TensorAddress address) { String[] reducedLabels = new String[indexesToKeep.length]; int reducedLabelIndex = 0; for (int toKeep : indexesToKeep) reducedLabels[reducedLabelIndex++] = address.label(toKeep); return TensorAddress.of(reducedLabels); } private static Tensor reduceAllGeneral(Tensor argument, Aggregator aggregator) { ValueAggregator valueAggregator = ValueAggregator.ofType(aggregator); for (Iterator<Double> i = argument.valueIterator(); i.hasNext(); ) valueAggregator.aggregate(i.next()); return Tensor.Builder.of(TensorType.empty).cell((valueAggregator.aggregatedValue())).build(); } private static Tensor reduceIndexedVector(IndexedTensor argument, Aggregator aggregator) { ValueAggregator valueAggregator = ValueAggregator.ofType(aggregator); for (int i = 0; i < argument.dimensionSizes().size(0); i++) valueAggregator.aggregate(argument.get(i)); return Tensor.Builder.of(TensorType.empty).cell((valueAggregator.aggregatedValue())).build(); } static abstract class ValueAggregator { static ValueAggregator ofType(Aggregator aggregator) { return switch (aggregator) { case avg -> new AvgAggregator(); case count -> new CountAggregator(); case max -> new MaxAggregator(); case median -> new MedianAggregator(); case min -> new MinAggregator(); case prod -> new ProdAggregator(); case sum -> new SumAggregator(); default -> throw new UnsupportedOperationException("Aggregator " + aggregator + " is not implemented"); }; } /** Add a new value to those aggregated by this */ public abstract void aggregate(double value); /** Returns the value aggregated by this */ public abstract double aggregatedValue(); /** Resets the aggregator */ public abstract void reset(); /** Returns a hash of this aggregator which only depends on its identity */ @Override public abstract int hashCode(); } private static class AvgAggregator extends 
ValueAggregator { private int valueCount = 0; private double valueSum = 0.0; @Override public void aggregate(double value) { valueCount++; valueSum+= value; } @Override public double aggregatedValue() { return valueSum / valueCount; } @Override public void reset() { valueCount = 0; valueSum = 0.0; } @Override public int hashCode() { return "avgAggregator".hashCode(); } } private static class CountAggregator extends ValueAggregator { private int valueCount = 0; @Override public void aggregate(double value) { valueCount++; } @Override public double aggregatedValue() { return valueCount; } @Override public void reset() { valueCount = 0; } @Override public int hashCode() { return "countAggregator".hashCode(); } } private static class MaxAggregator extends ValueAggregator { private double maxValue = Double.NEGATIVE_INFINITY; @Override public void aggregate(double value) { if (value > maxValue) maxValue = value; } @Override public double aggregatedValue() { return maxValue; } @Override public void reset() { maxValue = Double.NEGATIVE_INFINITY; } @Override public int hashCode() { return "maxAggregator".hashCode(); } } private static class MedianAggregator extends ValueAggregator { /** If any NaN is added, the result should be NaN */ private boolean isNaN = false; private List<Double> values = new ArrayList<>(); @Override public void aggregate(double value) { if ( Double.isNaN(value)) isNaN = true; if ( ! 
isNaN) values.add(value); } @Override public double aggregatedValue() { if (isNaN || values.isEmpty()) return Double.NaN; Collections.sort(values); if (values.size() % 2 == 0) return ( values.get(values.size() / 2 - 1) + values.get(values.size() / 2) ) / 2; else return values.get((values.size() - 1)/ 2); } @Override public void reset() { isNaN = false; values = new ArrayList<>(); } @Override public int hashCode() { return "medianAggregator".hashCode(); } } private static class MinAggregator extends ValueAggregator { private double minValue = Double.POSITIVE_INFINITY; @Override public void aggregate(double value) { if (value < minValue) minValue = value; } @Override public double aggregatedValue() { return minValue; } @Override public void reset() { minValue = Double.POSITIVE_INFINITY; } @Override public int hashCode() { return "minAggregator".hashCode(); } } private static class ProdAggregator extends ValueAggregator { private double valueProd = 1.0; @Override public void aggregate(double value) { valueProd *= value; } @Override public double aggregatedValue() { return valueProd; } @Override public void reset() { valueProd = 1.0; } @Override public int hashCode() { return "prodAggregator".hashCode(); } } private static class SumAggregator extends ValueAggregator { private double valueSum = 0.0; @Override public void aggregate(double value) { valueSum += value; } @Override public double aggregatedValue() { return valueSum; } @Override public void reset() { valueSum = 0.0; } @Override public int hashCode() { return "sumAggregator".hashCode(); } } }
See comment above
static Tensor evaluate(Tensor argument, List<String> dimensions, Aggregator aggregator) { if ( ! dimensions.isEmpty() && ! argument.type().dimensionNames().containsAll(dimensions)) throw new IllegalArgumentException("Cannot reduce " + argument + " over dimensions " + dimensions + ": Not all those dimensions are present in this tensor"); if (dimensions.isEmpty() || dimensions.size() == argument.type().dimensions().size()) if (argument.isEmpty()) return Tensor.from(0.0); else if (argument.type().dimensions().size() == 1 && argument instanceof IndexedTensor) return reduceIndexedVector((IndexedTensor)argument, aggregator); else return reduceAllGeneral(argument, aggregator); TensorType reducedType = outputType(argument.type(), dimensions); int[] indexesToKeep = createIndexesToKeep(argument.type(), dimensions); Map<TensorAddress, ValueAggregator> aggregatingCells = new HashMap<>((int)argument.size()); for (Iterator<Tensor.Cell> i = argument.cellIterator(); i.hasNext(); ) { Map.Entry<TensorAddress, Double> cell = i.next(); TensorAddress reducedAddress = reduceDimensions(indexesToKeep, cell.getKey()); ValueAggregator aggr = aggregatingCells.putIfAbsent(reducedAddress, ValueAggregator.ofType(aggregator)); if (aggr == null) aggr = aggregatingCells.get(reducedAddress); aggr.aggregate(cell.getValue()); } Tensor.Builder reducedBuilder = Tensor.Builder.of(reducedType); for (Map.Entry<TensorAddress, ValueAggregator> aggregatingCell : aggregatingCells.entrySet()) reducedBuilder.cell(aggregatingCell.getKey(), aggregatingCell.getValue().aggregatedValue()); return reducedBuilder.build(); }
Map<TensorAddress, ValueAggregator> aggregatingCells = new HashMap<>((int)argument.size());
static Tensor evaluate(Tensor argument, List<String> dimensions, Aggregator aggregator) { if ( ! dimensions.isEmpty() && ! argument.type().dimensionNames().containsAll(dimensions)) throw new IllegalArgumentException("Cannot reduce " + argument + " over dimensions " + dimensions + ": Not all those dimensions are present in this tensor"); if (dimensions.isEmpty() || dimensions.size() == argument.type().dimensions().size()) if (argument.isEmpty()) return Tensor.from(0.0); else if (argument.type().dimensions().size() == 1 && argument instanceof IndexedTensor) return reduceIndexedVector((IndexedTensor)argument, aggregator); else return reduceAllGeneral(argument, aggregator); TensorType reducedType = outputType(argument.type(), dimensions); int[] indexesToKeep = createIndexesToKeep(argument.type(), dimensions); Map<TensorAddress, ValueAggregator> aggregatingCells = new HashMap<>((int)argument.size()); for (Iterator<Tensor.Cell> i = argument.cellIterator(); i.hasNext(); ) { Map.Entry<TensorAddress, Double> cell = i.next(); TensorAddress reducedAddress = reduceDimensions(indexesToKeep, cell.getKey()); ValueAggregator aggr = aggregatingCells.putIfAbsent(reducedAddress, ValueAggregator.ofType(aggregator)); if (aggr == null) aggr = aggregatingCells.get(reducedAddress); aggr.aggregate(cell.getValue()); } Tensor.Builder reducedBuilder = Tensor.Builder.of(reducedType); for (Map.Entry<TensorAddress, ValueAggregator> aggregatingCell : aggregatingCells.entrySet()) reducedBuilder.cell(aggregatingCell.getKey(), aggregatingCell.getValue().aggregatedValue()); return reducedBuilder.build(); }
class Reduce<NAMETYPE extends Name> extends PrimitiveTensorFunction<NAMETYPE> { public enum Aggregator { avg, count, max, median, min, prod, sum ; } private final TensorFunction<NAMETYPE> argument; private final List<String> dimensions; private final Aggregator aggregator; /** Creates a reduce function reducing all dimensions */ public Reduce(TensorFunction<NAMETYPE> argument, Aggregator aggregator) { this(argument, aggregator, List.of()); } /** Creates a reduce function reducing a single dimension */ public Reduce(TensorFunction<NAMETYPE> argument, Aggregator aggregator, String dimension) { this(argument, aggregator, List.of(dimension)); } /** * Creates a reduce function. * * @param argument the tensor to reduce * @param aggregator the aggregator function to use * @param dimensions the list of dimensions to remove. If an empty list is given, all dimensions are reduced, * producing a dimensionless tensor (a scalar). * @throws IllegalArgumentException if any of the tensor dimensions are not present in the input tensor */ public Reduce(TensorFunction<NAMETYPE> argument, Aggregator aggregator, List<String> dimensions) { this.argument = Objects.requireNonNull(argument, "The argument tensor cannot be null"); this.aggregator = Objects.requireNonNull(aggregator, "The aggregator cannot be null"); this.dimensions = List.copyOf(dimensions); } public static TensorType outputType(TensorType inputType, List<String> reduceDimensions) { return TypeResolver.reduce(inputType, reduceDimensions); } public TensorFunction<NAMETYPE> argument() { return argument; } Aggregator aggregator() { return aggregator; } List<String> dimensions() { return dimensions; } @Override public List<TensorFunction<NAMETYPE>> arguments() { return List.of(argument); } @Override public TensorFunction<NAMETYPE> withArguments(List<TensorFunction<NAMETYPE>> arguments) { if ( arguments.size() != 1) throw new IllegalArgumentException("Reduce must have 1 argument, got " + arguments.size()); return new 
Reduce<>(arguments.get(0), aggregator, dimensions); } @Override public PrimitiveTensorFunction<NAMETYPE> toPrimitive() { return new Reduce<>(argument.toPrimitive(), aggregator, dimensions); } @Override public String toString(ToStringContext<NAMETYPE> context) { return "reduce(" + argument.toString(context) + ", " + aggregator + commaSeparated(dimensions) + ")"; } static String commaSeparated(List<String> list) { StringBuilder b = new StringBuilder(); for (String element : list) b.append(", ").append(element); return b.toString(); } @Override public TensorType type(TypeContext<NAMETYPE> context) { return outputType(argument.type(context), dimensions); } @Override public Tensor evaluate(EvaluationContext<NAMETYPE> context) { return evaluate(this.argument.evaluate(context), dimensions, aggregator); } @Override public int hashCode() { return Objects.hash("reduce", argument, dimensions, aggregator); } private static int [] createIndexesToKeep(TensorType argumentType, List<String> dimensions) { Set<Integer> indexesToRemove = new HashSet<>(dimensions.size()*2); for (String dimensionToRemove : dimensions) indexesToRemove.add(argumentType.indexOfDimension(dimensionToRemove).get()); int [] indexesToKeep = new int[argumentType.rank() - indexesToRemove.size()]; int toKeepIndex = 0; for (int i = 0; i < argumentType.rank(); i++) { if ( ! 
indexesToRemove.contains(i)) indexesToKeep[toKeepIndex++] = i; } return indexesToKeep; } private static TensorAddress reduceDimensions(int[] indexesToKeep, TensorAddress address) { String[] reducedLabels = new String[indexesToKeep.length]; int reducedLabelIndex = 0; for (int toKeep : indexesToKeep) reducedLabels[reducedLabelIndex++] = address.label(toKeep); return TensorAddress.of(reducedLabels); } private static Tensor reduceAllGeneral(Tensor argument, Aggregator aggregator) { ValueAggregator valueAggregator = ValueAggregator.ofType(aggregator); for (Iterator<Double> i = argument.valueIterator(); i.hasNext(); ) valueAggregator.aggregate(i.next()); return Tensor.Builder.of(TensorType.empty).cell((valueAggregator.aggregatedValue())).build(); } private static Tensor reduceIndexedVector(IndexedTensor argument, Aggregator aggregator) { ValueAggregator valueAggregator = ValueAggregator.ofType(aggregator); for (int i = 0; i < argument.dimensionSizes().size(0); i++) valueAggregator.aggregate(argument.get(i)); return Tensor.Builder.of(TensorType.empty).cell((valueAggregator.aggregatedValue())).build(); } static abstract class ValueAggregator { static ValueAggregator ofType(Aggregator aggregator) { return switch (aggregator) { case avg -> new AvgAggregator(); case count -> new CountAggregator(); case max -> new MaxAggregator(); case median -> new MedianAggregator(); case min -> new MinAggregator(); case prod -> new ProdAggregator(); case sum -> new SumAggregator(); default -> throw new UnsupportedOperationException("Aggregator " + aggregator + " is not implemented"); }; } /** Add a new value to those aggregated by this */ public abstract void aggregate(double value); /** Returns the value aggregated by this */ public abstract double aggregatedValue(); /** Resets the aggregator */ public abstract void reset(); /** Returns a hash of this aggregator which only depends on its identity */ @Override public abstract int hashCode(); } private static class AvgAggregator extends 
ValueAggregator { private int valueCount = 0; private double valueSum = 0.0; @Override public void aggregate(double value) { valueCount++; valueSum+= value; } @Override public double aggregatedValue() { return valueSum / valueCount; } @Override public void reset() { valueCount = 0; valueSum = 0.0; } @Override public int hashCode() { return "avgAggregator".hashCode(); } } private static class CountAggregator extends ValueAggregator { private int valueCount = 0; @Override public void aggregate(double value) { valueCount++; } @Override public double aggregatedValue() { return valueCount; } @Override public void reset() { valueCount = 0; } @Override public int hashCode() { return "countAggregator".hashCode(); } } private static class MaxAggregator extends ValueAggregator { private double maxValue = Double.NEGATIVE_INFINITY; @Override public void aggregate(double value) { if (value > maxValue) maxValue = value; } @Override public double aggregatedValue() { return maxValue; } @Override public void reset() { maxValue = Double.NEGATIVE_INFINITY; } @Override public int hashCode() { return "maxAggregator".hashCode(); } } private static class MedianAggregator extends ValueAggregator { /** If any NaN is added, the result should be NaN */ private boolean isNaN = false; private List<Double> values = new ArrayList<>(); @Override public void aggregate(double value) { if ( Double.isNaN(value)) isNaN = true; if ( ! 
isNaN) values.add(value); } @Override public double aggregatedValue() { if (isNaN || values.isEmpty()) return Double.NaN; Collections.sort(values); if (values.size() % 2 == 0) return ( values.get(values.size() / 2 - 1) + values.get(values.size() / 2) ) / 2; else return values.get((values.size() - 1)/ 2); } @Override public void reset() { isNaN = false; values = new ArrayList<>(); } @Override public int hashCode() { return "medianAggregator".hashCode(); } } private static class MinAggregator extends ValueAggregator { private double minValue = Double.POSITIVE_INFINITY; @Override public void aggregate(double value) { if (value < minValue) minValue = value; } @Override public double aggregatedValue() { return minValue; } @Override public void reset() { minValue = Double.POSITIVE_INFINITY; } @Override public int hashCode() { return "minAggregator".hashCode(); } } private static class ProdAggregator extends ValueAggregator { private double valueProd = 1.0; @Override public void aggregate(double value) { valueProd *= value; } @Override public double aggregatedValue() { return valueProd; } @Override public void reset() { valueProd = 1.0; } @Override public int hashCode() { return "prodAggregator".hashCode(); } } private static class SumAggregator extends ValueAggregator { private double valueSum = 0.0; @Override public void aggregate(double value) { valueSum += value; } @Override public double aggregatedValue() { return valueSum; } @Override public void reset() { valueSum = 0.0; } @Override public int hashCode() { return "sumAggregator".hashCode(); } } }
class Reduce<NAMETYPE extends Name> extends PrimitiveTensorFunction<NAMETYPE> { public enum Aggregator { avg, count, max, median, min, prod, sum ; } private final TensorFunction<NAMETYPE> argument; private final List<String> dimensions; private final Aggregator aggregator; /** Creates a reduce function reducing all dimensions */ public Reduce(TensorFunction<NAMETYPE> argument, Aggregator aggregator) { this(argument, aggregator, List.of()); } /** Creates a reduce function reducing a single dimension */ public Reduce(TensorFunction<NAMETYPE> argument, Aggregator aggregator, String dimension) { this(argument, aggregator, List.of(dimension)); } /** * Creates a reduce function. * * @param argument the tensor to reduce * @param aggregator the aggregator function to use * @param dimensions the list of dimensions to remove. If an empty list is given, all dimensions are reduced, * producing a dimensionless tensor (a scalar). * @throws IllegalArgumentException if any of the tensor dimensions are not present in the input tensor */ public Reduce(TensorFunction<NAMETYPE> argument, Aggregator aggregator, List<String> dimensions) { this.argument = Objects.requireNonNull(argument, "The argument tensor cannot be null"); this.aggregator = Objects.requireNonNull(aggregator, "The aggregator cannot be null"); this.dimensions = List.copyOf(dimensions); } public static TensorType outputType(TensorType inputType, List<String> reduceDimensions) { return TypeResolver.reduce(inputType, reduceDimensions); } public TensorFunction<NAMETYPE> argument() { return argument; } Aggregator aggregator() { return aggregator; } List<String> dimensions() { return dimensions; } @Override public List<TensorFunction<NAMETYPE>> arguments() { return List.of(argument); } @Override public TensorFunction<NAMETYPE> withArguments(List<TensorFunction<NAMETYPE>> arguments) { if ( arguments.size() != 1) throw new IllegalArgumentException("Reduce must have 1 argument, got " + arguments.size()); return new 
Reduce<>(arguments.get(0), aggregator, dimensions); } @Override public PrimitiveTensorFunction<NAMETYPE> toPrimitive() { return new Reduce<>(argument.toPrimitive(), aggregator, dimensions); } @Override public String toString(ToStringContext<NAMETYPE> context) { return "reduce(" + argument.toString(context) + ", " + aggregator + commaSeparated(dimensions) + ")"; } static String commaSeparated(List<String> list) { StringBuilder b = new StringBuilder(); for (String element : list) b.append(", ").append(element); return b.toString(); } @Override public TensorType type(TypeContext<NAMETYPE> context) { return outputType(argument.type(context), dimensions); } @Override public Tensor evaluate(EvaluationContext<NAMETYPE> context) { return evaluate(this.argument.evaluate(context), dimensions, aggregator); } @Override public int hashCode() { return Objects.hash("reduce", argument, dimensions, aggregator); } private static int[] createIndexesToKeep(TensorType argumentType, List<String> dimensions) { Set<Integer> indexesToRemove = new HashSet<>(dimensions.size()*2); for (String dimensionToRemove : dimensions) indexesToRemove.add(argumentType.indexOfDimension(dimensionToRemove).get()); int[] indexesToKeep = new int[argumentType.rank() - indexesToRemove.size()]; int toKeepIndex = 0; for (int i = 0; i < argumentType.rank(); i++) { if ( ! 
indexesToRemove.contains(i)) indexesToKeep[toKeepIndex++] = i; } return indexesToKeep; } private static TensorAddress reduceDimensions(int[] indexesToKeep, TensorAddress address) { String[] reducedLabels = new String[indexesToKeep.length]; int reducedLabelIndex = 0; for (int toKeep : indexesToKeep) reducedLabels[reducedLabelIndex++] = address.label(toKeep); return TensorAddress.of(reducedLabels); } private static Tensor reduceAllGeneral(Tensor argument, Aggregator aggregator) { ValueAggregator valueAggregator = ValueAggregator.ofType(aggregator); for (Iterator<Double> i = argument.valueIterator(); i.hasNext(); ) valueAggregator.aggregate(i.next()); return Tensor.Builder.of(TensorType.empty).cell((valueAggregator.aggregatedValue())).build(); } private static Tensor reduceIndexedVector(IndexedTensor argument, Aggregator aggregator) { ValueAggregator valueAggregator = ValueAggregator.ofType(aggregator); for (int i = 0; i < argument.dimensionSizes().size(0); i++) valueAggregator.aggregate(argument.get(i)); return Tensor.Builder.of(TensorType.empty).cell((valueAggregator.aggregatedValue())).build(); } static abstract class ValueAggregator { static ValueAggregator ofType(Aggregator aggregator) { return switch (aggregator) { case avg -> new AvgAggregator(); case count -> new CountAggregator(); case max -> new MaxAggregator(); case median -> new MedianAggregator(); case min -> new MinAggregator(); case prod -> new ProdAggregator(); case sum -> new SumAggregator(); default -> throw new UnsupportedOperationException("Aggregator " + aggregator + " is not implemented"); }; } /** Add a new value to those aggregated by this */ public abstract void aggregate(double value); /** Returns the value aggregated by this */ public abstract double aggregatedValue(); /** Resets the aggregator */ public abstract void reset(); /** Returns a hash of this aggregator which only depends on its identity */ @Override public abstract int hashCode(); } private static class AvgAggregator extends 
ValueAggregator { private int valueCount = 0; private double valueSum = 0.0; @Override public void aggregate(double value) { valueCount++; valueSum+= value; } @Override public double aggregatedValue() { return valueSum / valueCount; } @Override public void reset() { valueCount = 0; valueSum = 0.0; } @Override public int hashCode() { return "avgAggregator".hashCode(); } } private static class CountAggregator extends ValueAggregator { private int valueCount = 0; @Override public void aggregate(double value) { valueCount++; } @Override public double aggregatedValue() { return valueCount; } @Override public void reset() { valueCount = 0; } @Override public int hashCode() { return "countAggregator".hashCode(); } } private static class MaxAggregator extends ValueAggregator { private double maxValue = Double.NEGATIVE_INFINITY; @Override public void aggregate(double value) { if (value > maxValue) maxValue = value; } @Override public double aggregatedValue() { return maxValue; } @Override public void reset() { maxValue = Double.NEGATIVE_INFINITY; } @Override public int hashCode() { return "maxAggregator".hashCode(); } } private static class MedianAggregator extends ValueAggregator { /** If any NaN is added, the result should be NaN */ private boolean isNaN = false; private List<Double> values = new ArrayList<>(); @Override public void aggregate(double value) { if ( Double.isNaN(value)) isNaN = true; if ( ! 
isNaN) values.add(value); } @Override public double aggregatedValue() { if (isNaN || values.isEmpty()) return Double.NaN; Collections.sort(values); if (values.size() % 2 == 0) return ( values.get(values.size() / 2 - 1) + values.get(values.size() / 2) ) / 2; else return values.get((values.size() - 1)/ 2); } @Override public void reset() { isNaN = false; values = new ArrayList<>(); } @Override public int hashCode() { return "medianAggregator".hashCode(); } } private static class MinAggregator extends ValueAggregator { private double minValue = Double.POSITIVE_INFINITY; @Override public void aggregate(double value) { if (value < minValue) minValue = value; } @Override public double aggregatedValue() { return minValue; } @Override public void reset() { minValue = Double.POSITIVE_INFINITY; } @Override public int hashCode() { return "minAggregator".hashCode(); } } private static class ProdAggregator extends ValueAggregator { private double valueProd = 1.0; @Override public void aggregate(double value) { valueProd *= value; } @Override public double aggregatedValue() { return valueProd; } @Override public void reset() { valueProd = 1.0; } @Override public int hashCode() { return "prodAggregator".hashCode(); } } private static class SumAggregator extends ValueAggregator { private double valueSum = 0.0; @Override public void aggregate(double value) { valueSum += value; } @Override public double aggregatedValue() { return valueSum; } @Override public void reset() { valueSum = 0.0; } @Override public int hashCode() { return "sumAggregator".hashCode(); } } }
Consider checking for truncation here, if we (ever) support large enough tensors for that to happen. Or add a custom upper size.
static Tensor evaluate(Tensor argument, List<String> dimensions, Aggregator aggregator) { if ( ! dimensions.isEmpty() && ! argument.type().dimensionNames().containsAll(dimensions)) throw new IllegalArgumentException("Cannot reduce " + argument + " over dimensions " + dimensions + ": Not all those dimensions are present in this tensor"); if (dimensions.isEmpty() || dimensions.size() == argument.type().dimensions().size()) if (argument.isEmpty()) return Tensor.from(0.0); else if (argument.type().dimensions().size() == 1 && argument instanceof IndexedTensor) return reduceIndexedVector((IndexedTensor)argument, aggregator); else return reduceAllGeneral(argument, aggregator); TensorType reducedType = outputType(argument.type(), dimensions); int[] indexesToKeep = createIndexesToKeep(argument.type(), dimensions); Map<TensorAddress, ValueAggregator> aggregatingCells = new HashMap<>((int)argument.size()); for (Iterator<Tensor.Cell> i = argument.cellIterator(); i.hasNext(); ) { Map.Entry<TensorAddress, Double> cell = i.next(); TensorAddress reducedAddress = reduceDimensions(indexesToKeep, cell.getKey()); ValueAggregator aggr = aggregatingCells.putIfAbsent(reducedAddress, ValueAggregator.ofType(aggregator)); if (aggr == null) aggr = aggregatingCells.get(reducedAddress); aggr.aggregate(cell.getValue()); } Tensor.Builder reducedBuilder = Tensor.Builder.of(reducedType); for (Map.Entry<TensorAddress, ValueAggregator> aggregatingCell : aggregatingCells.entrySet()) reducedBuilder.cell(aggregatingCell.getKey(), aggregatingCell.getValue().aggregatedValue()); return reducedBuilder.build(); }
Map<TensorAddress, ValueAggregator> aggregatingCells = new HashMap<>((int)argument.size());
static Tensor evaluate(Tensor argument, List<String> dimensions, Aggregator aggregator) { if ( ! dimensions.isEmpty() && ! argument.type().dimensionNames().containsAll(dimensions)) throw new IllegalArgumentException("Cannot reduce " + argument + " over dimensions " + dimensions + ": Not all those dimensions are present in this tensor"); if (dimensions.isEmpty() || dimensions.size() == argument.type().dimensions().size()) if (argument.isEmpty()) return Tensor.from(0.0); else if (argument.type().dimensions().size() == 1 && argument instanceof IndexedTensor) return reduceIndexedVector((IndexedTensor)argument, aggregator); else return reduceAllGeneral(argument, aggregator); TensorType reducedType = outputType(argument.type(), dimensions); int[] indexesToKeep = createIndexesToKeep(argument.type(), dimensions); Map<TensorAddress, ValueAggregator> aggregatingCells = new HashMap<>((int)argument.size()); for (Iterator<Tensor.Cell> i = argument.cellIterator(); i.hasNext(); ) { Map.Entry<TensorAddress, Double> cell = i.next(); TensorAddress reducedAddress = reduceDimensions(indexesToKeep, cell.getKey()); ValueAggregator aggr = aggregatingCells.putIfAbsent(reducedAddress, ValueAggregator.ofType(aggregator)); if (aggr == null) aggr = aggregatingCells.get(reducedAddress); aggr.aggregate(cell.getValue()); } Tensor.Builder reducedBuilder = Tensor.Builder.of(reducedType); for (Map.Entry<TensorAddress, ValueAggregator> aggregatingCell : aggregatingCells.entrySet()) reducedBuilder.cell(aggregatingCell.getKey(), aggregatingCell.getValue().aggregatedValue()); return reducedBuilder.build(); }
class Reduce<NAMETYPE extends Name> extends PrimitiveTensorFunction<NAMETYPE> { public enum Aggregator { avg, count, max, median, min, prod, sum ; } private final TensorFunction<NAMETYPE> argument; private final List<String> dimensions; private final Aggregator aggregator; /** Creates a reduce function reducing all dimensions */ public Reduce(TensorFunction<NAMETYPE> argument, Aggregator aggregator) { this(argument, aggregator, List.of()); } /** Creates a reduce function reducing a single dimension */ public Reduce(TensorFunction<NAMETYPE> argument, Aggregator aggregator, String dimension) { this(argument, aggregator, List.of(dimension)); } /** * Creates a reduce function. * * @param argument the tensor to reduce * @param aggregator the aggregator function to use * @param dimensions the list of dimensions to remove. If an empty list is given, all dimensions are reduced, * producing a dimensionless tensor (a scalar). * @throws IllegalArgumentException if any of the tensor dimensions are not present in the input tensor */ public Reduce(TensorFunction<NAMETYPE> argument, Aggregator aggregator, List<String> dimensions) { this.argument = Objects.requireNonNull(argument, "The argument tensor cannot be null"); this.aggregator = Objects.requireNonNull(aggregator, "The aggregator cannot be null"); this.dimensions = List.copyOf(dimensions); } public static TensorType outputType(TensorType inputType, List<String> reduceDimensions) { return TypeResolver.reduce(inputType, reduceDimensions); } public TensorFunction<NAMETYPE> argument() { return argument; } Aggregator aggregator() { return aggregator; } List<String> dimensions() { return dimensions; } @Override public List<TensorFunction<NAMETYPE>> arguments() { return List.of(argument); } @Override public TensorFunction<NAMETYPE> withArguments(List<TensorFunction<NAMETYPE>> arguments) { if ( arguments.size() != 1) throw new IllegalArgumentException("Reduce must have 1 argument, got " + arguments.size()); return new 
Reduce<>(arguments.get(0), aggregator, dimensions); } @Override public PrimitiveTensorFunction<NAMETYPE> toPrimitive() { return new Reduce<>(argument.toPrimitive(), aggregator, dimensions); } @Override public String toString(ToStringContext<NAMETYPE> context) { return "reduce(" + argument.toString(context) + ", " + aggregator + commaSeparated(dimensions) + ")"; } static String commaSeparated(List<String> list) { StringBuilder b = new StringBuilder(); for (String element : list) b.append(", ").append(element); return b.toString(); } @Override public TensorType type(TypeContext<NAMETYPE> context) { return outputType(argument.type(context), dimensions); } @Override public Tensor evaluate(EvaluationContext<NAMETYPE> context) { return evaluate(this.argument.evaluate(context), dimensions, aggregator); } @Override public int hashCode() { return Objects.hash("reduce", argument, dimensions, aggregator); } private static int [] createIndexesToKeep(TensorType argumentType, List<String> dimensions) { Set<Integer> indexesToRemove = new HashSet<>(dimensions.size()*2); for (String dimensionToRemove : dimensions) indexesToRemove.add(argumentType.indexOfDimension(dimensionToRemove).get()); int [] indexesToKeep = new int[argumentType.rank() - indexesToRemove.size()]; int toKeepIndex = 0; for (int i = 0; i < argumentType.rank(); i++) { if ( ! 
indexesToRemove.contains(i)) indexesToKeep[toKeepIndex++] = i; } return indexesToKeep; } private static TensorAddress reduceDimensions(int[] indexesToKeep, TensorAddress address) { String[] reducedLabels = new String[indexesToKeep.length]; int reducedLabelIndex = 0; for (int toKeep : indexesToKeep) reducedLabels[reducedLabelIndex++] = address.label(toKeep); return TensorAddress.of(reducedLabels); } private static Tensor reduceAllGeneral(Tensor argument, Aggregator aggregator) { ValueAggregator valueAggregator = ValueAggregator.ofType(aggregator); for (Iterator<Double> i = argument.valueIterator(); i.hasNext(); ) valueAggregator.aggregate(i.next()); return Tensor.Builder.of(TensorType.empty).cell((valueAggregator.aggregatedValue())).build(); } private static Tensor reduceIndexedVector(IndexedTensor argument, Aggregator aggregator) { ValueAggregator valueAggregator = ValueAggregator.ofType(aggregator); for (int i = 0; i < argument.dimensionSizes().size(0); i++) valueAggregator.aggregate(argument.get(i)); return Tensor.Builder.of(TensorType.empty).cell((valueAggregator.aggregatedValue())).build(); } static abstract class ValueAggregator { static ValueAggregator ofType(Aggregator aggregator) { return switch (aggregator) { case avg -> new AvgAggregator(); case count -> new CountAggregator(); case max -> new MaxAggregator(); case median -> new MedianAggregator(); case min -> new MinAggregator(); case prod -> new ProdAggregator(); case sum -> new SumAggregator(); default -> throw new UnsupportedOperationException("Aggregator " + aggregator + " is not implemented"); }; } /** Add a new value to those aggregated by this */ public abstract void aggregate(double value); /** Returns the value aggregated by this */ public abstract double aggregatedValue(); /** Resets the aggregator */ public abstract void reset(); /** Returns a hash of this aggregator which only depends on its identity */ @Override public abstract int hashCode(); } private static class AvgAggregator extends 
ValueAggregator { private int valueCount = 0; private double valueSum = 0.0; @Override public void aggregate(double value) { valueCount++; valueSum+= value; } @Override public double aggregatedValue() { return valueSum / valueCount; } @Override public void reset() { valueCount = 0; valueSum = 0.0; } @Override public int hashCode() { return "avgAggregator".hashCode(); } } private static class CountAggregator extends ValueAggregator { private int valueCount = 0; @Override public void aggregate(double value) { valueCount++; } @Override public double aggregatedValue() { return valueCount; } @Override public void reset() { valueCount = 0; } @Override public int hashCode() { return "countAggregator".hashCode(); } } private static class MaxAggregator extends ValueAggregator { private double maxValue = Double.NEGATIVE_INFINITY; @Override public void aggregate(double value) { if (value > maxValue) maxValue = value; } @Override public double aggregatedValue() { return maxValue; } @Override public void reset() { maxValue = Double.NEGATIVE_INFINITY; } @Override public int hashCode() { return "maxAggregator".hashCode(); } } private static class MedianAggregator extends ValueAggregator { /** If any NaN is added, the result should be NaN */ private boolean isNaN = false; private List<Double> values = new ArrayList<>(); @Override public void aggregate(double value) { if ( Double.isNaN(value)) isNaN = true; if ( ! 
isNaN) values.add(value); } @Override public double aggregatedValue() { if (isNaN || values.isEmpty()) return Double.NaN; Collections.sort(values); if (values.size() % 2 == 0) return ( values.get(values.size() / 2 - 1) + values.get(values.size() / 2) ) / 2; else return values.get((values.size() - 1)/ 2); } @Override public void reset() { isNaN = false; values = new ArrayList<>(); } @Override public int hashCode() { return "medianAggregator".hashCode(); } } private static class MinAggregator extends ValueAggregator { private double minValue = Double.POSITIVE_INFINITY; @Override public void aggregate(double value) { if (value < minValue) minValue = value; } @Override public double aggregatedValue() { return minValue; } @Override public void reset() { minValue = Double.POSITIVE_INFINITY; } @Override public int hashCode() { return "minAggregator".hashCode(); } } private static class ProdAggregator extends ValueAggregator { private double valueProd = 1.0; @Override public void aggregate(double value) { valueProd *= value; } @Override public double aggregatedValue() { return valueProd; } @Override public void reset() { valueProd = 1.0; } @Override public int hashCode() { return "prodAggregator".hashCode(); } } private static class SumAggregator extends ValueAggregator { private double valueSum = 0.0; @Override public void aggregate(double value) { valueSum += value; } @Override public double aggregatedValue() { return valueSum; } @Override public void reset() { valueSum = 0.0; } @Override public int hashCode() { return "sumAggregator".hashCode(); } } }
class Reduce<NAMETYPE extends Name> extends PrimitiveTensorFunction<NAMETYPE> { public enum Aggregator { avg, count, max, median, min, prod, sum ; } private final TensorFunction<NAMETYPE> argument; private final List<String> dimensions; private final Aggregator aggregator; /** Creates a reduce function reducing all dimensions */ public Reduce(TensorFunction<NAMETYPE> argument, Aggregator aggregator) { this(argument, aggregator, List.of()); } /** Creates a reduce function reducing a single dimension */ public Reduce(TensorFunction<NAMETYPE> argument, Aggregator aggregator, String dimension) { this(argument, aggregator, List.of(dimension)); } /** * Creates a reduce function. * * @param argument the tensor to reduce * @param aggregator the aggregator function to use * @param dimensions the list of dimensions to remove. If an empty list is given, all dimensions are reduced, * producing a dimensionless tensor (a scalar). * @throws IllegalArgumentException if any of the tensor dimensions are not present in the input tensor */ public Reduce(TensorFunction<NAMETYPE> argument, Aggregator aggregator, List<String> dimensions) { this.argument = Objects.requireNonNull(argument, "The argument tensor cannot be null"); this.aggregator = Objects.requireNonNull(aggregator, "The aggregator cannot be null"); this.dimensions = List.copyOf(dimensions); } public static TensorType outputType(TensorType inputType, List<String> reduceDimensions) { return TypeResolver.reduce(inputType, reduceDimensions); } public TensorFunction<NAMETYPE> argument() { return argument; } Aggregator aggregator() { return aggregator; } List<String> dimensions() { return dimensions; } @Override public List<TensorFunction<NAMETYPE>> arguments() { return List.of(argument); } @Override public TensorFunction<NAMETYPE> withArguments(List<TensorFunction<NAMETYPE>> arguments) { if ( arguments.size() != 1) throw new IllegalArgumentException("Reduce must have 1 argument, got " + arguments.size()); return new 
Reduce<>(arguments.get(0), aggregator, dimensions); } @Override public PrimitiveTensorFunction<NAMETYPE> toPrimitive() { return new Reduce<>(argument.toPrimitive(), aggregator, dimensions); } @Override public String toString(ToStringContext<NAMETYPE> context) { return "reduce(" + argument.toString(context) + ", " + aggregator + commaSeparated(dimensions) + ")"; } static String commaSeparated(List<String> list) { StringBuilder b = new StringBuilder(); for (String element : list) b.append(", ").append(element); return b.toString(); } @Override public TensorType type(TypeContext<NAMETYPE> context) { return outputType(argument.type(context), dimensions); } @Override public Tensor evaluate(EvaluationContext<NAMETYPE> context) { return evaluate(this.argument.evaluate(context), dimensions, aggregator); } @Override public int hashCode() { return Objects.hash("reduce", argument, dimensions, aggregator); } private static int[] createIndexesToKeep(TensorType argumentType, List<String> dimensions) { Set<Integer> indexesToRemove = new HashSet<>(dimensions.size()*2); for (String dimensionToRemove : dimensions) indexesToRemove.add(argumentType.indexOfDimension(dimensionToRemove).get()); int[] indexesToKeep = new int[argumentType.rank() - indexesToRemove.size()]; int toKeepIndex = 0; for (int i = 0; i < argumentType.rank(); i++) { if ( ! 
indexesToRemove.contains(i)) indexesToKeep[toKeepIndex++] = i; } return indexesToKeep; } private static TensorAddress reduceDimensions(int[] indexesToKeep, TensorAddress address) { String[] reducedLabels = new String[indexesToKeep.length]; int reducedLabelIndex = 0; for (int toKeep : indexesToKeep) reducedLabels[reducedLabelIndex++] = address.label(toKeep); return TensorAddress.of(reducedLabels); } private static Tensor reduceAllGeneral(Tensor argument, Aggregator aggregator) { ValueAggregator valueAggregator = ValueAggregator.ofType(aggregator); for (Iterator<Double> i = argument.valueIterator(); i.hasNext(); ) valueAggregator.aggregate(i.next()); return Tensor.Builder.of(TensorType.empty).cell((valueAggregator.aggregatedValue())).build(); } private static Tensor reduceIndexedVector(IndexedTensor argument, Aggregator aggregator) { ValueAggregator valueAggregator = ValueAggregator.ofType(aggregator); for (int i = 0; i < argument.dimensionSizes().size(0); i++) valueAggregator.aggregate(argument.get(i)); return Tensor.Builder.of(TensorType.empty).cell((valueAggregator.aggregatedValue())).build(); } static abstract class ValueAggregator { static ValueAggregator ofType(Aggregator aggregator) { return switch (aggregator) { case avg -> new AvgAggregator(); case count -> new CountAggregator(); case max -> new MaxAggregator(); case median -> new MedianAggregator(); case min -> new MinAggregator(); case prod -> new ProdAggregator(); case sum -> new SumAggregator(); default -> throw new UnsupportedOperationException("Aggregator " + aggregator + " is not implemented"); }; } /** Add a new value to those aggregated by this */ public abstract void aggregate(double value); /** Returns the value aggregated by this */ public abstract double aggregatedValue(); /** Resets the aggregator */ public abstract void reset(); /** Returns a hash of this aggregator which only depends on its identity */ @Override public abstract int hashCode(); } private static class AvgAggregator extends 
ValueAggregator { private int valueCount = 0; private double valueSum = 0.0; @Override public void aggregate(double value) { valueCount++; valueSum+= value; } @Override public double aggregatedValue() { return valueSum / valueCount; } @Override public void reset() { valueCount = 0; valueSum = 0.0; } @Override public int hashCode() { return "avgAggregator".hashCode(); } } private static class CountAggregator extends ValueAggregator { private int valueCount = 0; @Override public void aggregate(double value) { valueCount++; } @Override public double aggregatedValue() { return valueCount; } @Override public void reset() { valueCount = 0; } @Override public int hashCode() { return "countAggregator".hashCode(); } } private static class MaxAggregator extends ValueAggregator { private double maxValue = Double.NEGATIVE_INFINITY; @Override public void aggregate(double value) { if (value > maxValue) maxValue = value; } @Override public double aggregatedValue() { return maxValue; } @Override public void reset() { maxValue = Double.NEGATIVE_INFINITY; } @Override public int hashCode() { return "maxAggregator".hashCode(); } } private static class MedianAggregator extends ValueAggregator { /** If any NaN is added, the result should be NaN */ private boolean isNaN = false; private List<Double> values = new ArrayList<>(); @Override public void aggregate(double value) { if ( Double.isNaN(value)) isNaN = true; if ( ! 
isNaN) values.add(value); } @Override public double aggregatedValue() { if (isNaN || values.isEmpty()) return Double.NaN; Collections.sort(values); if (values.size() % 2 == 0) return ( values.get(values.size() / 2 - 1) + values.get(values.size() / 2) ) / 2; else return values.get((values.size() - 1)/ 2); } @Override public void reset() { isNaN = false; values = new ArrayList<>(); } @Override public int hashCode() { return "medianAggregator".hashCode(); } } private static class MinAggregator extends ValueAggregator { private double minValue = Double.POSITIVE_INFINITY; @Override public void aggregate(double value) { if (value < minValue) minValue = value; } @Override public double aggregatedValue() { return minValue; } @Override public void reset() { minValue = Double.POSITIVE_INFINITY; } @Override public int hashCode() { return "minAggregator".hashCode(); } } private static class ProdAggregator extends ValueAggregator { private double valueProd = 1.0; @Override public void aggregate(double value) { valueProd *= value; } @Override public double aggregatedValue() { return valueProd; } @Override public void reset() { valueProd = 1.0; } @Override public int hashCode() { return "prodAggregator".hashCode(); } } private static class SumAggregator extends ValueAggregator { private double valueSum = 0.0; @Override public void aggregate(double value) { valueSum += value; } @Override public double aggregatedValue() { return valueSum; } @Override public void reset() { valueSum = 0.0; } @Override public int hashCode() { return "sumAggregator".hashCode(); } } }
Add a test of trying to get a model for a language not present in the registry.
public void testDefaultSignificanceModelRegistry() { HashMap<Language, Path> models = new HashMap<>(); models.put(Language.ENGLISH, Path.of("src/test/models/en.json")); models.put(Language.NORWEGIAN_BOKMAL, Path.of("src/test/models/no.json")); DefaultSignificanceModelRegistry defaultSignificanceModelRegistry = new DefaultSignificanceModelRegistry(models); var englishModel = defaultSignificanceModelRegistry.getModel(Language.ENGLISH); var norwegianModel = defaultSignificanceModelRegistry.getModel(Language.NORWEGIAN_BOKMAL); assertNotNull(englishModel); assertNotNull(norwegianModel); assertEquals(2, englishModel.documentFrequency("test").frequency()); assertEquals(10, englishModel.documentFrequency("test").corpusSize()); assertEquals(3, norwegianModel.documentFrequency("nei").frequency()); assertEquals(20, norwegianModel.documentFrequency("nei").corpusSize()); }
var norwegianModel = defaultSignificanceModelRegistry.getModel(Language.NORWEGIAN_BOKMAL);
public void testDefaultSignificanceModelRegistry() { HashMap<Language, Path> models = new HashMap<>(); models.put(Language.ENGLISH, Path.of("src/test/models/en.json")); models.put(Language.NORWEGIAN_BOKMAL, Path.of("src/test/models/no.json")); DefaultSignificanceModelRegistry defaultSignificanceModelRegistry = new DefaultSignificanceModelRegistry(models); var englishModel = defaultSignificanceModelRegistry.getModel(Language.ENGLISH); var norwegianModel = defaultSignificanceModelRegistry.getModel(Language.NORWEGIAN_BOKMAL); assertThrows(IllegalArgumentException.class, () -> defaultSignificanceModelRegistry.getModel(Language.FRENCH)); assertNotNull(englishModel); assertNotNull(norwegianModel); assertEquals(2, englishModel.documentFrequency("test").frequency()); assertEquals(10, englishModel.documentFrequency("test").corpusSize()); assertEquals(3, norwegianModel.documentFrequency("nei").frequency()); assertEquals(20, norwegianModel.documentFrequency("nei").corpusSize()); assertEquals(1, norwegianModel.documentFrequency("non-existent-word").frequency()); assertEquals(20, norwegianModel.documentFrequency("non-existent-word").corpusSize()); }
class DefaultSignificanceModelRegistryTest { @Test }
class DefaultSignificanceModelRegistryTest { @Test }
Leftover debugging ... ?
private Item anyItemsBody(boolean topLevel) { Item topLevelItem = null; NotItem not = null; Item item; do { item = positiveItem(); if (item != null) { if (not == null) { not = new NotItem(); not.addPositiveItem(item); topLevelItem = combineItems(topLevelItem, not); } else { not.addPositiveItem(item); } } if (item == null) { item = negativeItem(); if (item != null) { if (not == null) { not = new NotItem(); not.addNegativeItem(item); topLevelItem = combineItems(topLevelItem, not); System.out.println("not : " + not + " not poisitve: " + not.getPositiveItem()); } else { not.addNegativeItem(item); } } } if (item == null) { item = compositeItem(); if (item != null) { if (topLevelItem == null) { topLevelItem = item; } else { topLevelItem = combineItems(topLevelItem, item); } } } if (item == null) { item = indexableItem().getFirst(); if (item != null) { if (topLevelItem == null) { topLevelItem = item; } else if (needNewORTopLevel(topLevelItem, item)) { CompositeItem newTop = new OrItem(); newTop.addItem(topLevelItem); newTop.addItem(item); topLevelItem = newTop; } else if (topLevelItem instanceof NotItem) { topLevelItem = combineItems(topLevelItem, item); } else { ((CompositeItem) topLevelItem).addItem(item); } } } if (topLevel && item == null) { tokens.skip(); } } while (tokens.hasNext() && (topLevel || item != null)); if (not != null && not.getItemCount() == 1) { if (topLevelItem == null || topLevelItem == not) { return not.removeItem(0); } else if (topLevelItem instanceof RankItem) { removeNot((RankItem) topLevelItem); return combineItems(topLevelItem, not.getPositiveItem()); } } if (not != null && not.getPositiveItem() instanceof TrueItem) { if (topLevelItem != null && topLevelItem != not) { not.addPositiveItem(getItemAsPositiveItem(topLevelItem, not)); return not; } else { return not; } } if (topLevelItem != null) { return topLevelItem; } else { return not; } }
System.out.println("not : " + not + " not poisitve: " + not.getPositiveItem());
private Item anyItemsBody(boolean topLevel) { Item topLevelItem = null; NotItem not = null; Item item; do { item = positiveItem(); if (item != null) { if (not == null) { not = new NotItem(); not.addPositiveItem(item); topLevelItem = combineItems(topLevelItem, not); } else { not.addPositiveItem(item); } } if (item == null) { item = negativeItem(); if (item != null) { if (not == null) { not = new NotItem(); not.addNegativeItem(item); topLevelItem = combineItems(topLevelItem, not); } else { not.addNegativeItem(item); } } } if (item == null) { item = compositeItem(); if (item != null) { if (topLevelItem == null) { topLevelItem = item; } else { topLevelItem = combineItems(topLevelItem, item); } } } if (item == null) { item = indexableItem().getFirst(); if (item != null) { if (topLevelItem == null) { topLevelItem = item; } else if (needNewORTopLevel(topLevelItem, item)) { CompositeItem newTop = new OrItem(); newTop.addItem(topLevelItem); newTop.addItem(item); topLevelItem = newTop; } else if (topLevelItem instanceof NotItem) { topLevelItem = combineItems(topLevelItem, item); } else { ((CompositeItem) topLevelItem).addItem(item); } } } if (topLevel && item == null) { tokens.skip(); } } while (tokens.hasNext() && (topLevel || item != null)); if (not != null && not.getItemCount() == 1) { if (topLevelItem == null || topLevelItem == not) { return not.removeItem(0); } else if (topLevelItem instanceof RankItem) { removeNot((RankItem) topLevelItem); return combineItems(topLevelItem, not.getPositiveItem()); } } if (not != null && not.getPositiveItem() instanceof TrueItem) { if (topLevelItem != null && topLevelItem != not) { not.addPositiveItem(getItemAsPositiveItem(topLevelItem, not)); return not; } else { return not; } } if (topLevelItem != null) { return topLevelItem; } else { return not; } }
class SimpleParser extends StructuredParser { protected SimpleParser(ParserEnvironment environment) { super(environment); } protected Item handleComposite(boolean topLevel) { return anyItems(false); } protected abstract Item negativeItem(); /** * A collection of one or more items. * More items are collected in the default composite - or. * If there's a explicit composite and some other terms, * a rank terms combines them */ protected Item anyItems(boolean topLevel) { int position = tokens.getPosition(); Item item = null; try { item = anyItemsBody(topLevel); return item; } finally { if (item == null) { tokens.setPosition(position); } } } /** Says whether we need a new top level OR item given the new item */ private boolean needNewORTopLevel(Item topLevelItem, Item item) { if (item == null) return false; if (topLevelItem instanceof TermItem) return true; if (topLevelItem instanceof PhraseItem) return true; if (topLevelItem instanceof BlockItem) return true; if ( topLevelItem instanceof AndItem) return true; return false; } /** Removes and returns the first <i>not</i> found in the composite, or returns null if there's none */ private NotItem removeNot(CompositeItem composite) { for (int i = 0; i < composite.getItemCount(); i++) { if (composite.getItem(i) instanceof NotItem) { return (NotItem) composite.removeItem(i); } } return null; } protected abstract Item combineItems(Item topLevelItem, Item item); protected Item positiveItem() { int position = tokens.getPosition(); Item item = null; try { if ( ! tokens.skipMultiple(PLUS)) { return null; } if (tokens.currentIsNoIgnore(SPACE)) { return null; } item = indexableItem().getFirst(); if (item == null) { item = compositeItem(); } if (item!=null) item.setProtected(true); return item; } finally { if (item == null) { tokens.setPosition(position); } } } /** * Returns the content of the given item as an item to be added as a positive item. 
* Used to turn a top level item into implicit positives when explicit positives * (+ items) are not found, but negatives are. */ private Item getItemAsPositiveItem(Item item, NotItem not) { if (!(item instanceof RankItem rank)) { return item; } int limit = rank.getItemCount(); int n = 0; while (n < limit) { if (rank.getItem(n) == not) { rank.removeItem(n); break; } n++; } if (rank.getItemCount() == 1) { return rank.getItem(0); } OrItem or = new OrItem(); for (Iterator<Item> i = rank.getItemIterator(); i.hasNext();) { or.addItem(i.next()); } return or; } }
class SimpleParser extends StructuredParser { protected SimpleParser(ParserEnvironment environment) { super(environment); } protected Item handleComposite(boolean topLevel) { return anyItems(false); } protected abstract Item negativeItem(); /** * A collection of one or more items. * More items are collected in the default composite - or. * If there's a explicit composite and some other terms, * a rank terms combines them */ protected Item anyItems(boolean topLevel) { int position = tokens.getPosition(); Item item = null; try { item = anyItemsBody(topLevel); return item; } finally { if (item == null) { tokens.setPosition(position); } } } /** Says whether we need a new top level OR item given the new item */ private boolean needNewORTopLevel(Item topLevelItem, Item item) { if (item == null) return false; if (topLevelItem instanceof TermItem) return true; if (topLevelItem instanceof PhraseItem) return true; if (topLevelItem instanceof BlockItem) return true; if ( topLevelItem instanceof AndItem) return true; return false; } /** Removes and returns the first <i>not</i> found in the composite, or returns null if there's none */ private NotItem removeNot(CompositeItem composite) { for (int i = 0; i < composite.getItemCount(); i++) { if (composite.getItem(i) instanceof NotItem) { return (NotItem) composite.removeItem(i); } } return null; } protected abstract Item combineItems(Item topLevelItem, Item item); protected Item positiveItem() { int position = tokens.getPosition(); Item item = null; try { if ( ! tokens.skipMultiple(PLUS)) { return null; } if (tokens.currentIsNoIgnore(SPACE)) { return null; } item = indexableItem().getFirst(); if (item == null) { item = compositeItem(); } if (item!=null) item.setProtected(true); return item; } finally { if (item == null) { tokens.setPosition(position); } } } /** * Returns the content of the given item as an item to be added as a positive item. 
* Used to turn a top level item into implicit positives when explicit positives * (+ items) are not found, but negatives are. */ private Item getItemAsPositiveItem(Item item, NotItem not) { if (!(item instanceof RankItem rank)) { return item; } int limit = rank.getItemCount(); int n = 0; while (n < limit) { if (rank.getItem(n) == not) { rank.removeItem(n); break; } n++; } if (rank.getItemCount() == 1) { return rank.getItem(0); } OrItem or = new OrItem(); for (Iterator<Item> i = rank.getItemIterator(); i.hasNext();) { or.addItem(i.next()); } return or; } }
thanks!
private Item anyItemsBody(boolean topLevel) { Item topLevelItem = null; NotItem not = null; Item item; do { item = positiveItem(); if (item != null) { if (not == null) { not = new NotItem(); not.addPositiveItem(item); topLevelItem = combineItems(topLevelItem, not); } else { not.addPositiveItem(item); } } if (item == null) { item = negativeItem(); if (item != null) { if (not == null) { not = new NotItem(); not.addNegativeItem(item); topLevelItem = combineItems(topLevelItem, not); System.out.println("not : " + not + " not poisitve: " + not.getPositiveItem()); } else { not.addNegativeItem(item); } } } if (item == null) { item = compositeItem(); if (item != null) { if (topLevelItem == null) { topLevelItem = item; } else { topLevelItem = combineItems(topLevelItem, item); } } } if (item == null) { item = indexableItem().getFirst(); if (item != null) { if (topLevelItem == null) { topLevelItem = item; } else if (needNewORTopLevel(topLevelItem, item)) { CompositeItem newTop = new OrItem(); newTop.addItem(topLevelItem); newTop.addItem(item); topLevelItem = newTop; } else if (topLevelItem instanceof NotItem) { topLevelItem = combineItems(topLevelItem, item); } else { ((CompositeItem) topLevelItem).addItem(item); } } } if (topLevel && item == null) { tokens.skip(); } } while (tokens.hasNext() && (topLevel || item != null)); if (not != null && not.getItemCount() == 1) { if (topLevelItem == null || topLevelItem == not) { return not.removeItem(0); } else if (topLevelItem instanceof RankItem) { removeNot((RankItem) topLevelItem); return combineItems(topLevelItem, not.getPositiveItem()); } } if (not != null && not.getPositiveItem() instanceof TrueItem) { if (topLevelItem != null && topLevelItem != not) { not.addPositiveItem(getItemAsPositiveItem(topLevelItem, not)); return not; } else { return not; } } if (topLevelItem != null) { return topLevelItem; } else { return not; } }
System.out.println("not : " + not + " not poisitve: " + not.getPositiveItem());
private Item anyItemsBody(boolean topLevel) { Item topLevelItem = null; NotItem not = null; Item item; do { item = positiveItem(); if (item != null) { if (not == null) { not = new NotItem(); not.addPositiveItem(item); topLevelItem = combineItems(topLevelItem, not); } else { not.addPositiveItem(item); } } if (item == null) { item = negativeItem(); if (item != null) { if (not == null) { not = new NotItem(); not.addNegativeItem(item); topLevelItem = combineItems(topLevelItem, not); } else { not.addNegativeItem(item); } } } if (item == null) { item = compositeItem(); if (item != null) { if (topLevelItem == null) { topLevelItem = item; } else { topLevelItem = combineItems(topLevelItem, item); } } } if (item == null) { item = indexableItem().getFirst(); if (item != null) { if (topLevelItem == null) { topLevelItem = item; } else if (needNewORTopLevel(topLevelItem, item)) { CompositeItem newTop = new OrItem(); newTop.addItem(topLevelItem); newTop.addItem(item); topLevelItem = newTop; } else if (topLevelItem instanceof NotItem) { topLevelItem = combineItems(topLevelItem, item); } else { ((CompositeItem) topLevelItem).addItem(item); } } } if (topLevel && item == null) { tokens.skip(); } } while (tokens.hasNext() && (topLevel || item != null)); if (not != null && not.getItemCount() == 1) { if (topLevelItem == null || topLevelItem == not) { return not.removeItem(0); } else if (topLevelItem instanceof RankItem) { removeNot((RankItem) topLevelItem); return combineItems(topLevelItem, not.getPositiveItem()); } } if (not != null && not.getPositiveItem() instanceof TrueItem) { if (topLevelItem != null && topLevelItem != not) { not.addPositiveItem(getItemAsPositiveItem(topLevelItem, not)); return not; } else { return not; } } if (topLevelItem != null) { return topLevelItem; } else { return not; } }
class SimpleParser extends StructuredParser { protected SimpleParser(ParserEnvironment environment) { super(environment); } protected Item handleComposite(boolean topLevel) { return anyItems(false); } protected abstract Item negativeItem(); /** * A collection of one or more items. * More items are collected in the default composite - or. * If there's a explicit composite and some other terms, * a rank terms combines them */ protected Item anyItems(boolean topLevel) { int position = tokens.getPosition(); Item item = null; try { item = anyItemsBody(topLevel); return item; } finally { if (item == null) { tokens.setPosition(position); } } } /** Says whether we need a new top level OR item given the new item */ private boolean needNewORTopLevel(Item topLevelItem, Item item) { if (item == null) return false; if (topLevelItem instanceof TermItem) return true; if (topLevelItem instanceof PhraseItem) return true; if (topLevelItem instanceof BlockItem) return true; if ( topLevelItem instanceof AndItem) return true; return false; } /** Removes and returns the first <i>not</i> found in the composite, or returns null if there's none */ private NotItem removeNot(CompositeItem composite) { for (int i = 0; i < composite.getItemCount(); i++) { if (composite.getItem(i) instanceof NotItem) { return (NotItem) composite.removeItem(i); } } return null; } protected abstract Item combineItems(Item topLevelItem, Item item); protected Item positiveItem() { int position = tokens.getPosition(); Item item = null; try { if ( ! tokens.skipMultiple(PLUS)) { return null; } if (tokens.currentIsNoIgnore(SPACE)) { return null; } item = indexableItem().getFirst(); if (item == null) { item = compositeItem(); } if (item!=null) item.setProtected(true); return item; } finally { if (item == null) { tokens.setPosition(position); } } } /** * Returns the content of the given item as an item to be added as a positive item. 
* Used to turn a top level item into implicit positives when explicit positives * (+ items) are not found, but negatives are. */ private Item getItemAsPositiveItem(Item item, NotItem not) { if (!(item instanceof RankItem rank)) { return item; } int limit = rank.getItemCount(); int n = 0; while (n < limit) { if (rank.getItem(n) == not) { rank.removeItem(n); break; } n++; } if (rank.getItemCount() == 1) { return rank.getItem(0); } OrItem or = new OrItem(); for (Iterator<Item> i = rank.getItemIterator(); i.hasNext();) { or.addItem(i.next()); } return or; } }
class SimpleParser extends StructuredParser { protected SimpleParser(ParserEnvironment environment) { super(environment); } protected Item handleComposite(boolean topLevel) { return anyItems(false); } protected abstract Item negativeItem(); /** * A collection of one or more items. * More items are collected in the default composite - or. * If there's a explicit composite and some other terms, * a rank terms combines them */ protected Item anyItems(boolean topLevel) { int position = tokens.getPosition(); Item item = null; try { item = anyItemsBody(topLevel); return item; } finally { if (item == null) { tokens.setPosition(position); } } } /** Says whether we need a new top level OR item given the new item */ private boolean needNewORTopLevel(Item topLevelItem, Item item) { if (item == null) return false; if (topLevelItem instanceof TermItem) return true; if (topLevelItem instanceof PhraseItem) return true; if (topLevelItem instanceof BlockItem) return true; if ( topLevelItem instanceof AndItem) return true; return false; } /** Removes and returns the first <i>not</i> found in the composite, or returns null if there's none */ private NotItem removeNot(CompositeItem composite) { for (int i = 0; i < composite.getItemCount(); i++) { if (composite.getItem(i) instanceof NotItem) { return (NotItem) composite.removeItem(i); } } return null; } protected abstract Item combineItems(Item topLevelItem, Item item); protected Item positiveItem() { int position = tokens.getPosition(); Item item = null; try { if ( ! tokens.skipMultiple(PLUS)) { return null; } if (tokens.currentIsNoIgnore(SPACE)) { return null; } item = indexableItem().getFirst(); if (item == null) { item = compositeItem(); } if (item!=null) item.setProtected(true); return item; } finally { if (item == null) { tokens.setPosition(position); } } } /** * Returns the content of the given item as an item to be added as a positive item. 
* Used to turn a top level item into implicit positives when explicit positives * (+ items) are not found, but negatives are. */ private Item getItemAsPositiveItem(Item item, NotItem not) { if (!(item instanceof RankItem rank)) { return item; } int limit = rank.getItemCount(); int n = 0; while (n < limit) { if (rank.getItem(n) == not) { rank.removeItem(n); break; } n++; } if (rank.getItemCount() == 1) { return rank.getItem(0); } OrItem or = new OrItem(); for (Iterator<Item> i = rank.getItemIterator(); i.hasNext();) { or.addItem(i.next()); } return or; } }
This previously appears to have tested a (presumed legacy) update syntax with explicit `"key": foo, "value": bar` mappings. With these changes it's identical to `testMapStringToArrayOfInt()`. Consider either testing the explicit KV syntax (if we want to test it) or removing this test entirely (if we don't).
public void testOldMapStringToArrayOfInt() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testMapStringToArrayOfInt::whee", "fields": { "actualMapStringToArrayOfInt": { "bamse": [1, 2, 3] } } } """); FieldValue f = doc.getFieldValue("actualMapStringToArrayOfInt"); assertSame(MapFieldValue.class, f.getClass()); MapFieldValue<?, ?> m = (MapFieldValue<?, ?>) f; Array<?> a = (Array<?>) m.get(new StringFieldValue("bamse")); assertEquals(3, a.size()); assertEquals(new IntegerFieldValue(1), a.get(0)); assertEquals(new IntegerFieldValue(2), a.get(1)); assertEquals(new IntegerFieldValue(3), a.get(2)); }
"bamse": [1, 2, 3]
public void testOldMapStringToArrayOfInt() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testMapStringToArrayOfInt::whee", "fields": { "actualMapStringToArrayOfInt": [ { "key": "bamse", "value": [1, 2, 3] } ] } } """); FieldValue f = doc.getFieldValue("actualMapStringToArrayOfInt"); assertSame(MapFieldValue.class, f.getClass()); MapFieldValue<?, ?> m = (MapFieldValue<?, ?>) f; Array<?> a = (Array<?>) m.get(new StringFieldValue("bamse")); assertEquals(3, a.size()); assertEquals(new IntegerFieldValue(1), a.get(0)); assertEquals(new IntegerFieldValue(2), a.get(1)); assertEquals(new IntegerFieldValue(3), a.get(2)); }
class JsonReaderTestCase { private DocumentTypeManager types; private JsonFactory parserFactory; @Before public void setUp() throws Exception { parserFactory = new JsonFactory(); types = new DocumentTypeManager(); { DocumentType x = new DocumentType("smoke"); x.addField(new Field("something", DataType.STRING)); x.addField(new Field("nalle", DataType.STRING)); x.addField(new Field("field1", DataType.STRING)); x.addField(new Field("field2", DataType.STRING)); x.addField(new Field("int1", DataType.INT)); x.addField(new Field("flag", DataType.BOOL)); x.addField(new Field("tensor1", DataType.getTensor(TensorType.fromSpec("tensor(x{})")))); types.registerDocumentType(x); } { DocumentType x = new DocumentType("mirrors"); StructDataType woo = new StructDataType("woo"); woo.addField(new Field("sandra", DataType.STRING)); woo.addField(new Field("cloud", DataType.STRING)); x.addField(new Field("skuggsjaa", woo)); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testarray"); DataType d = new ArrayDataType(DataType.STRING); x.addField(new Field("actualarray", d)); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testset"); DataType d = new WeightedSetDataType(DataType.STRING, true, true); x.addField(new Field("actualset", d)); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testmap"); DataType d = new MapDataType(DataType.STRING, DataType.STRING); x.addField(new Field("actualmap", d)); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testraw"); DataType d = DataType.RAW; x.addField(new Field("actualraw", d)); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testMapStringToArrayOfInt"); DataType value = new ArrayDataType(DataType.INT); DataType d = new MapDataType(DataType.STRING, value); x.addField(new Field("actualMapStringToArrayOfInt", d)); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testArrayOfArrayOfInt"); DataType inner = new 
ArrayDataType(DataType.INT); DataType outer = new ArrayDataType(inner); x.addField(new Field("arrayOfArrayOfInt", outer)); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testsinglepos"); DataType d = PositionDataType.INSTANCE; x.addField(new Field("singlepos", d)); x.addField(new Field("geopos", new GeoPosType(8))); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testtensor"); x.addField(new Field("sparse_single_dimension_tensor", new TensorDataType(new TensorType.Builder().mapped("x").build()))); x.addField(new Field("sparse_tensor", new TensorDataType(new TensorType.Builder().mapped("x").mapped("y").build()))); x.addField(new Field("dense_tensor", new TensorDataType(new TensorType.Builder().indexed("x", 2).indexed("y", 3).build()))); x.addField(new Field("dense_int8_tensor", new TensorDataType(TensorType.fromSpec("tensor<int8>(x[2],y[3])")))); x.addField(new Field("dense_unbound_tensor", new TensorDataType(new TensorType.Builder().indexed("x").indexed("y").build()))); x.addField(new Field("mixed_tensor", new TensorDataType(new TensorType.Builder().mapped("x").indexed("y", 3).build()))); x.addField(new Field("mixed_bfloat16_tensor", new TensorDataType(TensorType.fromSpec("tensor<bfloat16>(x{},y[3])")))); x.addField(new Field("mixed_tensor_adv", new TensorDataType(new TensorType.Builder().mapped("x").mapped("y").mapped("z").indexed("a", 3).build()))); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testpredicate"); x.addField(new Field("boolean", DataType.PREDICATE)); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testint"); x.addField(new Field("integerfield", DataType.INT)); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testnull"); x.addField(new Field("intfield", DataType.INT)); x.addField(new Field("stringfield", DataType.STRING)); x.addField(new Field("arrayfield", new ArrayDataType(DataType.STRING))); x.addField(new Field("weightedsetfield", new 
WeightedSetDataType(DataType.STRING, true, true))); x.addField(new Field("mapfield", new MapDataType(DataType.STRING, DataType.STRING))); x.addField(new Field("tensorfield", new TensorDataType(new TensorType.Builder().indexed("x").build()))); types.registerDocumentType(x); } } @After public void tearDown() throws Exception { types = null; parserFactory = null; } private JsonReader createReader(String jsonInput) { InputStream input = new ByteArrayInputStream(Utf8.toBytes(jsonInput)); return new JsonReader(types, input, parserFactory); } @Test public void readSingleDocumentPut() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:smoke::doc1", "fields": { "something": "smoketest", "flag": true, "nalle": "bamse" } } """); smokeTestDoc(doc); } @Test public final void readSingleDocumentUpdate() throws IOException { DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:smoke::whee", "fields": { "something": { "assign": "orOther" } } } """); FieldUpdate f = doc.getFieldUpdate("something"); assertEquals(1, f.size()); assertTrue(f.getValueUpdate(0) instanceof AssignValueUpdate); assertEquals(new StringFieldValue("orOther"), f.getValueUpdate(0).getValue()); } @Test public void readClearField() throws IOException { DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:smoke::whee", "fields": { "int1": { "assign": null } } } """); FieldUpdate f = doc.getFieldUpdate("int1"); assertEquals(1, f.size()); assertTrue(f.getValueUpdate(0) instanceof ClearValueUpdate); assertNull(f.getValueUpdate(0).getValue()); } @Test public void smokeTest() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:smoke::doc1", "fields": { "something": "smoketest", "flag": true, "nalle": "bamse" } } """); smokeTestDoc(doc); } @Test public void docIdLookaheadTest() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:smoke::doc1", "fields": { "something": "smoketest", "flag": true, "nalle": "bamse" } } """); smokeTestDoc(doc); 
} @Test public void emptyDocTest() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:smoke::whee", "fields": { } }"""); assertEquals(new Document(types.getDocumentType("smoke"), new DocumentId("id:unittest:smoke::whee")), doc); } @Test public void testStruct() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:mirrors::whee", "fields": { "skuggsjaa": { "sandra": "person", "cloud": "another person" } } } """); FieldValue f = doc.getFieldValue(doc.getField("skuggsjaa")); assertSame(Struct.class, f.getClass()); Struct s = (Struct) f; assertEquals("person", ((StringFieldValue) s.getFieldValue("sandra")).getString()); } private DocumentUpdate parseUpdate(String json) throws IOException { InputStream rawDoc = new ByteArrayInputStream(Utf8.toBytes(json)); JsonReader r = new JsonReader(types, rawDoc, parserFactory); DocumentParseInfo parseInfo = r.parseDocument().get(); DocumentType docType = r.readDocumentType(parseInfo.documentId); DocumentUpdate update = new DocumentUpdate(docType, parseInfo.documentId); new VespaJsonDocumentReader(false).readUpdate(parseInfo.fieldsBuffer, update); return update; } @Test public void testStructUpdate() throws IOException { DocumentUpdate put = parseUpdate(""" { "update": "id:unittest:mirrors:g=test:whee", "create": true, "fields": { "skuggsjaa": { "assign": { "sandra": "person", "cloud": "another person" } } } } """); assertEquals(1, put.fieldUpdates().size()); FieldUpdate fu = put.fieldUpdates().iterator().next(); assertEquals(1, fu.getValueUpdates().size()); ValueUpdate vu = fu.getValueUpdate(0); assertTrue(vu instanceof AssignValueUpdate); AssignValueUpdate avu = (AssignValueUpdate) vu; assertTrue(avu.getValue() instanceof Struct); Struct s = (Struct) avu.getValue(); assertEquals(2, s.getFieldCount()); assertEquals(new StringFieldValue("person"), s.getFieldValue(s.getField("sandra"))); GrowableByteBuffer buf = new GrowableByteBuffer(); DocumentSerializer serializer = 
DocumentSerializerFactory.create6(buf); put.serialize(serializer); assertEquals(107, buf.position()); } @Test public final void testEmptyStructUpdate() throws IOException { DocumentUpdate put = parseUpdate(""" { "update": "id:unittest:mirrors:g=test:whee", "create": true, "fields": { "skuggsjaa": { "assign": { } } } } """); assertEquals(1, put.fieldUpdates().size()); FieldUpdate fu = put.fieldUpdates().iterator().next(); assertEquals(1, fu.getValueUpdates().size()); ValueUpdate vu = fu.getValueUpdate(0); assertTrue(vu instanceof AssignValueUpdate); AssignValueUpdate avu = (AssignValueUpdate) vu; assertTrue(avu.getValue() instanceof Struct); Struct s = (Struct) avu.getValue(); assertEquals(0, s.getFieldCount()); GrowableByteBuffer buf = new GrowableByteBuffer(); DocumentSerializer serializer = DocumentSerializerFactory.create6(buf); put.serialize(serializer); assertEquals(69, buf.position()); } @Test public void testUpdateArray() throws IOException { DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:testarray::whee", "fields": { "actualarray": { "add": [ "person", "another person" ] } } } """); checkSimpleArrayAdd(doc); } @Test public void testUpdateWeighted() throws IOException { DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:testset::whee", "fields": { "actualset": { "add": { "person": 37, "another person": 41 } } } } """); Map<String, Integer> weights = new HashMap<>(); FieldUpdate x = doc.getFieldUpdate("actualset"); for (ValueUpdate<?> v : x.getValueUpdates()) { AddValueUpdate adder = (AddValueUpdate) v; final String s = ((StringFieldValue) adder.getValue()).getString(); weights.put(s, adder.getWeight()); } assertEquals(2, weights.size()); final String o = "person"; final String o2 = "another person"; assertTrue(weights.containsKey(o)); assertTrue(weights.containsKey(o2)); assertEquals(Integer.valueOf(37), weights.get(o)); assertEquals(Integer.valueOf(41), weights.get(o2)); } @Test public void testUpdateMatch() throws IOException { 
DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:testset::whee", "fields": { "actualset": { "match": { "element": "person", "increment": 13 } } } } """); DocumentUpdate otherDoc = parseUpdate(""" { "update": "id:unittest:testset::whee", "fields": { "actualset": { "match": { "increment": 13, "element": "person" } } } }"""); assertEquals(doc, otherDoc); Map<String, Tuple2<Number, String>> matches = new HashMap<>(); FieldUpdate x = doc.getFieldUpdate("actualset"); for (ValueUpdate<?> v : x.getValueUpdates()) { MapValueUpdate adder = (MapValueUpdate) v; final String key = ((StringFieldValue) adder.getValue()) .getString(); String op = ((ArithmeticValueUpdate) adder.getUpdate()) .getOperator().toString(); Number n = ((ArithmeticValueUpdate) adder.getUpdate()).getOperand(); matches.put(key, new Tuple2<>(n, op)); } assertEquals(1, matches.size()); final String o = "person"; assertEquals("ADD", matches.get(o).second); assertEquals(Double.valueOf(13), matches.get(o).first); } @SuppressWarnings({ "cast", "unchecked", "rawtypes" }) @Test public void testArithmeticOperators() throws IOException { Tuple2[] operations = new Tuple2[] { new Tuple2<>(UPDATE_DECREMENT, ArithmeticValueUpdate.Operator.SUB), new Tuple2<>(UPDATE_DIVIDE, ArithmeticValueUpdate.Operator.DIV), new Tuple2<>(UPDATE_INCREMENT, ArithmeticValueUpdate.Operator.ADD), new Tuple2<>(UPDATE_MULTIPLY, ArithmeticValueUpdate.Operator.MUL) }; for (Tuple2<String, Operator> operator : operations) { DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:testset::whee", "fields": { "actualset": { "match": { "element": "person", "%s": 13 } } } } """.formatted(operator.first)); Map<String, Tuple2<Number, Operator>> matches = new HashMap<>(); FieldUpdate x = doc.getFieldUpdate("actualset"); for (ValueUpdate v : x.getValueUpdates()) { MapValueUpdate adder = (MapValueUpdate) v; final String key = ((StringFieldValue) adder.getValue()) .getString(); Operator op = ((ArithmeticValueUpdate) adder 
.getUpdate()).getOperator(); Number n = ((ArithmeticValueUpdate) adder.getUpdate()) .getOperand(); matches.put(key, new Tuple2<>(n, op)); } assertEquals(1, matches.size()); final String o = "person"; assertSame(operator.second, matches.get(o).second); assertEquals(Double.valueOf(13), matches.get(o).first); } } @SuppressWarnings("rawtypes") @Test public void testArrayIndexing() throws IOException { DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:testarray::whee", "fields": { "actualarray": { "match": { "element": 3, "assign": "nalle" } } } } """); Map<Number, String> matches = new HashMap<>(); FieldUpdate x = doc.getFieldUpdate("actualarray"); for (ValueUpdate v : x.getValueUpdates()) { MapValueUpdate adder = (MapValueUpdate) v; final Number key = ((IntegerFieldValue) adder.getValue()) .getNumber(); String op = ((StringFieldValue) adder.getUpdate() .getValue()).getString(); matches.put(key, op); } assertEquals(1, matches.size()); Number n = Integer.valueOf(3); assertEquals("nalle", matches.get(n)); } @Test public void testDocumentRemove() { JsonReader r = createReader(inputJson("{'remove': 'id:unittest:smoke::whee'}")); DocumentType docType = r.readDocumentType(new DocumentId("id:unittest:smoke::whee")); assertEquals("smoke", docType.getName()); } private Document docFromJson(String json) throws IOException { JsonReader r = createReader(json); DocumentParseInfo parseInfo = r.parseDocument().get(); DocumentType docType = r.readDocumentType(parseInfo.documentId); DocumentPut put = new DocumentPut(new Document(docType, parseInfo.documentId)); new VespaJsonDocumentReader(false).readPut(parseInfo.fieldsBuffer, put); return put.getDocument(); } @Test public void testWeightedSet() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testset::whee", "fields": { "actualset": { "nalle": 2, "tralle": 7 } } } """); FieldValue f = doc.getFieldValue(doc.getField("actualset")); assertSame(WeightedSet.class, f.getClass()); WeightedSet<?> w = 
(WeightedSet<?>) f; assertEquals(2, w.size()); assertEquals(Integer.valueOf(2), w.get(new StringFieldValue("nalle"))); assertEquals(Integer.valueOf(7), w.get(new StringFieldValue("tralle"))); } @Test public void testArray() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testarray::whee", "fields": { "actualarray": [ "nalle", "tralle" ] } } """); FieldValue f = doc.getFieldValue(doc.getField("actualarray")); assertSame(Array.class, f.getClass()); Array<?> a = (Array<?>) f; assertEquals(2, a.size()); assertEquals(new StringFieldValue("nalle"), a.get(0)); assertEquals(new StringFieldValue("tralle"), a.get(1)); } @Test public void testMap() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testmap::whee", "fields": { "actualmap": { "nalle": "kalle", "tralle": "skalle" } } } """); FieldValue f = doc.getFieldValue(doc.getField("actualmap")); assertSame(MapFieldValue.class, f.getClass()); MapFieldValue<?, ?> m = (MapFieldValue<?, ?>) f; assertEquals(2, m.size()); assertEquals(new StringFieldValue("kalle"), m.get(new StringFieldValue("nalle"))); assertEquals(new StringFieldValue("skalle"), m.get(new StringFieldValue("tralle"))); } @Test public void testOldMap() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testmap::whee", "fields": { "actualmap": [ { "key": "nalle", "value": "kalle" }, { "key": "tralle", "value": "skalle" } ] } } """); FieldValue f = doc.getFieldValue(doc.getField("actualmap")); assertSame(MapFieldValue.class, f.getClass()); MapFieldValue<?, ?> m = (MapFieldValue<?, ?>) f; assertEquals(2, m.size()); assertEquals(new StringFieldValue("kalle"), m.get(new StringFieldValue("nalle"))); assertEquals(new StringFieldValue("skalle"), m.get(new StringFieldValue("tralle"))); } @Test public void testPositionPositive() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testsinglepos::bamf", "fields": { "singlepos": "N63.429722;E10.393333" } } """); FieldValue f = 
doc.getFieldValue(doc.getField("singlepos")); assertSame(Struct.class, f.getClass()); assertEquals(10393333, PositionDataType.getXValue(f).getInteger()); assertEquals(63429722, PositionDataType.getYValue(f).getInteger()); } @Test public void testPositionOld() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testsinglepos::bamf", "fields": { "singlepos": { "x": 10393333, "y": 63429722 } } } """); FieldValue f = doc.getFieldValue(doc.getField("singlepos")); assertSame(Struct.class, f.getClass()); assertEquals(10393333, PositionDataType.getXValue(f).getInteger()); assertEquals(63429722, PositionDataType.getYValue(f).getInteger()); } @Test public void testGeoPosition() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testsinglepos::bamf", "fields": { "singlepos": { "lat": 63.429722, "lng": 10.393333 } } } """); FieldValue f = doc.getFieldValue(doc.getField("singlepos")); assertSame(Struct.class, f.getClass()); assertEquals(10393333, PositionDataType.getXValue(f).getInteger()); assertEquals(63429722, PositionDataType.getYValue(f).getInteger()); } @Test public void testGeoPositionNoAbbreviations() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testsinglepos::bamf", "fields": { "singlepos": { "latitude": 63.429722, "longitude": 10.393333 } } } """); FieldValue f = doc.getFieldValue(doc.getField("singlepos")); assertSame(Struct.class, f.getClass()); assertEquals(10393333, PositionDataType.getXValue(f).getInteger()); assertEquals(63429722, PositionDataType.getYValue(f).getInteger()); } @Test public void testPositionGeoPos() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testsinglepos::bamf", "fields": { "geopos": "N63.429722;E10.393333" } } """); FieldValue f = doc.getFieldValue(doc.getField("geopos")); assertSame(Struct.class, f.getClass()); assertEquals(10393333, PositionDataType.getXValue(f).getInteger()); assertEquals(63429722, 
PositionDataType.getYValue(f).getInteger()); assertEquals(PositionDataType.INSTANCE, f.getDataType()); } /* Old-style x/y struct form must also map onto the geopos type. */ @Test public void testPositionOldGeoPos() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testsinglepos::bamf", "fields": { "geopos": { "x": 10393333, "y": 63429722 } } } """); FieldValue f = doc.getFieldValue(doc.getField("geopos")); assertSame(Struct.class, f.getClass()); assertEquals(10393333, PositionDataType.getXValue(f).getInteger()); assertEquals(63429722, PositionDataType.getYValue(f).getInteger()); /* JUnit assertEquals takes (expected, actual): expected constant first. */ assertEquals(PositionDataType.INSTANCE, f.getDataType()); } /* lat/lng short form for a geopos field. */ @Test public void testGeoPositionGeoPos() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testsinglepos::bamf", "fields": { "geopos": { "lat": 63.429722, "lng": 10.393333 } } } """); FieldValue f = doc.getFieldValue(doc.getField("geopos")); assertSame(Struct.class, f.getClass()); assertEquals(10393333, PositionDataType.getXValue(f).getInteger()); assertEquals(63429722, PositionDataType.getYValue(f).getInteger()); /* Was asserted twice (once with swapped arguments); a single expected-first assertion suffices. */ assertEquals(PositionDataType.INSTANCE, f.getDataType()); } /* Negative (west/south) coordinates become negative micro-degrees. */ @Test public void testPositionNegative() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testsinglepos::bamf", "fields": { "singlepos": "W46.63;S23.55" } } """); FieldValue f = doc.getFieldValue(doc.getField("singlepos")); assertSame(Struct.class, f.getClass()); assertEquals(-46630000, PositionDataType.getXValue(f).getInteger()); assertEquals(-23550000, PositionDataType.getYValue(f).getInteger()); } /* Raw fields are fed as base64; round-trip a small payload. */ @Test public void testRaw() throws IOException { String base64 = new String(new JsonStringEncoder().quoteAsString( Base64.getEncoder().withoutPadding().encodeToString(Utf8.toBytes("smoketest")))); String s = fieldStringFromBase64RawContent(base64); assertEquals("smoketest", s); } @Test public void can_read_legacy_chunked_base64_raw_field_encoding() throws IOException { String expected = "this is a string 
with an impressive length. it's long enough to reach the end of the line, wow!"; String base64withDelims = "dGhpcyBpcyBhIHN0cmluZyB3aXRoIGFuIGltcHJlc3NpdmUgbGVuZ3RoLiBpdCdzIGxvbmcgZW5v\\r\\n" + "dWdoIHRvIHJlYWNoIHRoZSBlbmQgb2YgdGhlIGxpbmUsIHdvdyE=\\r\\n"; assertEquals(expected, fieldStringFromBase64RawContent(base64withDelims)); } private String fieldStringFromBase64RawContent(String base64data) throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testraw::whee", "fields": { "actualraw": "%s" } } """.formatted(base64data)); FieldValue f = doc.getFieldValue(doc.getField("actualraw")); assertSame(Raw.class, f.getClass()); Raw s = (Raw) f; return Utf8.toString(s.getByteBuffer()); } @Test public void testMapStringToArrayOfInt() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testMapStringToArrayOfInt::whee", "fields": { "actualMapStringToArrayOfInt": { "bamse": [1, 2, 3] } } } """); FieldValue f = doc.getFieldValue("actualMapStringToArrayOfInt"); assertSame(MapFieldValue.class, f.getClass()); MapFieldValue<?, ?> m = (MapFieldValue<?, ?>) f; Array<?> a = (Array<?>) m.get(new StringFieldValue("bamse")); assertEquals(3, a.size()); assertEquals(new IntegerFieldValue(1), a.get(0)); assertEquals(new IntegerFieldValue(2), a.get(1)); assertEquals(new IntegerFieldValue(3), a.get(2)); } @Test @Test public void testAssignToString() throws IOException { DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:smoke::whee", "fields": { "something": { "assign": "orOther" } } } """); FieldUpdate f = doc.getFieldUpdate("something"); assertEquals(1, f.size()); AssignValueUpdate a = (AssignValueUpdate) f.getValueUpdate(0); assertEquals(new StringFieldValue("orOther"), a.getValue()); } @Test public void testNestedArrayMatch() throws IOException { DocumentUpdate nested = parseUpdate(""" { "update": "id:unittest:testArrayOfArrayOfInt::whee", "fields": { "arrayOfArrayOfInt": { "match": { "element": 1, "match": { "element": 2, "assign": 3 } 
} } } } """); DocumentUpdate equivalent = parseUpdate(""" { "update": "id:unittest:testArrayOfArrayOfInt::whee", "fields": { "arrayOfArrayOfInt": { "match": { "match": { "assign": 3, "element": 2 }, "element": 1 } } } } """); assertEquals(nested, equivalent); assertEquals(1, nested.fieldUpdates().size()); FieldUpdate fu = nested.fieldUpdates().iterator().next(); assertEquals(1, fu.getValueUpdates().size()); MapValueUpdate mvu = (MapValueUpdate) fu.getValueUpdate(0); assertEquals(new IntegerFieldValue(1), mvu.getValue()); MapValueUpdate nvu = (MapValueUpdate) mvu.getUpdate(); assertEquals(new IntegerFieldValue(2), nvu.getValue()); AssignValueUpdate avu = (AssignValueUpdate) nvu.getUpdate(); assertEquals(new IntegerFieldValue(3), avu.getValue()); Document doc = docFromJson(""" { "put": "id:unittest:testArrayOfArrayOfInt::whee", "fields": { "arrayOfArrayOfInt": [ [1, 2, 3], [4, 5, 6] ] } } """); nested.applyTo(doc); Document expected = docFromJson(""" { "put": "id:unittest:testArrayOfArrayOfInt::whee", "fields": { "arrayOfArrayOfInt": [ [1, 2, 3], [4, 5, 3] ] } } """); assertEquals(expected, doc); } @Test public void testMatchCannotUpdateNestedFields() { assertEquals("Field type Map<string,Array<int>> not supported.", assertThrows(UnsupportedOperationException.class, () -> parseUpdate(""" { "update": "id:unittest:testMapStringToArrayOfInt::whee", "fields": { "actualMapStringToArrayOfInt": { "match": { "element": "bamse", "match": { "element": 1, "assign": 4 } } } } } """)).getMessage()); } @Test public void testMatchCannotAssignToNestedMap() { assertEquals("Field type Map<string,Array<int>> not supported.", assertThrows(UnsupportedOperationException.class, () -> parseUpdate(""" { "update": "id:unittest:testMapStringToArrayOfInt::whee", "fields": { "actualMapStringToArrayOfInt": { "match": { "element": "bamse", "assign": [1, 3, 4] } } } } """)).getMessage()); } @Test public void testMatchCannotAssignToMap() { assertEquals("Field type Map<string,string> not supported.", 
assertThrows(UnsupportedOperationException.class, () -> parseUpdate(""" { "update": "id:unittest:testmap::whee", "fields": { "actualmap": { "match": { "element": "bamse", "assign": "bar" } } } } """)).getMessage()); } @Test public void testAssignInsideArrayInMap() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testMapStringToArrayOfInt::whee", "fields": { "actualMapStringToArrayOfInt": { "bamse": [1, 2, 3] } } }"""); assertEquals(2, ((MapFieldValue<StringFieldValue, Array<IntegerFieldValue>>) doc.getFieldValue("actualMapStringToArrayOfInt")) .get(StringFieldValue.getFactory().create("bamse")).get(1).getInteger()); DocumentUpdate update = parseUpdate(""" { "update": "id:unittest:testMapStringToArrayOfInt::whee", "fields": { "actualMapStringToArrayOfInt{bamse}[1]": { "assign": 4 } } } """); assertEquals(1, update.fieldPathUpdates().size()); update.applyTo(doc); assertEquals(4, ((MapFieldValue<StringFieldValue, Array<IntegerFieldValue>>) doc.getFieldValue("actualMapStringToArrayOfInt")) .get(StringFieldValue.getFactory().create("bamse")).get(1).getInteger()); } @Test public void testAssignToArray() throws IOException { DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:testMapStringToArrayOfInt::whee", "fields": { "actualMapStringToArrayOfInt": { "assign": { "bamse": [1, 2, 3] } } } } """); FieldUpdate f = doc.getFieldUpdate("actualMapStringToArrayOfInt"); assertEquals(1, f.size()); AssignValueUpdate assign = (AssignValueUpdate) f.getValueUpdate(0); MapFieldValue<?, ?> m = (MapFieldValue<?, ?>) assign.getValue(); Array<?> a = (Array<?>) m.get(new StringFieldValue("bamse")); assertEquals(3, a.size()); assertEquals(new IntegerFieldValue(1), a.get(0)); assertEquals(new IntegerFieldValue(2), a.get(1)); assertEquals(new IntegerFieldValue(3), a.get(2)); } @Test public void testOldAssignToArray() throws IOException { DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:testMapStringToArrayOfInt::whee", "fields": { 
"actualMapStringToArrayOfInt": { "assign": [ { "key": "bamse", "value": [1, 2, 3] } ] } } } """); FieldUpdate f = doc.getFieldUpdate("actualMapStringToArrayOfInt"); assertEquals(1, f.size()); AssignValueUpdate assign = (AssignValueUpdate) f.getValueUpdate(0); MapFieldValue<?, ?> m = (MapFieldValue<?, ?>) assign.getValue(); Array<?> a = (Array<?>) m.get(new StringFieldValue("bamse")); assertEquals(3, a.size()); assertEquals(new IntegerFieldValue(1), a.get(0)); assertEquals(new IntegerFieldValue(2), a.get(1)); assertEquals(new IntegerFieldValue(3), a.get(2)); } @Test public void testAssignToWeightedSet() throws IOException { DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:testset::whee", "fields": { "actualset": { "assign": { "person": 37, "another person": 41 } } } } """); FieldUpdate x = doc.getFieldUpdate("actualset"); assertEquals(1, x.size()); AssignValueUpdate assign = (AssignValueUpdate) x.getValueUpdate(0); WeightedSet<?> w = (WeightedSet<?>) assign.getValue(); assertEquals(2, w.size()); assertEquals(Integer.valueOf(37), w.get(new StringFieldValue("person"))); assertEquals(Integer.valueOf(41), w.get(new StringFieldValue("another person"))); } @Test public void testCompleteFeed() { JsonReader r = createReader(""" [ { "put": "id:unittest:smoke::whee", "fields": { "something": "smoketest", "flag": true, "nalle": "bamse" } }, { "update": "id:unittest:testarray::whee", "fields": { "actualarray": { "add": [ "person", "another person" ] } } }, { "remove": "id:unittest:smoke::whee" } ] """); controlBasicFeed(r); } @Test public void testCompleteFeedWithCreateAndCondition() { JsonReader r = createReader(""" [ { "put": "id:unittest:smoke::whee", "fields": { "something": "smoketest", "flag": true, "nalle": "bamse" } }, { "condition":"bla", "update": "id:unittest:testarray::whee", "create":true, "fields": { "actualarray": { "add": [ "person", "another person" ] } } }, { "remove": "id:unittest:smoke::whee" } ] """); DocumentOperation d = r.next(); Document 
doc = ((DocumentPut) d).getDocument(); smokeTestDoc(doc); d = r.next(); DocumentUpdate update = (DocumentUpdate) d; checkSimpleArrayAdd(update); assertTrue(update.getCreateIfNonExistent()); assertEquals("bla", update.getCondition().getSelection()); d = r.next(); DocumentRemove remove = (DocumentRemove) d; assertEquals("smoke", remove.getId().getDocType()); assertNull(r.next()); } @Test public void testUpdateWithConditionAndCreateInDifferentOrdering() { int documentsCreated = 106; List<String> parts = Arrays.asList( "\"condition\":\"bla\"", "\"update\": \"id:unittest:testarray::whee\"", " \"fields\": { " + "\"actualarray\": { \"add\": [" + " \"person\",\"another person\"]}}", " \"create\":true"); Random random = new Random(42); StringBuilder documents = new StringBuilder("["); for (int x = 0; x < documentsCreated; x++) { Collections.shuffle(parts, random); documents.append("{").append(Joiner.on(",").join(parts)).append("}"); if (x < documentsCreated -1) { documents.append(","); } } documents.append("]"); InputStream rawDoc = new ByteArrayInputStream(Utf8.toBytes(documents.toString())); JsonReader r = new JsonReader(types, rawDoc, parserFactory); for (int x = 0; x < documentsCreated; x++) { DocumentUpdate update = (DocumentUpdate) r.next(); checkSimpleArrayAdd(update); assertTrue(update.getCreateIfNonExistent()); assertEquals("bla", update.getCondition().getSelection()); } assertNull(r.next()); } @Test public void testCreateIfNonExistentInPut() { JsonReader r = createReader(""" [ { "create":true, "fields": { "something": "smoketest", "nalle": "bamse" }, "put": "id:unittest:smoke::whee" } ] """); var op = r.next(); var put = (DocumentPut) op; assertTrue(put.getCreateIfNonExistent()); } @Test public void testCompleteFeedWithIdAfterFields() { JsonReader r = createReader(""" [ { "fields": { "something": "smoketest", "flag": true, "nalle": "bamse" }, "put": "id:unittest:smoke::whee" }, { "fields": { "actualarray": { "add": [ "person", "another person" ] } }, "update": 
"id:unittest:testarray::whee" }, { "remove": "id:unittest:smoke::whee" } ] """); controlBasicFeed(r); } protected void controlBasicFeed(JsonReader r) { DocumentOperation d = r.next(); Document doc = ((DocumentPut) d).getDocument(); smokeTestDoc(doc); d = r.next(); DocumentUpdate update = (DocumentUpdate) d; checkSimpleArrayAdd(update); d = r.next(); DocumentRemove remove = (DocumentRemove) d; assertEquals("smoke", remove.getId().getDocType()); assertNull(r.next()); } @Test public void testCompleteFeedWithEmptyDoc() { JsonReader r = createReader(""" [ { "put": "id:unittest:smoke::whee", "fields": {} }, { "update": "id:unittest:testarray::whee", "fields": {} }, { "remove": "id:unittest:smoke::whee" } ] """); DocumentOperation d = r.next(); Document doc = ((DocumentPut) d).getDocument(); assertEquals("smoke", doc.getId().getDocType()); d = r.next(); DocumentUpdate update = (DocumentUpdate) d; assertEquals("testarray", update.getId().getDocType()); d = r.next(); DocumentRemove remove = (DocumentRemove) d; assertEquals("smoke", remove.getId().getDocType()); assertNull(r.next()); } private void checkSimpleArrayAdd(DocumentUpdate update) { Set<String> toAdd = new HashSet<>(); FieldUpdate x = update.getFieldUpdate("actualarray"); for (ValueUpdate<?> v : x.getValueUpdates()) { AddValueUpdate adder = (AddValueUpdate) v; toAdd.add(((StringFieldValue) adder.getValue()).getString()); } assertEquals(2, toAdd.size()); assertTrue(toAdd.contains("person")); assertTrue(toAdd.contains("another person")); } private void smokeTestDoc(Document doc) { FieldValue boolField = doc.getFieldValue(doc.getField("flag")); assertSame(BoolFieldValue.class, boolField.getClass()); assertTrue((Boolean)boolField.getWrappedValue()); FieldValue stringField = doc.getFieldValue(doc.getField("nalle")); assertSame(StringFieldValue.class, stringField.getClass()); assertEquals("bamse", ((StringFieldValue) stringField).getString()); } @Test public void nonExistingFieldCausesException() throws IOException { 
Exception expected = assertThrows(IllegalArgumentException.class, () -> docFromJson(""" { "put": "id:unittest:smoke::whee", "fields": { "smething": "smoketest", "nalle": "bamse" } } """)); assertTrue(expected.getMessage().startsWith("No field 'smething' in the structure of type 'smoke'")); } @Test public void nonExistingFieldsCanBeIgnoredInPut() throws IOException { JsonReader r = createReader(""" { "put": "id:unittest:smoke::doc1", "fields": { "nonexisting1": "ignored value", "field1": "value1", "nonexisting2": { "blocks": { "a": [2.0, 3.0], "b": [4.0, 5.0] } }, "field2": "value2", "nonexisting3": { "cells": [ { "address": { "x": "x1" }, "value": 1.0 } ] }, "tensor1": { "cells": { "x1": 1.0 } }, "nonexisting4": "ignored value" } } """); DocumentParseInfo parseInfo = r.parseDocument().get(); DocumentType docType = r.readDocumentType(parseInfo.documentId); DocumentPut put = new DocumentPut(new Document(docType, parseInfo.documentId)); boolean fullyApplied = new VespaJsonDocumentReader(true).readPut(parseInfo.fieldsBuffer, put); assertFalse(fullyApplied); assertNull(put.getDocument().getField("nonexisting1")); assertEquals("value1", put.getDocument().getFieldValue("field1").toString()); assertNull(put.getDocument().getField("nonexisting2")); assertEquals("value2", put.getDocument().getFieldValue("field2").toString()); assertNull(put.getDocument().getField("nonexisting3")); assertEquals(Tensor.from("tensor(x{}):{{x:x1}:1.0}"), put.getDocument().getFieldValue("tensor1").getWrappedValue()); assertNull(put.getDocument().getField("nonexisting4")); } @Test public void nonExistingFieldsCanBeIgnoredInUpdate() throws IOException{ JsonReader r = createReader(""" { "update": "id:unittest:smoke::doc1", "fields": { "nonexisting1": { "assign": "ignored value" }, "field1": { "assign": "value1" }, "nonexisting2": { "assign": { "blocks": { "a":[2.0,3.0], "b":[4.0,5.0] } } }, "field2": { "assign": "value2" }, "nonexisting3": { "assign" : { "cells": [{"address": {"x": "x1"}, "value": 
1.0}] } }, "tensor1": {"assign": { "cells": {"x1": 1.0} } }, "nonexisting4": { "assign": "ignored value" } } } """); DocumentParseInfo parseInfo = r.parseDocument().get(); DocumentType docType = r.readDocumentType(parseInfo.documentId); DocumentUpdate update = new DocumentUpdate(docType, parseInfo.documentId); boolean fullyApplied = new VespaJsonDocumentReader(true).readUpdate(parseInfo.fieldsBuffer, update); assertFalse(fullyApplied); assertNull(update.getFieldUpdate("nonexisting1")); assertEquals("value1", update.getFieldUpdate("field1").getValueUpdates().get(0).getValue().getWrappedValue().toString()); assertNull(update.getFieldUpdate("nonexisting2")); assertEquals("value2", update.getFieldUpdate("field2").getValueUpdates().get(0).getValue().getWrappedValue().toString()); assertNull(update.getFieldUpdate("nonexisting3")); assertEquals(Tensor.from("tensor(x{}):{{x:x1}:1.0}"), update.getFieldUpdate("tensor1").getValueUpdates().get(0).getValue().getWrappedValue()); assertNull(update.getFieldUpdate("nonexisting4")); } @Test public void feedWithBasicErrorTest() { JsonReader r = createReader(""" [ { "put": "id:test:smoke::0", "fields": { "something": "foo" } }, { "put": "id:test:smoke::1", "fields": { "something": "foo" } }, { "put": "id:test:smoke::2", "fields": { "something": "foo" } }, ]"""); assertTrue(assertThrows(RuntimeException.class, () -> { while (r.next() != null); }) .getMessage().contains("JsonParseException")); } @Test public void idAsAliasForPutTest() throws IOException{ JsonReader r = createReader(""" { "id": "id:unittest:smoke::doc1", "fields": { "something": "smoketest", "flag": true, "nalle": "bamse" } } """); DocumentParseInfo parseInfo = r.parseDocument().get(); DocumentType docType = r.readDocumentType(parseInfo.documentId); DocumentPut put = new DocumentPut(new Document(docType, parseInfo.documentId)); boolean fullyApplied = new VespaJsonDocumentReader(false).readPut(parseInfo.fieldsBuffer, put); assertTrue(fullyApplied); 
smokeTestDoc(put.getDocument()); } private void testFeedWithTestAndSetCondition(String jsonDoc) { ByteArrayInputStream parseInfoDoc = new ByteArrayInputStream(Utf8.toBytes(jsonDoc)); JsonReader reader = new JsonReader(types, parseInfoDoc, parserFactory); int NUM_OPERATIONS_IN_FEED = 3; for (int i = 0; i < NUM_OPERATIONS_IN_FEED; i++) { DocumentOperation operation = reader.next(); assertTrue("A test and set condition should be present", operation.getCondition().isPresent()); assertEquals("DocumentOperation's test and set condition should be equal to the one in the JSON feed", "smoke.something == \"smoketest\"", operation.getCondition().getSelection()); } assertNull(reader.next()); } @Test public void testFeedWithTestAndSetConditionOrderingOne() { testFeedWithTestAndSetCondition(""" [ { "put": "id:unittest:smoke::whee", "condition": "smoke.something == \\"smoketest\\"", "fields": { "something": "smoketest", "nalle": "bamse" } }, { "update": "id:unittest:testarray::whee", "condition": "smoke.something == \\"smoketest\\"", "fields": { "actualarray": { "add": [ "person", "another person" ] } } }, { "remove": "id:unittest:smoke::whee", "condition": "smoke.something == \\"smoketest\\"" } ] """); } @Test public void testFeedWithTestAndSetConditionOrderingTwo() { testFeedWithTestAndSetCondition(""" [ { "condition": "smoke.something == \\"smoketest\\"", "put": "id:unittest:smoke::whee", "fields": { "something": "smoketest", "nalle": "bamse" } }, { "condition": "smoke.something == \\"smoketest\\"", "update": "id:unittest:testarray::whee", "fields": { "actualarray": { "add": [ "person", "another person" ] } } }, { "condition": "smoke.something == \\"smoketest\\"", "remove": "id:unittest:smoke::whee" } ] """); } @Test public void testFeedWithTestAndSetConditionOrderingThree() { testFeedWithTestAndSetCondition(""" [ { "put": "id:unittest:smoke::whee", "fields": { "something": "smoketest", "nalle": "bamse" }, "condition": "smoke.something == \\"smoketest\\"" }, { "update": 
"id:unittest:testarray::whee", "fields": { "actualarray": { "add": [ "person", "another person" ] } }, "condition": "smoke.something == \\"smoketest\\"" }, { "remove": "id:unittest:smoke::whee", "condition": "smoke.something == \\"smoketest\\"" } ] """); } @Test(expected = IllegalArgumentException.class) public void testInvalidFieldAfterFieldsFieldShouldFailParse() { String jsonData = """ [ { "put": "id:unittest:smoke::whee", "fields": { "something": "smoketest", "nalle": "bamse" }, "bjarne": "stroustrup" } ]"""; new JsonReader(types, jsonToInputStream(jsonData), parserFactory).next(); } @Test(expected = IllegalArgumentException.class) public void testInvalidFieldBeforeFieldsFieldShouldFailParse() { String jsonData = """ [ { "update": "id:unittest:testarray::whee", "what is this": "nothing to see here", "fields": { "actualarray": { "add": [ "person", "another person" ] } } } ]"""; new JsonReader(types, jsonToInputStream(jsonData), parserFactory).next(); } @Test(expected = IllegalArgumentException.class) public void testInvalidFieldWithoutFieldsFieldShouldFailParse() { String jsonData = """ [ { "remove": "id:unittest:smoke::whee", "what is love": "baby, do not hurt me... 
much } ]"""; new JsonReader(types, jsonToInputStream(jsonData), parserFactory).next(); } @Test public void testMissingOperation() { try { String jsonData = """ [ { "fields": { "actualarray": { "add": [ "person", "another person" ] } } } ]"""; new JsonReader(types, jsonToInputStream(jsonData), parserFactory).next(); fail("Expected exception"); } catch (IllegalArgumentException e) { assertEquals("Missing a document operation ('put', 'update' or 'remove')", e.getMessage()); } } @Test public void testMissingFieldsMapInPut() { try { String jsonData = """ [ { "put": "id:unittest:smoke::whee" } ]"""; new JsonReader(types, jsonToInputStream(jsonData), parserFactory).next(); fail("Expected exception"); } catch (IllegalArgumentException e) { assertEquals("put of document id:unittest:smoke::whee is missing a 'fields' map", e.getMessage()); } } @Test public void testMissingFieldsMapInUpdate() { try { String jsonData = """ [ { "update": "id:unittest:smoke::whee" } ]"""; new JsonReader(types, jsonToInputStream(jsonData), parserFactory).next(); fail("Expected exception"); } catch (IllegalArgumentException e) { assertEquals("Update of document id:unittest:smoke::whee is missing a 'fields' map", e.getMessage()); } } @Test public void testNullValues() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testnull::doc1", "fields": { "intfield": null, "stringfield": null, "arrayfield": null, "weightedsetfield": null, "mapfield": null, "tensorfield": null } } """); assertFieldValueNull(doc, "intfield"); assertFieldValueNull(doc, "stringfield"); assertFieldValueNull(doc, "arrayfield"); assertFieldValueNull(doc, "weightedsetfield"); assertFieldValueNull(doc, "mapfield"); assertFieldValueNull(doc, "tensorfield"); } @Test(expected=JsonReaderException.class) public void testNullArrayElement() throws IOException { docFromJson(""" { "put": "id:unittest:testnull::doc1", "fields": { "arrayfield": [ null ] } } """); fail(); } private void assertFieldValueNull(Document doc, 
String fieldName) { Field field = doc.getField(fieldName); assertNotNull(field); FieldValue fieldValue = doc.getFieldValue(field); assertNull(fieldValue); } static ByteArrayInputStream jsonToInputStream(String json) { return new ByteArrayInputStream(Utf8.toBytes(json)); } @Test public void testParsingWithoutTensorField() { Document doc = createPutWithoutTensor().getDocument(); assertEquals("testtensor", doc.getId().getDocType()); assertEquals("id:unittest:testtensor::0", doc.getId().toString()); TensorFieldValue fieldValue = (TensorFieldValue)doc.getFieldValue(doc.getField("sparse_tensor")); assertNull(fieldValue); } @Test public void testParsingOfEmptyTensor() { assertSparseTensorField("tensor(x{},y{}):{}", createPutWithSparseTensor("{}")); } @Test public void testParsingOfTensorWithEmptyCells() { assertSparseTensorField("tensor(x{},y{}):{}", createPutWithSparseTensor(inputJson("{ 'cells': [] }"))); } @Test public void testDisallowedDenseTensorShortFormWithoutValues() { assertCreatePutFails(inputJson("{ 'values': [] }"), "dense_tensor", "The 'values' array does not contain any values"); assertCreatePutFails(inputJson("{ 'values': '' }"), "dense_tensor", "The 'values' string does not contain any values"); } @Test public void testDisallowedMixedTensorShortFormWithoutValues() { assertCreatePutFails(inputJson("{\"blocks\":{ \"a\": [] } }"), "mixed_tensor", "Expected 3 values, but got 0"); assertCreatePutFails(inputJson("{\"blocks\":[ {\"address\":{\"x\":\"a\"}, \"values\": [] } ] }"), "mixed_tensor", "Expected 3 values, but got 0"); } @Test public void testParsingOfSparseTensorWithCells() { Tensor tensor = assertSparseTensorField("{{x:a,y:b}:2.0,{x:c,y:b}:3.0}}", createPutWithSparseTensor(""" { "type": "tensor(x{},y{})", "cells": [ { "address": { "x": "a", "y": "b" }, "value": 2.0 }, { "address": { "x": "c", "y": "b" }, "value": 3.0 } ] } """)); assertTrue(tensor instanceof MappedTensor); }
class JsonReaderTestCase { private DocumentTypeManager types; private JsonFactory parserFactory; @Before public void setUp() throws Exception { parserFactory = new JsonFactory(); types = new DocumentTypeManager(); { DocumentType x = new DocumentType("smoke"); x.addField(new Field("something", DataType.STRING)); x.addField(new Field("nalle", DataType.STRING)); x.addField(new Field("field1", DataType.STRING)); x.addField(new Field("field2", DataType.STRING)); x.addField(new Field("int1", DataType.INT)); x.addField(new Field("flag", DataType.BOOL)); x.addField(new Field("tensor1", DataType.getTensor(TensorType.fromSpec("tensor(x{})")))); types.registerDocumentType(x); } { DocumentType x = new DocumentType("mirrors"); StructDataType woo = new StructDataType("woo"); woo.addField(new Field("sandra", DataType.STRING)); woo.addField(new Field("cloud", DataType.STRING)); x.addField(new Field("skuggsjaa", woo)); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testarray"); DataType d = new ArrayDataType(DataType.STRING); x.addField(new Field("actualarray", d)); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testset"); DataType d = new WeightedSetDataType(DataType.STRING, true, true); x.addField(new Field("actualset", d)); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testmap"); DataType d = new MapDataType(DataType.STRING, DataType.STRING); x.addField(new Field("actualmap", d)); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testraw"); DataType d = DataType.RAW; x.addField(new Field("actualraw", d)); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testMapStringToArrayOfInt"); DataType value = new ArrayDataType(DataType.INT); DataType d = new MapDataType(DataType.STRING, value); x.addField(new Field("actualMapStringToArrayOfInt", d)); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testArrayOfArrayOfInt"); DataType inner = new 
ArrayDataType(DataType.INT); DataType outer = new ArrayDataType(inner); x.addField(new Field("arrayOfArrayOfInt", outer)); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testsinglepos"); DataType d = PositionDataType.INSTANCE; x.addField(new Field("singlepos", d)); x.addField(new Field("geopos", new GeoPosType(8))); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testtensor"); x.addField(new Field("sparse_single_dimension_tensor", new TensorDataType(new TensorType.Builder().mapped("x").build()))); x.addField(new Field("sparse_tensor", new TensorDataType(new TensorType.Builder().mapped("x").mapped("y").build()))); x.addField(new Field("dense_tensor", new TensorDataType(new TensorType.Builder().indexed("x", 2).indexed("y", 3).build()))); x.addField(new Field("dense_int8_tensor", new TensorDataType(TensorType.fromSpec("tensor<int8>(x[2],y[3])")))); x.addField(new Field("dense_unbound_tensor", new TensorDataType(new TensorType.Builder().indexed("x").indexed("y").build()))); x.addField(new Field("mixed_tensor", new TensorDataType(new TensorType.Builder().mapped("x").indexed("y", 3).build()))); x.addField(new Field("mixed_bfloat16_tensor", new TensorDataType(TensorType.fromSpec("tensor<bfloat16>(x{},y[3])")))); x.addField(new Field("mixed_tensor_adv", new TensorDataType(new TensorType.Builder().mapped("x").mapped("y").mapped("z").indexed("a", 3).build()))); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testpredicate"); x.addField(new Field("boolean", DataType.PREDICATE)); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testint"); x.addField(new Field("integerfield", DataType.INT)); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testnull"); x.addField(new Field("intfield", DataType.INT)); x.addField(new Field("stringfield", DataType.STRING)); x.addField(new Field("arrayfield", new ArrayDataType(DataType.STRING))); x.addField(new Field("weightedsetfield", new 
WeightedSetDataType(DataType.STRING, true, true))); x.addField(new Field("mapfield", new MapDataType(DataType.STRING, DataType.STRING))); x.addField(new Field("tensorfield", new TensorDataType(new TensorType.Builder().indexed("x").build()))); types.registerDocumentType(x); } } @After public void tearDown() throws Exception { types = null; parserFactory = null; } private JsonReader createReader(String jsonInput) { InputStream input = new ByteArrayInputStream(Utf8.toBytes(jsonInput)); return new JsonReader(types, input, parserFactory); } @Test public void readSingleDocumentPut() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:smoke::doc1", "fields": { "something": "smoketest", "flag": true, "nalle": "bamse" } } """); smokeTestDoc(doc); } @Test public final void readSingleDocumentUpdate() throws IOException { DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:smoke::whee", "fields": { "something": { "assign": "orOther" } } } """); FieldUpdate f = doc.getFieldUpdate("something"); assertEquals(1, f.size()); assertTrue(f.getValueUpdate(0) instanceof AssignValueUpdate); assertEquals(new StringFieldValue("orOther"), f.getValueUpdate(0).getValue()); } @Test public void readClearField() throws IOException { DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:smoke::whee", "fields": { "int1": { "assign": null } } } """); FieldUpdate f = doc.getFieldUpdate("int1"); assertEquals(1, f.size()); assertTrue(f.getValueUpdate(0) instanceof ClearValueUpdate); assertNull(f.getValueUpdate(0).getValue()); } @Test public void smokeTest() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:smoke::doc1", "fields": { "something": "smoketest", "flag": true, "nalle": "bamse" } } """); smokeTestDoc(doc); } @Test public void docIdLookaheadTest() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:smoke::doc1", "fields": { "something": "smoketest", "flag": true, "nalle": "bamse" } } """); smokeTestDoc(doc); 
} @Test public void emptyDocTest() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:smoke::whee", "fields": { } }"""); assertEquals(new Document(types.getDocumentType("smoke"), new DocumentId("id:unittest:smoke::whee")), doc); } @Test public void testStruct() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:mirrors::whee", "fields": { "skuggsjaa": { "sandra": "person", "cloud": "another person" } } } """); FieldValue f = doc.getFieldValue(doc.getField("skuggsjaa")); assertSame(Struct.class, f.getClass()); Struct s = (Struct) f; assertEquals("person", ((StringFieldValue) s.getFieldValue("sandra")).getString()); } private DocumentUpdate parseUpdate(String json) throws IOException { InputStream rawDoc = new ByteArrayInputStream(Utf8.toBytes(json)); JsonReader r = new JsonReader(types, rawDoc, parserFactory); DocumentParseInfo parseInfo = r.parseDocument().get(); DocumentType docType = r.readDocumentType(parseInfo.documentId); DocumentUpdate update = new DocumentUpdate(docType, parseInfo.documentId); new VespaJsonDocumentReader(false).readUpdate(parseInfo.fieldsBuffer, update); return update; } @Test public void testStructUpdate() throws IOException { DocumentUpdate put = parseUpdate(""" { "update": "id:unittest:mirrors:g=test:whee", "create": true, "fields": { "skuggsjaa": { "assign": { "sandra": "person", "cloud": "another person" } } } } """); assertEquals(1, put.fieldUpdates().size()); FieldUpdate fu = put.fieldUpdates().iterator().next(); assertEquals(1, fu.getValueUpdates().size()); ValueUpdate vu = fu.getValueUpdate(0); assertTrue(vu instanceof AssignValueUpdate); AssignValueUpdate avu = (AssignValueUpdate) vu; assertTrue(avu.getValue() instanceof Struct); Struct s = (Struct) avu.getValue(); assertEquals(2, s.getFieldCount()); assertEquals(new StringFieldValue("person"), s.getFieldValue(s.getField("sandra"))); GrowableByteBuffer buf = new GrowableByteBuffer(); DocumentSerializer serializer = 
DocumentSerializerFactory.create6(buf); put.serialize(serializer); assertEquals(107, buf.position()); } @Test public final void testEmptyStructUpdate() throws IOException { DocumentUpdate put = parseUpdate(""" { "update": "id:unittest:mirrors:g=test:whee", "create": true, "fields": { "skuggsjaa": { "assign": { } } } } """); assertEquals(1, put.fieldUpdates().size()); FieldUpdate fu = put.fieldUpdates().iterator().next(); assertEquals(1, fu.getValueUpdates().size()); ValueUpdate vu = fu.getValueUpdate(0); assertTrue(vu instanceof AssignValueUpdate); AssignValueUpdate avu = (AssignValueUpdate) vu; assertTrue(avu.getValue() instanceof Struct); Struct s = (Struct) avu.getValue(); assertEquals(0, s.getFieldCount()); GrowableByteBuffer buf = new GrowableByteBuffer(); DocumentSerializer serializer = DocumentSerializerFactory.create6(buf); put.serialize(serializer); assertEquals(69, buf.position()); } @Test public void testUpdateArray() throws IOException { DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:testarray::whee", "fields": { "actualarray": { "add": [ "person", "another person" ] } } } """); checkSimpleArrayAdd(doc); } @Test public void testUpdateWeighted() throws IOException { DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:testset::whee", "fields": { "actualset": { "add": { "person": 37, "another person": 41 } } } } """); Map<String, Integer> weights = new HashMap<>(); FieldUpdate x = doc.getFieldUpdate("actualset"); for (ValueUpdate<?> v : x.getValueUpdates()) { AddValueUpdate adder = (AddValueUpdate) v; final String s = ((StringFieldValue) adder.getValue()).getString(); weights.put(s, adder.getWeight()); } assertEquals(2, weights.size()); final String o = "person"; final String o2 = "another person"; assertTrue(weights.containsKey(o)); assertTrue(weights.containsKey(o2)); assertEquals(Integer.valueOf(37), weights.get(o)); assertEquals(Integer.valueOf(41), weights.get(o2)); } @Test public void testUpdateMatch() throws IOException { 
DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:testset::whee", "fields": { "actualset": { "match": { "element": "person", "increment": 13 } } } } """); DocumentUpdate otherDoc = parseUpdate(""" { "update": "id:unittest:testset::whee", "fields": { "actualset": { "match": { "increment": 13, "element": "person" } } } }"""); assertEquals(doc, otherDoc); Map<String, Tuple2<Number, String>> matches = new HashMap<>(); FieldUpdate x = doc.getFieldUpdate("actualset"); for (ValueUpdate<?> v : x.getValueUpdates()) { MapValueUpdate adder = (MapValueUpdate) v; final String key = ((StringFieldValue) adder.getValue()) .getString(); String op = ((ArithmeticValueUpdate) adder.getUpdate()) .getOperator().toString(); Number n = ((ArithmeticValueUpdate) adder.getUpdate()).getOperand(); matches.put(key, new Tuple2<>(n, op)); } assertEquals(1, matches.size()); final String o = "person"; assertEquals("ADD", matches.get(o).second); assertEquals(Double.valueOf(13), matches.get(o).first); } @SuppressWarnings({ "cast", "unchecked", "rawtypes" }) @Test public void testArithmeticOperators() throws IOException { Tuple2[] operations = new Tuple2[] { new Tuple2<>(UPDATE_DECREMENT, ArithmeticValueUpdate.Operator.SUB), new Tuple2<>(UPDATE_DIVIDE, ArithmeticValueUpdate.Operator.DIV), new Tuple2<>(UPDATE_INCREMENT, ArithmeticValueUpdate.Operator.ADD), new Tuple2<>(UPDATE_MULTIPLY, ArithmeticValueUpdate.Operator.MUL) }; for (Tuple2<String, Operator> operator : operations) { DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:testset::whee", "fields": { "actualset": { "match": { "element": "person", "%s": 13 } } } } """.formatted(operator.first)); Map<String, Tuple2<Number, Operator>> matches = new HashMap<>(); FieldUpdate x = doc.getFieldUpdate("actualset"); for (ValueUpdate v : x.getValueUpdates()) { MapValueUpdate adder = (MapValueUpdate) v; final String key = ((StringFieldValue) adder.getValue()) .getString(); Operator op = ((ArithmeticValueUpdate) adder 
.getUpdate()).getOperator(); Number n = ((ArithmeticValueUpdate) adder.getUpdate()) .getOperand(); matches.put(key, new Tuple2<>(n, op)); } assertEquals(1, matches.size()); final String o = "person"; assertSame(operator.second, matches.get(o).second); assertEquals(Double.valueOf(13), matches.get(o).first); } } @SuppressWarnings("rawtypes") @Test public void testArrayIndexing() throws IOException { DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:testarray::whee", "fields": { "actualarray": { "match": { "element": 3, "assign": "nalle" } } } } """); Map<Number, String> matches = new HashMap<>(); FieldUpdate x = doc.getFieldUpdate("actualarray"); for (ValueUpdate v : x.getValueUpdates()) { MapValueUpdate adder = (MapValueUpdate) v; final Number key = ((IntegerFieldValue) adder.getValue()) .getNumber(); String op = ((StringFieldValue) adder.getUpdate() .getValue()).getString(); matches.put(key, op); } assertEquals(1, matches.size()); Number n = Integer.valueOf(3); assertEquals("nalle", matches.get(n)); } @Test public void testDocumentRemove() { JsonReader r = createReader(inputJson("{'remove': 'id:unittest:smoke::whee'}")); DocumentType docType = r.readDocumentType(new DocumentId("id:unittest:smoke::whee")); assertEquals("smoke", docType.getName()); } private Document docFromJson(String json) throws IOException { JsonReader r = createReader(json); DocumentParseInfo parseInfo = r.parseDocument().get(); DocumentType docType = r.readDocumentType(parseInfo.documentId); DocumentPut put = new DocumentPut(new Document(docType, parseInfo.documentId)); new VespaJsonDocumentReader(false).readPut(parseInfo.fieldsBuffer, put); return put.getDocument(); } @Test public void testWeightedSet() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testset::whee", "fields": { "actualset": { "nalle": 2, "tralle": 7 } } } """); FieldValue f = doc.getFieldValue(doc.getField("actualset")); assertSame(WeightedSet.class, f.getClass()); WeightedSet<?> w = 
(WeightedSet<?>) f; assertEquals(2, w.size()); assertEquals(Integer.valueOf(2), w.get(new StringFieldValue("nalle"))); assertEquals(Integer.valueOf(7), w.get(new StringFieldValue("tralle"))); } @Test public void testArray() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testarray::whee", "fields": { "actualarray": [ "nalle", "tralle" ] } } """); FieldValue f = doc.getFieldValue(doc.getField("actualarray")); assertSame(Array.class, f.getClass()); Array<?> a = (Array<?>) f; assertEquals(2, a.size()); assertEquals(new StringFieldValue("nalle"), a.get(0)); assertEquals(new StringFieldValue("tralle"), a.get(1)); } @Test public void testMap() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testmap::whee", "fields": { "actualmap": { "nalle": "kalle", "tralle": "skalle" } } } """); FieldValue f = doc.getFieldValue(doc.getField("actualmap")); assertSame(MapFieldValue.class, f.getClass()); MapFieldValue<?, ?> m = (MapFieldValue<?, ?>) f; assertEquals(2, m.size()); assertEquals(new StringFieldValue("kalle"), m.get(new StringFieldValue("nalle"))); assertEquals(new StringFieldValue("skalle"), m.get(new StringFieldValue("tralle"))); } @Test public void testOldMap() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testmap::whee", "fields": { "actualmap": [ { "key": "nalle", "value": "kalle" }, { "key": "tralle", "value": "skalle" } ] } } """); FieldValue f = doc.getFieldValue(doc.getField("actualmap")); assertSame(MapFieldValue.class, f.getClass()); MapFieldValue<?, ?> m = (MapFieldValue<?, ?>) f; assertEquals(2, m.size()); assertEquals(new StringFieldValue("kalle"), m.get(new StringFieldValue("nalle"))); assertEquals(new StringFieldValue("skalle"), m.get(new StringFieldValue("tralle"))); } @Test public void testPositionPositive() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testsinglepos::bamf", "fields": { "singlepos": "N63.429722;E10.393333" } } """); FieldValue f = 
doc.getFieldValue(doc.getField("singlepos")); assertSame(Struct.class, f.getClass()); assertEquals(10393333, PositionDataType.getXValue(f).getInteger()); assertEquals(63429722, PositionDataType.getYValue(f).getInteger()); } @Test public void testPositionOld() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testsinglepos::bamf", "fields": { "singlepos": { "x": 10393333, "y": 63429722 } } } """); FieldValue f = doc.getFieldValue(doc.getField("singlepos")); assertSame(Struct.class, f.getClass()); assertEquals(10393333, PositionDataType.getXValue(f).getInteger()); assertEquals(63429722, PositionDataType.getYValue(f).getInteger()); } @Test public void testGeoPosition() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testsinglepos::bamf", "fields": { "singlepos": { "lat": 63.429722, "lng": 10.393333 } } } """); FieldValue f = doc.getFieldValue(doc.getField("singlepos")); assertSame(Struct.class, f.getClass()); assertEquals(10393333, PositionDataType.getXValue(f).getInteger()); assertEquals(63429722, PositionDataType.getYValue(f).getInteger()); } @Test public void testGeoPositionNoAbbreviations() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testsinglepos::bamf", "fields": { "singlepos": { "latitude": 63.429722, "longitude": 10.393333 } } } """); FieldValue f = doc.getFieldValue(doc.getField("singlepos")); assertSame(Struct.class, f.getClass()); assertEquals(10393333, PositionDataType.getXValue(f).getInteger()); assertEquals(63429722, PositionDataType.getYValue(f).getInteger()); } @Test public void testPositionGeoPos() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testsinglepos::bamf", "fields": { "geopos": "N63.429722;E10.393333" } } """); FieldValue f = doc.getFieldValue(doc.getField("geopos")); assertSame(Struct.class, f.getClass()); assertEquals(10393333, PositionDataType.getXValue(f).getInteger()); assertEquals(63429722, 
PositionDataType.getYValue(f).getInteger()); assertEquals(f.getDataType(), PositionDataType.INSTANCE); } @Test public void testPositionOldGeoPos() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testsinglepos::bamf", "fields": { "geopos": { "x": 10393333, "y": 63429722 } } } """); FieldValue f = doc.getFieldValue(doc.getField("geopos")); assertSame(Struct.class, f.getClass()); assertEquals(10393333, PositionDataType.getXValue(f).getInteger()); assertEquals(63429722, PositionDataType.getYValue(f).getInteger()); assertEquals(f.getDataType(), PositionDataType.INSTANCE); } @Test public void testGeoPositionGeoPos() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testsinglepos::bamf", "fields": { "geopos": { "lat": 63.429722, "lng": 10.393333 } } } """); FieldValue f = doc.getFieldValue(doc.getField("geopos")); assertSame(Struct.class, f.getClass()); assertEquals(10393333, PositionDataType.getXValue(f).getInteger()); assertEquals(63429722, PositionDataType.getYValue(f).getInteger()); assertEquals(f.getDataType(), PositionDataType.INSTANCE); assertEquals(PositionDataType.INSTANCE, f.getDataType()); } @Test public void testPositionNegative() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testsinglepos::bamf", "fields": { "singlepos": "W46.63;S23.55" } } """); FieldValue f = doc.getFieldValue(doc.getField("singlepos")); assertSame(Struct.class, f.getClass()); assertEquals(-46630000, PositionDataType.getXValue(f).getInteger()); assertEquals(-23550000, PositionDataType.getYValue(f).getInteger()); } @Test public void testRaw() throws IOException { String base64 = new String(new JsonStringEncoder().quoteAsString( Base64.getEncoder().withoutPadding().encodeToString(Utf8.toBytes("smoketest")))); String s = fieldStringFromBase64RawContent(base64); assertEquals("smoketest", s); } @Test public void can_read_legacy_chunked_base64_raw_field_encoding() throws IOException { String expected = "this is a string 
with an impressive length. it's long enough to reach the end of the line, wow!"; String base64withDelims = "dGhpcyBpcyBhIHN0cmluZyB3aXRoIGFuIGltcHJlc3NpdmUgbGVuZ3RoLiBpdCdzIGxvbmcgZW5v\\r\\n" + "dWdoIHRvIHJlYWNoIHRoZSBlbmQgb2YgdGhlIGxpbmUsIHdvdyE=\\r\\n"; assertEquals(expected, fieldStringFromBase64RawContent(base64withDelims)); } private String fieldStringFromBase64RawContent(String base64data) throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testraw::whee", "fields": { "actualraw": "%s" } } """.formatted(base64data)); FieldValue f = doc.getFieldValue(doc.getField("actualraw")); assertSame(Raw.class, f.getClass()); Raw s = (Raw) f; return Utf8.toString(s.getByteBuffer()); } @Test public void testMapStringToArrayOfInt() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testMapStringToArrayOfInt::whee", "fields": { "actualMapStringToArrayOfInt": { "bamse": [1, 2, 3] } } } """); FieldValue f = doc.getFieldValue("actualMapStringToArrayOfInt"); assertSame(MapFieldValue.class, f.getClass()); MapFieldValue<?, ?> m = (MapFieldValue<?, ?>) f; Array<?> a = (Array<?>) m.get(new StringFieldValue("bamse")); assertEquals(3, a.size()); assertEquals(new IntegerFieldValue(1), a.get(0)); assertEquals(new IntegerFieldValue(2), a.get(1)); assertEquals(new IntegerFieldValue(3), a.get(2)); } @Test @Test public void testAssignToString() throws IOException { DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:smoke::whee", "fields": { "something": { "assign": "orOther" } } } """); FieldUpdate f = doc.getFieldUpdate("something"); assertEquals(1, f.size()); AssignValueUpdate a = (AssignValueUpdate) f.getValueUpdate(0); assertEquals(new StringFieldValue("orOther"), a.getValue()); } @Test public void testNestedArrayMatch() throws IOException { DocumentUpdate nested = parseUpdate(""" { "update": "id:unittest:testArrayOfArrayOfInt::whee", "fields": { "arrayOfArrayOfInt": { "match": { "element": 1, "match": { "element": 2, "assign": 3 } 
} } } } """); DocumentUpdate equivalent = parseUpdate(""" { "update": "id:unittest:testArrayOfArrayOfInt::whee", "fields": { "arrayOfArrayOfInt": { "match": { "match": { "assign": 3, "element": 2 }, "element": 1 } } } } """); assertEquals(nested, equivalent); assertEquals(1, nested.fieldUpdates().size()); FieldUpdate fu = nested.fieldUpdates().iterator().next(); assertEquals(1, fu.getValueUpdates().size()); MapValueUpdate mvu = (MapValueUpdate) fu.getValueUpdate(0); assertEquals(new IntegerFieldValue(1), mvu.getValue()); MapValueUpdate nvu = (MapValueUpdate) mvu.getUpdate(); assertEquals(new IntegerFieldValue(2), nvu.getValue()); AssignValueUpdate avu = (AssignValueUpdate) nvu.getUpdate(); assertEquals(new IntegerFieldValue(3), avu.getValue()); Document doc = docFromJson(""" { "put": "id:unittest:testArrayOfArrayOfInt::whee", "fields": { "arrayOfArrayOfInt": [ [1, 2, 3], [4, 5, 6] ] } } """); nested.applyTo(doc); Document expected = docFromJson(""" { "put": "id:unittest:testArrayOfArrayOfInt::whee", "fields": { "arrayOfArrayOfInt": [ [1, 2, 3], [4, 5, 3] ] } } """); assertEquals(expected, doc); } @Test public void testMatchCannotUpdateNestedFields() { assertEquals("Field type Map<string,Array<int>> not supported.", assertThrows(UnsupportedOperationException.class, () -> parseUpdate(""" { "update": "id:unittest:testMapStringToArrayOfInt::whee", "fields": { "actualMapStringToArrayOfInt": { "match": { "element": "bamse", "match": { "element": 1, "assign": 4 } } } } } """)).getMessage()); } @Test public void testMatchCannotAssignToNestedMap() { assertEquals("Field type Map<string,Array<int>> not supported.", assertThrows(UnsupportedOperationException.class, () -> parseUpdate(""" { "update": "id:unittest:testMapStringToArrayOfInt::whee", "fields": { "actualMapStringToArrayOfInt": { "match": { "element": "bamse", "assign": [1, 3, 4] } } } } """)).getMessage()); } @Test public void testMatchCannotAssignToMap() { assertEquals("Field type Map<string,string> not supported.", 
assertThrows(UnsupportedOperationException.class, () -> parseUpdate(""" { "update": "id:unittest:testmap::whee", "fields": { "actualmap": { "match": { "element": "bamse", "assign": "bar" } } } } """)).getMessage()); } @Test public void testAssignInsideArrayInMap() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testMapStringToArrayOfInt::whee", "fields": { "actualMapStringToArrayOfInt": { "bamse": [1, 2, 3] } } }"""); assertEquals(2, ((MapFieldValue<StringFieldValue, Array<IntegerFieldValue>>) doc.getFieldValue("actualMapStringToArrayOfInt")) .get(StringFieldValue.getFactory().create("bamse")).get(1).getInteger()); DocumentUpdate update = parseUpdate(""" { "update": "id:unittest:testMapStringToArrayOfInt::whee", "fields": { "actualMapStringToArrayOfInt{bamse}[1]": { "assign": 4 } } } """); assertEquals(1, update.fieldPathUpdates().size()); update.applyTo(doc); assertEquals(4, ((MapFieldValue<StringFieldValue, Array<IntegerFieldValue>>) doc.getFieldValue("actualMapStringToArrayOfInt")) .get(StringFieldValue.getFactory().create("bamse")).get(1).getInteger()); } @Test public void testAssignToArray() throws IOException { DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:testMapStringToArrayOfInt::whee", "fields": { "actualMapStringToArrayOfInt": { "assign": { "bamse": [1, 2, 3] } } } } """); FieldUpdate f = doc.getFieldUpdate("actualMapStringToArrayOfInt"); assertEquals(1, f.size()); AssignValueUpdate assign = (AssignValueUpdate) f.getValueUpdate(0); MapFieldValue<?, ?> m = (MapFieldValue<?, ?>) assign.getValue(); Array<?> a = (Array<?>) m.get(new StringFieldValue("bamse")); assertEquals(3, a.size()); assertEquals(new IntegerFieldValue(1), a.get(0)); assertEquals(new IntegerFieldValue(2), a.get(1)); assertEquals(new IntegerFieldValue(3), a.get(2)); } @Test public void testOldAssignToArray() throws IOException { DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:testMapStringToArrayOfInt::whee", "fields": { 
"actualMapStringToArrayOfInt": { "assign": [ { "key": "bamse", "value": [1, 2, 3] } ] } } } """); FieldUpdate f = doc.getFieldUpdate("actualMapStringToArrayOfInt"); assertEquals(1, f.size()); AssignValueUpdate assign = (AssignValueUpdate) f.getValueUpdate(0); MapFieldValue<?, ?> m = (MapFieldValue<?, ?>) assign.getValue(); Array<?> a = (Array<?>) m.get(new StringFieldValue("bamse")); assertEquals(3, a.size()); assertEquals(new IntegerFieldValue(1), a.get(0)); assertEquals(new IntegerFieldValue(2), a.get(1)); assertEquals(new IntegerFieldValue(3), a.get(2)); } @Test public void testAssignToWeightedSet() throws IOException { DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:testset::whee", "fields": { "actualset": { "assign": { "person": 37, "another person": 41 } } } } """); FieldUpdate x = doc.getFieldUpdate("actualset"); assertEquals(1, x.size()); AssignValueUpdate assign = (AssignValueUpdate) x.getValueUpdate(0); WeightedSet<?> w = (WeightedSet<?>) assign.getValue(); assertEquals(2, w.size()); assertEquals(Integer.valueOf(37), w.get(new StringFieldValue("person"))); assertEquals(Integer.valueOf(41), w.get(new StringFieldValue("another person"))); } @Test public void testCompleteFeed() { JsonReader r = createReader(""" [ { "put": "id:unittest:smoke::whee", "fields": { "something": "smoketest", "flag": true, "nalle": "bamse" } }, { "update": "id:unittest:testarray::whee", "fields": { "actualarray": { "add": [ "person", "another person" ] } } }, { "remove": "id:unittest:smoke::whee" } ] """); controlBasicFeed(r); } @Test public void testCompleteFeedWithCreateAndCondition() { JsonReader r = createReader(""" [ { "put": "id:unittest:smoke::whee", "fields": { "something": "smoketest", "flag": true, "nalle": "bamse" } }, { "condition":"bla", "update": "id:unittest:testarray::whee", "create":true, "fields": { "actualarray": { "add": [ "person", "another person" ] } } }, { "remove": "id:unittest:smoke::whee" } ] """); DocumentOperation d = r.next(); Document 
doc = ((DocumentPut) d).getDocument(); smokeTestDoc(doc); d = r.next(); DocumentUpdate update = (DocumentUpdate) d; checkSimpleArrayAdd(update); assertTrue(update.getCreateIfNonExistent()); assertEquals("bla", update.getCondition().getSelection()); d = r.next(); DocumentRemove remove = (DocumentRemove) d; assertEquals("smoke", remove.getId().getDocType()); assertNull(r.next()); } @Test public void testUpdateWithConditionAndCreateInDifferentOrdering() { int documentsCreated = 106; List<String> parts = Arrays.asList( "\"condition\":\"bla\"", "\"update\": \"id:unittest:testarray::whee\"", " \"fields\": { " + "\"actualarray\": { \"add\": [" + " \"person\",\"another person\"]}}", " \"create\":true"); Random random = new Random(42); StringBuilder documents = new StringBuilder("["); for (int x = 0; x < documentsCreated; x++) { Collections.shuffle(parts, random); documents.append("{").append(Joiner.on(",").join(parts)).append("}"); if (x < documentsCreated -1) { documents.append(","); } } documents.append("]"); InputStream rawDoc = new ByteArrayInputStream(Utf8.toBytes(documents.toString())); JsonReader r = new JsonReader(types, rawDoc, parserFactory); for (int x = 0; x < documentsCreated; x++) { DocumentUpdate update = (DocumentUpdate) r.next(); checkSimpleArrayAdd(update); assertTrue(update.getCreateIfNonExistent()); assertEquals("bla", update.getCondition().getSelection()); } assertNull(r.next()); } @Test public void testCreateIfNonExistentInPut() { JsonReader r = createReader(""" [ { "create":true, "fields": { "something": "smoketest", "nalle": "bamse" }, "put": "id:unittest:smoke::whee" } ] """); var op = r.next(); var put = (DocumentPut) op; assertTrue(put.getCreateIfNonExistent()); } @Test public void testCompleteFeedWithIdAfterFields() { JsonReader r = createReader(""" [ { "fields": { "something": "smoketest", "flag": true, "nalle": "bamse" }, "put": "id:unittest:smoke::whee" }, { "fields": { "actualarray": { "add": [ "person", "another person" ] } }, "update": 
"id:unittest:testarray::whee" }, { "remove": "id:unittest:smoke::whee" } ] """); controlBasicFeed(r); } protected void controlBasicFeed(JsonReader r) { DocumentOperation d = r.next(); Document doc = ((DocumentPut) d).getDocument(); smokeTestDoc(doc); d = r.next(); DocumentUpdate update = (DocumentUpdate) d; checkSimpleArrayAdd(update); d = r.next(); DocumentRemove remove = (DocumentRemove) d; assertEquals("smoke", remove.getId().getDocType()); assertNull(r.next()); } @Test public void testCompleteFeedWithEmptyDoc() { JsonReader r = createReader(""" [ { "put": "id:unittest:smoke::whee", "fields": {} }, { "update": "id:unittest:testarray::whee", "fields": {} }, { "remove": "id:unittest:smoke::whee" } ] """); DocumentOperation d = r.next(); Document doc = ((DocumentPut) d).getDocument(); assertEquals("smoke", doc.getId().getDocType()); d = r.next(); DocumentUpdate update = (DocumentUpdate) d; assertEquals("testarray", update.getId().getDocType()); d = r.next(); DocumentRemove remove = (DocumentRemove) d; assertEquals("smoke", remove.getId().getDocType()); assertNull(r.next()); } private void checkSimpleArrayAdd(DocumentUpdate update) { Set<String> toAdd = new HashSet<>(); FieldUpdate x = update.getFieldUpdate("actualarray"); for (ValueUpdate<?> v : x.getValueUpdates()) { AddValueUpdate adder = (AddValueUpdate) v; toAdd.add(((StringFieldValue) adder.getValue()).getString()); } assertEquals(2, toAdd.size()); assertTrue(toAdd.contains("person")); assertTrue(toAdd.contains("another person")); } private void smokeTestDoc(Document doc) { FieldValue boolField = doc.getFieldValue(doc.getField("flag")); assertSame(BoolFieldValue.class, boolField.getClass()); assertTrue((Boolean)boolField.getWrappedValue()); FieldValue stringField = doc.getFieldValue(doc.getField("nalle")); assertSame(StringFieldValue.class, stringField.getClass()); assertEquals("bamse", ((StringFieldValue) stringField).getString()); } @Test public void nonExistingFieldCausesException() throws IOException { 
Exception expected = assertThrows(IllegalArgumentException.class, () -> docFromJson(""" { "put": "id:unittest:smoke::whee", "fields": { "smething": "smoketest", "nalle": "bamse" } } """)); assertTrue(expected.getMessage().startsWith("No field 'smething' in the structure of type 'smoke'")); } @Test public void nonExistingFieldsCanBeIgnoredInPut() throws IOException { JsonReader r = createReader(""" { "put": "id:unittest:smoke::doc1", "fields": { "nonexisting1": "ignored value", "field1": "value1", "nonexisting2": { "blocks": { "a": [2.0, 3.0], "b": [4.0, 5.0] } }, "field2": "value2", "nonexisting3": { "cells": [ { "address": { "x": "x1" }, "value": 1.0 } ] }, "tensor1": { "cells": { "x1": 1.0 } }, "nonexisting4": "ignored value" } } """); DocumentParseInfo parseInfo = r.parseDocument().get(); DocumentType docType = r.readDocumentType(parseInfo.documentId); DocumentPut put = new DocumentPut(new Document(docType, parseInfo.documentId)); boolean fullyApplied = new VespaJsonDocumentReader(true).readPut(parseInfo.fieldsBuffer, put); assertFalse(fullyApplied); assertNull(put.getDocument().getField("nonexisting1")); assertEquals("value1", put.getDocument().getFieldValue("field1").toString()); assertNull(put.getDocument().getField("nonexisting2")); assertEquals("value2", put.getDocument().getFieldValue("field2").toString()); assertNull(put.getDocument().getField("nonexisting3")); assertEquals(Tensor.from("tensor(x{}):{{x:x1}:1.0}"), put.getDocument().getFieldValue("tensor1").getWrappedValue()); assertNull(put.getDocument().getField("nonexisting4")); } @Test public void nonExistingFieldsCanBeIgnoredInUpdate() throws IOException{ JsonReader r = createReader(""" { "update": "id:unittest:smoke::doc1", "fields": { "nonexisting1": { "assign": "ignored value" }, "field1": { "assign": "value1" }, "nonexisting2": { "assign": { "blocks": { "a":[2.0,3.0], "b":[4.0,5.0] } } }, "field2": { "assign": "value2" }, "nonexisting3": { "assign" : { "cells": [{"address": {"x": "x1"}, "value": 
1.0}] } }, "tensor1": {"assign": { "cells": {"x1": 1.0} } }, "nonexisting4": { "assign": "ignored value" } } } """); DocumentParseInfo parseInfo = r.parseDocument().get(); DocumentType docType = r.readDocumentType(parseInfo.documentId); DocumentUpdate update = new DocumentUpdate(docType, parseInfo.documentId); boolean fullyApplied = new VespaJsonDocumentReader(true).readUpdate(parseInfo.fieldsBuffer, update); assertFalse(fullyApplied); assertNull(update.getFieldUpdate("nonexisting1")); assertEquals("value1", update.getFieldUpdate("field1").getValueUpdates().get(0).getValue().getWrappedValue().toString()); assertNull(update.getFieldUpdate("nonexisting2")); assertEquals("value2", update.getFieldUpdate("field2").getValueUpdates().get(0).getValue().getWrappedValue().toString()); assertNull(update.getFieldUpdate("nonexisting3")); assertEquals(Tensor.from("tensor(x{}):{{x:x1}:1.0}"), update.getFieldUpdate("tensor1").getValueUpdates().get(0).getValue().getWrappedValue()); assertNull(update.getFieldUpdate("nonexisting4")); } @Test public void feedWithBasicErrorTest() { JsonReader r = createReader(""" [ { "put": "id:test:smoke::0", "fields": { "something": "foo" } }, { "put": "id:test:smoke::1", "fields": { "something": "foo" } }, { "put": "id:test:smoke::2", "fields": { "something": "foo" } }, ]"""); assertTrue(assertThrows(RuntimeException.class, () -> { while (r.next() != null); }) .getMessage().contains("JsonParseException")); } @Test public void idAsAliasForPutTest() throws IOException{ JsonReader r = createReader(""" { "id": "id:unittest:smoke::doc1", "fields": { "something": "smoketest", "flag": true, "nalle": "bamse" } } """); DocumentParseInfo parseInfo = r.parseDocument().get(); DocumentType docType = r.readDocumentType(parseInfo.documentId); DocumentPut put = new DocumentPut(new Document(docType, parseInfo.documentId)); boolean fullyApplied = new VespaJsonDocumentReader(false).readPut(parseInfo.fieldsBuffer, put); assertTrue(fullyApplied); 
smokeTestDoc(put.getDocument()); } private void testFeedWithTestAndSetCondition(String jsonDoc) { ByteArrayInputStream parseInfoDoc = new ByteArrayInputStream(Utf8.toBytes(jsonDoc)); JsonReader reader = new JsonReader(types, parseInfoDoc, parserFactory); int NUM_OPERATIONS_IN_FEED = 3; for (int i = 0; i < NUM_OPERATIONS_IN_FEED; i++) { DocumentOperation operation = reader.next(); assertTrue("A test and set condition should be present", operation.getCondition().isPresent()); assertEquals("DocumentOperation's test and set condition should be equal to the one in the JSON feed", "smoke.something == \"smoketest\"", operation.getCondition().getSelection()); } assertNull(reader.next()); } @Test public void testFeedWithTestAndSetConditionOrderingOne() { testFeedWithTestAndSetCondition(""" [ { "put": "id:unittest:smoke::whee", "condition": "smoke.something == \\"smoketest\\"", "fields": { "something": "smoketest", "nalle": "bamse" } }, { "update": "id:unittest:testarray::whee", "condition": "smoke.something == \\"smoketest\\"", "fields": { "actualarray": { "add": [ "person", "another person" ] } } }, { "remove": "id:unittest:smoke::whee", "condition": "smoke.something == \\"smoketest\\"" } ] """); } @Test public void testFeedWithTestAndSetConditionOrderingTwo() { testFeedWithTestAndSetCondition(""" [ { "condition": "smoke.something == \\"smoketest\\"", "put": "id:unittest:smoke::whee", "fields": { "something": "smoketest", "nalle": "bamse" } }, { "condition": "smoke.something == \\"smoketest\\"", "update": "id:unittest:testarray::whee", "fields": { "actualarray": { "add": [ "person", "another person" ] } } }, { "condition": "smoke.something == \\"smoketest\\"", "remove": "id:unittest:smoke::whee" } ] """); } @Test public void testFeedWithTestAndSetConditionOrderingThree() { testFeedWithTestAndSetCondition(""" [ { "put": "id:unittest:smoke::whee", "fields": { "something": "smoketest", "nalle": "bamse" }, "condition": "smoke.something == \\"smoketest\\"" }, { "update": 
"id:unittest:testarray::whee", "fields": { "actualarray": { "add": [ "person", "another person" ] } }, "condition": "smoke.something == \\"smoketest\\"" }, { "remove": "id:unittest:smoke::whee", "condition": "smoke.something == \\"smoketest\\"" } ] """); } @Test(expected = IllegalArgumentException.class) public void testInvalidFieldAfterFieldsFieldShouldFailParse() { String jsonData = """ [ { "put": "id:unittest:smoke::whee", "fields": { "something": "smoketest", "nalle": "bamse" }, "bjarne": "stroustrup" } ]"""; new JsonReader(types, jsonToInputStream(jsonData), parserFactory).next(); } @Test(expected = IllegalArgumentException.class) public void testInvalidFieldBeforeFieldsFieldShouldFailParse() { String jsonData = """ [ { "update": "id:unittest:testarray::whee", "what is this": "nothing to see here", "fields": { "actualarray": { "add": [ "person", "another person" ] } } } ]"""; new JsonReader(types, jsonToInputStream(jsonData), parserFactory).next(); } @Test(expected = IllegalArgumentException.class) public void testInvalidFieldWithoutFieldsFieldShouldFailParse() { String jsonData = """ [ { "remove": "id:unittest:smoke::whee", "what is love": "baby, do not hurt me... 
much } ]"""; new JsonReader(types, jsonToInputStream(jsonData), parserFactory).next(); } @Test public void testMissingOperation() { try { String jsonData = """ [ { "fields": { "actualarray": { "add": [ "person", "another person" ] } } } ]"""; new JsonReader(types, jsonToInputStream(jsonData), parserFactory).next(); fail("Expected exception"); } catch (IllegalArgumentException e) { assertEquals("Missing a document operation ('put', 'update' or 'remove')", e.getMessage()); } } @Test public void testMissingFieldsMapInPut() { try { String jsonData = """ [ { "put": "id:unittest:smoke::whee" } ]"""; new JsonReader(types, jsonToInputStream(jsonData), parserFactory).next(); fail("Expected exception"); } catch (IllegalArgumentException e) { assertEquals("put of document id:unittest:smoke::whee is missing a 'fields' map", e.getMessage()); } } @Test public void testMissingFieldsMapInUpdate() { try { String jsonData = """ [ { "update": "id:unittest:smoke::whee" } ]"""; new JsonReader(types, jsonToInputStream(jsonData), parserFactory).next(); fail("Expected exception"); } catch (IllegalArgumentException e) { assertEquals("Update of document id:unittest:smoke::whee is missing a 'fields' map", e.getMessage()); } } @Test public void testNullValues() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testnull::doc1", "fields": { "intfield": null, "stringfield": null, "arrayfield": null, "weightedsetfield": null, "mapfield": null, "tensorfield": null } } """); assertFieldValueNull(doc, "intfield"); assertFieldValueNull(doc, "stringfield"); assertFieldValueNull(doc, "arrayfield"); assertFieldValueNull(doc, "weightedsetfield"); assertFieldValueNull(doc, "mapfield"); assertFieldValueNull(doc, "tensorfield"); } @Test(expected=JsonReaderException.class) public void testNullArrayElement() throws IOException { docFromJson(""" { "put": "id:unittest:testnull::doc1", "fields": { "arrayfield": [ null ] } } """); fail(); } private void assertFieldValueNull(Document doc, 
String fieldName) { Field field = doc.getField(fieldName); assertNotNull(field); FieldValue fieldValue = doc.getFieldValue(field); assertNull(fieldValue); } static ByteArrayInputStream jsonToInputStream(String json) { return new ByteArrayInputStream(Utf8.toBytes(json)); } @Test public void testParsingWithoutTensorField() { Document doc = createPutWithoutTensor().getDocument(); assertEquals("testtensor", doc.getId().getDocType()); assertEquals("id:unittest:testtensor::0", doc.getId().toString()); TensorFieldValue fieldValue = (TensorFieldValue)doc.getFieldValue(doc.getField("sparse_tensor")); assertNull(fieldValue); } @Test public void testParsingOfEmptyTensor() { assertSparseTensorField("tensor(x{},y{}):{}", createPutWithSparseTensor("{}")); } @Test public void testParsingOfTensorWithEmptyCells() { assertSparseTensorField("tensor(x{},y{}):{}", createPutWithSparseTensor(inputJson("{ 'cells': [] }"))); } @Test public void testDisallowedDenseTensorShortFormWithoutValues() { assertCreatePutFails(inputJson("{ 'values': [] }"), "dense_tensor", "The 'values' array does not contain any values"); assertCreatePutFails(inputJson("{ 'values': '' }"), "dense_tensor", "The 'values' string does not contain any values"); } @Test public void testDisallowedMixedTensorShortFormWithoutValues() { assertCreatePutFails(inputJson("{\"blocks\":{ \"a\": [] } }"), "mixed_tensor", "Expected 3 values, but got 0"); assertCreatePutFails(inputJson("{\"blocks\":[ {\"address\":{\"x\":\"a\"}, \"values\": [] } ] }"), "mixed_tensor", "Expected 3 values, but got 0"); } @Test public void testParsingOfSparseTensorWithCells() { Tensor tensor = assertSparseTensorField("{{x:a,y:b}:2.0,{x:c,y:b}:3.0}}", createPutWithSparseTensor(""" { "type": "tensor(x{},y{})", "cells": [ { "address": { "x": "a", "y": "b" }, "value": 2.0 }, { "address": { "x": "c", "y": "b" }, "value": 3.0 } ] } """)); assertTrue(tensor instanceof MappedTensor); }
Ah, my mistake — I tried to verify all of the AI-generated JSON, but evidently this one slipped through.
public void testOldMapStringToArrayOfInt() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testMapStringToArrayOfInt::whee", "fields": { "actualMapStringToArrayOfInt": { "bamse": [1, 2, 3] } } } """); FieldValue f = doc.getFieldValue("actualMapStringToArrayOfInt"); assertSame(MapFieldValue.class, f.getClass()); MapFieldValue<?, ?> m = (MapFieldValue<?, ?>) f; Array<?> a = (Array<?>) m.get(new StringFieldValue("bamse")); assertEquals(3, a.size()); assertEquals(new IntegerFieldValue(1), a.get(0)); assertEquals(new IntegerFieldValue(2), a.get(1)); assertEquals(new IntegerFieldValue(3), a.get(2)); }
"bamse": [1, 2, 3]
// Reads a map<string, array<int>> field given in the legacy wire format, where
// the map is an array of { "key": ..., "value": ... } entries rather than a
// JSON object (the short form is covered by testMapStringToArrayOfInt).
public void testOldMapStringToArrayOfInt() throws IOException {
    Document doc = docFromJson("""
            {
              "put": "id:unittest:testMapStringToArrayOfInt::whee",
              "fields": {
                "actualMapStringToArrayOfInt": [
                  { "key": "bamse", "value": [1, 2, 3] }
                ]
              }
            }
            """);
    // The parsed field must come back as a MapFieldValue holding the
    // three-element int array under key "bamse".
    FieldValue f = doc.getFieldValue("actualMapStringToArrayOfInt");
    assertSame(MapFieldValue.class, f.getClass());
    MapFieldValue<?, ?> m = (MapFieldValue<?, ?>) f;
    Array<?> a = (Array<?>) m.get(new StringFieldValue("bamse"));
    assertEquals(3, a.size());
    assertEquals(new IntegerFieldValue(1), a.get(0));
    assertEquals(new IntegerFieldValue(2), a.get(1));
    assertEquals(new IntegerFieldValue(3), a.get(2));
}
class JsonReaderTestCase { private DocumentTypeManager types; private JsonFactory parserFactory; @Before public void setUp() throws Exception { parserFactory = new JsonFactory(); types = new DocumentTypeManager(); { DocumentType x = new DocumentType("smoke"); x.addField(new Field("something", DataType.STRING)); x.addField(new Field("nalle", DataType.STRING)); x.addField(new Field("field1", DataType.STRING)); x.addField(new Field("field2", DataType.STRING)); x.addField(new Field("int1", DataType.INT)); x.addField(new Field("flag", DataType.BOOL)); x.addField(new Field("tensor1", DataType.getTensor(TensorType.fromSpec("tensor(x{})")))); types.registerDocumentType(x); } { DocumentType x = new DocumentType("mirrors"); StructDataType woo = new StructDataType("woo"); woo.addField(new Field("sandra", DataType.STRING)); woo.addField(new Field("cloud", DataType.STRING)); x.addField(new Field("skuggsjaa", woo)); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testarray"); DataType d = new ArrayDataType(DataType.STRING); x.addField(new Field("actualarray", d)); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testset"); DataType d = new WeightedSetDataType(DataType.STRING, true, true); x.addField(new Field("actualset", d)); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testmap"); DataType d = new MapDataType(DataType.STRING, DataType.STRING); x.addField(new Field("actualmap", d)); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testraw"); DataType d = DataType.RAW; x.addField(new Field("actualraw", d)); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testMapStringToArrayOfInt"); DataType value = new ArrayDataType(DataType.INT); DataType d = new MapDataType(DataType.STRING, value); x.addField(new Field("actualMapStringToArrayOfInt", d)); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testArrayOfArrayOfInt"); DataType inner = new 
ArrayDataType(DataType.INT); DataType outer = new ArrayDataType(inner); x.addField(new Field("arrayOfArrayOfInt", outer)); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testsinglepos"); DataType d = PositionDataType.INSTANCE; x.addField(new Field("singlepos", d)); x.addField(new Field("geopos", new GeoPosType(8))); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testtensor"); x.addField(new Field("sparse_single_dimension_tensor", new TensorDataType(new TensorType.Builder().mapped("x").build()))); x.addField(new Field("sparse_tensor", new TensorDataType(new TensorType.Builder().mapped("x").mapped("y").build()))); x.addField(new Field("dense_tensor", new TensorDataType(new TensorType.Builder().indexed("x", 2).indexed("y", 3).build()))); x.addField(new Field("dense_int8_tensor", new TensorDataType(TensorType.fromSpec("tensor<int8>(x[2],y[3])")))); x.addField(new Field("dense_unbound_tensor", new TensorDataType(new TensorType.Builder().indexed("x").indexed("y").build()))); x.addField(new Field("mixed_tensor", new TensorDataType(new TensorType.Builder().mapped("x").indexed("y", 3).build()))); x.addField(new Field("mixed_bfloat16_tensor", new TensorDataType(TensorType.fromSpec("tensor<bfloat16>(x{},y[3])")))); x.addField(new Field("mixed_tensor_adv", new TensorDataType(new TensorType.Builder().mapped("x").mapped("y").mapped("z").indexed("a", 3).build()))); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testpredicate"); x.addField(new Field("boolean", DataType.PREDICATE)); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testint"); x.addField(new Field("integerfield", DataType.INT)); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testnull"); x.addField(new Field("intfield", DataType.INT)); x.addField(new Field("stringfield", DataType.STRING)); x.addField(new Field("arrayfield", new ArrayDataType(DataType.STRING))); x.addField(new Field("weightedsetfield", new 
WeightedSetDataType(DataType.STRING, true, true))); x.addField(new Field("mapfield", new MapDataType(DataType.STRING, DataType.STRING))); x.addField(new Field("tensorfield", new TensorDataType(new TensorType.Builder().indexed("x").build()))); types.registerDocumentType(x); } } @After public void tearDown() throws Exception { types = null; parserFactory = null; } private JsonReader createReader(String jsonInput) { InputStream input = new ByteArrayInputStream(Utf8.toBytes(jsonInput)); return new JsonReader(types, input, parserFactory); } @Test public void readSingleDocumentPut() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:smoke::doc1", "fields": { "something": "smoketest", "flag": true, "nalle": "bamse" } } """); smokeTestDoc(doc); } @Test public final void readSingleDocumentUpdate() throws IOException { DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:smoke::whee", "fields": { "something": { "assign": "orOther" } } } """); FieldUpdate f = doc.getFieldUpdate("something"); assertEquals(1, f.size()); assertTrue(f.getValueUpdate(0) instanceof AssignValueUpdate); assertEquals(new StringFieldValue("orOther"), f.getValueUpdate(0).getValue()); } @Test public void readClearField() throws IOException { DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:smoke::whee", "fields": { "int1": { "assign": null } } } """); FieldUpdate f = doc.getFieldUpdate("int1"); assertEquals(1, f.size()); assertTrue(f.getValueUpdate(0) instanceof ClearValueUpdate); assertNull(f.getValueUpdate(0).getValue()); } @Test public void smokeTest() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:smoke::doc1", "fields": { "something": "smoketest", "flag": true, "nalle": "bamse" } } """); smokeTestDoc(doc); } @Test public void docIdLookaheadTest() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:smoke::doc1", "fields": { "something": "smoketest", "flag": true, "nalle": "bamse" } } """); smokeTestDoc(doc); 
} @Test public void emptyDocTest() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:smoke::whee", "fields": { } }"""); assertEquals(new Document(types.getDocumentType("smoke"), new DocumentId("id:unittest:smoke::whee")), doc); } @Test public void testStruct() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:mirrors::whee", "fields": { "skuggsjaa": { "sandra": "person", "cloud": "another person" } } } """); FieldValue f = doc.getFieldValue(doc.getField("skuggsjaa")); assertSame(Struct.class, f.getClass()); Struct s = (Struct) f; assertEquals("person", ((StringFieldValue) s.getFieldValue("sandra")).getString()); } private DocumentUpdate parseUpdate(String json) throws IOException { InputStream rawDoc = new ByteArrayInputStream(Utf8.toBytes(json)); JsonReader r = new JsonReader(types, rawDoc, parserFactory); DocumentParseInfo parseInfo = r.parseDocument().get(); DocumentType docType = r.readDocumentType(parseInfo.documentId); DocumentUpdate update = new DocumentUpdate(docType, parseInfo.documentId); new VespaJsonDocumentReader(false).readUpdate(parseInfo.fieldsBuffer, update); return update; } @Test public void testStructUpdate() throws IOException { DocumentUpdate put = parseUpdate(""" { "update": "id:unittest:mirrors:g=test:whee", "create": true, "fields": { "skuggsjaa": { "assign": { "sandra": "person", "cloud": "another person" } } } } """); assertEquals(1, put.fieldUpdates().size()); FieldUpdate fu = put.fieldUpdates().iterator().next(); assertEquals(1, fu.getValueUpdates().size()); ValueUpdate vu = fu.getValueUpdate(0); assertTrue(vu instanceof AssignValueUpdate); AssignValueUpdate avu = (AssignValueUpdate) vu; assertTrue(avu.getValue() instanceof Struct); Struct s = (Struct) avu.getValue(); assertEquals(2, s.getFieldCount()); assertEquals(new StringFieldValue("person"), s.getFieldValue(s.getField("sandra"))); GrowableByteBuffer buf = new GrowableByteBuffer(); DocumentSerializer serializer = 
DocumentSerializerFactory.create6(buf); put.serialize(serializer); assertEquals(107, buf.position()); } @Test public final void testEmptyStructUpdate() throws IOException { DocumentUpdate put = parseUpdate(""" { "update": "id:unittest:mirrors:g=test:whee", "create": true, "fields": { "skuggsjaa": { "assign": { } } } } """); assertEquals(1, put.fieldUpdates().size()); FieldUpdate fu = put.fieldUpdates().iterator().next(); assertEquals(1, fu.getValueUpdates().size()); ValueUpdate vu = fu.getValueUpdate(0); assertTrue(vu instanceof AssignValueUpdate); AssignValueUpdate avu = (AssignValueUpdate) vu; assertTrue(avu.getValue() instanceof Struct); Struct s = (Struct) avu.getValue(); assertEquals(0, s.getFieldCount()); GrowableByteBuffer buf = new GrowableByteBuffer(); DocumentSerializer serializer = DocumentSerializerFactory.create6(buf); put.serialize(serializer); assertEquals(69, buf.position()); } @Test public void testUpdateArray() throws IOException { DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:testarray::whee", "fields": { "actualarray": { "add": [ "person", "another person" ] } } } """); checkSimpleArrayAdd(doc); } @Test public void testUpdateWeighted() throws IOException { DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:testset::whee", "fields": { "actualset": { "add": { "person": 37, "another person": 41 } } } } """); Map<String, Integer> weights = new HashMap<>(); FieldUpdate x = doc.getFieldUpdate("actualset"); for (ValueUpdate<?> v : x.getValueUpdates()) { AddValueUpdate adder = (AddValueUpdate) v; final String s = ((StringFieldValue) adder.getValue()).getString(); weights.put(s, adder.getWeight()); } assertEquals(2, weights.size()); final String o = "person"; final String o2 = "another person"; assertTrue(weights.containsKey(o)); assertTrue(weights.containsKey(o2)); assertEquals(Integer.valueOf(37), weights.get(o)); assertEquals(Integer.valueOf(41), weights.get(o2)); } @Test public void testUpdateMatch() throws IOException { 
DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:testset::whee", "fields": { "actualset": { "match": { "element": "person", "increment": 13 } } } } """); DocumentUpdate otherDoc = parseUpdate(""" { "update": "id:unittest:testset::whee", "fields": { "actualset": { "match": { "increment": 13, "element": "person" } } } }"""); assertEquals(doc, otherDoc); Map<String, Tuple2<Number, String>> matches = new HashMap<>(); FieldUpdate x = doc.getFieldUpdate("actualset"); for (ValueUpdate<?> v : x.getValueUpdates()) { MapValueUpdate adder = (MapValueUpdate) v; final String key = ((StringFieldValue) adder.getValue()) .getString(); String op = ((ArithmeticValueUpdate) adder.getUpdate()) .getOperator().toString(); Number n = ((ArithmeticValueUpdate) adder.getUpdate()).getOperand(); matches.put(key, new Tuple2<>(n, op)); } assertEquals(1, matches.size()); final String o = "person"; assertEquals("ADD", matches.get(o).second); assertEquals(Double.valueOf(13), matches.get(o).first); } @SuppressWarnings({ "cast", "unchecked", "rawtypes" }) @Test public void testArithmeticOperators() throws IOException { Tuple2[] operations = new Tuple2[] { new Tuple2<>(UPDATE_DECREMENT, ArithmeticValueUpdate.Operator.SUB), new Tuple2<>(UPDATE_DIVIDE, ArithmeticValueUpdate.Operator.DIV), new Tuple2<>(UPDATE_INCREMENT, ArithmeticValueUpdate.Operator.ADD), new Tuple2<>(UPDATE_MULTIPLY, ArithmeticValueUpdate.Operator.MUL) }; for (Tuple2<String, Operator> operator : operations) { DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:testset::whee", "fields": { "actualset": { "match": { "element": "person", "%s": 13 } } } } """.formatted(operator.first)); Map<String, Tuple2<Number, Operator>> matches = new HashMap<>(); FieldUpdate x = doc.getFieldUpdate("actualset"); for (ValueUpdate v : x.getValueUpdates()) { MapValueUpdate adder = (MapValueUpdate) v; final String key = ((StringFieldValue) adder.getValue()) .getString(); Operator op = ((ArithmeticValueUpdate) adder 
.getUpdate()).getOperator(); Number n = ((ArithmeticValueUpdate) adder.getUpdate()) .getOperand(); matches.put(key, new Tuple2<>(n, op)); } assertEquals(1, matches.size()); final String o = "person"; assertSame(operator.second, matches.get(o).second); assertEquals(Double.valueOf(13), matches.get(o).first); } } @SuppressWarnings("rawtypes") @Test public void testArrayIndexing() throws IOException { DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:testarray::whee", "fields": { "actualarray": { "match": { "element": 3, "assign": "nalle" } } } } """); Map<Number, String> matches = new HashMap<>(); FieldUpdate x = doc.getFieldUpdate("actualarray"); for (ValueUpdate v : x.getValueUpdates()) { MapValueUpdate adder = (MapValueUpdate) v; final Number key = ((IntegerFieldValue) adder.getValue()) .getNumber(); String op = ((StringFieldValue) adder.getUpdate() .getValue()).getString(); matches.put(key, op); } assertEquals(1, matches.size()); Number n = Integer.valueOf(3); assertEquals("nalle", matches.get(n)); } @Test public void testDocumentRemove() { JsonReader r = createReader(inputJson("{'remove': 'id:unittest:smoke::whee'}")); DocumentType docType = r.readDocumentType(new DocumentId("id:unittest:smoke::whee")); assertEquals("smoke", docType.getName()); } private Document docFromJson(String json) throws IOException { JsonReader r = createReader(json); DocumentParseInfo parseInfo = r.parseDocument().get(); DocumentType docType = r.readDocumentType(parseInfo.documentId); DocumentPut put = new DocumentPut(new Document(docType, parseInfo.documentId)); new VespaJsonDocumentReader(false).readPut(parseInfo.fieldsBuffer, put); return put.getDocument(); } @Test public void testWeightedSet() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testset::whee", "fields": { "actualset": { "nalle": 2, "tralle": 7 } } } """); FieldValue f = doc.getFieldValue(doc.getField("actualset")); assertSame(WeightedSet.class, f.getClass()); WeightedSet<?> w = 
(WeightedSet<?>) f; assertEquals(2, w.size()); assertEquals(Integer.valueOf(2), w.get(new StringFieldValue("nalle"))); assertEquals(Integer.valueOf(7), w.get(new StringFieldValue("tralle"))); } @Test public void testArray() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testarray::whee", "fields": { "actualarray": [ "nalle", "tralle" ] } } """); FieldValue f = doc.getFieldValue(doc.getField("actualarray")); assertSame(Array.class, f.getClass()); Array<?> a = (Array<?>) f; assertEquals(2, a.size()); assertEquals(new StringFieldValue("nalle"), a.get(0)); assertEquals(new StringFieldValue("tralle"), a.get(1)); } @Test public void testMap() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testmap::whee", "fields": { "actualmap": { "nalle": "kalle", "tralle": "skalle" } } } """); FieldValue f = doc.getFieldValue(doc.getField("actualmap")); assertSame(MapFieldValue.class, f.getClass()); MapFieldValue<?, ?> m = (MapFieldValue<?, ?>) f; assertEquals(2, m.size()); assertEquals(new StringFieldValue("kalle"), m.get(new StringFieldValue("nalle"))); assertEquals(new StringFieldValue("skalle"), m.get(new StringFieldValue("tralle"))); } @Test public void testOldMap() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testmap::whee", "fields": { "actualmap": [ { "key": "nalle", "value": "kalle" }, { "key": "tralle", "value": "skalle" } ] } } """); FieldValue f = doc.getFieldValue(doc.getField("actualmap")); assertSame(MapFieldValue.class, f.getClass()); MapFieldValue<?, ?> m = (MapFieldValue<?, ?>) f; assertEquals(2, m.size()); assertEquals(new StringFieldValue("kalle"), m.get(new StringFieldValue("nalle"))); assertEquals(new StringFieldValue("skalle"), m.get(new StringFieldValue("tralle"))); } @Test public void testPositionPositive() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testsinglepos::bamf", "fields": { "singlepos": "N63.429722;E10.393333" } } """); FieldValue f = 
doc.getFieldValue(doc.getField("singlepos")); assertSame(Struct.class, f.getClass()); assertEquals(10393333, PositionDataType.getXValue(f).getInteger()); assertEquals(63429722, PositionDataType.getYValue(f).getInteger()); } @Test public void testPositionOld() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testsinglepos::bamf", "fields": { "singlepos": { "x": 10393333, "y": 63429722 } } } """); FieldValue f = doc.getFieldValue(doc.getField("singlepos")); assertSame(Struct.class, f.getClass()); assertEquals(10393333, PositionDataType.getXValue(f).getInteger()); assertEquals(63429722, PositionDataType.getYValue(f).getInteger()); } @Test public void testGeoPosition() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testsinglepos::bamf", "fields": { "singlepos": { "lat": 63.429722, "lng": 10.393333 } } } """); FieldValue f = doc.getFieldValue(doc.getField("singlepos")); assertSame(Struct.class, f.getClass()); assertEquals(10393333, PositionDataType.getXValue(f).getInteger()); assertEquals(63429722, PositionDataType.getYValue(f).getInteger()); } @Test public void testGeoPositionNoAbbreviations() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testsinglepos::bamf", "fields": { "singlepos": { "latitude": 63.429722, "longitude": 10.393333 } } } """); FieldValue f = doc.getFieldValue(doc.getField("singlepos")); assertSame(Struct.class, f.getClass()); assertEquals(10393333, PositionDataType.getXValue(f).getInteger()); assertEquals(63429722, PositionDataType.getYValue(f).getInteger()); } @Test public void testPositionGeoPos() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testsinglepos::bamf", "fields": { "geopos": "N63.429722;E10.393333" } } """); FieldValue f = doc.getFieldValue(doc.getField("geopos")); assertSame(Struct.class, f.getClass()); assertEquals(10393333, PositionDataType.getXValue(f).getInteger()); assertEquals(63429722, 
PositionDataType.getYValue(f).getInteger()); assertEquals(f.getDataType(), PositionDataType.INSTANCE); } @Test public void testPositionOldGeoPos() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testsinglepos::bamf", "fields": { "geopos": { "x": 10393333, "y": 63429722 } } } """); FieldValue f = doc.getFieldValue(doc.getField("geopos")); assertSame(Struct.class, f.getClass()); assertEquals(10393333, PositionDataType.getXValue(f).getInteger()); assertEquals(63429722, PositionDataType.getYValue(f).getInteger()); assertEquals(f.getDataType(), PositionDataType.INSTANCE); } @Test public void testGeoPositionGeoPos() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testsinglepos::bamf", "fields": { "geopos": { "lat": 63.429722, "lng": 10.393333 } } } """); FieldValue f = doc.getFieldValue(doc.getField("geopos")); assertSame(Struct.class, f.getClass()); assertEquals(10393333, PositionDataType.getXValue(f).getInteger()); assertEquals(63429722, PositionDataType.getYValue(f).getInteger()); assertEquals(f.getDataType(), PositionDataType.INSTANCE); assertEquals(PositionDataType.INSTANCE, f.getDataType()); } @Test public void testPositionNegative() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testsinglepos::bamf", "fields": { "singlepos": "W46.63;S23.55" } } """); FieldValue f = doc.getFieldValue(doc.getField("singlepos")); assertSame(Struct.class, f.getClass()); assertEquals(-46630000, PositionDataType.getXValue(f).getInteger()); assertEquals(-23550000, PositionDataType.getYValue(f).getInteger()); } @Test public void testRaw() throws IOException { String base64 = new String(new JsonStringEncoder().quoteAsString( Base64.getEncoder().withoutPadding().encodeToString(Utf8.toBytes("smoketest")))); String s = fieldStringFromBase64RawContent(base64); assertEquals("smoketest", s); } @Test public void can_read_legacy_chunked_base64_raw_field_encoding() throws IOException { String expected = "this is a string 
with an impressive length. it's long enough to reach the end of the line, wow!"; String base64withDelims = "dGhpcyBpcyBhIHN0cmluZyB3aXRoIGFuIGltcHJlc3NpdmUgbGVuZ3RoLiBpdCdzIGxvbmcgZW5v\\r\\n" + "dWdoIHRvIHJlYWNoIHRoZSBlbmQgb2YgdGhlIGxpbmUsIHdvdyE=\\r\\n"; assertEquals(expected, fieldStringFromBase64RawContent(base64withDelims)); } private String fieldStringFromBase64RawContent(String base64data) throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testraw::whee", "fields": { "actualraw": "%s" } } """.formatted(base64data)); FieldValue f = doc.getFieldValue(doc.getField("actualraw")); assertSame(Raw.class, f.getClass()); Raw s = (Raw) f; return Utf8.toString(s.getByteBuffer()); } @Test public void testMapStringToArrayOfInt() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testMapStringToArrayOfInt::whee", "fields": { "actualMapStringToArrayOfInt": { "bamse": [1, 2, 3] } } } """); FieldValue f = doc.getFieldValue("actualMapStringToArrayOfInt"); assertSame(MapFieldValue.class, f.getClass()); MapFieldValue<?, ?> m = (MapFieldValue<?, ?>) f; Array<?> a = (Array<?>) m.get(new StringFieldValue("bamse")); assertEquals(3, a.size()); assertEquals(new IntegerFieldValue(1), a.get(0)); assertEquals(new IntegerFieldValue(2), a.get(1)); assertEquals(new IntegerFieldValue(3), a.get(2)); } @Test @Test public void testAssignToString() throws IOException { DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:smoke::whee", "fields": { "something": { "assign": "orOther" } } } """); FieldUpdate f = doc.getFieldUpdate("something"); assertEquals(1, f.size()); AssignValueUpdate a = (AssignValueUpdate) f.getValueUpdate(0); assertEquals(new StringFieldValue("orOther"), a.getValue()); } @Test public void testNestedArrayMatch() throws IOException { DocumentUpdate nested = parseUpdate(""" { "update": "id:unittest:testArrayOfArrayOfInt::whee", "fields": { "arrayOfArrayOfInt": { "match": { "element": 1, "match": { "element": 2, "assign": 3 } 
} } } } """); DocumentUpdate equivalent = parseUpdate(""" { "update": "id:unittest:testArrayOfArrayOfInt::whee", "fields": { "arrayOfArrayOfInt": { "match": { "match": { "assign": 3, "element": 2 }, "element": 1 } } } } """); assertEquals(nested, equivalent); assertEquals(1, nested.fieldUpdates().size()); FieldUpdate fu = nested.fieldUpdates().iterator().next(); assertEquals(1, fu.getValueUpdates().size()); MapValueUpdate mvu = (MapValueUpdate) fu.getValueUpdate(0); assertEquals(new IntegerFieldValue(1), mvu.getValue()); MapValueUpdate nvu = (MapValueUpdate) mvu.getUpdate(); assertEquals(new IntegerFieldValue(2), nvu.getValue()); AssignValueUpdate avu = (AssignValueUpdate) nvu.getUpdate(); assertEquals(new IntegerFieldValue(3), avu.getValue()); Document doc = docFromJson(""" { "put": "id:unittest:testArrayOfArrayOfInt::whee", "fields": { "arrayOfArrayOfInt": [ [1, 2, 3], [4, 5, 6] ] } } """); nested.applyTo(doc); Document expected = docFromJson(""" { "put": "id:unittest:testArrayOfArrayOfInt::whee", "fields": { "arrayOfArrayOfInt": [ [1, 2, 3], [4, 5, 3] ] } } """); assertEquals(expected, doc); } @Test public void testMatchCannotUpdateNestedFields() { assertEquals("Field type Map<string,Array<int>> not supported.", assertThrows(UnsupportedOperationException.class, () -> parseUpdate(""" { "update": "id:unittest:testMapStringToArrayOfInt::whee", "fields": { "actualMapStringToArrayOfInt": { "match": { "element": "bamse", "match": { "element": 1, "assign": 4 } } } } } """)).getMessage()); } @Test public void testMatchCannotAssignToNestedMap() { assertEquals("Field type Map<string,Array<int>> not supported.", assertThrows(UnsupportedOperationException.class, () -> parseUpdate(""" { "update": "id:unittest:testMapStringToArrayOfInt::whee", "fields": { "actualMapStringToArrayOfInt": { "match": { "element": "bamse", "assign": [1, 3, 4] } } } } """)).getMessage()); } @Test public void testMatchCannotAssignToMap() { assertEquals("Field type Map<string,string> not supported.", 
assertThrows(UnsupportedOperationException.class, () -> parseUpdate(""" { "update": "id:unittest:testmap::whee", "fields": { "actualmap": { "match": { "element": "bamse", "assign": "bar" } } } } """)).getMessage()); } @Test public void testAssignInsideArrayInMap() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testMapStringToArrayOfInt::whee", "fields": { "actualMapStringToArrayOfInt": { "bamse": [1, 2, 3] } } }"""); assertEquals(2, ((MapFieldValue<StringFieldValue, Array<IntegerFieldValue>>) doc.getFieldValue("actualMapStringToArrayOfInt")) .get(StringFieldValue.getFactory().create("bamse")).get(1).getInteger()); DocumentUpdate update = parseUpdate(""" { "update": "id:unittest:testMapStringToArrayOfInt::whee", "fields": { "actualMapStringToArrayOfInt{bamse}[1]": { "assign": 4 } } } """); assertEquals(1, update.fieldPathUpdates().size()); update.applyTo(doc); assertEquals(4, ((MapFieldValue<StringFieldValue, Array<IntegerFieldValue>>) doc.getFieldValue("actualMapStringToArrayOfInt")) .get(StringFieldValue.getFactory().create("bamse")).get(1).getInteger()); } @Test public void testAssignToArray() throws IOException { DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:testMapStringToArrayOfInt::whee", "fields": { "actualMapStringToArrayOfInt": { "assign": { "bamse": [1, 2, 3] } } } } """); FieldUpdate f = doc.getFieldUpdate("actualMapStringToArrayOfInt"); assertEquals(1, f.size()); AssignValueUpdate assign = (AssignValueUpdate) f.getValueUpdate(0); MapFieldValue<?, ?> m = (MapFieldValue<?, ?>) assign.getValue(); Array<?> a = (Array<?>) m.get(new StringFieldValue("bamse")); assertEquals(3, a.size()); assertEquals(new IntegerFieldValue(1), a.get(0)); assertEquals(new IntegerFieldValue(2), a.get(1)); assertEquals(new IntegerFieldValue(3), a.get(2)); } @Test public void testOldAssignToArray() throws IOException { DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:testMapStringToArrayOfInt::whee", "fields": { 
"actualMapStringToArrayOfInt": { "assign": [ { "key": "bamse", "value": [1, 2, 3] } ] } } } """); FieldUpdate f = doc.getFieldUpdate("actualMapStringToArrayOfInt"); assertEquals(1, f.size()); AssignValueUpdate assign = (AssignValueUpdate) f.getValueUpdate(0); MapFieldValue<?, ?> m = (MapFieldValue<?, ?>) assign.getValue(); Array<?> a = (Array<?>) m.get(new StringFieldValue("bamse")); assertEquals(3, a.size()); assertEquals(new IntegerFieldValue(1), a.get(0)); assertEquals(new IntegerFieldValue(2), a.get(1)); assertEquals(new IntegerFieldValue(3), a.get(2)); } @Test public void testAssignToWeightedSet() throws IOException { DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:testset::whee", "fields": { "actualset": { "assign": { "person": 37, "another person": 41 } } } } """); FieldUpdate x = doc.getFieldUpdate("actualset"); assertEquals(1, x.size()); AssignValueUpdate assign = (AssignValueUpdate) x.getValueUpdate(0); WeightedSet<?> w = (WeightedSet<?>) assign.getValue(); assertEquals(2, w.size()); assertEquals(Integer.valueOf(37), w.get(new StringFieldValue("person"))); assertEquals(Integer.valueOf(41), w.get(new StringFieldValue("another person"))); } @Test public void testCompleteFeed() { JsonReader r = createReader(""" [ { "put": "id:unittest:smoke::whee", "fields": { "something": "smoketest", "flag": true, "nalle": "bamse" } }, { "update": "id:unittest:testarray::whee", "fields": { "actualarray": { "add": [ "person", "another person" ] } } }, { "remove": "id:unittest:smoke::whee" } ] """); controlBasicFeed(r); } @Test public void testCompleteFeedWithCreateAndCondition() { JsonReader r = createReader(""" [ { "put": "id:unittest:smoke::whee", "fields": { "something": "smoketest", "flag": true, "nalle": "bamse" } }, { "condition":"bla", "update": "id:unittest:testarray::whee", "create":true, "fields": { "actualarray": { "add": [ "person", "another person" ] } } }, { "remove": "id:unittest:smoke::whee" } ] """); DocumentOperation d = r.next(); Document 
doc = ((DocumentPut) d).getDocument(); smokeTestDoc(doc); d = r.next(); DocumentUpdate update = (DocumentUpdate) d; checkSimpleArrayAdd(update); assertTrue(update.getCreateIfNonExistent()); assertEquals("bla", update.getCondition().getSelection()); d = r.next(); DocumentRemove remove = (DocumentRemove) d; assertEquals("smoke", remove.getId().getDocType()); assertNull(r.next()); } @Test public void testUpdateWithConditionAndCreateInDifferentOrdering() { int documentsCreated = 106; List<String> parts = Arrays.asList( "\"condition\":\"bla\"", "\"update\": \"id:unittest:testarray::whee\"", " \"fields\": { " + "\"actualarray\": { \"add\": [" + " \"person\",\"another person\"]}}", " \"create\":true"); Random random = new Random(42); StringBuilder documents = new StringBuilder("["); for (int x = 0; x < documentsCreated; x++) { Collections.shuffle(parts, random); documents.append("{").append(Joiner.on(",").join(parts)).append("}"); if (x < documentsCreated -1) { documents.append(","); } } documents.append("]"); InputStream rawDoc = new ByteArrayInputStream(Utf8.toBytes(documents.toString())); JsonReader r = new JsonReader(types, rawDoc, parserFactory); for (int x = 0; x < documentsCreated; x++) { DocumentUpdate update = (DocumentUpdate) r.next(); checkSimpleArrayAdd(update); assertTrue(update.getCreateIfNonExistent()); assertEquals("bla", update.getCondition().getSelection()); } assertNull(r.next()); } @Test public void testCreateIfNonExistentInPut() { JsonReader r = createReader(""" [ { "create":true, "fields": { "something": "smoketest", "nalle": "bamse" }, "put": "id:unittest:smoke::whee" } ] """); var op = r.next(); var put = (DocumentPut) op; assertTrue(put.getCreateIfNonExistent()); } @Test public void testCompleteFeedWithIdAfterFields() { JsonReader r = createReader(""" [ { "fields": { "something": "smoketest", "flag": true, "nalle": "bamse" }, "put": "id:unittest:smoke::whee" }, { "fields": { "actualarray": { "add": [ "person", "another person" ] } }, "update": 
"id:unittest:testarray::whee" }, { "remove": "id:unittest:smoke::whee" } ] """); controlBasicFeed(r); } protected void controlBasicFeed(JsonReader r) { DocumentOperation d = r.next(); Document doc = ((DocumentPut) d).getDocument(); smokeTestDoc(doc); d = r.next(); DocumentUpdate update = (DocumentUpdate) d; checkSimpleArrayAdd(update); d = r.next(); DocumentRemove remove = (DocumentRemove) d; assertEquals("smoke", remove.getId().getDocType()); assertNull(r.next()); } @Test public void testCompleteFeedWithEmptyDoc() { JsonReader r = createReader(""" [ { "put": "id:unittest:smoke::whee", "fields": {} }, { "update": "id:unittest:testarray::whee", "fields": {} }, { "remove": "id:unittest:smoke::whee" } ] """); DocumentOperation d = r.next(); Document doc = ((DocumentPut) d).getDocument(); assertEquals("smoke", doc.getId().getDocType()); d = r.next(); DocumentUpdate update = (DocumentUpdate) d; assertEquals("testarray", update.getId().getDocType()); d = r.next(); DocumentRemove remove = (DocumentRemove) d; assertEquals("smoke", remove.getId().getDocType()); assertNull(r.next()); } private void checkSimpleArrayAdd(DocumentUpdate update) { Set<String> toAdd = new HashSet<>(); FieldUpdate x = update.getFieldUpdate("actualarray"); for (ValueUpdate<?> v : x.getValueUpdates()) { AddValueUpdate adder = (AddValueUpdate) v; toAdd.add(((StringFieldValue) adder.getValue()).getString()); } assertEquals(2, toAdd.size()); assertTrue(toAdd.contains("person")); assertTrue(toAdd.contains("another person")); } private void smokeTestDoc(Document doc) { FieldValue boolField = doc.getFieldValue(doc.getField("flag")); assertSame(BoolFieldValue.class, boolField.getClass()); assertTrue((Boolean)boolField.getWrappedValue()); FieldValue stringField = doc.getFieldValue(doc.getField("nalle")); assertSame(StringFieldValue.class, stringField.getClass()); assertEquals("bamse", ((StringFieldValue) stringField).getString()); } @Test public void nonExistingFieldCausesException() throws IOException { 
Exception expected = assertThrows(IllegalArgumentException.class, () -> docFromJson(""" { "put": "id:unittest:smoke::whee", "fields": { "smething": "smoketest", "nalle": "bamse" } } """)); assertTrue(expected.getMessage().startsWith("No field 'smething' in the structure of type 'smoke'")); } @Test public void nonExistingFieldsCanBeIgnoredInPut() throws IOException { JsonReader r = createReader(""" { "put": "id:unittest:smoke::doc1", "fields": { "nonexisting1": "ignored value", "field1": "value1", "nonexisting2": { "blocks": { "a": [2.0, 3.0], "b": [4.0, 5.0] } }, "field2": "value2", "nonexisting3": { "cells": [ { "address": { "x": "x1" }, "value": 1.0 } ] }, "tensor1": { "cells": { "x1": 1.0 } }, "nonexisting4": "ignored value" } } """); DocumentParseInfo parseInfo = r.parseDocument().get(); DocumentType docType = r.readDocumentType(parseInfo.documentId); DocumentPut put = new DocumentPut(new Document(docType, parseInfo.documentId)); boolean fullyApplied = new VespaJsonDocumentReader(true).readPut(parseInfo.fieldsBuffer, put); assertFalse(fullyApplied); assertNull(put.getDocument().getField("nonexisting1")); assertEquals("value1", put.getDocument().getFieldValue("field1").toString()); assertNull(put.getDocument().getField("nonexisting2")); assertEquals("value2", put.getDocument().getFieldValue("field2").toString()); assertNull(put.getDocument().getField("nonexisting3")); assertEquals(Tensor.from("tensor(x{}):{{x:x1}:1.0}"), put.getDocument().getFieldValue("tensor1").getWrappedValue()); assertNull(put.getDocument().getField("nonexisting4")); } @Test public void nonExistingFieldsCanBeIgnoredInUpdate() throws IOException{ JsonReader r = createReader(""" { "update": "id:unittest:smoke::doc1", "fields": { "nonexisting1": { "assign": "ignored value" }, "field1": { "assign": "value1" }, "nonexisting2": { "assign": { "blocks": { "a":[2.0,3.0], "b":[4.0,5.0] } } }, "field2": { "assign": "value2" }, "nonexisting3": { "assign" : { "cells": [{"address": {"x": "x1"}, "value": 
1.0}] } }, "tensor1": {"assign": { "cells": {"x1": 1.0} } }, "nonexisting4": { "assign": "ignored value" } } } """); DocumentParseInfo parseInfo = r.parseDocument().get(); DocumentType docType = r.readDocumentType(parseInfo.documentId); DocumentUpdate update = new DocumentUpdate(docType, parseInfo.documentId); boolean fullyApplied = new VespaJsonDocumentReader(true).readUpdate(parseInfo.fieldsBuffer, update); assertFalse(fullyApplied); assertNull(update.getFieldUpdate("nonexisting1")); assertEquals("value1", update.getFieldUpdate("field1").getValueUpdates().get(0).getValue().getWrappedValue().toString()); assertNull(update.getFieldUpdate("nonexisting2")); assertEquals("value2", update.getFieldUpdate("field2").getValueUpdates().get(0).getValue().getWrappedValue().toString()); assertNull(update.getFieldUpdate("nonexisting3")); assertEquals(Tensor.from("tensor(x{}):{{x:x1}:1.0}"), update.getFieldUpdate("tensor1").getValueUpdates().get(0).getValue().getWrappedValue()); assertNull(update.getFieldUpdate("nonexisting4")); } @Test public void feedWithBasicErrorTest() { JsonReader r = createReader(""" [ { "put": "id:test:smoke::0", "fields": { "something": "foo" } }, { "put": "id:test:smoke::1", "fields": { "something": "foo" } }, { "put": "id:test:smoke::2", "fields": { "something": "foo" } }, ]"""); assertTrue(assertThrows(RuntimeException.class, () -> { while (r.next() != null); }) .getMessage().contains("JsonParseException")); } @Test public void idAsAliasForPutTest() throws IOException{ JsonReader r = createReader(""" { "id": "id:unittest:smoke::doc1", "fields": { "something": "smoketest", "flag": true, "nalle": "bamse" } } """); DocumentParseInfo parseInfo = r.parseDocument().get(); DocumentType docType = r.readDocumentType(parseInfo.documentId); DocumentPut put = new DocumentPut(new Document(docType, parseInfo.documentId)); boolean fullyApplied = new VespaJsonDocumentReader(false).readPut(parseInfo.fieldsBuffer, put); assertTrue(fullyApplied); 
smokeTestDoc(put.getDocument()); } private void testFeedWithTestAndSetCondition(String jsonDoc) { ByteArrayInputStream parseInfoDoc = new ByteArrayInputStream(Utf8.toBytes(jsonDoc)); JsonReader reader = new JsonReader(types, parseInfoDoc, parserFactory); int NUM_OPERATIONS_IN_FEED = 3; for (int i = 0; i < NUM_OPERATIONS_IN_FEED; i++) { DocumentOperation operation = reader.next(); assertTrue("A test and set condition should be present", operation.getCondition().isPresent()); assertEquals("DocumentOperation's test and set condition should be equal to the one in the JSON feed", "smoke.something == \"smoketest\"", operation.getCondition().getSelection()); } assertNull(reader.next()); } @Test public void testFeedWithTestAndSetConditionOrderingOne() { testFeedWithTestAndSetCondition(""" [ { "put": "id:unittest:smoke::whee", "condition": "smoke.something == \\"smoketest\\"", "fields": { "something": "smoketest", "nalle": "bamse" } }, { "update": "id:unittest:testarray::whee", "condition": "smoke.something == \\"smoketest\\"", "fields": { "actualarray": { "add": [ "person", "another person" ] } } }, { "remove": "id:unittest:smoke::whee", "condition": "smoke.something == \\"smoketest\\"" } ] """); } @Test public void testFeedWithTestAndSetConditionOrderingTwo() { testFeedWithTestAndSetCondition(""" [ { "condition": "smoke.something == \\"smoketest\\"", "put": "id:unittest:smoke::whee", "fields": { "something": "smoketest", "nalle": "bamse" } }, { "condition": "smoke.something == \\"smoketest\\"", "update": "id:unittest:testarray::whee", "fields": { "actualarray": { "add": [ "person", "another person" ] } } }, { "condition": "smoke.something == \\"smoketest\\"", "remove": "id:unittest:smoke::whee" } ] """); } @Test public void testFeedWithTestAndSetConditionOrderingThree() { testFeedWithTestAndSetCondition(""" [ { "put": "id:unittest:smoke::whee", "fields": { "something": "smoketest", "nalle": "bamse" }, "condition": "smoke.something == \\"smoketest\\"" }, { "update": 
"id:unittest:testarray::whee", "fields": { "actualarray": { "add": [ "person", "another person" ] } }, "condition": "smoke.something == \\"smoketest\\"" }, { "remove": "id:unittest:smoke::whee", "condition": "smoke.something == \\"smoketest\\"" } ] """); } @Test(expected = IllegalArgumentException.class) public void testInvalidFieldAfterFieldsFieldShouldFailParse() { String jsonData = """ [ { "put": "id:unittest:smoke::whee", "fields": { "something": "smoketest", "nalle": "bamse" }, "bjarne": "stroustrup" } ]"""; new JsonReader(types, jsonToInputStream(jsonData), parserFactory).next(); } @Test(expected = IllegalArgumentException.class) public void testInvalidFieldBeforeFieldsFieldShouldFailParse() { String jsonData = """ [ { "update": "id:unittest:testarray::whee", "what is this": "nothing to see here", "fields": { "actualarray": { "add": [ "person", "another person" ] } } } ]"""; new JsonReader(types, jsonToInputStream(jsonData), parserFactory).next(); } @Test(expected = IllegalArgumentException.class) public void testInvalidFieldWithoutFieldsFieldShouldFailParse() { String jsonData = """ [ { "remove": "id:unittest:smoke::whee", "what is love": "baby, do not hurt me... 
much } ]"""; new JsonReader(types, jsonToInputStream(jsonData), parserFactory).next(); } @Test public void testMissingOperation() { try { String jsonData = """ [ { "fields": { "actualarray": { "add": [ "person", "another person" ] } } } ]"""; new JsonReader(types, jsonToInputStream(jsonData), parserFactory).next(); fail("Expected exception"); } catch (IllegalArgumentException e) { assertEquals("Missing a document operation ('put', 'update' or 'remove')", e.getMessage()); } } @Test public void testMissingFieldsMapInPut() { try { String jsonData = """ [ { "put": "id:unittest:smoke::whee" } ]"""; new JsonReader(types, jsonToInputStream(jsonData), parserFactory).next(); fail("Expected exception"); } catch (IllegalArgumentException e) { assertEquals("put of document id:unittest:smoke::whee is missing a 'fields' map", e.getMessage()); } } @Test public void testMissingFieldsMapInUpdate() { try { String jsonData = """ [ { "update": "id:unittest:smoke::whee" } ]"""; new JsonReader(types, jsonToInputStream(jsonData), parserFactory).next(); fail("Expected exception"); } catch (IllegalArgumentException e) { assertEquals("Update of document id:unittest:smoke::whee is missing a 'fields' map", e.getMessage()); } } @Test public void testNullValues() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testnull::doc1", "fields": { "intfield": null, "stringfield": null, "arrayfield": null, "weightedsetfield": null, "mapfield": null, "tensorfield": null } } """); assertFieldValueNull(doc, "intfield"); assertFieldValueNull(doc, "stringfield"); assertFieldValueNull(doc, "arrayfield"); assertFieldValueNull(doc, "weightedsetfield"); assertFieldValueNull(doc, "mapfield"); assertFieldValueNull(doc, "tensorfield"); } @Test(expected=JsonReaderException.class) public void testNullArrayElement() throws IOException { docFromJson(""" { "put": "id:unittest:testnull::doc1", "fields": { "arrayfield": [ null ] } } """); fail(); } private void assertFieldValueNull(Document doc, 
String fieldName) { Field field = doc.getField(fieldName); assertNotNull(field); FieldValue fieldValue = doc.getFieldValue(field); assertNull(fieldValue); } static ByteArrayInputStream jsonToInputStream(String json) { return new ByteArrayInputStream(Utf8.toBytes(json)); } @Test public void testParsingWithoutTensorField() { Document doc = createPutWithoutTensor().getDocument(); assertEquals("testtensor", doc.getId().getDocType()); assertEquals("id:unittest:testtensor::0", doc.getId().toString()); TensorFieldValue fieldValue = (TensorFieldValue)doc.getFieldValue(doc.getField("sparse_tensor")); assertNull(fieldValue); } @Test public void testParsingOfEmptyTensor() { assertSparseTensorField("tensor(x{},y{}):{}", createPutWithSparseTensor("{}")); } @Test public void testParsingOfTensorWithEmptyCells() { assertSparseTensorField("tensor(x{},y{}):{}", createPutWithSparseTensor(inputJson("{ 'cells': [] }"))); } @Test public void testDisallowedDenseTensorShortFormWithoutValues() { assertCreatePutFails(inputJson("{ 'values': [] }"), "dense_tensor", "The 'values' array does not contain any values"); assertCreatePutFails(inputJson("{ 'values': '' }"), "dense_tensor", "The 'values' string does not contain any values"); } @Test public void testDisallowedMixedTensorShortFormWithoutValues() { assertCreatePutFails(inputJson("{\"blocks\":{ \"a\": [] } }"), "mixed_tensor", "Expected 3 values, but got 0"); assertCreatePutFails(inputJson("{\"blocks\":[ {\"address\":{\"x\":\"a\"}, \"values\": [] } ] }"), "mixed_tensor", "Expected 3 values, but got 0"); } @Test public void testParsingOfSparseTensorWithCells() { Tensor tensor = assertSparseTensorField("{{x:a,y:b}:2.0,{x:c,y:b}:3.0}}", createPutWithSparseTensor(""" { "type": "tensor(x{},y{})", "cells": [ { "address": { "x": "a", "y": "b" }, "value": 2.0 }, { "address": { "x": "c", "y": "b" }, "value": 3.0 } ] } """)); assertTrue(tensor instanceof MappedTensor); }
class JsonReaderTestCase { private DocumentTypeManager types; private JsonFactory parserFactory; @Before public void setUp() throws Exception { parserFactory = new JsonFactory(); types = new DocumentTypeManager(); { DocumentType x = new DocumentType("smoke"); x.addField(new Field("something", DataType.STRING)); x.addField(new Field("nalle", DataType.STRING)); x.addField(new Field("field1", DataType.STRING)); x.addField(new Field("field2", DataType.STRING)); x.addField(new Field("int1", DataType.INT)); x.addField(new Field("flag", DataType.BOOL)); x.addField(new Field("tensor1", DataType.getTensor(TensorType.fromSpec("tensor(x{})")))); types.registerDocumentType(x); } { DocumentType x = new DocumentType("mirrors"); StructDataType woo = new StructDataType("woo"); woo.addField(new Field("sandra", DataType.STRING)); woo.addField(new Field("cloud", DataType.STRING)); x.addField(new Field("skuggsjaa", woo)); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testarray"); DataType d = new ArrayDataType(DataType.STRING); x.addField(new Field("actualarray", d)); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testset"); DataType d = new WeightedSetDataType(DataType.STRING, true, true); x.addField(new Field("actualset", d)); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testmap"); DataType d = new MapDataType(DataType.STRING, DataType.STRING); x.addField(new Field("actualmap", d)); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testraw"); DataType d = DataType.RAW; x.addField(new Field("actualraw", d)); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testMapStringToArrayOfInt"); DataType value = new ArrayDataType(DataType.INT); DataType d = new MapDataType(DataType.STRING, value); x.addField(new Field("actualMapStringToArrayOfInt", d)); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testArrayOfArrayOfInt"); DataType inner = new 
ArrayDataType(DataType.INT); DataType outer = new ArrayDataType(inner); x.addField(new Field("arrayOfArrayOfInt", outer)); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testsinglepos"); DataType d = PositionDataType.INSTANCE; x.addField(new Field("singlepos", d)); x.addField(new Field("geopos", new GeoPosType(8))); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testtensor"); x.addField(new Field("sparse_single_dimension_tensor", new TensorDataType(new TensorType.Builder().mapped("x").build()))); x.addField(new Field("sparse_tensor", new TensorDataType(new TensorType.Builder().mapped("x").mapped("y").build()))); x.addField(new Field("dense_tensor", new TensorDataType(new TensorType.Builder().indexed("x", 2).indexed("y", 3).build()))); x.addField(new Field("dense_int8_tensor", new TensorDataType(TensorType.fromSpec("tensor<int8>(x[2],y[3])")))); x.addField(new Field("dense_unbound_tensor", new TensorDataType(new TensorType.Builder().indexed("x").indexed("y").build()))); x.addField(new Field("mixed_tensor", new TensorDataType(new TensorType.Builder().mapped("x").indexed("y", 3).build()))); x.addField(new Field("mixed_bfloat16_tensor", new TensorDataType(TensorType.fromSpec("tensor<bfloat16>(x{},y[3])")))); x.addField(new Field("mixed_tensor_adv", new TensorDataType(new TensorType.Builder().mapped("x").mapped("y").mapped("z").indexed("a", 3).build()))); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testpredicate"); x.addField(new Field("boolean", DataType.PREDICATE)); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testint"); x.addField(new Field("integerfield", DataType.INT)); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testnull"); x.addField(new Field("intfield", DataType.INT)); x.addField(new Field("stringfield", DataType.STRING)); x.addField(new Field("arrayfield", new ArrayDataType(DataType.STRING))); x.addField(new Field("weightedsetfield", new 
WeightedSetDataType(DataType.STRING, true, true))); x.addField(new Field("mapfield", new MapDataType(DataType.STRING, DataType.STRING))); x.addField(new Field("tensorfield", new TensorDataType(new TensorType.Builder().indexed("x").build()))); types.registerDocumentType(x); } } @After public void tearDown() throws Exception { types = null; parserFactory = null; } private JsonReader createReader(String jsonInput) { InputStream input = new ByteArrayInputStream(Utf8.toBytes(jsonInput)); return new JsonReader(types, input, parserFactory); } @Test public void readSingleDocumentPut() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:smoke::doc1", "fields": { "something": "smoketest", "flag": true, "nalle": "bamse" } } """); smokeTestDoc(doc); } @Test public final void readSingleDocumentUpdate() throws IOException { DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:smoke::whee", "fields": { "something": { "assign": "orOther" } } } """); FieldUpdate f = doc.getFieldUpdate("something"); assertEquals(1, f.size()); assertTrue(f.getValueUpdate(0) instanceof AssignValueUpdate); assertEquals(new StringFieldValue("orOther"), f.getValueUpdate(0).getValue()); } @Test public void readClearField() throws IOException { DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:smoke::whee", "fields": { "int1": { "assign": null } } } """); FieldUpdate f = doc.getFieldUpdate("int1"); assertEquals(1, f.size()); assertTrue(f.getValueUpdate(0) instanceof ClearValueUpdate); assertNull(f.getValueUpdate(0).getValue()); } @Test public void smokeTest() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:smoke::doc1", "fields": { "something": "smoketest", "flag": true, "nalle": "bamse" } } """); smokeTestDoc(doc); } @Test public void docIdLookaheadTest() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:smoke::doc1", "fields": { "something": "smoketest", "flag": true, "nalle": "bamse" } } """); smokeTestDoc(doc); 
} @Test public void emptyDocTest() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:smoke::whee", "fields": { } }"""); assertEquals(new Document(types.getDocumentType("smoke"), new DocumentId("id:unittest:smoke::whee")), doc); } @Test public void testStruct() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:mirrors::whee", "fields": { "skuggsjaa": { "sandra": "person", "cloud": "another person" } } } """); FieldValue f = doc.getFieldValue(doc.getField("skuggsjaa")); assertSame(Struct.class, f.getClass()); Struct s = (Struct) f; assertEquals("person", ((StringFieldValue) s.getFieldValue("sandra")).getString()); } private DocumentUpdate parseUpdate(String json) throws IOException { InputStream rawDoc = new ByteArrayInputStream(Utf8.toBytes(json)); JsonReader r = new JsonReader(types, rawDoc, parserFactory); DocumentParseInfo parseInfo = r.parseDocument().get(); DocumentType docType = r.readDocumentType(parseInfo.documentId); DocumentUpdate update = new DocumentUpdate(docType, parseInfo.documentId); new VespaJsonDocumentReader(false).readUpdate(parseInfo.fieldsBuffer, update); return update; } @Test public void testStructUpdate() throws IOException { DocumentUpdate put = parseUpdate(""" { "update": "id:unittest:mirrors:g=test:whee", "create": true, "fields": { "skuggsjaa": { "assign": { "sandra": "person", "cloud": "another person" } } } } """); assertEquals(1, put.fieldUpdates().size()); FieldUpdate fu = put.fieldUpdates().iterator().next(); assertEquals(1, fu.getValueUpdates().size()); ValueUpdate vu = fu.getValueUpdate(0); assertTrue(vu instanceof AssignValueUpdate); AssignValueUpdate avu = (AssignValueUpdate) vu; assertTrue(avu.getValue() instanceof Struct); Struct s = (Struct) avu.getValue(); assertEquals(2, s.getFieldCount()); assertEquals(new StringFieldValue("person"), s.getFieldValue(s.getField("sandra"))); GrowableByteBuffer buf = new GrowableByteBuffer(); DocumentSerializer serializer = 
DocumentSerializerFactory.create6(buf); put.serialize(serializer); assertEquals(107, buf.position()); } @Test public final void testEmptyStructUpdate() throws IOException { DocumentUpdate put = parseUpdate(""" { "update": "id:unittest:mirrors:g=test:whee", "create": true, "fields": { "skuggsjaa": { "assign": { } } } } """); assertEquals(1, put.fieldUpdates().size()); FieldUpdate fu = put.fieldUpdates().iterator().next(); assertEquals(1, fu.getValueUpdates().size()); ValueUpdate vu = fu.getValueUpdate(0); assertTrue(vu instanceof AssignValueUpdate); AssignValueUpdate avu = (AssignValueUpdate) vu; assertTrue(avu.getValue() instanceof Struct); Struct s = (Struct) avu.getValue(); assertEquals(0, s.getFieldCount()); GrowableByteBuffer buf = new GrowableByteBuffer(); DocumentSerializer serializer = DocumentSerializerFactory.create6(buf); put.serialize(serializer); assertEquals(69, buf.position()); } @Test public void testUpdateArray() throws IOException { DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:testarray::whee", "fields": { "actualarray": { "add": [ "person", "another person" ] } } } """); checkSimpleArrayAdd(doc); } @Test public void testUpdateWeighted() throws IOException { DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:testset::whee", "fields": { "actualset": { "add": { "person": 37, "another person": 41 } } } } """); Map<String, Integer> weights = new HashMap<>(); FieldUpdate x = doc.getFieldUpdate("actualset"); for (ValueUpdate<?> v : x.getValueUpdates()) { AddValueUpdate adder = (AddValueUpdate) v; final String s = ((StringFieldValue) adder.getValue()).getString(); weights.put(s, adder.getWeight()); } assertEquals(2, weights.size()); final String o = "person"; final String o2 = "another person"; assertTrue(weights.containsKey(o)); assertTrue(weights.containsKey(o2)); assertEquals(Integer.valueOf(37), weights.get(o)); assertEquals(Integer.valueOf(41), weights.get(o2)); } @Test public void testUpdateMatch() throws IOException { 
DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:testset::whee", "fields": { "actualset": { "match": { "element": "person", "increment": 13 } } } } """); DocumentUpdate otherDoc = parseUpdate(""" { "update": "id:unittest:testset::whee", "fields": { "actualset": { "match": { "increment": 13, "element": "person" } } } }"""); assertEquals(doc, otherDoc); Map<String, Tuple2<Number, String>> matches = new HashMap<>(); FieldUpdate x = doc.getFieldUpdate("actualset"); for (ValueUpdate<?> v : x.getValueUpdates()) { MapValueUpdate adder = (MapValueUpdate) v; final String key = ((StringFieldValue) adder.getValue()) .getString(); String op = ((ArithmeticValueUpdate) adder.getUpdate()) .getOperator().toString(); Number n = ((ArithmeticValueUpdate) adder.getUpdate()).getOperand(); matches.put(key, new Tuple2<>(n, op)); } assertEquals(1, matches.size()); final String o = "person"; assertEquals("ADD", matches.get(o).second); assertEquals(Double.valueOf(13), matches.get(o).first); } @SuppressWarnings({ "cast", "unchecked", "rawtypes" }) @Test public void testArithmeticOperators() throws IOException { Tuple2[] operations = new Tuple2[] { new Tuple2<>(UPDATE_DECREMENT, ArithmeticValueUpdate.Operator.SUB), new Tuple2<>(UPDATE_DIVIDE, ArithmeticValueUpdate.Operator.DIV), new Tuple2<>(UPDATE_INCREMENT, ArithmeticValueUpdate.Operator.ADD), new Tuple2<>(UPDATE_MULTIPLY, ArithmeticValueUpdate.Operator.MUL) }; for (Tuple2<String, Operator> operator : operations) { DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:testset::whee", "fields": { "actualset": { "match": { "element": "person", "%s": 13 } } } } """.formatted(operator.first)); Map<String, Tuple2<Number, Operator>> matches = new HashMap<>(); FieldUpdate x = doc.getFieldUpdate("actualset"); for (ValueUpdate v : x.getValueUpdates()) { MapValueUpdate adder = (MapValueUpdate) v; final String key = ((StringFieldValue) adder.getValue()) .getString(); Operator op = ((ArithmeticValueUpdate) adder 
.getUpdate()).getOperator(); Number n = ((ArithmeticValueUpdate) adder.getUpdate()) .getOperand(); matches.put(key, new Tuple2<>(n, op)); } assertEquals(1, matches.size()); final String o = "person"; assertSame(operator.second, matches.get(o).second); assertEquals(Double.valueOf(13), matches.get(o).first); } } @SuppressWarnings("rawtypes") @Test public void testArrayIndexing() throws IOException { DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:testarray::whee", "fields": { "actualarray": { "match": { "element": 3, "assign": "nalle" } } } } """); Map<Number, String> matches = new HashMap<>(); FieldUpdate x = doc.getFieldUpdate("actualarray"); for (ValueUpdate v : x.getValueUpdates()) { MapValueUpdate adder = (MapValueUpdate) v; final Number key = ((IntegerFieldValue) adder.getValue()) .getNumber(); String op = ((StringFieldValue) adder.getUpdate() .getValue()).getString(); matches.put(key, op); } assertEquals(1, matches.size()); Number n = Integer.valueOf(3); assertEquals("nalle", matches.get(n)); } @Test public void testDocumentRemove() { JsonReader r = createReader(inputJson("{'remove': 'id:unittest:smoke::whee'}")); DocumentType docType = r.readDocumentType(new DocumentId("id:unittest:smoke::whee")); assertEquals("smoke", docType.getName()); } private Document docFromJson(String json) throws IOException { JsonReader r = createReader(json); DocumentParseInfo parseInfo = r.parseDocument().get(); DocumentType docType = r.readDocumentType(parseInfo.documentId); DocumentPut put = new DocumentPut(new Document(docType, parseInfo.documentId)); new VespaJsonDocumentReader(false).readPut(parseInfo.fieldsBuffer, put); return put.getDocument(); } @Test public void testWeightedSet() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testset::whee", "fields": { "actualset": { "nalle": 2, "tralle": 7 } } } """); FieldValue f = doc.getFieldValue(doc.getField("actualset")); assertSame(WeightedSet.class, f.getClass()); WeightedSet<?> w = 
(WeightedSet<?>) f; assertEquals(2, w.size()); assertEquals(Integer.valueOf(2), w.get(new StringFieldValue("nalle"))); assertEquals(Integer.valueOf(7), w.get(new StringFieldValue("tralle"))); } @Test public void testArray() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testarray::whee", "fields": { "actualarray": [ "nalle", "tralle" ] } } """); FieldValue f = doc.getFieldValue(doc.getField("actualarray")); assertSame(Array.class, f.getClass()); Array<?> a = (Array<?>) f; assertEquals(2, a.size()); assertEquals(new StringFieldValue("nalle"), a.get(0)); assertEquals(new StringFieldValue("tralle"), a.get(1)); } @Test public void testMap() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testmap::whee", "fields": { "actualmap": { "nalle": "kalle", "tralle": "skalle" } } } """); FieldValue f = doc.getFieldValue(doc.getField("actualmap")); assertSame(MapFieldValue.class, f.getClass()); MapFieldValue<?, ?> m = (MapFieldValue<?, ?>) f; assertEquals(2, m.size()); assertEquals(new StringFieldValue("kalle"), m.get(new StringFieldValue("nalle"))); assertEquals(new StringFieldValue("skalle"), m.get(new StringFieldValue("tralle"))); } @Test public void testOldMap() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testmap::whee", "fields": { "actualmap": [ { "key": "nalle", "value": "kalle" }, { "key": "tralle", "value": "skalle" } ] } } """); FieldValue f = doc.getFieldValue(doc.getField("actualmap")); assertSame(MapFieldValue.class, f.getClass()); MapFieldValue<?, ?> m = (MapFieldValue<?, ?>) f; assertEquals(2, m.size()); assertEquals(new StringFieldValue("kalle"), m.get(new StringFieldValue("nalle"))); assertEquals(new StringFieldValue("skalle"), m.get(new StringFieldValue("tralle"))); } @Test public void testPositionPositive() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testsinglepos::bamf", "fields": { "singlepos": "N63.429722;E10.393333" } } """); FieldValue f = 
doc.getFieldValue(doc.getField("singlepos")); assertSame(Struct.class, f.getClass()); assertEquals(10393333, PositionDataType.getXValue(f).getInteger()); assertEquals(63429722, PositionDataType.getYValue(f).getInteger()); } @Test public void testPositionOld() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testsinglepos::bamf", "fields": { "singlepos": { "x": 10393333, "y": 63429722 } } } """); FieldValue f = doc.getFieldValue(doc.getField("singlepos")); assertSame(Struct.class, f.getClass()); assertEquals(10393333, PositionDataType.getXValue(f).getInteger()); assertEquals(63429722, PositionDataType.getYValue(f).getInteger()); } @Test public void testGeoPosition() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testsinglepos::bamf", "fields": { "singlepos": { "lat": 63.429722, "lng": 10.393333 } } } """); FieldValue f = doc.getFieldValue(doc.getField("singlepos")); assertSame(Struct.class, f.getClass()); assertEquals(10393333, PositionDataType.getXValue(f).getInteger()); assertEquals(63429722, PositionDataType.getYValue(f).getInteger()); } @Test public void testGeoPositionNoAbbreviations() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testsinglepos::bamf", "fields": { "singlepos": { "latitude": 63.429722, "longitude": 10.393333 } } } """); FieldValue f = doc.getFieldValue(doc.getField("singlepos")); assertSame(Struct.class, f.getClass()); assertEquals(10393333, PositionDataType.getXValue(f).getInteger()); assertEquals(63429722, PositionDataType.getYValue(f).getInteger()); } @Test public void testPositionGeoPos() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testsinglepos::bamf", "fields": { "geopos": "N63.429722;E10.393333" } } """); FieldValue f = doc.getFieldValue(doc.getField("geopos")); assertSame(Struct.class, f.getClass()); assertEquals(10393333, PositionDataType.getXValue(f).getInteger()); assertEquals(63429722, 
PositionDataType.getYValue(f).getInteger()); assertEquals(f.getDataType(), PositionDataType.INSTANCE); } @Test public void testPositionOldGeoPos() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testsinglepos::bamf", "fields": { "geopos": { "x": 10393333, "y": 63429722 } } } """); FieldValue f = doc.getFieldValue(doc.getField("geopos")); assertSame(Struct.class, f.getClass()); assertEquals(10393333, PositionDataType.getXValue(f).getInteger()); assertEquals(63429722, PositionDataType.getYValue(f).getInteger()); assertEquals(f.getDataType(), PositionDataType.INSTANCE); } @Test public void testGeoPositionGeoPos() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testsinglepos::bamf", "fields": { "geopos": { "lat": 63.429722, "lng": 10.393333 } } } """); FieldValue f = doc.getFieldValue(doc.getField("geopos")); assertSame(Struct.class, f.getClass()); assertEquals(10393333, PositionDataType.getXValue(f).getInteger()); assertEquals(63429722, PositionDataType.getYValue(f).getInteger()); assertEquals(f.getDataType(), PositionDataType.INSTANCE); assertEquals(PositionDataType.INSTANCE, f.getDataType()); } @Test public void testPositionNegative() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testsinglepos::bamf", "fields": { "singlepos": "W46.63;S23.55" } } """); FieldValue f = doc.getFieldValue(doc.getField("singlepos")); assertSame(Struct.class, f.getClass()); assertEquals(-46630000, PositionDataType.getXValue(f).getInteger()); assertEquals(-23550000, PositionDataType.getYValue(f).getInteger()); } @Test public void testRaw() throws IOException { String base64 = new String(new JsonStringEncoder().quoteAsString( Base64.getEncoder().withoutPadding().encodeToString(Utf8.toBytes("smoketest")))); String s = fieldStringFromBase64RawContent(base64); assertEquals("smoketest", s); } @Test public void can_read_legacy_chunked_base64_raw_field_encoding() throws IOException { String expected = "this is a string 
with an impressive length. it's long enough to reach the end of the line, wow!"; String base64withDelims = "dGhpcyBpcyBhIHN0cmluZyB3aXRoIGFuIGltcHJlc3NpdmUgbGVuZ3RoLiBpdCdzIGxvbmcgZW5v\\r\\n" + "dWdoIHRvIHJlYWNoIHRoZSBlbmQgb2YgdGhlIGxpbmUsIHdvdyE=\\r\\n"; assertEquals(expected, fieldStringFromBase64RawContent(base64withDelims)); } private String fieldStringFromBase64RawContent(String base64data) throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testraw::whee", "fields": { "actualraw": "%s" } } """.formatted(base64data)); FieldValue f = doc.getFieldValue(doc.getField("actualraw")); assertSame(Raw.class, f.getClass()); Raw s = (Raw) f; return Utf8.toString(s.getByteBuffer()); } @Test public void testMapStringToArrayOfInt() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testMapStringToArrayOfInt::whee", "fields": { "actualMapStringToArrayOfInt": { "bamse": [1, 2, 3] } } } """); FieldValue f = doc.getFieldValue("actualMapStringToArrayOfInt"); assertSame(MapFieldValue.class, f.getClass()); MapFieldValue<?, ?> m = (MapFieldValue<?, ?>) f; Array<?> a = (Array<?>) m.get(new StringFieldValue("bamse")); assertEquals(3, a.size()); assertEquals(new IntegerFieldValue(1), a.get(0)); assertEquals(new IntegerFieldValue(2), a.get(1)); assertEquals(new IntegerFieldValue(3), a.get(2)); } @Test @Test public void testAssignToString() throws IOException { DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:smoke::whee", "fields": { "something": { "assign": "orOther" } } } """); FieldUpdate f = doc.getFieldUpdate("something"); assertEquals(1, f.size()); AssignValueUpdate a = (AssignValueUpdate) f.getValueUpdate(0); assertEquals(new StringFieldValue("orOther"), a.getValue()); } @Test public void testNestedArrayMatch() throws IOException { DocumentUpdate nested = parseUpdate(""" { "update": "id:unittest:testArrayOfArrayOfInt::whee", "fields": { "arrayOfArrayOfInt": { "match": { "element": 1, "match": { "element": 2, "assign": 3 } 
} } } } """); DocumentUpdate equivalent = parseUpdate(""" { "update": "id:unittest:testArrayOfArrayOfInt::whee", "fields": { "arrayOfArrayOfInt": { "match": { "match": { "assign": 3, "element": 2 }, "element": 1 } } } } """); assertEquals(nested, equivalent); assertEquals(1, nested.fieldUpdates().size()); FieldUpdate fu = nested.fieldUpdates().iterator().next(); assertEquals(1, fu.getValueUpdates().size()); MapValueUpdate mvu = (MapValueUpdate) fu.getValueUpdate(0); assertEquals(new IntegerFieldValue(1), mvu.getValue()); MapValueUpdate nvu = (MapValueUpdate) mvu.getUpdate(); assertEquals(new IntegerFieldValue(2), nvu.getValue()); AssignValueUpdate avu = (AssignValueUpdate) nvu.getUpdate(); assertEquals(new IntegerFieldValue(3), avu.getValue()); Document doc = docFromJson(""" { "put": "id:unittest:testArrayOfArrayOfInt::whee", "fields": { "arrayOfArrayOfInt": [ [1, 2, 3], [4, 5, 6] ] } } """); nested.applyTo(doc); Document expected = docFromJson(""" { "put": "id:unittest:testArrayOfArrayOfInt::whee", "fields": { "arrayOfArrayOfInt": [ [1, 2, 3], [4, 5, 3] ] } } """); assertEquals(expected, doc); } @Test public void testMatchCannotUpdateNestedFields() { assertEquals("Field type Map<string,Array<int>> not supported.", assertThrows(UnsupportedOperationException.class, () -> parseUpdate(""" { "update": "id:unittest:testMapStringToArrayOfInt::whee", "fields": { "actualMapStringToArrayOfInt": { "match": { "element": "bamse", "match": { "element": 1, "assign": 4 } } } } } """)).getMessage()); } @Test public void testMatchCannotAssignToNestedMap() { assertEquals("Field type Map<string,Array<int>> not supported.", assertThrows(UnsupportedOperationException.class, () -> parseUpdate(""" { "update": "id:unittest:testMapStringToArrayOfInt::whee", "fields": { "actualMapStringToArrayOfInt": { "match": { "element": "bamse", "assign": [1, 3, 4] } } } } """)).getMessage()); } @Test public void testMatchCannotAssignToMap() { assertEquals("Field type Map<string,string> not supported.", 
assertThrows(UnsupportedOperationException.class, () -> parseUpdate(""" { "update": "id:unittest:testmap::whee", "fields": { "actualmap": { "match": { "element": "bamse", "assign": "bar" } } } } """)).getMessage()); } @Test public void testAssignInsideArrayInMap() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testMapStringToArrayOfInt::whee", "fields": { "actualMapStringToArrayOfInt": { "bamse": [1, 2, 3] } } }"""); assertEquals(2, ((MapFieldValue<StringFieldValue, Array<IntegerFieldValue>>) doc.getFieldValue("actualMapStringToArrayOfInt")) .get(StringFieldValue.getFactory().create("bamse")).get(1).getInteger()); DocumentUpdate update = parseUpdate(""" { "update": "id:unittest:testMapStringToArrayOfInt::whee", "fields": { "actualMapStringToArrayOfInt{bamse}[1]": { "assign": 4 } } } """); assertEquals(1, update.fieldPathUpdates().size()); update.applyTo(doc); assertEquals(4, ((MapFieldValue<StringFieldValue, Array<IntegerFieldValue>>) doc.getFieldValue("actualMapStringToArrayOfInt")) .get(StringFieldValue.getFactory().create("bamse")).get(1).getInteger()); } @Test public void testAssignToArray() throws IOException { DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:testMapStringToArrayOfInt::whee", "fields": { "actualMapStringToArrayOfInt": { "assign": { "bamse": [1, 2, 3] } } } } """); FieldUpdate f = doc.getFieldUpdate("actualMapStringToArrayOfInt"); assertEquals(1, f.size()); AssignValueUpdate assign = (AssignValueUpdate) f.getValueUpdate(0); MapFieldValue<?, ?> m = (MapFieldValue<?, ?>) assign.getValue(); Array<?> a = (Array<?>) m.get(new StringFieldValue("bamse")); assertEquals(3, a.size()); assertEquals(new IntegerFieldValue(1), a.get(0)); assertEquals(new IntegerFieldValue(2), a.get(1)); assertEquals(new IntegerFieldValue(3), a.get(2)); } @Test public void testOldAssignToArray() throws IOException { DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:testMapStringToArrayOfInt::whee", "fields": { 
"actualMapStringToArrayOfInt": { "assign": [ { "key": "bamse", "value": [1, 2, 3] } ] } } } """); FieldUpdate f = doc.getFieldUpdate("actualMapStringToArrayOfInt"); assertEquals(1, f.size()); AssignValueUpdate assign = (AssignValueUpdate) f.getValueUpdate(0); MapFieldValue<?, ?> m = (MapFieldValue<?, ?>) assign.getValue(); Array<?> a = (Array<?>) m.get(new StringFieldValue("bamse")); assertEquals(3, a.size()); assertEquals(new IntegerFieldValue(1), a.get(0)); assertEquals(new IntegerFieldValue(2), a.get(1)); assertEquals(new IntegerFieldValue(3), a.get(2)); } @Test public void testAssignToWeightedSet() throws IOException { DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:testset::whee", "fields": { "actualset": { "assign": { "person": 37, "another person": 41 } } } } """); FieldUpdate x = doc.getFieldUpdate("actualset"); assertEquals(1, x.size()); AssignValueUpdate assign = (AssignValueUpdate) x.getValueUpdate(0); WeightedSet<?> w = (WeightedSet<?>) assign.getValue(); assertEquals(2, w.size()); assertEquals(Integer.valueOf(37), w.get(new StringFieldValue("person"))); assertEquals(Integer.valueOf(41), w.get(new StringFieldValue("another person"))); } @Test public void testCompleteFeed() { JsonReader r = createReader(""" [ { "put": "id:unittest:smoke::whee", "fields": { "something": "smoketest", "flag": true, "nalle": "bamse" } }, { "update": "id:unittest:testarray::whee", "fields": { "actualarray": { "add": [ "person", "another person" ] } } }, { "remove": "id:unittest:smoke::whee" } ] """); controlBasicFeed(r); } @Test public void testCompleteFeedWithCreateAndCondition() { JsonReader r = createReader(""" [ { "put": "id:unittest:smoke::whee", "fields": { "something": "smoketest", "flag": true, "nalle": "bamse" } }, { "condition":"bla", "update": "id:unittest:testarray::whee", "create":true, "fields": { "actualarray": { "add": [ "person", "another person" ] } } }, { "remove": "id:unittest:smoke::whee" } ] """); DocumentOperation d = r.next(); Document 
doc = ((DocumentPut) d).getDocument(); smokeTestDoc(doc); d = r.next(); DocumentUpdate update = (DocumentUpdate) d; checkSimpleArrayAdd(update); assertTrue(update.getCreateIfNonExistent()); assertEquals("bla", update.getCondition().getSelection()); d = r.next(); DocumentRemove remove = (DocumentRemove) d; assertEquals("smoke", remove.getId().getDocType()); assertNull(r.next()); } @Test public void testUpdateWithConditionAndCreateInDifferentOrdering() { int documentsCreated = 106; List<String> parts = Arrays.asList( "\"condition\":\"bla\"", "\"update\": \"id:unittest:testarray::whee\"", " \"fields\": { " + "\"actualarray\": { \"add\": [" + " \"person\",\"another person\"]}}", " \"create\":true"); Random random = new Random(42); StringBuilder documents = new StringBuilder("["); for (int x = 0; x < documentsCreated; x++) { Collections.shuffle(parts, random); documents.append("{").append(Joiner.on(",").join(parts)).append("}"); if (x < documentsCreated -1) { documents.append(","); } } documents.append("]"); InputStream rawDoc = new ByteArrayInputStream(Utf8.toBytes(documents.toString())); JsonReader r = new JsonReader(types, rawDoc, parserFactory); for (int x = 0; x < documentsCreated; x++) { DocumentUpdate update = (DocumentUpdate) r.next(); checkSimpleArrayAdd(update); assertTrue(update.getCreateIfNonExistent()); assertEquals("bla", update.getCondition().getSelection()); } assertNull(r.next()); } @Test public void testCreateIfNonExistentInPut() { JsonReader r = createReader(""" [ { "create":true, "fields": { "something": "smoketest", "nalle": "bamse" }, "put": "id:unittest:smoke::whee" } ] """); var op = r.next(); var put = (DocumentPut) op; assertTrue(put.getCreateIfNonExistent()); } @Test public void testCompleteFeedWithIdAfterFields() { JsonReader r = createReader(""" [ { "fields": { "something": "smoketest", "flag": true, "nalle": "bamse" }, "put": "id:unittest:smoke::whee" }, { "fields": { "actualarray": { "add": [ "person", "another person" ] } }, "update": 
"id:unittest:testarray::whee" }, { "remove": "id:unittest:smoke::whee" } ] """); controlBasicFeed(r); } protected void controlBasicFeed(JsonReader r) { DocumentOperation d = r.next(); Document doc = ((DocumentPut) d).getDocument(); smokeTestDoc(doc); d = r.next(); DocumentUpdate update = (DocumentUpdate) d; checkSimpleArrayAdd(update); d = r.next(); DocumentRemove remove = (DocumentRemove) d; assertEquals("smoke", remove.getId().getDocType()); assertNull(r.next()); } @Test public void testCompleteFeedWithEmptyDoc() { JsonReader r = createReader(""" [ { "put": "id:unittest:smoke::whee", "fields": {} }, { "update": "id:unittest:testarray::whee", "fields": {} }, { "remove": "id:unittest:smoke::whee" } ] """); DocumentOperation d = r.next(); Document doc = ((DocumentPut) d).getDocument(); assertEquals("smoke", doc.getId().getDocType()); d = r.next(); DocumentUpdate update = (DocumentUpdate) d; assertEquals("testarray", update.getId().getDocType()); d = r.next(); DocumentRemove remove = (DocumentRemove) d; assertEquals("smoke", remove.getId().getDocType()); assertNull(r.next()); } private void checkSimpleArrayAdd(DocumentUpdate update) { Set<String> toAdd = new HashSet<>(); FieldUpdate x = update.getFieldUpdate("actualarray"); for (ValueUpdate<?> v : x.getValueUpdates()) { AddValueUpdate adder = (AddValueUpdate) v; toAdd.add(((StringFieldValue) adder.getValue()).getString()); } assertEquals(2, toAdd.size()); assertTrue(toAdd.contains("person")); assertTrue(toAdd.contains("another person")); } private void smokeTestDoc(Document doc) { FieldValue boolField = doc.getFieldValue(doc.getField("flag")); assertSame(BoolFieldValue.class, boolField.getClass()); assertTrue((Boolean)boolField.getWrappedValue()); FieldValue stringField = doc.getFieldValue(doc.getField("nalle")); assertSame(StringFieldValue.class, stringField.getClass()); assertEquals("bamse", ((StringFieldValue) stringField).getString()); } @Test public void nonExistingFieldCausesException() throws IOException { 
Exception expected = assertThrows(IllegalArgumentException.class, () -> docFromJson(""" { "put": "id:unittest:smoke::whee", "fields": { "smething": "smoketest", "nalle": "bamse" } } """)); assertTrue(expected.getMessage().startsWith("No field 'smething' in the structure of type 'smoke'")); } @Test public void nonExistingFieldsCanBeIgnoredInPut() throws IOException { JsonReader r = createReader(""" { "put": "id:unittest:smoke::doc1", "fields": { "nonexisting1": "ignored value", "field1": "value1", "nonexisting2": { "blocks": { "a": [2.0, 3.0], "b": [4.0, 5.0] } }, "field2": "value2", "nonexisting3": { "cells": [ { "address": { "x": "x1" }, "value": 1.0 } ] }, "tensor1": { "cells": { "x1": 1.0 } }, "nonexisting4": "ignored value" } } """); DocumentParseInfo parseInfo = r.parseDocument().get(); DocumentType docType = r.readDocumentType(parseInfo.documentId); DocumentPut put = new DocumentPut(new Document(docType, parseInfo.documentId)); boolean fullyApplied = new VespaJsonDocumentReader(true).readPut(parseInfo.fieldsBuffer, put); assertFalse(fullyApplied); assertNull(put.getDocument().getField("nonexisting1")); assertEquals("value1", put.getDocument().getFieldValue("field1").toString()); assertNull(put.getDocument().getField("nonexisting2")); assertEquals("value2", put.getDocument().getFieldValue("field2").toString()); assertNull(put.getDocument().getField("nonexisting3")); assertEquals(Tensor.from("tensor(x{}):{{x:x1}:1.0}"), put.getDocument().getFieldValue("tensor1").getWrappedValue()); assertNull(put.getDocument().getField("nonexisting4")); } @Test public void nonExistingFieldsCanBeIgnoredInUpdate() throws IOException{ JsonReader r = createReader(""" { "update": "id:unittest:smoke::doc1", "fields": { "nonexisting1": { "assign": "ignored value" }, "field1": { "assign": "value1" }, "nonexisting2": { "assign": { "blocks": { "a":[2.0,3.0], "b":[4.0,5.0] } } }, "field2": { "assign": "value2" }, "nonexisting3": { "assign" : { "cells": [{"address": {"x": "x1"}, "value": 
1.0}] } }, "tensor1": {"assign": { "cells": {"x1": 1.0} } }, "nonexisting4": { "assign": "ignored value" } } } """); DocumentParseInfo parseInfo = r.parseDocument().get(); DocumentType docType = r.readDocumentType(parseInfo.documentId); DocumentUpdate update = new DocumentUpdate(docType, parseInfo.documentId); boolean fullyApplied = new VespaJsonDocumentReader(true).readUpdate(parseInfo.fieldsBuffer, update); assertFalse(fullyApplied); assertNull(update.getFieldUpdate("nonexisting1")); assertEquals("value1", update.getFieldUpdate("field1").getValueUpdates().get(0).getValue().getWrappedValue().toString()); assertNull(update.getFieldUpdate("nonexisting2")); assertEquals("value2", update.getFieldUpdate("field2").getValueUpdates().get(0).getValue().getWrappedValue().toString()); assertNull(update.getFieldUpdate("nonexisting3")); assertEquals(Tensor.from("tensor(x{}):{{x:x1}:1.0}"), update.getFieldUpdate("tensor1").getValueUpdates().get(0).getValue().getWrappedValue()); assertNull(update.getFieldUpdate("nonexisting4")); } @Test public void feedWithBasicErrorTest() { JsonReader r = createReader(""" [ { "put": "id:test:smoke::0", "fields": { "something": "foo" } }, { "put": "id:test:smoke::1", "fields": { "something": "foo" } }, { "put": "id:test:smoke::2", "fields": { "something": "foo" } }, ]"""); assertTrue(assertThrows(RuntimeException.class, () -> { while (r.next() != null); }) .getMessage().contains("JsonParseException")); } @Test public void idAsAliasForPutTest() throws IOException{ JsonReader r = createReader(""" { "id": "id:unittest:smoke::doc1", "fields": { "something": "smoketest", "flag": true, "nalle": "bamse" } } """); DocumentParseInfo parseInfo = r.parseDocument().get(); DocumentType docType = r.readDocumentType(parseInfo.documentId); DocumentPut put = new DocumentPut(new Document(docType, parseInfo.documentId)); boolean fullyApplied = new VespaJsonDocumentReader(false).readPut(parseInfo.fieldsBuffer, put); assertTrue(fullyApplied); 
smokeTestDoc(put.getDocument()); } private void testFeedWithTestAndSetCondition(String jsonDoc) { ByteArrayInputStream parseInfoDoc = new ByteArrayInputStream(Utf8.toBytes(jsonDoc)); JsonReader reader = new JsonReader(types, parseInfoDoc, parserFactory); int NUM_OPERATIONS_IN_FEED = 3; for (int i = 0; i < NUM_OPERATIONS_IN_FEED; i++) { DocumentOperation operation = reader.next(); assertTrue("A test and set condition should be present", operation.getCondition().isPresent()); assertEquals("DocumentOperation's test and set condition should be equal to the one in the JSON feed", "smoke.something == \"smoketest\"", operation.getCondition().getSelection()); } assertNull(reader.next()); } @Test public void testFeedWithTestAndSetConditionOrderingOne() { testFeedWithTestAndSetCondition(""" [ { "put": "id:unittest:smoke::whee", "condition": "smoke.something == \\"smoketest\\"", "fields": { "something": "smoketest", "nalle": "bamse" } }, { "update": "id:unittest:testarray::whee", "condition": "smoke.something == \\"smoketest\\"", "fields": { "actualarray": { "add": [ "person", "another person" ] } } }, { "remove": "id:unittest:smoke::whee", "condition": "smoke.something == \\"smoketest\\"" } ] """); } @Test public void testFeedWithTestAndSetConditionOrderingTwo() { testFeedWithTestAndSetCondition(""" [ { "condition": "smoke.something == \\"smoketest\\"", "put": "id:unittest:smoke::whee", "fields": { "something": "smoketest", "nalle": "bamse" } }, { "condition": "smoke.something == \\"smoketest\\"", "update": "id:unittest:testarray::whee", "fields": { "actualarray": { "add": [ "person", "another person" ] } } }, { "condition": "smoke.something == \\"smoketest\\"", "remove": "id:unittest:smoke::whee" } ] """); } @Test public void testFeedWithTestAndSetConditionOrderingThree() { testFeedWithTestAndSetCondition(""" [ { "put": "id:unittest:smoke::whee", "fields": { "something": "smoketest", "nalle": "bamse" }, "condition": "smoke.something == \\"smoketest\\"" }, { "update": 
"id:unittest:testarray::whee", "fields": { "actualarray": { "add": [ "person", "another person" ] } }, "condition": "smoke.something == \\"smoketest\\"" }, { "remove": "id:unittest:smoke::whee", "condition": "smoke.something == \\"smoketest\\"" } ] """); } @Test(expected = IllegalArgumentException.class) public void testInvalidFieldAfterFieldsFieldShouldFailParse() { String jsonData = """ [ { "put": "id:unittest:smoke::whee", "fields": { "something": "smoketest", "nalle": "bamse" }, "bjarne": "stroustrup" } ]"""; new JsonReader(types, jsonToInputStream(jsonData), parserFactory).next(); } @Test(expected = IllegalArgumentException.class) public void testInvalidFieldBeforeFieldsFieldShouldFailParse() { String jsonData = """ [ { "update": "id:unittest:testarray::whee", "what is this": "nothing to see here", "fields": { "actualarray": { "add": [ "person", "another person" ] } } } ]"""; new JsonReader(types, jsonToInputStream(jsonData), parserFactory).next(); } @Test(expected = IllegalArgumentException.class) public void testInvalidFieldWithoutFieldsFieldShouldFailParse() { String jsonData = """ [ { "remove": "id:unittest:smoke::whee", "what is love": "baby, do not hurt me... 
much } ]"""; new JsonReader(types, jsonToInputStream(jsonData), parserFactory).next(); } @Test public void testMissingOperation() { try { String jsonData = """ [ { "fields": { "actualarray": { "add": [ "person", "another person" ] } } } ]"""; new JsonReader(types, jsonToInputStream(jsonData), parserFactory).next(); fail("Expected exception"); } catch (IllegalArgumentException e) { assertEquals("Missing a document operation ('put', 'update' or 'remove')", e.getMessage()); } } @Test public void testMissingFieldsMapInPut() { try { String jsonData = """ [ { "put": "id:unittest:smoke::whee" } ]"""; new JsonReader(types, jsonToInputStream(jsonData), parserFactory).next(); fail("Expected exception"); } catch (IllegalArgumentException e) { assertEquals("put of document id:unittest:smoke::whee is missing a 'fields' map", e.getMessage()); } } @Test public void testMissingFieldsMapInUpdate() { try { String jsonData = """ [ { "update": "id:unittest:smoke::whee" } ]"""; new JsonReader(types, jsonToInputStream(jsonData), parserFactory).next(); fail("Expected exception"); } catch (IllegalArgumentException e) { assertEquals("Update of document id:unittest:smoke::whee is missing a 'fields' map", e.getMessage()); } } @Test public void testNullValues() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testnull::doc1", "fields": { "intfield": null, "stringfield": null, "arrayfield": null, "weightedsetfield": null, "mapfield": null, "tensorfield": null } } """); assertFieldValueNull(doc, "intfield"); assertFieldValueNull(doc, "stringfield"); assertFieldValueNull(doc, "arrayfield"); assertFieldValueNull(doc, "weightedsetfield"); assertFieldValueNull(doc, "mapfield"); assertFieldValueNull(doc, "tensorfield"); } @Test(expected=JsonReaderException.class) public void testNullArrayElement() throws IOException { docFromJson(""" { "put": "id:unittest:testnull::doc1", "fields": { "arrayfield": [ null ] } } """); fail(); } private void assertFieldValueNull(Document doc, 
String fieldName) { Field field = doc.getField(fieldName); assertNotNull(field); FieldValue fieldValue = doc.getFieldValue(field); assertNull(fieldValue); } static ByteArrayInputStream jsonToInputStream(String json) { return new ByteArrayInputStream(Utf8.toBytes(json)); } @Test public void testParsingWithoutTensorField() { Document doc = createPutWithoutTensor().getDocument(); assertEquals("testtensor", doc.getId().getDocType()); assertEquals("id:unittest:testtensor::0", doc.getId().toString()); TensorFieldValue fieldValue = (TensorFieldValue)doc.getFieldValue(doc.getField("sparse_tensor")); assertNull(fieldValue); } @Test public void testParsingOfEmptyTensor() { assertSparseTensorField("tensor(x{},y{}):{}", createPutWithSparseTensor("{}")); } @Test public void testParsingOfTensorWithEmptyCells() { assertSparseTensorField("tensor(x{},y{}):{}", createPutWithSparseTensor(inputJson("{ 'cells': [] }"))); } @Test public void testDisallowedDenseTensorShortFormWithoutValues() { assertCreatePutFails(inputJson("{ 'values': [] }"), "dense_tensor", "The 'values' array does not contain any values"); assertCreatePutFails(inputJson("{ 'values': '' }"), "dense_tensor", "The 'values' string does not contain any values"); } @Test public void testDisallowedMixedTensorShortFormWithoutValues() { assertCreatePutFails(inputJson("{\"blocks\":{ \"a\": [] } }"), "mixed_tensor", "Expected 3 values, but got 0"); assertCreatePutFails(inputJson("{\"blocks\":[ {\"address\":{\"x\":\"a\"}, \"values\": [] } ] }"), "mixed_tensor", "Expected 3 values, but got 0"); } @Test public void testParsingOfSparseTensorWithCells() { Tensor tensor = assertSparseTensorField("{{x:a,y:b}:2.0,{x:c,y:b}:3.0}}", createPutWithSparseTensor(""" { "type": "tensor(x{},y{})", "cells": [ { "address": { "x": "a", "y": "b" }, "value": 2.0 }, { "address": { "x": "c", "y": "b" }, "value": 3.0 } ] } """)); assertTrue(tensor instanceof MappedTensor); }
// Review note: the "old map" test below must use the legacy array-of-{key,value} JSON form:
//   "actualMapStringToArrayOfInt": [ { "key": "bamse", "value": [1, 2, 3] } ]
public void testOldMapStringToArrayOfInt() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testMapStringToArrayOfInt::whee", "fields": { "actualMapStringToArrayOfInt": { "bamse": [1, 2, 3] } } } """); FieldValue f = doc.getFieldValue("actualMapStringToArrayOfInt"); assertSame(MapFieldValue.class, f.getClass()); MapFieldValue<?, ?> m = (MapFieldValue<?, ?>) f; Array<?> a = (Array<?>) m.get(new StringFieldValue("bamse")); assertEquals(3, a.size()); assertEquals(new IntegerFieldValue(1), a.get(0)); assertEquals(new IntegerFieldValue(2), a.get(1)); assertEquals(new IntegerFieldValue(3), a.get(2)); }
}
// Reads a map<string,array<int>> field given in the legacy JSON form: an array of
// { "key": ..., "value": ... } entries rather than a JSON object keyed by map key.
// NOTE(review): this file contains another definition with the same method name —
// TODO confirm which copy should remain; two same-signature methods in one class do not compile.
public void testOldMapStringToArrayOfInt() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testMapStringToArrayOfInt::whee", "fields": { "actualMapStringToArrayOfInt": [ { "key": "bamse", "value": [1, 2, 3] } ] } } """); FieldValue f = doc.getFieldValue("actualMapStringToArrayOfInt"); assertSame(MapFieldValue.class, f.getClass()); MapFieldValue<?, ?> m = (MapFieldValue<?, ?>) f; Array<?> a = (Array<?>) m.get(new StringFieldValue("bamse")); assertEquals(3, a.size()); assertEquals(new IntegerFieldValue(1), a.get(0)); assertEquals(new IntegerFieldValue(2), a.get(1)); assertEquals(new IntegerFieldValue(3), a.get(2)); }
class JsonReaderTestCase { private DocumentTypeManager types; private JsonFactory parserFactory; @Before public void setUp() throws Exception { parserFactory = new JsonFactory(); types = new DocumentTypeManager(); { DocumentType x = new DocumentType("smoke"); x.addField(new Field("something", DataType.STRING)); x.addField(new Field("nalle", DataType.STRING)); x.addField(new Field("field1", DataType.STRING)); x.addField(new Field("field2", DataType.STRING)); x.addField(new Field("int1", DataType.INT)); x.addField(new Field("flag", DataType.BOOL)); x.addField(new Field("tensor1", DataType.getTensor(TensorType.fromSpec("tensor(x{})")))); types.registerDocumentType(x); } { DocumentType x = new DocumentType("mirrors"); StructDataType woo = new StructDataType("woo"); woo.addField(new Field("sandra", DataType.STRING)); woo.addField(new Field("cloud", DataType.STRING)); x.addField(new Field("skuggsjaa", woo)); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testarray"); DataType d = new ArrayDataType(DataType.STRING); x.addField(new Field("actualarray", d)); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testset"); DataType d = new WeightedSetDataType(DataType.STRING, true, true); x.addField(new Field("actualset", d)); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testmap"); DataType d = new MapDataType(DataType.STRING, DataType.STRING); x.addField(new Field("actualmap", d)); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testraw"); DataType d = DataType.RAW; x.addField(new Field("actualraw", d)); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testMapStringToArrayOfInt"); DataType value = new ArrayDataType(DataType.INT); DataType d = new MapDataType(DataType.STRING, value); x.addField(new Field("actualMapStringToArrayOfInt", d)); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testArrayOfArrayOfInt"); DataType inner = new 
ArrayDataType(DataType.INT); DataType outer = new ArrayDataType(inner); x.addField(new Field("arrayOfArrayOfInt", outer)); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testsinglepos"); DataType d = PositionDataType.INSTANCE; x.addField(new Field("singlepos", d)); x.addField(new Field("geopos", new GeoPosType(8))); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testtensor"); x.addField(new Field("sparse_single_dimension_tensor", new TensorDataType(new TensorType.Builder().mapped("x").build()))); x.addField(new Field("sparse_tensor", new TensorDataType(new TensorType.Builder().mapped("x").mapped("y").build()))); x.addField(new Field("dense_tensor", new TensorDataType(new TensorType.Builder().indexed("x", 2).indexed("y", 3).build()))); x.addField(new Field("dense_int8_tensor", new TensorDataType(TensorType.fromSpec("tensor<int8>(x[2],y[3])")))); x.addField(new Field("dense_unbound_tensor", new TensorDataType(new TensorType.Builder().indexed("x").indexed("y").build()))); x.addField(new Field("mixed_tensor", new TensorDataType(new TensorType.Builder().mapped("x").indexed("y", 3).build()))); x.addField(new Field("mixed_bfloat16_tensor", new TensorDataType(TensorType.fromSpec("tensor<bfloat16>(x{},y[3])")))); x.addField(new Field("mixed_tensor_adv", new TensorDataType(new TensorType.Builder().mapped("x").mapped("y").mapped("z").indexed("a", 3).build()))); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testpredicate"); x.addField(new Field("boolean", DataType.PREDICATE)); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testint"); x.addField(new Field("integerfield", DataType.INT)); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testnull"); x.addField(new Field("intfield", DataType.INT)); x.addField(new Field("stringfield", DataType.STRING)); x.addField(new Field("arrayfield", new ArrayDataType(DataType.STRING))); x.addField(new Field("weightedsetfield", new 
WeightedSetDataType(DataType.STRING, true, true))); x.addField(new Field("mapfield", new MapDataType(DataType.STRING, DataType.STRING))); x.addField(new Field("tensorfield", new TensorDataType(new TensorType.Builder().indexed("x").build()))); types.registerDocumentType(x); } } @After public void tearDown() throws Exception { types = null; parserFactory = null; } private JsonReader createReader(String jsonInput) { InputStream input = new ByteArrayInputStream(Utf8.toBytes(jsonInput)); return new JsonReader(types, input, parserFactory); } @Test public void readSingleDocumentPut() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:smoke::doc1", "fields": { "something": "smoketest", "flag": true, "nalle": "bamse" } } """); smokeTestDoc(doc); } @Test public final void readSingleDocumentUpdate() throws IOException { DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:smoke::whee", "fields": { "something": { "assign": "orOther" } } } """); FieldUpdate f = doc.getFieldUpdate("something"); assertEquals(1, f.size()); assertTrue(f.getValueUpdate(0) instanceof AssignValueUpdate); assertEquals(new StringFieldValue("orOther"), f.getValueUpdate(0).getValue()); } @Test public void readClearField() throws IOException { DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:smoke::whee", "fields": { "int1": { "assign": null } } } """); FieldUpdate f = doc.getFieldUpdate("int1"); assertEquals(1, f.size()); assertTrue(f.getValueUpdate(0) instanceof ClearValueUpdate); assertNull(f.getValueUpdate(0).getValue()); } @Test public void smokeTest() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:smoke::doc1", "fields": { "something": "smoketest", "flag": true, "nalle": "bamse" } } """); smokeTestDoc(doc); } @Test public void docIdLookaheadTest() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:smoke::doc1", "fields": { "something": "smoketest", "flag": true, "nalle": "bamse" } } """); smokeTestDoc(doc); 
} @Test public void emptyDocTest() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:smoke::whee", "fields": { } }"""); assertEquals(new Document(types.getDocumentType("smoke"), new DocumentId("id:unittest:smoke::whee")), doc); } @Test public void testStruct() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:mirrors::whee", "fields": { "skuggsjaa": { "sandra": "person", "cloud": "another person" } } } """); FieldValue f = doc.getFieldValue(doc.getField("skuggsjaa")); assertSame(Struct.class, f.getClass()); Struct s = (Struct) f; assertEquals("person", ((StringFieldValue) s.getFieldValue("sandra")).getString()); } private DocumentUpdate parseUpdate(String json) throws IOException { InputStream rawDoc = new ByteArrayInputStream(Utf8.toBytes(json)); JsonReader r = new JsonReader(types, rawDoc, parserFactory); DocumentParseInfo parseInfo = r.parseDocument().get(); DocumentType docType = r.readDocumentType(parseInfo.documentId); DocumentUpdate update = new DocumentUpdate(docType, parseInfo.documentId); new VespaJsonDocumentReader(false).readUpdate(parseInfo.fieldsBuffer, update); return update; } @Test public void testStructUpdate() throws IOException { DocumentUpdate put = parseUpdate(""" { "update": "id:unittest:mirrors:g=test:whee", "create": true, "fields": { "skuggsjaa": { "assign": { "sandra": "person", "cloud": "another person" } } } } """); assertEquals(1, put.fieldUpdates().size()); FieldUpdate fu = put.fieldUpdates().iterator().next(); assertEquals(1, fu.getValueUpdates().size()); ValueUpdate vu = fu.getValueUpdate(0); assertTrue(vu instanceof AssignValueUpdate); AssignValueUpdate avu = (AssignValueUpdate) vu; assertTrue(avu.getValue() instanceof Struct); Struct s = (Struct) avu.getValue(); assertEquals(2, s.getFieldCount()); assertEquals(new StringFieldValue("person"), s.getFieldValue(s.getField("sandra"))); GrowableByteBuffer buf = new GrowableByteBuffer(); DocumentSerializer serializer = 
DocumentSerializerFactory.create6(buf); put.serialize(serializer); assertEquals(107, buf.position()); } @Test public final void testEmptyStructUpdate() throws IOException { DocumentUpdate put = parseUpdate(""" { "update": "id:unittest:mirrors:g=test:whee", "create": true, "fields": { "skuggsjaa": { "assign": { } } } } """); assertEquals(1, put.fieldUpdates().size()); FieldUpdate fu = put.fieldUpdates().iterator().next(); assertEquals(1, fu.getValueUpdates().size()); ValueUpdate vu = fu.getValueUpdate(0); assertTrue(vu instanceof AssignValueUpdate); AssignValueUpdate avu = (AssignValueUpdate) vu; assertTrue(avu.getValue() instanceof Struct); Struct s = (Struct) avu.getValue(); assertEquals(0, s.getFieldCount()); GrowableByteBuffer buf = new GrowableByteBuffer(); DocumentSerializer serializer = DocumentSerializerFactory.create6(buf); put.serialize(serializer); assertEquals(69, buf.position()); } @Test public void testUpdateArray() throws IOException { DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:testarray::whee", "fields": { "actualarray": { "add": [ "person", "another person" ] } } } """); checkSimpleArrayAdd(doc); } @Test public void testUpdateWeighted() throws IOException { DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:testset::whee", "fields": { "actualset": { "add": { "person": 37, "another person": 41 } } } } """); Map<String, Integer> weights = new HashMap<>(); FieldUpdate x = doc.getFieldUpdate("actualset"); for (ValueUpdate<?> v : x.getValueUpdates()) { AddValueUpdate adder = (AddValueUpdate) v; final String s = ((StringFieldValue) adder.getValue()).getString(); weights.put(s, adder.getWeight()); } assertEquals(2, weights.size()); final String o = "person"; final String o2 = "another person"; assertTrue(weights.containsKey(o)); assertTrue(weights.containsKey(o2)); assertEquals(Integer.valueOf(37), weights.get(o)); assertEquals(Integer.valueOf(41), weights.get(o2)); } @Test public void testUpdateMatch() throws IOException { 
DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:testset::whee", "fields": { "actualset": { "match": { "element": "person", "increment": 13 } } } } """); DocumentUpdate otherDoc = parseUpdate(""" { "update": "id:unittest:testset::whee", "fields": { "actualset": { "match": { "increment": 13, "element": "person" } } } }"""); assertEquals(doc, otherDoc); Map<String, Tuple2<Number, String>> matches = new HashMap<>(); FieldUpdate x = doc.getFieldUpdate("actualset"); for (ValueUpdate<?> v : x.getValueUpdates()) { MapValueUpdate adder = (MapValueUpdate) v; final String key = ((StringFieldValue) adder.getValue()) .getString(); String op = ((ArithmeticValueUpdate) adder.getUpdate()) .getOperator().toString(); Number n = ((ArithmeticValueUpdate) adder.getUpdate()).getOperand(); matches.put(key, new Tuple2<>(n, op)); } assertEquals(1, matches.size()); final String o = "person"; assertEquals("ADD", matches.get(o).second); assertEquals(Double.valueOf(13), matches.get(o).first); } @SuppressWarnings({ "cast", "unchecked", "rawtypes" }) @Test public void testArithmeticOperators() throws IOException { Tuple2[] operations = new Tuple2[] { new Tuple2<>(UPDATE_DECREMENT, ArithmeticValueUpdate.Operator.SUB), new Tuple2<>(UPDATE_DIVIDE, ArithmeticValueUpdate.Operator.DIV), new Tuple2<>(UPDATE_INCREMENT, ArithmeticValueUpdate.Operator.ADD), new Tuple2<>(UPDATE_MULTIPLY, ArithmeticValueUpdate.Operator.MUL) }; for (Tuple2<String, Operator> operator : operations) { DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:testset::whee", "fields": { "actualset": { "match": { "element": "person", "%s": 13 } } } } """.formatted(operator.first)); Map<String, Tuple2<Number, Operator>> matches = new HashMap<>(); FieldUpdate x = doc.getFieldUpdate("actualset"); for (ValueUpdate v : x.getValueUpdates()) { MapValueUpdate adder = (MapValueUpdate) v; final String key = ((StringFieldValue) adder.getValue()) .getString(); Operator op = ((ArithmeticValueUpdate) adder 
.getUpdate()).getOperator(); Number n = ((ArithmeticValueUpdate) adder.getUpdate()) .getOperand(); matches.put(key, new Tuple2<>(n, op)); } assertEquals(1, matches.size()); final String o = "person"; assertSame(operator.second, matches.get(o).second); assertEquals(Double.valueOf(13), matches.get(o).first); } } @SuppressWarnings("rawtypes") @Test public void testArrayIndexing() throws IOException { DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:testarray::whee", "fields": { "actualarray": { "match": { "element": 3, "assign": "nalle" } } } } """); Map<Number, String> matches = new HashMap<>(); FieldUpdate x = doc.getFieldUpdate("actualarray"); for (ValueUpdate v : x.getValueUpdates()) { MapValueUpdate adder = (MapValueUpdate) v; final Number key = ((IntegerFieldValue) adder.getValue()) .getNumber(); String op = ((StringFieldValue) adder.getUpdate() .getValue()).getString(); matches.put(key, op); } assertEquals(1, matches.size()); Number n = Integer.valueOf(3); assertEquals("nalle", matches.get(n)); } @Test public void testDocumentRemove() { JsonReader r = createReader(inputJson("{'remove': 'id:unittest:smoke::whee'}")); DocumentType docType = r.readDocumentType(new DocumentId("id:unittest:smoke::whee")); assertEquals("smoke", docType.getName()); } private Document docFromJson(String json) throws IOException { JsonReader r = createReader(json); DocumentParseInfo parseInfo = r.parseDocument().get(); DocumentType docType = r.readDocumentType(parseInfo.documentId); DocumentPut put = new DocumentPut(new Document(docType, parseInfo.documentId)); new VespaJsonDocumentReader(false).readPut(parseInfo.fieldsBuffer, put); return put.getDocument(); } @Test public void testWeightedSet() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testset::whee", "fields": { "actualset": { "nalle": 2, "tralle": 7 } } } """); FieldValue f = doc.getFieldValue(doc.getField("actualset")); assertSame(WeightedSet.class, f.getClass()); WeightedSet<?> w = 
(WeightedSet<?>) f; assertEquals(2, w.size()); assertEquals(Integer.valueOf(2), w.get(new StringFieldValue("nalle"))); assertEquals(Integer.valueOf(7), w.get(new StringFieldValue("tralle"))); } @Test public void testArray() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testarray::whee", "fields": { "actualarray": [ "nalle", "tralle" ] } } """); FieldValue f = doc.getFieldValue(doc.getField("actualarray")); assertSame(Array.class, f.getClass()); Array<?> a = (Array<?>) f; assertEquals(2, a.size()); assertEquals(new StringFieldValue("nalle"), a.get(0)); assertEquals(new StringFieldValue("tralle"), a.get(1)); } @Test public void testMap() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testmap::whee", "fields": { "actualmap": { "nalle": "kalle", "tralle": "skalle" } } } """); FieldValue f = doc.getFieldValue(doc.getField("actualmap")); assertSame(MapFieldValue.class, f.getClass()); MapFieldValue<?, ?> m = (MapFieldValue<?, ?>) f; assertEquals(2, m.size()); assertEquals(new StringFieldValue("kalle"), m.get(new StringFieldValue("nalle"))); assertEquals(new StringFieldValue("skalle"), m.get(new StringFieldValue("tralle"))); } @Test public void testOldMap() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testmap::whee", "fields": { "actualmap": [ { "key": "nalle", "value": "kalle" }, { "key": "tralle", "value": "skalle" } ] } } """); FieldValue f = doc.getFieldValue(doc.getField("actualmap")); assertSame(MapFieldValue.class, f.getClass()); MapFieldValue<?, ?> m = (MapFieldValue<?, ?>) f; assertEquals(2, m.size()); assertEquals(new StringFieldValue("kalle"), m.get(new StringFieldValue("nalle"))); assertEquals(new StringFieldValue("skalle"), m.get(new StringFieldValue("tralle"))); } @Test public void testPositionPositive() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testsinglepos::bamf", "fields": { "singlepos": "N63.429722;E10.393333" } } """); FieldValue f = 
doc.getFieldValue(doc.getField("singlepos")); assertSame(Struct.class, f.getClass()); assertEquals(10393333, PositionDataType.getXValue(f).getInteger()); assertEquals(63429722, PositionDataType.getYValue(f).getInteger()); } @Test public void testPositionOld() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testsinglepos::bamf", "fields": { "singlepos": { "x": 10393333, "y": 63429722 } } } """); FieldValue f = doc.getFieldValue(doc.getField("singlepos")); assertSame(Struct.class, f.getClass()); assertEquals(10393333, PositionDataType.getXValue(f).getInteger()); assertEquals(63429722, PositionDataType.getYValue(f).getInteger()); } @Test public void testGeoPosition() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testsinglepos::bamf", "fields": { "singlepos": { "lat": 63.429722, "lng": 10.393333 } } } """); FieldValue f = doc.getFieldValue(doc.getField("singlepos")); assertSame(Struct.class, f.getClass()); assertEquals(10393333, PositionDataType.getXValue(f).getInteger()); assertEquals(63429722, PositionDataType.getYValue(f).getInteger()); } @Test public void testGeoPositionNoAbbreviations() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testsinglepos::bamf", "fields": { "singlepos": { "latitude": 63.429722, "longitude": 10.393333 } } } """); FieldValue f = doc.getFieldValue(doc.getField("singlepos")); assertSame(Struct.class, f.getClass()); assertEquals(10393333, PositionDataType.getXValue(f).getInteger()); assertEquals(63429722, PositionDataType.getYValue(f).getInteger()); } @Test public void testPositionGeoPos() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testsinglepos::bamf", "fields": { "geopos": "N63.429722;E10.393333" } } """); FieldValue f = doc.getFieldValue(doc.getField("geopos")); assertSame(Struct.class, f.getClass()); assertEquals(10393333, PositionDataType.getXValue(f).getInteger()); assertEquals(63429722, 
PositionDataType.getYValue(f).getInteger()); assertEquals(f.getDataType(), PositionDataType.INSTANCE); } @Test public void testPositionOldGeoPos() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testsinglepos::bamf", "fields": { "geopos": { "x": 10393333, "y": 63429722 } } } """); FieldValue f = doc.getFieldValue(doc.getField("geopos")); assertSame(Struct.class, f.getClass()); assertEquals(10393333, PositionDataType.getXValue(f).getInteger()); assertEquals(63429722, PositionDataType.getYValue(f).getInteger()); assertEquals(f.getDataType(), PositionDataType.INSTANCE); } @Test public void testGeoPositionGeoPos() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testsinglepos::bamf", "fields": { "geopos": { "lat": 63.429722, "lng": 10.393333 } } } """); FieldValue f = doc.getFieldValue(doc.getField("geopos")); assertSame(Struct.class, f.getClass()); assertEquals(10393333, PositionDataType.getXValue(f).getInteger()); assertEquals(63429722, PositionDataType.getYValue(f).getInteger()); assertEquals(f.getDataType(), PositionDataType.INSTANCE); assertEquals(PositionDataType.INSTANCE, f.getDataType()); } @Test public void testPositionNegative() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testsinglepos::bamf", "fields": { "singlepos": "W46.63;S23.55" } } """); FieldValue f = doc.getFieldValue(doc.getField("singlepos")); assertSame(Struct.class, f.getClass()); assertEquals(-46630000, PositionDataType.getXValue(f).getInteger()); assertEquals(-23550000, PositionDataType.getYValue(f).getInteger()); } @Test public void testRaw() throws IOException { String base64 = new String(new JsonStringEncoder().quoteAsString( Base64.getEncoder().withoutPadding().encodeToString(Utf8.toBytes("smoketest")))); String s = fieldStringFromBase64RawContent(base64); assertEquals("smoketest", s); } @Test public void can_read_legacy_chunked_base64_raw_field_encoding() throws IOException { String expected = "this is a string 
with an impressive length. it's long enough to reach the end of the line, wow!"; String base64withDelims = "dGhpcyBpcyBhIHN0cmluZyB3aXRoIGFuIGltcHJlc3NpdmUgbGVuZ3RoLiBpdCdzIGxvbmcgZW5v\\r\\n" + "dWdoIHRvIHJlYWNoIHRoZSBlbmQgb2YgdGhlIGxpbmUsIHdvdyE=\\r\\n"; assertEquals(expected, fieldStringFromBase64RawContent(base64withDelims)); } private String fieldStringFromBase64RawContent(String base64data) throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testraw::whee", "fields": { "actualraw": "%s" } } """.formatted(base64data)); FieldValue f = doc.getFieldValue(doc.getField("actualraw")); assertSame(Raw.class, f.getClass()); Raw s = (Raw) f; return Utf8.toString(s.getByteBuffer()); } @Test public void testMapStringToArrayOfInt() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testMapStringToArrayOfInt::whee", "fields": { "actualMapStringToArrayOfInt": { "bamse": [1, 2, 3] } } } """); FieldValue f = doc.getFieldValue("actualMapStringToArrayOfInt"); assertSame(MapFieldValue.class, f.getClass()); MapFieldValue<?, ?> m = (MapFieldValue<?, ?>) f; Array<?> a = (Array<?>) m.get(new StringFieldValue("bamse")); assertEquals(3, a.size()); assertEquals(new IntegerFieldValue(1), a.get(0)); assertEquals(new IntegerFieldValue(2), a.get(1)); assertEquals(new IntegerFieldValue(3), a.get(2)); } @Test @Test public void testAssignToString() throws IOException { DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:smoke::whee", "fields": { "something": { "assign": "orOther" } } } """); FieldUpdate f = doc.getFieldUpdate("something"); assertEquals(1, f.size()); AssignValueUpdate a = (AssignValueUpdate) f.getValueUpdate(0); assertEquals(new StringFieldValue("orOther"), a.getValue()); } @Test public void testNestedArrayMatch() throws IOException { DocumentUpdate nested = parseUpdate(""" { "update": "id:unittest:testArrayOfArrayOfInt::whee", "fields": { "arrayOfArrayOfInt": { "match": { "element": 1, "match": { "element": 2, "assign": 3 } 
} } } } """); DocumentUpdate equivalent = parseUpdate(""" { "update": "id:unittest:testArrayOfArrayOfInt::whee", "fields": { "arrayOfArrayOfInt": { "match": { "match": { "assign": 3, "element": 2 }, "element": 1 } } } } """); assertEquals(nested, equivalent); assertEquals(1, nested.fieldUpdates().size()); FieldUpdate fu = nested.fieldUpdates().iterator().next(); assertEquals(1, fu.getValueUpdates().size()); MapValueUpdate mvu = (MapValueUpdate) fu.getValueUpdate(0); assertEquals(new IntegerFieldValue(1), mvu.getValue()); MapValueUpdate nvu = (MapValueUpdate) mvu.getUpdate(); assertEquals(new IntegerFieldValue(2), nvu.getValue()); AssignValueUpdate avu = (AssignValueUpdate) nvu.getUpdate(); assertEquals(new IntegerFieldValue(3), avu.getValue()); Document doc = docFromJson(""" { "put": "id:unittest:testArrayOfArrayOfInt::whee", "fields": { "arrayOfArrayOfInt": [ [1, 2, 3], [4, 5, 6] ] } } """); nested.applyTo(doc); Document expected = docFromJson(""" { "put": "id:unittest:testArrayOfArrayOfInt::whee", "fields": { "arrayOfArrayOfInt": [ [1, 2, 3], [4, 5, 3] ] } } """); assertEquals(expected, doc); } @Test public void testMatchCannotUpdateNestedFields() { assertEquals("Field type Map<string,Array<int>> not supported.", assertThrows(UnsupportedOperationException.class, () -> parseUpdate(""" { "update": "id:unittest:testMapStringToArrayOfInt::whee", "fields": { "actualMapStringToArrayOfInt": { "match": { "element": "bamse", "match": { "element": 1, "assign": 4 } } } } } """)).getMessage()); } @Test public void testMatchCannotAssignToNestedMap() { assertEquals("Field type Map<string,Array<int>> not supported.", assertThrows(UnsupportedOperationException.class, () -> parseUpdate(""" { "update": "id:unittest:testMapStringToArrayOfInt::whee", "fields": { "actualMapStringToArrayOfInt": { "match": { "element": "bamse", "assign": [1, 3, 4] } } } } """)).getMessage()); } @Test public void testMatchCannotAssignToMap() { assertEquals("Field type Map<string,string> not supported.", 
assertThrows(UnsupportedOperationException.class, () -> parseUpdate(""" { "update": "id:unittest:testmap::whee", "fields": { "actualmap": { "match": { "element": "bamse", "assign": "bar" } } } } """)).getMessage()); } @Test public void testAssignInsideArrayInMap() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testMapStringToArrayOfInt::whee", "fields": { "actualMapStringToArrayOfInt": { "bamse": [1, 2, 3] } } }"""); assertEquals(2, ((MapFieldValue<StringFieldValue, Array<IntegerFieldValue>>) doc.getFieldValue("actualMapStringToArrayOfInt")) .get(StringFieldValue.getFactory().create("bamse")).get(1).getInteger()); DocumentUpdate update = parseUpdate(""" { "update": "id:unittest:testMapStringToArrayOfInt::whee", "fields": { "actualMapStringToArrayOfInt{bamse}[1]": { "assign": 4 } } } """); assertEquals(1, update.fieldPathUpdates().size()); update.applyTo(doc); assertEquals(4, ((MapFieldValue<StringFieldValue, Array<IntegerFieldValue>>) doc.getFieldValue("actualMapStringToArrayOfInt")) .get(StringFieldValue.getFactory().create("bamse")).get(1).getInteger()); } @Test public void testAssignToArray() throws IOException { DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:testMapStringToArrayOfInt::whee", "fields": { "actualMapStringToArrayOfInt": { "assign": { "bamse": [1, 2, 3] } } } } """); FieldUpdate f = doc.getFieldUpdate("actualMapStringToArrayOfInt"); assertEquals(1, f.size()); AssignValueUpdate assign = (AssignValueUpdate) f.getValueUpdate(0); MapFieldValue<?, ?> m = (MapFieldValue<?, ?>) assign.getValue(); Array<?> a = (Array<?>) m.get(new StringFieldValue("bamse")); assertEquals(3, a.size()); assertEquals(new IntegerFieldValue(1), a.get(0)); assertEquals(new IntegerFieldValue(2), a.get(1)); assertEquals(new IntegerFieldValue(3), a.get(2)); } @Test public void testOldAssignToArray() throws IOException { DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:testMapStringToArrayOfInt::whee", "fields": { 
"actualMapStringToArrayOfInt": { "assign": [ { "key": "bamse", "value": [1, 2, 3] } ] } } } """); FieldUpdate f = doc.getFieldUpdate("actualMapStringToArrayOfInt"); assertEquals(1, f.size()); AssignValueUpdate assign = (AssignValueUpdate) f.getValueUpdate(0); MapFieldValue<?, ?> m = (MapFieldValue<?, ?>) assign.getValue(); Array<?> a = (Array<?>) m.get(new StringFieldValue("bamse")); assertEquals(3, a.size()); assertEquals(new IntegerFieldValue(1), a.get(0)); assertEquals(new IntegerFieldValue(2), a.get(1)); assertEquals(new IntegerFieldValue(3), a.get(2)); } @Test public void testAssignToWeightedSet() throws IOException { DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:testset::whee", "fields": { "actualset": { "assign": { "person": 37, "another person": 41 } } } } """); FieldUpdate x = doc.getFieldUpdate("actualset"); assertEquals(1, x.size()); AssignValueUpdate assign = (AssignValueUpdate) x.getValueUpdate(0); WeightedSet<?> w = (WeightedSet<?>) assign.getValue(); assertEquals(2, w.size()); assertEquals(Integer.valueOf(37), w.get(new StringFieldValue("person"))); assertEquals(Integer.valueOf(41), w.get(new StringFieldValue("another person"))); } @Test public void testCompleteFeed() { JsonReader r = createReader(""" [ { "put": "id:unittest:smoke::whee", "fields": { "something": "smoketest", "flag": true, "nalle": "bamse" } }, { "update": "id:unittest:testarray::whee", "fields": { "actualarray": { "add": [ "person", "another person" ] } } }, { "remove": "id:unittest:smoke::whee" } ] """); controlBasicFeed(r); } @Test public void testCompleteFeedWithCreateAndCondition() { JsonReader r = createReader(""" [ { "put": "id:unittest:smoke::whee", "fields": { "something": "smoketest", "flag": true, "nalle": "bamse" } }, { "condition":"bla", "update": "id:unittest:testarray::whee", "create":true, "fields": { "actualarray": { "add": [ "person", "another person" ] } } }, { "remove": "id:unittest:smoke::whee" } ] """); DocumentOperation d = r.next(); Document 
doc = ((DocumentPut) d).getDocument(); smokeTestDoc(doc); d = r.next(); DocumentUpdate update = (DocumentUpdate) d; checkSimpleArrayAdd(update); assertTrue(update.getCreateIfNonExistent()); assertEquals("bla", update.getCondition().getSelection()); d = r.next(); DocumentRemove remove = (DocumentRemove) d; assertEquals("smoke", remove.getId().getDocType()); assertNull(r.next()); } @Test public void testUpdateWithConditionAndCreateInDifferentOrdering() { int documentsCreated = 106; List<String> parts = Arrays.asList( "\"condition\":\"bla\"", "\"update\": \"id:unittest:testarray::whee\"", " \"fields\": { " + "\"actualarray\": { \"add\": [" + " \"person\",\"another person\"]}}", " \"create\":true"); Random random = new Random(42); StringBuilder documents = new StringBuilder("["); for (int x = 0; x < documentsCreated; x++) { Collections.shuffle(parts, random); documents.append("{").append(Joiner.on(",").join(parts)).append("}"); if (x < documentsCreated -1) { documents.append(","); } } documents.append("]"); InputStream rawDoc = new ByteArrayInputStream(Utf8.toBytes(documents.toString())); JsonReader r = new JsonReader(types, rawDoc, parserFactory); for (int x = 0; x < documentsCreated; x++) { DocumentUpdate update = (DocumentUpdate) r.next(); checkSimpleArrayAdd(update); assertTrue(update.getCreateIfNonExistent()); assertEquals("bla", update.getCondition().getSelection()); } assertNull(r.next()); } @Test public void testCreateIfNonExistentInPut() { JsonReader r = createReader(""" [ { "create":true, "fields": { "something": "smoketest", "nalle": "bamse" }, "put": "id:unittest:smoke::whee" } ] """); var op = r.next(); var put = (DocumentPut) op; assertTrue(put.getCreateIfNonExistent()); } @Test public void testCompleteFeedWithIdAfterFields() { JsonReader r = createReader(""" [ { "fields": { "something": "smoketest", "flag": true, "nalle": "bamse" }, "put": "id:unittest:smoke::whee" }, { "fields": { "actualarray": { "add": [ "person", "another person" ] } }, "update": 
"id:unittest:testarray::whee" }, { "remove": "id:unittest:smoke::whee" } ] """); controlBasicFeed(r); } protected void controlBasicFeed(JsonReader r) { DocumentOperation d = r.next(); Document doc = ((DocumentPut) d).getDocument(); smokeTestDoc(doc); d = r.next(); DocumentUpdate update = (DocumentUpdate) d; checkSimpleArrayAdd(update); d = r.next(); DocumentRemove remove = (DocumentRemove) d; assertEquals("smoke", remove.getId().getDocType()); assertNull(r.next()); } @Test public void testCompleteFeedWithEmptyDoc() { JsonReader r = createReader(""" [ { "put": "id:unittest:smoke::whee", "fields": {} }, { "update": "id:unittest:testarray::whee", "fields": {} }, { "remove": "id:unittest:smoke::whee" } ] """); DocumentOperation d = r.next(); Document doc = ((DocumentPut) d).getDocument(); assertEquals("smoke", doc.getId().getDocType()); d = r.next(); DocumentUpdate update = (DocumentUpdate) d; assertEquals("testarray", update.getId().getDocType()); d = r.next(); DocumentRemove remove = (DocumentRemove) d; assertEquals("smoke", remove.getId().getDocType()); assertNull(r.next()); } private void checkSimpleArrayAdd(DocumentUpdate update) { Set<String> toAdd = new HashSet<>(); FieldUpdate x = update.getFieldUpdate("actualarray"); for (ValueUpdate<?> v : x.getValueUpdates()) { AddValueUpdate adder = (AddValueUpdate) v; toAdd.add(((StringFieldValue) adder.getValue()).getString()); } assertEquals(2, toAdd.size()); assertTrue(toAdd.contains("person")); assertTrue(toAdd.contains("another person")); } private void smokeTestDoc(Document doc) { FieldValue boolField = doc.getFieldValue(doc.getField("flag")); assertSame(BoolFieldValue.class, boolField.getClass()); assertTrue((Boolean)boolField.getWrappedValue()); FieldValue stringField = doc.getFieldValue(doc.getField("nalle")); assertSame(StringFieldValue.class, stringField.getClass()); assertEquals("bamse", ((StringFieldValue) stringField).getString()); } @Test public void nonExistingFieldCausesException() throws IOException { 
Exception expected = assertThrows(IllegalArgumentException.class, () -> docFromJson(""" { "put": "id:unittest:smoke::whee", "fields": { "smething": "smoketest", "nalle": "bamse" } } """)); assertTrue(expected.getMessage().startsWith("No field 'smething' in the structure of type 'smoke'")); } @Test public void nonExistingFieldsCanBeIgnoredInPut() throws IOException { JsonReader r = createReader(""" { "put": "id:unittest:smoke::doc1", "fields": { "nonexisting1": "ignored value", "field1": "value1", "nonexisting2": { "blocks": { "a": [2.0, 3.0], "b": [4.0, 5.0] } }, "field2": "value2", "nonexisting3": { "cells": [ { "address": { "x": "x1" }, "value": 1.0 } ] }, "tensor1": { "cells": { "x1": 1.0 } }, "nonexisting4": "ignored value" } } """); DocumentParseInfo parseInfo = r.parseDocument().get(); DocumentType docType = r.readDocumentType(parseInfo.documentId); DocumentPut put = new DocumentPut(new Document(docType, parseInfo.documentId)); boolean fullyApplied = new VespaJsonDocumentReader(true).readPut(parseInfo.fieldsBuffer, put); assertFalse(fullyApplied); assertNull(put.getDocument().getField("nonexisting1")); assertEquals("value1", put.getDocument().getFieldValue("field1").toString()); assertNull(put.getDocument().getField("nonexisting2")); assertEquals("value2", put.getDocument().getFieldValue("field2").toString()); assertNull(put.getDocument().getField("nonexisting3")); assertEquals(Tensor.from("tensor(x{}):{{x:x1}:1.0}"), put.getDocument().getFieldValue("tensor1").getWrappedValue()); assertNull(put.getDocument().getField("nonexisting4")); } @Test public void nonExistingFieldsCanBeIgnoredInUpdate() throws IOException{ JsonReader r = createReader(""" { "update": "id:unittest:smoke::doc1", "fields": { "nonexisting1": { "assign": "ignored value" }, "field1": { "assign": "value1" }, "nonexisting2": { "assign": { "blocks": { "a":[2.0,3.0], "b":[4.0,5.0] } } }, "field2": { "assign": "value2" }, "nonexisting3": { "assign" : { "cells": [{"address": {"x": "x1"}, "value": 
1.0}] } }, "tensor1": {"assign": { "cells": {"x1": 1.0} } }, "nonexisting4": { "assign": "ignored value" } } } """); DocumentParseInfo parseInfo = r.parseDocument().get(); DocumentType docType = r.readDocumentType(parseInfo.documentId); DocumentUpdate update = new DocumentUpdate(docType, parseInfo.documentId); boolean fullyApplied = new VespaJsonDocumentReader(true).readUpdate(parseInfo.fieldsBuffer, update); assertFalse(fullyApplied); assertNull(update.getFieldUpdate("nonexisting1")); assertEquals("value1", update.getFieldUpdate("field1").getValueUpdates().get(0).getValue().getWrappedValue().toString()); assertNull(update.getFieldUpdate("nonexisting2")); assertEquals("value2", update.getFieldUpdate("field2").getValueUpdates().get(0).getValue().getWrappedValue().toString()); assertNull(update.getFieldUpdate("nonexisting3")); assertEquals(Tensor.from("tensor(x{}):{{x:x1}:1.0}"), update.getFieldUpdate("tensor1").getValueUpdates().get(0).getValue().getWrappedValue()); assertNull(update.getFieldUpdate("nonexisting4")); } @Test public void feedWithBasicErrorTest() { JsonReader r = createReader(""" [ { "put": "id:test:smoke::0", "fields": { "something": "foo" } }, { "put": "id:test:smoke::1", "fields": { "something": "foo" } }, { "put": "id:test:smoke::2", "fields": { "something": "foo" } }, ]"""); assertTrue(assertThrows(RuntimeException.class, () -> { while (r.next() != null); }) .getMessage().contains("JsonParseException")); } @Test public void idAsAliasForPutTest() throws IOException{ JsonReader r = createReader(""" { "id": "id:unittest:smoke::doc1", "fields": { "something": "smoketest", "flag": true, "nalle": "bamse" } } """); DocumentParseInfo parseInfo = r.parseDocument().get(); DocumentType docType = r.readDocumentType(parseInfo.documentId); DocumentPut put = new DocumentPut(new Document(docType, parseInfo.documentId)); boolean fullyApplied = new VespaJsonDocumentReader(false).readPut(parseInfo.fieldsBuffer, put); assertTrue(fullyApplied); 
smokeTestDoc(put.getDocument()); } private void testFeedWithTestAndSetCondition(String jsonDoc) { ByteArrayInputStream parseInfoDoc = new ByteArrayInputStream(Utf8.toBytes(jsonDoc)); JsonReader reader = new JsonReader(types, parseInfoDoc, parserFactory); int NUM_OPERATIONS_IN_FEED = 3; for (int i = 0; i < NUM_OPERATIONS_IN_FEED; i++) { DocumentOperation operation = reader.next(); assertTrue("A test and set condition should be present", operation.getCondition().isPresent()); assertEquals("DocumentOperation's test and set condition should be equal to the one in the JSON feed", "smoke.something == \"smoketest\"", operation.getCondition().getSelection()); } assertNull(reader.next()); } @Test public void testFeedWithTestAndSetConditionOrderingOne() { testFeedWithTestAndSetCondition(""" [ { "put": "id:unittest:smoke::whee", "condition": "smoke.something == \\"smoketest\\"", "fields": { "something": "smoketest", "nalle": "bamse" } }, { "update": "id:unittest:testarray::whee", "condition": "smoke.something == \\"smoketest\\"", "fields": { "actualarray": { "add": [ "person", "another person" ] } } }, { "remove": "id:unittest:smoke::whee", "condition": "smoke.something == \\"smoketest\\"" } ] """); } @Test public void testFeedWithTestAndSetConditionOrderingTwo() { testFeedWithTestAndSetCondition(""" [ { "condition": "smoke.something == \\"smoketest\\"", "put": "id:unittest:smoke::whee", "fields": { "something": "smoketest", "nalle": "bamse" } }, { "condition": "smoke.something == \\"smoketest\\"", "update": "id:unittest:testarray::whee", "fields": { "actualarray": { "add": [ "person", "another person" ] } } }, { "condition": "smoke.something == \\"smoketest\\"", "remove": "id:unittest:smoke::whee" } ] """); } @Test public void testFeedWithTestAndSetConditionOrderingThree() { testFeedWithTestAndSetCondition(""" [ { "put": "id:unittest:smoke::whee", "fields": { "something": "smoketest", "nalle": "bamse" }, "condition": "smoke.something == \\"smoketest\\"" }, { "update": 
"id:unittest:testarray::whee", "fields": { "actualarray": { "add": [ "person", "another person" ] } }, "condition": "smoke.something == \\"smoketest\\"" }, { "remove": "id:unittest:smoke::whee", "condition": "smoke.something == \\"smoketest\\"" } ] """); } @Test(expected = IllegalArgumentException.class) public void testInvalidFieldAfterFieldsFieldShouldFailParse() { String jsonData = """ [ { "put": "id:unittest:smoke::whee", "fields": { "something": "smoketest", "nalle": "bamse" }, "bjarne": "stroustrup" } ]"""; new JsonReader(types, jsonToInputStream(jsonData), parserFactory).next(); } @Test(expected = IllegalArgumentException.class) public void testInvalidFieldBeforeFieldsFieldShouldFailParse() { String jsonData = """ [ { "update": "id:unittest:testarray::whee", "what is this": "nothing to see here", "fields": { "actualarray": { "add": [ "person", "another person" ] } } } ]"""; new JsonReader(types, jsonToInputStream(jsonData), parserFactory).next(); } @Test(expected = IllegalArgumentException.class) public void testInvalidFieldWithoutFieldsFieldShouldFailParse() { String jsonData = """ [ { "remove": "id:unittest:smoke::whee", "what is love": "baby, do not hurt me... 
much } ]"""; new JsonReader(types, jsonToInputStream(jsonData), parserFactory).next(); } @Test public void testMissingOperation() { try { String jsonData = """ [ { "fields": { "actualarray": { "add": [ "person", "another person" ] } } } ]"""; new JsonReader(types, jsonToInputStream(jsonData), parserFactory).next(); fail("Expected exception"); } catch (IllegalArgumentException e) { assertEquals("Missing a document operation ('put', 'update' or 'remove')", e.getMessage()); } } @Test public void testMissingFieldsMapInPut() { try { String jsonData = """ [ { "put": "id:unittest:smoke::whee" } ]"""; new JsonReader(types, jsonToInputStream(jsonData), parserFactory).next(); fail("Expected exception"); } catch (IllegalArgumentException e) { assertEquals("put of document id:unittest:smoke::whee is missing a 'fields' map", e.getMessage()); } } @Test public void testMissingFieldsMapInUpdate() { try { String jsonData = """ [ { "update": "id:unittest:smoke::whee" } ]"""; new JsonReader(types, jsonToInputStream(jsonData), parserFactory).next(); fail("Expected exception"); } catch (IllegalArgumentException e) { assertEquals("Update of document id:unittest:smoke::whee is missing a 'fields' map", e.getMessage()); } } @Test public void testNullValues() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testnull::doc1", "fields": { "intfield": null, "stringfield": null, "arrayfield": null, "weightedsetfield": null, "mapfield": null, "tensorfield": null } } """); assertFieldValueNull(doc, "intfield"); assertFieldValueNull(doc, "stringfield"); assertFieldValueNull(doc, "arrayfield"); assertFieldValueNull(doc, "weightedsetfield"); assertFieldValueNull(doc, "mapfield"); assertFieldValueNull(doc, "tensorfield"); } @Test(expected=JsonReaderException.class) public void testNullArrayElement() throws IOException { docFromJson(""" { "put": "id:unittest:testnull::doc1", "fields": { "arrayfield": [ null ] } } """); fail(); } private void assertFieldValueNull(Document doc, 
String fieldName) {
        Field field = doc.getField(fieldName);
        assertNotNull(field);
        FieldValue fieldValue = doc.getFieldValue(field);
        assertNull(fieldValue);
    }

    // Helper: wraps a JSON string as the UTF-8 byte stream form JsonReader consumes.
    static ByteArrayInputStream jsonToInputStream(String json) {
        return new ByteArrayInputStream(Utf8.toBytes(json));
    }

    // A put that never mentions a tensor field leaves that field's value null on the document.
    @Test
    public void testParsingWithoutTensorField() {
        Document doc = createPutWithoutTensor().getDocument();
        assertEquals("testtensor", doc.getId().getDocType());
        assertEquals("id:unittest:testtensor::0", doc.getId().toString());
        TensorFieldValue fieldValue = (TensorFieldValue)doc.getFieldValue(doc.getField("sparse_tensor"));
        assertNull(fieldValue);
    }

    // '{}' (no 'cells' member at all) parses to the empty sparse tensor.
    @Test
    public void testParsingOfEmptyTensor() {
        assertSparseTensorField("tensor(x{},y{}):{}", createPutWithSparseTensor("{}"));
    }

    // An explicitly empty 'cells' array also parses to the empty sparse tensor.
    @Test
    public void testParsingOfTensorWithEmptyCells() {
        assertSparseTensorField("tensor(x{},y{}):{}", createPutWithSparseTensor(inputJson("{ 'cells': [] }")));
    }

    // Dense short form must contain at least one value, for both array and string payloads.
    @Test
    public void testDisallowedDenseTensorShortFormWithoutValues() {
        assertCreatePutFails(inputJson("{ 'values': [] }"), "dense_tensor",
                             "The 'values' array does not contain any values");
        assertCreatePutFails(inputJson("{ 'values': '' }"), "dense_tensor",
                             "The 'values' string does not contain any values");
    }

    // Mixed short form blocks must contain exactly the dense subspace size (y[3] here).
    @Test
    public void testDisallowedMixedTensorShortFormWithoutValues() {
        assertCreatePutFails(inputJson("{\"blocks\":{ \"a\": [] } }"), "mixed_tensor",
                             "Expected 3 values, but got 0");
        assertCreatePutFails(inputJson("{\"blocks\":[ {\"address\":{\"x\":\"a\"}, \"values\": [] } ] }"), "mixed_tensor",
                             "Expected 3 values, but got 0");
    }

    // Sparse 'cells' long form parses into a MappedTensor.
    // NOTE(review): the expected-value string ends in '}}' (one brace more than it opens) —
    // presumably tolerated by assertSparseTensorField's tensor parsing; confirm.
    @Test
    public void testParsingOfSparseTensorWithCells() {
        Tensor tensor = assertSparseTensorField("{{x:a,y:b}:2.0,{x:c,y:b}:3.0}}",
                createPutWithSparseTensor("""
                        { "type": "tensor(x{},y{})", "cells": [ { "address": { "x": "a", "y": "b" }, "value": 2.0 }, { "address": { "x": "c", "y": "b" }, "value": 3.0 } ] }
                        """));
        assertTrue(tensor instanceof MappedTensor);
    }
class JsonReaderTestCase { private DocumentTypeManager types; private JsonFactory parserFactory; @Before public void setUp() throws Exception { parserFactory = new JsonFactory(); types = new DocumentTypeManager(); { DocumentType x = new DocumentType("smoke"); x.addField(new Field("something", DataType.STRING)); x.addField(new Field("nalle", DataType.STRING)); x.addField(new Field("field1", DataType.STRING)); x.addField(new Field("field2", DataType.STRING)); x.addField(new Field("int1", DataType.INT)); x.addField(new Field("flag", DataType.BOOL)); x.addField(new Field("tensor1", DataType.getTensor(TensorType.fromSpec("tensor(x{})")))); types.registerDocumentType(x); } { DocumentType x = new DocumentType("mirrors"); StructDataType woo = new StructDataType("woo"); woo.addField(new Field("sandra", DataType.STRING)); woo.addField(new Field("cloud", DataType.STRING)); x.addField(new Field("skuggsjaa", woo)); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testarray"); DataType d = new ArrayDataType(DataType.STRING); x.addField(new Field("actualarray", d)); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testset"); DataType d = new WeightedSetDataType(DataType.STRING, true, true); x.addField(new Field("actualset", d)); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testmap"); DataType d = new MapDataType(DataType.STRING, DataType.STRING); x.addField(new Field("actualmap", d)); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testraw"); DataType d = DataType.RAW; x.addField(new Field("actualraw", d)); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testMapStringToArrayOfInt"); DataType value = new ArrayDataType(DataType.INT); DataType d = new MapDataType(DataType.STRING, value); x.addField(new Field("actualMapStringToArrayOfInt", d)); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testArrayOfArrayOfInt"); DataType inner = new 
ArrayDataType(DataType.INT); DataType outer = new ArrayDataType(inner); x.addField(new Field("arrayOfArrayOfInt", outer)); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testsinglepos"); DataType d = PositionDataType.INSTANCE; x.addField(new Field("singlepos", d)); x.addField(new Field("geopos", new GeoPosType(8))); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testtensor"); x.addField(new Field("sparse_single_dimension_tensor", new TensorDataType(new TensorType.Builder().mapped("x").build()))); x.addField(new Field("sparse_tensor", new TensorDataType(new TensorType.Builder().mapped("x").mapped("y").build()))); x.addField(new Field("dense_tensor", new TensorDataType(new TensorType.Builder().indexed("x", 2).indexed("y", 3).build()))); x.addField(new Field("dense_int8_tensor", new TensorDataType(TensorType.fromSpec("tensor<int8>(x[2],y[3])")))); x.addField(new Field("dense_unbound_tensor", new TensorDataType(new TensorType.Builder().indexed("x").indexed("y").build()))); x.addField(new Field("mixed_tensor", new TensorDataType(new TensorType.Builder().mapped("x").indexed("y", 3).build()))); x.addField(new Field("mixed_bfloat16_tensor", new TensorDataType(TensorType.fromSpec("tensor<bfloat16>(x{},y[3])")))); x.addField(new Field("mixed_tensor_adv", new TensorDataType(new TensorType.Builder().mapped("x").mapped("y").mapped("z").indexed("a", 3).build()))); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testpredicate"); x.addField(new Field("boolean", DataType.PREDICATE)); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testint"); x.addField(new Field("integerfield", DataType.INT)); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testnull"); x.addField(new Field("intfield", DataType.INT)); x.addField(new Field("stringfield", DataType.STRING)); x.addField(new Field("arrayfield", new ArrayDataType(DataType.STRING))); x.addField(new Field("weightedsetfield", new 
WeightedSetDataType(DataType.STRING, true, true))); x.addField(new Field("mapfield", new MapDataType(DataType.STRING, DataType.STRING))); x.addField(new Field("tensorfield", new TensorDataType(new TensorType.Builder().indexed("x").build()))); types.registerDocumentType(x); } } @After public void tearDown() throws Exception { types = null; parserFactory = null; } private JsonReader createReader(String jsonInput) { InputStream input = new ByteArrayInputStream(Utf8.toBytes(jsonInput)); return new JsonReader(types, input, parserFactory); } @Test public void readSingleDocumentPut() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:smoke::doc1", "fields": { "something": "smoketest", "flag": true, "nalle": "bamse" } } """); smokeTestDoc(doc); } @Test public final void readSingleDocumentUpdate() throws IOException { DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:smoke::whee", "fields": { "something": { "assign": "orOther" } } } """); FieldUpdate f = doc.getFieldUpdate("something"); assertEquals(1, f.size()); assertTrue(f.getValueUpdate(0) instanceof AssignValueUpdate); assertEquals(new StringFieldValue("orOther"), f.getValueUpdate(0).getValue()); } @Test public void readClearField() throws IOException { DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:smoke::whee", "fields": { "int1": { "assign": null } } } """); FieldUpdate f = doc.getFieldUpdate("int1"); assertEquals(1, f.size()); assertTrue(f.getValueUpdate(0) instanceof ClearValueUpdate); assertNull(f.getValueUpdate(0).getValue()); } @Test public void smokeTest() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:smoke::doc1", "fields": { "something": "smoketest", "flag": true, "nalle": "bamse" } } """); smokeTestDoc(doc); } @Test public void docIdLookaheadTest() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:smoke::doc1", "fields": { "something": "smoketest", "flag": true, "nalle": "bamse" } } """); smokeTestDoc(doc); 
} @Test public void emptyDocTest() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:smoke::whee", "fields": { } }"""); assertEquals(new Document(types.getDocumentType("smoke"), new DocumentId("id:unittest:smoke::whee")), doc); } @Test public void testStruct() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:mirrors::whee", "fields": { "skuggsjaa": { "sandra": "person", "cloud": "another person" } } } """); FieldValue f = doc.getFieldValue(doc.getField("skuggsjaa")); assertSame(Struct.class, f.getClass()); Struct s = (Struct) f; assertEquals("person", ((StringFieldValue) s.getFieldValue("sandra")).getString()); } private DocumentUpdate parseUpdate(String json) throws IOException { InputStream rawDoc = new ByteArrayInputStream(Utf8.toBytes(json)); JsonReader r = new JsonReader(types, rawDoc, parserFactory); DocumentParseInfo parseInfo = r.parseDocument().get(); DocumentType docType = r.readDocumentType(parseInfo.documentId); DocumentUpdate update = new DocumentUpdate(docType, parseInfo.documentId); new VespaJsonDocumentReader(false).readUpdate(parseInfo.fieldsBuffer, update); return update; } @Test public void testStructUpdate() throws IOException { DocumentUpdate put = parseUpdate(""" { "update": "id:unittest:mirrors:g=test:whee", "create": true, "fields": { "skuggsjaa": { "assign": { "sandra": "person", "cloud": "another person" } } } } """); assertEquals(1, put.fieldUpdates().size()); FieldUpdate fu = put.fieldUpdates().iterator().next(); assertEquals(1, fu.getValueUpdates().size()); ValueUpdate vu = fu.getValueUpdate(0); assertTrue(vu instanceof AssignValueUpdate); AssignValueUpdate avu = (AssignValueUpdate) vu; assertTrue(avu.getValue() instanceof Struct); Struct s = (Struct) avu.getValue(); assertEquals(2, s.getFieldCount()); assertEquals(new StringFieldValue("person"), s.getFieldValue(s.getField("sandra"))); GrowableByteBuffer buf = new GrowableByteBuffer(); DocumentSerializer serializer = 
DocumentSerializerFactory.create6(buf); put.serialize(serializer); assertEquals(107, buf.position()); } @Test public final void testEmptyStructUpdate() throws IOException { DocumentUpdate put = parseUpdate(""" { "update": "id:unittest:mirrors:g=test:whee", "create": true, "fields": { "skuggsjaa": { "assign": { } } } } """); assertEquals(1, put.fieldUpdates().size()); FieldUpdate fu = put.fieldUpdates().iterator().next(); assertEquals(1, fu.getValueUpdates().size()); ValueUpdate vu = fu.getValueUpdate(0); assertTrue(vu instanceof AssignValueUpdate); AssignValueUpdate avu = (AssignValueUpdate) vu; assertTrue(avu.getValue() instanceof Struct); Struct s = (Struct) avu.getValue(); assertEquals(0, s.getFieldCount()); GrowableByteBuffer buf = new GrowableByteBuffer(); DocumentSerializer serializer = DocumentSerializerFactory.create6(buf); put.serialize(serializer); assertEquals(69, buf.position()); } @Test public void testUpdateArray() throws IOException { DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:testarray::whee", "fields": { "actualarray": { "add": [ "person", "another person" ] } } } """); checkSimpleArrayAdd(doc); } @Test public void testUpdateWeighted() throws IOException { DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:testset::whee", "fields": { "actualset": { "add": { "person": 37, "another person": 41 } } } } """); Map<String, Integer> weights = new HashMap<>(); FieldUpdate x = doc.getFieldUpdate("actualset"); for (ValueUpdate<?> v : x.getValueUpdates()) { AddValueUpdate adder = (AddValueUpdate) v; final String s = ((StringFieldValue) adder.getValue()).getString(); weights.put(s, adder.getWeight()); } assertEquals(2, weights.size()); final String o = "person"; final String o2 = "another person"; assertTrue(weights.containsKey(o)); assertTrue(weights.containsKey(o2)); assertEquals(Integer.valueOf(37), weights.get(o)); assertEquals(Integer.valueOf(41), weights.get(o2)); } @Test public void testUpdateMatch() throws IOException { 
DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:testset::whee", "fields": { "actualset": { "match": { "element": "person", "increment": 13 } } } } """); DocumentUpdate otherDoc = parseUpdate(""" { "update": "id:unittest:testset::whee", "fields": { "actualset": { "match": { "increment": 13, "element": "person" } } } }"""); assertEquals(doc, otherDoc); Map<String, Tuple2<Number, String>> matches = new HashMap<>(); FieldUpdate x = doc.getFieldUpdate("actualset"); for (ValueUpdate<?> v : x.getValueUpdates()) { MapValueUpdate adder = (MapValueUpdate) v; final String key = ((StringFieldValue) adder.getValue()) .getString(); String op = ((ArithmeticValueUpdate) adder.getUpdate()) .getOperator().toString(); Number n = ((ArithmeticValueUpdate) adder.getUpdate()).getOperand(); matches.put(key, new Tuple2<>(n, op)); } assertEquals(1, matches.size()); final String o = "person"; assertEquals("ADD", matches.get(o).second); assertEquals(Double.valueOf(13), matches.get(o).first); } @SuppressWarnings({ "cast", "unchecked", "rawtypes" }) @Test public void testArithmeticOperators() throws IOException { Tuple2[] operations = new Tuple2[] { new Tuple2<>(UPDATE_DECREMENT, ArithmeticValueUpdate.Operator.SUB), new Tuple2<>(UPDATE_DIVIDE, ArithmeticValueUpdate.Operator.DIV), new Tuple2<>(UPDATE_INCREMENT, ArithmeticValueUpdate.Operator.ADD), new Tuple2<>(UPDATE_MULTIPLY, ArithmeticValueUpdate.Operator.MUL) }; for (Tuple2<String, Operator> operator : operations) { DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:testset::whee", "fields": { "actualset": { "match": { "element": "person", "%s": 13 } } } } """.formatted(operator.first)); Map<String, Tuple2<Number, Operator>> matches = new HashMap<>(); FieldUpdate x = doc.getFieldUpdate("actualset"); for (ValueUpdate v : x.getValueUpdates()) { MapValueUpdate adder = (MapValueUpdate) v; final String key = ((StringFieldValue) adder.getValue()) .getString(); Operator op = ((ArithmeticValueUpdate) adder 
.getUpdate()).getOperator(); Number n = ((ArithmeticValueUpdate) adder.getUpdate()) .getOperand(); matches.put(key, new Tuple2<>(n, op)); } assertEquals(1, matches.size()); final String o = "person"; assertSame(operator.second, matches.get(o).second); assertEquals(Double.valueOf(13), matches.get(o).first); } } @SuppressWarnings("rawtypes") @Test public void testArrayIndexing() throws IOException { DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:testarray::whee", "fields": { "actualarray": { "match": { "element": 3, "assign": "nalle" } } } } """); Map<Number, String> matches = new HashMap<>(); FieldUpdate x = doc.getFieldUpdate("actualarray"); for (ValueUpdate v : x.getValueUpdates()) { MapValueUpdate adder = (MapValueUpdate) v; final Number key = ((IntegerFieldValue) adder.getValue()) .getNumber(); String op = ((StringFieldValue) adder.getUpdate() .getValue()).getString(); matches.put(key, op); } assertEquals(1, matches.size()); Number n = Integer.valueOf(3); assertEquals("nalle", matches.get(n)); } @Test public void testDocumentRemove() { JsonReader r = createReader(inputJson("{'remove': 'id:unittest:smoke::whee'}")); DocumentType docType = r.readDocumentType(new DocumentId("id:unittest:smoke::whee")); assertEquals("smoke", docType.getName()); } private Document docFromJson(String json) throws IOException { JsonReader r = createReader(json); DocumentParseInfo parseInfo = r.parseDocument().get(); DocumentType docType = r.readDocumentType(parseInfo.documentId); DocumentPut put = new DocumentPut(new Document(docType, parseInfo.documentId)); new VespaJsonDocumentReader(false).readPut(parseInfo.fieldsBuffer, put); return put.getDocument(); } @Test public void testWeightedSet() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testset::whee", "fields": { "actualset": { "nalle": 2, "tralle": 7 } } } """); FieldValue f = doc.getFieldValue(doc.getField("actualset")); assertSame(WeightedSet.class, f.getClass()); WeightedSet<?> w = 
(WeightedSet<?>) f; assertEquals(2, w.size()); assertEquals(Integer.valueOf(2), w.get(new StringFieldValue("nalle"))); assertEquals(Integer.valueOf(7), w.get(new StringFieldValue("tralle"))); } @Test public void testArray() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testarray::whee", "fields": { "actualarray": [ "nalle", "tralle" ] } } """); FieldValue f = doc.getFieldValue(doc.getField("actualarray")); assertSame(Array.class, f.getClass()); Array<?> a = (Array<?>) f; assertEquals(2, a.size()); assertEquals(new StringFieldValue("nalle"), a.get(0)); assertEquals(new StringFieldValue("tralle"), a.get(1)); } @Test public void testMap() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testmap::whee", "fields": { "actualmap": { "nalle": "kalle", "tralle": "skalle" } } } """); FieldValue f = doc.getFieldValue(doc.getField("actualmap")); assertSame(MapFieldValue.class, f.getClass()); MapFieldValue<?, ?> m = (MapFieldValue<?, ?>) f; assertEquals(2, m.size()); assertEquals(new StringFieldValue("kalle"), m.get(new StringFieldValue("nalle"))); assertEquals(new StringFieldValue("skalle"), m.get(new StringFieldValue("tralle"))); } @Test public void testOldMap() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testmap::whee", "fields": { "actualmap": [ { "key": "nalle", "value": "kalle" }, { "key": "tralle", "value": "skalle" } ] } } """); FieldValue f = doc.getFieldValue(doc.getField("actualmap")); assertSame(MapFieldValue.class, f.getClass()); MapFieldValue<?, ?> m = (MapFieldValue<?, ?>) f; assertEquals(2, m.size()); assertEquals(new StringFieldValue("kalle"), m.get(new StringFieldValue("nalle"))); assertEquals(new StringFieldValue("skalle"), m.get(new StringFieldValue("tralle"))); } @Test public void testPositionPositive() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testsinglepos::bamf", "fields": { "singlepos": "N63.429722;E10.393333" } } """); FieldValue f = 
doc.getFieldValue(doc.getField("singlepos")); assertSame(Struct.class, f.getClass()); assertEquals(10393333, PositionDataType.getXValue(f).getInteger()); assertEquals(63429722, PositionDataType.getYValue(f).getInteger()); } @Test public void testPositionOld() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testsinglepos::bamf", "fields": { "singlepos": { "x": 10393333, "y": 63429722 } } } """); FieldValue f = doc.getFieldValue(doc.getField("singlepos")); assertSame(Struct.class, f.getClass()); assertEquals(10393333, PositionDataType.getXValue(f).getInteger()); assertEquals(63429722, PositionDataType.getYValue(f).getInteger()); } @Test public void testGeoPosition() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testsinglepos::bamf", "fields": { "singlepos": { "lat": 63.429722, "lng": 10.393333 } } } """); FieldValue f = doc.getFieldValue(doc.getField("singlepos")); assertSame(Struct.class, f.getClass()); assertEquals(10393333, PositionDataType.getXValue(f).getInteger()); assertEquals(63429722, PositionDataType.getYValue(f).getInteger()); } @Test public void testGeoPositionNoAbbreviations() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testsinglepos::bamf", "fields": { "singlepos": { "latitude": 63.429722, "longitude": 10.393333 } } } """); FieldValue f = doc.getFieldValue(doc.getField("singlepos")); assertSame(Struct.class, f.getClass()); assertEquals(10393333, PositionDataType.getXValue(f).getInteger()); assertEquals(63429722, PositionDataType.getYValue(f).getInteger()); } @Test public void testPositionGeoPos() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testsinglepos::bamf", "fields": { "geopos": "N63.429722;E10.393333" } } """); FieldValue f = doc.getFieldValue(doc.getField("geopos")); assertSame(Struct.class, f.getClass()); assertEquals(10393333, PositionDataType.getXValue(f).getInteger()); assertEquals(63429722, 
PositionDataType.getYValue(f).getInteger()); assertEquals(f.getDataType(), PositionDataType.INSTANCE); } @Test public void testPositionOldGeoPos() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testsinglepos::bamf", "fields": { "geopos": { "x": 10393333, "y": 63429722 } } } """); FieldValue f = doc.getFieldValue(doc.getField("geopos")); assertSame(Struct.class, f.getClass()); assertEquals(10393333, PositionDataType.getXValue(f).getInteger()); assertEquals(63429722, PositionDataType.getYValue(f).getInteger()); assertEquals(f.getDataType(), PositionDataType.INSTANCE); } @Test public void testGeoPositionGeoPos() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testsinglepos::bamf", "fields": { "geopos": { "lat": 63.429722, "lng": 10.393333 } } } """); FieldValue f = doc.getFieldValue(doc.getField("geopos")); assertSame(Struct.class, f.getClass()); assertEquals(10393333, PositionDataType.getXValue(f).getInteger()); assertEquals(63429722, PositionDataType.getYValue(f).getInteger()); assertEquals(f.getDataType(), PositionDataType.INSTANCE); assertEquals(PositionDataType.INSTANCE, f.getDataType()); } @Test public void testPositionNegative() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testsinglepos::bamf", "fields": { "singlepos": "W46.63;S23.55" } } """); FieldValue f = doc.getFieldValue(doc.getField("singlepos")); assertSame(Struct.class, f.getClass()); assertEquals(-46630000, PositionDataType.getXValue(f).getInteger()); assertEquals(-23550000, PositionDataType.getYValue(f).getInteger()); } @Test public void testRaw() throws IOException { String base64 = new String(new JsonStringEncoder().quoteAsString( Base64.getEncoder().withoutPadding().encodeToString(Utf8.toBytes("smoketest")))); String s = fieldStringFromBase64RawContent(base64); assertEquals("smoketest", s); } @Test public void can_read_legacy_chunked_base64_raw_field_encoding() throws IOException { String expected = "this is a string 
with an impressive length. it's long enough to reach the end of the line, wow!"; String base64withDelims = "dGhpcyBpcyBhIHN0cmluZyB3aXRoIGFuIGltcHJlc3NpdmUgbGVuZ3RoLiBpdCdzIGxvbmcgZW5v\\r\\n" + "dWdoIHRvIHJlYWNoIHRoZSBlbmQgb2YgdGhlIGxpbmUsIHdvdyE=\\r\\n"; assertEquals(expected, fieldStringFromBase64RawContent(base64withDelims)); } private String fieldStringFromBase64RawContent(String base64data) throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testraw::whee", "fields": { "actualraw": "%s" } } """.formatted(base64data)); FieldValue f = doc.getFieldValue(doc.getField("actualraw")); assertSame(Raw.class, f.getClass()); Raw s = (Raw) f; return Utf8.toString(s.getByteBuffer()); } @Test public void testMapStringToArrayOfInt() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testMapStringToArrayOfInt::whee", "fields": { "actualMapStringToArrayOfInt": { "bamse": [1, 2, 3] } } } """); FieldValue f = doc.getFieldValue("actualMapStringToArrayOfInt"); assertSame(MapFieldValue.class, f.getClass()); MapFieldValue<?, ?> m = (MapFieldValue<?, ?>) f; Array<?> a = (Array<?>) m.get(new StringFieldValue("bamse")); assertEquals(3, a.size()); assertEquals(new IntegerFieldValue(1), a.get(0)); assertEquals(new IntegerFieldValue(2), a.get(1)); assertEquals(new IntegerFieldValue(3), a.get(2)); } @Test @Test public void testAssignToString() throws IOException { DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:smoke::whee", "fields": { "something": { "assign": "orOther" } } } """); FieldUpdate f = doc.getFieldUpdate("something"); assertEquals(1, f.size()); AssignValueUpdate a = (AssignValueUpdate) f.getValueUpdate(0); assertEquals(new StringFieldValue("orOther"), a.getValue()); } @Test public void testNestedArrayMatch() throws IOException { DocumentUpdate nested = parseUpdate(""" { "update": "id:unittest:testArrayOfArrayOfInt::whee", "fields": { "arrayOfArrayOfInt": { "match": { "element": 1, "match": { "element": 2, "assign": 3 } 
} } } } """); DocumentUpdate equivalent = parseUpdate(""" { "update": "id:unittest:testArrayOfArrayOfInt::whee", "fields": { "arrayOfArrayOfInt": { "match": { "match": { "assign": 3, "element": 2 }, "element": 1 } } } } """); assertEquals(nested, equivalent); assertEquals(1, nested.fieldUpdates().size()); FieldUpdate fu = nested.fieldUpdates().iterator().next(); assertEquals(1, fu.getValueUpdates().size()); MapValueUpdate mvu = (MapValueUpdate) fu.getValueUpdate(0); assertEquals(new IntegerFieldValue(1), mvu.getValue()); MapValueUpdate nvu = (MapValueUpdate) mvu.getUpdate(); assertEquals(new IntegerFieldValue(2), nvu.getValue()); AssignValueUpdate avu = (AssignValueUpdate) nvu.getUpdate(); assertEquals(new IntegerFieldValue(3), avu.getValue()); Document doc = docFromJson(""" { "put": "id:unittest:testArrayOfArrayOfInt::whee", "fields": { "arrayOfArrayOfInt": [ [1, 2, 3], [4, 5, 6] ] } } """); nested.applyTo(doc); Document expected = docFromJson(""" { "put": "id:unittest:testArrayOfArrayOfInt::whee", "fields": { "arrayOfArrayOfInt": [ [1, 2, 3], [4, 5, 3] ] } } """); assertEquals(expected, doc); } @Test public void testMatchCannotUpdateNestedFields() { assertEquals("Field type Map<string,Array<int>> not supported.", assertThrows(UnsupportedOperationException.class, () -> parseUpdate(""" { "update": "id:unittest:testMapStringToArrayOfInt::whee", "fields": { "actualMapStringToArrayOfInt": { "match": { "element": "bamse", "match": { "element": 1, "assign": 4 } } } } } """)).getMessage()); } @Test public void testMatchCannotAssignToNestedMap() { assertEquals("Field type Map<string,Array<int>> not supported.", assertThrows(UnsupportedOperationException.class, () -> parseUpdate(""" { "update": "id:unittest:testMapStringToArrayOfInt::whee", "fields": { "actualMapStringToArrayOfInt": { "match": { "element": "bamse", "assign": [1, 3, 4] } } } } """)).getMessage()); } @Test public void testMatchCannotAssignToMap() { assertEquals("Field type Map<string,string> not supported.", 
assertThrows(UnsupportedOperationException.class, () -> parseUpdate(""" { "update": "id:unittest:testmap::whee", "fields": { "actualmap": { "match": { "element": "bamse", "assign": "bar" } } } } """)).getMessage()); } @Test public void testAssignInsideArrayInMap() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testMapStringToArrayOfInt::whee", "fields": { "actualMapStringToArrayOfInt": { "bamse": [1, 2, 3] } } }"""); assertEquals(2, ((MapFieldValue<StringFieldValue, Array<IntegerFieldValue>>) doc.getFieldValue("actualMapStringToArrayOfInt")) .get(StringFieldValue.getFactory().create("bamse")).get(1).getInteger()); DocumentUpdate update = parseUpdate(""" { "update": "id:unittest:testMapStringToArrayOfInt::whee", "fields": { "actualMapStringToArrayOfInt{bamse}[1]": { "assign": 4 } } } """); assertEquals(1, update.fieldPathUpdates().size()); update.applyTo(doc); assertEquals(4, ((MapFieldValue<StringFieldValue, Array<IntegerFieldValue>>) doc.getFieldValue("actualMapStringToArrayOfInt")) .get(StringFieldValue.getFactory().create("bamse")).get(1).getInteger()); } @Test public void testAssignToArray() throws IOException { DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:testMapStringToArrayOfInt::whee", "fields": { "actualMapStringToArrayOfInt": { "assign": { "bamse": [1, 2, 3] } } } } """); FieldUpdate f = doc.getFieldUpdate("actualMapStringToArrayOfInt"); assertEquals(1, f.size()); AssignValueUpdate assign = (AssignValueUpdate) f.getValueUpdate(0); MapFieldValue<?, ?> m = (MapFieldValue<?, ?>) assign.getValue(); Array<?> a = (Array<?>) m.get(new StringFieldValue("bamse")); assertEquals(3, a.size()); assertEquals(new IntegerFieldValue(1), a.get(0)); assertEquals(new IntegerFieldValue(2), a.get(1)); assertEquals(new IntegerFieldValue(3), a.get(2)); } @Test public void testOldAssignToArray() throws IOException { DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:testMapStringToArrayOfInt::whee", "fields": { 
"actualMapStringToArrayOfInt": { "assign": [ { "key": "bamse", "value": [1, 2, 3] } ] } } } """); FieldUpdate f = doc.getFieldUpdate("actualMapStringToArrayOfInt"); assertEquals(1, f.size()); AssignValueUpdate assign = (AssignValueUpdate) f.getValueUpdate(0); MapFieldValue<?, ?> m = (MapFieldValue<?, ?>) assign.getValue(); Array<?> a = (Array<?>) m.get(new StringFieldValue("bamse")); assertEquals(3, a.size()); assertEquals(new IntegerFieldValue(1), a.get(0)); assertEquals(new IntegerFieldValue(2), a.get(1)); assertEquals(new IntegerFieldValue(3), a.get(2)); } @Test public void testAssignToWeightedSet() throws IOException { DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:testset::whee", "fields": { "actualset": { "assign": { "person": 37, "another person": 41 } } } } """); FieldUpdate x = doc.getFieldUpdate("actualset"); assertEquals(1, x.size()); AssignValueUpdate assign = (AssignValueUpdate) x.getValueUpdate(0); WeightedSet<?> w = (WeightedSet<?>) assign.getValue(); assertEquals(2, w.size()); assertEquals(Integer.valueOf(37), w.get(new StringFieldValue("person"))); assertEquals(Integer.valueOf(41), w.get(new StringFieldValue("another person"))); } @Test public void testCompleteFeed() { JsonReader r = createReader(""" [ { "put": "id:unittest:smoke::whee", "fields": { "something": "smoketest", "flag": true, "nalle": "bamse" } }, { "update": "id:unittest:testarray::whee", "fields": { "actualarray": { "add": [ "person", "another person" ] } } }, { "remove": "id:unittest:smoke::whee" } ] """); controlBasicFeed(r); } @Test public void testCompleteFeedWithCreateAndCondition() { JsonReader r = createReader(""" [ { "put": "id:unittest:smoke::whee", "fields": { "something": "smoketest", "flag": true, "nalle": "bamse" } }, { "condition":"bla", "update": "id:unittest:testarray::whee", "create":true, "fields": { "actualarray": { "add": [ "person", "another person" ] } } }, { "remove": "id:unittest:smoke::whee" } ] """); DocumentOperation d = r.next(); Document 
doc = ((DocumentPut) d).getDocument(); smokeTestDoc(doc); d = r.next(); DocumentUpdate update = (DocumentUpdate) d; checkSimpleArrayAdd(update); assertTrue(update.getCreateIfNonExistent()); assertEquals("bla", update.getCondition().getSelection()); d = r.next(); DocumentRemove remove = (DocumentRemove) d; assertEquals("smoke", remove.getId().getDocType()); assertNull(r.next()); } @Test public void testUpdateWithConditionAndCreateInDifferentOrdering() { int documentsCreated = 106; List<String> parts = Arrays.asList( "\"condition\":\"bla\"", "\"update\": \"id:unittest:testarray::whee\"", " \"fields\": { " + "\"actualarray\": { \"add\": [" + " \"person\",\"another person\"]}}", " \"create\":true"); Random random = new Random(42); StringBuilder documents = new StringBuilder("["); for (int x = 0; x < documentsCreated; x++) { Collections.shuffle(parts, random); documents.append("{").append(Joiner.on(",").join(parts)).append("}"); if (x < documentsCreated -1) { documents.append(","); } } documents.append("]"); InputStream rawDoc = new ByteArrayInputStream(Utf8.toBytes(documents.toString())); JsonReader r = new JsonReader(types, rawDoc, parserFactory); for (int x = 0; x < documentsCreated; x++) { DocumentUpdate update = (DocumentUpdate) r.next(); checkSimpleArrayAdd(update); assertTrue(update.getCreateIfNonExistent()); assertEquals("bla", update.getCondition().getSelection()); } assertNull(r.next()); } @Test public void testCreateIfNonExistentInPut() { JsonReader r = createReader(""" [ { "create":true, "fields": { "something": "smoketest", "nalle": "bamse" }, "put": "id:unittest:smoke::whee" } ] """); var op = r.next(); var put = (DocumentPut) op; assertTrue(put.getCreateIfNonExistent()); } @Test public void testCompleteFeedWithIdAfterFields() { JsonReader r = createReader(""" [ { "fields": { "something": "smoketest", "flag": true, "nalle": "bamse" }, "put": "id:unittest:smoke::whee" }, { "fields": { "actualarray": { "add": [ "person", "another person" ] } }, "update": 
"id:unittest:testarray::whee" }, { "remove": "id:unittest:smoke::whee" } ] """); controlBasicFeed(r); } protected void controlBasicFeed(JsonReader r) { DocumentOperation d = r.next(); Document doc = ((DocumentPut) d).getDocument(); smokeTestDoc(doc); d = r.next(); DocumentUpdate update = (DocumentUpdate) d; checkSimpleArrayAdd(update); d = r.next(); DocumentRemove remove = (DocumentRemove) d; assertEquals("smoke", remove.getId().getDocType()); assertNull(r.next()); } @Test public void testCompleteFeedWithEmptyDoc() { JsonReader r = createReader(""" [ { "put": "id:unittest:smoke::whee", "fields": {} }, { "update": "id:unittest:testarray::whee", "fields": {} }, { "remove": "id:unittest:smoke::whee" } ] """); DocumentOperation d = r.next(); Document doc = ((DocumentPut) d).getDocument(); assertEquals("smoke", doc.getId().getDocType()); d = r.next(); DocumentUpdate update = (DocumentUpdate) d; assertEquals("testarray", update.getId().getDocType()); d = r.next(); DocumentRemove remove = (DocumentRemove) d; assertEquals("smoke", remove.getId().getDocType()); assertNull(r.next()); } private void checkSimpleArrayAdd(DocumentUpdate update) { Set<String> toAdd = new HashSet<>(); FieldUpdate x = update.getFieldUpdate("actualarray"); for (ValueUpdate<?> v : x.getValueUpdates()) { AddValueUpdate adder = (AddValueUpdate) v; toAdd.add(((StringFieldValue) adder.getValue()).getString()); } assertEquals(2, toAdd.size()); assertTrue(toAdd.contains("person")); assertTrue(toAdd.contains("another person")); } private void smokeTestDoc(Document doc) { FieldValue boolField = doc.getFieldValue(doc.getField("flag")); assertSame(BoolFieldValue.class, boolField.getClass()); assertTrue((Boolean)boolField.getWrappedValue()); FieldValue stringField = doc.getFieldValue(doc.getField("nalle")); assertSame(StringFieldValue.class, stringField.getClass()); assertEquals("bamse", ((StringFieldValue) stringField).getString()); } @Test public void nonExistingFieldCausesException() throws IOException { 
Exception expected = assertThrows(IllegalArgumentException.class, () -> docFromJson(""" { "put": "id:unittest:smoke::whee", "fields": { "smething": "smoketest", "nalle": "bamse" } } """)); assertTrue(expected.getMessage().startsWith("No field 'smething' in the structure of type 'smoke'")); } @Test public void nonExistingFieldsCanBeIgnoredInPut() throws IOException { JsonReader r = createReader(""" { "put": "id:unittest:smoke::doc1", "fields": { "nonexisting1": "ignored value", "field1": "value1", "nonexisting2": { "blocks": { "a": [2.0, 3.0], "b": [4.0, 5.0] } }, "field2": "value2", "nonexisting3": { "cells": [ { "address": { "x": "x1" }, "value": 1.0 } ] }, "tensor1": { "cells": { "x1": 1.0 } }, "nonexisting4": "ignored value" } } """); DocumentParseInfo parseInfo = r.parseDocument().get(); DocumentType docType = r.readDocumentType(parseInfo.documentId); DocumentPut put = new DocumentPut(new Document(docType, parseInfo.documentId)); boolean fullyApplied = new VespaJsonDocumentReader(true).readPut(parseInfo.fieldsBuffer, put); assertFalse(fullyApplied); assertNull(put.getDocument().getField("nonexisting1")); assertEquals("value1", put.getDocument().getFieldValue("field1").toString()); assertNull(put.getDocument().getField("nonexisting2")); assertEquals("value2", put.getDocument().getFieldValue("field2").toString()); assertNull(put.getDocument().getField("nonexisting3")); assertEquals(Tensor.from("tensor(x{}):{{x:x1}:1.0}"), put.getDocument().getFieldValue("tensor1").getWrappedValue()); assertNull(put.getDocument().getField("nonexisting4")); } @Test public void nonExistingFieldsCanBeIgnoredInUpdate() throws IOException{ JsonReader r = createReader(""" { "update": "id:unittest:smoke::doc1", "fields": { "nonexisting1": { "assign": "ignored value" }, "field1": { "assign": "value1" }, "nonexisting2": { "assign": { "blocks": { "a":[2.0,3.0], "b":[4.0,5.0] } } }, "field2": { "assign": "value2" }, "nonexisting3": { "assign" : { "cells": [{"address": {"x": "x1"}, "value": 
1.0}] } }, "tensor1": {"assign": { "cells": {"x1": 1.0} } }, "nonexisting4": { "assign": "ignored value" } } } """); DocumentParseInfo parseInfo = r.parseDocument().get(); DocumentType docType = r.readDocumentType(parseInfo.documentId); DocumentUpdate update = new DocumentUpdate(docType, parseInfo.documentId); boolean fullyApplied = new VespaJsonDocumentReader(true).readUpdate(parseInfo.fieldsBuffer, update); assertFalse(fullyApplied); assertNull(update.getFieldUpdate("nonexisting1")); assertEquals("value1", update.getFieldUpdate("field1").getValueUpdates().get(0).getValue().getWrappedValue().toString()); assertNull(update.getFieldUpdate("nonexisting2")); assertEquals("value2", update.getFieldUpdate("field2").getValueUpdates().get(0).getValue().getWrappedValue().toString()); assertNull(update.getFieldUpdate("nonexisting3")); assertEquals(Tensor.from("tensor(x{}):{{x:x1}:1.0}"), update.getFieldUpdate("tensor1").getValueUpdates().get(0).getValue().getWrappedValue()); assertNull(update.getFieldUpdate("nonexisting4")); } @Test public void feedWithBasicErrorTest() { JsonReader r = createReader(""" [ { "put": "id:test:smoke::0", "fields": { "something": "foo" } }, { "put": "id:test:smoke::1", "fields": { "something": "foo" } }, { "put": "id:test:smoke::2", "fields": { "something": "foo" } }, ]"""); assertTrue(assertThrows(RuntimeException.class, () -> { while (r.next() != null); }) .getMessage().contains("JsonParseException")); } @Test public void idAsAliasForPutTest() throws IOException{ JsonReader r = createReader(""" { "id": "id:unittest:smoke::doc1", "fields": { "something": "smoketest", "flag": true, "nalle": "bamse" } } """); DocumentParseInfo parseInfo = r.parseDocument().get(); DocumentType docType = r.readDocumentType(parseInfo.documentId); DocumentPut put = new DocumentPut(new Document(docType, parseInfo.documentId)); boolean fullyApplied = new VespaJsonDocumentReader(false).readPut(parseInfo.fieldsBuffer, put); assertTrue(fullyApplied); 
smokeTestDoc(put.getDocument()); } private void testFeedWithTestAndSetCondition(String jsonDoc) { ByteArrayInputStream parseInfoDoc = new ByteArrayInputStream(Utf8.toBytes(jsonDoc)); JsonReader reader = new JsonReader(types, parseInfoDoc, parserFactory); int NUM_OPERATIONS_IN_FEED = 3; for (int i = 0; i < NUM_OPERATIONS_IN_FEED; i++) { DocumentOperation operation = reader.next(); assertTrue("A test and set condition should be present", operation.getCondition().isPresent()); assertEquals("DocumentOperation's test and set condition should be equal to the one in the JSON feed", "smoke.something == \"smoketest\"", operation.getCondition().getSelection()); } assertNull(reader.next()); } @Test public void testFeedWithTestAndSetConditionOrderingOne() { testFeedWithTestAndSetCondition(""" [ { "put": "id:unittest:smoke::whee", "condition": "smoke.something == \\"smoketest\\"", "fields": { "something": "smoketest", "nalle": "bamse" } }, { "update": "id:unittest:testarray::whee", "condition": "smoke.something == \\"smoketest\\"", "fields": { "actualarray": { "add": [ "person", "another person" ] } } }, { "remove": "id:unittest:smoke::whee", "condition": "smoke.something == \\"smoketest\\"" } ] """); } @Test public void testFeedWithTestAndSetConditionOrderingTwo() { testFeedWithTestAndSetCondition(""" [ { "condition": "smoke.something == \\"smoketest\\"", "put": "id:unittest:smoke::whee", "fields": { "something": "smoketest", "nalle": "bamse" } }, { "condition": "smoke.something == \\"smoketest\\"", "update": "id:unittest:testarray::whee", "fields": { "actualarray": { "add": [ "person", "another person" ] } } }, { "condition": "smoke.something == \\"smoketest\\"", "remove": "id:unittest:smoke::whee" } ] """); } @Test public void testFeedWithTestAndSetConditionOrderingThree() { testFeedWithTestAndSetCondition(""" [ { "put": "id:unittest:smoke::whee", "fields": { "something": "smoketest", "nalle": "bamse" }, "condition": "smoke.something == \\"smoketest\\"" }, { "update": 
"id:unittest:testarray::whee", "fields": { "actualarray": { "add": [ "person", "another person" ] } }, "condition": "smoke.something == \\"smoketest\\"" }, { "remove": "id:unittest:smoke::whee", "condition": "smoke.something == \\"smoketest\\"" } ] """); } @Test(expected = IllegalArgumentException.class) public void testInvalidFieldAfterFieldsFieldShouldFailParse() { String jsonData = """ [ { "put": "id:unittest:smoke::whee", "fields": { "something": "smoketest", "nalle": "bamse" }, "bjarne": "stroustrup" } ]"""; new JsonReader(types, jsonToInputStream(jsonData), parserFactory).next(); } @Test(expected = IllegalArgumentException.class) public void testInvalidFieldBeforeFieldsFieldShouldFailParse() { String jsonData = """ [ { "update": "id:unittest:testarray::whee", "what is this": "nothing to see here", "fields": { "actualarray": { "add": [ "person", "another person" ] } } } ]"""; new JsonReader(types, jsonToInputStream(jsonData), parserFactory).next(); } @Test(expected = IllegalArgumentException.class) public void testInvalidFieldWithoutFieldsFieldShouldFailParse() { String jsonData = """ [ { "remove": "id:unittest:smoke::whee", "what is love": "baby, do not hurt me... 
much } ]"""; new JsonReader(types, jsonToInputStream(jsonData), parserFactory).next(); } @Test public void testMissingOperation() { try { String jsonData = """ [ { "fields": { "actualarray": { "add": [ "person", "another person" ] } } } ]"""; new JsonReader(types, jsonToInputStream(jsonData), parserFactory).next(); fail("Expected exception"); } catch (IllegalArgumentException e) { assertEquals("Missing a document operation ('put', 'update' or 'remove')", e.getMessage()); } } @Test public void testMissingFieldsMapInPut() { try { String jsonData = """ [ { "put": "id:unittest:smoke::whee" } ]"""; new JsonReader(types, jsonToInputStream(jsonData), parserFactory).next(); fail("Expected exception"); } catch (IllegalArgumentException e) { assertEquals("put of document id:unittest:smoke::whee is missing a 'fields' map", e.getMessage()); } } @Test public void testMissingFieldsMapInUpdate() { try { String jsonData = """ [ { "update": "id:unittest:smoke::whee" } ]"""; new JsonReader(types, jsonToInputStream(jsonData), parserFactory).next(); fail("Expected exception"); } catch (IllegalArgumentException e) { assertEquals("Update of document id:unittest:smoke::whee is missing a 'fields' map", e.getMessage()); } } @Test public void testNullValues() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testnull::doc1", "fields": { "intfield": null, "stringfield": null, "arrayfield": null, "weightedsetfield": null, "mapfield": null, "tensorfield": null } } """); assertFieldValueNull(doc, "intfield"); assertFieldValueNull(doc, "stringfield"); assertFieldValueNull(doc, "arrayfield"); assertFieldValueNull(doc, "weightedsetfield"); assertFieldValueNull(doc, "mapfield"); assertFieldValueNull(doc, "tensorfield"); } @Test(expected=JsonReaderException.class) public void testNullArrayElement() throws IOException { docFromJson(""" { "put": "id:unittest:testnull::doc1", "fields": { "arrayfield": [ null ] } } """); fail(); } private void assertFieldValueNull(Document doc, 
String fieldName) { Field field = doc.getField(fieldName); assertNotNull(field); FieldValue fieldValue = doc.getFieldValue(field); assertNull(fieldValue); } static ByteArrayInputStream jsonToInputStream(String json) { return new ByteArrayInputStream(Utf8.toBytes(json)); } @Test public void testParsingWithoutTensorField() { Document doc = createPutWithoutTensor().getDocument(); assertEquals("testtensor", doc.getId().getDocType()); assertEquals("id:unittest:testtensor::0", doc.getId().toString()); TensorFieldValue fieldValue = (TensorFieldValue)doc.getFieldValue(doc.getField("sparse_tensor")); assertNull(fieldValue); } @Test public void testParsingOfEmptyTensor() { assertSparseTensorField("tensor(x{},y{}):{}", createPutWithSparseTensor("{}")); } @Test public void testParsingOfTensorWithEmptyCells() { assertSparseTensorField("tensor(x{},y{}):{}", createPutWithSparseTensor(inputJson("{ 'cells': [] }"))); } @Test public void testDisallowedDenseTensorShortFormWithoutValues() { assertCreatePutFails(inputJson("{ 'values': [] }"), "dense_tensor", "The 'values' array does not contain any values"); assertCreatePutFails(inputJson("{ 'values': '' }"), "dense_tensor", "The 'values' string does not contain any values"); } @Test public void testDisallowedMixedTensorShortFormWithoutValues() { assertCreatePutFails(inputJson("{\"blocks\":{ \"a\": [] } }"), "mixed_tensor", "Expected 3 values, but got 0"); assertCreatePutFails(inputJson("{\"blocks\":[ {\"address\":{\"x\":\"a\"}, \"values\": [] } ] }"), "mixed_tensor", "Expected 3 values, but got 0"); } @Test public void testParsingOfSparseTensorWithCells() { Tensor tensor = assertSparseTensorField("{{x:a,y:b}:2.0,{x:c,y:b}:3.0}}", createPutWithSparseTensor(""" { "type": "tensor(x{},y{})", "cells": [ { "address": { "x": "a", "y": "b" }, "value": 2.0 }, { "address": { "x": "c", "y": "b" }, "value": 3.0 } ] } """)); assertTrue(tensor instanceof MappedTensor); }
The `TODO` comment can probably be removed as part of this change.
void merge_throttler_memory_limit_config_has_expected_defaults() {
    // Build a StorServerConfig from default properties and inspect the
    // merge-throttling memory limit sub-config.
    var serverConfig = configFromProperties(new TestProperties());
    var memoryLimit = serverConfig.merge_throttling_memory_limit();
    // Default max usage is 0 bytes; the auto-scale bounds are checked by the shared helper.
    assertEquals(0L, memoryLimit.max_usage_bytes());
    assertMergeAutoScaleConfigHasExpectedValues(memoryLimit);
}
assertEquals(0L, limit.max_usage_bytes());
void merge_throttler_memory_limit_config_has_expected_defaults() {
    // With default TestProperties the merge-throttling memory limit reports
    // zero max usage; auto-scaling bounds are verified by the shared helper.
    var limit = configFromProperties(new TestProperties()).merge_throttling_memory_limit();
    assertEquals(0L, limit.max_usage_bytes());
    assertMergeAutoScaleConfigHasExpectedValues(limit);
}
class StorageClusterTest { StorageCluster parse(String xml, Flavor flavor) { MockRoot root = new MockRoot("", new DeployState.Builder() .applicationPackage(new MockApplicationPackage.Builder().build()) .modelHostProvisioner(new SingleNodeProvisioner(flavor)).build()); return parse(xml, root); } StorageCluster parse(String xml, Flavor flavor, ModelContext.Properties properties) { MockRoot root = new MockRoot("", new DeployState.Builder() .applicationPackage(new MockApplicationPackage.Builder().build()) .modelHostProvisioner(new SingleNodeProvisioner(flavor)) .properties(properties).build()); return parse(xml, root); } StorageCluster parse(String xml, ModelContext.Properties properties) { MockRoot root = new MockRoot("", new DeployState.Builder() .properties(properties) .applicationPackage(new MockApplicationPackage.Builder().build()) .build()); return parse(xml, root); } StorageCluster parse(String xml) { return parse(xml, new TestProperties()); } StorageCluster parse(String xml, MockRoot root) { root.getDeployState().getDocumentModel().getDocumentManager().add( new NewDocumentType(new NewDocumentType.Name("music")) ); root.getDeployState().getDocumentModel().getDocumentManager().add( new NewDocumentType(new NewDocumentType.Name("movies")) ); ContentCluster cluster = ContentClusterUtils.createCluster(xml, root); root.freezeModelTopology(); return cluster.getStorageCluster(); } private static String group() { return joinLines( "<group>", " <node distribution-key=\"0\" hostalias=\"mockhost\"/>", "</group>"); } private static String cluster(String clusterName, String insert) { return joinLines( "<content id=\"" + clusterName + "\">", "<redundancy>3</redundancy>" + "<documents/>", insert, group(), "</content>"); } @Test void testBasics() { StorageCluster storage = parse(cluster("foofighters", "")); assertEquals(1, storage.getChildren().size()); StorServerConfig.Builder builder = new StorServerConfig.Builder(); storage.getConfig(builder); StorServerConfig config = new 
StorServerConfig(builder); assertFalse(config.is_distributor()); assertEquals("foofighters", config.cluster_name()); assertEquals(4, config.content_node_bucket_db_stripe_bits()); } @Test void testCommunicationManagerDefaults() { StorageCluster storage = parse(cluster("foofighters", "")); StorCommunicationmanagerConfig.Builder builder = new StorCommunicationmanagerConfig.Builder(); storage.getChildren().get("0").getConfig(builder); StorCommunicationmanagerConfig config = new StorCommunicationmanagerConfig(builder); assertEquals(1, config.mbus().num_network_threads()); } @Test void testMergeDefaults() { StorServerConfig.Builder builder = new StorServerConfig.Builder(); parse(cluster("foofighters", "")).getConfig(builder); StorServerConfig config = new StorServerConfig(builder); assertEquals(16, config.max_merges_per_node()); assertEquals(100, config.max_merge_queue_size()); assertTrue(config.disable_queue_limits_for_chained_merges()); } @Test void testMerges() { StorServerConfig.Builder builder = new StorServerConfig.Builder(); parse(cluster("foofighters", joinLines( "<tuning>", " <merges max-per-node=\"1K\" max-queue-size=\"10K\"/>", "</tuning>"))).getConfig(builder); StorServerConfig config = new StorServerConfig(builder); assertEquals(1024, config.max_merges_per_node()); assertEquals(1024 * 10, config.max_merge_queue_size()); } private StorServerConfig configFromProperties(TestProperties properties) { StorServerConfig.Builder builder = new StorServerConfig.Builder(); parse(cluster("foofighters", ""), properties).getConfig(builder); return new StorServerConfig(builder); } private StorCommunicationmanagerConfig communicationmanagerConfigFromProperties(TestProperties properties) { StorCommunicationmanagerConfig.Builder builder = new StorCommunicationmanagerConfig.Builder(); parse(cluster("foofighters", ""), properties).getChildren().values().iterator().next().getConfig(builder); return new StorCommunicationmanagerConfig(builder); } private StorFilestorConfig 
filestorConfigFromProducer(StorFilestorConfig.Producer producer) { var builder = new StorFilestorConfig.Builder(); producer.getConfig(builder); return new StorFilestorConfig(builder); } @Test void verifyDefaultMbusConfig() { var confg = communicationmanagerConfigFromProperties(new TestProperties()); assertEquals(1, confg.mbus().num_network_threads()); assertEquals(2, confg.mbus().num_rpc_targets()); assertEquals(1, confg.mbus().events_before_wakeup()); assertEquals(2, confg.rpc().num_targets_per_node()); assertEquals(1, confg.rpc().events_before_wakeup()); } @Test void verifyDefaultMbusConfigControl() { var confg = communicationmanagerConfigFromProperties(new TestProperties() .setMbusNetworkThreads(7) .setRpcNumTargets(11) .setRpcEventsBeforeWakeup(12) .setMbusCppRpcNumTargets(8) .setMbusCppEventsBeforeWakeup(9)); assertEquals(7, confg.mbus().num_network_threads()); assertEquals(8, confg.mbus().num_rpc_targets()); assertEquals(9, confg.mbus().events_before_wakeup()); assertEquals(11, confg.rpc().num_targets_per_node()); assertEquals(12, confg.rpc().events_before_wakeup()); } @Test void testMergeFeatureFlags() { var config = configFromProperties(new TestProperties()); assertEquals(16, config.max_merges_per_node()); assertEquals(100, config.max_merge_queue_size()); } @Test void merge_throttling_policy_config_defaults_to_static() { var config = configFromProperties(new TestProperties()); assertEquals(StorServerConfig.Merge_throttling_policy.Type.STATIC, config.merge_throttling_policy().type()); } @Test void assertMergeAutoScaleConfigHasExpectedValues(StorServerConfig.Merge_throttling_memory_limit limit) { assertEquals(128L*1024*1024, limit.auto_lower_bound_bytes()); assertEquals(2L*1024*1024*1024, limit.auto_upper_bound_bytes()); assertEquals(0.03, limit.auto_phys_mem_scale_factor(), 0.000001); } @Test void testVisitors() { StorVisitorConfig.Builder builder = new StorVisitorConfig.Builder(); parse(cluster("bees", joinLines( "<tuning>", " <visitors thread-count=\"7\" 
max-queue-size=\"1000\">", " <max-concurrent fixed=\"42\" variable=\"100\"/>", " </visitors>", "</tuning>")) ).getConfig(builder); StorVisitorConfig config = new StorVisitorConfig(builder); assertEquals(42, config.maxconcurrentvisitors_fixed()); assertEquals(100, config.maxconcurrentvisitors_variable()); assertEquals(7, config.visitorthreads()); assertEquals(1000, config.maxvisitorqueuesize()); } @Test void testPersistenceThreads() { StorageCluster stc = parse(cluster("bees", joinLines( "<tuning>", " <persistence-threads count=\"7\"/>", "</tuning>")), new Flavor(new FlavorsConfig.Flavor.Builder().name("test-flavor").minCpuCores(9).build()) ); { var config = filestorConfigFromProducer(stc); assertEquals(7, config.num_threads()); assertFalse(config.enable_multibit_split_optimalization()); assertEquals(2, config.num_response_threads()); } { assertEquals(1, stc.getChildren().size()); StorageNode sn = stc.getChildren().values().iterator().next(); var config = filestorConfigFromProducer(sn); assertEquals(7, config.num_threads()); } } @Test void testResponseThreads() { StorageCluster stc = parse(cluster("bees", joinLines( "<tuning>", " <persistence-threads count=\"7\"/>", "</tuning>")), new Flavor(new FlavorsConfig.Flavor.Builder().name("test-flavor").minCpuCores(9).build()) ); var config = filestorConfigFromProducer(stc); assertEquals(2, config.num_response_threads()); assertEquals(StorFilestorConfig.Response_sequencer_type.ADAPTIVE, config.response_sequencer_type()); assertEquals(7, config.num_threads()); } @Test void testPersistenceThreadsOld() { StorageCluster stc = parse(cluster("bees", joinLines( "<tuning>", " <persistence-threads>", " <thread lowest-priority=\"VERY_LOW\" count=\"2\"/>", " <thread lowest-priority=\"VERY_HIGH\" count=\"1\"/>", " <thread count=\"1\"/>", " </persistence-threads>", "</tuning>")), new Flavor(new FlavorsConfig.Flavor.Builder().name("test-flavor").minCpuCores(9).build()) ); { var config = filestorConfigFromProducer(stc); assertEquals(4, 
config.num_threads()); assertFalse(config.enable_multibit_split_optimalization()); } { assertEquals(1, stc.getChildren().size()); StorageNode sn = stc.getChildren().values().iterator().next(); var config = filestorConfigFromProducer(sn); assertEquals(4, config.num_threads()); } } @Test void testNoPersistenceThreads() { StorageCluster stc = parse(cluster("bees", ""), new Flavor(new FlavorsConfig.Flavor.Builder().name("test-flavor").minCpuCores(9).build()) ); { var config = filestorConfigFromProducer(stc); assertEquals(8, config.num_threads()); } { assertEquals(1, stc.getChildren().size()); StorageNode sn = stc.getChildren().values().iterator().next(); var config = filestorConfigFromProducer(sn); assertEquals(9, config.num_threads()); } } private StorageCluster simpleCluster(ModelContext.Properties properties) { return parse(cluster("bees", ""), new Flavor(new FlavorsConfig.Flavor.Builder().name("test-flavor").minCpuCores(9).build()), properties); } @Test void testFeatureFlagControlOfResponseSequencer() { var config = filestorConfigFromProducer(simpleCluster(new TestProperties().setResponseNumThreads(13).setResponseSequencerType("THROUGHPUT"))); assertEquals(13, config.num_response_threads()); assertEquals(StorFilestorConfig.Response_sequencer_type.THROUGHPUT, config.response_sequencer_type()); } private void verifyAsyncMessageHandlingOnSchedule(boolean expected, boolean value) { var config = filestorConfigFromProducer(simpleCluster(new TestProperties().setAsyncMessageHandlingOnSchedule(value))); assertEquals(expected, config.use_async_message_handling_on_schedule()); } @Test void testFeatureFlagControlOfAsyncMessageHandlingOnSchedule() { verifyAsyncMessageHandlingOnSchedule(false, false); verifyAsyncMessageHandlingOnSchedule(true, true); } @Test void persistence_dynamic_throttling_parameters_have_sane_defaults() { var config = filestorConfigFromProducer(simpleCluster(new TestProperties())); assertEquals(StorFilestorConfig.Async_operation_throttler.Type.DYNAMIC, 
config.async_operation_throttler().type()); assertEquals(1.2, config.async_operation_throttler().window_size_decrement_factor(), 0.0001); assertEquals(0.95, config.async_operation_throttler().window_size_backoff(), 0.0001); assertEquals(20, config.async_operation_throttler().min_window_size()); assertEquals(-1, config.async_operation_throttler().max_window_size()); assertEquals(3.0, config.async_operation_throttler().resize_rate(), 0.0001); assertTrue(config.async_operation_throttler().throttle_individual_merge_feed_ops()); } @Test void testCapacity() { String xml = joinLines( "<cluster id=\"storage\">", " <redundancy>2</redundancy>" + " <documents/>", " <group>", " <node distribution-key=\"0\" hostalias=\"mockhost\"/>", " <node distribution-key=\"1\" hostalias=\"mockhost\" capacity=\"1.5\"/>", " <node distribution-key=\"2\" hostalias=\"mockhost\" capacity=\"2.0\"/>", " </group>", "</cluster>"); ContentCluster cluster = ContentClusterUtils.createCluster(xml, new MockRoot()); for (int i = 0; i < 3; ++i) { StorageNode node = cluster.getStorageCluster().getChildren().get("" + i); StorServerConfig.Builder builder = new StorServerConfig.Builder(); cluster.getStorageCluster().getConfig(builder); node.getConfig(builder); StorServerConfig config = new StorServerConfig(builder); assertEquals(1.0 + (double) i * 0.5, config.node_capacity(), 0.001); } } @Test void testRootFolder() { ContentCluster cluster = ContentClusterUtils.createCluster(cluster("storage", ""), new MockRoot()); StorageNode node = cluster.getStorageCluster().getChildren().get("0"); { StorServerConfig.Builder builder = new StorServerConfig.Builder(); cluster.getStorageCluster().getConfig(builder); node.getConfig(builder); StorServerConfig config = new StorServerConfig(builder); assertEquals(getDefaults().underVespaHome("var/db/vespa/search/storage/storage/0"), config.root_folder()); } { StorServerConfig.Builder builder = new StorServerConfig.Builder(); cluster.getDistributorNodes().getConfig(builder); 
cluster.getDistributorNodes().getChildren().get("0").getConfig(builder); StorServerConfig config = new StorServerConfig(builder); assertEquals(getDefaults().underVespaHome("var/db/vespa/search/storage/distributor/0"), config.root_folder()); } } @Test void testGenericPersistenceTuning() { String xml = joinLines( "<cluster id=\"storage\">", " <redundancy>2</redundancy>" + " <documents/>", " <engine>", " <fail-partition-on-error>true</fail-partition-on-error>", " <revert-time>34m</revert-time>", " <recovery-time>5d</recovery-time>", " </engine>", " <group>", " node distribution-key=\"0\" hostalias=\"mockhost\"/>", " </group>", "</cluster>"); ContentCluster cluster = ContentClusterUtils.createCluster(xml, new MockRoot()); PersistenceConfig.Builder builder = new PersistenceConfig.Builder(); cluster.getStorageCluster().getConfig(builder); PersistenceConfig config = new PersistenceConfig(builder); assertTrue(config.fail_partition_on_error()); assertEquals(34 * 60, config.revert_time_period()); assertEquals(5 * 24 * 60 * 60, config.keep_remove_time_period()); } @Test void requireThatUserDoesNotSpecifyBothGroupAndNodes() { String xml = joinLines( "<cluster id=\"storage\">", " <redundancy>2</redundancy>" + " <documents/>", " <engine>", " <fail-partition-on-error>true</fail-partition-on-error>", " <revert-time>34m</revert-time>", " <recovery-time>5d</recovery-time>", " </engine>", " <group>", " <node distribution-key=\"0\" hostalias=\"mockhost\"/>", " </group>", " <nodes>", " <node distribution-key=\"1\" hostalias=\"mockhost\"/>", " </nodes>", "</cluster>"); try { final MockRoot root = new MockRoot(); root.getDeployState().getDocumentModel().getDocumentManager().add( new NewDocumentType(new NewDocumentType.Name("music")) ); ContentClusterUtils.createCluster(xml, root); fail("Did not fail when having both group and nodes"); } catch (RuntimeException e) { assertEquals("In content cluster 'storage': Both <group> and <nodes> is specified: Only one of these tags can be used in the 
same configuration", Exceptions.toMessageString(e)); } } @Test void requireThatGroupNamesMustBeUniqueAmongstSiblings() { String xml = joinLines( "<cluster id=\"storage\">", " <redundancy>2</redundancy>", " <documents/>", " <group>", " <distribution partitions=\"*\"/>", " <group distribution-key=\"0\" name=\"bar\">", " <node distribution-key=\"0\" hostalias=\"mockhost\"/>", " </group>", " <group distribution-key=\"0\" name=\"bar\">", " <node distribution-key=\"1\" hostalias=\"mockhost\"/>", " </group>", " </group>", "</cluster>"); try { ContentClusterUtils.createCluster(xml, new MockRoot()); fail("Did not get exception with duplicate group names"); } catch (RuntimeException e) { assertEquals("Cluster 'storage' has multiple groups with name 'bar' in the same subgroup. " + "Group sibling names must be unique.", e.getMessage()); } } @Test void requireThatGroupNamesCanBeDuplicatedAcrossLevels() { String xml = joinLines( "<cluster id=\"storage\">", " <redundancy>2</redundancy>", " <documents/>", " <group>", " <distribution partitions=\"*\"/>", " <group distribution-key=\"0\" name=\"bar\">", " <group distribution-key=\"0\" name=\"foo\">", " <node distribution-key=\"0\" hostalias=\"mockhost\"/>", " </group>", " </group>", " <group distribution-key=\"0\" name=\"foo\">", " <group distribution-key=\"0\" name=\"bar\">", " <node distribution-key=\"1\" hostalias=\"mockhost\"/>", " </group>", " </group>", " </group>", "</cluster>"); ContentClusterUtils.createCluster(xml, new MockRoot()); } @Test void requireThatNestedGroupsRequireDistribution() { String xml = joinLines( "<cluster id=\"storage\">", " <redundancy>2</redundancy>" + " <documents/>", " <group>", " <group distribution-key=\"0\" name=\"bar\">", " <node distribution-key=\"0\" hostalias=\"mockhost\"/>", " </group>", " <group distribution-key=\"0\" name=\"baz\">", " <node distribution-key=\"1\" hostalias=\"mockhost\"/>", " </group>", " </group>", "</cluster>"); try { ContentClusterUtils.createCluster(xml, new MockRoot()); 
fail("Did not get exception with missing distribution element"); } catch (RuntimeException e) { assertEquals("In content cluster 'storage': 'distribution' attribute is required with multiple subgroups", Exceptions.toMessageString(e)); } } }
class StorageClusterTest { StorageCluster parse(String xml, Flavor flavor) { MockRoot root = new MockRoot("", new DeployState.Builder() .applicationPackage(new MockApplicationPackage.Builder().build()) .modelHostProvisioner(new SingleNodeProvisioner(flavor)).build()); return parse(xml, root); } StorageCluster parse(String xml, Flavor flavor, ModelContext.Properties properties) { MockRoot root = new MockRoot("", new DeployState.Builder() .applicationPackage(new MockApplicationPackage.Builder().build()) .modelHostProvisioner(new SingleNodeProvisioner(flavor)) .properties(properties).build()); return parse(xml, root); } StorageCluster parse(String xml, ModelContext.Properties properties) { MockRoot root = new MockRoot("", new DeployState.Builder() .properties(properties) .applicationPackage(new MockApplicationPackage.Builder().build()) .build()); return parse(xml, root); } StorageCluster parse(String xml) { return parse(xml, new TestProperties()); } StorageCluster parse(String xml, MockRoot root) { root.getDeployState().getDocumentModel().getDocumentManager().add( new NewDocumentType(new NewDocumentType.Name("music")) ); root.getDeployState().getDocumentModel().getDocumentManager().add( new NewDocumentType(new NewDocumentType.Name("movies")) ); ContentCluster cluster = ContentClusterUtils.createCluster(xml, root); root.freezeModelTopology(); return cluster.getStorageCluster(); } private static String group() { return joinLines( "<group>", " <node distribution-key=\"0\" hostalias=\"mockhost\"/>", "</group>"); } private static String cluster(String clusterName, String insert) { return joinLines( "<content id=\"" + clusterName + "\">", "<redundancy>3</redundancy>" + "<documents/>", insert, group(), "</content>"); } @Test void testBasics() { StorageCluster storage = parse(cluster("foofighters", "")); assertEquals(1, storage.getChildren().size()); StorServerConfig.Builder builder = new StorServerConfig.Builder(); storage.getConfig(builder); StorServerConfig config = new 
StorServerConfig(builder); assertFalse(config.is_distributor()); assertEquals("foofighters", config.cluster_name()); assertEquals(4, config.content_node_bucket_db_stripe_bits()); } @Test void testCommunicationManagerDefaults() { StorageCluster storage = parse(cluster("foofighters", "")); StorCommunicationmanagerConfig.Builder builder = new StorCommunicationmanagerConfig.Builder(); storage.getChildren().get("0").getConfig(builder); StorCommunicationmanagerConfig config = new StorCommunicationmanagerConfig(builder); assertEquals(1, config.mbus().num_network_threads()); } @Test void testMergeDefaults() { StorServerConfig.Builder builder = new StorServerConfig.Builder(); parse(cluster("foofighters", "")).getConfig(builder); StorServerConfig config = new StorServerConfig(builder); assertEquals(16, config.max_merges_per_node()); assertEquals(100, config.max_merge_queue_size()); assertTrue(config.disable_queue_limits_for_chained_merges()); } @Test void testMerges() { StorServerConfig.Builder builder = new StorServerConfig.Builder(); parse(cluster("foofighters", joinLines( "<tuning>", " <merges max-per-node=\"1K\" max-queue-size=\"10K\"/>", "</tuning>"))).getConfig(builder); StorServerConfig config = new StorServerConfig(builder); assertEquals(1024, config.max_merges_per_node()); assertEquals(1024 * 10, config.max_merge_queue_size()); } private StorServerConfig configFromProperties(TestProperties properties) { StorServerConfig.Builder builder = new StorServerConfig.Builder(); parse(cluster("foofighters", ""), properties).getConfig(builder); return new StorServerConfig(builder); } private StorCommunicationmanagerConfig communicationmanagerConfigFromProperties(TestProperties properties) { StorCommunicationmanagerConfig.Builder builder = new StorCommunicationmanagerConfig.Builder(); parse(cluster("foofighters", ""), properties).getChildren().values().iterator().next().getConfig(builder); return new StorCommunicationmanagerConfig(builder); } private StorFilestorConfig 
filestorConfigFromProducer(StorFilestorConfig.Producer producer) { var builder = new StorFilestorConfig.Builder(); producer.getConfig(builder); return new StorFilestorConfig(builder); } @Test void verifyDefaultMbusConfig() { var confg = communicationmanagerConfigFromProperties(new TestProperties()); assertEquals(1, confg.mbus().num_network_threads()); assertEquals(2, confg.mbus().num_rpc_targets()); assertEquals(1, confg.mbus().events_before_wakeup()); assertEquals(2, confg.rpc().num_targets_per_node()); assertEquals(1, confg.rpc().events_before_wakeup()); } @Test void verifyDefaultMbusConfigControl() { var confg = communicationmanagerConfigFromProperties(new TestProperties() .setMbusNetworkThreads(7) .setRpcNumTargets(11) .setRpcEventsBeforeWakeup(12) .setMbusCppRpcNumTargets(8) .setMbusCppEventsBeforeWakeup(9)); assertEquals(7, confg.mbus().num_network_threads()); assertEquals(8, confg.mbus().num_rpc_targets()); assertEquals(9, confg.mbus().events_before_wakeup()); assertEquals(11, confg.rpc().num_targets_per_node()); assertEquals(12, confg.rpc().events_before_wakeup()); } @Test void testMergeFeatureFlags() { var config = configFromProperties(new TestProperties()); assertEquals(16, config.max_merges_per_node()); assertEquals(100, config.max_merge_queue_size()); } @Test void merge_throttling_policy_config_defaults_to_static() { var config = configFromProperties(new TestProperties()); assertEquals(StorServerConfig.Merge_throttling_policy.Type.STATIC, config.merge_throttling_policy().type()); } @Test void assertMergeAutoScaleConfigHasExpectedValues(StorServerConfig.Merge_throttling_memory_limit limit) { assertEquals(128L*1024*1024, limit.auto_lower_bound_bytes()); assertEquals(2L*1024*1024*1024, limit.auto_upper_bound_bytes()); assertEquals(0.03, limit.auto_phys_mem_scale_factor(), 0.000001); } @Test void testVisitors() { StorVisitorConfig.Builder builder = new StorVisitorConfig.Builder(); parse(cluster("bees", joinLines( "<tuning>", " <visitors thread-count=\"7\" 
max-queue-size=\"1000\">", " <max-concurrent fixed=\"42\" variable=\"100\"/>", " </visitors>", "</tuning>")) ).getConfig(builder); StorVisitorConfig config = new StorVisitorConfig(builder); assertEquals(42, config.maxconcurrentvisitors_fixed()); assertEquals(100, config.maxconcurrentvisitors_variable()); assertEquals(7, config.visitorthreads()); assertEquals(1000, config.maxvisitorqueuesize()); } @Test void testPersistenceThreads() { StorageCluster stc = parse(cluster("bees", joinLines( "<tuning>", " <persistence-threads count=\"7\"/>", "</tuning>")), new Flavor(new FlavorsConfig.Flavor.Builder().name("test-flavor").minCpuCores(9).build()) ); { var config = filestorConfigFromProducer(stc); assertEquals(7, config.num_threads()); assertFalse(config.enable_multibit_split_optimalization()); assertEquals(2, config.num_response_threads()); } { assertEquals(1, stc.getChildren().size()); StorageNode sn = stc.getChildren().values().iterator().next(); var config = filestorConfigFromProducer(sn); assertEquals(7, config.num_threads()); } } @Test void testResponseThreads() { StorageCluster stc = parse(cluster("bees", joinLines( "<tuning>", " <persistence-threads count=\"7\"/>", "</tuning>")), new Flavor(new FlavorsConfig.Flavor.Builder().name("test-flavor").minCpuCores(9).build()) ); var config = filestorConfigFromProducer(stc); assertEquals(2, config.num_response_threads()); assertEquals(StorFilestorConfig.Response_sequencer_type.ADAPTIVE, config.response_sequencer_type()); assertEquals(7, config.num_threads()); } @Test void testPersistenceThreadsOld() { StorageCluster stc = parse(cluster("bees", joinLines( "<tuning>", " <persistence-threads>", " <thread lowest-priority=\"VERY_LOW\" count=\"2\"/>", " <thread lowest-priority=\"VERY_HIGH\" count=\"1\"/>", " <thread count=\"1\"/>", " </persistence-threads>", "</tuning>")), new Flavor(new FlavorsConfig.Flavor.Builder().name("test-flavor").minCpuCores(9).build()) ); { var config = filestorConfigFromProducer(stc); assertEquals(4, 
config.num_threads()); assertFalse(config.enable_multibit_split_optimalization()); } { assertEquals(1, stc.getChildren().size()); StorageNode sn = stc.getChildren().values().iterator().next(); var config = filestorConfigFromProducer(sn); assertEquals(4, config.num_threads()); } } @Test void testNoPersistenceThreads() { StorageCluster stc = parse(cluster("bees", ""), new Flavor(new FlavorsConfig.Flavor.Builder().name("test-flavor").minCpuCores(9).build()) ); { var config = filestorConfigFromProducer(stc); assertEquals(8, config.num_threads()); } { assertEquals(1, stc.getChildren().size()); StorageNode sn = stc.getChildren().values().iterator().next(); var config = filestorConfigFromProducer(sn); assertEquals(9, config.num_threads()); } } private StorageCluster simpleCluster(ModelContext.Properties properties) { return parse(cluster("bees", ""), new Flavor(new FlavorsConfig.Flavor.Builder().name("test-flavor").minCpuCores(9).build()), properties); } @Test void testFeatureFlagControlOfResponseSequencer() { var config = filestorConfigFromProducer(simpleCluster(new TestProperties().setResponseNumThreads(13).setResponseSequencerType("THROUGHPUT"))); assertEquals(13, config.num_response_threads()); assertEquals(StorFilestorConfig.Response_sequencer_type.THROUGHPUT, config.response_sequencer_type()); } private void verifyAsyncMessageHandlingOnSchedule(boolean expected, boolean value) { var config = filestorConfigFromProducer(simpleCluster(new TestProperties().setAsyncMessageHandlingOnSchedule(value))); assertEquals(expected, config.use_async_message_handling_on_schedule()); } @Test void testFeatureFlagControlOfAsyncMessageHandlingOnSchedule() { verifyAsyncMessageHandlingOnSchedule(false, false); verifyAsyncMessageHandlingOnSchedule(true, true); } @Test void persistence_dynamic_throttling_parameters_have_sane_defaults() { var config = filestorConfigFromProducer(simpleCluster(new TestProperties())); assertEquals(StorFilestorConfig.Async_operation_throttler.Type.DYNAMIC, 
config.async_operation_throttler().type()); assertEquals(1.2, config.async_operation_throttler().window_size_decrement_factor(), 0.0001); assertEquals(0.95, config.async_operation_throttler().window_size_backoff(), 0.0001); assertEquals(20, config.async_operation_throttler().min_window_size()); assertEquals(-1, config.async_operation_throttler().max_window_size()); assertEquals(3.0, config.async_operation_throttler().resize_rate(), 0.0001); assertTrue(config.async_operation_throttler().throttle_individual_merge_feed_ops()); } @Test void testCapacity() { String xml = joinLines( "<cluster id=\"storage\">", " <redundancy>2</redundancy>" + " <documents/>", " <group>", " <node distribution-key=\"0\" hostalias=\"mockhost\"/>", " <node distribution-key=\"1\" hostalias=\"mockhost\" capacity=\"1.5\"/>", " <node distribution-key=\"2\" hostalias=\"mockhost\" capacity=\"2.0\"/>", " </group>", "</cluster>"); ContentCluster cluster = ContentClusterUtils.createCluster(xml, new MockRoot()); for (int i = 0; i < 3; ++i) { StorageNode node = cluster.getStorageCluster().getChildren().get("" + i); StorServerConfig.Builder builder = new StorServerConfig.Builder(); cluster.getStorageCluster().getConfig(builder); node.getConfig(builder); StorServerConfig config = new StorServerConfig(builder); assertEquals(1.0 + (double) i * 0.5, config.node_capacity(), 0.001); } } @Test void testRootFolder() { ContentCluster cluster = ContentClusterUtils.createCluster(cluster("storage", ""), new MockRoot()); StorageNode node = cluster.getStorageCluster().getChildren().get("0"); { StorServerConfig.Builder builder = new StorServerConfig.Builder(); cluster.getStorageCluster().getConfig(builder); node.getConfig(builder); StorServerConfig config = new StorServerConfig(builder); assertEquals(getDefaults().underVespaHome("var/db/vespa/search/storage/storage/0"), config.root_folder()); } { StorServerConfig.Builder builder = new StorServerConfig.Builder(); cluster.getDistributorNodes().getConfig(builder); 
cluster.getDistributorNodes().getChildren().get("0").getConfig(builder); StorServerConfig config = new StorServerConfig(builder); assertEquals(getDefaults().underVespaHome("var/db/vespa/search/storage/distributor/0"), config.root_folder()); } } @Test void testGenericPersistenceTuning() { String xml = joinLines( "<cluster id=\"storage\">", " <redundancy>2</redundancy>" + " <documents/>", " <engine>", " <fail-partition-on-error>true</fail-partition-on-error>", " <revert-time>34m</revert-time>", " <recovery-time>5d</recovery-time>", " </engine>", " <group>", " node distribution-key=\"0\" hostalias=\"mockhost\"/>", " </group>", "</cluster>"); ContentCluster cluster = ContentClusterUtils.createCluster(xml, new MockRoot()); PersistenceConfig.Builder builder = new PersistenceConfig.Builder(); cluster.getStorageCluster().getConfig(builder); PersistenceConfig config = new PersistenceConfig(builder); assertTrue(config.fail_partition_on_error()); assertEquals(34 * 60, config.revert_time_period()); assertEquals(5 * 24 * 60 * 60, config.keep_remove_time_period()); } @Test void requireThatUserDoesNotSpecifyBothGroupAndNodes() { String xml = joinLines( "<cluster id=\"storage\">", " <redundancy>2</redundancy>" + " <documents/>", " <engine>", " <fail-partition-on-error>true</fail-partition-on-error>", " <revert-time>34m</revert-time>", " <recovery-time>5d</recovery-time>", " </engine>", " <group>", " <node distribution-key=\"0\" hostalias=\"mockhost\"/>", " </group>", " <nodes>", " <node distribution-key=\"1\" hostalias=\"mockhost\"/>", " </nodes>", "</cluster>"); try { final MockRoot root = new MockRoot(); root.getDeployState().getDocumentModel().getDocumentManager().add( new NewDocumentType(new NewDocumentType.Name("music")) ); ContentClusterUtils.createCluster(xml, root); fail("Did not fail when having both group and nodes"); } catch (RuntimeException e) { assertEquals("In content cluster 'storage': Both <group> and <nodes> is specified: Only one of these tags can be used in the 
same configuration", Exceptions.toMessageString(e)); } } @Test void requireThatGroupNamesMustBeUniqueAmongstSiblings() { String xml = joinLines( "<cluster id=\"storage\">", " <redundancy>2</redundancy>", " <documents/>", " <group>", " <distribution partitions=\"*\"/>", " <group distribution-key=\"0\" name=\"bar\">", " <node distribution-key=\"0\" hostalias=\"mockhost\"/>", " </group>", " <group distribution-key=\"0\" name=\"bar\">", " <node distribution-key=\"1\" hostalias=\"mockhost\"/>", " </group>", " </group>", "</cluster>"); try { ContentClusterUtils.createCluster(xml, new MockRoot()); fail("Did not get exception with duplicate group names"); } catch (RuntimeException e) { assertEquals("Cluster 'storage' has multiple groups with name 'bar' in the same subgroup. " + "Group sibling names must be unique.", e.getMessage()); } } @Test void requireThatGroupNamesCanBeDuplicatedAcrossLevels() { String xml = joinLines( "<cluster id=\"storage\">", " <redundancy>2</redundancy>", " <documents/>", " <group>", " <distribution partitions=\"*\"/>", " <group distribution-key=\"0\" name=\"bar\">", " <group distribution-key=\"0\" name=\"foo\">", " <node distribution-key=\"0\" hostalias=\"mockhost\"/>", " </group>", " </group>", " <group distribution-key=\"0\" name=\"foo\">", " <group distribution-key=\"0\" name=\"bar\">", " <node distribution-key=\"1\" hostalias=\"mockhost\"/>", " </group>", " </group>", " </group>", "</cluster>"); ContentClusterUtils.createCluster(xml, new MockRoot()); } @Test void requireThatNestedGroupsRequireDistribution() { String xml = joinLines( "<cluster id=\"storage\">", " <redundancy>2</redundancy>" + " <documents/>", " <group>", " <group distribution-key=\"0\" name=\"bar\">", " <node distribution-key=\"0\" hostalias=\"mockhost\"/>", " </group>", " <group distribution-key=\"0\" name=\"baz\">", " <node distribution-key=\"1\" hostalias=\"mockhost\"/>", " </group>", " </group>", "</cluster>"); try { ContentClusterUtils.createCluster(xml, new MockRoot()); 
fail("Did not get exception with missing distribution element"); } catch (RuntimeException e) { assertEquals("In content cluster 'storage': 'distribution' attribute is required with multiple subgroups", Exceptions.toMessageString(e)); } } }
Fixed
void merge_throttler_memory_limit_config_has_expected_defaults() { var config = configFromProperties(new TestProperties()); var limit = config.merge_throttling_memory_limit(); assertEquals(0L, limit.max_usage_bytes()); assertMergeAutoScaleConfigHasExpectedValues(limit); }
assertEquals(0L, limit.max_usage_bytes());
void merge_throttler_memory_limit_config_has_expected_defaults() { var config = configFromProperties(new TestProperties()); var limit = config.merge_throttling_memory_limit(); assertEquals(0L, limit.max_usage_bytes()); assertMergeAutoScaleConfigHasExpectedValues(limit); }
class StorageClusterTest { StorageCluster parse(String xml, Flavor flavor) { MockRoot root = new MockRoot("", new DeployState.Builder() .applicationPackage(new MockApplicationPackage.Builder().build()) .modelHostProvisioner(new SingleNodeProvisioner(flavor)).build()); return parse(xml, root); } StorageCluster parse(String xml, Flavor flavor, ModelContext.Properties properties) { MockRoot root = new MockRoot("", new DeployState.Builder() .applicationPackage(new MockApplicationPackage.Builder().build()) .modelHostProvisioner(new SingleNodeProvisioner(flavor)) .properties(properties).build()); return parse(xml, root); } StorageCluster parse(String xml, ModelContext.Properties properties) { MockRoot root = new MockRoot("", new DeployState.Builder() .properties(properties) .applicationPackage(new MockApplicationPackage.Builder().build()) .build()); return parse(xml, root); } StorageCluster parse(String xml) { return parse(xml, new TestProperties()); } StorageCluster parse(String xml, MockRoot root) { root.getDeployState().getDocumentModel().getDocumentManager().add( new NewDocumentType(new NewDocumentType.Name("music")) ); root.getDeployState().getDocumentModel().getDocumentManager().add( new NewDocumentType(new NewDocumentType.Name("movies")) ); ContentCluster cluster = ContentClusterUtils.createCluster(xml, root); root.freezeModelTopology(); return cluster.getStorageCluster(); } private static String group() { return joinLines( "<group>", " <node distribution-key=\"0\" hostalias=\"mockhost\"/>", "</group>"); } private static String cluster(String clusterName, String insert) { return joinLines( "<content id=\"" + clusterName + "\">", "<redundancy>3</redundancy>" + "<documents/>", insert, group(), "</content>"); } @Test void testBasics() { StorageCluster storage = parse(cluster("foofighters", "")); assertEquals(1, storage.getChildren().size()); StorServerConfig.Builder builder = new StorServerConfig.Builder(); storage.getConfig(builder); StorServerConfig config = new 
StorServerConfig(builder); assertFalse(config.is_distributor()); assertEquals("foofighters", config.cluster_name()); assertEquals(4, config.content_node_bucket_db_stripe_bits()); } @Test void testCommunicationManagerDefaults() { StorageCluster storage = parse(cluster("foofighters", "")); StorCommunicationmanagerConfig.Builder builder = new StorCommunicationmanagerConfig.Builder(); storage.getChildren().get("0").getConfig(builder); StorCommunicationmanagerConfig config = new StorCommunicationmanagerConfig(builder); assertEquals(1, config.mbus().num_network_threads()); } @Test void testMergeDefaults() { StorServerConfig.Builder builder = new StorServerConfig.Builder(); parse(cluster("foofighters", "")).getConfig(builder); StorServerConfig config = new StorServerConfig(builder); assertEquals(16, config.max_merges_per_node()); assertEquals(100, config.max_merge_queue_size()); assertTrue(config.disable_queue_limits_for_chained_merges()); } @Test void testMerges() { StorServerConfig.Builder builder = new StorServerConfig.Builder(); parse(cluster("foofighters", joinLines( "<tuning>", " <merges max-per-node=\"1K\" max-queue-size=\"10K\"/>", "</tuning>"))).getConfig(builder); StorServerConfig config = new StorServerConfig(builder); assertEquals(1024, config.max_merges_per_node()); assertEquals(1024 * 10, config.max_merge_queue_size()); } private StorServerConfig configFromProperties(TestProperties properties) { StorServerConfig.Builder builder = new StorServerConfig.Builder(); parse(cluster("foofighters", ""), properties).getConfig(builder); return new StorServerConfig(builder); } private StorCommunicationmanagerConfig communicationmanagerConfigFromProperties(TestProperties properties) { StorCommunicationmanagerConfig.Builder builder = new StorCommunicationmanagerConfig.Builder(); parse(cluster("foofighters", ""), properties).getChildren().values().iterator().next().getConfig(builder); return new StorCommunicationmanagerConfig(builder); } private StorFilestorConfig 
filestorConfigFromProducer(StorFilestorConfig.Producer producer) { var builder = new StorFilestorConfig.Builder(); producer.getConfig(builder); return new StorFilestorConfig(builder); } @Test void verifyDefaultMbusConfig() { var confg = communicationmanagerConfigFromProperties(new TestProperties()); assertEquals(1, confg.mbus().num_network_threads()); assertEquals(2, confg.mbus().num_rpc_targets()); assertEquals(1, confg.mbus().events_before_wakeup()); assertEquals(2, confg.rpc().num_targets_per_node()); assertEquals(1, confg.rpc().events_before_wakeup()); } @Test void verifyDefaultMbusConfigControl() { var confg = communicationmanagerConfigFromProperties(new TestProperties() .setMbusNetworkThreads(7) .setRpcNumTargets(11) .setRpcEventsBeforeWakeup(12) .setMbusCppRpcNumTargets(8) .setMbusCppEventsBeforeWakeup(9)); assertEquals(7, confg.mbus().num_network_threads()); assertEquals(8, confg.mbus().num_rpc_targets()); assertEquals(9, confg.mbus().events_before_wakeup()); assertEquals(11, confg.rpc().num_targets_per_node()); assertEquals(12, confg.rpc().events_before_wakeup()); } @Test void testMergeFeatureFlags() { var config = configFromProperties(new TestProperties()); assertEquals(16, config.max_merges_per_node()); assertEquals(100, config.max_merge_queue_size()); } @Test void merge_throttling_policy_config_defaults_to_static() { var config = configFromProperties(new TestProperties()); assertEquals(StorServerConfig.Merge_throttling_policy.Type.STATIC, config.merge_throttling_policy().type()); } @Test void assertMergeAutoScaleConfigHasExpectedValues(StorServerConfig.Merge_throttling_memory_limit limit) { assertEquals(128L*1024*1024, limit.auto_lower_bound_bytes()); assertEquals(2L*1024*1024*1024, limit.auto_upper_bound_bytes()); assertEquals(0.03, limit.auto_phys_mem_scale_factor(), 0.000001); } @Test void testVisitors() { StorVisitorConfig.Builder builder = new StorVisitorConfig.Builder(); parse(cluster("bees", joinLines( "<tuning>", " <visitors thread-count=\"7\" 
max-queue-size=\"1000\">", " <max-concurrent fixed=\"42\" variable=\"100\"/>", " </visitors>", "</tuning>")) ).getConfig(builder); StorVisitorConfig config = new StorVisitorConfig(builder); assertEquals(42, config.maxconcurrentvisitors_fixed()); assertEquals(100, config.maxconcurrentvisitors_variable()); assertEquals(7, config.visitorthreads()); assertEquals(1000, config.maxvisitorqueuesize()); } @Test void testPersistenceThreads() { StorageCluster stc = parse(cluster("bees", joinLines( "<tuning>", " <persistence-threads count=\"7\"/>", "</tuning>")), new Flavor(new FlavorsConfig.Flavor.Builder().name("test-flavor").minCpuCores(9).build()) ); { var config = filestorConfigFromProducer(stc); assertEquals(7, config.num_threads()); assertFalse(config.enable_multibit_split_optimalization()); assertEquals(2, config.num_response_threads()); } { assertEquals(1, stc.getChildren().size()); StorageNode sn = stc.getChildren().values().iterator().next(); var config = filestorConfigFromProducer(sn); assertEquals(7, config.num_threads()); } } @Test void testResponseThreads() { StorageCluster stc = parse(cluster("bees", joinLines( "<tuning>", " <persistence-threads count=\"7\"/>", "</tuning>")), new Flavor(new FlavorsConfig.Flavor.Builder().name("test-flavor").minCpuCores(9).build()) ); var config = filestorConfigFromProducer(stc); assertEquals(2, config.num_response_threads()); assertEquals(StorFilestorConfig.Response_sequencer_type.ADAPTIVE, config.response_sequencer_type()); assertEquals(7, config.num_threads()); } @Test void testPersistenceThreadsOld() { StorageCluster stc = parse(cluster("bees", joinLines( "<tuning>", " <persistence-threads>", " <thread lowest-priority=\"VERY_LOW\" count=\"2\"/>", " <thread lowest-priority=\"VERY_HIGH\" count=\"1\"/>", " <thread count=\"1\"/>", " </persistence-threads>", "</tuning>")), new Flavor(new FlavorsConfig.Flavor.Builder().name("test-flavor").minCpuCores(9).build()) ); { var config = filestorConfigFromProducer(stc); assertEquals(4, 
config.num_threads()); assertFalse(config.enable_multibit_split_optimalization()); } { assertEquals(1, stc.getChildren().size()); StorageNode sn = stc.getChildren().values().iterator().next(); var config = filestorConfigFromProducer(sn); assertEquals(4, config.num_threads()); } } @Test void testNoPersistenceThreads() { StorageCluster stc = parse(cluster("bees", ""), new Flavor(new FlavorsConfig.Flavor.Builder().name("test-flavor").minCpuCores(9).build()) ); { var config = filestorConfigFromProducer(stc); assertEquals(8, config.num_threads()); } { assertEquals(1, stc.getChildren().size()); StorageNode sn = stc.getChildren().values().iterator().next(); var config = filestorConfigFromProducer(sn); assertEquals(9, config.num_threads()); } } private StorageCluster simpleCluster(ModelContext.Properties properties) { return parse(cluster("bees", ""), new Flavor(new FlavorsConfig.Flavor.Builder().name("test-flavor").minCpuCores(9).build()), properties); } @Test void testFeatureFlagControlOfResponseSequencer() { var config = filestorConfigFromProducer(simpleCluster(new TestProperties().setResponseNumThreads(13).setResponseSequencerType("THROUGHPUT"))); assertEquals(13, config.num_response_threads()); assertEquals(StorFilestorConfig.Response_sequencer_type.THROUGHPUT, config.response_sequencer_type()); } private void verifyAsyncMessageHandlingOnSchedule(boolean expected, boolean value) { var config = filestorConfigFromProducer(simpleCluster(new TestProperties().setAsyncMessageHandlingOnSchedule(value))); assertEquals(expected, config.use_async_message_handling_on_schedule()); } @Test void testFeatureFlagControlOfAsyncMessageHandlingOnSchedule() { verifyAsyncMessageHandlingOnSchedule(false, false); verifyAsyncMessageHandlingOnSchedule(true, true); } @Test void persistence_dynamic_throttling_parameters_have_sane_defaults() { var config = filestorConfigFromProducer(simpleCluster(new TestProperties())); assertEquals(StorFilestorConfig.Async_operation_throttler.Type.DYNAMIC, 
config.async_operation_throttler().type()); assertEquals(1.2, config.async_operation_throttler().window_size_decrement_factor(), 0.0001); assertEquals(0.95, config.async_operation_throttler().window_size_backoff(), 0.0001); assertEquals(20, config.async_operation_throttler().min_window_size()); assertEquals(-1, config.async_operation_throttler().max_window_size()); assertEquals(3.0, config.async_operation_throttler().resize_rate(), 0.0001); assertTrue(config.async_operation_throttler().throttle_individual_merge_feed_ops()); } @Test void testCapacity() { String xml = joinLines( "<cluster id=\"storage\">", " <redundancy>2</redundancy>" + " <documents/>", " <group>", " <node distribution-key=\"0\" hostalias=\"mockhost\"/>", " <node distribution-key=\"1\" hostalias=\"mockhost\" capacity=\"1.5\"/>", " <node distribution-key=\"2\" hostalias=\"mockhost\" capacity=\"2.0\"/>", " </group>", "</cluster>"); ContentCluster cluster = ContentClusterUtils.createCluster(xml, new MockRoot()); for (int i = 0; i < 3; ++i) { StorageNode node = cluster.getStorageCluster().getChildren().get("" + i); StorServerConfig.Builder builder = new StorServerConfig.Builder(); cluster.getStorageCluster().getConfig(builder); node.getConfig(builder); StorServerConfig config = new StorServerConfig(builder); assertEquals(1.0 + (double) i * 0.5, config.node_capacity(), 0.001); } } @Test void testRootFolder() { ContentCluster cluster = ContentClusterUtils.createCluster(cluster("storage", ""), new MockRoot()); StorageNode node = cluster.getStorageCluster().getChildren().get("0"); { StorServerConfig.Builder builder = new StorServerConfig.Builder(); cluster.getStorageCluster().getConfig(builder); node.getConfig(builder); StorServerConfig config = new StorServerConfig(builder); assertEquals(getDefaults().underVespaHome("var/db/vespa/search/storage/storage/0"), config.root_folder()); } { StorServerConfig.Builder builder = new StorServerConfig.Builder(); cluster.getDistributorNodes().getConfig(builder); 
cluster.getDistributorNodes().getChildren().get("0").getConfig(builder); StorServerConfig config = new StorServerConfig(builder); assertEquals(getDefaults().underVespaHome("var/db/vespa/search/storage/distributor/0"), config.root_folder()); } } @Test void testGenericPersistenceTuning() { String xml = joinLines( "<cluster id=\"storage\">", " <redundancy>2</redundancy>" + " <documents/>", " <engine>", " <fail-partition-on-error>true</fail-partition-on-error>", " <revert-time>34m</revert-time>", " <recovery-time>5d</recovery-time>", " </engine>", " <group>", " node distribution-key=\"0\" hostalias=\"mockhost\"/>", " </group>", "</cluster>"); ContentCluster cluster = ContentClusterUtils.createCluster(xml, new MockRoot()); PersistenceConfig.Builder builder = new PersistenceConfig.Builder(); cluster.getStorageCluster().getConfig(builder); PersistenceConfig config = new PersistenceConfig(builder); assertTrue(config.fail_partition_on_error()); assertEquals(34 * 60, config.revert_time_period()); assertEquals(5 * 24 * 60 * 60, config.keep_remove_time_period()); } @Test void requireThatUserDoesNotSpecifyBothGroupAndNodes() { String xml = joinLines( "<cluster id=\"storage\">", " <redundancy>2</redundancy>" + " <documents/>", " <engine>", " <fail-partition-on-error>true</fail-partition-on-error>", " <revert-time>34m</revert-time>", " <recovery-time>5d</recovery-time>", " </engine>", " <group>", " <node distribution-key=\"0\" hostalias=\"mockhost\"/>", " </group>", " <nodes>", " <node distribution-key=\"1\" hostalias=\"mockhost\"/>", " </nodes>", "</cluster>"); try { final MockRoot root = new MockRoot(); root.getDeployState().getDocumentModel().getDocumentManager().add( new NewDocumentType(new NewDocumentType.Name("music")) ); ContentClusterUtils.createCluster(xml, root); fail("Did not fail when having both group and nodes"); } catch (RuntimeException e) { assertEquals("In content cluster 'storage': Both <group> and <nodes> is specified: Only one of these tags can be used in the 
same configuration", Exceptions.toMessageString(e)); } } @Test void requireThatGroupNamesMustBeUniqueAmongstSiblings() { String xml = joinLines( "<cluster id=\"storage\">", " <redundancy>2</redundancy>", " <documents/>", " <group>", " <distribution partitions=\"*\"/>", " <group distribution-key=\"0\" name=\"bar\">", " <node distribution-key=\"0\" hostalias=\"mockhost\"/>", " </group>", " <group distribution-key=\"0\" name=\"bar\">", " <node distribution-key=\"1\" hostalias=\"mockhost\"/>", " </group>", " </group>", "</cluster>"); try { ContentClusterUtils.createCluster(xml, new MockRoot()); fail("Did not get exception with duplicate group names"); } catch (RuntimeException e) { assertEquals("Cluster 'storage' has multiple groups with name 'bar' in the same subgroup. " + "Group sibling names must be unique.", e.getMessage()); } } @Test void requireThatGroupNamesCanBeDuplicatedAcrossLevels() { String xml = joinLines( "<cluster id=\"storage\">", " <redundancy>2</redundancy>", " <documents/>", " <group>", " <distribution partitions=\"*\"/>", " <group distribution-key=\"0\" name=\"bar\">", " <group distribution-key=\"0\" name=\"foo\">", " <node distribution-key=\"0\" hostalias=\"mockhost\"/>", " </group>", " </group>", " <group distribution-key=\"0\" name=\"foo\">", " <group distribution-key=\"0\" name=\"bar\">", " <node distribution-key=\"1\" hostalias=\"mockhost\"/>", " </group>", " </group>", " </group>", "</cluster>"); ContentClusterUtils.createCluster(xml, new MockRoot()); } @Test void requireThatNestedGroupsRequireDistribution() { String xml = joinLines( "<cluster id=\"storage\">", " <redundancy>2</redundancy>" + " <documents/>", " <group>", " <group distribution-key=\"0\" name=\"bar\">", " <node distribution-key=\"0\" hostalias=\"mockhost\"/>", " </group>", " <group distribution-key=\"0\" name=\"baz\">", " <node distribution-key=\"1\" hostalias=\"mockhost\"/>", " </group>", " </group>", "</cluster>"); try { ContentClusterUtils.createCluster(xml, new MockRoot()); 
fail("Did not get exception with missing distribution element"); } catch (RuntimeException e) { assertEquals("In content cluster 'storage': 'distribution' attribute is required with multiple subgroups", Exceptions.toMessageString(e)); } } }
class StorageClusterTest { StorageCluster parse(String xml, Flavor flavor) { MockRoot root = new MockRoot("", new DeployState.Builder() .applicationPackage(new MockApplicationPackage.Builder().build()) .modelHostProvisioner(new SingleNodeProvisioner(flavor)).build()); return parse(xml, root); } StorageCluster parse(String xml, Flavor flavor, ModelContext.Properties properties) { MockRoot root = new MockRoot("", new DeployState.Builder() .applicationPackage(new MockApplicationPackage.Builder().build()) .modelHostProvisioner(new SingleNodeProvisioner(flavor)) .properties(properties).build()); return parse(xml, root); } StorageCluster parse(String xml, ModelContext.Properties properties) { MockRoot root = new MockRoot("", new DeployState.Builder() .properties(properties) .applicationPackage(new MockApplicationPackage.Builder().build()) .build()); return parse(xml, root); } StorageCluster parse(String xml) { return parse(xml, new TestProperties()); } StorageCluster parse(String xml, MockRoot root) { root.getDeployState().getDocumentModel().getDocumentManager().add( new NewDocumentType(new NewDocumentType.Name("music")) ); root.getDeployState().getDocumentModel().getDocumentManager().add( new NewDocumentType(new NewDocumentType.Name("movies")) ); ContentCluster cluster = ContentClusterUtils.createCluster(xml, root); root.freezeModelTopology(); return cluster.getStorageCluster(); } private static String group() { return joinLines( "<group>", " <node distribution-key=\"0\" hostalias=\"mockhost\"/>", "</group>"); } private static String cluster(String clusterName, String insert) { return joinLines( "<content id=\"" + clusterName + "\">", "<redundancy>3</redundancy>" + "<documents/>", insert, group(), "</content>"); } @Test void testBasics() { StorageCluster storage = parse(cluster("foofighters", "")); assertEquals(1, storage.getChildren().size()); StorServerConfig.Builder builder = new StorServerConfig.Builder(); storage.getConfig(builder); StorServerConfig config = new 
StorServerConfig(builder); assertFalse(config.is_distributor()); assertEquals("foofighters", config.cluster_name()); assertEquals(4, config.content_node_bucket_db_stripe_bits()); } @Test void testCommunicationManagerDefaults() { StorageCluster storage = parse(cluster("foofighters", "")); StorCommunicationmanagerConfig.Builder builder = new StorCommunicationmanagerConfig.Builder(); storage.getChildren().get("0").getConfig(builder); StorCommunicationmanagerConfig config = new StorCommunicationmanagerConfig(builder); assertEquals(1, config.mbus().num_network_threads()); } @Test void testMergeDefaults() { StorServerConfig.Builder builder = new StorServerConfig.Builder(); parse(cluster("foofighters", "")).getConfig(builder); StorServerConfig config = new StorServerConfig(builder); assertEquals(16, config.max_merges_per_node()); assertEquals(100, config.max_merge_queue_size()); assertTrue(config.disable_queue_limits_for_chained_merges()); } @Test void testMerges() { StorServerConfig.Builder builder = new StorServerConfig.Builder(); parse(cluster("foofighters", joinLines( "<tuning>", " <merges max-per-node=\"1K\" max-queue-size=\"10K\"/>", "</tuning>"))).getConfig(builder); StorServerConfig config = new StorServerConfig(builder); assertEquals(1024, config.max_merges_per_node()); assertEquals(1024 * 10, config.max_merge_queue_size()); } private StorServerConfig configFromProperties(TestProperties properties) { StorServerConfig.Builder builder = new StorServerConfig.Builder(); parse(cluster("foofighters", ""), properties).getConfig(builder); return new StorServerConfig(builder); } private StorCommunicationmanagerConfig communicationmanagerConfigFromProperties(TestProperties properties) { StorCommunicationmanagerConfig.Builder builder = new StorCommunicationmanagerConfig.Builder(); parse(cluster("foofighters", ""), properties).getChildren().values().iterator().next().getConfig(builder); return new StorCommunicationmanagerConfig(builder); } private StorFilestorConfig 
filestorConfigFromProducer(StorFilestorConfig.Producer producer) { var builder = new StorFilestorConfig.Builder(); producer.getConfig(builder); return new StorFilestorConfig(builder); } @Test void verifyDefaultMbusConfig() { var confg = communicationmanagerConfigFromProperties(new TestProperties()); assertEquals(1, confg.mbus().num_network_threads()); assertEquals(2, confg.mbus().num_rpc_targets()); assertEquals(1, confg.mbus().events_before_wakeup()); assertEquals(2, confg.rpc().num_targets_per_node()); assertEquals(1, confg.rpc().events_before_wakeup()); } @Test void verifyDefaultMbusConfigControl() { var confg = communicationmanagerConfigFromProperties(new TestProperties() .setMbusNetworkThreads(7) .setRpcNumTargets(11) .setRpcEventsBeforeWakeup(12) .setMbusCppRpcNumTargets(8) .setMbusCppEventsBeforeWakeup(9)); assertEquals(7, confg.mbus().num_network_threads()); assertEquals(8, confg.mbus().num_rpc_targets()); assertEquals(9, confg.mbus().events_before_wakeup()); assertEquals(11, confg.rpc().num_targets_per_node()); assertEquals(12, confg.rpc().events_before_wakeup()); } @Test void testMergeFeatureFlags() { var config = configFromProperties(new TestProperties()); assertEquals(16, config.max_merges_per_node()); assertEquals(100, config.max_merge_queue_size()); } @Test void merge_throttling_policy_config_defaults_to_static() { var config = configFromProperties(new TestProperties()); assertEquals(StorServerConfig.Merge_throttling_policy.Type.STATIC, config.merge_throttling_policy().type()); } @Test void assertMergeAutoScaleConfigHasExpectedValues(StorServerConfig.Merge_throttling_memory_limit limit) { assertEquals(128L*1024*1024, limit.auto_lower_bound_bytes()); assertEquals(2L*1024*1024*1024, limit.auto_upper_bound_bytes()); assertEquals(0.03, limit.auto_phys_mem_scale_factor(), 0.000001); } @Test void testVisitors() { StorVisitorConfig.Builder builder = new StorVisitorConfig.Builder(); parse(cluster("bees", joinLines( "<tuning>", " <visitors thread-count=\"7\" 
max-queue-size=\"1000\">", " <max-concurrent fixed=\"42\" variable=\"100\"/>", " </visitors>", "</tuning>")) ).getConfig(builder); StorVisitorConfig config = new StorVisitorConfig(builder); assertEquals(42, config.maxconcurrentvisitors_fixed()); assertEquals(100, config.maxconcurrentvisitors_variable()); assertEquals(7, config.visitorthreads()); assertEquals(1000, config.maxvisitorqueuesize()); } @Test void testPersistenceThreads() { StorageCluster stc = parse(cluster("bees", joinLines( "<tuning>", " <persistence-threads count=\"7\"/>", "</tuning>")), new Flavor(new FlavorsConfig.Flavor.Builder().name("test-flavor").minCpuCores(9).build()) ); { var config = filestorConfigFromProducer(stc); assertEquals(7, config.num_threads()); assertFalse(config.enable_multibit_split_optimalization()); assertEquals(2, config.num_response_threads()); } { assertEquals(1, stc.getChildren().size()); StorageNode sn = stc.getChildren().values().iterator().next(); var config = filestorConfigFromProducer(sn); assertEquals(7, config.num_threads()); } } @Test void testResponseThreads() { StorageCluster stc = parse(cluster("bees", joinLines( "<tuning>", " <persistence-threads count=\"7\"/>", "</tuning>")), new Flavor(new FlavorsConfig.Flavor.Builder().name("test-flavor").minCpuCores(9).build()) ); var config = filestorConfigFromProducer(stc); assertEquals(2, config.num_response_threads()); assertEquals(StorFilestorConfig.Response_sequencer_type.ADAPTIVE, config.response_sequencer_type()); assertEquals(7, config.num_threads()); } @Test void testPersistenceThreadsOld() { StorageCluster stc = parse(cluster("bees", joinLines( "<tuning>", " <persistence-threads>", " <thread lowest-priority=\"VERY_LOW\" count=\"2\"/>", " <thread lowest-priority=\"VERY_HIGH\" count=\"1\"/>", " <thread count=\"1\"/>", " </persistence-threads>", "</tuning>")), new Flavor(new FlavorsConfig.Flavor.Builder().name("test-flavor").minCpuCores(9).build()) ); { var config = filestorConfigFromProducer(stc); assertEquals(4, 
config.num_threads()); assertFalse(config.enable_multibit_split_optimalization()); } { assertEquals(1, stc.getChildren().size()); StorageNode sn = stc.getChildren().values().iterator().next(); var config = filestorConfigFromProducer(sn); assertEquals(4, config.num_threads()); } } @Test void testNoPersistenceThreads() { StorageCluster stc = parse(cluster("bees", ""), new Flavor(new FlavorsConfig.Flavor.Builder().name("test-flavor").minCpuCores(9).build()) ); { var config = filestorConfigFromProducer(stc); assertEquals(8, config.num_threads()); } { assertEquals(1, stc.getChildren().size()); StorageNode sn = stc.getChildren().values().iterator().next(); var config = filestorConfigFromProducer(sn); assertEquals(9, config.num_threads()); } } private StorageCluster simpleCluster(ModelContext.Properties properties) { return parse(cluster("bees", ""), new Flavor(new FlavorsConfig.Flavor.Builder().name("test-flavor").minCpuCores(9).build()), properties); } @Test void testFeatureFlagControlOfResponseSequencer() { var config = filestorConfigFromProducer(simpleCluster(new TestProperties().setResponseNumThreads(13).setResponseSequencerType("THROUGHPUT"))); assertEquals(13, config.num_response_threads()); assertEquals(StorFilestorConfig.Response_sequencer_type.THROUGHPUT, config.response_sequencer_type()); } private void verifyAsyncMessageHandlingOnSchedule(boolean expected, boolean value) { var config = filestorConfigFromProducer(simpleCluster(new TestProperties().setAsyncMessageHandlingOnSchedule(value))); assertEquals(expected, config.use_async_message_handling_on_schedule()); } @Test void testFeatureFlagControlOfAsyncMessageHandlingOnSchedule() { verifyAsyncMessageHandlingOnSchedule(false, false); verifyAsyncMessageHandlingOnSchedule(true, true); } @Test void persistence_dynamic_throttling_parameters_have_sane_defaults() { var config = filestorConfigFromProducer(simpleCluster(new TestProperties())); assertEquals(StorFilestorConfig.Async_operation_throttler.Type.DYNAMIC, 
config.async_operation_throttler().type()); assertEquals(1.2, config.async_operation_throttler().window_size_decrement_factor(), 0.0001); assertEquals(0.95, config.async_operation_throttler().window_size_backoff(), 0.0001); assertEquals(20, config.async_operation_throttler().min_window_size()); assertEquals(-1, config.async_operation_throttler().max_window_size()); assertEquals(3.0, config.async_operation_throttler().resize_rate(), 0.0001); assertTrue(config.async_operation_throttler().throttle_individual_merge_feed_ops()); } @Test void testCapacity() { String xml = joinLines( "<cluster id=\"storage\">", " <redundancy>2</redundancy>" + " <documents/>", " <group>", " <node distribution-key=\"0\" hostalias=\"mockhost\"/>", " <node distribution-key=\"1\" hostalias=\"mockhost\" capacity=\"1.5\"/>", " <node distribution-key=\"2\" hostalias=\"mockhost\" capacity=\"2.0\"/>", " </group>", "</cluster>"); ContentCluster cluster = ContentClusterUtils.createCluster(xml, new MockRoot()); for (int i = 0; i < 3; ++i) { StorageNode node = cluster.getStorageCluster().getChildren().get("" + i); StorServerConfig.Builder builder = new StorServerConfig.Builder(); cluster.getStorageCluster().getConfig(builder); node.getConfig(builder); StorServerConfig config = new StorServerConfig(builder); assertEquals(1.0 + (double) i * 0.5, config.node_capacity(), 0.001); } } @Test void testRootFolder() { ContentCluster cluster = ContentClusterUtils.createCluster(cluster("storage", ""), new MockRoot()); StorageNode node = cluster.getStorageCluster().getChildren().get("0"); { StorServerConfig.Builder builder = new StorServerConfig.Builder(); cluster.getStorageCluster().getConfig(builder); node.getConfig(builder); StorServerConfig config = new StorServerConfig(builder); assertEquals(getDefaults().underVespaHome("var/db/vespa/search/storage/storage/0"), config.root_folder()); } { StorServerConfig.Builder builder = new StorServerConfig.Builder(); cluster.getDistributorNodes().getConfig(builder); 
cluster.getDistributorNodes().getChildren().get("0").getConfig(builder); StorServerConfig config = new StorServerConfig(builder); assertEquals(getDefaults().underVespaHome("var/db/vespa/search/storage/distributor/0"), config.root_folder()); } } @Test void testGenericPersistenceTuning() { String xml = joinLines( "<cluster id=\"storage\">", " <redundancy>2</redundancy>" + " <documents/>", " <engine>", " <fail-partition-on-error>true</fail-partition-on-error>", " <revert-time>34m</revert-time>", " <recovery-time>5d</recovery-time>", " </engine>", " <group>", " node distribution-key=\"0\" hostalias=\"mockhost\"/>", " </group>", "</cluster>"); ContentCluster cluster = ContentClusterUtils.createCluster(xml, new MockRoot()); PersistenceConfig.Builder builder = new PersistenceConfig.Builder(); cluster.getStorageCluster().getConfig(builder); PersistenceConfig config = new PersistenceConfig(builder); assertTrue(config.fail_partition_on_error()); assertEquals(34 * 60, config.revert_time_period()); assertEquals(5 * 24 * 60 * 60, config.keep_remove_time_period()); } @Test void requireThatUserDoesNotSpecifyBothGroupAndNodes() { String xml = joinLines( "<cluster id=\"storage\">", " <redundancy>2</redundancy>" + " <documents/>", " <engine>", " <fail-partition-on-error>true</fail-partition-on-error>", " <revert-time>34m</revert-time>", " <recovery-time>5d</recovery-time>", " </engine>", " <group>", " <node distribution-key=\"0\" hostalias=\"mockhost\"/>", " </group>", " <nodes>", " <node distribution-key=\"1\" hostalias=\"mockhost\"/>", " </nodes>", "</cluster>"); try { final MockRoot root = new MockRoot(); root.getDeployState().getDocumentModel().getDocumentManager().add( new NewDocumentType(new NewDocumentType.Name("music")) ); ContentClusterUtils.createCluster(xml, root); fail("Did not fail when having both group and nodes"); } catch (RuntimeException e) { assertEquals("In content cluster 'storage': Both <group> and <nodes> is specified: Only one of these tags can be used in the 
same configuration", Exceptions.toMessageString(e)); } } @Test void requireThatGroupNamesMustBeUniqueAmongstSiblings() { String xml = joinLines( "<cluster id=\"storage\">", " <redundancy>2</redundancy>", " <documents/>", " <group>", " <distribution partitions=\"*\"/>", " <group distribution-key=\"0\" name=\"bar\">", " <node distribution-key=\"0\" hostalias=\"mockhost\"/>", " </group>", " <group distribution-key=\"0\" name=\"bar\">", " <node distribution-key=\"1\" hostalias=\"mockhost\"/>", " </group>", " </group>", "</cluster>"); try { ContentClusterUtils.createCluster(xml, new MockRoot()); fail("Did not get exception with duplicate group names"); } catch (RuntimeException e) { assertEquals("Cluster 'storage' has multiple groups with name 'bar' in the same subgroup. " + "Group sibling names must be unique.", e.getMessage()); } } @Test void requireThatGroupNamesCanBeDuplicatedAcrossLevels() { String xml = joinLines( "<cluster id=\"storage\">", " <redundancy>2</redundancy>", " <documents/>", " <group>", " <distribution partitions=\"*\"/>", " <group distribution-key=\"0\" name=\"bar\">", " <group distribution-key=\"0\" name=\"foo\">", " <node distribution-key=\"0\" hostalias=\"mockhost\"/>", " </group>", " </group>", " <group distribution-key=\"0\" name=\"foo\">", " <group distribution-key=\"0\" name=\"bar\">", " <node distribution-key=\"1\" hostalias=\"mockhost\"/>", " </group>", " </group>", " </group>", "</cluster>"); ContentClusterUtils.createCluster(xml, new MockRoot()); } @Test void requireThatNestedGroupsRequireDistribution() { String xml = joinLines( "<cluster id=\"storage\">", " <redundancy>2</redundancy>" + " <documents/>", " <group>", " <group distribution-key=\"0\" name=\"bar\">", " <node distribution-key=\"0\" hostalias=\"mockhost\"/>", " </group>", " <group distribution-key=\"0\" name=\"baz\">", " <node distribution-key=\"1\" hostalias=\"mockhost\"/>", " </group>", " </group>", "</cluster>"); try { ContentClusterUtils.createCluster(xml, new MockRoot()); 
fail("Did not get exception with missing distribution element"); } catch (RuntimeException e) { assertEquals("In content cluster 'storage': 'distribution' attribute is required with multiple subgroups", Exceptions.toMessageString(e)); } } }
Actually, the JsonParser would detect this ...
static void fillTensor(TokenBuffer buffer, TensorFieldValue tensorFieldValue) { Tensor.Builder builder = Tensor.Builder.of(tensorFieldValue.getDataType().getTensorType()); expectOneOf(buffer.current(), JsonToken.START_OBJECT, JsonToken.START_ARRAY); int initNesting = buffer.nesting(); while ( ! buffer.isEmpty()) { Supplier<Token> lookahead = buffer.lookahead(); Token next = lookahead.get(); if (TENSOR_CELLS.equals(next.name) && ! primitiveContent(next.token, lookahead.get().token)) { buffer.next(); readTensorCells(buffer, builder); } else if (TENSOR_VALUES.equals(next.name) && builder.type().dimensions().stream().allMatch(Dimension::isIndexed)) { buffer.next(); readTensorValues(buffer, builder); } else if (TENSOR_BLOCKS.equals(next.name)) { buffer.next(); readTensorBlocks(buffer, builder); } else if (TENSOR_TYPE.equals(next.name) && next.token == JsonToken.VALUE_STRING) { buffer.next(); } else if (buffer.nesting() == initNesting && JsonToken.END_OBJECT == next.token) { buffer.next(); break; } else { readDirectTensorValue(buffer, builder); break; } } if (buffer.nesting() + 1 != initNesting) throw new IllegalArgumentException("incomplete JSON structure for " + tensorFieldValue); expectOneOf(buffer.current(), JsonToken.END_OBJECT, JsonToken.END_ARRAY); tensorFieldValue.assign(builder.build()); }
throw new IllegalArgumentException("incomplete JSON structure for " + tensorFieldValue);
static void fillTensor(TokenBuffer buffer, TensorFieldValue tensorFieldValue) { Tensor.Builder builder = Tensor.Builder.of(tensorFieldValue.getDataType().getTensorType()); expectOneOf(buffer.current(), JsonToken.START_OBJECT, JsonToken.START_ARRAY); int initNesting = buffer.nesting(); while (true) { Supplier<Token> lookahead = buffer.lookahead(); Token next = lookahead.get(); if (TENSOR_CELLS.equals(next.name) && ! primitiveContent(next.token, lookahead.get().token)) { buffer.next(); readTensorCells(buffer, builder); } else if (TENSOR_VALUES.equals(next.name) && builder.type().dimensions().stream().allMatch(Dimension::isIndexed)) { buffer.next(); readTensorValues(buffer, builder); } else if (TENSOR_BLOCKS.equals(next.name)) { buffer.next(); readTensorBlocks(buffer, builder); } else if (TENSOR_TYPE.equals(next.name) && next.token == JsonToken.VALUE_STRING) { buffer.next(); } else if (buffer.nesting() == initNesting && JsonToken.END_OBJECT == next.token) { buffer.next(); break; } else { readDirectTensorValue(buffer, builder); break; } } expectOneOf(buffer.current(), JsonToken.END_OBJECT, JsonToken.END_ARRAY); tensorFieldValue.assign(builder.build()); }
class TensorReader { public static final String TENSOR_TYPE = "type"; public static final String TENSOR_ADDRESS = "address"; public static final String TENSOR_CELLS = "cells"; public static final String TENSOR_VALUES = "values"; public static final String TENSOR_BLOCKS = "blocks"; public static final String TENSOR_VALUE = "value"; static boolean primitiveContent(JsonToken current, JsonToken next) { if (current.isScalarValue()) return true; if (current == JsonToken.START_ARRAY) { if (next == JsonToken.END_ARRAY) return false; if (next.isScalarValue()) return true; } return false; } static void readTensorCells(TokenBuffer buffer, Tensor.Builder builder) { if (buffer.current() == JsonToken.START_ARRAY) { int initNesting = buffer.nesting(); for (buffer.next(); buffer.nesting() >= initNesting; buffer.next()) readTensorCell(buffer, builder); } else if (buffer.current() == JsonToken.START_OBJECT) { int initNesting = buffer.nesting(); for (buffer.next(); buffer.nesting() >= initNesting; buffer.next()) builder.cell(asAddress(buffer.currentName(), builder.type()), readDouble(buffer)); } else { throw new IllegalArgumentException("Expected 'cells' to contain an array or an object, but got " + buffer.current()); } expectCompositeEnd(buffer.current()); } private static void readTensorCell(TokenBuffer buffer, Tensor.Builder builder) { expectObjectStart(buffer.current()); TensorAddress address = null; Double value = null; int initNesting = buffer.nesting(); for (buffer.next(); buffer.nesting() >= initNesting; buffer.next()) { String currentName = buffer.currentName(); if (TensorReader.TENSOR_ADDRESS.equals(currentName)) { address = readAddress(buffer, builder.type()); } else if (TensorReader.TENSOR_VALUE.equals(currentName)) { value = readDouble(buffer); } } expectObjectEnd(buffer.current()); if (address == null) throw new IllegalArgumentException("Expected an object in a tensor 'cells' array to contain an 'address' field"); if (value == null) throw new 
IllegalArgumentException("Expected an object in a tensor 'cells' array to contain a 'value' field"); builder.cell(address, value); } private static void readTensorValues(TokenBuffer buffer, Tensor.Builder builder) { if ( ! (builder instanceof IndexedTensor.BoundBuilder indexedBuilder)) throw new IllegalArgumentException("The 'values' field can only be used with dense tensors. " + "Use 'cells' or 'blocks' instead"); if (buffer.current() == JsonToken.VALUE_STRING) { double[] decoded = decodeHexString(buffer.currentText(), builder.type().valueType()); if (decoded.length == 0) throw new IllegalArgumentException("The 'values' string does not contain any values"); for (int i = 0; i < decoded.length; i++) { indexedBuilder.cellByDirectIndex(i, decoded[i]); } return; } int index = 0; int initNesting = buffer.nesting(); for (buffer.next(); buffer.nesting() >= initNesting; buffer.next()) { if (buffer.current() == JsonToken.START_ARRAY || buffer.current() == JsonToken.END_ARRAY) continue; indexedBuilder.cellByDirectIndex(index++, readDouble(buffer)); } if (index == 0) throw new IllegalArgumentException("The 'values' array does not contain any values"); expectCompositeEnd(buffer.current()); } static void readTensorBlocks(TokenBuffer buffer, Tensor.Builder builder) { if ( ! (builder instanceof MixedTensor.BoundBuilder mixedBuilder)) throw new IllegalArgumentException("The 'blocks' field can only be used with mixed tensors with bound dimensions. 
" + "Use 'cells' or 'values' instead"); if (buffer.current() == JsonToken.START_ARRAY) { int initNesting = buffer.nesting(); for (buffer.next(); buffer.nesting() >= initNesting; buffer.next()) readTensorBlock(buffer, mixedBuilder); } else if (buffer.current() == JsonToken.START_OBJECT) { int initNesting = buffer.nesting(); for (buffer.next(); buffer.nesting() >= initNesting; buffer.next()) { TensorAddress mappedAddress = asAddress(buffer.currentName(), builder.type().mappedSubtype()); mixedBuilder.block(mappedAddress, readValues(buffer, (int) mixedBuilder.denseSubspaceSize(), mappedAddress, mixedBuilder.type())); } } else { throw new IllegalArgumentException("Expected 'blocks' to contain an array or an object, but got " + buffer.current()); } expectCompositeEnd(buffer.current()); } private static void readTensorBlock(TokenBuffer buffer, MixedTensor.BoundBuilder mixedBuilder) { expectObjectStart(buffer.current()); TensorAddress address = null; double[] values = null; int initNesting = buffer.nesting(); for (buffer.next(); buffer.nesting() >= initNesting; buffer.next()) { String currentName = buffer.currentName(); if (TensorReader.TENSOR_ADDRESS.equals(currentName)) address = readAddress(buffer, mixedBuilder.type().mappedSubtype()); else if (TensorReader.TENSOR_VALUES.equals(currentName)) values = readValues(buffer, (int)mixedBuilder.denseSubspaceSize(), address, mixedBuilder.type()); } expectObjectEnd(buffer.current()); if (address == null) throw new IllegalArgumentException("Expected a 'blocks' array object to contain an object 'address'"); if (values == null) throw new IllegalArgumentException("Expected a 'blocks' array object to contain an array 'values'"); mixedBuilder.block(address, values); } /** Reads a tensor value directly at the root, where the format is decided by the tensor type. 
*/ private static void readDirectTensorValue(TokenBuffer buffer, Tensor.Builder builder) { boolean hasIndexed = builder.type().dimensions().stream().anyMatch(TensorType.Dimension::isIndexed); boolean hasMapped = builder.type().dimensions().stream().anyMatch(TensorType.Dimension::isMapped); if (isArrayOfObjects(buffer)) readTensorCells(buffer, builder); else if ( ! hasMapped) readTensorValues(buffer, builder); else if (hasMapped && hasIndexed) readTensorBlocks(buffer, builder); else readTensorCells(buffer, builder); } private static boolean isArrayOfObjects(TokenBuffer buffer) { if (buffer.current() != JsonToken.START_ARRAY) return false; Supplier<Token> lookahead = buffer.lookahead(); Token next; while ((next = lookahead.get()).token == JsonToken.START_ARRAY); return next.token == JsonToken.START_OBJECT; } private static TensorAddress readAddress(TokenBuffer buffer, TensorType type) { expectObjectStart(buffer.current()); TensorAddress.Builder builder = new TensorAddress.Builder(type); int initNesting = buffer.nesting(); for (buffer.next(); buffer.nesting() >= initNesting; buffer.next()) builder.add(buffer.currentName(), buffer.currentText()); expectObjectEnd(buffer.current()); return builder.build(); } /** * Reads values for a tensor subspace block * * @param buffer the buffer containing the values * @param size the expected number of values * @param address the address for the block for error reporting, or null if not known * @param type the type of the tensor we are reading * @return the values read */ private static double[] readValues(TokenBuffer buffer, int size, TensorAddress address, TensorType type) { int index = 0; double[] values = new double[size]; if (buffer.current() == JsonToken.VALUE_STRING) { values = decodeHexString(buffer.currentText(), type.valueType()); index = values.length; } else { expectArrayStart(buffer.current()); int initNesting = buffer.nesting(); for (buffer.next(); buffer.nesting() >= initNesting; buffer.next()) { if (buffer.current() 
== JsonToken.START_ARRAY || buffer.current() == JsonToken.END_ARRAY) continue; values[index++] = readDouble(buffer); } expectCompositeEnd(buffer.current()); } if (index != size) throw new IllegalArgumentException((address != null ? "At " + address.toString(type) + ": " : "") + "Expected " + size + " values, but got " + index); return values; } private static double readDouble(TokenBuffer buffer) { try { if (buffer.current() == JsonToken.VALUE_STRING) { return decodeNumberString(buffer.currentText()); } return Double.parseDouble(buffer.currentText()); } catch (NumberFormatException e) { throw new IllegalArgumentException("Expected a number but got '" + buffer.currentText() + "'"); } } private static TensorAddress asAddress(String label, TensorType type) { if (type.dimensions().size() != 1) throw new IllegalArgumentException("Expected a tensor with a single dimension but got '" + type + "'"); return new TensorAddress.Builder(type).add(type.dimensions().get(0).name(), label).build(); } }
class TensorReader { public static final String TENSOR_TYPE = "type"; public static final String TENSOR_ADDRESS = "address"; public static final String TENSOR_CELLS = "cells"; public static final String TENSOR_VALUES = "values"; public static final String TENSOR_BLOCKS = "blocks"; public static final String TENSOR_VALUE = "value"; static boolean primitiveContent(JsonToken current, JsonToken next) { if (current.isScalarValue()) return true; if (current == JsonToken.START_ARRAY) { if (next == JsonToken.END_ARRAY) return false; if (next.isScalarValue()) return true; } return false; } static void readTensorCells(TokenBuffer buffer, Tensor.Builder builder) { if (buffer.current() == JsonToken.START_ARRAY) { int initNesting = buffer.nesting(); for (buffer.next(); buffer.nesting() >= initNesting; buffer.next()) readTensorCell(buffer, builder); } else if (buffer.current() == JsonToken.START_OBJECT) { int initNesting = buffer.nesting(); for (buffer.next(); buffer.nesting() >= initNesting; buffer.next()) builder.cell(asAddress(buffer.currentName(), builder.type()), readDouble(buffer)); } else { throw new IllegalArgumentException("Expected 'cells' to contain an array or an object, but got " + buffer.current()); } expectCompositeEnd(buffer.current()); } private static void readTensorCell(TokenBuffer buffer, Tensor.Builder builder) { expectObjectStart(buffer.current()); TensorAddress address = null; Double value = null; int initNesting = buffer.nesting(); for (buffer.next(); buffer.nesting() >= initNesting; buffer.next()) { String currentName = buffer.currentName(); if (TensorReader.TENSOR_ADDRESS.equals(currentName)) { address = readAddress(buffer, builder.type()); } else if (TensorReader.TENSOR_VALUE.equals(currentName)) { value = readDouble(buffer); } } expectObjectEnd(buffer.current()); if (address == null) throw new IllegalArgumentException("Expected an object in a tensor 'cells' array to contain an 'address' field"); if (value == null) throw new 
IllegalArgumentException("Expected an object in a tensor 'cells' array to contain a 'value' field"); builder.cell(address, value); } private static void readTensorValues(TokenBuffer buffer, Tensor.Builder builder) { if ( ! (builder instanceof IndexedTensor.BoundBuilder indexedBuilder)) throw new IllegalArgumentException("The 'values' field can only be used with dense tensors. " + "Use 'cells' or 'blocks' instead"); if (buffer.current() == JsonToken.VALUE_STRING) { double[] decoded = decodeHexString(buffer.currentText(), builder.type().valueType()); if (decoded.length == 0) throw new IllegalArgumentException("The 'values' string does not contain any values"); for (int i = 0; i < decoded.length; i++) { indexedBuilder.cellByDirectIndex(i, decoded[i]); } return; } int index = 0; int initNesting = buffer.nesting(); for (buffer.next(); buffer.nesting() >= initNesting; buffer.next()) { if (buffer.current() == JsonToken.START_ARRAY || buffer.current() == JsonToken.END_ARRAY) continue; indexedBuilder.cellByDirectIndex(index++, readDouble(buffer)); } if (index == 0) throw new IllegalArgumentException("The 'values' array does not contain any values"); expectCompositeEnd(buffer.current()); } static void readTensorBlocks(TokenBuffer buffer, Tensor.Builder builder) { if ( ! (builder instanceof MixedTensor.BoundBuilder mixedBuilder)) throw new IllegalArgumentException("The 'blocks' field can only be used with mixed tensors with bound dimensions. 
" + "Use 'cells' or 'values' instead"); if (buffer.current() == JsonToken.START_ARRAY) { int initNesting = buffer.nesting(); for (buffer.next(); buffer.nesting() >= initNesting; buffer.next()) readTensorBlock(buffer, mixedBuilder); } else if (buffer.current() == JsonToken.START_OBJECT) { int initNesting = buffer.nesting(); for (buffer.next(); buffer.nesting() >= initNesting; buffer.next()) { TensorAddress mappedAddress = asAddress(buffer.currentName(), builder.type().mappedSubtype()); mixedBuilder.block(mappedAddress, readValues(buffer, (int) mixedBuilder.denseSubspaceSize(), mappedAddress, mixedBuilder.type())); } } else { throw new IllegalArgumentException("Expected 'blocks' to contain an array or an object, but got " + buffer.current()); } expectCompositeEnd(buffer.current()); } private static void readTensorBlock(TokenBuffer buffer, MixedTensor.BoundBuilder mixedBuilder) { expectObjectStart(buffer.current()); TensorAddress address = null; double[] values = null; int initNesting = buffer.nesting(); for (buffer.next(); buffer.nesting() >= initNesting; buffer.next()) { String currentName = buffer.currentName(); if (TensorReader.TENSOR_ADDRESS.equals(currentName)) address = readAddress(buffer, mixedBuilder.type().mappedSubtype()); else if (TensorReader.TENSOR_VALUES.equals(currentName)) values = readValues(buffer, (int)mixedBuilder.denseSubspaceSize(), address, mixedBuilder.type()); } expectObjectEnd(buffer.current()); if (address == null) throw new IllegalArgumentException("Expected a 'blocks' array object to contain an object 'address'"); if (values == null) throw new IllegalArgumentException("Expected a 'blocks' array object to contain an array 'values'"); mixedBuilder.block(address, values); } /** Reads a tensor value directly at the root, where the format is decided by the tensor type. 
*/ private static void readDirectTensorValue(TokenBuffer buffer, Tensor.Builder builder) { boolean hasIndexed = builder.type().dimensions().stream().anyMatch(TensorType.Dimension::isIndexed); boolean hasMapped = builder.type().dimensions().stream().anyMatch(TensorType.Dimension::isMapped); if (isArrayOfObjects(buffer)) readTensorCells(buffer, builder); else if ( ! hasMapped) readTensorValues(buffer, builder); else if (hasMapped && hasIndexed) readTensorBlocks(buffer, builder); else readTensorCells(buffer, builder); } private static boolean isArrayOfObjects(TokenBuffer buffer) { if (buffer.current() != JsonToken.START_ARRAY) return false; Supplier<Token> lookahead = buffer.lookahead(); Token next; while ((next = lookahead.get()).token == JsonToken.START_ARRAY) { } return next.token == JsonToken.START_OBJECT; } private static TensorAddress readAddress(TokenBuffer buffer, TensorType type) { expectObjectStart(buffer.current()); TensorAddress.Builder builder = new TensorAddress.Builder(type); int initNesting = buffer.nesting(); for (buffer.next(); buffer.nesting() >= initNesting; buffer.next()) builder.add(buffer.currentName(), buffer.currentText()); expectObjectEnd(buffer.current()); return builder.build(); } /** * Reads values for a tensor subspace block * * @param buffer the buffer containing the values * @param size the expected number of values * @param address the address for the block for error reporting, or null if not known * @param type the type of the tensor we are reading * @return the values read */ private static double[] readValues(TokenBuffer buffer, int size, TensorAddress address, TensorType type) { int index = 0; double[] values = new double[size]; if (buffer.current() == JsonToken.VALUE_STRING) { values = decodeHexString(buffer.currentText(), type.valueType()); index = values.length; } else { expectArrayStart(buffer.current()); int initNesting = buffer.nesting(); for (buffer.next(); buffer.nesting() >= initNesting; buffer.next()) { if 
(buffer.current() == JsonToken.START_ARRAY || buffer.current() == JsonToken.END_ARRAY) continue; values[index++] = readDouble(buffer); } expectCompositeEnd(buffer.current()); } if (index != size) throw new IllegalArgumentException((address != null ? "At " + address.toString(type) + ": " : "") + "Expected " + size + " values, but got " + index); return values; } private static double readDouble(TokenBuffer buffer) { try { if (buffer.current() == JsonToken.VALUE_STRING) { return decodeNumberString(buffer.currentText()); } return Double.parseDouble(buffer.currentText()); } catch (NumberFormatException e) { throw new IllegalArgumentException("Expected a number but got '" + buffer.currentText() + "'"); } } private static TensorAddress asAddress(String label, TensorType type) { if (type.dimensions().size() != 1) throw new IllegalArgumentException("Expected a tensor with a single dimension but got '" + type + "'"); return new TensorAddress.Builder(type).add(type.dimensions().get(0).name(), label).build(); } }
Reverted.
static void fillTensor(TokenBuffer buffer, TensorFieldValue tensorFieldValue) { Tensor.Builder builder = Tensor.Builder.of(tensorFieldValue.getDataType().getTensorType()); expectOneOf(buffer.current(), JsonToken.START_OBJECT, JsonToken.START_ARRAY); int initNesting = buffer.nesting(); while ( ! buffer.isEmpty()) { Supplier<Token> lookahead = buffer.lookahead(); Token next = lookahead.get(); if (TENSOR_CELLS.equals(next.name) && ! primitiveContent(next.token, lookahead.get().token)) { buffer.next(); readTensorCells(buffer, builder); } else if (TENSOR_VALUES.equals(next.name) && builder.type().dimensions().stream().allMatch(Dimension::isIndexed)) { buffer.next(); readTensorValues(buffer, builder); } else if (TENSOR_BLOCKS.equals(next.name)) { buffer.next(); readTensorBlocks(buffer, builder); } else if (TENSOR_TYPE.equals(next.name) && next.token == JsonToken.VALUE_STRING) { buffer.next(); } else if (buffer.nesting() == initNesting && JsonToken.END_OBJECT == next.token) { buffer.next(); break; } else { readDirectTensorValue(buffer, builder); break; } } if (buffer.nesting() + 1 != initNesting) throw new IllegalArgumentException("incomplete JSON structure for " + tensorFieldValue); expectOneOf(buffer.current(), JsonToken.END_OBJECT, JsonToken.END_ARRAY); tensorFieldValue.assign(builder.build()); }
throw new IllegalArgumentException("incomplete JSON structure for " + tensorFieldValue);
static void fillTensor(TokenBuffer buffer, TensorFieldValue tensorFieldValue) { Tensor.Builder builder = Tensor.Builder.of(tensorFieldValue.getDataType().getTensorType()); expectOneOf(buffer.current(), JsonToken.START_OBJECT, JsonToken.START_ARRAY); int initNesting = buffer.nesting(); while (true) { Supplier<Token> lookahead = buffer.lookahead(); Token next = lookahead.get(); if (TENSOR_CELLS.equals(next.name) && ! primitiveContent(next.token, lookahead.get().token)) { buffer.next(); readTensorCells(buffer, builder); } else if (TENSOR_VALUES.equals(next.name) && builder.type().dimensions().stream().allMatch(Dimension::isIndexed)) { buffer.next(); readTensorValues(buffer, builder); } else if (TENSOR_BLOCKS.equals(next.name)) { buffer.next(); readTensorBlocks(buffer, builder); } else if (TENSOR_TYPE.equals(next.name) && next.token == JsonToken.VALUE_STRING) { buffer.next(); } else if (buffer.nesting() == initNesting && JsonToken.END_OBJECT == next.token) { buffer.next(); break; } else { readDirectTensorValue(buffer, builder); break; } } expectOneOf(buffer.current(), JsonToken.END_OBJECT, JsonToken.END_ARRAY); tensorFieldValue.assign(builder.build()); }
class TensorReader { public static final String TENSOR_TYPE = "type"; public static final String TENSOR_ADDRESS = "address"; public static final String TENSOR_CELLS = "cells"; public static final String TENSOR_VALUES = "values"; public static final String TENSOR_BLOCKS = "blocks"; public static final String TENSOR_VALUE = "value"; static boolean primitiveContent(JsonToken current, JsonToken next) { if (current.isScalarValue()) return true; if (current == JsonToken.START_ARRAY) { if (next == JsonToken.END_ARRAY) return false; if (next.isScalarValue()) return true; } return false; } static void readTensorCells(TokenBuffer buffer, Tensor.Builder builder) { if (buffer.current() == JsonToken.START_ARRAY) { int initNesting = buffer.nesting(); for (buffer.next(); buffer.nesting() >= initNesting; buffer.next()) readTensorCell(buffer, builder); } else if (buffer.current() == JsonToken.START_OBJECT) { int initNesting = buffer.nesting(); for (buffer.next(); buffer.nesting() >= initNesting; buffer.next()) builder.cell(asAddress(buffer.currentName(), builder.type()), readDouble(buffer)); } else { throw new IllegalArgumentException("Expected 'cells' to contain an array or an object, but got " + buffer.current()); } expectCompositeEnd(buffer.current()); } private static void readTensorCell(TokenBuffer buffer, Tensor.Builder builder) { expectObjectStart(buffer.current()); TensorAddress address = null; Double value = null; int initNesting = buffer.nesting(); for (buffer.next(); buffer.nesting() >= initNesting; buffer.next()) { String currentName = buffer.currentName(); if (TensorReader.TENSOR_ADDRESS.equals(currentName)) { address = readAddress(buffer, builder.type()); } else if (TensorReader.TENSOR_VALUE.equals(currentName)) { value = readDouble(buffer); } } expectObjectEnd(buffer.current()); if (address == null) throw new IllegalArgumentException("Expected an object in a tensor 'cells' array to contain an 'address' field"); if (value == null) throw new 
IllegalArgumentException("Expected an object in a tensor 'cells' array to contain a 'value' field"); builder.cell(address, value); } private static void readTensorValues(TokenBuffer buffer, Tensor.Builder builder) { if ( ! (builder instanceof IndexedTensor.BoundBuilder indexedBuilder)) throw new IllegalArgumentException("The 'values' field can only be used with dense tensors. " + "Use 'cells' or 'blocks' instead"); if (buffer.current() == JsonToken.VALUE_STRING) { double[] decoded = decodeHexString(buffer.currentText(), builder.type().valueType()); if (decoded.length == 0) throw new IllegalArgumentException("The 'values' string does not contain any values"); for (int i = 0; i < decoded.length; i++) { indexedBuilder.cellByDirectIndex(i, decoded[i]); } return; } int index = 0; int initNesting = buffer.nesting(); for (buffer.next(); buffer.nesting() >= initNesting; buffer.next()) { if (buffer.current() == JsonToken.START_ARRAY || buffer.current() == JsonToken.END_ARRAY) continue; indexedBuilder.cellByDirectIndex(index++, readDouble(buffer)); } if (index == 0) throw new IllegalArgumentException("The 'values' array does not contain any values"); expectCompositeEnd(buffer.current()); } static void readTensorBlocks(TokenBuffer buffer, Tensor.Builder builder) { if ( ! (builder instanceof MixedTensor.BoundBuilder mixedBuilder)) throw new IllegalArgumentException("The 'blocks' field can only be used with mixed tensors with bound dimensions. 
" + "Use 'cells' or 'values' instead"); if (buffer.current() == JsonToken.START_ARRAY) { int initNesting = buffer.nesting(); for (buffer.next(); buffer.nesting() >= initNesting; buffer.next()) readTensorBlock(buffer, mixedBuilder); } else if (buffer.current() == JsonToken.START_OBJECT) { int initNesting = buffer.nesting(); for (buffer.next(); buffer.nesting() >= initNesting; buffer.next()) { TensorAddress mappedAddress = asAddress(buffer.currentName(), builder.type().mappedSubtype()); mixedBuilder.block(mappedAddress, readValues(buffer, (int) mixedBuilder.denseSubspaceSize(), mappedAddress, mixedBuilder.type())); } } else { throw new IllegalArgumentException("Expected 'blocks' to contain an array or an object, but got " + buffer.current()); } expectCompositeEnd(buffer.current()); } private static void readTensorBlock(TokenBuffer buffer, MixedTensor.BoundBuilder mixedBuilder) { expectObjectStart(buffer.current()); TensorAddress address = null; double[] values = null; int initNesting = buffer.nesting(); for (buffer.next(); buffer.nesting() >= initNesting; buffer.next()) { String currentName = buffer.currentName(); if (TensorReader.TENSOR_ADDRESS.equals(currentName)) address = readAddress(buffer, mixedBuilder.type().mappedSubtype()); else if (TensorReader.TENSOR_VALUES.equals(currentName)) values = readValues(buffer, (int)mixedBuilder.denseSubspaceSize(), address, mixedBuilder.type()); } expectObjectEnd(buffer.current()); if (address == null) throw new IllegalArgumentException("Expected a 'blocks' array object to contain an object 'address'"); if (values == null) throw new IllegalArgumentException("Expected a 'blocks' array object to contain an array 'values'"); mixedBuilder.block(address, values); } /** Reads a tensor value directly at the root, where the format is decided by the tensor type. 
*/ private static void readDirectTensorValue(TokenBuffer buffer, Tensor.Builder builder) { boolean hasIndexed = builder.type().dimensions().stream().anyMatch(TensorType.Dimension::isIndexed); boolean hasMapped = builder.type().dimensions().stream().anyMatch(TensorType.Dimension::isMapped); if (isArrayOfObjects(buffer)) readTensorCells(buffer, builder); else if ( ! hasMapped) readTensorValues(buffer, builder); else if (hasMapped && hasIndexed) readTensorBlocks(buffer, builder); else readTensorCells(buffer, builder); } private static boolean isArrayOfObjects(TokenBuffer buffer) { if (buffer.current() != JsonToken.START_ARRAY) return false; Supplier<Token> lookahead = buffer.lookahead(); Token next; while ((next = lookahead.get()).token == JsonToken.START_ARRAY); return next.token == JsonToken.START_OBJECT; } private static TensorAddress readAddress(TokenBuffer buffer, TensorType type) { expectObjectStart(buffer.current()); TensorAddress.Builder builder = new TensorAddress.Builder(type); int initNesting = buffer.nesting(); for (buffer.next(); buffer.nesting() >= initNesting; buffer.next()) builder.add(buffer.currentName(), buffer.currentText()); expectObjectEnd(buffer.current()); return builder.build(); } /** * Reads values for a tensor subspace block * * @param buffer the buffer containing the values * @param size the expected number of values * @param address the address for the block for error reporting, or null if not known * @param type the type of the tensor we are reading * @return the values read */ private static double[] readValues(TokenBuffer buffer, int size, TensorAddress address, TensorType type) { int index = 0; double[] values = new double[size]; if (buffer.current() == JsonToken.VALUE_STRING) { values = decodeHexString(buffer.currentText(), type.valueType()); index = values.length; } else { expectArrayStart(buffer.current()); int initNesting = buffer.nesting(); for (buffer.next(); buffer.nesting() >= initNesting; buffer.next()) { if (buffer.current() 
== JsonToken.START_ARRAY || buffer.current() == JsonToken.END_ARRAY) continue; values[index++] = readDouble(buffer); } expectCompositeEnd(buffer.current()); } if (index != size) throw new IllegalArgumentException((address != null ? "At " + address.toString(type) + ": " : "") + "Expected " + size + " values, but got " + index); return values; } private static double readDouble(TokenBuffer buffer) { try { if (buffer.current() == JsonToken.VALUE_STRING) { return decodeNumberString(buffer.currentText()); } return Double.parseDouble(buffer.currentText()); } catch (NumberFormatException e) { throw new IllegalArgumentException("Expected a number but got '" + buffer.currentText() + "'"); } } private static TensorAddress asAddress(String label, TensorType type) { if (type.dimensions().size() != 1) throw new IllegalArgumentException("Expected a tensor with a single dimension but got '" + type + "'"); return new TensorAddress.Builder(type).add(type.dimensions().get(0).name(), label).build(); } }
class TensorReader { public static final String TENSOR_TYPE = "type"; public static final String TENSOR_ADDRESS = "address"; public static final String TENSOR_CELLS = "cells"; public static final String TENSOR_VALUES = "values"; public static final String TENSOR_BLOCKS = "blocks"; public static final String TENSOR_VALUE = "value"; static boolean primitiveContent(JsonToken current, JsonToken next) { if (current.isScalarValue()) return true; if (current == JsonToken.START_ARRAY) { if (next == JsonToken.END_ARRAY) return false; if (next.isScalarValue()) return true; } return false; } static void readTensorCells(TokenBuffer buffer, Tensor.Builder builder) { if (buffer.current() == JsonToken.START_ARRAY) { int initNesting = buffer.nesting(); for (buffer.next(); buffer.nesting() >= initNesting; buffer.next()) readTensorCell(buffer, builder); } else if (buffer.current() == JsonToken.START_OBJECT) { int initNesting = buffer.nesting(); for (buffer.next(); buffer.nesting() >= initNesting; buffer.next()) builder.cell(asAddress(buffer.currentName(), builder.type()), readDouble(buffer)); } else { throw new IllegalArgumentException("Expected 'cells' to contain an array or an object, but got " + buffer.current()); } expectCompositeEnd(buffer.current()); } private static void readTensorCell(TokenBuffer buffer, Tensor.Builder builder) { expectObjectStart(buffer.current()); TensorAddress address = null; Double value = null; int initNesting = buffer.nesting(); for (buffer.next(); buffer.nesting() >= initNesting; buffer.next()) { String currentName = buffer.currentName(); if (TensorReader.TENSOR_ADDRESS.equals(currentName)) { address = readAddress(buffer, builder.type()); } else if (TensorReader.TENSOR_VALUE.equals(currentName)) { value = readDouble(buffer); } } expectObjectEnd(buffer.current()); if (address == null) throw new IllegalArgumentException("Expected an object in a tensor 'cells' array to contain an 'address' field"); if (value == null) throw new 
IllegalArgumentException("Expected an object in a tensor 'cells' array to contain a 'value' field"); builder.cell(address, value); } private static void readTensorValues(TokenBuffer buffer, Tensor.Builder builder) { if ( ! (builder instanceof IndexedTensor.BoundBuilder indexedBuilder)) throw new IllegalArgumentException("The 'values' field can only be used with dense tensors. " + "Use 'cells' or 'blocks' instead"); if (buffer.current() == JsonToken.VALUE_STRING) { double[] decoded = decodeHexString(buffer.currentText(), builder.type().valueType()); if (decoded.length == 0) throw new IllegalArgumentException("The 'values' string does not contain any values"); for (int i = 0; i < decoded.length; i++) { indexedBuilder.cellByDirectIndex(i, decoded[i]); } return; } int index = 0; int initNesting = buffer.nesting(); for (buffer.next(); buffer.nesting() >= initNesting; buffer.next()) { if (buffer.current() == JsonToken.START_ARRAY || buffer.current() == JsonToken.END_ARRAY) continue; indexedBuilder.cellByDirectIndex(index++, readDouble(buffer)); } if (index == 0) throw new IllegalArgumentException("The 'values' array does not contain any values"); expectCompositeEnd(buffer.current()); } static void readTensorBlocks(TokenBuffer buffer, Tensor.Builder builder) { if ( ! (builder instanceof MixedTensor.BoundBuilder mixedBuilder)) throw new IllegalArgumentException("The 'blocks' field can only be used with mixed tensors with bound dimensions. 
" + "Use 'cells' or 'values' instead"); if (buffer.current() == JsonToken.START_ARRAY) { int initNesting = buffer.nesting(); for (buffer.next(); buffer.nesting() >= initNesting; buffer.next()) readTensorBlock(buffer, mixedBuilder); } else if (buffer.current() == JsonToken.START_OBJECT) { int initNesting = buffer.nesting(); for (buffer.next(); buffer.nesting() >= initNesting; buffer.next()) { TensorAddress mappedAddress = asAddress(buffer.currentName(), builder.type().mappedSubtype()); mixedBuilder.block(mappedAddress, readValues(buffer, (int) mixedBuilder.denseSubspaceSize(), mappedAddress, mixedBuilder.type())); } } else { throw new IllegalArgumentException("Expected 'blocks' to contain an array or an object, but got " + buffer.current()); } expectCompositeEnd(buffer.current()); } private static void readTensorBlock(TokenBuffer buffer, MixedTensor.BoundBuilder mixedBuilder) { expectObjectStart(buffer.current()); TensorAddress address = null; double[] values = null; int initNesting = buffer.nesting(); for (buffer.next(); buffer.nesting() >= initNesting; buffer.next()) { String currentName = buffer.currentName(); if (TensorReader.TENSOR_ADDRESS.equals(currentName)) address = readAddress(buffer, mixedBuilder.type().mappedSubtype()); else if (TensorReader.TENSOR_VALUES.equals(currentName)) values = readValues(buffer, (int)mixedBuilder.denseSubspaceSize(), address, mixedBuilder.type()); } expectObjectEnd(buffer.current()); if (address == null) throw new IllegalArgumentException("Expected a 'blocks' array object to contain an object 'address'"); if (values == null) throw new IllegalArgumentException("Expected a 'blocks' array object to contain an array 'values'"); mixedBuilder.block(address, values); } /** Reads a tensor value directly at the root, where the format is decided by the tensor type. 
*/ private static void readDirectTensorValue(TokenBuffer buffer, Tensor.Builder builder) { boolean hasIndexed = builder.type().dimensions().stream().anyMatch(TensorType.Dimension::isIndexed); boolean hasMapped = builder.type().dimensions().stream().anyMatch(TensorType.Dimension::isMapped); if (isArrayOfObjects(buffer)) readTensorCells(buffer, builder); else if ( ! hasMapped) readTensorValues(buffer, builder); else if (hasMapped && hasIndexed) readTensorBlocks(buffer, builder); else readTensorCells(buffer, builder); } private static boolean isArrayOfObjects(TokenBuffer buffer) { if (buffer.current() != JsonToken.START_ARRAY) return false; Supplier<Token> lookahead = buffer.lookahead(); Token next; while ((next = lookahead.get()).token == JsonToken.START_ARRAY) { } return next.token == JsonToken.START_OBJECT; } private static TensorAddress readAddress(TokenBuffer buffer, TensorType type) { expectObjectStart(buffer.current()); TensorAddress.Builder builder = new TensorAddress.Builder(type); int initNesting = buffer.nesting(); for (buffer.next(); buffer.nesting() >= initNesting; buffer.next()) builder.add(buffer.currentName(), buffer.currentText()); expectObjectEnd(buffer.current()); return builder.build(); } /** * Reads values for a tensor subspace block * * @param buffer the buffer containing the values * @param size the expected number of values * @param address the address for the block for error reporting, or null if not known * @param type the type of the tensor we are reading * @return the values read */ private static double[] readValues(TokenBuffer buffer, int size, TensorAddress address, TensorType type) { int index = 0; double[] values = new double[size]; if (buffer.current() == JsonToken.VALUE_STRING) { values = decodeHexString(buffer.currentText(), type.valueType()); index = values.length; } else { expectArrayStart(buffer.current()); int initNesting = buffer.nesting(); for (buffer.next(); buffer.nesting() >= initNesting; buffer.next()) { if 
(buffer.current() == JsonToken.START_ARRAY || buffer.current() == JsonToken.END_ARRAY) continue; values[index++] = readDouble(buffer); } expectCompositeEnd(buffer.current()); } if (index != size) throw new IllegalArgumentException((address != null ? "At " + address.toString(type) + ": " : "") + "Expected " + size + " values, but got " + index); return values; } private static double readDouble(TokenBuffer buffer) { try { if (buffer.current() == JsonToken.VALUE_STRING) { return decodeNumberString(buffer.currentText()); } return Double.parseDouble(buffer.currentText()); } catch (NumberFormatException e) { throw new IllegalArgumentException("Expected a number but got '" + buffer.currentText() + "'"); } } private static TensorAddress asAddress(String label, TensorType type) { if (type.dimensions().size() != 1) throw new IllegalArgumentException("Expected a tensor with a single dimension but got '" + type + "'"); return new TensorAddress.Builder(type).add(type.dimensions().get(0).name(), label).build(); } }
"te*x*t" blocks, not "test" ... 🤦
public void tensor_modify_update_with_replace_operation() { assertTensorModifyUpdate("{{x:a,y:b}:2.0}", TensorModifyUpdate.Operation.REPLACE, "sparse_tensor", """ { "operation": "replace", "cells": [ { "address": { "x": "a", "y": "b" }, "value": 2.0 } ] }"""); }
}""");
public void tensor_modify_update_with_replace_operation() { assertTensorModifyUpdate("{{x:a,y:b}:2.0}", TensorModifyUpdate.Operation.REPLACE, "sparse_tensor", """ { "operation": "replace", "cells": [ { "address": { "x": "a", "y": "b" }, "value": 2.0 } ] }"""); }
class JsonReaderTestCase { private DocumentTypeManager types; private JsonFactory parserFactory; @Before public void setUp() throws Exception { parserFactory = new JsonFactory(); types = new DocumentTypeManager(); { DocumentType x = new DocumentType("smoke"); x.addField(new Field("something", DataType.STRING)); x.addField(new Field("nalle", DataType.STRING)); x.addField(new Field("field1", DataType.STRING)); x.addField(new Field("field2", DataType.STRING)); x.addField(new Field("int1", DataType.INT)); x.addField(new Field("flag", DataType.BOOL)); x.addField(new Field("tensor1", DataType.getTensor(TensorType.fromSpec("tensor(x{})")))); types.registerDocumentType(x); } { DocumentType x = new DocumentType("mirrors"); StructDataType woo = new StructDataType("woo"); woo.addField(new Field("sandra", DataType.STRING)); woo.addField(new Field("cloud", DataType.STRING)); x.addField(new Field("skuggsjaa", woo)); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testarray"); DataType d = new ArrayDataType(DataType.STRING); x.addField(new Field("actualarray", d)); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testset"); DataType d = new WeightedSetDataType(DataType.STRING, true, true); x.addField(new Field("actualset", d)); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testmap"); DataType d = new MapDataType(DataType.STRING, DataType.STRING); x.addField(new Field("actualmap", d)); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testraw"); DataType d = DataType.RAW; x.addField(new Field("actualraw", d)); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testMapStringToArrayOfInt"); DataType value = new ArrayDataType(DataType.INT); DataType d = new MapDataType(DataType.STRING, value); x.addField(new Field("actualMapStringToArrayOfInt", d)); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testArrayOfArrayOfInt"); DataType inner = new 
ArrayDataType(DataType.INT); DataType outer = new ArrayDataType(inner); x.addField(new Field("arrayOfArrayOfInt", outer)); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testsinglepos"); DataType d = PositionDataType.INSTANCE; x.addField(new Field("singlepos", d)); x.addField(new Field("geopos", new GeoPosType(8))); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testtensor"); x.addField(new Field("sparse_single_dimension_tensor", new TensorDataType(new TensorType.Builder().mapped("x").build()))); x.addField(new Field("sparse_tensor", new TensorDataType(new TensorType.Builder().mapped("x").mapped("y").build()))); x.addField(new Field("dense_tensor", new TensorDataType(new TensorType.Builder().indexed("x", 2).indexed("y", 3).build()))); x.addField(new Field("dense_int8_tensor", new TensorDataType(TensorType.fromSpec("tensor<int8>(x[2],y[3])")))); x.addField(new Field("dense_unbound_tensor", new TensorDataType(new TensorType.Builder().indexed("x").indexed("y").build()))); x.addField(new Field("mixed_tensor", new TensorDataType(new TensorType.Builder().mapped("x").indexed("y", 3).build()))); x.addField(new Field("mixed_bfloat16_tensor", new TensorDataType(TensorType.fromSpec("tensor<bfloat16>(x{},y[3])")))); x.addField(new Field("mixed_tensor_adv", new TensorDataType(new TensorType.Builder().mapped("x").mapped("y").mapped("z").indexed("a", 3).build()))); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testpredicate"); x.addField(new Field("boolean", DataType.PREDICATE)); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testint"); x.addField(new Field("integerfield", DataType.INT)); types.registerDocumentType(x); } { DocumentType x = new DocumentType("testnull"); x.addField(new Field("intfield", DataType.INT)); x.addField(new Field("stringfield", DataType.STRING)); x.addField(new Field("arrayfield", new ArrayDataType(DataType.STRING))); x.addField(new Field("weightedsetfield", new 
WeightedSetDataType(DataType.STRING, true, true))); x.addField(new Field("mapfield", new MapDataType(DataType.STRING, DataType.STRING))); x.addField(new Field("tensorfield", new TensorDataType(new TensorType.Builder().indexed("x").build()))); types.registerDocumentType(x); } } @After public void tearDown() throws Exception { types = null; parserFactory = null; } private JsonReader createReader(String jsonInput) { InputStream input = new ByteArrayInputStream(Utf8.toBytes(jsonInput)); return new JsonReader(types, input, parserFactory); } @Test public void readSingleDocumentPut() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:smoke::doc1", "fields": { "something": "smoketest", "flag": true, "nalle": "bamse" } } """); smokeTestDoc(doc); } @Test public final void readSingleDocumentUpdate() throws IOException { DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:smoke::whee", "fields": { "something": { "assign": "orOther" } } } """); FieldUpdate f = doc.getFieldUpdate("something"); assertEquals(1, f.size()); assertTrue(f.getValueUpdate(0) instanceof AssignValueUpdate); assertEquals(new StringFieldValue("orOther"), f.getValueUpdate(0).getValue()); } @Test public void readClearField() throws IOException { DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:smoke::whee", "fields": { "int1": { "assign": null } } } """); FieldUpdate f = doc.getFieldUpdate("int1"); assertEquals(1, f.size()); assertTrue(f.getValueUpdate(0) instanceof ClearValueUpdate); assertNull(f.getValueUpdate(0).getValue()); } @Test public void smokeTest() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:smoke::doc1", "fields": { "something": "smoketest", "flag": true, "nalle": "bamse" } } """); smokeTestDoc(doc); } @Test public void docIdLookaheadTest() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:smoke::doc1", "fields": { "something": "smoketest", "flag": true, "nalle": "bamse" } } """); smokeTestDoc(doc); 
} @Test public void emptyDocTest() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:smoke::whee", "fields": { } }"""); assertEquals(new Document(types.getDocumentType("smoke"), new DocumentId("id:unittest:smoke::whee")), doc); } @Test public void testStruct() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:mirrors::whee", "fields": { "skuggsjaa": { "sandra": "person", "cloud": "another person" } } } """); FieldValue f = doc.getFieldValue(doc.getField("skuggsjaa")); assertSame(Struct.class, f.getClass()); Struct s = (Struct) f; assertEquals("person", ((StringFieldValue) s.getFieldValue("sandra")).getString()); } private DocumentUpdate parseUpdate(String json) throws IOException { InputStream rawDoc = new ByteArrayInputStream(Utf8.toBytes(json)); JsonReader r = new JsonReader(types, rawDoc, parserFactory); DocumentParseInfo parseInfo = r.parseDocument().get(); DocumentType docType = r.readDocumentType(parseInfo.documentId); DocumentUpdate update = new DocumentUpdate(docType, parseInfo.documentId); new VespaJsonDocumentReader(false).readUpdate(parseInfo.fieldsBuffer, update); return update; } @Test public void testStructUpdate() throws IOException { DocumentUpdate put = parseUpdate(""" { "update": "id:unittest:mirrors:g=test:whee", "create": true, "fields": { "skuggsjaa": { "assign": { "sandra": "person", "cloud": "another person" } } } } """); assertEquals(1, put.fieldUpdates().size()); FieldUpdate fu = put.fieldUpdates().iterator().next(); assertEquals(1, fu.getValueUpdates().size()); ValueUpdate vu = fu.getValueUpdate(0); assertTrue(vu instanceof AssignValueUpdate); AssignValueUpdate avu = (AssignValueUpdate) vu; assertTrue(avu.getValue() instanceof Struct); Struct s = (Struct) avu.getValue(); assertEquals(2, s.getFieldCount()); assertEquals(new StringFieldValue("person"), s.getFieldValue(s.getField("sandra"))); GrowableByteBuffer buf = new GrowableByteBuffer(); DocumentSerializer serializer = 
DocumentSerializerFactory.create6(buf); put.serialize(serializer); assertEquals(107, buf.position()); } @Test public final void testEmptyStructUpdate() throws IOException { DocumentUpdate put = parseUpdate(""" { "update": "id:unittest:mirrors:g=test:whee", "create": true, "fields": { "skuggsjaa": { "assign": { } } } } """); assertEquals(1, put.fieldUpdates().size()); FieldUpdate fu = put.fieldUpdates().iterator().next(); assertEquals(1, fu.getValueUpdates().size()); ValueUpdate vu = fu.getValueUpdate(0); assertTrue(vu instanceof AssignValueUpdate); AssignValueUpdate avu = (AssignValueUpdate) vu; assertTrue(avu.getValue() instanceof Struct); Struct s = (Struct) avu.getValue(); assertEquals(0, s.getFieldCount()); GrowableByteBuffer buf = new GrowableByteBuffer(); DocumentSerializer serializer = DocumentSerializerFactory.create6(buf); put.serialize(serializer); assertEquals(69, buf.position()); } @Test public void testUpdateArray() throws IOException { DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:testarray::whee", "fields": { "actualarray": { "add": [ "person", "another person" ] } } } """); checkSimpleArrayAdd(doc); } @Test public void testUpdateWeighted() throws IOException { DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:testset::whee", "fields": { "actualset": { "add": { "person": 37, "another person": 41 } } } } """); Map<String, Integer> weights = new HashMap<>(); FieldUpdate x = doc.getFieldUpdate("actualset"); for (ValueUpdate<?> v : x.getValueUpdates()) { AddValueUpdate adder = (AddValueUpdate) v; final String s = ((StringFieldValue) adder.getValue()).getString(); weights.put(s, adder.getWeight()); } assertEquals(2, weights.size()); final String o = "person"; final String o2 = "another person"; assertTrue(weights.containsKey(o)); assertTrue(weights.containsKey(o2)); assertEquals(Integer.valueOf(37), weights.get(o)); assertEquals(Integer.valueOf(41), weights.get(o2)); } @Test public void testUpdateMatch() throws IOException { 
DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:testset::whee", "fields": { "actualset": { "match": { "element": "person", "increment": 13 } } } } """); DocumentUpdate otherDoc = parseUpdate(""" { "update": "id:unittest:testset::whee", "fields": { "actualset": { "match": { "increment": 13, "element": "person" } } } }"""); assertEquals(doc, otherDoc); Map<String, Tuple2<Number, String>> matches = new HashMap<>(); FieldUpdate x = doc.getFieldUpdate("actualset"); for (ValueUpdate<?> v : x.getValueUpdates()) { MapValueUpdate adder = (MapValueUpdate) v; final String key = ((StringFieldValue) adder.getValue()) .getString(); String op = ((ArithmeticValueUpdate) adder.getUpdate()) .getOperator().toString(); Number n = ((ArithmeticValueUpdate) adder.getUpdate()).getOperand(); matches.put(key, new Tuple2<>(n, op)); } assertEquals(1, matches.size()); final String o = "person"; assertEquals("ADD", matches.get(o).second); assertEquals(Double.valueOf(13), matches.get(o).first); } @SuppressWarnings({ "cast", "unchecked", "rawtypes" }) @Test public void testArithmeticOperators() throws IOException { Tuple2[] operations = new Tuple2[] { new Tuple2<>(UPDATE_DECREMENT, ArithmeticValueUpdate.Operator.SUB), new Tuple2<>(UPDATE_DIVIDE, ArithmeticValueUpdate.Operator.DIV), new Tuple2<>(UPDATE_INCREMENT, ArithmeticValueUpdate.Operator.ADD), new Tuple2<>(UPDATE_MULTIPLY, ArithmeticValueUpdate.Operator.MUL) }; for (Tuple2<String, Operator> operator : operations) { DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:testset::whee", "fields": { "actualset": { "match": { "element": "person", "%s": 13 } } } } """.formatted(operator.first)); Map<String, Tuple2<Number, Operator>> matches = new HashMap<>(); FieldUpdate x = doc.getFieldUpdate("actualset"); for (ValueUpdate v : x.getValueUpdates()) { MapValueUpdate adder = (MapValueUpdate) v; final String key = ((StringFieldValue) adder.getValue()) .getString(); Operator op = ((ArithmeticValueUpdate) adder 
.getUpdate()).getOperator(); Number n = ((ArithmeticValueUpdate) adder.getUpdate()) .getOperand(); matches.put(key, new Tuple2<>(n, op)); } assertEquals(1, matches.size()); final String o = "person"; assertSame(operator.second, matches.get(o).second); assertEquals(Double.valueOf(13), matches.get(o).first); } } @SuppressWarnings("rawtypes") @Test public void testArrayIndexing() throws IOException { DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:testarray::whee", "fields": { "actualarray": { "match": { "element": 3, "assign": "nalle" } } } } """); Map<Number, String> matches = new HashMap<>(); FieldUpdate x = doc.getFieldUpdate("actualarray"); for (ValueUpdate v : x.getValueUpdates()) { MapValueUpdate adder = (MapValueUpdate) v; final Number key = ((IntegerFieldValue) adder.getValue()) .getNumber(); String op = ((StringFieldValue) adder.getUpdate() .getValue()).getString(); matches.put(key, op); } assertEquals(1, matches.size()); Number n = Integer.valueOf(3); assertEquals("nalle", matches.get(n)); } @Test public void testDocumentRemove() { JsonReader r = createReader(inputJson("{'remove': 'id:unittest:smoke::whee'}")); DocumentType docType = r.readDocumentType(new DocumentId("id:unittest:smoke::whee")); assertEquals("smoke", docType.getName()); } private Document docFromJson(String json) throws IOException { JsonReader r = createReader(json); DocumentParseInfo parseInfo = r.parseDocument().get(); DocumentType docType = r.readDocumentType(parseInfo.documentId); DocumentPut put = new DocumentPut(new Document(docType, parseInfo.documentId)); new VespaJsonDocumentReader(false).readPut(parseInfo.fieldsBuffer, put); return put.getDocument(); } @Test public void testWeightedSet() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testset::whee", "fields": { "actualset": { "nalle": 2, "tralle": 7 } } } """); FieldValue f = doc.getFieldValue(doc.getField("actualset")); assertSame(WeightedSet.class, f.getClass()); WeightedSet<?> w = 
(WeightedSet<?>) f; assertEquals(2, w.size()); assertEquals(Integer.valueOf(2), w.get(new StringFieldValue("nalle"))); assertEquals(Integer.valueOf(7), w.get(new StringFieldValue("tralle"))); } @Test public void testArray() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testarray::whee", "fields": { "actualarray": [ "nalle", "tralle" ] } } """); FieldValue f = doc.getFieldValue(doc.getField("actualarray")); assertSame(Array.class, f.getClass()); Array<?> a = (Array<?>) f; assertEquals(2, a.size()); assertEquals(new StringFieldValue("nalle"), a.get(0)); assertEquals(new StringFieldValue("tralle"), a.get(1)); } @Test public void testMap() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testmap::whee", "fields": { "actualmap": { "nalle": "kalle", "tralle": "skalle" } } } """); FieldValue f = doc.getFieldValue(doc.getField("actualmap")); assertSame(MapFieldValue.class, f.getClass()); MapFieldValue<?, ?> m = (MapFieldValue<?, ?>) f; assertEquals(2, m.size()); assertEquals(new StringFieldValue("kalle"), m.get(new StringFieldValue("nalle"))); assertEquals(new StringFieldValue("skalle"), m.get(new StringFieldValue("tralle"))); } @Test public void testOldMap() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testmap::whee", "fields": { "actualmap": [ { "key": "nalle", "value": "kalle" }, { "key": "tralle", "value": "skalle" } ] } } """); FieldValue f = doc.getFieldValue(doc.getField("actualmap")); assertSame(MapFieldValue.class, f.getClass()); MapFieldValue<?, ?> m = (MapFieldValue<?, ?>) f; assertEquals(2, m.size()); assertEquals(new StringFieldValue("kalle"), m.get(new StringFieldValue("nalle"))); assertEquals(new StringFieldValue("skalle"), m.get(new StringFieldValue("tralle"))); } @Test public void testPositionPositive() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testsinglepos::bamf", "fields": { "singlepos": "N63.429722;E10.393333" } } """); FieldValue f = 
doc.getFieldValue(doc.getField("singlepos")); assertSame(Struct.class, f.getClass()); assertEquals(10393333, PositionDataType.getXValue(f).getInteger()); assertEquals(63429722, PositionDataType.getYValue(f).getInteger()); } @Test public void testPositionOld() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testsinglepos::bamf", "fields": { "singlepos": { "x": 10393333, "y": 63429722 } } } """); FieldValue f = doc.getFieldValue(doc.getField("singlepos")); assertSame(Struct.class, f.getClass()); assertEquals(10393333, PositionDataType.getXValue(f).getInteger()); assertEquals(63429722, PositionDataType.getYValue(f).getInteger()); } @Test public void testGeoPosition() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testsinglepos::bamf", "fields": { "singlepos": { "lat": 63.429722, "lng": 10.393333 } } } """); FieldValue f = doc.getFieldValue(doc.getField("singlepos")); assertSame(Struct.class, f.getClass()); assertEquals(10393333, PositionDataType.getXValue(f).getInteger()); assertEquals(63429722, PositionDataType.getYValue(f).getInteger()); } @Test public void testGeoPositionNoAbbreviations() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testsinglepos::bamf", "fields": { "singlepos": { "latitude": 63.429722, "longitude": 10.393333 } } } """); FieldValue f = doc.getFieldValue(doc.getField("singlepos")); assertSame(Struct.class, f.getClass()); assertEquals(10393333, PositionDataType.getXValue(f).getInteger()); assertEquals(63429722, PositionDataType.getYValue(f).getInteger()); } @Test public void testPositionGeoPos() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testsinglepos::bamf", "fields": { "geopos": "N63.429722;E10.393333" } } """); FieldValue f = doc.getFieldValue(doc.getField("geopos")); assertSame(Struct.class, f.getClass()); assertEquals(10393333, PositionDataType.getXValue(f).getInteger()); assertEquals(63429722, 
PositionDataType.getYValue(f).getInteger()); assertEquals(f.getDataType(), PositionDataType.INSTANCE); } @Test public void testPositionOldGeoPos() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testsinglepos::bamf", "fields": { "geopos": { "x": 10393333, "y": 63429722 } } } """); FieldValue f = doc.getFieldValue(doc.getField("geopos")); assertSame(Struct.class, f.getClass()); assertEquals(10393333, PositionDataType.getXValue(f).getInteger()); assertEquals(63429722, PositionDataType.getYValue(f).getInteger()); assertEquals(f.getDataType(), PositionDataType.INSTANCE); } @Test public void testGeoPositionGeoPos() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testsinglepos::bamf", "fields": { "geopos": { "lat": 63.429722, "lng": 10.393333 } } } """); FieldValue f = doc.getFieldValue(doc.getField("geopos")); assertSame(Struct.class, f.getClass()); assertEquals(10393333, PositionDataType.getXValue(f).getInteger()); assertEquals(63429722, PositionDataType.getYValue(f).getInteger()); assertEquals(f.getDataType(), PositionDataType.INSTANCE); assertEquals(PositionDataType.INSTANCE, f.getDataType()); } @Test public void testPositionNegative() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testsinglepos::bamf", "fields": { "singlepos": "W46.63;S23.55" } } """); FieldValue f = doc.getFieldValue(doc.getField("singlepos")); assertSame(Struct.class, f.getClass()); assertEquals(-46630000, PositionDataType.getXValue(f).getInteger()); assertEquals(-23550000, PositionDataType.getYValue(f).getInteger()); } @Test public void testRaw() throws IOException { String base64 = new String(new JsonStringEncoder().quoteAsString( Base64.getEncoder().withoutPadding().encodeToString(Utf8.toBytes("smoketest")))); String s = fieldStringFromBase64RawContent(base64); assertEquals("smoketest", s); } @Test public void can_read_legacy_chunked_base64_raw_field_encoding() throws IOException { String expected = "this is a string 
with an impressive length. it's long enough to reach the end of the line, wow!"; String base64withDelims = "dGhpcyBpcyBhIHN0cmluZyB3aXRoIGFuIGltcHJlc3NpdmUgbGVuZ3RoLiBpdCdzIGxvbmcgZW5v\\r\\n" + "dWdoIHRvIHJlYWNoIHRoZSBlbmQgb2YgdGhlIGxpbmUsIHdvdyE=\\r\\n"; assertEquals(expected, fieldStringFromBase64RawContent(base64withDelims)); } private String fieldStringFromBase64RawContent(String base64data) throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testraw::whee", "fields": { "actualraw": "%s" } } """.formatted(base64data)); FieldValue f = doc.getFieldValue(doc.getField("actualraw")); assertSame(Raw.class, f.getClass()); Raw s = (Raw) f; return Utf8.toString(s.getByteBuffer()); } @Test public void testMapStringToArrayOfInt() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testMapStringToArrayOfInt::whee", "fields": { "actualMapStringToArrayOfInt": { "bamse": [1, 2, 3] } } } """); FieldValue f = doc.getFieldValue("actualMapStringToArrayOfInt"); assertSame(MapFieldValue.class, f.getClass()); MapFieldValue<?, ?> m = (MapFieldValue<?, ?>) f; Array<?> a = (Array<?>) m.get(new StringFieldValue("bamse")); assertEquals(3, a.size()); assertEquals(new IntegerFieldValue(1), a.get(0)); assertEquals(new IntegerFieldValue(2), a.get(1)); assertEquals(new IntegerFieldValue(3), a.get(2)); } @Test public void testOldMapStringToArrayOfInt() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testMapStringToArrayOfInt::whee", "fields": { "actualMapStringToArrayOfInt": [ { "key": "bamse", "value": [1, 2, 3] } ] } } """); FieldValue f = doc.getFieldValue("actualMapStringToArrayOfInt"); assertSame(MapFieldValue.class, f.getClass()); MapFieldValue<?, ?> m = (MapFieldValue<?, ?>) f; Array<?> a = (Array<?>) m.get(new StringFieldValue("bamse")); assertEquals(3, a.size()); assertEquals(new IntegerFieldValue(1), a.get(0)); assertEquals(new IntegerFieldValue(2), a.get(1)); assertEquals(new IntegerFieldValue(3), a.get(2)); } 
@Test public void testAssignToString() throws IOException { DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:smoke::whee", "fields": { "something": { "assign": "orOther" } } } """); FieldUpdate f = doc.getFieldUpdate("something"); assertEquals(1, f.size()); AssignValueUpdate a = (AssignValueUpdate) f.getValueUpdate(0); assertEquals(new StringFieldValue("orOther"), a.getValue()); } @Test public void testNestedArrayMatch() throws IOException { DocumentUpdate nested = parseUpdate(""" { "update": "id:unittest:testArrayOfArrayOfInt::whee", "fields": { "arrayOfArrayOfInt": { "match": { "element": 1, "match": { "element": 2, "assign": 3 } } } } } """); DocumentUpdate equivalent = parseUpdate(""" { "update": "id:unittest:testArrayOfArrayOfInt::whee", "fields": { "arrayOfArrayOfInt": { "match": { "match": { "assign": 3, "element": 2 }, "element": 1 } } } } """); assertEquals(nested, equivalent); assertEquals(1, nested.fieldUpdates().size()); FieldUpdate fu = nested.fieldUpdates().iterator().next(); assertEquals(1, fu.getValueUpdates().size()); MapValueUpdate mvu = (MapValueUpdate) fu.getValueUpdate(0); assertEquals(new IntegerFieldValue(1), mvu.getValue()); MapValueUpdate nvu = (MapValueUpdate) mvu.getUpdate(); assertEquals(new IntegerFieldValue(2), nvu.getValue()); AssignValueUpdate avu = (AssignValueUpdate) nvu.getUpdate(); assertEquals(new IntegerFieldValue(3), avu.getValue()); Document doc = docFromJson(""" { "put": "id:unittest:testArrayOfArrayOfInt::whee", "fields": { "arrayOfArrayOfInt": [ [1, 2, 3], [4, 5, 6] ] } } """); nested.applyTo(doc); Document expected = docFromJson(""" { "put": "id:unittest:testArrayOfArrayOfInt::whee", "fields": { "arrayOfArrayOfInt": [ [1, 2, 3], [4, 5, 3] ] } } """); assertEquals(expected, doc); } @Test public void testMatchCannotUpdateNestedFields() { assertEquals("Field type Map<string,Array<int>> not supported.", assertThrows(UnsupportedOperationException.class, () -> parseUpdate(""" { "update": 
"id:unittest:testMapStringToArrayOfInt::whee", "fields": { "actualMapStringToArrayOfInt": { "match": { "element": "bamse", "match": { "element": 1, "assign": 4 } } } } } """)).getMessage()); } @Test public void testMatchCannotAssignToNestedMap() { assertEquals("Field type Map<string,Array<int>> not supported.", assertThrows(UnsupportedOperationException.class, () -> parseUpdate(""" { "update": "id:unittest:testMapStringToArrayOfInt::whee", "fields": { "actualMapStringToArrayOfInt": { "match": { "element": "bamse", "assign": [1, 3, 4] } } } } """)).getMessage()); } @Test public void testMatchCannotAssignToMap() { assertEquals("Field type Map<string,string> not supported.", assertThrows(UnsupportedOperationException.class, () -> parseUpdate(""" { "update": "id:unittest:testmap::whee", "fields": { "actualmap": { "match": { "element": "bamse", "assign": "bar" } } } } """)).getMessage()); } @Test public void testAssignInsideArrayInMap() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testMapStringToArrayOfInt::whee", "fields": { "actualMapStringToArrayOfInt": { "bamse": [1, 2, 3] } } }"""); assertEquals(2, ((MapFieldValue<StringFieldValue, Array<IntegerFieldValue>>) doc.getFieldValue("actualMapStringToArrayOfInt")) .get(StringFieldValue.getFactory().create("bamse")).get(1).getInteger()); DocumentUpdate update = parseUpdate(""" { "update": "id:unittest:testMapStringToArrayOfInt::whee", "fields": { "actualMapStringToArrayOfInt{bamse}[1]": { "assign": 4 } } } """); assertEquals(1, update.fieldPathUpdates().size()); update.applyTo(doc); assertEquals(4, ((MapFieldValue<StringFieldValue, Array<IntegerFieldValue>>) doc.getFieldValue("actualMapStringToArrayOfInt")) .get(StringFieldValue.getFactory().create("bamse")).get(1).getInteger()); } @Test public void testAssignToArray() throws IOException { DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:testMapStringToArrayOfInt::whee", "fields": { "actualMapStringToArrayOfInt": { "assign": { 
"bamse": [1, 2, 3] } } } } """); FieldUpdate f = doc.getFieldUpdate("actualMapStringToArrayOfInt"); assertEquals(1, f.size()); AssignValueUpdate assign = (AssignValueUpdate) f.getValueUpdate(0); MapFieldValue<?, ?> m = (MapFieldValue<?, ?>) assign.getValue(); Array<?> a = (Array<?>) m.get(new StringFieldValue("bamse")); assertEquals(3, a.size()); assertEquals(new IntegerFieldValue(1), a.get(0)); assertEquals(new IntegerFieldValue(2), a.get(1)); assertEquals(new IntegerFieldValue(3), a.get(2)); } @Test public void testOldAssignToArray() throws IOException { DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:testMapStringToArrayOfInt::whee", "fields": { "actualMapStringToArrayOfInt": { "assign": [ { "key": "bamse", "value": [1, 2, 3] } ] } } } """); FieldUpdate f = doc.getFieldUpdate("actualMapStringToArrayOfInt"); assertEquals(1, f.size()); AssignValueUpdate assign = (AssignValueUpdate) f.getValueUpdate(0); MapFieldValue<?, ?> m = (MapFieldValue<?, ?>) assign.getValue(); Array<?> a = (Array<?>) m.get(new StringFieldValue("bamse")); assertEquals(3, a.size()); assertEquals(new IntegerFieldValue(1), a.get(0)); assertEquals(new IntegerFieldValue(2), a.get(1)); assertEquals(new IntegerFieldValue(3), a.get(2)); } @Test public void testAssignToWeightedSet() throws IOException { DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:testset::whee", "fields": { "actualset": { "assign": { "person": 37, "another person": 41 } } } } """); FieldUpdate x = doc.getFieldUpdate("actualset"); assertEquals(1, x.size()); AssignValueUpdate assign = (AssignValueUpdate) x.getValueUpdate(0); WeightedSet<?> w = (WeightedSet<?>) assign.getValue(); assertEquals(2, w.size()); assertEquals(Integer.valueOf(37), w.get(new StringFieldValue("person"))); assertEquals(Integer.valueOf(41), w.get(new StringFieldValue("another person"))); } @Test public void testCompleteFeed() { JsonReader r = createReader(""" [ { "put": "id:unittest:smoke::whee", "fields": { "something": "smoketest", 
"flag": true, "nalle": "bamse" } }, { "update": "id:unittest:testarray::whee", "fields": { "actualarray": { "add": [ "person", "another person" ] } } }, { "remove": "id:unittest:smoke::whee" } ] """); controlBasicFeed(r); } @Test public void testCompleteFeedWithCreateAndCondition() { JsonReader r = createReader(""" [ { "put": "id:unittest:smoke::whee", "fields": { "something": "smoketest", "flag": true, "nalle": "bamse" } }, { "condition":"bla", "update": "id:unittest:testarray::whee", "create":true, "fields": { "actualarray": { "add": [ "person", "another person" ] } } }, { "remove": "id:unittest:smoke::whee" } ] """); DocumentOperation d = r.next(); Document doc = ((DocumentPut) d).getDocument(); smokeTestDoc(doc); d = r.next(); DocumentUpdate update = (DocumentUpdate) d; checkSimpleArrayAdd(update); assertTrue(update.getCreateIfNonExistent()); assertEquals("bla", update.getCondition().getSelection()); d = r.next(); DocumentRemove remove = (DocumentRemove) d; assertEquals("smoke", remove.getId().getDocType()); assertNull(r.next()); } @Test public void testUpdateWithConditionAndCreateInDifferentOrdering() { int documentsCreated = 106; List<String> parts = Arrays.asList( "\"condition\":\"bla\"", "\"update\": \"id:unittest:testarray::whee\"", " \"fields\": { " + "\"actualarray\": { \"add\": [" + " \"person\",\"another person\"]}}", " \"create\":true"); Random random = new Random(42); StringBuilder documents = new StringBuilder("["); for (int x = 0; x < documentsCreated; x++) { Collections.shuffle(parts, random); documents.append("{").append(Joiner.on(",").join(parts)).append("}"); if (x < documentsCreated -1) { documents.append(","); } } documents.append("]"); InputStream rawDoc = new ByteArrayInputStream(Utf8.toBytes(documents.toString())); JsonReader r = new JsonReader(types, rawDoc, parserFactory); for (int x = 0; x < documentsCreated; x++) { DocumentUpdate update = (DocumentUpdate) r.next(); checkSimpleArrayAdd(update); 
// NOTE(review): the statements below complete a test method whose beginning lies
// before this chunk; kept verbatim.
assertTrue(update.getCreateIfNonExistent()); assertEquals("bla", update.getCondition().getSelection()); } assertNull(r.next()); }

/** A top-level "create": true flag on a put must be carried over to the DocumentPut. */
@Test
public void testCreateIfNonExistentInPut() {
    JsonReader r = createReader("""
            [ { "create":true, "fields": { "something": "smoketest", "nalle": "bamse" }, "put": "id:unittest:smoke::whee" } ]
            """);
    var op = r.next();
    var put = (DocumentPut) op;
    assertTrue(put.getCreateIfNonExistent());
}

/** The operation key ("put"/"update") may come after the "fields" object; the feed must still parse. */
@Test
public void testCompleteFeedWithIdAfterFields() {
    JsonReader r = createReader("""
            [ { "fields": { "something": "smoketest", "flag": true, "nalle": "bamse" }, "put": "id:unittest:smoke::whee" },
              { "fields": { "actualarray": { "add": [ "person", "another person" ] } }, "update": "id:unittest:testarray::whee" },
              { "remove": "id:unittest:smoke::whee" } ]
            """);
    controlBasicFeed(r);
}

/** Consumes a three-operation feed (put, update, remove) and validates each operation in order. */
protected void controlBasicFeed(JsonReader r) {
    DocumentOperation d = r.next();
    Document doc = ((DocumentPut) d).getDocument();
    smokeTestDoc(doc);
    d = r.next();
    DocumentUpdate update = (DocumentUpdate) d;
    checkSimpleArrayAdd(update);
    d = r.next();
    DocumentRemove remove = (DocumentRemove) d;
    assertEquals("smoke", remove.getId().getDocType());
    assertNull(r.next()); // feed must be exhausted after the three operations
}

/** Operations with empty "fields" maps are legal and yield empty documents/updates. */
@Test
public void testCompleteFeedWithEmptyDoc() {
    JsonReader r = createReader("""
            [ { "put": "id:unittest:smoke::whee", "fields": {} },
              { "update": "id:unittest:testarray::whee", "fields": {} },
              { "remove": "id:unittest:smoke::whee" } ]
            """);
    DocumentOperation d = r.next();
    Document doc = ((DocumentPut) d).getDocument();
    assertEquals("smoke", doc.getId().getDocType());
    d = r.next();
    DocumentUpdate update = (DocumentUpdate) d;
    assertEquals("testarray", update.getId().getDocType());
    d = r.next();
    DocumentRemove remove = (DocumentRemove) d;
    assertEquals("smoke", remove.getId().getDocType());
    assertNull(r.next());
}

/** Asserts that 'update' adds exactly "person" and "another person" to field 'actualarray'. */
private void checkSimpleArrayAdd(DocumentUpdate update) {
    Set<String> toAdd = new HashSet<>();
    FieldUpdate x = update.getFieldUpdate("actualarray");
    for (ValueUpdate<?> v : x.getValueUpdates()) {
        AddValueUpdate adder = (AddValueUpdate) v;
        toAdd.add(((StringFieldValue) adder.getValue()).getString());
    }
    assertEquals(2, toAdd.size());
    assertTrue(toAdd.contains("person"));
    assertTrue(toAdd.contains("another person"));
}

/** Common assertions for a parsed 'smoke' document: bool field 'flag' is true, string 'nalle' is "bamse". */
private void smokeTestDoc(Document doc) {
    FieldValue boolField = doc.getFieldValue(doc.getField("flag"));
    assertSame(BoolFieldValue.class, boolField.getClass());
    assertTrue((Boolean)boolField.getWrappedValue());
    FieldValue stringField = doc.getFieldValue(doc.getField("nalle"));
    assertSame(StringFieldValue.class, stringField.getClass());
    assertEquals("bamse", ((StringFieldValue) stringField).getString());
}

/** Referencing an unknown field in a put must throw IllegalArgumentException naming the field. */
@Test
public void nonExistingFieldCausesException() throws IOException {
    Exception expected = assertThrows(IllegalArgumentException.class,
                                      () -> docFromJson("""
            { "put": "id:unittest:smoke::whee", "fields": { "smething": "smoketest", "nalle": "bamse" } }
            """));
    assertTrue(expected.getMessage().startsWith("No field 'smething' in the structure of type 'smoke'"));
}

/**
 * With VespaJsonDocumentReader(true) (ignore-undefined-fields mode), unknown fields in a put are
 * skipped — including structured (tensor-like) payloads — while known fields are applied;
 * readPut then reports the put as not fully applied.
 */
@Test
public void nonExistingFieldsCanBeIgnoredInPut() throws IOException {
    JsonReader r = createReader("""
            { "put": "id:unittest:smoke::doc1", "fields": { "nonexisting1": "ignored value", "field1": "value1", "nonexisting2": { "blocks": { "a": [2.0, 3.0], "b": [4.0, 5.0] } }, "field2": "value2", "nonexisting3": { "cells": [ { "address": { "x": "x1" }, "value": 1.0 } ] }, "tensor1": { "cells": { "x1": 1.0 } }, "nonexisting4": "ignored value" } }
            """);
    DocumentParseInfo parseInfo = r.parseDocument().get();
    DocumentType docType = r.readDocumentType(parseInfo.documentId);
    DocumentPut put = new DocumentPut(new Document(docType, parseInfo.documentId));
    boolean fullyApplied = new VespaJsonDocumentReader(true).readPut(parseInfo.fieldsBuffer, put);
    assertFalse(fullyApplied); // some fields were dropped
    assertNull(put.getDocument().getField("nonexisting1"));
    assertEquals("value1", put.getDocument().getFieldValue("field1").toString());
    assertNull(put.getDocument().getField("nonexisting2"));
    assertEquals("value2", put.getDocument().getFieldValue("field2").toString());
    assertNull(put.getDocument().getField("nonexisting3"));
    assertEquals(Tensor.from("tensor(x{}):{{x:x1}:1.0}"),
                 put.getDocument().getFieldValue("tensor1").getWrappedValue());
    assertNull(put.getDocument().getField("nonexisting4"));
}

/** Same ignore-undefined-fields behavior as above, but for update operations ("assign" value updates). */
@Test
public void nonExistingFieldsCanBeIgnoredInUpdate() throws IOException{
    JsonReader r = createReader("""
            { "update": "id:unittest:smoke::doc1", "fields": { "nonexisting1": { "assign": "ignored value" }, "field1": { "assign": "value1" }, "nonexisting2": { "assign": { "blocks": { "a":[2.0,3.0], "b":[4.0,5.0] } } }, "field2": { "assign": "value2" }, "nonexisting3": { "assign" : { "cells": [{"address": {"x": "x1"}, "value": 1.0}] } }, "tensor1": {"assign": { "cells": {"x1": 1.0} } }, "nonexisting4": { "assign": "ignored value" } } }
            """);
    DocumentParseInfo parseInfo = r.parseDocument().get();
    DocumentType docType = r.readDocumentType(parseInfo.documentId);
    DocumentUpdate update = new DocumentUpdate(docType, parseInfo.documentId);
    boolean fullyApplied = new VespaJsonDocumentReader(true).readUpdate(parseInfo.fieldsBuffer, update);
    assertFalse(fullyApplied);
    assertNull(update.getFieldUpdate("nonexisting1"));
    assertEquals("value1",
                 update.getFieldUpdate("field1").getValueUpdates().get(0).getValue().getWrappedValue().toString());
    assertNull(update.getFieldUpdate("nonexisting2"));
    assertEquals("value2",
                 update.getFieldUpdate("field2").getValueUpdates().get(0).getValue().getWrappedValue().toString());
    assertNull(update.getFieldUpdate("nonexisting3"));
    assertEquals(Tensor.from("tensor(x{}):{{x:x1}:1.0}"),
                 update.getFieldUpdate("tensor1").getValueUpdates().get(0).getValue().getWrappedValue());
    assertNull(update.getFieldUpdate("nonexisting4"));
}

/** A malformed feed (trailing comma before ']') must surface as a RuntimeException mentioning JsonParseException. */
@Test
public void feedWithBasicErrorTest() {
    JsonReader r = createReader("""
            [ { "put": "id:test:smoke::0", "fields": { "something": "foo" } },
              { "put": "id:test:smoke::1", "fields": { "something": "foo" } },
              { "put": "id:test:smoke::2", "fields": { "something": "foo" } },
            ]""");
    assertTrue(assertThrows(RuntimeException.class, () -> { while (r.next() != null); })
                       .getMessage().contains("JsonParseException"));
}

/** "id" is accepted as an alias for "put" when reading a single document. */
@Test
public void idAsAliasForPutTest() throws IOException{
    JsonReader r = createReader("""
            { "id": "id:unittest:smoke::doc1", "fields": { "something": "smoketest", "flag": true, "nalle": "bamse" } }
            """);
    DocumentParseInfo parseInfo = r.parseDocument().get();
    DocumentType docType = r.readDocumentType(parseInfo.documentId);
    DocumentPut put = new DocumentPut(new Document(docType, parseInfo.documentId));
    boolean fullyApplied = new VespaJsonDocumentReader(false).readPut(parseInfo.fieldsBuffer, put);
    assertTrue(fullyApplied);
    smokeTestDoc(put.getDocument());
}

/**
 * Helper: parses a three-operation feed and asserts every operation carries the same
 * test-and-set condition as given in the JSON.
 */
private void testFeedWithTestAndSetCondition(String jsonDoc) {
    ByteArrayInputStream parseInfoDoc = new ByteArrayInputStream(Utf8.toBytes(jsonDoc));
    JsonReader reader = new JsonReader(types, parseInfoDoc, parserFactory);
    int NUM_OPERATIONS_IN_FEED = 3;
    for (int i = 0; i < NUM_OPERATIONS_IN_FEED; i++) {
        DocumentOperation operation = reader.next();
        assertTrue("A test and set condition should be present", operation.getCondition().isPresent());
        assertEquals("DocumentOperation's test and set condition should be equal to the one in the JSON feed",
                     "smoke.something == \"smoketest\"",
                     operation.getCondition().getSelection());
    }
    assertNull(reader.next());
}

/** "condition" after the operation key and before "fields". */
@Test
public void testFeedWithTestAndSetConditionOrderingOne() {
    testFeedWithTestAndSetCondition("""
            [ { "put": "id:unittest:smoke::whee", "condition": "smoke.something == \\"smoketest\\"", "fields": { "something": "smoketest", "nalle": "bamse" } },
              { "update": "id:unittest:testarray::whee", "condition": "smoke.something == \\"smoketest\\"", "fields": { "actualarray": { "add": [ "person", "another person" ] } } },
              { "remove": "id:unittest:smoke::whee", "condition": "smoke.something == \\"smoketest\\"" } ]
            """);
}

/** "condition" first, before the operation key. */
@Test
public void testFeedWithTestAndSetConditionOrderingTwo() {
    testFeedWithTestAndSetCondition("""
            [ { "condition": "smoke.something == \\"smoketest\\"", "put": "id:unittest:smoke::whee", "fields": { "something": "smoketest", "nalle": "bamse" } },
              { "condition": "smoke.something == \\"smoketest\\"", "update": "id:unittest:testarray::whee", "fields": { "actualarray": { "add": [ "person", "another person" ] } } },
              { "condition": "smoke.something == \\"smoketest\\"", "remove": "id:unittest:smoke::whee" } ]
            """);
}

/** "condition" last, after "fields". */
@Test
public void testFeedWithTestAndSetConditionOrderingThree() {
    testFeedWithTestAndSetCondition("""
            [ { "put": "id:unittest:smoke::whee", "fields": { "something": "smoketest", "nalle": "bamse" }, "condition": "smoke.something == \\"smoketest\\"" },
              { "update": "id:unittest:testarray::whee", "fields": { "actualarray": { "add": [ "person", "another person" ] } }, "condition": "smoke.something == \\"smoketest\\"" },
              { "remove": "id:unittest:smoke::whee", "condition": "smoke.something == \\"smoketest\\"" } ]
            """);
}

/** An unrecognized top-level member after "fields" must fail parsing. */
@Test(expected = IllegalArgumentException.class)
public void testInvalidFieldAfterFieldsFieldShouldFailParse() {
    String jsonData = """
            [ { "put": "id:unittest:smoke::whee", "fields": { "something": "smoketest", "nalle": "bamse" }, "bjarne": "stroustrup" } ]""";
    new JsonReader(types, jsonToInputStream(jsonData), parserFactory).next();
}

/** An unrecognized top-level member before "fields" must fail parsing. */
@Test(expected = IllegalArgumentException.class)
public void testInvalidFieldBeforeFieldsFieldShouldFailParse() {
    String jsonData = """
            [ { "update": "id:unittest:testarray::whee", "what is this": "nothing to see here", "fields": { "actualarray": { "add": [ "person", "another person" ] } } } ]""";
    new JsonReader(types, jsonToInputStream(jsonData), parserFactory).next();
}

/** An unrecognized member (with deliberately broken JSON) on an operation without "fields" must fail parsing. */
@Test(expected = IllegalArgumentException.class)
public void testInvalidFieldWithoutFieldsFieldShouldFailParse() {
    String jsonData = """
            [ { "remove": "id:unittest:smoke::whee", "what is love": "baby, do not hurt me...
            much } ]""";
    new JsonReader(types, jsonToInputStream(jsonData), parserFactory).next();
}

/** A feed entry with "fields" but no operation key is rejected with a specific message. */
@Test
public void testMissingOperation() {
    try {
        String jsonData = """
                [ { "fields": { "actualarray": { "add": [ "person", "another person" ] } } } ]""";
        new JsonReader(types, jsonToInputStream(jsonData), parserFactory).next();
        fail("Expected exception");
    }
    catch (IllegalArgumentException e) {
        assertEquals("Missing a document operation ('put', 'update' or 'remove')", e.getMessage());
    }
}

/** A put without a "fields" map is rejected with a specific message. */
@Test
public void testMissingFieldsMapInPut() {
    try {
        String jsonData = """
                [ { "put": "id:unittest:smoke::whee" } ]""";
        new JsonReader(types, jsonToInputStream(jsonData), parserFactory).next();
        fail("Expected exception");
    }
    catch (IllegalArgumentException e) {
        assertEquals("put of document id:unittest:smoke::whee is missing a 'fields' map", e.getMessage());
    }
}

/** An update without a "fields" map is rejected with a specific message. */
// NOTE(review): message casing differs from the put case ("Update" vs "put") — presumably
// mirrors the production messages; confirm against VespaJsonDocumentReader before "fixing".
@Test
public void testMissingFieldsMapInUpdate() {
    try {
        String jsonData = """
                [ { "update": "id:unittest:smoke::whee" } ]""";
        new JsonReader(types, jsonToInputStream(jsonData), parserFactory).next();
        fail("Expected exception");
    }
    catch (IllegalArgumentException e) {
        assertEquals("Update of document id:unittest:smoke::whee is missing a 'fields' map", e.getMessage());
    }
}

/** JSON null for a field of any type yields an unset (null) field value. */
@Test
public void testNullValues() throws IOException {
    Document doc = docFromJson("""
            { "put": "id:unittest:testnull::doc1", "fields": { "intfield": null, "stringfield": null, "arrayfield": null, "weightedsetfield": null, "mapfield": null, "tensorfield": null } }
            """);
    assertFieldValueNull(doc, "intfield");
    assertFieldValueNull(doc, "stringfield");
    assertFieldValueNull(doc, "arrayfield");
    assertFieldValueNull(doc, "weightedsetfield");
    assertFieldValueNull(doc, "mapfield");
    assertFieldValueNull(doc, "tensorfield");
}

/** A null element inside an array is illegal and must raise JsonReaderException. */
@Test(expected=JsonReaderException.class)
public void testNullArrayElement() throws IOException {
    docFromJson("""
            { "put": "id:unittest:testnull::doc1", "fields": { "arrayfield": [ null ] } }
            """);
    fail();
}

/** Asserts the field exists in the document type but has no value set. */
private void assertFieldValueNull(Document doc, String fieldName) {
    Field field = doc.getField(fieldName);
    assertNotNull(field);
    FieldValue fieldValue = doc.getFieldValue(field);
    assertNull(fieldValue);
}

/** Wraps a JSON string as a UTF-8 input stream. */
static ByteArrayInputStream jsonToInputStream(String json) {
    return new ByteArrayInputStream(Utf8.toBytes(json));
}

/** A put that omits a tensor field leaves that field unset. */
@Test
public void testParsingWithoutTensorField() {
    Document doc = createPutWithoutTensor().getDocument();
    assertEquals("testtensor", doc.getId().getDocType());
    assertEquals("id:unittest:testtensor::0", doc.getId().toString());
    TensorFieldValue fieldValue = (TensorFieldValue)doc.getFieldValue(doc.getField("sparse_tensor"));
    assertNull(fieldValue);
}

/** "{}" parses to the empty tensor of the field's type. */
@Test
public void testParsingOfEmptyTensor() {
    assertSparseTensorField("tensor(x{},y{}):{}", createPutWithSparseTensor("{}"));
}

/** An empty "cells" array parses to the empty tensor. */
@Test
public void testParsingOfTensorWithEmptyCells() {
    assertSparseTensorField("tensor(x{},y{}):{}", createPutWithSparseTensor(inputJson("{ 'cells': [] }")));
}

/** Dense short form with no values is rejected (both array and hex-string variants). */
@Test
public void testDisallowedDenseTensorShortFormWithoutValues() {
    assertCreatePutFails(inputJson("{ 'values': [] }"), "dense_tensor",
                         "The 'values' array does not contain any values");
    assertCreatePutFails(inputJson("{ 'values': '' }"), "dense_tensor",
                         "The 'values' string does not contain any values");
}

/** Mixed short form blocks with too few values are rejected. */
@Test
public void testDisallowedMixedTensorShortFormWithoutValues() {
    assertCreatePutFails(inputJson("{\"blocks\":{ \"a\": [] } }"), "mixed_tensor",
                         "Expected 3 values, but got 0");
    assertCreatePutFails(inputJson("{\"blocks\":[ {\"address\":{\"x\":\"a\"}, \"values\": [] } ] }"), "mixed_tensor",
                         "Expected 3 values, but got 0");
}

/** Verbose "cells" form for a sparse tensor yields a MappedTensor with the given cells. */
@Test
public void testParsingOfSparseTensorWithCells() {
    Tensor tensor = assertSparseTensorField("{{x:a,y:b}:2.0,{x:c,y:b}:3.0}}",
                                            createPutWithSparseTensor("""
            { "type": "tensor(x{},y{})", "cells": [ { "address": { "x": "a", "y": "b" }, "value": 2.0 }, { "address": { "x": "c", "y": "b" }, "value": 3.0 } ] }
            """));
    assertTrue(tensor instanceof MappedTensor);
}
/**
 * Functional tests for JsonReader: parsing of document puts, updates and removes
 * from the Vespa document JSON feed format into Document/DocumentUpdate instances.
 * NOTE(review): the class body continues past the end of this chunk; the final
 * method below is truncated and kept verbatim.
 */
class JsonReaderTestCase {

    private DocumentTypeManager types;  // registry holding every document type the test JSON refers to
    private JsonFactory parserFactory;  // Jackson factory used to construct the underlying JSON parser

    /** Registers all document types used by the tests, one scoped block per type. */
    @Before
    public void setUp() throws Exception {
        parserFactory = new JsonFactory();
        types = new DocumentTypeManager();
        {   // 'smoke': plain string/int/bool fields plus a mapped tensor
            DocumentType x = new DocumentType("smoke");
            x.addField(new Field("something", DataType.STRING));
            x.addField(new Field("nalle", DataType.STRING));
            x.addField(new Field("field1", DataType.STRING));
            x.addField(new Field("field2", DataType.STRING));
            x.addField(new Field("int1", DataType.INT));
            x.addField(new Field("flag", DataType.BOOL));
            x.addField(new Field("tensor1", DataType.getTensor(TensorType.fromSpec("tensor(x{})"))));
            types.registerDocumentType(x);
        }
        {   // 'mirrors': a struct-typed field
            DocumentType x = new DocumentType("mirrors");
            StructDataType woo = new StructDataType("woo");
            woo.addField(new Field("sandra", DataType.STRING));
            woo.addField(new Field("cloud", DataType.STRING));
            x.addField(new Field("skuggsjaa", woo));
            types.registerDocumentType(x);
        }
        {   // 'testarray': array of string
            DocumentType x = new DocumentType("testarray");
            DataType d = new ArrayDataType(DataType.STRING);
            x.addField(new Field("actualarray", d));
            types.registerDocumentType(x);
        }
        {   // 'testset': weighted set of string
            DocumentType x = new DocumentType("testset");
            DataType d = new WeightedSetDataType(DataType.STRING, true, true);
            x.addField(new Field("actualset", d));
            types.registerDocumentType(x);
        }
        {   // 'testmap': map of string to string
            DocumentType x = new DocumentType("testmap");
            DataType d = new MapDataType(DataType.STRING, DataType.STRING);
            x.addField(new Field("actualmap", d));
            types.registerDocumentType(x);
        }
        {   // 'testraw': raw (byte) field
            DocumentType x = new DocumentType("testraw");
            DataType d = DataType.RAW;
            x.addField(new Field("actualraw", d));
            types.registerDocumentType(x);
        }
        {   // 'testMapStringToArrayOfInt': map of string to array of int
            DocumentType x = new DocumentType("testMapStringToArrayOfInt");
            DataType value = new ArrayDataType(DataType.INT);
            DataType d = new MapDataType(DataType.STRING, value);
            x.addField(new Field("actualMapStringToArrayOfInt", d));
            types.registerDocumentType(x);
        }
        {   // 'testArrayOfArrayOfInt': nested int arrays
            DocumentType x = new DocumentType("testArrayOfArrayOfInt");
            DataType inner = new ArrayDataType(DataType.INT);
            DataType outer = new ArrayDataType(inner);
            x.addField(new Field("arrayOfArrayOfInt", outer));
            types.registerDocumentType(x);
        }
        {   // 'testsinglepos': legacy position field plus geo-position field
            DocumentType x = new DocumentType("testsinglepos");
            DataType d = PositionDataType.INSTANCE;
            x.addField(new Field("singlepos", d));
            x.addField(new Field("geopos", new GeoPosType(8)));
            types.registerDocumentType(x);
        }
        {   // 'testtensor': one field per tensor flavor exercised by the tensor tests
            DocumentType x = new DocumentType("testtensor");
            x.addField(new Field("sparse_single_dimension_tensor",
                                 new TensorDataType(new TensorType.Builder().mapped("x").build())));
            x.addField(new Field("sparse_tensor",
                                 new TensorDataType(new TensorType.Builder().mapped("x").mapped("y").build())));
            x.addField(new Field("dense_tensor",
                                 new TensorDataType(new TensorType.Builder().indexed("x", 2).indexed("y", 3).build())));
            x.addField(new Field("dense_int8_tensor",
                                 new TensorDataType(TensorType.fromSpec("tensor<int8>(x[2],y[3])"))));
            x.addField(new Field("dense_unbound_tensor",
                                 new TensorDataType(new TensorType.Builder().indexed("x").indexed("y").build())));
            x.addField(new Field("mixed_tensor",
                                 new TensorDataType(new TensorType.Builder().mapped("x").indexed("y", 3).build())));
            x.addField(new Field("mixed_bfloat16_tensor",
                                 new TensorDataType(TensorType.fromSpec("tensor<bfloat16>(x{},y[3])"))));
            x.addField(new Field("mixed_tensor_adv",
                                 new TensorDataType(new TensorType.Builder().mapped("x").mapped("y").mapped("z").indexed("a", 3).build())));
            types.registerDocumentType(x);
        }
        {   // 'testpredicate': predicate field
            DocumentType x = new DocumentType("testpredicate");
            x.addField(new Field("boolean", DataType.PREDICATE));
            types.registerDocumentType(x);
        }
        {   // 'testint': single int field
            DocumentType x = new DocumentType("testint");
            x.addField(new Field("integerfield", DataType.INT));
            types.registerDocumentType(x);
        }
        {   // 'testnull': one field of each kind, used by the null-value tests
            DocumentType x = new DocumentType("testnull");
            x.addField(new Field("intfield", DataType.INT));
            x.addField(new Field("stringfield", DataType.STRING));
            x.addField(new Field("arrayfield", new ArrayDataType(DataType.STRING)));
            x.addField(new Field("weightedsetfield", new WeightedSetDataType(DataType.STRING, true, true)));
            x.addField(new Field("mapfield", new MapDataType(DataType.STRING, DataType.STRING)));
            x.addField(new Field("tensorfield", new TensorDataType(new TensorType.Builder().indexed("x").build())));
            types.registerDocumentType(x);
        }
    }

    /** Drops fixture references so each test starts from a fresh setUp(). */
    @After
    public void tearDown() throws Exception {
        types = null;
        parserFactory = null;
    }

    /** Builds a JsonReader over the given JSON text (UTF-8). */
    private JsonReader createReader(String jsonInput) {
        InputStream input = new ByteArrayInputStream(Utf8.toBytes(jsonInput));
        return new JsonReader(types, input, parserFactory);
    }

    /** A single (non-array) put document parses and passes the smoke-test assertions. */
    @Test
    public void readSingleDocumentPut() throws IOException {
        Document doc = docFromJson("""
                { "put": "id:unittest:smoke::doc1", "fields": { "something": "smoketest", "flag": true, "nalle": "bamse" } }
                """);
        smokeTestDoc(doc);
    }

    /** A single update with one "assign" becomes one AssignValueUpdate. */
    @Test
    public final void readSingleDocumentUpdate() throws IOException {
        DocumentUpdate doc = parseUpdate("""
                { "update": "id:unittest:smoke::whee", "fields": { "something": { "assign": "orOther" } } }
                """);
        FieldUpdate f = doc.getFieldUpdate("something");
        assertEquals(1, f.size());
        assertTrue(f.getValueUpdate(0) instanceof AssignValueUpdate);
        assertEquals(new StringFieldValue("orOther"), f.getValueUpdate(0).getValue());
    }

    /** Assigning JSON null clears the field: produces a ClearValueUpdate with no value. */
    @Test
    public void readClearField() throws IOException {
        DocumentUpdate doc = parseUpdate("""
                { "update": "id:unittest:smoke::whee", "fields": { "int1": { "assign": null } } }
                """);
        FieldUpdate f = doc.getFieldUpdate("int1");
        assertEquals(1, f.size());
        assertTrue(f.getValueUpdate(0) instanceof ClearValueUpdate);
        assertNull(f.getValueUpdate(0).getValue());
    }

    /** Basic end-to-end put parse. */
    @Test
    public void smokeTest() throws IOException {
        Document doc = docFromJson("""
                { "put": "id:unittest:smoke::doc1", "fields": { "something": "smoketest", "flag": true, "nalle": "bamse" } }
                """);
        smokeTestDoc(doc);
    }

    /** Same payload as smokeTest; exercises document-id lookahead during parsing. */
    @Test
    public void docIdLookaheadTest() throws IOException {
        Document doc = docFromJson("""
                { "put": "id:unittest:smoke::doc1", "fields": { "something": "smoketest", "flag": true, "nalle": "bamse" } }
                """);
        smokeTestDoc(doc);
    }

    /** An empty "fields" map parses to a document equal to a freshly constructed empty one. */
    @Test
    public void emptyDocTest() throws IOException {
        Document doc = docFromJson("""
                { "put": "id:unittest:smoke::whee", "fields": { } }""");
        assertEquals(new Document(types.getDocumentType("smoke"), new DocumentId("id:unittest:smoke::whee")), doc);
    }

    /** A JSON object for a struct field parses into a Struct with its members set. */
    @Test
    public void testStruct() throws IOException {
        Document doc = docFromJson("""
                { "put": "id:unittest:mirrors::whee", "fields": { "skuggsjaa": { "sandra": "person", "cloud": "another person" } } }
                """);
        FieldValue f = doc.getFieldValue(doc.getField("skuggsjaa"));
        assertSame(Struct.class, f.getClass());
        Struct s = (Struct) f;
        assertEquals("person", ((StringFieldValue) s.getFieldValue("sandra")).getString());
    }

    /** Parses a single update document through JsonReader + VespaJsonDocumentReader. */
    private DocumentUpdate parseUpdate(String json) throws IOException {
        InputStream rawDoc = new ByteArrayInputStream(Utf8.toBytes(json));
        JsonReader r = new JsonReader(types, rawDoc, parserFactory);
        DocumentParseInfo parseInfo = r.parseDocument().get();
        DocumentType docType = r.readDocumentType(parseInfo.documentId);
        DocumentUpdate update = new DocumentUpdate(docType, parseInfo.documentId);
        new VespaJsonDocumentReader(false).readUpdate(parseInfo.fieldsBuffer, update);
        return update;
    }

    /** Assigning a whole struct in an update, then round-trips through the v6 serializer. */
    @Test
    public void testStructUpdate() throws IOException {
        DocumentUpdate put = parseUpdate("""
                { "update": "id:unittest:mirrors:g=test:whee", "create": true, "fields": { "skuggsjaa": { "assign": { "sandra": "person", "cloud": "another person" } } } }
                """);
        assertEquals(1, put.fieldUpdates().size());
        FieldUpdate fu = put.fieldUpdates().iterator().next();
        assertEquals(1, fu.getValueUpdates().size());
        ValueUpdate vu = fu.getValueUpdate(0);
        assertTrue(vu instanceof AssignValueUpdate);
        AssignValueUpdate avu = (AssignValueUpdate) vu;
        assertTrue(avu.getValue() instanceof Struct);
        Struct s = (Struct) avu.getValue();
        assertEquals(2, s.getFieldCount());
        assertEquals(new StringFieldValue("person"), s.getFieldValue(s.getField("sandra")));
        GrowableByteBuffer buf = new GrowableByteBuffer();
        DocumentSerializer serializer = DocumentSerializerFactory.create6(buf);
        put.serialize(serializer);
        // Pinned serialized size guards against accidental wire-format changes.
        assertEquals(107, buf.position());
    }

    /** Assigning an empty struct is legal; serialized size is pinned as well. */
    @Test
    public final void testEmptyStructUpdate() throws IOException {
        DocumentUpdate put = parseUpdate("""
                { "update": "id:unittest:mirrors:g=test:whee", "create": true, "fields": { "skuggsjaa": { "assign": { } } } }
                """);
        assertEquals(1, put.fieldUpdates().size());
        FieldUpdate fu = put.fieldUpdates().iterator().next();
        assertEquals(1, fu.getValueUpdates().size());
        ValueUpdate vu = fu.getValueUpdate(0);
        assertTrue(vu instanceof AssignValueUpdate);
        AssignValueUpdate avu = (AssignValueUpdate) vu;
        assertTrue(avu.getValue() instanceof Struct);
        Struct s = (Struct) avu.getValue();
        assertEquals(0, s.getFieldCount());
        GrowableByteBuffer buf = new GrowableByteBuffer();
        DocumentSerializer serializer = DocumentSerializerFactory.create6(buf);
        put.serialize(serializer);
        assertEquals(69, buf.position());
    }

    /** "add" on an array field yields AddValueUpdates (checked by checkSimpleArrayAdd). */
    @Test
    public void testUpdateArray() throws IOException {
        DocumentUpdate doc = parseUpdate("""
                { "update": "id:unittest:testarray::whee", "fields": { "actualarray": { "add": [ "person", "another person" ] } } }
                """);
        checkSimpleArrayAdd(doc);
    }

    /** "add" with a weight map on a weighted set yields weighted AddValueUpdates. */
    @Test
    public void testUpdateWeighted() throws IOException {
        DocumentUpdate doc = parseUpdate("""
                { "update": "id:unittest:testset::whee", "fields": { "actualset": { "add": { "person": 37, "another person": 41 } } } }
                """);
        Map<String, Integer> weights = new HashMap<>();
        FieldUpdate x = doc.getFieldUpdate("actualset");
        for (ValueUpdate<?> v : x.getValueUpdates()) {
            AddValueUpdate adder = (AddValueUpdate) v;
            final String s = ((StringFieldValue) adder.getValue()).getString();
            weights.put(s, adder.getWeight());
        }
        assertEquals(2, weights.size());
        final String o = "person";
        final String o2 = "another person";
        assertTrue(weights.containsKey(o));
        assertTrue(weights.containsKey(o2));
        assertEquals(Integer.valueOf(37), weights.get(o));
        assertEquals(Integer.valueOf(41), weights.get(o2));
    }

    /** "match" + "increment" produces an arithmetic MapValueUpdate; member order is irrelevant. */
    @Test
    public void testUpdateMatch() throws IOException {
        DocumentUpdate doc = parseUpdate("""
                { "update": "id:unittest:testset::whee", "fields": { "actualset": { "match": { "element": "person", "increment": 13 } } } }
                """);
        DocumentUpdate otherDoc = parseUpdate("""
                { "update": "id:unittest:testset::whee", "fields": { "actualset": { "match": { "increment": 13, "element": "person" } } } }""");
        assertEquals(doc, otherDoc); // JSON member order must not matter
        Map<String, Tuple2<Number, String>> matches = new HashMap<>();
        FieldUpdate x = doc.getFieldUpdate("actualset");
        for (ValueUpdate<?> v : x.getValueUpdates()) {
            MapValueUpdate adder = (MapValueUpdate) v;
            final String key = ((StringFieldValue) adder.getValue())
                    .getString();
            String op = ((ArithmeticValueUpdate) adder.getUpdate())
                    .getOperator().toString();
            Number n = ((ArithmeticValueUpdate) adder.getUpdate()).getOperand();
            matches.put(key, new Tuple2<>(n, op));
        }
        assertEquals(1, matches.size());
        final String o = "person";
        assertEquals("ADD", matches.get(o).second);
        assertEquals(Double.valueOf(13), matches.get(o).first);
    }

    /** All four arithmetic update keywords map to their ArithmeticValueUpdate operators. */
    @SuppressWarnings({ "cast", "unchecked", "rawtypes" })
    @Test
    public void testArithmeticOperators() throws IOException {
        Tuple2[] operations = new Tuple2[] {
                new Tuple2<>(UPDATE_DECREMENT, ArithmeticValueUpdate.Operator.SUB),
                new Tuple2<>(UPDATE_DIVIDE, ArithmeticValueUpdate.Operator.DIV),
                new Tuple2<>(UPDATE_INCREMENT, ArithmeticValueUpdate.Operator.ADD),
                new Tuple2<>(UPDATE_MULTIPLY, ArithmeticValueUpdate.Operator.MUL) };
        for (Tuple2<String, Operator> operator : operations) {
            DocumentUpdate doc = parseUpdate("""
                    { "update": "id:unittest:testset::whee", "fields": { "actualset": { "match": { "element": "person", "%s": 13 } } } }
                    """.formatted(operator.first));
            Map<String, Tuple2<Number, Operator>> matches = new HashMap<>();
            FieldUpdate x = doc.getFieldUpdate("actualset");
            for (ValueUpdate v : x.getValueUpdates()) {
                MapValueUpdate adder = (MapValueUpdate) v;
                final String key = ((StringFieldValue) adder.getValue())
                        .getString();
                Operator op = ((ArithmeticValueUpdate) adder
                        .getUpdate()).getOperator();
                Number n = ((ArithmeticValueUpdate) adder.getUpdate())
                        .getOperand();
                matches.put(key, new Tuple2<>(n, op));
            }
            assertEquals(1, matches.size());
            final String o = "person";
            assertSame(operator.second, matches.get(o).second);
            assertEquals(Double.valueOf(13), matches.get(o).first);
        }
    }

    /** "match" with an integer element indexes into an array; "assign" replaces that element. */
    @SuppressWarnings("rawtypes")
    @Test
    public void testArrayIndexing() throws IOException {
        DocumentUpdate doc = parseUpdate("""
                { "update": "id:unittest:testarray::whee", "fields": { "actualarray": { "match": { "element": 3, "assign": "nalle" } } } }
                """);
        Map<Number, String> matches = new HashMap<>();
        FieldUpdate x = doc.getFieldUpdate("actualarray");
        for (ValueUpdate v : x.getValueUpdates()) {
            MapValueUpdate adder = (MapValueUpdate) v;
            final Number key = ((IntegerFieldValue) adder.getValue())
                    .getNumber();
            String op = ((StringFieldValue) adder.getUpdate()
                    .getValue()).getString();
            matches.put(key, op);
        }
        assertEquals(1, matches.size());
        Number n = Integer.valueOf(3);
        assertEquals("nalle", matches.get(n));
    }

    /** readDocumentType resolves the document type from a remove operation's id. */
    @Test
    public void testDocumentRemove() {
        JsonReader r = createReader(inputJson("{'remove': 'id:unittest:smoke::whee'}"));
        DocumentType docType = r.readDocumentType(new DocumentId("id:unittest:smoke::whee"));
        assertEquals("smoke", docType.getName());
    }

    /** Parses a single put document (strict mode) and returns the resulting Document. */
    private Document docFromJson(String json) throws IOException {
        JsonReader r = createReader(json);
        DocumentParseInfo parseInfo = r.parseDocument().get();
        DocumentType docType = r.readDocumentType(parseInfo.documentId);
        DocumentPut put = new DocumentPut(new Document(docType, parseInfo.documentId));
        new VespaJsonDocumentReader(false).readPut(parseInfo.fieldsBuffer, put);
        return put.getDocument();
    }

    /** A JSON object of element→weight parses into a WeightedSet. */
    @Test
    public void testWeightedSet() throws IOException {
        Document doc = docFromJson("""
                { "put": "id:unittest:testset::whee", "fields": { "actualset": { "nalle": 2, "tralle": 7 } } }
                """);
        FieldValue f = doc.getFieldValue(doc.getField("actualset"));
        assertSame(WeightedSet.class, f.getClass());
        WeightedSet<?> w = (WeightedSet<?>) f;
        assertEquals(2, w.size());
        assertEquals(Integer.valueOf(2), w.get(new StringFieldValue("nalle")));
        assertEquals(Integer.valueOf(7), w.get(new StringFieldValue("tralle")));
    }

    /** A JSON array parses into an Array field value preserving order. */
    @Test
    public void testArray() throws IOException {
        Document doc = docFromJson("""
                { "put": "id:unittest:testarray::whee", "fields": { "actualarray": [ "nalle", "tralle" ] } }
                """);
        FieldValue f = doc.getFieldValue(doc.getField("actualarray"));
        assertSame(Array.class, f.getClass());
        Array<?> a = (Array<?>) f;
        assertEquals(2, a.size());
        assertEquals(new StringFieldValue("nalle"), a.get(0));
        assertEquals(new StringFieldValue("tralle"), a.get(1));
    }

    /** Map short form: a plain JSON object of key→value. */
    @Test
    public void testMap() throws IOException {
        Document doc = docFromJson("""
                { "put": "id:unittest:testmap::whee", "fields": { "actualmap": { "nalle": "kalle", "tralle": "skalle" } } }
                """);
        FieldValue f = doc.getFieldValue(doc.getField("actualmap"));
        assertSame(MapFieldValue.class, f.getClass());
        MapFieldValue<?, ?> m = (MapFieldValue<?, ?>) f;
        assertEquals(2, m.size());
        assertEquals(new StringFieldValue("kalle"), m.get(new StringFieldValue("nalle")));
        assertEquals(new StringFieldValue("skalle"), m.get(new StringFieldValue("tralle")));
    }

    /** Legacy map form: an array of { "key": ..., "value": ... } objects. */
    @Test
    public void testOldMap() throws IOException {
        Document doc = docFromJson("""
                { "put": "id:unittest:testmap::whee", "fields": { "actualmap": [ { "key": "nalle", "value": "kalle" }, { "key": "tralle", "value": "skalle" } ] } }
                """);
        FieldValue f = doc.getFieldValue(doc.getField("actualmap"));
        assertSame(MapFieldValue.class, f.getClass());
        MapFieldValue<?, ?> m = (MapFieldValue<?, ?>) f;
        assertEquals(2, m.size());
        assertEquals(new StringFieldValue("kalle"), m.get(new StringFieldValue("nalle")));
        assertEquals(new StringFieldValue("skalle"), m.get(new StringFieldValue("tralle")));
    }

    /** Position string form "N<lat>;E<lng>" parses to microdegree x/y (x=longitude, y=latitude). */
    @Test
    public void testPositionPositive() throws IOException {
        Document doc = docFromJson("""
                { "put": "id:unittest:testsinglepos::bamf", "fields": { "singlepos": "N63.429722;E10.393333" } }
                """);
        FieldValue f = doc.getFieldValue(doc.getField("singlepos"));
        assertSame(Struct.class, f.getClass());
        assertEquals(10393333, PositionDataType.getXValue(f).getInteger());
        assertEquals(63429722, PositionDataType.getYValue(f).getInteger());
    }

    /** Legacy position object form with raw microdegree x/y members. */
    @Test
    public void testPositionOld() throws IOException {
        Document doc = docFromJson("""
                { "put": "id:unittest:testsinglepos::bamf", "fields": { "singlepos": { "x": 10393333, "y": 63429722 } } }
                """);
        FieldValue f = doc.getFieldValue(doc.getField("singlepos"));
        assertSame(Struct.class, f.getClass());
        assertEquals(10393333, PositionDataType.getXValue(f).getInteger());
        assertEquals(63429722, PositionDataType.getYValue(f).getInteger());
    }

    /** Geo object form with "lat"/"lng" degree members. */
    @Test
    public void testGeoPosition() throws IOException {
        Document doc = docFromJson("""
                { "put": "id:unittest:testsinglepos::bamf", "fields": { "singlepos": { "lat": 63.429722, "lng": 10.393333 } } }
                """);
        FieldValue f = doc.getFieldValue(doc.getField("singlepos"));
        assertSame(Struct.class, f.getClass());
        assertEquals(10393333, PositionDataType.getXValue(f).getInteger());
        assertEquals(63429722, PositionDataType.getYValue(f).getInteger());
    }

    /** Geo object form with spelled-out "latitude"/"longitude" members. */
    @Test
    public void testGeoPositionNoAbbreviations() throws IOException {
        Document doc = docFromJson("""
                { "put": "id:unittest:testsinglepos::bamf", "fields": { "singlepos": { "latitude": 63.429722, "longitude": 10.393333 } } }
                """);
        FieldValue f = doc.getFieldValue(doc.getField("singlepos"));
        assertSame(Struct.class, f.getClass());
        assertEquals(10393333, PositionDataType.getXValue(f).getInteger());
        assertEquals(63429722, PositionDataType.getYValue(f).getInteger());
    }

    /** Position string form into a GeoPosType field still yields the position struct type. */
    @Test
    public void testPositionGeoPos() throws IOException {
        Document doc = docFromJson("""
                { "put": "id:unittest:testsinglepos::bamf", "fields": { "geopos": "N63.429722;E10.393333" } }
                """);
        FieldValue f = doc.getFieldValue(doc.getField("geopos"));
        assertSame(Struct.class, f.getClass());
        assertEquals(10393333, PositionDataType.getXValue(f).getInteger());
        assertEquals(63429722, PositionDataType.getYValue(f).getInteger());
        assertEquals(f.getDataType(), PositionDataType.INSTANCE);
    }

    /** Legacy x/y object form into a GeoPosType field. */
    @Test
    public void testPositionOldGeoPos() throws IOException {
        Document doc = docFromJson("""
                { "put": "id:unittest:testsinglepos::bamf", "fields": { "geopos": { "x": 10393333, "y": 63429722 } } }
                """);
        FieldValue f = doc.getFieldValue(doc.getField("geopos"));
        assertSame(Struct.class, f.getClass());
        assertEquals(10393333, PositionDataType.getXValue(f).getInteger());
        assertEquals(63429722, PositionDataType.getYValue(f).getInteger());
        assertEquals(f.getDataType(), PositionDataType.INSTANCE);
    }

    /** lat/lng object form into a GeoPosType field. */
    @Test
    public void testGeoPositionGeoPos() throws IOException {
        Document doc = docFromJson("""
                { "put": "id:unittest:testsinglepos::bamf", "fields": { "geopos": { "lat": 63.429722, "lng": 10.393333 } } }
                """);
        FieldValue f = doc.getFieldValue(doc.getField("geopos"));
        assertSame(Struct.class, f.getClass());
        assertEquals(10393333, PositionDataType.getXValue(f).getInteger());
        assertEquals(63429722, PositionDataType.getYValue(f).getInteger());
        // NOTE(review): the next two assertions are duplicates with swapped argument order;
        // one could be removed (and JUnit convention puts the expected value first).
        assertEquals(f.getDataType(), PositionDataType.INSTANCE);
        assertEquals(PositionDataType.INSTANCE, f.getDataType());
    }

    /** W/S hemisphere prefixes produce negative microdegree coordinates. */
    @Test
    public void testPositionNegative() throws IOException {
        Document doc = docFromJson("""
                { "put": "id:unittest:testsinglepos::bamf", "fields": { "singlepos": "W46.63;S23.55" } }
                """);
        FieldValue f = doc.getFieldValue(doc.getField("singlepos"));
        assertSame(Struct.class, f.getClass());
        assertEquals(-46630000, PositionDataType.getXValue(f).getInteger());
        assertEquals(-23550000, PositionDataType.getYValue(f).getInteger());
    }

    /** A base64 string decodes into a raw field. */
    @Test
    public void testRaw() throws IOException {
        String base64 = new String(new JsonStringEncoder().quoteAsString(
                Base64.getEncoder().withoutPadding().encodeToString(Utf8.toBytes("smoketest"))));
        String s = fieldStringFromBase64RawContent(base64);
        assertEquals("smoketest", s);
    }

    /** MIME-style base64 with \r\n line delimiters (legacy chunked encoding) must still decode. */
    @Test
    public void can_read_legacy_chunked_base64_raw_field_encoding() throws IOException {
        String expected = "this is a string with an impressive length. it's long enough to reach the end of the line, wow!";
        String base64withDelims = "dGhpcyBpcyBhIHN0cmluZyB3aXRoIGFuIGltcHJlc3NpdmUgbGVuZ3RoLiBpdCdzIGxvbmcgZW5v\\r\\n" +
                                  "dWdoIHRvIHJlYWNoIHRoZSBlbmQgb2YgdGhlIGxpbmUsIHdvdyE=\\r\\n";
        assertEquals(expected, fieldStringFromBase64RawContent(base64withDelims));
    }

    /** Feeds the given base64 text into the 'actualraw' field and returns the decoded bytes as UTF-8. */
    private String fieldStringFromBase64RawContent(String base64data) throws IOException {
        Document doc = docFromJson("""
                { "put": "id:unittest:testraw::whee", "fields": { "actualraw": "%s" } }
                """.formatted(base64data));
        FieldValue f = doc.getFieldValue(doc.getField("actualraw"));
        assertSame(Raw.class, f.getClass());
        Raw s = (Raw) f;
        return Utf8.toString(s.getByteBuffer());
    }

    /** Map-of-string-to-array-of-int, short map form. */
    @Test
    public void testMapStringToArrayOfInt() throws IOException {
        Document doc = docFromJson("""
                { "put": "id:unittest:testMapStringToArrayOfInt::whee", "fields": { "actualMapStringToArrayOfInt": { "bamse": [1, 2, 3] } } }
                """);
        FieldValue f = doc.getFieldValue("actualMapStringToArrayOfInt");
        assertSame(MapFieldValue.class, f.getClass());
        MapFieldValue<?, ?> m = (MapFieldValue<?, ?>) f;
        Array<?> a = (Array<?>) m.get(new StringFieldValue("bamse"));
        assertEquals(3, a.size());
        assertEquals(new IntegerFieldValue(1), a.get(0));
        assertEquals(new IntegerFieldValue(2), a.get(1));
        assertEquals(new IntegerFieldValue(3), a.get(2));
    }

    /** Map-of-string-to-array-of-int, legacy key/value array form. */
    @Test
    public void testOldMapStringToArrayOfInt() throws IOException {
        Document doc = docFromJson("""
                { "put": "id:unittest:testMapStringToArrayOfInt::whee", "fields": { "actualMapStringToArrayOfInt": [ { "key": "bamse", "value": [1, 2, 3] } ] } }
                """);
        FieldValue f = doc.getFieldValue("actualMapStringToArrayOfInt");
        assertSame(MapFieldValue.class, f.getClass());
        MapFieldValue<?, ?> m = (MapFieldValue<?, ?>) f;
        Array<?> a = (Array<?>) m.get(new StringFieldValue("bamse"));
        assertEquals(3, a.size());
        assertEquals(new IntegerFieldValue(1), a.get(0));
        assertEquals(new IntegerFieldValue(2), a.get(1));
        assertEquals(new IntegerFieldValue(3), a.get(2));
    }

    /** Plain string "assign" becomes an AssignValueUpdate holding the string. */
    @Test
    public void testAssignToString() throws IOException {
        DocumentUpdate doc = parseUpdate("""
                { "update": "id:unittest:smoke::whee", "fields": { "something": { "assign": "orOther" } } }
                """);
        FieldUpdate f = doc.getFieldUpdate("something");
        assertEquals(1, f.size());
        AssignValueUpdate a = (AssignValueUpdate) f.getValueUpdate(0);
        assertEquals(new StringFieldValue("orOther"), a.getValue());
    }

    /** Nested "match" updates address an element of an inner array; member order is irrelevant; applying mutates only doc[1][2]. */
    @Test
    public void testNestedArrayMatch() throws IOException {
        DocumentUpdate nested = parseUpdate("""
                { "update": "id:unittest:testArrayOfArrayOfInt::whee", "fields": { "arrayOfArrayOfInt": { "match": { "element": 1, "match": { "element": 2, "assign": 3 } } } } }
                """);
        DocumentUpdate equivalent = parseUpdate("""
                { "update": "id:unittest:testArrayOfArrayOfInt::whee", "fields": { "arrayOfArrayOfInt": { "match": { "match": { "assign": 3, "element": 2 }, "element": 1 } } } }
                """);
        assertEquals(nested, equivalent);
        assertEquals(1, nested.fieldUpdates().size());
        FieldUpdate fu = nested.fieldUpdates().iterator().next();
        assertEquals(1, fu.getValueUpdates().size());
        MapValueUpdate mvu = (MapValueUpdate) fu.getValueUpdate(0);
        assertEquals(new IntegerFieldValue(1), mvu.getValue());
        MapValueUpdate nvu = (MapValueUpdate) mvu.getUpdate();
        assertEquals(new IntegerFieldValue(2), nvu.getValue());
        AssignValueUpdate avu = (AssignValueUpdate) nvu.getUpdate();
        assertEquals(new IntegerFieldValue(3), avu.getValue());
        Document doc = docFromJson("""
                { "put": "id:unittest:testArrayOfArrayOfInt::whee", "fields": { "arrayOfArrayOfInt": [ [1, 2, 3], [4, 5, 6] ] } }
                """);
        nested.applyTo(doc);
        Document expected = docFromJson("""
                { "put": "id:unittest:testArrayOfArrayOfInt::whee", "fields": { "arrayOfArrayOfInt": [ [1, 2, 3], [4, 5, 3] ] } }
                """);
        assertEquals(expected, doc);
    }

    /** Nested "match" through a map value is unsupported and must say so. */
    @Test
    public void testMatchCannotUpdateNestedFields() {
        assertEquals("Field type Map<string,Array<int>> not supported.",
                     assertThrows(UnsupportedOperationException.class,
                                  () -> parseUpdate("""
                { { "update": "id:unittest:testMapStringToArrayOfInt::whee", "fields": { "actualMapStringToArrayOfInt": { "match": { "element": "bamse", "match": { "element": 1, "assign": 4 } } } } }
                """)).getMessage());
    }

    /** "match"+"assign" into a map value is unsupported. */
    @Test
    public void testMatchCannotAssignToNestedMap() {
        assertEquals("Field type Map<string,Array<int>> not supported.",
                     assertThrows(UnsupportedOperationException.class,
                                  () -> parseUpdate("""
                { "update": "id:unittest:testMapStringToArrayOfInt::whee", "fields": { "actualMapStringToArrayOfInt": { "match": { "element": "bamse", "assign": [1, 3, 4] } } } }
                """)).getMessage());
    }

    /** "match"+"assign" on a plain map field is unsupported. */
    @Test
    public void testMatchCannotAssignToMap() {
        assertEquals("Field type Map<string,string> not supported.",
                     assertThrows(UnsupportedOperationException.class,
                                  () -> parseUpdate("""
                { "update": "id:unittest:testmap::whee", "fields": { "actualmap": { "match": { "element": "bamse", "assign": "bar" } } } }
                """)).getMessage());
    }

    /** Field-path update syntax ("field{key}[index]") assigns into a nested array element. */
    @Test
    public void testAssignInsideArrayInMap() throws IOException {
        Document doc = docFromJson("""
                { "put": "id:unittest:testMapStringToArrayOfInt::whee", "fields": { "actualMapStringToArrayOfInt": { "bamse": [1, 2, 3] } } }""");
        assertEquals(2, ((MapFieldValue<StringFieldValue, Array<IntegerFieldValue>>) doc.getFieldValue("actualMapStringToArrayOfInt"))
                .get(StringFieldValue.getFactory().create("bamse")).get(1).getInteger());
        DocumentUpdate update = parseUpdate("""
                { "update": "id:unittest:testMapStringToArrayOfInt::whee", "fields": { "actualMapStringToArrayOfInt{bamse}[1]": { "assign": 4 } } }
                """);
        assertEquals(1, update.fieldPathUpdates().size());
        update.applyTo(doc);
        assertEquals(4, ((MapFieldValue<StringFieldValue, Array<IntegerFieldValue>>) doc.getFieldValue("actualMapStringToArrayOfInt"))
                .get(StringFieldValue.getFactory().create("bamse")).get(1).getInteger());
    }

    // NOTE(review): the method below is truncated — the rest of it (and of the class)
    // lies beyond the end of this chunk; kept verbatim.
    @Test public void testAssignToArray() throws IOException { DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:testMapStringToArrayOfInt::whee", "fields": { "actualMapStringToArrayOfInt": { "assign": {
"bamse": [1, 2, 3] } } } } """); FieldUpdate f = doc.getFieldUpdate("actualMapStringToArrayOfInt"); assertEquals(1, f.size()); AssignValueUpdate assign = (AssignValueUpdate) f.getValueUpdate(0); MapFieldValue<?, ?> m = (MapFieldValue<?, ?>) assign.getValue(); Array<?> a = (Array<?>) m.get(new StringFieldValue("bamse")); assertEquals(3, a.size()); assertEquals(new IntegerFieldValue(1), a.get(0)); assertEquals(new IntegerFieldValue(2), a.get(1)); assertEquals(new IntegerFieldValue(3), a.get(2)); } @Test public void testOldAssignToArray() throws IOException { DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:testMapStringToArrayOfInt::whee", "fields": { "actualMapStringToArrayOfInt": { "assign": [ { "key": "bamse", "value": [1, 2, 3] } ] } } } """); FieldUpdate f = doc.getFieldUpdate("actualMapStringToArrayOfInt"); assertEquals(1, f.size()); AssignValueUpdate assign = (AssignValueUpdate) f.getValueUpdate(0); MapFieldValue<?, ?> m = (MapFieldValue<?, ?>) assign.getValue(); Array<?> a = (Array<?>) m.get(new StringFieldValue("bamse")); assertEquals(3, a.size()); assertEquals(new IntegerFieldValue(1), a.get(0)); assertEquals(new IntegerFieldValue(2), a.get(1)); assertEquals(new IntegerFieldValue(3), a.get(2)); } @Test public void testAssignToWeightedSet() throws IOException { DocumentUpdate doc = parseUpdate(""" { "update": "id:unittest:testset::whee", "fields": { "actualset": { "assign": { "person": 37, "another person": 41 } } } } """); FieldUpdate x = doc.getFieldUpdate("actualset"); assertEquals(1, x.size()); AssignValueUpdate assign = (AssignValueUpdate) x.getValueUpdate(0); WeightedSet<?> w = (WeightedSet<?>) assign.getValue(); assertEquals(2, w.size()); assertEquals(Integer.valueOf(37), w.get(new StringFieldValue("person"))); assertEquals(Integer.valueOf(41), w.get(new StringFieldValue("another person"))); } @Test public void testCompleteFeed() { JsonReader r = createReader(""" [ { "put": "id:unittest:smoke::whee", "fields": { "something": "smoketest", 
"flag": true, "nalle": "bamse" } }, { "update": "id:unittest:testarray::whee", "fields": { "actualarray": { "add": [ "person", "another person" ] } } }, { "remove": "id:unittest:smoke::whee" } ] """); controlBasicFeed(r); } @Test public void testCompleteFeedWithCreateAndCondition() { JsonReader r = createReader(""" [ { "put": "id:unittest:smoke::whee", "fields": { "something": "smoketest", "flag": true, "nalle": "bamse" } }, { "condition":"bla", "update": "id:unittest:testarray::whee", "create":true, "fields": { "actualarray": { "add": [ "person", "another person" ] } } }, { "remove": "id:unittest:smoke::whee" } ] """); DocumentOperation d = r.next(); Document doc = ((DocumentPut) d).getDocument(); smokeTestDoc(doc); d = r.next(); DocumentUpdate update = (DocumentUpdate) d; checkSimpleArrayAdd(update); assertTrue(update.getCreateIfNonExistent()); assertEquals("bla", update.getCondition().getSelection()); d = r.next(); DocumentRemove remove = (DocumentRemove) d; assertEquals("smoke", remove.getId().getDocType()); assertNull(r.next()); } @Test public void testUpdateWithConditionAndCreateInDifferentOrdering() { int documentsCreated = 106; List<String> parts = Arrays.asList( "\"condition\":\"bla\"", "\"update\": \"id:unittest:testarray::whee\"", " \"fields\": { " + "\"actualarray\": { \"add\": [" + " \"person\",\"another person\"]}}", " \"create\":true"); Random random = new Random(42); StringBuilder documents = new StringBuilder("["); for (int x = 0; x < documentsCreated; x++) { Collections.shuffle(parts, random); documents.append("{").append(Joiner.on(",").join(parts)).append("}"); if (x < documentsCreated -1) { documents.append(","); } } documents.append("]"); InputStream rawDoc = new ByteArrayInputStream(Utf8.toBytes(documents.toString())); JsonReader r = new JsonReader(types, rawDoc, parserFactory); for (int x = 0; x < documentsCreated; x++) { DocumentUpdate update = (DocumentUpdate) r.next(); checkSimpleArrayAdd(update); 
assertTrue(update.getCreateIfNonExistent()); assertEquals("bla", update.getCondition().getSelection()); } assertNull(r.next()); } @Test public void testCreateIfNonExistentInPut() { JsonReader r = createReader(""" [ { "create":true, "fields": { "something": "smoketest", "nalle": "bamse" }, "put": "id:unittest:smoke::whee" } ] """); var op = r.next(); var put = (DocumentPut) op; assertTrue(put.getCreateIfNonExistent()); } @Test public void testCompleteFeedWithIdAfterFields() { JsonReader r = createReader(""" [ { "fields": { "something": "smoketest", "flag": true, "nalle": "bamse" }, "put": "id:unittest:smoke::whee" }, { "fields": { "actualarray": { "add": [ "person", "another person" ] } }, "update": "id:unittest:testarray::whee" }, { "remove": "id:unittest:smoke::whee" } ] """); controlBasicFeed(r); } protected void controlBasicFeed(JsonReader r) { DocumentOperation d = r.next(); Document doc = ((DocumentPut) d).getDocument(); smokeTestDoc(doc); d = r.next(); DocumentUpdate update = (DocumentUpdate) d; checkSimpleArrayAdd(update); d = r.next(); DocumentRemove remove = (DocumentRemove) d; assertEquals("smoke", remove.getId().getDocType()); assertNull(r.next()); } @Test public void testCompleteFeedWithEmptyDoc() { JsonReader r = createReader(""" [ { "put": "id:unittest:smoke::whee", "fields": {} }, { "update": "id:unittest:testarray::whee", "fields": {} }, { "remove": "id:unittest:smoke::whee" } ] """); DocumentOperation d = r.next(); Document doc = ((DocumentPut) d).getDocument(); assertEquals("smoke", doc.getId().getDocType()); d = r.next(); DocumentUpdate update = (DocumentUpdate) d; assertEquals("testarray", update.getId().getDocType()); d = r.next(); DocumentRemove remove = (DocumentRemove) d; assertEquals("smoke", remove.getId().getDocType()); assertNull(r.next()); } private void checkSimpleArrayAdd(DocumentUpdate update) { Set<String> toAdd = new HashSet<>(); FieldUpdate x = update.getFieldUpdate("actualarray"); for (ValueUpdate<?> v : x.getValueUpdates()) { 
AddValueUpdate adder = (AddValueUpdate) v; toAdd.add(((StringFieldValue) adder.getValue()).getString()); } assertEquals(2, toAdd.size()); assertTrue(toAdd.contains("person")); assertTrue(toAdd.contains("another person")); } private void smokeTestDoc(Document doc) { FieldValue boolField = doc.getFieldValue(doc.getField("flag")); assertSame(BoolFieldValue.class, boolField.getClass()); assertTrue((Boolean)boolField.getWrappedValue()); FieldValue stringField = doc.getFieldValue(doc.getField("nalle")); assertSame(StringFieldValue.class, stringField.getClass()); assertEquals("bamse", ((StringFieldValue) stringField).getString()); } @Test public void nonExistingFieldCausesException() throws IOException { Exception expected = assertThrows(IllegalArgumentException.class, () -> docFromJson(""" { "put": "id:unittest:smoke::whee", "fields": { "smething": "smoketest", "nalle": "bamse" } } """)); assertTrue(expected.getMessage().startsWith("No field 'smething' in the structure of type 'smoke'")); } @Test public void nonExistingFieldsCanBeIgnoredInPut() throws IOException { JsonReader r = createReader(""" { "put": "id:unittest:smoke::doc1", "fields": { "nonexisting1": "ignored value", "field1": "value1", "nonexisting2": { "blocks": { "a": [2.0, 3.0], "b": [4.0, 5.0] } }, "field2": "value2", "nonexisting3": { "cells": [ { "address": { "x": "x1" }, "value": 1.0 } ] }, "tensor1": { "cells": { "x1": 1.0 } }, "nonexisting4": "ignored value" } } """); DocumentParseInfo parseInfo = r.parseDocument().get(); DocumentType docType = r.readDocumentType(parseInfo.documentId); DocumentPut put = new DocumentPut(new Document(docType, parseInfo.documentId)); boolean fullyApplied = new VespaJsonDocumentReader(true).readPut(parseInfo.fieldsBuffer, put); assertFalse(fullyApplied); assertNull(put.getDocument().getField("nonexisting1")); assertEquals("value1", put.getDocument().getFieldValue("field1").toString()); assertNull(put.getDocument().getField("nonexisting2")); assertEquals("value2", 
put.getDocument().getFieldValue("field2").toString()); assertNull(put.getDocument().getField("nonexisting3")); assertEquals(Tensor.from("tensor(x{}):{{x:x1}:1.0}"), put.getDocument().getFieldValue("tensor1").getWrappedValue()); assertNull(put.getDocument().getField("nonexisting4")); } @Test public void nonExistingFieldsCanBeIgnoredInUpdate() throws IOException{ JsonReader r = createReader(""" { "update": "id:unittest:smoke::doc1", "fields": { "nonexisting1": { "assign": "ignored value" }, "field1": { "assign": "value1" }, "nonexisting2": { "assign": { "blocks": { "a":[2.0,3.0], "b":[4.0,5.0] } } }, "field2": { "assign": "value2" }, "nonexisting3": { "assign" : { "cells": [{"address": {"x": "x1"}, "value": 1.0}] } }, "tensor1": {"assign": { "cells": {"x1": 1.0} } }, "nonexisting4": { "assign": "ignored value" } } } """); DocumentParseInfo parseInfo = r.parseDocument().get(); DocumentType docType = r.readDocumentType(parseInfo.documentId); DocumentUpdate update = new DocumentUpdate(docType, parseInfo.documentId); boolean fullyApplied = new VespaJsonDocumentReader(true).readUpdate(parseInfo.fieldsBuffer, update); assertFalse(fullyApplied); assertNull(update.getFieldUpdate("nonexisting1")); assertEquals("value1", update.getFieldUpdate("field1").getValueUpdates().get(0).getValue().getWrappedValue().toString()); assertNull(update.getFieldUpdate("nonexisting2")); assertEquals("value2", update.getFieldUpdate("field2").getValueUpdates().get(0).getValue().getWrappedValue().toString()); assertNull(update.getFieldUpdate("nonexisting3")); assertEquals(Tensor.from("tensor(x{}):{{x:x1}:1.0}"), update.getFieldUpdate("tensor1").getValueUpdates().get(0).getValue().getWrappedValue()); assertNull(update.getFieldUpdate("nonexisting4")); } @Test public void feedWithBasicErrorTest() { JsonReader r = createReader(""" [ { "put": "id:test:smoke::0", "fields": { "something": "foo" } }, { "put": "id:test:smoke::1", "fields": { "something": "foo" } }, { "put": "id:test:smoke::2", "fields": { 
"something": "foo" } }, ]"""); assertTrue(assertThrows(RuntimeException.class, () -> { while (r.next() != null); }) .getMessage().contains("JsonParseException")); } @Test public void idAsAliasForPutTest() throws IOException{ JsonReader r = createReader(""" { "id": "id:unittest:smoke::doc1", "fields": { "something": "smoketest", "flag": true, "nalle": "bamse" } } """); DocumentParseInfo parseInfo = r.parseDocument().get(); DocumentType docType = r.readDocumentType(parseInfo.documentId); DocumentPut put = new DocumentPut(new Document(docType, parseInfo.documentId)); boolean fullyApplied = new VespaJsonDocumentReader(false).readPut(parseInfo.fieldsBuffer, put); assertTrue(fullyApplied); smokeTestDoc(put.getDocument()); } private void testFeedWithTestAndSetCondition(String jsonDoc) { ByteArrayInputStream parseInfoDoc = new ByteArrayInputStream(Utf8.toBytes(jsonDoc)); JsonReader reader = new JsonReader(types, parseInfoDoc, parserFactory); int NUM_OPERATIONS_IN_FEED = 3; for (int i = 0; i < NUM_OPERATIONS_IN_FEED; i++) { DocumentOperation operation = reader.next(); assertTrue("A test and set condition should be present", operation.getCondition().isPresent()); assertEquals("DocumentOperation's test and set condition should be equal to the one in the JSON feed", "smoke.something == \"smoketest\"", operation.getCondition().getSelection()); } assertNull(reader.next()); } @Test public void testFeedWithTestAndSetConditionOrderingOne() { testFeedWithTestAndSetCondition(""" [ { "put": "id:unittest:smoke::whee", "condition": "smoke.something == \\"smoketest\\"", "fields": { "something": "smoketest", "nalle": "bamse" } }, { "update": "id:unittest:testarray::whee", "condition": "smoke.something == \\"smoketest\\"", "fields": { "actualarray": { "add": [ "person", "another person" ] } } }, { "remove": "id:unittest:smoke::whee", "condition": "smoke.something == \\"smoketest\\"" } ] """); } @Test public void testFeedWithTestAndSetConditionOrderingTwo() { 
testFeedWithTestAndSetCondition(""" [ { "condition": "smoke.something == \\"smoketest\\"", "put": "id:unittest:smoke::whee", "fields": { "something": "smoketest", "nalle": "bamse" } }, { "condition": "smoke.something == \\"smoketest\\"", "update": "id:unittest:testarray::whee", "fields": { "actualarray": { "add": [ "person", "another person" ] } } }, { "condition": "smoke.something == \\"smoketest\\"", "remove": "id:unittest:smoke::whee" } ] """); } @Test public void testFeedWithTestAndSetConditionOrderingThree() { testFeedWithTestAndSetCondition(""" [ { "put": "id:unittest:smoke::whee", "fields": { "something": "smoketest", "nalle": "bamse" }, "condition": "smoke.something == \\"smoketest\\"" }, { "update": "id:unittest:testarray::whee", "fields": { "actualarray": { "add": [ "person", "another person" ] } }, "condition": "smoke.something == \\"smoketest\\"" }, { "remove": "id:unittest:smoke::whee", "condition": "smoke.something == \\"smoketest\\"" } ] """); } @Test(expected = IllegalArgumentException.class) public void testInvalidFieldAfterFieldsFieldShouldFailParse() { String jsonData = """ [ { "put": "id:unittest:smoke::whee", "fields": { "something": "smoketest", "nalle": "bamse" }, "bjarne": "stroustrup" } ]"""; new JsonReader(types, jsonToInputStream(jsonData), parserFactory).next(); } @Test(expected = IllegalArgumentException.class) public void testInvalidFieldBeforeFieldsFieldShouldFailParse() { String jsonData = """ [ { "update": "id:unittest:testarray::whee", "what is this": "nothing to see here", "fields": { "actualarray": { "add": [ "person", "another person" ] } } } ]"""; new JsonReader(types, jsonToInputStream(jsonData), parserFactory).next(); } @Test(expected = IllegalArgumentException.class) public void testInvalidFieldWithoutFieldsFieldShouldFailParse() { String jsonData = """ [ { "remove": "id:unittest:smoke::whee", "what is love": "baby, do not hurt me... 
much } ]"""; new JsonReader(types, jsonToInputStream(jsonData), parserFactory).next(); } @Test public void testMissingOperation() { try { String jsonData = """ [ { "fields": { "actualarray": { "add": [ "person", "another person" ] } } } ]"""; new JsonReader(types, jsonToInputStream(jsonData), parserFactory).next(); fail("Expected exception"); } catch (IllegalArgumentException e) { assertEquals("Missing a document operation ('put', 'update' or 'remove')", e.getMessage()); } } @Test public void testMissingFieldsMapInPut() { try { String jsonData = """ [ { "put": "id:unittest:smoke::whee" } ]"""; new JsonReader(types, jsonToInputStream(jsonData), parserFactory).next(); fail("Expected exception"); } catch (IllegalArgumentException e) { assertEquals("put of document id:unittest:smoke::whee is missing a 'fields' map", e.getMessage()); } } @Test public void testMissingFieldsMapInUpdate() { try { String jsonData = """ [ { "update": "id:unittest:smoke::whee" } ]"""; new JsonReader(types, jsonToInputStream(jsonData), parserFactory).next(); fail("Expected exception"); } catch (IllegalArgumentException e) { assertEquals("Update of document id:unittest:smoke::whee is missing a 'fields' map", e.getMessage()); } } @Test public void testNullValues() throws IOException { Document doc = docFromJson(""" { "put": "id:unittest:testnull::doc1", "fields": { "intfield": null, "stringfield": null, "arrayfield": null, "weightedsetfield": null, "mapfield": null, "tensorfield": null } } """); assertFieldValueNull(doc, "intfield"); assertFieldValueNull(doc, "stringfield"); assertFieldValueNull(doc, "arrayfield"); assertFieldValueNull(doc, "weightedsetfield"); assertFieldValueNull(doc, "mapfield"); assertFieldValueNull(doc, "tensorfield"); } @Test(expected=JsonReaderException.class) public void testNullArrayElement() throws IOException { docFromJson(""" { "put": "id:unittest:testnull::doc1", "fields": { "arrayfield": [ null ] } } """); fail(); } private void assertFieldValueNull(Document doc, 
String fieldName) { Field field = doc.getField(fieldName); assertNotNull(field); FieldValue fieldValue = doc.getFieldValue(field); assertNull(fieldValue); } static ByteArrayInputStream jsonToInputStream(String json) { return new ByteArrayInputStream(Utf8.toBytes(json)); } @Test public void testParsingWithoutTensorField() { Document doc = createPutWithoutTensor().getDocument(); assertEquals("testtensor", doc.getId().getDocType()); assertEquals("id:unittest:testtensor::0", doc.getId().toString()); TensorFieldValue fieldValue = (TensorFieldValue)doc.getFieldValue(doc.getField("sparse_tensor")); assertNull(fieldValue); } @Test public void testParsingOfEmptyTensor() { assertSparseTensorField("tensor(x{},y{}):{}", createPutWithSparseTensor("{}")); } @Test public void testParsingOfTensorWithEmptyCells() { assertSparseTensorField("tensor(x{},y{}):{}", createPutWithSparseTensor(inputJson("{ 'cells': [] }"))); } @Test public void testDisallowedDenseTensorShortFormWithoutValues() { assertCreatePutFails(inputJson("{ 'values': [] }"), "dense_tensor", "The 'values' array does not contain any values"); assertCreatePutFails(inputJson("{ 'values': '' }"), "dense_tensor", "The 'values' string does not contain any values"); } @Test public void testDisallowedMixedTensorShortFormWithoutValues() { assertCreatePutFails(inputJson("{\"blocks\":{ \"a\": [] } }"), "mixed_tensor", "Expected 3 values, but got 0"); assertCreatePutFails(inputJson("{\"blocks\":[ {\"address\":{\"x\":\"a\"}, \"values\": [] } ] }"), "mixed_tensor", "Expected 3 values, but got 0"); } @Test public void testParsingOfSparseTensorWithCells() { Tensor tensor = assertSparseTensorField("{{x:a,y:b}:2.0,{x:c,y:b}:3.0}}", createPutWithSparseTensor(""" { "type": "tensor(x{},y{})", "cells": [ { "address": { "x": "a", "y": "b" }, "value": 2.0 }, { "address": { "x": "c", "y": "b" }, "value": 3.0 } ] } """)); assertTrue(tensor instanceof MappedTensor); }
With sufficient self-medication I can live with the nested assignment here, but consider adding an empty `{}` body to the `while` statement to avoid the classical footguns associated with trailing semicolons flying in stealth mode.
/**
 * Returns whether the value at the current buffer position is an array
 * (possibly nested) whose innermost elements are objects.
 *
 * Only peeks via {@link TokenBuffer#lookahead()}; no tokens are consumed
 * from the buffer itself.
 *
 * @param buffer the token buffer positioned at the candidate value
 * @return true if the first non-START_ARRAY token ahead is START_OBJECT
 */
private static boolean isArrayOfObjects(TokenBuffer buffer) {
    if (buffer.current() != JsonToken.START_ARRAY) return false; // not an array at all
    Supplier<Token> lookahead = buffer.lookahead();
    Token next;
    // Skip past any nested array openings. The explicit empty block (rather
    // than a bare trailing semicolon) makes the intentionally empty loop
    // body visible and avoids the classic stealth-semicolon footgun.
    while ((next = lookahead.get()).token == JsonToken.START_ARRAY) { }
    return next.token == JsonToken.START_OBJECT;
}
while ((next = lookahead.get()).token == JsonToken.START_ARRAY);
/** Returns whether the (possibly nested) array at the current buffer position contains objects. */
private static boolean isArrayOfObjects(TokenBuffer buffer) {
    if (buffer.current() != JsonToken.START_ARRAY) return false;
    Supplier<Token> lookahead = buffer.lookahead(); // peeks ahead without consuming from the buffer
    Token next;
    // Advance the lookahead past nested array openings; the empty body is intentional.
    while ((next = lookahead.get()).token == JsonToken.START_ARRAY) { }
    return next.token == JsonToken.START_OBJECT;
}
/**
 * Reads the JSON representation of a tensor into a {@link TensorFieldValue}.
 *
 * Supports the named fields 'cells', 'values' and 'blocks', as well as a
 * "direct" value at the root where the format is inferred from the tensor type.
 * All reading is driven by a {@link TokenBuffer} whose nesting() counter is
 * used to detect the end of composite values.
 */
class TensorReader {

    // Field names recognized in the tensor JSON representation
    public static final String TENSOR_TYPE = "type";
    public static final String TENSOR_ADDRESS = "address";
    public static final String TENSOR_CELLS = "cells";
    public static final String TENSOR_VALUES = "values";
    public static final String TENSOR_BLOCKS = "blocks";
    public static final String TENSOR_VALUE = "value";

    /**
     * Reads a tensor from the buffer and assigns it to the given field value.
     * Dispatches on lookahead: a named 'cells'/'values'/'blocks' field selects
     * that format; anything else is treated as a direct value at the root.
     */
    static void fillTensor(TokenBuffer buffer, TensorFieldValue tensorFieldValue) {
        Tensor.Builder builder = Tensor.Builder.of(tensorFieldValue.getDataType().getTensorType());
        expectOneOf(buffer.current(), JsonToken.START_OBJECT, JsonToken.START_ARRAY);
        int initNesting = buffer.nesting();
        while (true) {
            Supplier<Token> lookahead = buffer.lookahead();
            Token next = lookahead.get();
            // 'cells' with composite content (a scalar-valued 'cells' is a direct value instead)
            if (TENSOR_CELLS.equals(next.name) && ! primitiveContent(next.token, lookahead.get().token)) {
                buffer.next();
                readTensorCells(buffer, builder);
            }
            // 'values' is only valid for fully indexed (dense) types
            else if (TENSOR_VALUES.equals(next.name) && builder.type().dimensions().stream().allMatch(Dimension::isIndexed)) {
                buffer.next();
                readTensorValues(buffer, builder);
            }
            else if (TENSOR_BLOCKS.equals(next.name)) {
                buffer.next();
                readTensorBlocks(buffer, builder);
            }
            // An optional 'type' string is accepted and skipped
            else if (TENSOR_TYPE.equals(next.name) && next.token == JsonToken.VALUE_STRING) {
                buffer.next();
            }
            // End of the enclosing object: done
            else if (buffer.nesting() == initNesting && JsonToken.END_OBJECT == next.token) {
                buffer.next();
                break;
            }
            // Anything else: the value itself is the tensor content
            else {
                readDirectTensorValue(buffer, builder);
                break;
            }
        }
        expectOneOf(buffer.current(), JsonToken.END_OBJECT, JsonToken.END_ARRAY);
        tensorFieldValue.assign(builder.build());
    }

    /**
     * Returns whether the current token (given the one following it) denotes
     * primitive content: a scalar, or an array whose first element is a scalar.
     */
    static boolean primitiveContent(JsonToken current, JsonToken next) {
        if (current.isScalarValue()) return true;
        if (current == JsonToken.START_ARRAY) {
            if (next == JsonToken.END_ARRAY) return false;
            if (next.isScalarValue()) return true;
        }
        return false;
    }

    /**
     * Reads 'cells' content: either an array of {address, value} objects,
     * or an object mapping a single-dimension label to its value.
     */
    static void readTensorCells(TokenBuffer buffer, Tensor.Builder builder) {
        if (buffer.current() == JsonToken.START_ARRAY) {
            int initNesting = buffer.nesting();
            for (buffer.next(); buffer.nesting() >= initNesting; buffer.next())
                readTensorCell(buffer, builder);
        }
        else if (buffer.current() == JsonToken.START_OBJECT) {
            int initNesting = buffer.nesting();
            // Object short form: each key is a label in the tensor's single dimension
            for (buffer.next(); buffer.nesting() >= initNesting; buffer.next())
                builder.cell(asAddress(buffer.currentName(), builder.type()), readDouble(buffer));
        }
        else {
            throw new IllegalArgumentException("Expected 'cells' to contain an array or an object, but got " + buffer.current());
        }
        expectCompositeEnd(buffer.current());
    }

    /** Reads one {address, value} object from a 'cells' array into the builder. */
    private static void readTensorCell(TokenBuffer buffer, Tensor.Builder builder) {
        expectObjectStart(buffer.current());
        TensorAddress address = null;
        Double value = null;
        int initNesting = buffer.nesting();
        for (buffer.next(); buffer.nesting() >= initNesting; buffer.next()) {
            String currentName = buffer.currentName();
            if (TensorReader.TENSOR_ADDRESS.equals(currentName)) {
                address = readAddress(buffer, builder.type());
            } else if (TensorReader.TENSOR_VALUE.equals(currentName)) {
                value = readDouble(buffer);
            }
        }
        expectObjectEnd(buffer.current());
        if (address == null)
            throw new IllegalArgumentException("Expected an object in a tensor 'cells' array to contain an 'address' field");
        if (value == null)
            throw new IllegalArgumentException("Expected an object in a tensor 'cells' array to contain a 'value' field");
        builder.cell(address, value);
    }

    /**
     * Reads 'values' content for a dense tensor: either a hex string encoding
     * the values, or a (possibly nested) array of numbers in row-major order.
     */
    private static void readTensorValues(TokenBuffer buffer, Tensor.Builder builder) {
        if ( ! (builder instanceof IndexedTensor.BoundBuilder indexedBuilder))
            throw new IllegalArgumentException("The 'values' field can only be used with dense tensors. " +
                                               "Use 'cells' or 'blocks' instead");
        if (buffer.current() == JsonToken.VALUE_STRING) {
            double[] decoded = decodeHexString(buffer.currentText(), builder.type().valueType());
            if (decoded.length == 0)
                throw new IllegalArgumentException("The 'values' string does not contain any values");
            for (int i = 0; i < decoded.length; i++) {
                indexedBuilder.cellByDirectIndex(i, decoded[i]);
            }
            return;
        }
        int index = 0;
        int initNesting = buffer.nesting();
        for (buffer.next(); buffer.nesting() >= initNesting; buffer.next()) {
            // Nested array brackets carry no values; only scalars are consumed
            if (buffer.current() == JsonToken.START_ARRAY || buffer.current() == JsonToken.END_ARRAY) continue;
            indexedBuilder.cellByDirectIndex(index++, readDouble(buffer));
        }
        if (index == 0)
            throw new IllegalArgumentException("The 'values' array does not contain any values");
        expectCompositeEnd(buffer.current());
    }

    /**
     * Reads 'blocks' content for a mixed tensor: either an array of
     * {address, values} objects, or an object mapping a mapped-dimension label
     * to its dense subspace values.
     */
    static void readTensorBlocks(TokenBuffer buffer, Tensor.Builder builder) {
        if ( ! (builder instanceof MixedTensor.BoundBuilder mixedBuilder))
            throw new IllegalArgumentException("The 'blocks' field can only be used with mixed tensors with bound dimensions. " +
                                               "Use 'cells' or 'values' instead");
        if (buffer.current() == JsonToken.START_ARRAY) {
            int initNesting = buffer.nesting();
            for (buffer.next(); buffer.nesting() >= initNesting; buffer.next())
                readTensorBlock(buffer, mixedBuilder);
        }
        else if (buffer.current() == JsonToken.START_OBJECT) {
            int initNesting = buffer.nesting();
            for (buffer.next(); buffer.nesting() >= initNesting; buffer.next()) {
                // Object short form: each key addresses the mapped subtype
                TensorAddress mappedAddress = asAddress(buffer.currentName(), builder.type().mappedSubtype());
                mixedBuilder.block(mappedAddress,
                                   readValues(buffer, (int) mixedBuilder.denseSubspaceSize(), mappedAddress, mixedBuilder.type()));
            }
        }
        else {
            throw new IllegalArgumentException("Expected 'blocks' to contain an array or an object, but got " + buffer.current());
        }
        expectCompositeEnd(buffer.current());
    }

    /** Reads one {address, values} object from a 'blocks' array into the builder. */
    private static void readTensorBlock(TokenBuffer buffer, MixedTensor.BoundBuilder mixedBuilder) {
        expectObjectStart(buffer.current());
        TensorAddress address = null;
        double[] values = null;
        int initNesting = buffer.nesting();
        for (buffer.next(); buffer.nesting() >= initNesting; buffer.next()) {
            String currentName = buffer.currentName();
            if (TensorReader.TENSOR_ADDRESS.equals(currentName))
                address = readAddress(buffer, mixedBuilder.type().mappedSubtype());
            else if (TensorReader.TENSOR_VALUES.equals(currentName))
                // NOTE(review): if 'values' precedes 'address' in the JSON, address is
                // still null here, so error reporting in readValues loses the address.
                values = readValues(buffer, (int)mixedBuilder.denseSubspaceSize(), address, mixedBuilder.type());
        }
        expectObjectEnd(buffer.current());
        if (address == null)
            throw new IllegalArgumentException("Expected a 'blocks' array object to contain an object 'address'");
        if (values == null)
            throw new IllegalArgumentException("Expected a 'blocks' array object to contain an array 'values'");
        mixedBuilder.block(address, values);
    }

    /** Reads a tensor value directly at the root, where the format is decided by the tensor type. */
    private static void readDirectTensorValue(TokenBuffer buffer, Tensor.Builder builder) {
        boolean hasIndexed = builder.type().dimensions().stream().anyMatch(TensorType.Dimension::isIndexed);
        boolean hasMapped = builder.type().dimensions().stream().anyMatch(TensorType.Dimension::isMapped);
        if (isArrayOfObjects(buffer))
            readTensorCells(buffer, builder);   // array of {address, value} objects
        else if ( ! hasMapped)
            readTensorValues(buffer, builder);  // dense: plain values
        else if (hasMapped && hasIndexed)
            readTensorBlocks(buffer, builder);  // mixed: blocks
        else
            readTensorCells(buffer, builder);   // sparse: cells object
    }

    /** Reads an 'address' object into a TensorAddress for the given (sub)type. */
    private static TensorAddress readAddress(TokenBuffer buffer, TensorType type) {
        expectObjectStart(buffer.current());
        TensorAddress.Builder builder = new TensorAddress.Builder(type);
        int initNesting = buffer.nesting();
        for (buffer.next(); buffer.nesting() >= initNesting; buffer.next())
            builder.add(buffer.currentName(), buffer.currentText());
        expectObjectEnd(buffer.current());
        return builder.build();
    }

    /**
     * Reads values for a tensor subspace block
     *
     * @param buffer the buffer containing the values
     * @param size the expected number of values
     * @param address the address for the block for error reporting, or null if not known
     * @param type the type of the tensor we are reading
     * @return the values read
     * @throws IllegalArgumentException if the number of values read differs from size
     */
    private static double[] readValues(TokenBuffer buffer, int size, TensorAddress address, TensorType type) {
        int index = 0;
        double[] values = new double[size];
        if (buffer.current() == JsonToken.VALUE_STRING) {
            // Hex string short form for the whole block
            values = decodeHexString(buffer.currentText(), type.valueType());
            index = values.length;
        } else {
            expectArrayStart(buffer.current());
            int initNesting = buffer.nesting();
            for (buffer.next(); buffer.nesting() >= initNesting; buffer.next()) {
                if (buffer.current() == JsonToken.START_ARRAY || buffer.current() == JsonToken.END_ARRAY) continue;
                values[index++] = readDouble(buffer);
            }
            expectCompositeEnd(buffer.current());
        }
        if (index != size)
            throw new IllegalArgumentException((address != null ? "At " + address.toString(type) + ": " : "") +
                                               "Expected " + size + " values, but got " + index);
        return values;
    }

    /**
     * Parses the current token as a double; string tokens go through
     * decodeNumberString to support special number encodings.
     *
     * @throws IllegalArgumentException if the token is not parseable as a number
     */
    private static double readDouble(TokenBuffer buffer) {
        try {
            if (buffer.current() == JsonToken.VALUE_STRING) {
                return decodeNumberString(buffer.currentText());
            }
            return Double.parseDouble(buffer.currentText());
        } catch (NumberFormatException e) {
            throw new IllegalArgumentException("Expected a number but got '" + buffer.currentText() + "'");
        }
    }

    /** Interprets a bare label as an address in the single dimension of the given type. */
    private static TensorAddress asAddress(String label, TensorType type) {
        if (type.dimensions().size() != 1)
            throw new IllegalArgumentException("Expected a tensor with a single dimension but got '" + type + "'");
        return new TensorAddress.Builder(type).add(type.dimensions().get(0).name(), label).build();
    }

}
class TensorReader { public static final String TENSOR_TYPE = "type"; public static final String TENSOR_ADDRESS = "address"; public static final String TENSOR_CELLS = "cells"; public static final String TENSOR_VALUES = "values"; public static final String TENSOR_BLOCKS = "blocks"; public static final String TENSOR_VALUE = "value"; static void fillTensor(TokenBuffer buffer, TensorFieldValue tensorFieldValue) { Tensor.Builder builder = Tensor.Builder.of(tensorFieldValue.getDataType().getTensorType()); expectOneOf(buffer.current(), JsonToken.START_OBJECT, JsonToken.START_ARRAY); int initNesting = buffer.nesting(); while (true) { Supplier<Token> lookahead = buffer.lookahead(); Token next = lookahead.get(); if (TENSOR_CELLS.equals(next.name) && ! primitiveContent(next.token, lookahead.get().token)) { buffer.next(); readTensorCells(buffer, builder); } else if (TENSOR_VALUES.equals(next.name) && builder.type().dimensions().stream().allMatch(Dimension::isIndexed)) { buffer.next(); readTensorValues(buffer, builder); } else if (TENSOR_BLOCKS.equals(next.name)) { buffer.next(); readTensorBlocks(buffer, builder); } else if (TENSOR_TYPE.equals(next.name) && next.token == JsonToken.VALUE_STRING) { buffer.next(); } else if (buffer.nesting() == initNesting && JsonToken.END_OBJECT == next.token) { buffer.next(); break; } else { readDirectTensorValue(buffer, builder); break; } } expectOneOf(buffer.current(), JsonToken.END_OBJECT, JsonToken.END_ARRAY); tensorFieldValue.assign(builder.build()); } static boolean primitiveContent(JsonToken current, JsonToken next) { if (current.isScalarValue()) return true; if (current == JsonToken.START_ARRAY) { if (next == JsonToken.END_ARRAY) return false; if (next.isScalarValue()) return true; } return false; } static void readTensorCells(TokenBuffer buffer, Tensor.Builder builder) { if (buffer.current() == JsonToken.START_ARRAY) { int initNesting = buffer.nesting(); for (buffer.next(); buffer.nesting() >= initNesting; buffer.next()) 
readTensorCell(buffer, builder); } else if (buffer.current() == JsonToken.START_OBJECT) { int initNesting = buffer.nesting(); for (buffer.next(); buffer.nesting() >= initNesting; buffer.next()) builder.cell(asAddress(buffer.currentName(), builder.type()), readDouble(buffer)); } else { throw new IllegalArgumentException("Expected 'cells' to contain an array or an object, but got " + buffer.current()); } expectCompositeEnd(buffer.current()); } private static void readTensorCell(TokenBuffer buffer, Tensor.Builder builder) { expectObjectStart(buffer.current()); TensorAddress address = null; Double value = null; int initNesting = buffer.nesting(); for (buffer.next(); buffer.nesting() >= initNesting; buffer.next()) { String currentName = buffer.currentName(); if (TensorReader.TENSOR_ADDRESS.equals(currentName)) { address = readAddress(buffer, builder.type()); } else if (TensorReader.TENSOR_VALUE.equals(currentName)) { value = readDouble(buffer); } } expectObjectEnd(buffer.current()); if (address == null) throw new IllegalArgumentException("Expected an object in a tensor 'cells' array to contain an 'address' field"); if (value == null) throw new IllegalArgumentException("Expected an object in a tensor 'cells' array to contain a 'value' field"); builder.cell(address, value); } private static void readTensorValues(TokenBuffer buffer, Tensor.Builder builder) { if ( ! (builder instanceof IndexedTensor.BoundBuilder indexedBuilder)) throw new IllegalArgumentException("The 'values' field can only be used with dense tensors. 
" + "Use 'cells' or 'blocks' instead"); if (buffer.current() == JsonToken.VALUE_STRING) { double[] decoded = decodeHexString(buffer.currentText(), builder.type().valueType()); if (decoded.length == 0) throw new IllegalArgumentException("The 'values' string does not contain any values"); for (int i = 0; i < decoded.length; i++) { indexedBuilder.cellByDirectIndex(i, decoded[i]); } return; } int index = 0; int initNesting = buffer.nesting(); for (buffer.next(); buffer.nesting() >= initNesting; buffer.next()) { if (buffer.current() == JsonToken.START_ARRAY || buffer.current() == JsonToken.END_ARRAY) continue; indexedBuilder.cellByDirectIndex(index++, readDouble(buffer)); } if (index == 0) throw new IllegalArgumentException("The 'values' array does not contain any values"); expectCompositeEnd(buffer.current()); } static void readTensorBlocks(TokenBuffer buffer, Tensor.Builder builder) { if ( ! (builder instanceof MixedTensor.BoundBuilder mixedBuilder)) throw new IllegalArgumentException("The 'blocks' field can only be used with mixed tensors with bound dimensions. 
" + "Use 'cells' or 'values' instead"); if (buffer.current() == JsonToken.START_ARRAY) { int initNesting = buffer.nesting(); for (buffer.next(); buffer.nesting() >= initNesting; buffer.next()) readTensorBlock(buffer, mixedBuilder); } else if (buffer.current() == JsonToken.START_OBJECT) { int initNesting = buffer.nesting(); for (buffer.next(); buffer.nesting() >= initNesting; buffer.next()) { TensorAddress mappedAddress = asAddress(buffer.currentName(), builder.type().mappedSubtype()); mixedBuilder.block(mappedAddress, readValues(buffer, (int) mixedBuilder.denseSubspaceSize(), mappedAddress, mixedBuilder.type())); } } else { throw new IllegalArgumentException("Expected 'blocks' to contain an array or an object, but got " + buffer.current()); } expectCompositeEnd(buffer.current()); } private static void readTensorBlock(TokenBuffer buffer, MixedTensor.BoundBuilder mixedBuilder) { expectObjectStart(buffer.current()); TensorAddress address = null; double[] values = null; int initNesting = buffer.nesting(); for (buffer.next(); buffer.nesting() >= initNesting; buffer.next()) { String currentName = buffer.currentName(); if (TensorReader.TENSOR_ADDRESS.equals(currentName)) address = readAddress(buffer, mixedBuilder.type().mappedSubtype()); else if (TensorReader.TENSOR_VALUES.equals(currentName)) values = readValues(buffer, (int)mixedBuilder.denseSubspaceSize(), address, mixedBuilder.type()); } expectObjectEnd(buffer.current()); if (address == null) throw new IllegalArgumentException("Expected a 'blocks' array object to contain an object 'address'"); if (values == null) throw new IllegalArgumentException("Expected a 'blocks' array object to contain an array 'values'"); mixedBuilder.block(address, values); } /** Reads a tensor value directly at the root, where the format is decided by the tensor type. 
*/ private static void readDirectTensorValue(TokenBuffer buffer, Tensor.Builder builder) { boolean hasIndexed = builder.type().dimensions().stream().anyMatch(TensorType.Dimension::isIndexed); boolean hasMapped = builder.type().dimensions().stream().anyMatch(TensorType.Dimension::isMapped); if (isArrayOfObjects(buffer)) readTensorCells(buffer, builder); else if ( ! hasMapped) readTensorValues(buffer, builder); else if (hasMapped && hasIndexed) readTensorBlocks(buffer, builder); else readTensorCells(buffer, builder); } private static TensorAddress readAddress(TokenBuffer buffer, TensorType type) { expectObjectStart(buffer.current()); TensorAddress.Builder builder = new TensorAddress.Builder(type); int initNesting = buffer.nesting(); for (buffer.next(); buffer.nesting() >= initNesting; buffer.next()) builder.add(buffer.currentName(), buffer.currentText()); expectObjectEnd(buffer.current()); return builder.build(); } /** * Reads values for a tensor subspace block * * @param buffer the buffer containing the values * @param size the expected number of values * @param address the address for the block for error reporting, or null if not known * @param type the type of the tensor we are reading * @return the values read */ private static double[] readValues(TokenBuffer buffer, int size, TensorAddress address, TensorType type) { int index = 0; double[] values = new double[size]; if (buffer.current() == JsonToken.VALUE_STRING) { values = decodeHexString(buffer.currentText(), type.valueType()); index = values.length; } else { expectArrayStart(buffer.current()); int initNesting = buffer.nesting(); for (buffer.next(); buffer.nesting() >= initNesting; buffer.next()) { if (buffer.current() == JsonToken.START_ARRAY || buffer.current() == JsonToken.END_ARRAY) continue; values[index++] = readDouble(buffer); } expectCompositeEnd(buffer.current()); } if (index != size) throw new IllegalArgumentException((address != null ? 
"At " + address.toString(type) + ": " : "") + "Expected " + size + " values, but got " + index); return values; } private static double readDouble(TokenBuffer buffer) { try { if (buffer.current() == JsonToken.VALUE_STRING) { return decodeNumberString(buffer.currentText()); } return Double.parseDouble(buffer.currentText()); } catch (NumberFormatException e) { throw new IllegalArgumentException("Expected a number but got '" + buffer.currentText() + "'"); } } private static TensorAddress asAddress(String label, TensorType type) { if (type.dimensions().size() != 1) throw new IllegalArgumentException("Expected a tensor with a single dimension but got '" + type + "'"); return new TensorAddress.Builder(type).add(type.dimensions().get(0).name(), label).build(); } }
Consider breaking this assignment out. I find it very easy to accidentally miss such data flows, in particular when the `if` is visually structured as a guard-style statement.
/**
 * Returns a supplier which looks ahead in this buffer without advancing it,
 * pulling further tokens from the parser into the buffer as needed.
 * The supplier returns null once the lookahead has exited the JSON structure
 * it started in (its local nesting reaches 0).
 */
public Supplier<Token> lookahead() {
    return new Supplier<>() {
        int localNesting = nesting();
        Supplier<Token> buffered = LazyTokenBuffer.super.lookahead();
        @Override public Token get() {
            if (localNesting == 0)
                return null;
            Token token = buffered.get();
            if (token == null) {
                // The buffered view is exhausted: read the next token from the parser,
                // and retain it in the buffer so regular iteration also sees it.
                token = nextToken();
                tokens.add(token);
            }
            localNesting += nestingOffset(token.token);
            return token;
        }
    };
}
if (token == null) tokens.add(token = nextToken());
public Supplier<Token> lookahead() { return new Supplier<>() { int localNesting = nesting(); Supplier<Token> buffered = LazyTokenBuffer.super.lookahead(); @Override public Token get() { if (localNesting == 0) return null; Token token = buffered.get(); if (token == null) { token = nextToken(); tokens.add(token); } localNesting += nestingOffset(token.token); return token; } }; }
class LazyTokenBuffer extends TokenBuffer { private final JsonParser parser; public LazyTokenBuffer(JsonParser parser) { this.parser = parser; try { addFromParser(parser); } catch (IOException e) { throw new IllegalArgumentException("failed parsing document JSON", e); } if (JsonToken.START_OBJECT != current()) throw new IllegalArgumentException("expected start of JSON object, but got " + current()); updateNesting(current()); } void advance() { super.advance(); if (tokens.isEmpty() && nesting() > 0) tokens.add(nextToken()); } @Override private Token nextToken() { try { JsonToken token = parser.nextValue(); if (token == null) throw new IllegalStateException("no more JSON tokens"); return new Token(token, parser.getCurrentName(), parser.getText()); } catch (IOException e) { throw new IllegalArgumentException("failed reading document JSON", e); } } }
class LazyTokenBuffer extends TokenBuffer { private final JsonParser parser; public LazyTokenBuffer(JsonParser parser) { this.parser = parser; try { addFromParser(parser); } catch (IOException e) { throw new IllegalArgumentException("failed parsing document JSON", e); } if (JsonToken.START_OBJECT != current()) throw new IllegalArgumentException("expected start of JSON object, but got " + current()); updateNesting(current()); } void advance() { super.advance(); if (tokens.isEmpty() && nesting() > 0) tokens.add(nextToken()); } @Override private Token nextToken() { try { JsonToken token = parser.nextValue(); if (token == null) throw new IllegalStateException("no more JSON tokens"); return new Token(token, parser.getCurrentName(), parser.getText()); } catch (IOException e) { throw new IllegalArgumentException("failed reading document JSON", e); } } }
```suggestion while ((next = lookahead.get()).token == JsonToken.START_ARRAY) { } ```
/**
 * Returns whether the buffer, positioned at an array start, holds a (possibly
 * nested) array whose first non-array token is an object start — i.e., verbose
 * tensor cells rather than plain values.
 */
private static boolean isArrayOfObjects(TokenBuffer buffer) {
    if (buffer.current() != JsonToken.START_ARRAY)
        return false;
    Supplier<Token> lookahead = buffer.lookahead();
    Token next;
    // Skip past any nested array starts to reach the first non-array token.
    // (Explicit empty block rather than a bare ';', which is easy to misread.)
    while ((next = lookahead.get()).token == JsonToken.START_ARRAY) { }
    return next.token == JsonToken.START_OBJECT;
}
while ((next = lookahead.get()).token == JsonToken.START_ARRAY);
private static boolean isArrayOfObjects(TokenBuffer buffer) { if (buffer.current() != JsonToken.START_ARRAY) return false; Supplier<Token> lookahead = buffer.lookahead(); Token next; while ((next = lookahead.get()).token == JsonToken.START_ARRAY) { } return next.token == JsonToken.START_OBJECT; }
class TensorReader { public static final String TENSOR_TYPE = "type"; public static final String TENSOR_ADDRESS = "address"; public static final String TENSOR_CELLS = "cells"; public static final String TENSOR_VALUES = "values"; public static final String TENSOR_BLOCKS = "blocks"; public static final String TENSOR_VALUE = "value"; static void fillTensor(TokenBuffer buffer, TensorFieldValue tensorFieldValue) { Tensor.Builder builder = Tensor.Builder.of(tensorFieldValue.getDataType().getTensorType()); expectOneOf(buffer.current(), JsonToken.START_OBJECT, JsonToken.START_ARRAY); int initNesting = buffer.nesting(); while (true) { Supplier<Token> lookahead = buffer.lookahead(); Token next = lookahead.get(); if (TENSOR_CELLS.equals(next.name) && ! primitiveContent(next.token, lookahead.get().token)) { buffer.next(); readTensorCells(buffer, builder); } else if (TENSOR_VALUES.equals(next.name) && builder.type().dimensions().stream().allMatch(Dimension::isIndexed)) { buffer.next(); readTensorValues(buffer, builder); } else if (TENSOR_BLOCKS.equals(next.name)) { buffer.next(); readTensorBlocks(buffer, builder); } else if (TENSOR_TYPE.equals(next.name) && next.token == JsonToken.VALUE_STRING) { buffer.next(); } else if (buffer.nesting() == initNesting && JsonToken.END_OBJECT == next.token) { buffer.next(); break; } else { readDirectTensorValue(buffer, builder); break; } } expectOneOf(buffer.current(), JsonToken.END_OBJECT, JsonToken.END_ARRAY); tensorFieldValue.assign(builder.build()); } static boolean primitiveContent(JsonToken current, JsonToken next) { if (current.isScalarValue()) return true; if (current == JsonToken.START_ARRAY) { if (next == JsonToken.END_ARRAY) return false; if (next.isScalarValue()) return true; } return false; } static void readTensorCells(TokenBuffer buffer, Tensor.Builder builder) { if (buffer.current() == JsonToken.START_ARRAY) { int initNesting = buffer.nesting(); for (buffer.next(); buffer.nesting() >= initNesting; buffer.next()) 
readTensorCell(buffer, builder); } else if (buffer.current() == JsonToken.START_OBJECT) { int initNesting = buffer.nesting(); for (buffer.next(); buffer.nesting() >= initNesting; buffer.next()) builder.cell(asAddress(buffer.currentName(), builder.type()), readDouble(buffer)); } else { throw new IllegalArgumentException("Expected 'cells' to contain an array or an object, but got " + buffer.current()); } expectCompositeEnd(buffer.current()); } private static void readTensorCell(TokenBuffer buffer, Tensor.Builder builder) { expectObjectStart(buffer.current()); TensorAddress address = null; Double value = null; int initNesting = buffer.nesting(); for (buffer.next(); buffer.nesting() >= initNesting; buffer.next()) { String currentName = buffer.currentName(); if (TensorReader.TENSOR_ADDRESS.equals(currentName)) { address = readAddress(buffer, builder.type()); } else if (TensorReader.TENSOR_VALUE.equals(currentName)) { value = readDouble(buffer); } } expectObjectEnd(buffer.current()); if (address == null) throw new IllegalArgumentException("Expected an object in a tensor 'cells' array to contain an 'address' field"); if (value == null) throw new IllegalArgumentException("Expected an object in a tensor 'cells' array to contain a 'value' field"); builder.cell(address, value); } private static void readTensorValues(TokenBuffer buffer, Tensor.Builder builder) { if ( ! (builder instanceof IndexedTensor.BoundBuilder indexedBuilder)) throw new IllegalArgumentException("The 'values' field can only be used with dense tensors. 
" + "Use 'cells' or 'blocks' instead"); if (buffer.current() == JsonToken.VALUE_STRING) { double[] decoded = decodeHexString(buffer.currentText(), builder.type().valueType()); if (decoded.length == 0) throw new IllegalArgumentException("The 'values' string does not contain any values"); for (int i = 0; i < decoded.length; i++) { indexedBuilder.cellByDirectIndex(i, decoded[i]); } return; } int index = 0; int initNesting = buffer.nesting(); for (buffer.next(); buffer.nesting() >= initNesting; buffer.next()) { if (buffer.current() == JsonToken.START_ARRAY || buffer.current() == JsonToken.END_ARRAY) continue; indexedBuilder.cellByDirectIndex(index++, readDouble(buffer)); } if (index == 0) throw new IllegalArgumentException("The 'values' array does not contain any values"); expectCompositeEnd(buffer.current()); } static void readTensorBlocks(TokenBuffer buffer, Tensor.Builder builder) { if ( ! (builder instanceof MixedTensor.BoundBuilder mixedBuilder)) throw new IllegalArgumentException("The 'blocks' field can only be used with mixed tensors with bound dimensions. 
" + "Use 'cells' or 'values' instead"); if (buffer.current() == JsonToken.START_ARRAY) { int initNesting = buffer.nesting(); for (buffer.next(); buffer.nesting() >= initNesting; buffer.next()) readTensorBlock(buffer, mixedBuilder); } else if (buffer.current() == JsonToken.START_OBJECT) { int initNesting = buffer.nesting(); for (buffer.next(); buffer.nesting() >= initNesting; buffer.next()) { TensorAddress mappedAddress = asAddress(buffer.currentName(), builder.type().mappedSubtype()); mixedBuilder.block(mappedAddress, readValues(buffer, (int) mixedBuilder.denseSubspaceSize(), mappedAddress, mixedBuilder.type())); } } else { throw new IllegalArgumentException("Expected 'blocks' to contain an array or an object, but got " + buffer.current()); } expectCompositeEnd(buffer.current()); } private static void readTensorBlock(TokenBuffer buffer, MixedTensor.BoundBuilder mixedBuilder) { expectObjectStart(buffer.current()); TensorAddress address = null; double[] values = null; int initNesting = buffer.nesting(); for (buffer.next(); buffer.nesting() >= initNesting; buffer.next()) { String currentName = buffer.currentName(); if (TensorReader.TENSOR_ADDRESS.equals(currentName)) address = readAddress(buffer, mixedBuilder.type().mappedSubtype()); else if (TensorReader.TENSOR_VALUES.equals(currentName)) values = readValues(buffer, (int)mixedBuilder.denseSubspaceSize(), address, mixedBuilder.type()); } expectObjectEnd(buffer.current()); if (address == null) throw new IllegalArgumentException("Expected a 'blocks' array object to contain an object 'address'"); if (values == null) throw new IllegalArgumentException("Expected a 'blocks' array object to contain an array 'values'"); mixedBuilder.block(address, values); } /** Reads a tensor value directly at the root, where the format is decided by the tensor type. 
*/ private static void readDirectTensorValue(TokenBuffer buffer, Tensor.Builder builder) { boolean hasIndexed = builder.type().dimensions().stream().anyMatch(TensorType.Dimension::isIndexed); boolean hasMapped = builder.type().dimensions().stream().anyMatch(TensorType.Dimension::isMapped); if (isArrayOfObjects(buffer)) readTensorCells(buffer, builder); else if ( ! hasMapped) readTensorValues(buffer, builder); else if (hasMapped && hasIndexed) readTensorBlocks(buffer, builder); else readTensorCells(buffer, builder); } private static TensorAddress readAddress(TokenBuffer buffer, TensorType type) { expectObjectStart(buffer.current()); TensorAddress.Builder builder = new TensorAddress.Builder(type); int initNesting = buffer.nesting(); for (buffer.next(); buffer.nesting() >= initNesting; buffer.next()) builder.add(buffer.currentName(), buffer.currentText()); expectObjectEnd(buffer.current()); return builder.build(); } /** * Reads values for a tensor subspace block * * @param buffer the buffer containing the values * @param size the expected number of values * @param address the address for the block for error reporting, or null if not known * @param type the type of the tensor we are reading * @return the values read */ private static double[] readValues(TokenBuffer buffer, int size, TensorAddress address, TensorType type) { int index = 0; double[] values = new double[size]; if (buffer.current() == JsonToken.VALUE_STRING) { values = decodeHexString(buffer.currentText(), type.valueType()); index = values.length; } else { expectArrayStart(buffer.current()); int initNesting = buffer.nesting(); for (buffer.next(); buffer.nesting() >= initNesting; buffer.next()) { if (buffer.current() == JsonToken.START_ARRAY || buffer.current() == JsonToken.END_ARRAY) continue; values[index++] = readDouble(buffer); } expectCompositeEnd(buffer.current()); } if (index != size) throw new IllegalArgumentException((address != null ? 
"At " + address.toString(type) + ": " : "") + "Expected " + size + " values, but got " + index); return values; } private static double readDouble(TokenBuffer buffer) { try { if (buffer.current() == JsonToken.VALUE_STRING) { return decodeNumberString(buffer.currentText()); } return Double.parseDouble(buffer.currentText()); } catch (NumberFormatException e) { throw new IllegalArgumentException("Expected a number but got '" + buffer.currentText() + "'"); } } private static TensorAddress asAddress(String label, TensorType type) { if (type.dimensions().size() != 1) throw new IllegalArgumentException("Expected a tensor with a single dimension but got '" + type + "'"); return new TensorAddress.Builder(type).add(type.dimensions().get(0).name(), label).build(); } }
class TensorReader { public static final String TENSOR_TYPE = "type"; public static final String TENSOR_ADDRESS = "address"; public static final String TENSOR_CELLS = "cells"; public static final String TENSOR_VALUES = "values"; public static final String TENSOR_BLOCKS = "blocks"; public static final String TENSOR_VALUE = "value"; static void fillTensor(TokenBuffer buffer, TensorFieldValue tensorFieldValue) { Tensor.Builder builder = Tensor.Builder.of(tensorFieldValue.getDataType().getTensorType()); expectOneOf(buffer.current(), JsonToken.START_OBJECT, JsonToken.START_ARRAY); int initNesting = buffer.nesting(); while (true) { Supplier<Token> lookahead = buffer.lookahead(); Token next = lookahead.get(); if (TENSOR_CELLS.equals(next.name) && ! primitiveContent(next.token, lookahead.get().token)) { buffer.next(); readTensorCells(buffer, builder); } else if (TENSOR_VALUES.equals(next.name) && builder.type().dimensions().stream().allMatch(Dimension::isIndexed)) { buffer.next(); readTensorValues(buffer, builder); } else if (TENSOR_BLOCKS.equals(next.name)) { buffer.next(); readTensorBlocks(buffer, builder); } else if (TENSOR_TYPE.equals(next.name) && next.token == JsonToken.VALUE_STRING) { buffer.next(); } else if (buffer.nesting() == initNesting && JsonToken.END_OBJECT == next.token) { buffer.next(); break; } else { readDirectTensorValue(buffer, builder); break; } } expectOneOf(buffer.current(), JsonToken.END_OBJECT, JsonToken.END_ARRAY); tensorFieldValue.assign(builder.build()); } static boolean primitiveContent(JsonToken current, JsonToken next) { if (current.isScalarValue()) return true; if (current == JsonToken.START_ARRAY) { if (next == JsonToken.END_ARRAY) return false; if (next.isScalarValue()) return true; } return false; } static void readTensorCells(TokenBuffer buffer, Tensor.Builder builder) { if (buffer.current() == JsonToken.START_ARRAY) { int initNesting = buffer.nesting(); for (buffer.next(); buffer.nesting() >= initNesting; buffer.next()) 
readTensorCell(buffer, builder); } else if (buffer.current() == JsonToken.START_OBJECT) { int initNesting = buffer.nesting(); for (buffer.next(); buffer.nesting() >= initNesting; buffer.next()) builder.cell(asAddress(buffer.currentName(), builder.type()), readDouble(buffer)); } else { throw new IllegalArgumentException("Expected 'cells' to contain an array or an object, but got " + buffer.current()); } expectCompositeEnd(buffer.current()); } private static void readTensorCell(TokenBuffer buffer, Tensor.Builder builder) { expectObjectStart(buffer.current()); TensorAddress address = null; Double value = null; int initNesting = buffer.nesting(); for (buffer.next(); buffer.nesting() >= initNesting; buffer.next()) { String currentName = buffer.currentName(); if (TensorReader.TENSOR_ADDRESS.equals(currentName)) { address = readAddress(buffer, builder.type()); } else if (TensorReader.TENSOR_VALUE.equals(currentName)) { value = readDouble(buffer); } } expectObjectEnd(buffer.current()); if (address == null) throw new IllegalArgumentException("Expected an object in a tensor 'cells' array to contain an 'address' field"); if (value == null) throw new IllegalArgumentException("Expected an object in a tensor 'cells' array to contain a 'value' field"); builder.cell(address, value); } private static void readTensorValues(TokenBuffer buffer, Tensor.Builder builder) { if ( ! (builder instanceof IndexedTensor.BoundBuilder indexedBuilder)) throw new IllegalArgumentException("The 'values' field can only be used with dense tensors. 
" + "Use 'cells' or 'blocks' instead"); if (buffer.current() == JsonToken.VALUE_STRING) { double[] decoded = decodeHexString(buffer.currentText(), builder.type().valueType()); if (decoded.length == 0) throw new IllegalArgumentException("The 'values' string does not contain any values"); for (int i = 0; i < decoded.length; i++) { indexedBuilder.cellByDirectIndex(i, decoded[i]); } return; } int index = 0; int initNesting = buffer.nesting(); for (buffer.next(); buffer.nesting() >= initNesting; buffer.next()) { if (buffer.current() == JsonToken.START_ARRAY || buffer.current() == JsonToken.END_ARRAY) continue; indexedBuilder.cellByDirectIndex(index++, readDouble(buffer)); } if (index == 0) throw new IllegalArgumentException("The 'values' array does not contain any values"); expectCompositeEnd(buffer.current()); } static void readTensorBlocks(TokenBuffer buffer, Tensor.Builder builder) { if ( ! (builder instanceof MixedTensor.BoundBuilder mixedBuilder)) throw new IllegalArgumentException("The 'blocks' field can only be used with mixed tensors with bound dimensions. 
" + "Use 'cells' or 'values' instead"); if (buffer.current() == JsonToken.START_ARRAY) { int initNesting = buffer.nesting(); for (buffer.next(); buffer.nesting() >= initNesting; buffer.next()) readTensorBlock(buffer, mixedBuilder); } else if (buffer.current() == JsonToken.START_OBJECT) { int initNesting = buffer.nesting(); for (buffer.next(); buffer.nesting() >= initNesting; buffer.next()) { TensorAddress mappedAddress = asAddress(buffer.currentName(), builder.type().mappedSubtype()); mixedBuilder.block(mappedAddress, readValues(buffer, (int) mixedBuilder.denseSubspaceSize(), mappedAddress, mixedBuilder.type())); } } else { throw new IllegalArgumentException("Expected 'blocks' to contain an array or an object, but got " + buffer.current()); } expectCompositeEnd(buffer.current()); } private static void readTensorBlock(TokenBuffer buffer, MixedTensor.BoundBuilder mixedBuilder) { expectObjectStart(buffer.current()); TensorAddress address = null; double[] values = null; int initNesting = buffer.nesting(); for (buffer.next(); buffer.nesting() >= initNesting; buffer.next()) { String currentName = buffer.currentName(); if (TensorReader.TENSOR_ADDRESS.equals(currentName)) address = readAddress(buffer, mixedBuilder.type().mappedSubtype()); else if (TensorReader.TENSOR_VALUES.equals(currentName)) values = readValues(buffer, (int)mixedBuilder.denseSubspaceSize(), address, mixedBuilder.type()); } expectObjectEnd(buffer.current()); if (address == null) throw new IllegalArgumentException("Expected a 'blocks' array object to contain an object 'address'"); if (values == null) throw new IllegalArgumentException("Expected a 'blocks' array object to contain an array 'values'"); mixedBuilder.block(address, values); } /** Reads a tensor value directly at the root, where the format is decided by the tensor type. 
*/ private static void readDirectTensorValue(TokenBuffer buffer, Tensor.Builder builder) { boolean hasIndexed = builder.type().dimensions().stream().anyMatch(TensorType.Dimension::isIndexed); boolean hasMapped = builder.type().dimensions().stream().anyMatch(TensorType.Dimension::isMapped); if (isArrayOfObjects(buffer)) readTensorCells(buffer, builder); else if ( ! hasMapped) readTensorValues(buffer, builder); else if (hasMapped && hasIndexed) readTensorBlocks(buffer, builder); else readTensorCells(buffer, builder); } private static TensorAddress readAddress(TokenBuffer buffer, TensorType type) { expectObjectStart(buffer.current()); TensorAddress.Builder builder = new TensorAddress.Builder(type); int initNesting = buffer.nesting(); for (buffer.next(); buffer.nesting() >= initNesting; buffer.next()) builder.add(buffer.currentName(), buffer.currentText()); expectObjectEnd(buffer.current()); return builder.build(); } /** * Reads values for a tensor subspace block * * @param buffer the buffer containing the values * @param size the expected number of values * @param address the address for the block for error reporting, or null if not known * @param type the type of the tensor we are reading * @return the values read */ private static double[] readValues(TokenBuffer buffer, int size, TensorAddress address, TensorType type) { int index = 0; double[] values = new double[size]; if (buffer.current() == JsonToken.VALUE_STRING) { values = decodeHexString(buffer.currentText(), type.valueType()); index = values.length; } else { expectArrayStart(buffer.current()); int initNesting = buffer.nesting(); for (buffer.next(); buffer.nesting() >= initNesting; buffer.next()) { if (buffer.current() == JsonToken.START_ARRAY || buffer.current() == JsonToken.END_ARRAY) continue; values[index++] = readDouble(buffer); } expectCompositeEnd(buffer.current()); } if (index != size) throw new IllegalArgumentException((address != null ? 
"At " + address.toString(type) + ": " : "") + "Expected " + size + " values, but got " + index); return values; } private static double readDouble(TokenBuffer buffer) { try { if (buffer.current() == JsonToken.VALUE_STRING) { return decodeNumberString(buffer.currentText()); } return Double.parseDouble(buffer.currentText()); } catch (NumberFormatException e) { throw new IllegalArgumentException("Expected a number but got '" + buffer.currentText() + "'"); } } private static TensorAddress asAddress(String label, TensorType type) { if (type.dimensions().size() != 1) throw new IllegalArgumentException("Expected a tensor with a single dimension but got '" + type + "'"); return new TensorAddress.Builder(type).add(type.dimensions().get(0).name(), label).build(); } }
```suggestion if (token == null) { token = nextToken(); tokens.add(token); } ```
public Supplier<Token> lookahead() { return new Supplier<>() { int localNesting = nesting(); Supplier<Token> buffered = LazyTokenBuffer.super.lookahead(); @Override public Token get() { if (localNesting == 0) return null; Token token = buffered.get(); if (token == null) tokens.add(token = nextToken()); localNesting += nestingOffset(token.token); return token; } }; }
if (token == null) tokens.add(token = nextToken());
public Supplier<Token> lookahead() { return new Supplier<>() { int localNesting = nesting(); Supplier<Token> buffered = LazyTokenBuffer.super.lookahead(); @Override public Token get() { if (localNesting == 0) return null; Token token = buffered.get(); if (token == null) { token = nextToken(); tokens.add(token); } localNesting += nestingOffset(token.token); return token; } }; }
class LazyTokenBuffer extends TokenBuffer { private final JsonParser parser; public LazyTokenBuffer(JsonParser parser) { this.parser = parser; try { addFromParser(parser); } catch (IOException e) { throw new IllegalArgumentException("failed parsing document JSON", e); } if (JsonToken.START_OBJECT != current()) throw new IllegalArgumentException("expected start of JSON object, but got " + current()); updateNesting(current()); } void advance() { super.advance(); if (tokens.isEmpty() && nesting() > 0) tokens.add(nextToken()); } @Override private Token nextToken() { try { JsonToken token = parser.nextValue(); if (token == null) throw new IllegalStateException("no more JSON tokens"); return new Token(token, parser.getCurrentName(), parser.getText()); } catch (IOException e) { throw new IllegalArgumentException("failed reading document JSON", e); } } }
class LazyTokenBuffer extends TokenBuffer { private final JsonParser parser; public LazyTokenBuffer(JsonParser parser) { this.parser = parser; try { addFromParser(parser); } catch (IOException e) { throw new IllegalArgumentException("failed parsing document JSON", e); } if (JsonToken.START_OBJECT != current()) throw new IllegalArgumentException("expected start of JSON object, but got " + current()); updateNesting(current()); } void advance() { super.advance(); if (tokens.isEmpty() && nesting() > 0) tokens.add(nextToken()); } @Override private Token nextToken() { try { JsonToken token = parser.nextValue(); if (token == null) throw new IllegalStateException("no more JSON tokens"); return new Token(token, parser.getCurrentName(), parser.getText()); } catch (IOException e) { throw new IllegalArgumentException("failed reading document JSON", e); } } }
`"create"` and `"condition"` fields now required to be of the correct type, instead of mismatches being silently ignored, as before. Other fields are still silently ignored. Also, I don't like that create and condition can be supplied both here, and through HTTP headers, for `/document/v1/`.
public ParsedDocumentOperation readSingleDocumentStreaming(DocumentOperationType operationType, String docIdString) { try { DocumentId docId = new DocumentId(docIdString); DocumentParseInfo documentParseInfo = new DocumentParseInfo(); documentParseInfo.documentId = docId; documentParseInfo.operationType = operationType; if (JsonToken.START_OBJECT != parser.nextValue()) throw new IllegalArgumentException("expected start of root object, got " + parser.currentToken()); Boolean create = null; String condition = null; ParsedDocumentOperation operation = null; while (JsonToken.END_OBJECT != parser.nextValue()) { switch (parser.getCurrentName()) { case FIELDS -> { documentParseInfo.fieldsBuffer = new LazyTokenBuffer(parser); VespaJsonDocumentReader vespaJsonDocumentReader = new VespaJsonDocumentReader(typeManager.getIgnoreUndefinedFields()); operation = vespaJsonDocumentReader.createDocumentOperation( getDocumentTypeFromString(documentParseInfo.documentId.getDocType(), typeManager), documentParseInfo); if ( ! documentParseInfo.fieldsBuffer.isEmpty()) throw new IllegalArgumentException("expected all content to be consumed by document parsing, but " + documentParseInfo.fieldsBuffer.nesting() + " levels remain"); } case CONDITION -> { if ( ! JsonToken.VALUE_STRING.equals(parser.currentToken()) && ! 
JsonToken.VALUE_NULL.equals(parser.currentToken())) throw new IllegalArgumentException("expected string value for condition, got " + parser.currentToken()); condition = parser.getValueAsString(); } case CREATE_IF_NON_EXISTENT -> { create = parser.getBooleanValue(); } default -> { if (parser.currentToken().isStructStart()) parser.skipChildren(); } } } if (null != parser.nextToken()) throw new IllegalArgumentException("expected end of input, got " + parser.currentToken()); assert null != operation: "VespaDocumentReader should throw on missing fields"; if (null != create) { switch (operationType) { case PUT -> ((DocumentPut) operation.operation()).setCreateIfNonExistent(create); case UPDATE -> ((DocumentUpdate) operation.operation()).setCreateIfNonExistent(create); case REMOVE -> throw new IllegalArgumentException(CREATE_IF_NON_EXISTENT + " is not supported for remove operations"); } } operation.operation().setCondition(TestAndSetCondition.fromConditionString(Optional.ofNullable(condition))); return operation; } catch (IOException e) { throw new IllegalArgumentException("failed parsing document", e); } }
}
public ParsedDocumentOperation readSingleDocumentStreaming(DocumentOperationType operationType, String docIdString) { try { DocumentId docId = new DocumentId(docIdString); DocumentParseInfo documentParseInfo = new DocumentParseInfo(); documentParseInfo.documentId = docId; documentParseInfo.operationType = operationType; if (JsonToken.START_OBJECT != parser.nextValue()) throw new IllegalArgumentException("expected start of root object, got " + parser.currentToken()); Boolean create = null; String condition = null; ParsedDocumentOperation operation = null; while (JsonToken.END_OBJECT != parser.nextValue()) { switch (parser.getCurrentName()) { case FIELDS -> { documentParseInfo.fieldsBuffer = new LazyTokenBuffer(parser); VespaJsonDocumentReader vespaJsonDocumentReader = new VespaJsonDocumentReader(typeManager.getIgnoreUndefinedFields()); operation = vespaJsonDocumentReader.createDocumentOperation( getDocumentTypeFromString(documentParseInfo.documentId.getDocType(), typeManager), documentParseInfo); if ( ! documentParseInfo.fieldsBuffer.isEmpty()) throw new IllegalArgumentException("expected all content to be consumed by document parsing, but " + documentParseInfo.fieldsBuffer.nesting() + " levels remain"); } case CONDITION -> { if ( ! JsonToken.VALUE_STRING.equals(parser.currentToken()) && ! 
JsonToken.VALUE_NULL.equals(parser.currentToken())) throw new IllegalArgumentException("expected string value for condition, got " + parser.currentToken()); condition = parser.getValueAsString(); } case CREATE_IF_NON_EXISTENT -> { create = parser.getBooleanValue(); } default -> { if (parser.currentToken().isStructStart()) parser.skipChildren(); } } } if (null != parser.nextToken()) throw new IllegalArgumentException("expected end of input, got " + parser.currentToken()); if (null == operation) throw new IllegalArgumentException("document is missing the required \"fields\" field"); if (null != create) { switch (operationType) { case PUT -> ((DocumentPut) operation.operation()).setCreateIfNonExistent(create); case UPDATE -> ((DocumentUpdate) operation.operation()).setCreateIfNonExistent(create); case REMOVE -> throw new IllegalArgumentException(CREATE_IF_NON_EXISTENT + " is not supported for remove operations"); } } operation.operation().setCondition(TestAndSetCondition.fromConditionString(Optional.ofNullable(condition))); return operation; } catch (IOException e) { throw new IllegalArgumentException("failed parsing document", e); } }
class JsonReader { private final JsonParser parser; private final DocumentTypeManager typeManager; private ReaderState state = ReaderState.AT_START; enum ReaderState { AT_START, READING, END_OF_FEED } public JsonReader(DocumentTypeManager typeManager, InputStream input, JsonFactory parserFactory) { this.typeManager = typeManager; try { parser = parserFactory.createParser(input); } catch (IOException e) { state = END_OF_FEED; throw new IllegalArgumentException(e); } } public Optional<DocumentParseInfo> parseDocument() throws IOException { DocumentParser documentParser = new DocumentParser(parser); return documentParser.parse(Optional.empty()); } /** * Reads a single operation. The operation is not expected to be part of an array. * * @param operationType the type of operation (update or put) * @param docIdString document ID * @return the parsed document operation */ ParsedDocumentOperation readSingleDocument(DocumentOperationType operationType, String docIdString) { DocumentId docId = new DocumentId(docIdString); DocumentParseInfo documentParseInfo; try { DocumentParser documentParser = new DocumentParser(parser); documentParseInfo = documentParser.parse(Optional.of(docId)).get(); } catch (IOException e) { state = END_OF_FEED; throw new IllegalArgumentException(e); } documentParseInfo.operationType = operationType; VespaJsonDocumentReader vespaJsonDocumentReader = new VespaJsonDocumentReader(typeManager.getIgnoreUndefinedFields()); ParsedDocumentOperation operation = vespaJsonDocumentReader.createDocumentOperation( getDocumentTypeFromString(documentParseInfo.documentId.getDocType(), typeManager), documentParseInfo); operation.operation().setCondition(TestAndSetCondition.fromConditionString(documentParseInfo.condition)); return operation; } /** * Reads a JSON which is expected to contain a single document operation, * and where other parameters, like the document ID and operation type, are supplied by other means. 
* * @param operationType the type of operation (update or put) * @param docIdString document ID * @return the parsed document operation */ /** Returns the next document operation, or null if we have reached the end */ public DocumentOperation next() { switch (state) { case AT_START: JsonToken t = nextToken(parser); expectArrayStart(t); state = ReaderState.READING; break; case END_OF_FEED: return null; case READING: break; } Optional<DocumentParseInfo> documentParseInfo; try { documentParseInfo = parseDocument(); } catch (IOException r) { state = END_OF_FEED; throw new IllegalArgumentException(r); } if ( ! documentParseInfo.isPresent()) { state = END_OF_FEED; return null; } VespaJsonDocumentReader vespaJsonDocumentReader = new VespaJsonDocumentReader(typeManager.getIgnoreUndefinedFields()); DocumentOperation operation = vespaJsonDocumentReader.createDocumentOperation( getDocumentTypeFromString(documentParseInfo.get().documentId.getDocType(), typeManager), documentParseInfo.get()).operation(); operation.setCondition(TestAndSetCondition.fromConditionString(documentParseInfo.get().condition)); return operation; } public DocumentType readDocumentType(DocumentId docId) { return getDocumentTypeFromString(docId.getDocType(), typeManager); } private static DocumentType getDocumentTypeFromString(String docTypeString, DocumentTypeManager typeManager) { final DocumentType docType = typeManager.getDocumentType(docTypeString); if (docType == null) throw new IllegalArgumentException(String.format("Document type %s does not exist", docTypeString)); return docType; } public JsonToken nextToken(JsonParser parser) { try { return parser.nextValue(); } catch (IOException e) { state = END_OF_FEED; throw new IllegalArgumentException(e); } } }
class JsonReader { private final JsonParser parser; private final DocumentTypeManager typeManager; private ReaderState state = ReaderState.AT_START; enum ReaderState { AT_START, READING, END_OF_FEED } public JsonReader(DocumentTypeManager typeManager, InputStream input, JsonFactory parserFactory) { this.typeManager = typeManager; try { parser = parserFactory.createParser(input); } catch (IOException e) { state = END_OF_FEED; throw new IllegalArgumentException(e); } } public Optional<DocumentParseInfo> parseDocument() throws IOException { DocumentParser documentParser = new DocumentParser(parser); return documentParser.parse(Optional.empty()); } /** * Reads a single operation. The operation is not expected to be part of an array. * * @param operationType the type of operation (update or put) * @param docIdString document ID * @return the parsed document operation */ ParsedDocumentOperation readSingleDocument(DocumentOperationType operationType, String docIdString) { DocumentId docId = new DocumentId(docIdString); DocumentParseInfo documentParseInfo; try { DocumentParser documentParser = new DocumentParser(parser); documentParseInfo = documentParser.parse(Optional.of(docId)).get(); } catch (IOException e) { state = END_OF_FEED; throw new IllegalArgumentException(e); } documentParseInfo.operationType = operationType; VespaJsonDocumentReader vespaJsonDocumentReader = new VespaJsonDocumentReader(typeManager.getIgnoreUndefinedFields()); ParsedDocumentOperation operation = vespaJsonDocumentReader.createDocumentOperation( getDocumentTypeFromString(documentParseInfo.documentId.getDocType(), typeManager), documentParseInfo); operation.operation().setCondition(TestAndSetCondition.fromConditionString(documentParseInfo.condition)); return operation; } /** * Reads a JSON which is expected to contain a single document operation, * and where other parameters, like the document ID and operation type, are supplied by other means. 
* * @param operationType the type of operation (update or put) * @param docIdString document ID * @return the parsed document operation */ /** Returns the next document operation, or null if we have reached the end */ public DocumentOperation next() { switch (state) { case AT_START: JsonToken t = nextToken(parser); expectArrayStart(t); state = ReaderState.READING; break; case END_OF_FEED: return null; case READING: break; } Optional<DocumentParseInfo> documentParseInfo; try { documentParseInfo = parseDocument(); } catch (IOException r) { state = END_OF_FEED; throw new IllegalArgumentException(r); } if ( ! documentParseInfo.isPresent()) { state = END_OF_FEED; return null; } VespaJsonDocumentReader vespaJsonDocumentReader = new VespaJsonDocumentReader(typeManager.getIgnoreUndefinedFields()); DocumentOperation operation = vespaJsonDocumentReader.createDocumentOperation( getDocumentTypeFromString(documentParseInfo.get().documentId.getDocType(), typeManager), documentParseInfo.get()).operation(); operation.setCondition(TestAndSetCondition.fromConditionString(documentParseInfo.get().condition)); return operation; } public DocumentType readDocumentType(DocumentId docId) { return getDocumentTypeFromString(docId.getDocType(), typeManager); } private static DocumentType getDocumentTypeFromString(String docTypeString, DocumentTypeManager typeManager) { final DocumentType docType = typeManager.getDocumentType(docTypeString); if (docType == null) throw new IllegalArgumentException(String.format("Document type %s does not exist", docTypeString)); return docType; } public JsonToken nextToken(JsonParser parser) { try { return parser.nextValue(); } catch (IOException e) { state = END_OF_FEED; throw new IllegalArgumentException(e); } } }
I think Java assertions are generally disabled in production, so this will likely be a no-op in practice. Consider replacing with an explicit check + your friendly neighborhood `IllegalStateException`
public ParsedDocumentOperation readSingleDocumentStreaming(DocumentOperationType operationType, String docIdString) { try { DocumentId docId = new DocumentId(docIdString); DocumentParseInfo documentParseInfo = new DocumentParseInfo(); documentParseInfo.documentId = docId; documentParseInfo.operationType = operationType; if (JsonToken.START_OBJECT != parser.nextValue()) throw new IllegalArgumentException("expected start of root object, got " + parser.currentToken()); Boolean create = null; String condition = null; ParsedDocumentOperation operation = null; while (JsonToken.END_OBJECT != parser.nextValue()) { switch (parser.getCurrentName()) { case FIELDS -> { documentParseInfo.fieldsBuffer = new LazyTokenBuffer(parser); VespaJsonDocumentReader vespaJsonDocumentReader = new VespaJsonDocumentReader(typeManager.getIgnoreUndefinedFields()); operation = vespaJsonDocumentReader.createDocumentOperation( getDocumentTypeFromString(documentParseInfo.documentId.getDocType(), typeManager), documentParseInfo); if ( ! documentParseInfo.fieldsBuffer.isEmpty()) throw new IllegalArgumentException("expected all content to be consumed by document parsing, but " + documentParseInfo.fieldsBuffer.nesting() + " levels remain"); } case CONDITION -> { if ( ! JsonToken.VALUE_STRING.equals(parser.currentToken()) && ! 
JsonToken.VALUE_NULL.equals(parser.currentToken())) throw new IllegalArgumentException("expected string value for condition, got " + parser.currentToken()); condition = parser.getValueAsString(); } case CREATE_IF_NON_EXISTENT -> { create = parser.getBooleanValue(); } default -> { if (parser.currentToken().isStructStart()) parser.skipChildren(); } } } if (null != parser.nextToken()) throw new IllegalArgumentException("expected end of input, got " + parser.currentToken()); assert null != operation: "VespaDocumentReader should throw on missing fields"; if (null != create) { switch (operationType) { case PUT -> ((DocumentPut) operation.operation()).setCreateIfNonExistent(create); case UPDATE -> ((DocumentUpdate) operation.operation()).setCreateIfNonExistent(create); case REMOVE -> throw new IllegalArgumentException(CREATE_IF_NON_EXISTENT + " is not supported for remove operations"); } } operation.operation().setCondition(TestAndSetCondition.fromConditionString(Optional.ofNullable(condition))); return operation; } catch (IOException e) { throw new IllegalArgumentException("failed parsing document", e); } }
assert null != operation: "VespaDocumentReader should throw on missing fields";
public ParsedDocumentOperation readSingleDocumentStreaming(DocumentOperationType operationType, String docIdString) { try { DocumentId docId = new DocumentId(docIdString); DocumentParseInfo documentParseInfo = new DocumentParseInfo(); documentParseInfo.documentId = docId; documentParseInfo.operationType = operationType; if (JsonToken.START_OBJECT != parser.nextValue()) throw new IllegalArgumentException("expected start of root object, got " + parser.currentToken()); Boolean create = null; String condition = null; ParsedDocumentOperation operation = null; while (JsonToken.END_OBJECT != parser.nextValue()) { switch (parser.getCurrentName()) { case FIELDS -> { documentParseInfo.fieldsBuffer = new LazyTokenBuffer(parser); VespaJsonDocumentReader vespaJsonDocumentReader = new VespaJsonDocumentReader(typeManager.getIgnoreUndefinedFields()); operation = vespaJsonDocumentReader.createDocumentOperation( getDocumentTypeFromString(documentParseInfo.documentId.getDocType(), typeManager), documentParseInfo); if ( ! documentParseInfo.fieldsBuffer.isEmpty()) throw new IllegalArgumentException("expected all content to be consumed by document parsing, but " + documentParseInfo.fieldsBuffer.nesting() + " levels remain"); } case CONDITION -> { if ( ! JsonToken.VALUE_STRING.equals(parser.currentToken()) && ! 
JsonToken.VALUE_NULL.equals(parser.currentToken())) throw new IllegalArgumentException("expected string value for condition, got " + parser.currentToken()); condition = parser.getValueAsString(); } case CREATE_IF_NON_EXISTENT -> { create = parser.getBooleanValue(); } default -> { if (parser.currentToken().isStructStart()) parser.skipChildren(); } } } if (null != parser.nextToken()) throw new IllegalArgumentException("expected end of input, got " + parser.currentToken()); if (null == operation) throw new IllegalArgumentException("document is missing the required \"fields\" field"); if (null != create) { switch (operationType) { case PUT -> ((DocumentPut) operation.operation()).setCreateIfNonExistent(create); case UPDATE -> ((DocumentUpdate) operation.operation()).setCreateIfNonExistent(create); case REMOVE -> throw new IllegalArgumentException(CREATE_IF_NON_EXISTENT + " is not supported for remove operations"); } } operation.operation().setCondition(TestAndSetCondition.fromConditionString(Optional.ofNullable(condition))); return operation; } catch (IOException e) { throw new IllegalArgumentException("failed parsing document", e); } }
class JsonReader { private final JsonParser parser; private final DocumentTypeManager typeManager; private ReaderState state = ReaderState.AT_START; enum ReaderState { AT_START, READING, END_OF_FEED } public JsonReader(DocumentTypeManager typeManager, InputStream input, JsonFactory parserFactory) { this.typeManager = typeManager; try { parser = parserFactory.createParser(input); } catch (IOException e) { state = END_OF_FEED; throw new IllegalArgumentException(e); } } public Optional<DocumentParseInfo> parseDocument() throws IOException { DocumentParser documentParser = new DocumentParser(parser); return documentParser.parse(Optional.empty()); } /** * Reads a single operation. The operation is not expected to be part of an array. * * @param operationType the type of operation (update or put) * @param docIdString document ID * @return the parsed document operation */ ParsedDocumentOperation readSingleDocument(DocumentOperationType operationType, String docIdString) { DocumentId docId = new DocumentId(docIdString); DocumentParseInfo documentParseInfo; try { DocumentParser documentParser = new DocumentParser(parser); documentParseInfo = documentParser.parse(Optional.of(docId)).get(); } catch (IOException e) { state = END_OF_FEED; throw new IllegalArgumentException(e); } documentParseInfo.operationType = operationType; VespaJsonDocumentReader vespaJsonDocumentReader = new VespaJsonDocumentReader(typeManager.getIgnoreUndefinedFields()); ParsedDocumentOperation operation = vespaJsonDocumentReader.createDocumentOperation( getDocumentTypeFromString(documentParseInfo.documentId.getDocType(), typeManager), documentParseInfo); operation.operation().setCondition(TestAndSetCondition.fromConditionString(documentParseInfo.condition)); return operation; } /** * Reads a JSON which is expected to contain a single document operation, * and where other parameters, like the document ID and operation type, are supplied by other means. 
* * @param operationType the type of operation (update or put) * @param docIdString document ID * @return the parsed document operation */ /** Returns the next document operation, or null if we have reached the end */ public DocumentOperation next() { switch (state) { case AT_START: JsonToken t = nextToken(parser); expectArrayStart(t); state = ReaderState.READING; break; case END_OF_FEED: return null; case READING: break; } Optional<DocumentParseInfo> documentParseInfo; try { documentParseInfo = parseDocument(); } catch (IOException r) { state = END_OF_FEED; throw new IllegalArgumentException(r); } if ( ! documentParseInfo.isPresent()) { state = END_OF_FEED; return null; } VespaJsonDocumentReader vespaJsonDocumentReader = new VespaJsonDocumentReader(typeManager.getIgnoreUndefinedFields()); DocumentOperation operation = vespaJsonDocumentReader.createDocumentOperation( getDocumentTypeFromString(documentParseInfo.get().documentId.getDocType(), typeManager), documentParseInfo.get()).operation(); operation.setCondition(TestAndSetCondition.fromConditionString(documentParseInfo.get().condition)); return operation; } public DocumentType readDocumentType(DocumentId docId) { return getDocumentTypeFromString(docId.getDocType(), typeManager); } private static DocumentType getDocumentTypeFromString(String docTypeString, DocumentTypeManager typeManager) { final DocumentType docType = typeManager.getDocumentType(docTypeString); if (docType == null) throw new IllegalArgumentException(String.format("Document type %s does not exist", docTypeString)); return docType; } public JsonToken nextToken(JsonParser parser) { try { return parser.nextValue(); } catch (IOException e) { state = END_OF_FEED; throw new IllegalArgumentException(e); } } }
class JsonReader { private final JsonParser parser; private final DocumentTypeManager typeManager; private ReaderState state = ReaderState.AT_START; enum ReaderState { AT_START, READING, END_OF_FEED } public JsonReader(DocumentTypeManager typeManager, InputStream input, JsonFactory parserFactory) { this.typeManager = typeManager; try { parser = parserFactory.createParser(input); } catch (IOException e) { state = END_OF_FEED; throw new IllegalArgumentException(e); } } public Optional<DocumentParseInfo> parseDocument() throws IOException { DocumentParser documentParser = new DocumentParser(parser); return documentParser.parse(Optional.empty()); } /** * Reads a single operation. The operation is not expected to be part of an array. * * @param operationType the type of operation (update or put) * @param docIdString document ID * @return the parsed document operation */ ParsedDocumentOperation readSingleDocument(DocumentOperationType operationType, String docIdString) { DocumentId docId = new DocumentId(docIdString); DocumentParseInfo documentParseInfo; try { DocumentParser documentParser = new DocumentParser(parser); documentParseInfo = documentParser.parse(Optional.of(docId)).get(); } catch (IOException e) { state = END_OF_FEED; throw new IllegalArgumentException(e); } documentParseInfo.operationType = operationType; VespaJsonDocumentReader vespaJsonDocumentReader = new VespaJsonDocumentReader(typeManager.getIgnoreUndefinedFields()); ParsedDocumentOperation operation = vespaJsonDocumentReader.createDocumentOperation( getDocumentTypeFromString(documentParseInfo.documentId.getDocType(), typeManager), documentParseInfo); operation.operation().setCondition(TestAndSetCondition.fromConditionString(documentParseInfo.condition)); return operation; } /** * Reads a JSON which is expected to contain a single document operation, * and where other parameters, like the document ID and operation type, are supplied by other means. 
* * @param operationType the type of operation (update or put) * @param docIdString document ID * @return the parsed document operation */ /** Returns the next document operation, or null if we have reached the end */ public DocumentOperation next() { switch (state) { case AT_START: JsonToken t = nextToken(parser); expectArrayStart(t); state = ReaderState.READING; break; case END_OF_FEED: return null; case READING: break; } Optional<DocumentParseInfo> documentParseInfo; try { documentParseInfo = parseDocument(); } catch (IOException r) { state = END_OF_FEED; throw new IllegalArgumentException(r); } if ( ! documentParseInfo.isPresent()) { state = END_OF_FEED; return null; } VespaJsonDocumentReader vespaJsonDocumentReader = new VespaJsonDocumentReader(typeManager.getIgnoreUndefinedFields()); DocumentOperation operation = vespaJsonDocumentReader.createDocumentOperation( getDocumentTypeFromString(documentParseInfo.get().documentId.getDocType(), typeManager), documentParseInfo.get()).operation(); operation.setCondition(TestAndSetCondition.fromConditionString(documentParseInfo.get().condition)); return operation; } public DocumentType readDocumentType(DocumentId docId) { return getDocumentTypeFromString(docId.getDocType(), typeManager); } private static DocumentType getDocumentTypeFromString(String docTypeString, DocumentTypeManager typeManager) { final DocumentType docType = typeManager.getDocumentType(docTypeString); if (docType == null) throw new IllegalArgumentException(String.format("Document type %s does not exist", docTypeString)); return docType; } public JsonToken nextToken(JsonParser parser) { try { return parser.nextValue(); } catch (IOException e) { state = END_OF_FEED; throw new IllegalArgumentException(e); } } }
Also, wouldn't it be possible to hit this case simply by omitting the `fields` object from the JSON payload? If so, it should probably be handled like an `IllegalArgumentException` rather than an invariant violation.
public ParsedDocumentOperation readSingleDocumentStreaming(DocumentOperationType operationType, String docIdString) { try { DocumentId docId = new DocumentId(docIdString); DocumentParseInfo documentParseInfo = new DocumentParseInfo(); documentParseInfo.documentId = docId; documentParseInfo.operationType = operationType; if (JsonToken.START_OBJECT != parser.nextValue()) throw new IllegalArgumentException("expected start of root object, got " + parser.currentToken()); Boolean create = null; String condition = null; ParsedDocumentOperation operation = null; while (JsonToken.END_OBJECT != parser.nextValue()) { switch (parser.getCurrentName()) { case FIELDS -> { documentParseInfo.fieldsBuffer = new LazyTokenBuffer(parser); VespaJsonDocumentReader vespaJsonDocumentReader = new VespaJsonDocumentReader(typeManager.getIgnoreUndefinedFields()); operation = vespaJsonDocumentReader.createDocumentOperation( getDocumentTypeFromString(documentParseInfo.documentId.getDocType(), typeManager), documentParseInfo); if ( ! documentParseInfo.fieldsBuffer.isEmpty()) throw new IllegalArgumentException("expected all content to be consumed by document parsing, but " + documentParseInfo.fieldsBuffer.nesting() + " levels remain"); } case CONDITION -> { if ( ! JsonToken.VALUE_STRING.equals(parser.currentToken()) && ! 
JsonToken.VALUE_NULL.equals(parser.currentToken())) throw new IllegalArgumentException("expected string value for condition, got " + parser.currentToken()); condition = parser.getValueAsString(); } case CREATE_IF_NON_EXISTENT -> { create = parser.getBooleanValue(); } default -> { if (parser.currentToken().isStructStart()) parser.skipChildren(); } } } if (null != parser.nextToken()) throw new IllegalArgumentException("expected end of input, got " + parser.currentToken()); assert null != operation: "VespaDocumentReader should throw on missing fields"; if (null != create) { switch (operationType) { case PUT -> ((DocumentPut) operation.operation()).setCreateIfNonExistent(create); case UPDATE -> ((DocumentUpdate) operation.operation()).setCreateIfNonExistent(create); case REMOVE -> throw new IllegalArgumentException(CREATE_IF_NON_EXISTENT + " is not supported for remove operations"); } } operation.operation().setCondition(TestAndSetCondition.fromConditionString(Optional.ofNullable(condition))); return operation; } catch (IOException e) { throw new IllegalArgumentException("failed parsing document", e); } }
assert null != operation: "VespaDocumentReader should throw on missing fields";
public ParsedDocumentOperation readSingleDocumentStreaming(DocumentOperationType operationType, String docIdString) { try { DocumentId docId = new DocumentId(docIdString); DocumentParseInfo documentParseInfo = new DocumentParseInfo(); documentParseInfo.documentId = docId; documentParseInfo.operationType = operationType; if (JsonToken.START_OBJECT != parser.nextValue()) throw new IllegalArgumentException("expected start of root object, got " + parser.currentToken()); Boolean create = null; String condition = null; ParsedDocumentOperation operation = null; while (JsonToken.END_OBJECT != parser.nextValue()) { switch (parser.getCurrentName()) { case FIELDS -> { documentParseInfo.fieldsBuffer = new LazyTokenBuffer(parser); VespaJsonDocumentReader vespaJsonDocumentReader = new VespaJsonDocumentReader(typeManager.getIgnoreUndefinedFields()); operation = vespaJsonDocumentReader.createDocumentOperation( getDocumentTypeFromString(documentParseInfo.documentId.getDocType(), typeManager), documentParseInfo); if ( ! documentParseInfo.fieldsBuffer.isEmpty()) throw new IllegalArgumentException("expected all content to be consumed by document parsing, but " + documentParseInfo.fieldsBuffer.nesting() + " levels remain"); } case CONDITION -> { if ( ! JsonToken.VALUE_STRING.equals(parser.currentToken()) && ! 
JsonToken.VALUE_NULL.equals(parser.currentToken())) throw new IllegalArgumentException("expected string value for condition, got " + parser.currentToken()); condition = parser.getValueAsString(); } case CREATE_IF_NON_EXISTENT -> { create = parser.getBooleanValue(); } default -> { if (parser.currentToken().isStructStart()) parser.skipChildren(); } } } if (null != parser.nextToken()) throw new IllegalArgumentException("expected end of input, got " + parser.currentToken()); if (null == operation) throw new IllegalArgumentException("document is missing the required \"fields\" field"); if (null != create) { switch (operationType) { case PUT -> ((DocumentPut) operation.operation()).setCreateIfNonExistent(create); case UPDATE -> ((DocumentUpdate) operation.operation()).setCreateIfNonExistent(create); case REMOVE -> throw new IllegalArgumentException(CREATE_IF_NON_EXISTENT + " is not supported for remove operations"); } } operation.operation().setCondition(TestAndSetCondition.fromConditionString(Optional.ofNullable(condition))); return operation; } catch (IOException e) { throw new IllegalArgumentException("failed parsing document", e); } }
class JsonReader { private final JsonParser parser; private final DocumentTypeManager typeManager; private ReaderState state = ReaderState.AT_START; enum ReaderState { AT_START, READING, END_OF_FEED } public JsonReader(DocumentTypeManager typeManager, InputStream input, JsonFactory parserFactory) { this.typeManager = typeManager; try { parser = parserFactory.createParser(input); } catch (IOException e) { state = END_OF_FEED; throw new IllegalArgumentException(e); } } public Optional<DocumentParseInfo> parseDocument() throws IOException { DocumentParser documentParser = new DocumentParser(parser); return documentParser.parse(Optional.empty()); } /** * Reads a single operation. The operation is not expected to be part of an array. * * @param operationType the type of operation (update or put) * @param docIdString document ID * @return the parsed document operation */ ParsedDocumentOperation readSingleDocument(DocumentOperationType operationType, String docIdString) { DocumentId docId = new DocumentId(docIdString); DocumentParseInfo documentParseInfo; try { DocumentParser documentParser = new DocumentParser(parser); documentParseInfo = documentParser.parse(Optional.of(docId)).get(); } catch (IOException e) { state = END_OF_FEED; throw new IllegalArgumentException(e); } documentParseInfo.operationType = operationType; VespaJsonDocumentReader vespaJsonDocumentReader = new VespaJsonDocumentReader(typeManager.getIgnoreUndefinedFields()); ParsedDocumentOperation operation = vespaJsonDocumentReader.createDocumentOperation( getDocumentTypeFromString(documentParseInfo.documentId.getDocType(), typeManager), documentParseInfo); operation.operation().setCondition(TestAndSetCondition.fromConditionString(documentParseInfo.condition)); return operation; } /** * Reads a JSON which is expected to contain a single document operation, * and where other parameters, like the document ID and operation type, are supplied by other means. 
* * @param operationType the type of operation (update or put) * @param docIdString document ID * @return the parsed document operation */ /** Returns the next document operation, or null if we have reached the end */ public DocumentOperation next() { switch (state) { case AT_START: JsonToken t = nextToken(parser); expectArrayStart(t); state = ReaderState.READING; break; case END_OF_FEED: return null; case READING: break; } Optional<DocumentParseInfo> documentParseInfo; try { documentParseInfo = parseDocument(); } catch (IOException r) { state = END_OF_FEED; throw new IllegalArgumentException(r); } if ( ! documentParseInfo.isPresent()) { state = END_OF_FEED; return null; } VespaJsonDocumentReader vespaJsonDocumentReader = new VespaJsonDocumentReader(typeManager.getIgnoreUndefinedFields()); DocumentOperation operation = vespaJsonDocumentReader.createDocumentOperation( getDocumentTypeFromString(documentParseInfo.get().documentId.getDocType(), typeManager), documentParseInfo.get()).operation(); operation.setCondition(TestAndSetCondition.fromConditionString(documentParseInfo.get().condition)); return operation; } public DocumentType readDocumentType(DocumentId docId) { return getDocumentTypeFromString(docId.getDocType(), typeManager); } private static DocumentType getDocumentTypeFromString(String docTypeString, DocumentTypeManager typeManager) { final DocumentType docType = typeManager.getDocumentType(docTypeString); if (docType == null) throw new IllegalArgumentException(String.format("Document type %s does not exist", docTypeString)); return docType; } public JsonToken nextToken(JsonParser parser) { try { return parser.nextValue(); } catch (IOException e) { state = END_OF_FEED; throw new IllegalArgumentException(e); } } }
class JsonReader { private final JsonParser parser; private final DocumentTypeManager typeManager; private ReaderState state = ReaderState.AT_START; enum ReaderState { AT_START, READING, END_OF_FEED } public JsonReader(DocumentTypeManager typeManager, InputStream input, JsonFactory parserFactory) { this.typeManager = typeManager; try { parser = parserFactory.createParser(input); } catch (IOException e) { state = END_OF_FEED; throw new IllegalArgumentException(e); } } public Optional<DocumentParseInfo> parseDocument() throws IOException { DocumentParser documentParser = new DocumentParser(parser); return documentParser.parse(Optional.empty()); } /** * Reads a single operation. The operation is not expected to be part of an array. * * @param operationType the type of operation (update or put) * @param docIdString document ID * @return the parsed document operation */ ParsedDocumentOperation readSingleDocument(DocumentOperationType operationType, String docIdString) { DocumentId docId = new DocumentId(docIdString); DocumentParseInfo documentParseInfo; try { DocumentParser documentParser = new DocumentParser(parser); documentParseInfo = documentParser.parse(Optional.of(docId)).get(); } catch (IOException e) { state = END_OF_FEED; throw new IllegalArgumentException(e); } documentParseInfo.operationType = operationType; VespaJsonDocumentReader vespaJsonDocumentReader = new VespaJsonDocumentReader(typeManager.getIgnoreUndefinedFields()); ParsedDocumentOperation operation = vespaJsonDocumentReader.createDocumentOperation( getDocumentTypeFromString(documentParseInfo.documentId.getDocType(), typeManager), documentParseInfo); operation.operation().setCondition(TestAndSetCondition.fromConditionString(documentParseInfo.condition)); return operation; } /** * Reads a JSON which is expected to contain a single document operation, * and where other parameters, like the document ID and operation type, are supplied by other means. 
* * @param operationType the type of operation (update or put) * @param docIdString document ID * @return the parsed document operation */ /** Returns the next document operation, or null if we have reached the end */ public DocumentOperation next() { switch (state) { case AT_START: JsonToken t = nextToken(parser); expectArrayStart(t); state = ReaderState.READING; break; case END_OF_FEED: return null; case READING: break; } Optional<DocumentParseInfo> documentParseInfo; try { documentParseInfo = parseDocument(); } catch (IOException r) { state = END_OF_FEED; throw new IllegalArgumentException(r); } if ( ! documentParseInfo.isPresent()) { state = END_OF_FEED; return null; } VespaJsonDocumentReader vespaJsonDocumentReader = new VespaJsonDocumentReader(typeManager.getIgnoreUndefinedFields()); DocumentOperation operation = vespaJsonDocumentReader.createDocumentOperation( getDocumentTypeFromString(documentParseInfo.get().documentId.getDocType(), typeManager), documentParseInfo.get()).operation(); operation.setCondition(TestAndSetCondition.fromConditionString(documentParseInfo.get().condition)); return operation; } public DocumentType readDocumentType(DocumentId docId) { return getDocumentTypeFromString(docId.getDocType(), typeManager); } private static DocumentType getDocumentTypeFromString(String docTypeString, DocumentTypeManager typeManager) { final DocumentType docType = typeManager.getDocumentType(docTypeString); if (docType == null) throw new IllegalArgumentException(String.format("Document type %s does not exist", docTypeString)); return docType; } public JsonToken nextToken(JsonParser parser) { try { return parser.nextValue(); } catch (IOException e) { state = END_OF_FEED; throw new IllegalArgumentException(e); } } }
Ah the point was that the VespaDocumentReader already does this check. There are unit tests for this. If this ever changes, the unit tests will also fail, because the exception is now an assertion error, instead of the excpted IAEx.
public ParsedDocumentOperation readSingleDocumentStreaming(DocumentOperationType operationType, String docIdString) { try { DocumentId docId = new DocumentId(docIdString); DocumentParseInfo documentParseInfo = new DocumentParseInfo(); documentParseInfo.documentId = docId; documentParseInfo.operationType = operationType; if (JsonToken.START_OBJECT != parser.nextValue()) throw new IllegalArgumentException("expected start of root object, got " + parser.currentToken()); Boolean create = null; String condition = null; ParsedDocumentOperation operation = null; while (JsonToken.END_OBJECT != parser.nextValue()) { switch (parser.getCurrentName()) { case FIELDS -> { documentParseInfo.fieldsBuffer = new LazyTokenBuffer(parser); VespaJsonDocumentReader vespaJsonDocumentReader = new VespaJsonDocumentReader(typeManager.getIgnoreUndefinedFields()); operation = vespaJsonDocumentReader.createDocumentOperation( getDocumentTypeFromString(documentParseInfo.documentId.getDocType(), typeManager), documentParseInfo); if ( ! documentParseInfo.fieldsBuffer.isEmpty()) throw new IllegalArgumentException("expected all content to be consumed by document parsing, but " + documentParseInfo.fieldsBuffer.nesting() + " levels remain"); } case CONDITION -> { if ( ! JsonToken.VALUE_STRING.equals(parser.currentToken()) && ! 
JsonToken.VALUE_NULL.equals(parser.currentToken())) throw new IllegalArgumentException("expected string value for condition, got " + parser.currentToken()); condition = parser.getValueAsString(); } case CREATE_IF_NON_EXISTENT -> { create = parser.getBooleanValue(); } default -> { if (parser.currentToken().isStructStart()) parser.skipChildren(); } } } if (null != parser.nextToken()) throw new IllegalArgumentException("expected end of input, got " + parser.currentToken()); assert null != operation: "VespaDocumentReader should throw on missing fields"; if (null != create) { switch (operationType) { case PUT -> ((DocumentPut) operation.operation()).setCreateIfNonExistent(create); case UPDATE -> ((DocumentUpdate) operation.operation()).setCreateIfNonExistent(create); case REMOVE -> throw new IllegalArgumentException(CREATE_IF_NON_EXISTENT + " is not supported for remove operations"); } } operation.operation().setCondition(TestAndSetCondition.fromConditionString(Optional.ofNullable(condition))); return operation; } catch (IOException e) { throw new IllegalArgumentException("failed parsing document", e); } }
assert null != operation: "VespaDocumentReader should throw on missing fields";
public ParsedDocumentOperation readSingleDocumentStreaming(DocumentOperationType operationType, String docIdString) { try { DocumentId docId = new DocumentId(docIdString); DocumentParseInfo documentParseInfo = new DocumentParseInfo(); documentParseInfo.documentId = docId; documentParseInfo.operationType = operationType; if (JsonToken.START_OBJECT != parser.nextValue()) throw new IllegalArgumentException("expected start of root object, got " + parser.currentToken()); Boolean create = null; String condition = null; ParsedDocumentOperation operation = null; while (JsonToken.END_OBJECT != parser.nextValue()) { switch (parser.getCurrentName()) { case FIELDS -> { documentParseInfo.fieldsBuffer = new LazyTokenBuffer(parser); VespaJsonDocumentReader vespaJsonDocumentReader = new VespaJsonDocumentReader(typeManager.getIgnoreUndefinedFields()); operation = vespaJsonDocumentReader.createDocumentOperation( getDocumentTypeFromString(documentParseInfo.documentId.getDocType(), typeManager), documentParseInfo); if ( ! documentParseInfo.fieldsBuffer.isEmpty()) throw new IllegalArgumentException("expected all content to be consumed by document parsing, but " + documentParseInfo.fieldsBuffer.nesting() + " levels remain"); } case CONDITION -> { if ( ! JsonToken.VALUE_STRING.equals(parser.currentToken()) && ! 
JsonToken.VALUE_NULL.equals(parser.currentToken())) throw new IllegalArgumentException("expected string value for condition, got " + parser.currentToken()); condition = parser.getValueAsString(); } case CREATE_IF_NON_EXISTENT -> { create = parser.getBooleanValue(); } default -> { if (parser.currentToken().isStructStart()) parser.skipChildren(); } } } if (null != parser.nextToken()) throw new IllegalArgumentException("expected end of input, got " + parser.currentToken()); if (null == operation) throw new IllegalArgumentException("document is missing the required \"fields\" field"); if (null != create) { switch (operationType) { case PUT -> ((DocumentPut) operation.operation()).setCreateIfNonExistent(create); case UPDATE -> ((DocumentUpdate) operation.operation()).setCreateIfNonExistent(create); case REMOVE -> throw new IllegalArgumentException(CREATE_IF_NON_EXISTENT + " is not supported for remove operations"); } } operation.operation().setCondition(TestAndSetCondition.fromConditionString(Optional.ofNullable(condition))); return operation; } catch (IOException e) { throw new IllegalArgumentException("failed parsing document", e); } }
class JsonReader { private final JsonParser parser; private final DocumentTypeManager typeManager; private ReaderState state = ReaderState.AT_START; enum ReaderState { AT_START, READING, END_OF_FEED } public JsonReader(DocumentTypeManager typeManager, InputStream input, JsonFactory parserFactory) { this.typeManager = typeManager; try { parser = parserFactory.createParser(input); } catch (IOException e) { state = END_OF_FEED; throw new IllegalArgumentException(e); } } public Optional<DocumentParseInfo> parseDocument() throws IOException { DocumentParser documentParser = new DocumentParser(parser); return documentParser.parse(Optional.empty()); } /** * Reads a single operation. The operation is not expected to be part of an array. * * @param operationType the type of operation (update or put) * @param docIdString document ID * @return the parsed document operation */ ParsedDocumentOperation readSingleDocument(DocumentOperationType operationType, String docIdString) { DocumentId docId = new DocumentId(docIdString); DocumentParseInfo documentParseInfo; try { DocumentParser documentParser = new DocumentParser(parser); documentParseInfo = documentParser.parse(Optional.of(docId)).get(); } catch (IOException e) { state = END_OF_FEED; throw new IllegalArgumentException(e); } documentParseInfo.operationType = operationType; VespaJsonDocumentReader vespaJsonDocumentReader = new VespaJsonDocumentReader(typeManager.getIgnoreUndefinedFields()); ParsedDocumentOperation operation = vespaJsonDocumentReader.createDocumentOperation( getDocumentTypeFromString(documentParseInfo.documentId.getDocType(), typeManager), documentParseInfo); operation.operation().setCondition(TestAndSetCondition.fromConditionString(documentParseInfo.condition)); return operation; } /** * Reads a JSON which is expected to contain a single document operation, * and where other parameters, like the document ID and operation type, are supplied by other means. 
* * @param operationType the type of operation (update or put) * @param docIdString document ID * @return the parsed document operation */ /** Returns the next document operation, or null if we have reached the end */ public DocumentOperation next() { switch (state) { case AT_START: JsonToken t = nextToken(parser); expectArrayStart(t); state = ReaderState.READING; break; case END_OF_FEED: return null; case READING: break; } Optional<DocumentParseInfo> documentParseInfo; try { documentParseInfo = parseDocument(); } catch (IOException r) { state = END_OF_FEED; throw new IllegalArgumentException(r); } if ( ! documentParseInfo.isPresent()) { state = END_OF_FEED; return null; } VespaJsonDocumentReader vespaJsonDocumentReader = new VespaJsonDocumentReader(typeManager.getIgnoreUndefinedFields()); DocumentOperation operation = vespaJsonDocumentReader.createDocumentOperation( getDocumentTypeFromString(documentParseInfo.get().documentId.getDocType(), typeManager), documentParseInfo.get()).operation(); operation.setCondition(TestAndSetCondition.fromConditionString(documentParseInfo.get().condition)); return operation; } public DocumentType readDocumentType(DocumentId docId) { return getDocumentTypeFromString(docId.getDocType(), typeManager); } private static DocumentType getDocumentTypeFromString(String docTypeString, DocumentTypeManager typeManager) { final DocumentType docType = typeManager.getDocumentType(docTypeString); if (docType == null) throw new IllegalArgumentException(String.format("Document type %s does not exist", docTypeString)); return docType; } public JsonToken nextToken(JsonParser parser) { try { return parser.nextValue(); } catch (IOException e) { state = END_OF_FEED; throw new IllegalArgumentException(e); } } }
class JsonReader { private final JsonParser parser; private final DocumentTypeManager typeManager; private ReaderState state = ReaderState.AT_START; enum ReaderState { AT_START, READING, END_OF_FEED } public JsonReader(DocumentTypeManager typeManager, InputStream input, JsonFactory parserFactory) { this.typeManager = typeManager; try { parser = parserFactory.createParser(input); } catch (IOException e) { state = END_OF_FEED; throw new IllegalArgumentException(e); } } public Optional<DocumentParseInfo> parseDocument() throws IOException { DocumentParser documentParser = new DocumentParser(parser); return documentParser.parse(Optional.empty()); } /** * Reads a single operation. The operation is not expected to be part of an array. * * @param operationType the type of operation (update or put) * @param docIdString document ID * @return the parsed document operation */ ParsedDocumentOperation readSingleDocument(DocumentOperationType operationType, String docIdString) { DocumentId docId = new DocumentId(docIdString); DocumentParseInfo documentParseInfo; try { DocumentParser documentParser = new DocumentParser(parser); documentParseInfo = documentParser.parse(Optional.of(docId)).get(); } catch (IOException e) { state = END_OF_FEED; throw new IllegalArgumentException(e); } documentParseInfo.operationType = operationType; VespaJsonDocumentReader vespaJsonDocumentReader = new VespaJsonDocumentReader(typeManager.getIgnoreUndefinedFields()); ParsedDocumentOperation operation = vespaJsonDocumentReader.createDocumentOperation( getDocumentTypeFromString(documentParseInfo.documentId.getDocType(), typeManager), documentParseInfo); operation.operation().setCondition(TestAndSetCondition.fromConditionString(documentParseInfo.condition)); return operation; } /** * Reads a JSON which is expected to contain a single document operation, * and where other parameters, like the document ID and operation type, are supplied by other means. 
* * @param operationType the type of operation (update or put) * @param docIdString document ID * @return the parsed document operation */ /** Returns the next document operation, or null if we have reached the end */ public DocumentOperation next() { switch (state) { case AT_START: JsonToken t = nextToken(parser); expectArrayStart(t); state = ReaderState.READING; break; case END_OF_FEED: return null; case READING: break; } Optional<DocumentParseInfo> documentParseInfo; try { documentParseInfo = parseDocument(); } catch (IOException r) { state = END_OF_FEED; throw new IllegalArgumentException(r); } if ( ! documentParseInfo.isPresent()) { state = END_OF_FEED; return null; } VespaJsonDocumentReader vespaJsonDocumentReader = new VespaJsonDocumentReader(typeManager.getIgnoreUndefinedFields()); DocumentOperation operation = vespaJsonDocumentReader.createDocumentOperation( getDocumentTypeFromString(documentParseInfo.get().documentId.getDocType(), typeManager), documentParseInfo.get()).operation(); operation.setCondition(TestAndSetCondition.fromConditionString(documentParseInfo.get().condition)); return operation; } public DocumentType readDocumentType(DocumentId docId) { return getDocumentTypeFromString(docId.getDocType(), typeManager); } private static DocumentType getDocumentTypeFromString(String docTypeString, DocumentTypeManager typeManager) { final DocumentType docType = typeManager.getDocumentType(docTypeString); if (docType == null) throw new IllegalArgumentException(String.format("Document type %s does not exist", docTypeString)); return docType; } public JsonToken nextToken(JsonParser parser) { try { return parser.nextValue(); } catch (IOException e) { state = END_OF_FEED; throw new IllegalArgumentException(e); } } }
I guess I could equally well have used a comment ... but that's a weaker statement.
public ParsedDocumentOperation readSingleDocumentStreaming(DocumentOperationType operationType, String docIdString) { try { DocumentId docId = new DocumentId(docIdString); DocumentParseInfo documentParseInfo = new DocumentParseInfo(); documentParseInfo.documentId = docId; documentParseInfo.operationType = operationType; if (JsonToken.START_OBJECT != parser.nextValue()) throw new IllegalArgumentException("expected start of root object, got " + parser.currentToken()); Boolean create = null; String condition = null; ParsedDocumentOperation operation = null; while (JsonToken.END_OBJECT != parser.nextValue()) { switch (parser.getCurrentName()) { case FIELDS -> { documentParseInfo.fieldsBuffer = new LazyTokenBuffer(parser); VespaJsonDocumentReader vespaJsonDocumentReader = new VespaJsonDocumentReader(typeManager.getIgnoreUndefinedFields()); operation = vespaJsonDocumentReader.createDocumentOperation( getDocumentTypeFromString(documentParseInfo.documentId.getDocType(), typeManager), documentParseInfo); if ( ! documentParseInfo.fieldsBuffer.isEmpty()) throw new IllegalArgumentException("expected all content to be consumed by document parsing, but " + documentParseInfo.fieldsBuffer.nesting() + " levels remain"); } case CONDITION -> { if ( ! JsonToken.VALUE_STRING.equals(parser.currentToken()) && ! 
JsonToken.VALUE_NULL.equals(parser.currentToken())) throw new IllegalArgumentException("expected string value for condition, got " + parser.currentToken()); condition = parser.getValueAsString(); } case CREATE_IF_NON_EXISTENT -> { create = parser.getBooleanValue(); } default -> { if (parser.currentToken().isStructStart()) parser.skipChildren(); } } } if (null != parser.nextToken()) throw new IllegalArgumentException("expected end of input, got " + parser.currentToken()); assert null != operation: "VespaDocumentReader should throw on missing fields"; if (null != create) { switch (operationType) { case PUT -> ((DocumentPut) operation.operation()).setCreateIfNonExistent(create); case UPDATE -> ((DocumentUpdate) operation.operation()).setCreateIfNonExistent(create); case REMOVE -> throw new IllegalArgumentException(CREATE_IF_NON_EXISTENT + " is not supported for remove operations"); } } operation.operation().setCondition(TestAndSetCondition.fromConditionString(Optional.ofNullable(condition))); return operation; } catch (IOException e) { throw new IllegalArgumentException("failed parsing document", e); } }
assert null != operation: "VespaDocumentReader should throw on missing fields";
public ParsedDocumentOperation readSingleDocumentStreaming(DocumentOperationType operationType, String docIdString) { try { DocumentId docId = new DocumentId(docIdString); DocumentParseInfo documentParseInfo = new DocumentParseInfo(); documentParseInfo.documentId = docId; documentParseInfo.operationType = operationType; if (JsonToken.START_OBJECT != parser.nextValue()) throw new IllegalArgumentException("expected start of root object, got " + parser.currentToken()); Boolean create = null; String condition = null; ParsedDocumentOperation operation = null; while (JsonToken.END_OBJECT != parser.nextValue()) { switch (parser.getCurrentName()) { case FIELDS -> { documentParseInfo.fieldsBuffer = new LazyTokenBuffer(parser); VespaJsonDocumentReader vespaJsonDocumentReader = new VespaJsonDocumentReader(typeManager.getIgnoreUndefinedFields()); operation = vespaJsonDocumentReader.createDocumentOperation( getDocumentTypeFromString(documentParseInfo.documentId.getDocType(), typeManager), documentParseInfo); if ( ! documentParseInfo.fieldsBuffer.isEmpty()) throw new IllegalArgumentException("expected all content to be consumed by document parsing, but " + documentParseInfo.fieldsBuffer.nesting() + " levels remain"); } case CONDITION -> { if ( ! JsonToken.VALUE_STRING.equals(parser.currentToken()) && ! 
JsonToken.VALUE_NULL.equals(parser.currentToken())) throw new IllegalArgumentException("expected string value for condition, got " + parser.currentToken()); condition = parser.getValueAsString(); } case CREATE_IF_NON_EXISTENT -> { create = parser.getBooleanValue(); } default -> { if (parser.currentToken().isStructStart()) parser.skipChildren(); } } } if (null != parser.nextToken()) throw new IllegalArgumentException("expected end of input, got " + parser.currentToken()); if (null == operation) throw new IllegalArgumentException("document is missing the required \"fields\" field"); if (null != create) { switch (operationType) { case PUT -> ((DocumentPut) operation.operation()).setCreateIfNonExistent(create); case UPDATE -> ((DocumentUpdate) operation.operation()).setCreateIfNonExistent(create); case REMOVE -> throw new IllegalArgumentException(CREATE_IF_NON_EXISTENT + " is not supported for remove operations"); } } operation.operation().setCondition(TestAndSetCondition.fromConditionString(Optional.ofNullable(condition))); return operation; } catch (IOException e) { throw new IllegalArgumentException("failed parsing document", e); } }
class JsonReader { private final JsonParser parser; private final DocumentTypeManager typeManager; private ReaderState state = ReaderState.AT_START; enum ReaderState { AT_START, READING, END_OF_FEED } public JsonReader(DocumentTypeManager typeManager, InputStream input, JsonFactory parserFactory) { this.typeManager = typeManager; try { parser = parserFactory.createParser(input); } catch (IOException e) { state = END_OF_FEED; throw new IllegalArgumentException(e); } } public Optional<DocumentParseInfo> parseDocument() throws IOException { DocumentParser documentParser = new DocumentParser(parser); return documentParser.parse(Optional.empty()); } /** * Reads a single operation. The operation is not expected to be part of an array. * * @param operationType the type of operation (update or put) * @param docIdString document ID * @return the parsed document operation */ ParsedDocumentOperation readSingleDocument(DocumentOperationType operationType, String docIdString) { DocumentId docId = new DocumentId(docIdString); DocumentParseInfo documentParseInfo; try { DocumentParser documentParser = new DocumentParser(parser); documentParseInfo = documentParser.parse(Optional.of(docId)).get(); } catch (IOException e) { state = END_OF_FEED; throw new IllegalArgumentException(e); } documentParseInfo.operationType = operationType; VespaJsonDocumentReader vespaJsonDocumentReader = new VespaJsonDocumentReader(typeManager.getIgnoreUndefinedFields()); ParsedDocumentOperation operation = vespaJsonDocumentReader.createDocumentOperation( getDocumentTypeFromString(documentParseInfo.documentId.getDocType(), typeManager), documentParseInfo); operation.operation().setCondition(TestAndSetCondition.fromConditionString(documentParseInfo.condition)); return operation; } /** * Reads a JSON which is expected to contain a single document operation, * and where other parameters, like the document ID and operation type, are supplied by other means. 
* * @param operationType the type of operation (update or put) * @param docIdString document ID * @return the parsed document operation */ /** Returns the next document operation, or null if we have reached the end */ public DocumentOperation next() { switch (state) { case AT_START: JsonToken t = nextToken(parser); expectArrayStart(t); state = ReaderState.READING; break; case END_OF_FEED: return null; case READING: break; } Optional<DocumentParseInfo> documentParseInfo; try { documentParseInfo = parseDocument(); } catch (IOException r) { state = END_OF_FEED; throw new IllegalArgumentException(r); } if ( ! documentParseInfo.isPresent()) { state = END_OF_FEED; return null; } VespaJsonDocumentReader vespaJsonDocumentReader = new VespaJsonDocumentReader(typeManager.getIgnoreUndefinedFields()); DocumentOperation operation = vespaJsonDocumentReader.createDocumentOperation( getDocumentTypeFromString(documentParseInfo.get().documentId.getDocType(), typeManager), documentParseInfo.get()).operation(); operation.setCondition(TestAndSetCondition.fromConditionString(documentParseInfo.get().condition)); return operation; } public DocumentType readDocumentType(DocumentId docId) { return getDocumentTypeFromString(docId.getDocType(), typeManager); } private static DocumentType getDocumentTypeFromString(String docTypeString, DocumentTypeManager typeManager) { final DocumentType docType = typeManager.getDocumentType(docTypeString); if (docType == null) throw new IllegalArgumentException(String.format("Document type %s does not exist", docTypeString)); return docType; } public JsonToken nextToken(JsonParser parser) { try { return parser.nextValue(); } catch (IOException e) { state = END_OF_FEED; throw new IllegalArgumentException(e); } } }
class JsonReader { private final JsonParser parser; private final DocumentTypeManager typeManager; private ReaderState state = ReaderState.AT_START; enum ReaderState { AT_START, READING, END_OF_FEED } public JsonReader(DocumentTypeManager typeManager, InputStream input, JsonFactory parserFactory) { this.typeManager = typeManager; try { parser = parserFactory.createParser(input); } catch (IOException e) { state = END_OF_FEED; throw new IllegalArgumentException(e); } } public Optional<DocumentParseInfo> parseDocument() throws IOException { DocumentParser documentParser = new DocumentParser(parser); return documentParser.parse(Optional.empty()); } /** * Reads a single operation. The operation is not expected to be part of an array. * * @param operationType the type of operation (update or put) * @param docIdString document ID * @return the parsed document operation */ ParsedDocumentOperation readSingleDocument(DocumentOperationType operationType, String docIdString) { DocumentId docId = new DocumentId(docIdString); DocumentParseInfo documentParseInfo; try { DocumentParser documentParser = new DocumentParser(parser); documentParseInfo = documentParser.parse(Optional.of(docId)).get(); } catch (IOException e) { state = END_OF_FEED; throw new IllegalArgumentException(e); } documentParseInfo.operationType = operationType; VespaJsonDocumentReader vespaJsonDocumentReader = new VespaJsonDocumentReader(typeManager.getIgnoreUndefinedFields()); ParsedDocumentOperation operation = vespaJsonDocumentReader.createDocumentOperation( getDocumentTypeFromString(documentParseInfo.documentId.getDocType(), typeManager), documentParseInfo); operation.operation().setCondition(TestAndSetCondition.fromConditionString(documentParseInfo.condition)); return operation; } /** * Reads a JSON which is expected to contain a single document operation, * and where other parameters, like the document ID and operation type, are supplied by other means. 
* * @param operationType the type of operation (update or put) * @param docIdString document ID * @return the parsed document operation */ /** Returns the next document operation, or null if we have reached the end */ public DocumentOperation next() { switch (state) { case AT_START: JsonToken t = nextToken(parser); expectArrayStart(t); state = ReaderState.READING; break; case END_OF_FEED: return null; case READING: break; } Optional<DocumentParseInfo> documentParseInfo; try { documentParseInfo = parseDocument(); } catch (IOException r) { state = END_OF_FEED; throw new IllegalArgumentException(r); } if ( ! documentParseInfo.isPresent()) { state = END_OF_FEED; return null; } VespaJsonDocumentReader vespaJsonDocumentReader = new VespaJsonDocumentReader(typeManager.getIgnoreUndefinedFields()); DocumentOperation operation = vespaJsonDocumentReader.createDocumentOperation( getDocumentTypeFromString(documentParseInfo.get().documentId.getDocType(), typeManager), documentParseInfo.get()).operation(); operation.setCondition(TestAndSetCondition.fromConditionString(documentParseInfo.get().condition)); return operation; } public DocumentType readDocumentType(DocumentId docId) { return getDocumentTypeFromString(docId.getDocType(), typeManager); } private static DocumentType getDocumentTypeFromString(String docTypeString, DocumentTypeManager typeManager) { final DocumentType docType = typeManager.getDocumentType(docTypeString); if (docType == null) throw new IllegalArgumentException(String.format("Document type %s does not exist", docTypeString)); return docType; } public JsonToken nextToken(JsonParser parser) { try { return parser.nextValue(); } catch (IOException e) { state = END_OF_FEED; throw new IllegalArgumentException(e); } } }
`VespaDocumentJsonReader` does check this, but from what I can see that particular code (`VespaDocumentJsonReader.readPut()` presumably, correct me if I'm wrong) will only be invoked _iff_ there is a `fields` object present in the first place. I.e. we have hit the `case FIELDS ->` above as part of parsing the JSON object, which is where we instantiate the reader. I may simply be confused and not quite see where/how we have transitively ensured the presence of the `fields` object.
public ParsedDocumentOperation readSingleDocumentStreaming(DocumentOperationType operationType, String docIdString) { try { DocumentId docId = new DocumentId(docIdString); DocumentParseInfo documentParseInfo = new DocumentParseInfo(); documentParseInfo.documentId = docId; documentParseInfo.operationType = operationType; if (JsonToken.START_OBJECT != parser.nextValue()) throw new IllegalArgumentException("expected start of root object, got " + parser.currentToken()); Boolean create = null; String condition = null; ParsedDocumentOperation operation = null; while (JsonToken.END_OBJECT != parser.nextValue()) { switch (parser.getCurrentName()) { case FIELDS -> { documentParseInfo.fieldsBuffer = new LazyTokenBuffer(parser); VespaJsonDocumentReader vespaJsonDocumentReader = new VespaJsonDocumentReader(typeManager.getIgnoreUndefinedFields()); operation = vespaJsonDocumentReader.createDocumentOperation( getDocumentTypeFromString(documentParseInfo.documentId.getDocType(), typeManager), documentParseInfo); if ( ! documentParseInfo.fieldsBuffer.isEmpty()) throw new IllegalArgumentException("expected all content to be consumed by document parsing, but " + documentParseInfo.fieldsBuffer.nesting() + " levels remain"); } case CONDITION -> { if ( ! JsonToken.VALUE_STRING.equals(parser.currentToken()) && ! 
JsonToken.VALUE_NULL.equals(parser.currentToken())) throw new IllegalArgumentException("expected string value for condition, got " + parser.currentToken()); condition = parser.getValueAsString(); } case CREATE_IF_NON_EXISTENT -> { create = parser.getBooleanValue(); } default -> { if (parser.currentToken().isStructStart()) parser.skipChildren(); } } } if (null != parser.nextToken()) throw new IllegalArgumentException("expected end of input, got " + parser.currentToken()); assert null != operation: "VespaDocumentReader should throw on missing fields"; if (null != create) { switch (operationType) { case PUT -> ((DocumentPut) operation.operation()).setCreateIfNonExistent(create); case UPDATE -> ((DocumentUpdate) operation.operation()).setCreateIfNonExistent(create); case REMOVE -> throw new IllegalArgumentException(CREATE_IF_NON_EXISTENT + " is not supported for remove operations"); } } operation.operation().setCondition(TestAndSetCondition.fromConditionString(Optional.ofNullable(condition))); return operation; } catch (IOException e) { throw new IllegalArgumentException("failed parsing document", e); } }
assert null != operation: "VespaDocumentReader should throw on missing fields";
public ParsedDocumentOperation readSingleDocumentStreaming(DocumentOperationType operationType, String docIdString) { try { DocumentId docId = new DocumentId(docIdString); DocumentParseInfo documentParseInfo = new DocumentParseInfo(); documentParseInfo.documentId = docId; documentParseInfo.operationType = operationType; if (JsonToken.START_OBJECT != parser.nextValue()) throw new IllegalArgumentException("expected start of root object, got " + parser.currentToken()); Boolean create = null; String condition = null; ParsedDocumentOperation operation = null; while (JsonToken.END_OBJECT != parser.nextValue()) { switch (parser.getCurrentName()) { case FIELDS -> { documentParseInfo.fieldsBuffer = new LazyTokenBuffer(parser); VespaJsonDocumentReader vespaJsonDocumentReader = new VespaJsonDocumentReader(typeManager.getIgnoreUndefinedFields()); operation = vespaJsonDocumentReader.createDocumentOperation( getDocumentTypeFromString(documentParseInfo.documentId.getDocType(), typeManager), documentParseInfo); if ( ! documentParseInfo.fieldsBuffer.isEmpty()) throw new IllegalArgumentException("expected all content to be consumed by document parsing, but " + documentParseInfo.fieldsBuffer.nesting() + " levels remain"); } case CONDITION -> { if ( ! JsonToken.VALUE_STRING.equals(parser.currentToken()) && ! 
JsonToken.VALUE_NULL.equals(parser.currentToken())) throw new IllegalArgumentException("expected string value for condition, got " + parser.currentToken()); condition = parser.getValueAsString(); } case CREATE_IF_NON_EXISTENT -> { create = parser.getBooleanValue(); } default -> { if (parser.currentToken().isStructStart()) parser.skipChildren(); } } } if (null != parser.nextToken()) throw new IllegalArgumentException("expected end of input, got " + parser.currentToken()); if (null == operation) throw new IllegalArgumentException("document is missing the required \"fields\" field"); if (null != create) { switch (operationType) { case PUT -> ((DocumentPut) operation.operation()).setCreateIfNonExistent(create); case UPDATE -> ((DocumentUpdate) operation.operation()).setCreateIfNonExistent(create); case REMOVE -> throw new IllegalArgumentException(CREATE_IF_NON_EXISTENT + " is not supported for remove operations"); } } operation.operation().setCondition(TestAndSetCondition.fromConditionString(Optional.ofNullable(condition))); return operation; } catch (IOException e) { throw new IllegalArgumentException("failed parsing document", e); } }
class JsonReader { private final JsonParser parser; private final DocumentTypeManager typeManager; private ReaderState state = ReaderState.AT_START; enum ReaderState { AT_START, READING, END_OF_FEED } public JsonReader(DocumentTypeManager typeManager, InputStream input, JsonFactory parserFactory) { this.typeManager = typeManager; try { parser = parserFactory.createParser(input); } catch (IOException e) { state = END_OF_FEED; throw new IllegalArgumentException(e); } } public Optional<DocumentParseInfo> parseDocument() throws IOException { DocumentParser documentParser = new DocumentParser(parser); return documentParser.parse(Optional.empty()); } /** * Reads a single operation. The operation is not expected to be part of an array. * * @param operationType the type of operation (update or put) * @param docIdString document ID * @return the parsed document operation */ ParsedDocumentOperation readSingleDocument(DocumentOperationType operationType, String docIdString) { DocumentId docId = new DocumentId(docIdString); DocumentParseInfo documentParseInfo; try { DocumentParser documentParser = new DocumentParser(parser); documentParseInfo = documentParser.parse(Optional.of(docId)).get(); } catch (IOException e) { state = END_OF_FEED; throw new IllegalArgumentException(e); } documentParseInfo.operationType = operationType; VespaJsonDocumentReader vespaJsonDocumentReader = new VespaJsonDocumentReader(typeManager.getIgnoreUndefinedFields()); ParsedDocumentOperation operation = vespaJsonDocumentReader.createDocumentOperation( getDocumentTypeFromString(documentParseInfo.documentId.getDocType(), typeManager), documentParseInfo); operation.operation().setCondition(TestAndSetCondition.fromConditionString(documentParseInfo.condition)); return operation; } /** * Reads a JSON which is expected to contain a single document operation, * and where other parameters, like the document ID and operation type, are supplied by other means. 
* * @param operationType the type of operation (update or put) * @param docIdString document ID * @return the parsed document operation */ /** Returns the next document operation, or null if we have reached the end */ public DocumentOperation next() { switch (state) { case AT_START: JsonToken t = nextToken(parser); expectArrayStart(t); state = ReaderState.READING; break; case END_OF_FEED: return null; case READING: break; } Optional<DocumentParseInfo> documentParseInfo; try { documentParseInfo = parseDocument(); } catch (IOException r) { state = END_OF_FEED; throw new IllegalArgumentException(r); } if ( ! documentParseInfo.isPresent()) { state = END_OF_FEED; return null; } VespaJsonDocumentReader vespaJsonDocumentReader = new VespaJsonDocumentReader(typeManager.getIgnoreUndefinedFields()); DocumentOperation operation = vespaJsonDocumentReader.createDocumentOperation( getDocumentTypeFromString(documentParseInfo.get().documentId.getDocType(), typeManager), documentParseInfo.get()).operation(); operation.setCondition(TestAndSetCondition.fromConditionString(documentParseInfo.get().condition)); return operation; } public DocumentType readDocumentType(DocumentId docId) { return getDocumentTypeFromString(docId.getDocType(), typeManager); } private static DocumentType getDocumentTypeFromString(String docTypeString, DocumentTypeManager typeManager) { final DocumentType docType = typeManager.getDocumentType(docTypeString); if (docType == null) throw new IllegalArgumentException(String.format("Document type %s does not exist", docTypeString)); return docType; } public JsonToken nextToken(JsonParser parser) { try { return parser.nextValue(); } catch (IOException e) { state = END_OF_FEED; throw new IllegalArgumentException(e); } } }
class JsonReader { private final JsonParser parser; private final DocumentTypeManager typeManager; private ReaderState state = ReaderState.AT_START; enum ReaderState { AT_START, READING, END_OF_FEED } public JsonReader(DocumentTypeManager typeManager, InputStream input, JsonFactory parserFactory) { this.typeManager = typeManager; try { parser = parserFactory.createParser(input); } catch (IOException e) { state = END_OF_FEED; throw new IllegalArgumentException(e); } } public Optional<DocumentParseInfo> parseDocument() throws IOException { DocumentParser documentParser = new DocumentParser(parser); return documentParser.parse(Optional.empty()); } /** * Reads a single operation. The operation is not expected to be part of an array. * * @param operationType the type of operation (update or put) * @param docIdString document ID * @return the parsed document operation */ ParsedDocumentOperation readSingleDocument(DocumentOperationType operationType, String docIdString) { DocumentId docId = new DocumentId(docIdString); DocumentParseInfo documentParseInfo; try { DocumentParser documentParser = new DocumentParser(parser); documentParseInfo = documentParser.parse(Optional.of(docId)).get(); } catch (IOException e) { state = END_OF_FEED; throw new IllegalArgumentException(e); } documentParseInfo.operationType = operationType; VespaJsonDocumentReader vespaJsonDocumentReader = new VespaJsonDocumentReader(typeManager.getIgnoreUndefinedFields()); ParsedDocumentOperation operation = vespaJsonDocumentReader.createDocumentOperation( getDocumentTypeFromString(documentParseInfo.documentId.getDocType(), typeManager), documentParseInfo); operation.operation().setCondition(TestAndSetCondition.fromConditionString(documentParseInfo.condition)); return operation; } /** * Reads a JSON which is expected to contain a single document operation, * and where other parameters, like the document ID and operation type, are supplied by other means. 
* * @param operationType the type of operation (update or put) * @param docIdString document ID * @return the parsed document operation */ /** Returns the next document operation, or null if we have reached the end */ public DocumentOperation next() { switch (state) { case AT_START: JsonToken t = nextToken(parser); expectArrayStart(t); state = ReaderState.READING; break; case END_OF_FEED: return null; case READING: break; } Optional<DocumentParseInfo> documentParseInfo; try { documentParseInfo = parseDocument(); } catch (IOException r) { state = END_OF_FEED; throw new IllegalArgumentException(r); } if ( ! documentParseInfo.isPresent()) { state = END_OF_FEED; return null; } VespaJsonDocumentReader vespaJsonDocumentReader = new VespaJsonDocumentReader(typeManager.getIgnoreUndefinedFields()); DocumentOperation operation = vespaJsonDocumentReader.createDocumentOperation( getDocumentTypeFromString(documentParseInfo.get().documentId.getDocType(), typeManager), documentParseInfo.get()).operation(); operation.setCondition(TestAndSetCondition.fromConditionString(documentParseInfo.get().condition)); return operation; } public DocumentType readDocumentType(DocumentId docId) { return getDocumentTypeFromString(docId.getDocType(), typeManager); } private static DocumentType getDocumentTypeFromString(String docTypeString, DocumentTypeManager typeManager) { final DocumentType docType = typeManager.getDocumentType(docTypeString); if (docType == null) throw new IllegalArgumentException(String.format("Document type %s does not exist", docTypeString)); return docType; } public JsonToken nextToken(JsonParser parser) { try { return parser.nextValue(); } catch (IOException e) { state = END_OF_FEED; throw new IllegalArgumentException(e); } } }
Ah, no, you're right. It wasn't originally like that, but now it is.
public ParsedDocumentOperation readSingleDocumentStreaming(DocumentOperationType operationType, String docIdString) { try { DocumentId docId = new DocumentId(docIdString); DocumentParseInfo documentParseInfo = new DocumentParseInfo(); documentParseInfo.documentId = docId; documentParseInfo.operationType = operationType; if (JsonToken.START_OBJECT != parser.nextValue()) throw new IllegalArgumentException("expected start of root object, got " + parser.currentToken()); Boolean create = null; String condition = null; ParsedDocumentOperation operation = null; while (JsonToken.END_OBJECT != parser.nextValue()) { switch (parser.getCurrentName()) { case FIELDS -> { documentParseInfo.fieldsBuffer = new LazyTokenBuffer(parser); VespaJsonDocumentReader vespaJsonDocumentReader = new VespaJsonDocumentReader(typeManager.getIgnoreUndefinedFields()); operation = vespaJsonDocumentReader.createDocumentOperation( getDocumentTypeFromString(documentParseInfo.documentId.getDocType(), typeManager), documentParseInfo); if ( ! documentParseInfo.fieldsBuffer.isEmpty()) throw new IllegalArgumentException("expected all content to be consumed by document parsing, but " + documentParseInfo.fieldsBuffer.nesting() + " levels remain"); } case CONDITION -> { if ( ! JsonToken.VALUE_STRING.equals(parser.currentToken()) && ! 
JsonToken.VALUE_NULL.equals(parser.currentToken())) throw new IllegalArgumentException("expected string value for condition, got " + parser.currentToken()); condition = parser.getValueAsString(); } case CREATE_IF_NON_EXISTENT -> { create = parser.getBooleanValue(); } default -> { if (parser.currentToken().isStructStart()) parser.skipChildren(); } } } if (null != parser.nextToken()) throw new IllegalArgumentException("expected end of input, got " + parser.currentToken()); assert null != operation: "VespaDocumentReader should throw on missing fields"; if (null != create) { switch (operationType) { case PUT -> ((DocumentPut) operation.operation()).setCreateIfNonExistent(create); case UPDATE -> ((DocumentUpdate) operation.operation()).setCreateIfNonExistent(create); case REMOVE -> throw new IllegalArgumentException(CREATE_IF_NON_EXISTENT + " is not supported for remove operations"); } } operation.operation().setCondition(TestAndSetCondition.fromConditionString(Optional.ofNullable(condition))); return operation; } catch (IOException e) { throw new IllegalArgumentException("failed parsing document", e); } }
assert null != operation: "VespaDocumentReader should throw on missing fields";
public ParsedDocumentOperation readSingleDocumentStreaming(DocumentOperationType operationType, String docIdString) { try { DocumentId docId = new DocumentId(docIdString); DocumentParseInfo documentParseInfo = new DocumentParseInfo(); documentParseInfo.documentId = docId; documentParseInfo.operationType = operationType; if (JsonToken.START_OBJECT != parser.nextValue()) throw new IllegalArgumentException("expected start of root object, got " + parser.currentToken()); Boolean create = null; String condition = null; ParsedDocumentOperation operation = null; while (JsonToken.END_OBJECT != parser.nextValue()) { switch (parser.getCurrentName()) { case FIELDS -> { documentParseInfo.fieldsBuffer = new LazyTokenBuffer(parser); VespaJsonDocumentReader vespaJsonDocumentReader = new VespaJsonDocumentReader(typeManager.getIgnoreUndefinedFields()); operation = vespaJsonDocumentReader.createDocumentOperation( getDocumentTypeFromString(documentParseInfo.documentId.getDocType(), typeManager), documentParseInfo); if ( ! documentParseInfo.fieldsBuffer.isEmpty()) throw new IllegalArgumentException("expected all content to be consumed by document parsing, but " + documentParseInfo.fieldsBuffer.nesting() + " levels remain"); } case CONDITION -> { if ( ! JsonToken.VALUE_STRING.equals(parser.currentToken()) && ! 
JsonToken.VALUE_NULL.equals(parser.currentToken())) throw new IllegalArgumentException("expected string value for condition, got " + parser.currentToken()); condition = parser.getValueAsString(); } case CREATE_IF_NON_EXISTENT -> { create = parser.getBooleanValue(); } default -> { if (parser.currentToken().isStructStart()) parser.skipChildren(); } } } if (null != parser.nextToken()) throw new IllegalArgumentException("expected end of input, got " + parser.currentToken()); if (null == operation) throw new IllegalArgumentException("document is missing the required \"fields\" field"); if (null != create) { switch (operationType) { case PUT -> ((DocumentPut) operation.operation()).setCreateIfNonExistent(create); case UPDATE -> ((DocumentUpdate) operation.operation()).setCreateIfNonExistent(create); case REMOVE -> throw new IllegalArgumentException(CREATE_IF_NON_EXISTENT + " is not supported for remove operations"); } } operation.operation().setCondition(TestAndSetCondition.fromConditionString(Optional.ofNullable(condition))); return operation; } catch (IOException e) { throw new IllegalArgumentException("failed parsing document", e); } }
class JsonReader { private final JsonParser parser; private final DocumentTypeManager typeManager; private ReaderState state = ReaderState.AT_START; enum ReaderState { AT_START, READING, END_OF_FEED } public JsonReader(DocumentTypeManager typeManager, InputStream input, JsonFactory parserFactory) { this.typeManager = typeManager; try { parser = parserFactory.createParser(input); } catch (IOException e) { state = END_OF_FEED; throw new IllegalArgumentException(e); } } public Optional<DocumentParseInfo> parseDocument() throws IOException { DocumentParser documentParser = new DocumentParser(parser); return documentParser.parse(Optional.empty()); } /** * Reads a single operation. The operation is not expected to be part of an array. * * @param operationType the type of operation (update or put) * @param docIdString document ID * @return the parsed document operation */ ParsedDocumentOperation readSingleDocument(DocumentOperationType operationType, String docIdString) { DocumentId docId = new DocumentId(docIdString); DocumentParseInfo documentParseInfo; try { DocumentParser documentParser = new DocumentParser(parser); documentParseInfo = documentParser.parse(Optional.of(docId)).get(); } catch (IOException e) { state = END_OF_FEED; throw new IllegalArgumentException(e); } documentParseInfo.operationType = operationType; VespaJsonDocumentReader vespaJsonDocumentReader = new VespaJsonDocumentReader(typeManager.getIgnoreUndefinedFields()); ParsedDocumentOperation operation = vespaJsonDocumentReader.createDocumentOperation( getDocumentTypeFromString(documentParseInfo.documentId.getDocType(), typeManager), documentParseInfo); operation.operation().setCondition(TestAndSetCondition.fromConditionString(documentParseInfo.condition)); return operation; } /** * Reads a JSON which is expected to contain a single document operation, * and where other parameters, like the document ID and operation type, are supplied by other means. 
* * @param operationType the type of operation (update or put) * @param docIdString document ID * @return the parsed document operation */ /** Returns the next document operation, or null if we have reached the end */ public DocumentOperation next() { switch (state) { case AT_START: JsonToken t = nextToken(parser); expectArrayStart(t); state = ReaderState.READING; break; case END_OF_FEED: return null; case READING: break; } Optional<DocumentParseInfo> documentParseInfo; try { documentParseInfo = parseDocument(); } catch (IOException r) { state = END_OF_FEED; throw new IllegalArgumentException(r); } if ( ! documentParseInfo.isPresent()) { state = END_OF_FEED; return null; } VespaJsonDocumentReader vespaJsonDocumentReader = new VespaJsonDocumentReader(typeManager.getIgnoreUndefinedFields()); DocumentOperation operation = vespaJsonDocumentReader.createDocumentOperation( getDocumentTypeFromString(documentParseInfo.get().documentId.getDocType(), typeManager), documentParseInfo.get()).operation(); operation.setCondition(TestAndSetCondition.fromConditionString(documentParseInfo.get().condition)); return operation; } public DocumentType readDocumentType(DocumentId docId) { return getDocumentTypeFromString(docId.getDocType(), typeManager); } private static DocumentType getDocumentTypeFromString(String docTypeString, DocumentTypeManager typeManager) { final DocumentType docType = typeManager.getDocumentType(docTypeString); if (docType == null) throw new IllegalArgumentException(String.format("Document type %s does not exist", docTypeString)); return docType; } public JsonToken nextToken(JsonParser parser) { try { return parser.nextValue(); } catch (IOException e) { state = END_OF_FEED; throw new IllegalArgumentException(e); } } }
class JsonReader { private final JsonParser parser; private final DocumentTypeManager typeManager; private ReaderState state = ReaderState.AT_START; enum ReaderState { AT_START, READING, END_OF_FEED } public JsonReader(DocumentTypeManager typeManager, InputStream input, JsonFactory parserFactory) { this.typeManager = typeManager; try { parser = parserFactory.createParser(input); } catch (IOException e) { state = END_OF_FEED; throw new IllegalArgumentException(e); } } public Optional<DocumentParseInfo> parseDocument() throws IOException { DocumentParser documentParser = new DocumentParser(parser); return documentParser.parse(Optional.empty()); } /** * Reads a single operation. The operation is not expected to be part of an array. * * @param operationType the type of operation (update or put) * @param docIdString document ID * @return the parsed document operation */ ParsedDocumentOperation readSingleDocument(DocumentOperationType operationType, String docIdString) { DocumentId docId = new DocumentId(docIdString); DocumentParseInfo documentParseInfo; try { DocumentParser documentParser = new DocumentParser(parser); documentParseInfo = documentParser.parse(Optional.of(docId)).get(); } catch (IOException e) { state = END_OF_FEED; throw new IllegalArgumentException(e); } documentParseInfo.operationType = operationType; VespaJsonDocumentReader vespaJsonDocumentReader = new VespaJsonDocumentReader(typeManager.getIgnoreUndefinedFields()); ParsedDocumentOperation operation = vespaJsonDocumentReader.createDocumentOperation( getDocumentTypeFromString(documentParseInfo.documentId.getDocType(), typeManager), documentParseInfo); operation.operation().setCondition(TestAndSetCondition.fromConditionString(documentParseInfo.condition)); return operation; } /** * Reads a JSON which is expected to contain a single document operation, * and where other parameters, like the document ID and operation type, are supplied by other means. 
* * @param operationType the type of operation (update or put) * @param docIdString document ID * @return the parsed document operation */ /** Returns the next document operation, or null if we have reached the end */ public DocumentOperation next() { switch (state) { case AT_START: JsonToken t = nextToken(parser); expectArrayStart(t); state = ReaderState.READING; break; case END_OF_FEED: return null; case READING: break; } Optional<DocumentParseInfo> documentParseInfo; try { documentParseInfo = parseDocument(); } catch (IOException r) { state = END_OF_FEED; throw new IllegalArgumentException(r); } if ( ! documentParseInfo.isPresent()) { state = END_OF_FEED; return null; } VespaJsonDocumentReader vespaJsonDocumentReader = new VespaJsonDocumentReader(typeManager.getIgnoreUndefinedFields()); DocumentOperation operation = vespaJsonDocumentReader.createDocumentOperation( getDocumentTypeFromString(documentParseInfo.get().documentId.getDocType(), typeManager), documentParseInfo.get()).operation(); operation.setCondition(TestAndSetCondition.fromConditionString(documentParseInfo.get().condition)); return operation; } public DocumentType readDocumentType(DocumentId docId) { return getDocumentTypeFromString(docId.getDocType(), typeManager); } private static DocumentType getDocumentTypeFromString(String docTypeString, DocumentTypeManager typeManager) { final DocumentType docType = typeManager.getDocumentType(docTypeString); if (docType == null) throw new IllegalArgumentException(String.format("Document type %s does not exist", docTypeString)); return docType; } public JsonToken nextToken(JsonParser parser) { try { return parser.nextValue(); } catch (IOException e) { state = END_OF_FEED; throw new IllegalArgumentException(e); } } }
Nit: missing spaces (unless this is intentional for visual grouping)
void test_that_redundancy_is_rounded_up() { verifyFinalRedundancy(new Redundancy(1, 1, 1, 5, 5), 1,5); verifyFinalRedundancy(new Redundancy(1, 1, 1, 5, 4), 1,4); verifyFinalRedundancy(new Redundancy(1, 2, 2, 5, 10), 2,10); verifyFinalRedundancy(new Redundancy(1, 2, 2, 5, 9), 2,9); }
verifyFinalRedundancy(new Redundancy(1, 2, 2, 5, 9), 2,9);
void test_that_redundancy_is_rounded_up() { verifyFinalRedundancy(new Redundancy(1, 1, 1, 5, 5), 1, 5); verifyFinalRedundancy(new Redundancy(1, 1, 1, 5, 4), 1, 4); verifyFinalRedundancy(new Redundancy(1, 2, 2, 5, 10), 2, 10); verifyFinalRedundancy(new Redundancy(1, 2, 2, 5, 9), 2, 9); }
class RedundancyTest { @Test void effectively_globally_distributed_is_correct() { assertFalse(createRedundancy(4, 2, 10).isEffectivelyGloballyDistributed()); assertFalse(createRedundancy(5, 1, 10).isEffectivelyGloballyDistributed()); assertFalse(createRedundancy(5, 2, 12).isEffectivelyGloballyDistributed()); assertTrue(createRedundancy(5, 2, 10).isEffectivelyGloballyDistributed()); assertTrue(createRedundancy(5, 3, 10).isEffectivelyGloballyDistributed()); assertTrue(createRedundancy(1, 1, 1).isEffectivelyGloballyDistributed()); } private static Redundancy createRedundancy(int redundancy, int implicitGroups, int totalNodes) { Redundancy r = new Redundancy(1, redundancy, 1, implicitGroups, totalNodes); return r; } private static void verifyFinalRedundancy(Redundancy redundancy, int expectedFinal, int expectedEffectiveFinal) { assertEquals(expectedEffectiveFinal, redundancy.effectiveFinalRedundancy()); assertEquals(expectedFinal, redundancy.finalRedundancy()); assertEquals(expectedEffectiveFinal, redundancy.effectiveReadyCopies()); assertEquals(expectedFinal, redundancy.readyCopies()); } @Test }
class RedundancyTest { @Test void effectively_globally_distributed_is_correct() { assertFalse(createRedundancy(4, 2, 10).isEffectivelyGloballyDistributed()); assertFalse(createRedundancy(5, 1, 10).isEffectivelyGloballyDistributed()); assertFalse(createRedundancy(5, 2, 12).isEffectivelyGloballyDistributed()); assertTrue(createRedundancy(5, 2, 10).isEffectivelyGloballyDistributed()); assertTrue(createRedundancy(5, 3, 10).isEffectivelyGloballyDistributed()); assertTrue(createRedundancy(1, 1, 1).isEffectivelyGloballyDistributed()); } private static Redundancy createRedundancy(int redundancy, int implicitGroups, int totalNodes) { Redundancy r = new Redundancy(1, redundancy, 1, implicitGroups, totalNodes); return r; } private static void verifyFinalRedundancy(Redundancy redundancy, int expectedFinal, int expectedEffectiveFinal) { assertEquals(expectedEffectiveFinal, redundancy.effectiveFinalRedundancy()); assertEquals(expectedFinal, redundancy.finalRedundancy()); assertEquals(expectedEffectiveFinal, redundancy.effectiveReadyCopies()); assertEquals(expectedFinal, redundancy.readyCopies()); } @Test }
This seems better for AWS. There is still one discrepancy: AmazonRoute53ClientImpl allows `#IPv4 addresses = 1` for a tenant host, and in this case the empty set should be returned here. Not sure if this is possible. With Andreas' new inclave setup? In any case, until AR53CI is refactored to use this class it's not that important to resolve. It could be ignored or left as a TODO.
public static Set<RecordType> recordTypesFor(IP.Version ipVersion, NodeType hostType, CloudName cloudName, boolean enclave) { if (cloudName == CloudName.AWS || cloudName == CloudName.GCP) { if (enclave) { return ipVersion.is6() ? EnumSet.of(RecordType.FORWARD, RecordType.PUBLIC_FORWARD) : EnumSet.noneOf(RecordType.class); } else { return hostType == confighost && ipVersion.is6() ? EnumSet.of(RecordType.FORWARD, RecordType.REVERSE, RecordType.PUBLIC_FORWARD) : EnumSet.of(RecordType.FORWARD, RecordType.REVERSE); } } if (cloudName == CloudName.AZURE) { return ipVersion.is6() ? EnumSet.noneOf(RecordType.class) : enclave || hostType == confighost ? EnumSet.of(RecordType.FORWARD, RecordType.PUBLIC_FORWARD) : EnumSet.of(RecordType.FORWARD); } throw new IllegalArgumentException("Does not manage DNS for cloud " + cloudName); }
if (cloudName == CloudName.AWS || cloudName == CloudName.GCP) {
public static Set<RecordType> recordTypesFor(IP.Version ipVersion, NodeType hostType, CloudName cloudName, boolean enclave) { if (cloudName == CloudName.AWS || cloudName == CloudName.GCP) { if (enclave) { return ipVersion.is6() ? EnumSet.of(RecordType.FORWARD, RecordType.PUBLIC_FORWARD) : EnumSet.noneOf(RecordType.class); } else { return hostType == confighost && ipVersion.is6() ? EnumSet.of(RecordType.FORWARD, RecordType.REVERSE, RecordType.PUBLIC_FORWARD) : EnumSet.of(RecordType.FORWARD, RecordType.REVERSE); } } if (cloudName == CloudName.AZURE) { return ipVersion.is6() ? EnumSet.noneOf(RecordType.class) : enclave || hostType == confighost ? EnumSet.of(RecordType.FORWARD, RecordType.PUBLIC_FORWARD) : EnumSet.of(RecordType.FORWARD); } throw new IllegalArgumentException("Does not manage DNS for cloud " + cloudName); }
class Dns { private Dns() {} public enum RecordType { FORWARD, PUBLIC_FORWARD, REVERSE } /** Returns the set of DNS record types for a host and its children and the given version (ipv6), host type, etc. */ /** Verify DNS configuration of given hostname and IP address */ public static void verify(String hostname, String ipAddress, NodeType nodeType, NameResolver resolver, CloudAccount cloudAccount, Zone zone) { IP.Version version = IP.Version.fromIpAddress(ipAddress); Set<RecordType> recordTypes = recordTypesFor(version, nodeType, zone.cloud().name(), cloudAccount.isEnclave(zone)); if (recordTypes.contains(RecordType.FORWARD)) { NameResolver.RecordType recordType = version.is6() ? NameResolver.RecordType.AAAA : NameResolver.RecordType.A; Set<String> addresses = resolver.resolve(hostname, recordType); if (!addresses.equals(java.util.Set.of(ipAddress))) throw new IllegalArgumentException("Expected " + hostname + " to resolve to " + ipAddress + ", but got " + addresses); } if (recordTypes.contains(RecordType.REVERSE)) { Optional<String> reverseHostname = resolver.resolveHostname(ipAddress); if (reverseHostname.isEmpty()) throw new IllegalArgumentException(ipAddress + " did not resolve to a hostname"); if (!reverseHostname.get().equals(hostname)) throw new IllegalArgumentException(ipAddress + " resolved to " + reverseHostname.get() + ", which does not match expected hostname " + hostname); } } }
// Verifies DNS configuration for a hostname/IP pair: forward resolution (A for IPv4,
// AAAA for IPv6) must yield exactly the given address, and — when reverse records apply —
// the reverse lookup must yield exactly the given hostname. Mismatches throw IllegalArgumentException.
class Dns { private Dns() {} public enum RecordType { FORWARD, PUBLIC_FORWARD, REVERSE } /** Returns the set of DNS record types for a host and its children and the given version (ipv6), host type, etc. */ /** Verify DNS configuration of given hostname and IP address */ public static void verify(String hostname, String ipAddress, NodeType nodeType, NameResolver resolver, CloudAccount cloudAccount, Zone zone) { IP.Version version = IP.Version.fromIpAddress(ipAddress); Set<RecordType> recordTypes = recordTypesFor(version, nodeType, zone.cloud().name(), cloudAccount.isEnclave(zone)); if (recordTypes.contains(RecordType.FORWARD)) { NameResolver.RecordType recordType = version.is6() ? NameResolver.RecordType.AAAA : NameResolver.RecordType.A; Set<String> addresses = resolver.resolve(hostname, recordType); if (!addresses.equals(java.util.Set.of(ipAddress))) throw new IllegalArgumentException("Expected " + hostname + " to resolve to " + ipAddress + ", but got " + addresses); } if (recordTypes.contains(RecordType.REVERSE)) { Optional<String> reverseHostname = resolver.resolveHostname(ipAddress); if (reverseHostname.isEmpty()) throw new IllegalArgumentException(ipAddress + " did not resolve to a hostname"); if (!reverseHostname.get().equals(hostname)) throw new IllegalArgumentException(ipAddress + " resolved to " + reverseHostname.get() + ", which does not match expected hostname " + hostname); } } }
Thanks — unintentional. Not so easy to see when IntelliJ clutters the text with parameter names.
// Effective redundancy (redundancy × groups) appears to be capped at the total node count
// — e.g. totalNodes=4 limits 1×5 to 4. Verify against the Redundancy implementation.
// Argument spacing normalized ("2,9" -> "2, 9") for consistency with the rest of the file.
void test_that_redundancy_is_rounded_up() {
    verifyFinalRedundancy(new Redundancy(1, 1, 1, 5, 5), 1, 5);
    verifyFinalRedundancy(new Redundancy(1, 1, 1, 5, 4), 1, 4);
    verifyFinalRedundancy(new Redundancy(1, 2, 2, 5, 10), 2, 10);
    verifyFinalRedundancy(new Redundancy(1, 2, 2, 5, 9), 2, 9);
}
verifyFinalRedundancy(new Redundancy(1, 2, 2, 5, 9), 2,9);
// Checks that final/effective redundancy is capped by the number of available nodes:
// each case asserts finalRedundancy and effectiveFinalRedundancy (via verifyFinalRedundancy).
void test_that_redundancy_is_rounded_up() { verifyFinalRedundancy(new Redundancy(1, 1, 1, 5, 5), 1, 5); verifyFinalRedundancy(new Redundancy(1, 1, 1, 5, 4), 1, 4); verifyFinalRedundancy(new Redundancy(1, 2, 2, 5, 10), 2, 10); verifyFinalRedundancy(new Redundancy(1, 2, 2, 5, 9), 2, 9); }
// Tests for Redundancy: isEffectivelyGloballyDistributed as a function of
// (redundancy, implicit groups, total nodes), plus a helper asserting the four
// final/effective redundancy and ready-copies accessors.
// NOTE(review): the trailing "@Test" before the closing brace has no method attached —
// looks like an extraction artifact; this would not compile as-is.
class RedundancyTest { @Test void effectively_globally_distributed_is_correct() { assertFalse(createRedundancy(4, 2, 10).isEffectivelyGloballyDistributed()); assertFalse(createRedundancy(5, 1, 10).isEffectivelyGloballyDistributed()); assertFalse(createRedundancy(5, 2, 12).isEffectivelyGloballyDistributed()); assertTrue(createRedundancy(5, 2, 10).isEffectivelyGloballyDistributed()); assertTrue(createRedundancy(5, 3, 10).isEffectivelyGloballyDistributed()); assertTrue(createRedundancy(1, 1, 1).isEffectivelyGloballyDistributed()); } private static Redundancy createRedundancy(int redundancy, int implicitGroups, int totalNodes) { Redundancy r = new Redundancy(1, redundancy, 1, implicitGroups, totalNodes); return r; } private static void verifyFinalRedundancy(Redundancy redundancy, int expectedFinal, int expectedEffectiveFinal) { assertEquals(expectedEffectiveFinal, redundancy.effectiveFinalRedundancy()); assertEquals(expectedFinal, redundancy.finalRedundancy()); assertEquals(expectedEffectiveFinal, redundancy.effectiveReadyCopies()); assertEquals(expectedFinal, redundancy.readyCopies()); } @Test }
// Duplicate of the RedundancyTest snippet above (context field of the same record).
// NOTE(review): dangling "@Test" before the closing brace — extraction artifact, not valid Java.
class RedundancyTest { @Test void effectively_globally_distributed_is_correct() { assertFalse(createRedundancy(4, 2, 10).isEffectivelyGloballyDistributed()); assertFalse(createRedundancy(5, 1, 10).isEffectivelyGloballyDistributed()); assertFalse(createRedundancy(5, 2, 12).isEffectivelyGloballyDistributed()); assertTrue(createRedundancy(5, 2, 10).isEffectivelyGloballyDistributed()); assertTrue(createRedundancy(5, 3, 10).isEffectivelyGloballyDistributed()); assertTrue(createRedundancy(1, 1, 1).isEffectivelyGloballyDistributed()); } private static Redundancy createRedundancy(int redundancy, int implicitGroups, int totalNodes) { Redundancy r = new Redundancy(1, redundancy, 1, implicitGroups, totalNodes); return r; } private static void verifyFinalRedundancy(Redundancy redundancy, int expectedFinal, int expectedEffectiveFinal) { assertEquals(expectedEffectiveFinal, redundancy.effectiveFinalRedundancy()); assertEquals(expectedFinal, redundancy.finalRedundancy()); assertEquals(expectedEffectiveFinal, redundancy.effectiveReadyCopies()); assertEquals(expectedFinal, redundancy.readyCopies()); } @Test }
G or M? (i.e., is 0x1000000 intended to be ~16M rather than ~1G?)
/**
 * Creates a container node.
 *
 * @param parent      the config producer this container is a child of
 * @param name        the name of this container
 * @param retired     whether this node is marked as retired (slated for removal)
 * @param index       the unique index of this node
 * @param deployState deploy state supplying feature flags for shutdown behavior and JVM options
 */
protected Container(TreeConfigProducer<?> parent, String name, boolean retired, int index, DeployState deployState) {
    super(parent, name);
    this.name = name;
    this.parent = parent;
    this.retired = retired;
    this.index = index;
    dumpHeapOnShutdownTimeout = deployState.featureFlags().containerDumpHeapOnShutdownTimeout();
    shutdownTimeoutS = deployState.featureFlags().containerShutdownTimeout();
    this.defaultHttpServer = new JettyHttpServer("DefaultHttpServer", containerClusterOrNull(parent), deployState);
    // Only add the default server when no explicit <http> section is configured.
    if (getHttp() == null) {
        addChild(defaultHttpServer);
    }
    addBuiltinHandlers();
    addChild(new SimpleComponent("com.yahoo.container.jdisc.ConfiguredApplication$ApplicationContext"));
    appendJvmOptions(jvmOmitStackTraceInFastThrowOption(deployState.featureFlags()));
    // 0x1000000 bytes = 16 MiB mmap threshold for vespa-malloc.
    addEnvironmentVariable("VESPA_MALLOC_MMAP_THRESHOLD", "0x1000000");
}
addEnvironmentVariable("VESPA_MALLOC_MMAP_THRESHOLD","0x1000000");
// Full constructor: wires shutdown feature flags, registers the default Jetty http server
// (only when no explicit http section exists), built-in handlers, the application context
// component, JVM options, and the vespa-malloc mmap threshold (0x1000000 bytes = 16 MiB).
protected Container(TreeConfigProducer<?> parent, String name, boolean retired, int index, DeployState deployState) { super(parent, name); this.name = name; this.parent = parent; this.retired = retired; this.index = index; dumpHeapOnShutdownTimeout = deployState.featureFlags().containerDumpHeapOnShutdownTimeout(); shutdownTimeoutS = deployState.featureFlags().containerShutdownTimeout(); this.defaultHttpServer = new JettyHttpServer("DefaultHttpServer", containerClusterOrNull(parent), deployState); if (getHttp() == null) { addChild(defaultHttpServer); } addBuiltinHandlers(); addChild(new SimpleComponent("com.yahoo.container.jdisc.ConfiguredApplication$ApplicationContext")); appendJvmOptions(jvmOmitStackTraceInFastThrowOption(deployState.featureFlags())); addEnvironmentVariable("VESPA_MALLOC_MMAP_THRESHOLD","0x1000000"); }
// Base model of a jdisc container service: owns component/handler groups, allocates
// http/messaging/rpc ports, and produces QrConfig, ComponentsConfig, JdiscBindingsConfig,
// ContainerHttpConfig and ContainerMbusConfig.
// NOTE(review): the class declares the abstract method myServiceType() but reads
// "class Container" without the 'abstract' modifier — likely lost in extraction; confirm.
// NOTE(review): allocatePorts relies on exact offset/tag ordering — do not reorder.
class Container extends AbstractService implements QrConfig.Producer, ComponentsConfig.Producer, JdiscBindingsConfig.Producer, ContainerHttpConfig.Producer, ContainerMbusConfig.Producer { public static final int BASEPORT = Defaults.getDefaults().vespaWebServicePort(); public static final String SINGLENODE_CONTAINER_SERVICESPEC = "default_singlenode_container"; /** The cluster this container belongs to, or null if it is not added to any cluster */ private ContainerCluster<?> owner = null; private List<LogctlSpec> logctlSpecs = List.of(); protected final TreeConfigProducer<?> parent; private final String name; private boolean requireSpecificPorts = true; private String clusterName = null; private Optional<String> hostResponseHeaderKey = Optional.empty(); /** Whether this node has been marked as retired (e.g, will be removed) */ private final boolean retired; /** The unique index of this node */ private final int index; private final boolean dumpHeapOnShutdownTimeout; private final double shutdownTimeoutS; private final ComponentGroup<Handler> handlers = new ComponentGroup<>(this, "handler"); private final ComponentGroup<Component<?, ?>> components = new ComponentGroup<>(this, "components"); private final JettyHttpServer defaultHttpServer; protected Container(TreeConfigProducer<?> parent, String name, int index, DeployState deployState) { this(parent, name, false, index, deployState); } protected String jvmOmitStackTraceInFastThrowOption(ModelContext.FeatureFlags featureFlags) { return featureFlags.jvmOmitStackTraceInFastThrowOption(ClusterSpec.Type.container); } void setOwner(ContainerCluster<?> owner) { this.owner = owner; } /** True if this container is retired (slated for removal) */ public boolean isRetired() { return retired; } public ComponentGroup<Handler> getHandlers() { return handlers; } public ComponentGroup<?> getComponents() { return components; } public final void addComponent(Component c) { components.addComponent(c); } public final void 
addSimpleComponent(String idSpec, String classSpec, String bundleSpec) { addComponent(new SimpleComponent(new ComponentModel(idSpec, classSpec, bundleSpec))); } public final void addHandler(Handler h) { handlers.addComponent(h); } /** * If present, this container should emit this header key with the value set to the local hostname * in HTTP responses */ @SuppressWarnings("unused") public void setHostResponseHeaderKey(Optional<String> hostResponseheaderKey) { Objects.requireNonNull(hostResponseheaderKey, "HostResponseheaderKey cannot be null"); this.hostResponseHeaderKey = hostResponseheaderKey; } public Http getHttp() { return (parent instanceof ContainerCluster) ? ((ContainerCluster<?>) parent).getHttp() : null; } @SuppressWarnings("unused") public JettyHttpServer getDefaultHttpServer() { return defaultHttpServer; } /** Returns the index of this node. The index of a given node is stable through changes with best effort. */ public final int index() { return index; } public void addBuiltinHandlers() { } @Override public void initService(DeployState deployState) { if (isInitialized()) return; super.initService(deployState); if (getHttp() == null) { initDefaultJettyConnector(); } } private int getPort(ConnectorFactory connectorFactory) { return connectorFactory.getListenPort(); } private void initDefaultJettyConnector() { defaultHttpServer.addConnector(new ConnectorFactory.Builder("SearchServer", getSearchPort()).build()); } private ContainerServiceType myServiceType = null; /** Subclasses must implement {@link @Override public final String getServiceType() { if (myServiceType == null) { myServiceType = myServiceType(); } return myServiceType.serviceName; } /** Subclasses must implement this for a custom service name. */ protected abstract ContainerServiceType myServiceType(); public void setClusterName(String name) { this.clusterName = name; } @Override public int getWantedPort() { return requiresWantedPort() ? 
BASEPORT: 0; } /** instance can use any port number for its default HTTP server */ public void useDynamicPorts() { requireSpecificPorts = false; } /** * First container must run on ports familiar to the user. */ @Override public boolean requiresWantedPort() { return requireSpecificPorts && (getHttp() == null); } /** * @return the number of ports needed by the Container */ public int getPortCount() { int httpPorts = (getHttp() != null) ? 0 : 2; return httpPorts + numMessageBusPorts() + numRpcPorts(); } @Override public void allocatePorts(int start, PortAllocBridge from) { if (start == 0) start = BASEPORT; int offset = 0; if (getHttp() == null) { if (requireSpecificPorts) { allocatedSearchPort = from.requirePort(start, "http"); } else { allocatedSearchPort = from.allocatePort("http"); } portsMeta.on(offset++).tag("http").tag("query").tag("external").tag("state"); from.allocatePort("http/1"); portsMeta.on(offset++).tag("http").tag("external"); } else if (getHttp().getHttpServer().isEmpty()) { } else { for (ConnectorFactory connectorFactory : getHttp().getHttpServer().get().getConnectorFactories()) { int port = getPort(connectorFactory); String name = "http/" + connectorFactory.getName(); from.requirePort(port, name); if (offset == 0) { portsMeta.on(offset++).tag("http").tag("query").tag("external").tag("state"); } else { portsMeta.on(offset++).tag("http").tag("external"); } } } if (messageBusEnabled()) { allocatedMessagingPort = from.allocatePort("messaging"); portsMeta.on(offset++).tag("rpc").tag("messaging"); } if (rpcServerEnabled()) { allocatedRpcPort = from.allocatePort("rpc/admin"); portsMeta.on(offset++).tag("rpc").tag("admin"); } } protected int allocatedSearchPort = 0; /** * @return the actual search port * TODO: Remove. 
Use {@link */ public int getSearchPort() { if (getHttp() != null) throw new AssertionError("getSearchPort must not be used when http section is present."); return allocatedSearchPort; } protected int allocatedRpcPort = 0; protected int getRpcPort() { return allocatedRpcPort; } protected int numRpcPorts() { return rpcServerEnabled() ? 1 : 0; } protected int allocatedMessagingPort = 0; private int getMessagingPort() { return allocatedMessagingPort; } protected int numMessageBusPorts() { return messageBusEnabled() ? 1 : 0; } @Override public int getHealthPort() { final Http http = getHttp(); if (http != null) { if (http.getHttpServer().isEmpty()) { return -1; } else { return getRelativePort(0); } } else { return httpServerEnabled() ? getSearchPort() : -1; } } public Optional<String> getStartupCommand() { return Optional.of("PRELOAD=" + getPreLoad() + " exec ${VESPA_HOME}/libexec/vespa/vespa-wrapper vespa-start-container-daemon " + getJvmOptions() + " "); } @Override public void getConfig(QrConfig.Builder builder) { builder.rpc(new Rpc.Builder() .enabled(rpcServerEnabled()) .port(getRpcPort()) .slobrokId(serviceSlobrokId())) .discriminator((clusterName != null ? clusterName + "." : "" ) + name) .clustername(clusterName != null ? 
clusterName : "") .nodeIndex(index) .shutdown.dumpHeapOnTimeout(dumpHeapOnShutdownTimeout) .timeout(shutdownTimeoutS); } /** Returns the jvm args set explicitly for this node */ public String getAssignedJvmOptions() { return super.getJvmOptions(); } private String serviceSlobrokId() { return "vespa/service/" + getConfigId(); } @Override public void getConfig(ComponentsConfig.Builder builder) { builder.setApplyOnRestart(owner.getDeferChangesUntilRestart()); builder.components.addAll(ComponentsConfigGenerator.generate(allEnabledComponents())); } private Collection<Component<?, ?>> allEnabledComponents() { Collection<Component<?, ?>> allComponents = new ArrayList<>(); addAllEnabledComponents(allComponents, this); return Collections.unmodifiableCollection(allComponents); } private void addAllEnabledComponents(Collection<Component<?, ?>> allComponents, TreeConfigProducer<?> current) { for (var child: current.getChildren().values()) { if ( ! httpServerEnabled() && isHttpServer(child)) continue; if (child instanceof Component) allComponents.add((Component<?, ?>) child); if (child instanceof TreeConfigProducer<?> t) { addAllEnabledComponents(allComponents, t); } } } private boolean isHttpServer(AnyConfigProducer component) { return component instanceof JettyHttpServer; } @Override public final void getConfig(JdiscBindingsConfig.Builder builder) { builder.handlers(DiscBindingsConfigGenerator.generate(handlers.getComponents())); } @Override public void getConfig(ContainerHttpConfig.Builder builder) { hostResponseHeaderKey.ifPresent(builder::hostResponseHeaderKey); } @Override public void getConfig(ContainerMbusConfig.Builder builder) { builder.port(getMessagingPort()); } @Override public HashMap<String,String> getDefaultMetricDimensions(){ HashMap<String, String> dimensions = new HashMap<>(); if (clusterName != null) dimensions.put("clustername", clusterName); return dimensions; } protected String prepareStopCommand(Duration timeout) { long rpcTimeoutSeconds = 
timeout.toSeconds() + 10; String rpcParams = "-t " + rpcTimeoutSeconds + " tcp/localhost:" + getRpcPort() + " prepareStop d:" + timeout.toSeconds(); return getDefaults().underVespaHome("bin/vespa-rpc-invoke") + " " + rpcParams; } private boolean messageBusEnabled() { return containerCluster().isPresent() && containerCluster().get().messageBusEnabled(); } private boolean httpServerEnabled() { return containerCluster().isPresent() && containerCluster().get().httpServerEnabled(); } private boolean rpcServerEnabled() { return containerCluster().isPresent() && containerCluster().get().rpcServerEnabled(); } protected Optional<ContainerCluster> containerCluster() { return Optional.ofNullable(containerClusterOrNull(parent)); } private static ContainerCluster containerClusterOrNull(AnyConfigProducer producer) { return producer instanceof ContainerCluster<?> ? (ContainerCluster<?>) producer : null; } void setLogctlSpecs(List<LogctlSpec> logctlSpecs) { this.logctlSpecs = logctlSpecs; } @Override public List<LogctlSpec> getLogctlSpecs() { return logctlSpecs; } }
// Duplicate of the Container class snippet above (context field of the same record):
// port allocation, component/handler management and config production for a jdisc container.
// NOTE(review): several javadoc fragments here are truncated mid-{@link} — extraction artifacts.
class Container extends AbstractService implements QrConfig.Producer, ComponentsConfig.Producer, JdiscBindingsConfig.Producer, ContainerHttpConfig.Producer, ContainerMbusConfig.Producer { public static final int BASEPORT = Defaults.getDefaults().vespaWebServicePort(); public static final String SINGLENODE_CONTAINER_SERVICESPEC = "default_singlenode_container"; /** The cluster this container belongs to, or null if it is not added to any cluster */ private ContainerCluster<?> owner = null; private List<LogctlSpec> logctlSpecs = List.of(); protected final TreeConfigProducer<?> parent; private final String name; private boolean requireSpecificPorts = true; private String clusterName = null; private Optional<String> hostResponseHeaderKey = Optional.empty(); /** Whether this node has been marked as retired (e.g, will be removed) */ private final boolean retired; /** The unique index of this node */ private final int index; private final boolean dumpHeapOnShutdownTimeout; private final double shutdownTimeoutS; private final ComponentGroup<Handler> handlers = new ComponentGroup<>(this, "handler"); private final ComponentGroup<Component<?, ?>> components = new ComponentGroup<>(this, "components"); private final JettyHttpServer defaultHttpServer; protected Container(TreeConfigProducer<?> parent, String name, int index, DeployState deployState) { this(parent, name, false, index, deployState); } protected String jvmOmitStackTraceInFastThrowOption(ModelContext.FeatureFlags featureFlags) { return featureFlags.jvmOmitStackTraceInFastThrowOption(ClusterSpec.Type.container); } void setOwner(ContainerCluster<?> owner) { this.owner = owner; } /** True if this container is retired (slated for removal) */ public boolean isRetired() { return retired; } public ComponentGroup<Handler> getHandlers() { return handlers; } public ComponentGroup<?> getComponents() { return components; } public final void addComponent(Component c) { components.addComponent(c); } public final void 
addSimpleComponent(String idSpec, String classSpec, String bundleSpec) { addComponent(new SimpleComponent(new ComponentModel(idSpec, classSpec, bundleSpec))); } public final void addHandler(Handler h) { handlers.addComponent(h); } /** * If present, this container should emit this header key with the value set to the local hostname * in HTTP responses */ @SuppressWarnings("unused") public void setHostResponseHeaderKey(Optional<String> hostResponseheaderKey) { Objects.requireNonNull(hostResponseheaderKey, "HostResponseheaderKey cannot be null"); this.hostResponseHeaderKey = hostResponseheaderKey; } public Http getHttp() { return (parent instanceof ContainerCluster) ? ((ContainerCluster<?>) parent).getHttp() : null; } @SuppressWarnings("unused") public JettyHttpServer getDefaultHttpServer() { return defaultHttpServer; } /** Returns the index of this node. The index of a given node is stable through changes with best effort. */ public final int index() { return index; } public void addBuiltinHandlers() { } @Override public void initService(DeployState deployState) { if (isInitialized()) return; super.initService(deployState); if (getHttp() == null) { initDefaultJettyConnector(); } } private int getPort(ConnectorFactory connectorFactory) { return connectorFactory.getListenPort(); } private void initDefaultJettyConnector() { defaultHttpServer.addConnector(new ConnectorFactory.Builder("SearchServer", getSearchPort()).build()); } private ContainerServiceType myServiceType = null; /** Subclasses must implement {@link @Override public final String getServiceType() { if (myServiceType == null) { myServiceType = myServiceType(); } return myServiceType.serviceName; } /** Subclasses must implement this for a custom service name. */ protected abstract ContainerServiceType myServiceType(); public void setClusterName(String name) { this.clusterName = name; } @Override public int getWantedPort() { return requiresWantedPort() ? 
BASEPORT: 0; } /** instance can use any port number for its default HTTP server */ public void useDynamicPorts() { requireSpecificPorts = false; } /** * First container must run on ports familiar to the user. */ @Override public boolean requiresWantedPort() { return requireSpecificPorts && (getHttp() == null); } /** * @return the number of ports needed by the Container */ public int getPortCount() { int httpPorts = (getHttp() != null) ? 0 : 2; return httpPorts + numMessageBusPorts() + numRpcPorts(); } @Override public void allocatePorts(int start, PortAllocBridge from) { if (start == 0) start = BASEPORT; int offset = 0; if (getHttp() == null) { if (requireSpecificPorts) { allocatedSearchPort = from.requirePort(start, "http"); } else { allocatedSearchPort = from.allocatePort("http"); } portsMeta.on(offset++).tag("http").tag("query").tag("external").tag("state"); from.allocatePort("http/1"); portsMeta.on(offset++).tag("http").tag("external"); } else if (getHttp().getHttpServer().isEmpty()) { } else { for (ConnectorFactory connectorFactory : getHttp().getHttpServer().get().getConnectorFactories()) { int port = getPort(connectorFactory); String name = "http/" + connectorFactory.getName(); from.requirePort(port, name); if (offset == 0) { portsMeta.on(offset++).tag("http").tag("query").tag("external").tag("state"); } else { portsMeta.on(offset++).tag("http").tag("external"); } } } if (messageBusEnabled()) { allocatedMessagingPort = from.allocatePort("messaging"); portsMeta.on(offset++).tag("rpc").tag("messaging"); } if (rpcServerEnabled()) { allocatedRpcPort = from.allocatePort("rpc/admin"); portsMeta.on(offset++).tag("rpc").tag("admin"); } } protected int allocatedSearchPort = 0; /** * @return the actual search port * TODO: Remove. 
Use {@link */ public int getSearchPort() { if (getHttp() != null) throw new AssertionError("getSearchPort must not be used when http section is present."); return allocatedSearchPort; } protected int allocatedRpcPort = 0; protected int getRpcPort() { return allocatedRpcPort; } protected int numRpcPorts() { return rpcServerEnabled() ? 1 : 0; } protected int allocatedMessagingPort = 0; private int getMessagingPort() { return allocatedMessagingPort; } protected int numMessageBusPorts() { return messageBusEnabled() ? 1 : 0; } @Override public int getHealthPort() { final Http http = getHttp(); if (http != null) { if (http.getHttpServer().isEmpty()) { return -1; } else { return getRelativePort(0); } } else { return httpServerEnabled() ? getSearchPort() : -1; } } public Optional<String> getStartupCommand() { return Optional.of("PRELOAD=" + getPreLoad() + " exec ${VESPA_HOME}/libexec/vespa/vespa-wrapper vespa-start-container-daemon " + getJvmOptions() + " "); } @Override public void getConfig(QrConfig.Builder builder) { builder.rpc(new Rpc.Builder() .enabled(rpcServerEnabled()) .port(getRpcPort()) .slobrokId(serviceSlobrokId())) .discriminator((clusterName != null ? clusterName + "." : "" ) + name) .clustername(clusterName != null ? 
clusterName : "") .nodeIndex(index) .shutdown.dumpHeapOnTimeout(dumpHeapOnShutdownTimeout) .timeout(shutdownTimeoutS); } /** Returns the jvm args set explicitly for this node */ public String getAssignedJvmOptions() { return super.getJvmOptions(); } private String serviceSlobrokId() { return "vespa/service/" + getConfigId(); } @Override public void getConfig(ComponentsConfig.Builder builder) { builder.setApplyOnRestart(owner.getDeferChangesUntilRestart()); builder.components.addAll(ComponentsConfigGenerator.generate(allEnabledComponents())); } private Collection<Component<?, ?>> allEnabledComponents() { Collection<Component<?, ?>> allComponents = new ArrayList<>(); addAllEnabledComponents(allComponents, this); return Collections.unmodifiableCollection(allComponents); } private void addAllEnabledComponents(Collection<Component<?, ?>> allComponents, TreeConfigProducer<?> current) { for (var child: current.getChildren().values()) { if ( ! httpServerEnabled() && isHttpServer(child)) continue; if (child instanceof Component) allComponents.add((Component<?, ?>) child); if (child instanceof TreeConfigProducer<?> t) { addAllEnabledComponents(allComponents, t); } } } private boolean isHttpServer(AnyConfigProducer component) { return component instanceof JettyHttpServer; } @Override public final void getConfig(JdiscBindingsConfig.Builder builder) { builder.handlers(DiscBindingsConfigGenerator.generate(handlers.getComponents())); } @Override public void getConfig(ContainerHttpConfig.Builder builder) { hostResponseHeaderKey.ifPresent(builder::hostResponseHeaderKey); } @Override public void getConfig(ContainerMbusConfig.Builder builder) { builder.port(getMessagingPort()); } @Override public HashMap<String,String> getDefaultMetricDimensions(){ HashMap<String, String> dimensions = new HashMap<>(); if (clusterName != null) dimensions.put("clustername", clusterName); return dimensions; } protected String prepareStopCommand(Duration timeout) { long rpcTimeoutSeconds = 
timeout.toSeconds() + 10; String rpcParams = "-t " + rpcTimeoutSeconds + " tcp/localhost:" + getRpcPort() + " prepareStop d:" + timeout.toSeconds(); return getDefaults().underVespaHome("bin/vespa-rpc-invoke") + " " + rpcParams; } private boolean messageBusEnabled() { return containerCluster().isPresent() && containerCluster().get().messageBusEnabled(); } private boolean httpServerEnabled() { return containerCluster().isPresent() && containerCluster().get().httpServerEnabled(); } private boolean rpcServerEnabled() { return containerCluster().isPresent() && containerCluster().get().rpcServerEnabled(); } protected Optional<ContainerCluster> containerCluster() { return Optional.ofNullable(containerClusterOrNull(parent)); } private static ContainerCluster containerClusterOrNull(AnyConfigProducer producer) { return producer instanceof ContainerCluster<?> ? (ContainerCluster<?>) producer : null; } void setLogctlSpecs(List<LogctlSpec> logctlSpecs) { this.logctlSpecs = logctlSpecs; } @Override public List<LogctlSpec> getLogctlSpecs() { return logctlSpecs; } }
M :)
// Duplicate of the Container constructor snippet above (method_body field of the same record).
// Sets node identity/retired state, shutdown feature flags, default http server (when no
// explicit http section), built-in handlers, JVM options, and VESPA_MALLOC_MMAP_THRESHOLD.
protected Container(TreeConfigProducer<?> parent, String name, boolean retired, int index, DeployState deployState) { super(parent, name); this.name = name; this.parent = parent; this.retired = retired; this.index = index; dumpHeapOnShutdownTimeout = deployState.featureFlags().containerDumpHeapOnShutdownTimeout(); shutdownTimeoutS = deployState.featureFlags().containerShutdownTimeout(); this.defaultHttpServer = new JettyHttpServer("DefaultHttpServer", containerClusterOrNull(parent), deployState); if (getHttp() == null) { addChild(defaultHttpServer); } addBuiltinHandlers(); addChild(new SimpleComponent("com.yahoo.container.jdisc.ConfiguredApplication$ApplicationContext")); appendJvmOptions(jvmOmitStackTraceInFastThrowOption(deployState.featureFlags())); addEnvironmentVariable("VESPA_MALLOC_MMAP_THRESHOLD","0x1000000"); }
addEnvironmentVariable("VESPA_MALLOC_MMAP_THRESHOLD","0x1000000");
// Duplicate of the Container constructor snippet above (method_body_after field of the same record).
// 0x1000000 bytes = 16 MiB mmap threshold for vespa-malloc.
protected Container(TreeConfigProducer<?> parent, String name, boolean retired, int index, DeployState deployState) { super(parent, name); this.name = name; this.parent = parent; this.retired = retired; this.index = index; dumpHeapOnShutdownTimeout = deployState.featureFlags().containerDumpHeapOnShutdownTimeout(); shutdownTimeoutS = deployState.featureFlags().containerShutdownTimeout(); this.defaultHttpServer = new JettyHttpServer("DefaultHttpServer", containerClusterOrNull(parent), deployState); if (getHttp() == null) { addChild(defaultHttpServer); } addBuiltinHandlers(); addChild(new SimpleComponent("com.yahoo.container.jdisc.ConfiguredApplication$ApplicationContext")); appendJvmOptions(jvmOmitStackTraceInFastThrowOption(deployState.featureFlags())); addEnvironmentVariable("VESPA_MALLOC_MMAP_THRESHOLD","0x1000000"); }
class Container extends AbstractService implements QrConfig.Producer, ComponentsConfig.Producer, JdiscBindingsConfig.Producer, ContainerHttpConfig.Producer, ContainerMbusConfig.Producer { public static final int BASEPORT = Defaults.getDefaults().vespaWebServicePort(); public static final String SINGLENODE_CONTAINER_SERVICESPEC = "default_singlenode_container"; /** The cluster this container belongs to, or null if it is not added to any cluster */ private ContainerCluster<?> owner = null; private List<LogctlSpec> logctlSpecs = List.of(); protected final TreeConfigProducer<?> parent; private final String name; private boolean requireSpecificPorts = true; private String clusterName = null; private Optional<String> hostResponseHeaderKey = Optional.empty(); /** Whether this node has been marked as retired (e.g, will be removed) */ private final boolean retired; /** The unique index of this node */ private final int index; private final boolean dumpHeapOnShutdownTimeout; private final double shutdownTimeoutS; private final ComponentGroup<Handler> handlers = new ComponentGroup<>(this, "handler"); private final ComponentGroup<Component<?, ?>> components = new ComponentGroup<>(this, "components"); private final JettyHttpServer defaultHttpServer; protected Container(TreeConfigProducer<?> parent, String name, int index, DeployState deployState) { this(parent, name, false, index, deployState); } protected String jvmOmitStackTraceInFastThrowOption(ModelContext.FeatureFlags featureFlags) { return featureFlags.jvmOmitStackTraceInFastThrowOption(ClusterSpec.Type.container); } void setOwner(ContainerCluster<?> owner) { this.owner = owner; } /** True if this container is retired (slated for removal) */ public boolean isRetired() { return retired; } public ComponentGroup<Handler> getHandlers() { return handlers; } public ComponentGroup<?> getComponents() { return components; } public final void addComponent(Component c) { components.addComponent(c); } public final void 
addSimpleComponent(String idSpec, String classSpec, String bundleSpec) { addComponent(new SimpleComponent(new ComponentModel(idSpec, classSpec, bundleSpec))); } public final void addHandler(Handler h) { handlers.addComponent(h); } /** * If present, this container should emit this header key with the value set to the local hostname * in HTTP responses */ @SuppressWarnings("unused") public void setHostResponseHeaderKey(Optional<String> hostResponseheaderKey) { Objects.requireNonNull(hostResponseheaderKey, "HostResponseheaderKey cannot be null"); this.hostResponseHeaderKey = hostResponseheaderKey; } public Http getHttp() { return (parent instanceof ContainerCluster) ? ((ContainerCluster<?>) parent).getHttp() : null; } @SuppressWarnings("unused") public JettyHttpServer getDefaultHttpServer() { return defaultHttpServer; } /** Returns the index of this node. The index of a given node is stable through changes with best effort. */ public final int index() { return index; } public void addBuiltinHandlers() { } @Override public void initService(DeployState deployState) { if (isInitialized()) return; super.initService(deployState); if (getHttp() == null) { initDefaultJettyConnector(); } } private int getPort(ConnectorFactory connectorFactory) { return connectorFactory.getListenPort(); } private void initDefaultJettyConnector() { defaultHttpServer.addConnector(new ConnectorFactory.Builder("SearchServer", getSearchPort()).build()); } private ContainerServiceType myServiceType = null; /** Subclasses must implement {@link @Override public final String getServiceType() { if (myServiceType == null) { myServiceType = myServiceType(); } return myServiceType.serviceName; } /** Subclasses must implement this for a custom service name. */ protected abstract ContainerServiceType myServiceType(); public void setClusterName(String name) { this.clusterName = name; } @Override public int getWantedPort() { return requiresWantedPort() ? 
BASEPORT: 0; } /** instance can use any port number for its default HTTP server */ public void useDynamicPorts() { requireSpecificPorts = false; } /** * First container must run on ports familiar to the user. */ @Override public boolean requiresWantedPort() { return requireSpecificPorts && (getHttp() == null); } /** * @return the number of ports needed by the Container */ public int getPortCount() { int httpPorts = (getHttp() != null) ? 0 : 2; return httpPorts + numMessageBusPorts() + numRpcPorts(); } @Override public void allocatePorts(int start, PortAllocBridge from) { if (start == 0) start = BASEPORT; int offset = 0; if (getHttp() == null) { if (requireSpecificPorts) { allocatedSearchPort = from.requirePort(start, "http"); } else { allocatedSearchPort = from.allocatePort("http"); } portsMeta.on(offset++).tag("http").tag("query").tag("external").tag("state"); from.allocatePort("http/1"); portsMeta.on(offset++).tag("http").tag("external"); } else if (getHttp().getHttpServer().isEmpty()) { } else { for (ConnectorFactory connectorFactory : getHttp().getHttpServer().get().getConnectorFactories()) { int port = getPort(connectorFactory); String name = "http/" + connectorFactory.getName(); from.requirePort(port, name); if (offset == 0) { portsMeta.on(offset++).tag("http").tag("query").tag("external").tag("state"); } else { portsMeta.on(offset++).tag("http").tag("external"); } } } if (messageBusEnabled()) { allocatedMessagingPort = from.allocatePort("messaging"); portsMeta.on(offset++).tag("rpc").tag("messaging"); } if (rpcServerEnabled()) { allocatedRpcPort = from.allocatePort("rpc/admin"); portsMeta.on(offset++).tag("rpc").tag("admin"); } } protected int allocatedSearchPort = 0; /** * @return the actual search port * TODO: Remove. 
Use {@link */ public int getSearchPort() { if (getHttp() != null) throw new AssertionError("getSearchPort must not be used when http section is present."); return allocatedSearchPort; } protected int allocatedRpcPort = 0; protected int getRpcPort() { return allocatedRpcPort; } protected int numRpcPorts() { return rpcServerEnabled() ? 1 : 0; } protected int allocatedMessagingPort = 0; private int getMessagingPort() { return allocatedMessagingPort; } protected int numMessageBusPorts() { return messageBusEnabled() ? 1 : 0; } @Override public int getHealthPort() { final Http http = getHttp(); if (http != null) { if (http.getHttpServer().isEmpty()) { return -1; } else { return getRelativePort(0); } } else { return httpServerEnabled() ? getSearchPort() : -1; } } public Optional<String> getStartupCommand() { return Optional.of("PRELOAD=" + getPreLoad() + " exec ${VESPA_HOME}/libexec/vespa/vespa-wrapper vespa-start-container-daemon " + getJvmOptions() + " "); } @Override public void getConfig(QrConfig.Builder builder) { builder.rpc(new Rpc.Builder() .enabled(rpcServerEnabled()) .port(getRpcPort()) .slobrokId(serviceSlobrokId())) .discriminator((clusterName != null ? clusterName + "." : "" ) + name) .clustername(clusterName != null ? 
clusterName : "") .nodeIndex(index) .shutdown.dumpHeapOnTimeout(dumpHeapOnShutdownTimeout) .timeout(shutdownTimeoutS); } /** Returns the jvm args set explicitly for this node */ public String getAssignedJvmOptions() { return super.getJvmOptions(); } private String serviceSlobrokId() { return "vespa/service/" + getConfigId(); } @Override public void getConfig(ComponentsConfig.Builder builder) { builder.setApplyOnRestart(owner.getDeferChangesUntilRestart()); builder.components.addAll(ComponentsConfigGenerator.generate(allEnabledComponents())); } private Collection<Component<?, ?>> allEnabledComponents() { Collection<Component<?, ?>> allComponents = new ArrayList<>(); addAllEnabledComponents(allComponents, this); return Collections.unmodifiableCollection(allComponents); } private void addAllEnabledComponents(Collection<Component<?, ?>> allComponents, TreeConfigProducer<?> current) { for (var child: current.getChildren().values()) { if ( ! httpServerEnabled() && isHttpServer(child)) continue; if (child instanceof Component) allComponents.add((Component<?, ?>) child); if (child instanceof TreeConfigProducer<?> t) { addAllEnabledComponents(allComponents, t); } } } private boolean isHttpServer(AnyConfigProducer component) { return component instanceof JettyHttpServer; } @Override public final void getConfig(JdiscBindingsConfig.Builder builder) { builder.handlers(DiscBindingsConfigGenerator.generate(handlers.getComponents())); } @Override public void getConfig(ContainerHttpConfig.Builder builder) { hostResponseHeaderKey.ifPresent(builder::hostResponseHeaderKey); } @Override public void getConfig(ContainerMbusConfig.Builder builder) { builder.port(getMessagingPort()); } @Override public HashMap<String,String> getDefaultMetricDimensions(){ HashMap<String, String> dimensions = new HashMap<>(); if (clusterName != null) dimensions.put("clustername", clusterName); return dimensions; } protected String prepareStopCommand(Duration timeout) { long rpcTimeoutSeconds = 
timeout.toSeconds() + 10; String rpcParams = "-t " + rpcTimeoutSeconds + " tcp/localhost:" + getRpcPort() + " prepareStop d:" + timeout.toSeconds(); return getDefaults().underVespaHome("bin/vespa-rpc-invoke") + " " + rpcParams; } private boolean messageBusEnabled() { return containerCluster().isPresent() && containerCluster().get().messageBusEnabled(); } private boolean httpServerEnabled() { return containerCluster().isPresent() && containerCluster().get().httpServerEnabled(); } private boolean rpcServerEnabled() { return containerCluster().isPresent() && containerCluster().get().rpcServerEnabled(); } protected Optional<ContainerCluster> containerCluster() { return Optional.ofNullable(containerClusterOrNull(parent)); } private static ContainerCluster containerClusterOrNull(AnyConfigProducer producer) { return producer instanceof ContainerCluster<?> ? (ContainerCluster<?>) producer : null; } void setLogctlSpecs(List<LogctlSpec> logctlSpecs) { this.logctlSpecs = logctlSpecs; } @Override public List<LogctlSpec> getLogctlSpecs() { return logctlSpecs; } }
class Container extends AbstractService implements QrConfig.Producer, ComponentsConfig.Producer, JdiscBindingsConfig.Producer, ContainerHttpConfig.Producer, ContainerMbusConfig.Producer { public static final int BASEPORT = Defaults.getDefaults().vespaWebServicePort(); public static final String SINGLENODE_CONTAINER_SERVICESPEC = "default_singlenode_container"; /** The cluster this container belongs to, or null if it is not added to any cluster */ private ContainerCluster<?> owner = null; private List<LogctlSpec> logctlSpecs = List.of(); protected final TreeConfigProducer<?> parent; private final String name; private boolean requireSpecificPorts = true; private String clusterName = null; private Optional<String> hostResponseHeaderKey = Optional.empty(); /** Whether this node has been marked as retired (e.g, will be removed) */ private final boolean retired; /** The unique index of this node */ private final int index; private final boolean dumpHeapOnShutdownTimeout; private final double shutdownTimeoutS; private final ComponentGroup<Handler> handlers = new ComponentGroup<>(this, "handler"); private final ComponentGroup<Component<?, ?>> components = new ComponentGroup<>(this, "components"); private final JettyHttpServer defaultHttpServer; protected Container(TreeConfigProducer<?> parent, String name, int index, DeployState deployState) { this(parent, name, false, index, deployState); } protected String jvmOmitStackTraceInFastThrowOption(ModelContext.FeatureFlags featureFlags) { return featureFlags.jvmOmitStackTraceInFastThrowOption(ClusterSpec.Type.container); } void setOwner(ContainerCluster<?> owner) { this.owner = owner; } /** True if this container is retired (slated for removal) */ public boolean isRetired() { return retired; } public ComponentGroup<Handler> getHandlers() { return handlers; } public ComponentGroup<?> getComponents() { return components; } public final void addComponent(Component c) { components.addComponent(c); } public final void 
addSimpleComponent(String idSpec, String classSpec, String bundleSpec) { addComponent(new SimpleComponent(new ComponentModel(idSpec, classSpec, bundleSpec))); } public final void addHandler(Handler h) { handlers.addComponent(h); } /** * If present, this container should emit this header key with the value set to the local hostname * in HTTP responses */ @SuppressWarnings("unused") public void setHostResponseHeaderKey(Optional<String> hostResponseheaderKey) { Objects.requireNonNull(hostResponseheaderKey, "HostResponseheaderKey cannot be null"); this.hostResponseHeaderKey = hostResponseheaderKey; } public Http getHttp() { return (parent instanceof ContainerCluster) ? ((ContainerCluster<?>) parent).getHttp() : null; } @SuppressWarnings("unused") public JettyHttpServer getDefaultHttpServer() { return defaultHttpServer; } /** Returns the index of this node. The index of a given node is stable through changes with best effort. */ public final int index() { return index; } public void addBuiltinHandlers() { } @Override public void initService(DeployState deployState) { if (isInitialized()) return; super.initService(deployState); if (getHttp() == null) { initDefaultJettyConnector(); } } private int getPort(ConnectorFactory connectorFactory) { return connectorFactory.getListenPort(); } private void initDefaultJettyConnector() { defaultHttpServer.addConnector(new ConnectorFactory.Builder("SearchServer", getSearchPort()).build()); } private ContainerServiceType myServiceType = null; /** Subclasses must implement {@link @Override public final String getServiceType() { if (myServiceType == null) { myServiceType = myServiceType(); } return myServiceType.serviceName; } /** Subclasses must implement this for a custom service name. */ protected abstract ContainerServiceType myServiceType(); public void setClusterName(String name) { this.clusterName = name; } @Override public int getWantedPort() { return requiresWantedPort() ? 
BASEPORT: 0; } /** instance can use any port number for its default HTTP server */ public void useDynamicPorts() { requireSpecificPorts = false; } /** * First container must run on ports familiar to the user. */ @Override public boolean requiresWantedPort() { return requireSpecificPorts && (getHttp() == null); } /** * @return the number of ports needed by the Container */ public int getPortCount() { int httpPorts = (getHttp() != null) ? 0 : 2; return httpPorts + numMessageBusPorts() + numRpcPorts(); } @Override public void allocatePorts(int start, PortAllocBridge from) { if (start == 0) start = BASEPORT; int offset = 0; if (getHttp() == null) { if (requireSpecificPorts) { allocatedSearchPort = from.requirePort(start, "http"); } else { allocatedSearchPort = from.allocatePort("http"); } portsMeta.on(offset++).tag("http").tag("query").tag("external").tag("state"); from.allocatePort("http/1"); portsMeta.on(offset++).tag("http").tag("external"); } else if (getHttp().getHttpServer().isEmpty()) { } else { for (ConnectorFactory connectorFactory : getHttp().getHttpServer().get().getConnectorFactories()) { int port = getPort(connectorFactory); String name = "http/" + connectorFactory.getName(); from.requirePort(port, name); if (offset == 0) { portsMeta.on(offset++).tag("http").tag("query").tag("external").tag("state"); } else { portsMeta.on(offset++).tag("http").tag("external"); } } } if (messageBusEnabled()) { allocatedMessagingPort = from.allocatePort("messaging"); portsMeta.on(offset++).tag("rpc").tag("messaging"); } if (rpcServerEnabled()) { allocatedRpcPort = from.allocatePort("rpc/admin"); portsMeta.on(offset++).tag("rpc").tag("admin"); } } protected int allocatedSearchPort = 0; /** * @return the actual search port * TODO: Remove. 
Use {@link */ public int getSearchPort() { if (getHttp() != null) throw new AssertionError("getSearchPort must not be used when http section is present."); return allocatedSearchPort; } protected int allocatedRpcPort = 0; protected int getRpcPort() { return allocatedRpcPort; } protected int numRpcPorts() { return rpcServerEnabled() ? 1 : 0; } protected int allocatedMessagingPort = 0; private int getMessagingPort() { return allocatedMessagingPort; } protected int numMessageBusPorts() { return messageBusEnabled() ? 1 : 0; } @Override public int getHealthPort() { final Http http = getHttp(); if (http != null) { if (http.getHttpServer().isEmpty()) { return -1; } else { return getRelativePort(0); } } else { return httpServerEnabled() ? getSearchPort() : -1; } } public Optional<String> getStartupCommand() { return Optional.of("PRELOAD=" + getPreLoad() + " exec ${VESPA_HOME}/libexec/vespa/vespa-wrapper vespa-start-container-daemon " + getJvmOptions() + " "); } @Override public void getConfig(QrConfig.Builder builder) { builder.rpc(new Rpc.Builder() .enabled(rpcServerEnabled()) .port(getRpcPort()) .slobrokId(serviceSlobrokId())) .discriminator((clusterName != null ? clusterName + "." : "" ) + name) .clustername(clusterName != null ? 
clusterName : "") .nodeIndex(index) .shutdown.dumpHeapOnTimeout(dumpHeapOnShutdownTimeout) .timeout(shutdownTimeoutS); } /** Returns the jvm args set explicitly for this node */ public String getAssignedJvmOptions() { return super.getJvmOptions(); } private String serviceSlobrokId() { return "vespa/service/" + getConfigId(); } @Override public void getConfig(ComponentsConfig.Builder builder) { builder.setApplyOnRestart(owner.getDeferChangesUntilRestart()); builder.components.addAll(ComponentsConfigGenerator.generate(allEnabledComponents())); } private Collection<Component<?, ?>> allEnabledComponents() { Collection<Component<?, ?>> allComponents = new ArrayList<>(); addAllEnabledComponents(allComponents, this); return Collections.unmodifiableCollection(allComponents); } private void addAllEnabledComponents(Collection<Component<?, ?>> allComponents, TreeConfigProducer<?> current) { for (var child: current.getChildren().values()) { if ( ! httpServerEnabled() && isHttpServer(child)) continue; if (child instanceof Component) allComponents.add((Component<?, ?>) child); if (child instanceof TreeConfigProducer<?> t) { addAllEnabledComponents(allComponents, t); } } } private boolean isHttpServer(AnyConfigProducer component) { return component instanceof JettyHttpServer; } @Override public final void getConfig(JdiscBindingsConfig.Builder builder) { builder.handlers(DiscBindingsConfigGenerator.generate(handlers.getComponents())); } @Override public void getConfig(ContainerHttpConfig.Builder builder) { hostResponseHeaderKey.ifPresent(builder::hostResponseHeaderKey); } @Override public void getConfig(ContainerMbusConfig.Builder builder) { builder.port(getMessagingPort()); } @Override public HashMap<String,String> getDefaultMetricDimensions(){ HashMap<String, String> dimensions = new HashMap<>(); if (clusterName != null) dimensions.put("clustername", clusterName); return dimensions; } protected String prepareStopCommand(Duration timeout) { long rpcTimeoutSeconds = 
timeout.toSeconds() + 10; String rpcParams = "-t " + rpcTimeoutSeconds + " tcp/localhost:" + getRpcPort() + " prepareStop d:" + timeout.toSeconds(); return getDefaults().underVespaHome("bin/vespa-rpc-invoke") + " " + rpcParams; } private boolean messageBusEnabled() { return containerCluster().isPresent() && containerCluster().get().messageBusEnabled(); } private boolean httpServerEnabled() { return containerCluster().isPresent() && containerCluster().get().httpServerEnabled(); } private boolean rpcServerEnabled() { return containerCluster().isPresent() && containerCluster().get().rpcServerEnabled(); } protected Optional<ContainerCluster> containerCluster() { return Optional.ofNullable(containerClusterOrNull(parent)); } private static ContainerCluster containerClusterOrNull(AnyConfigProducer producer) { return producer instanceof ContainerCluster<?> ? (ContainerCluster<?>) producer : null; } void setLogctlSpecs(List<LogctlSpec> logctlSpecs) { this.logctlSpecs = logctlSpecs; } @Override public List<LogctlSpec> getLogctlSpecs() { return logctlSpecs; } }
Consider adding TODO to remove the fallback (throw in default).
return switch (name) { case "digicert" -> digicert; case "globalsign" -> globalsign; case "zerossl" -> zerossl; default -> digicert; };
default -> digicert;
return switch (name) { case "digicert" -> digicert; case "globalsign" -> globalsign; case "zerossl" -> zerossl; default -> digicert; }
class EndpointCertificateMetadataSerializer { private final static String keyNameField = "keyName"; private final static String certNameField = "certName"; private final static String versionField = "version"; private final static String issuerField = "issuer"; public static void toSlime(EndpointCertificateMetadata metadata, Cursor object) { object.setString(keyNameField, metadata.keyName()); object.setString(certNameField, metadata.certName()); object.setLong(versionField, metadata.version()); object.setString(issuerField, serializedValue(metadata.issuer())); } public static EndpointCertificateMetadata fromSlime(Inspector inspector) { if (inspector.type() == Type.OBJECT) { return new EndpointCertificateMetadata( inspector.field(keyNameField).asString(), inspector.field(certNameField).asString(), Math.toIntExact(inspector.field(versionField).asLong()), providerOf(SlimeUtils.optionalString(inspector.field(issuerField)).orElse(""))); } throw new IllegalArgumentException("Unknown format encountered for endpoint certificate metadata!"); } private static EndpointCertificateMetadata.Provider providerOf(String name) { ; } private static String serializedValue(EndpointCertificateMetadata.Provider provider) { return switch (provider) { case digicert -> "digicert"; case globalsign -> "globalsign"; case zerossl -> "zerossl"; }; } }
class EndpointCertificateMetadataSerializer { private final static String keyNameField = "keyName"; private final static String certNameField = "certName"; private final static String versionField = "version"; private final static String issuerField = "issuer"; public static void toSlime(EndpointCertificateMetadata metadata, Cursor object) { object.setString(keyNameField, metadata.keyName()); object.setString(certNameField, metadata.certName()); object.setLong(versionField, metadata.version()); object.setString(issuerField, serializedValue(metadata.issuer())); } public static EndpointCertificateMetadata fromSlime(Inspector inspector) { if (inspector.type() == Type.OBJECT) { return new EndpointCertificateMetadata( inspector.field(keyNameField).asString(), inspector.field(certNameField).asString(), Math.toIntExact(inspector.field(versionField).asLong()), providerOf(SlimeUtils.optionalString(inspector.field(issuerField)).orElse(""))); } throw new IllegalArgumentException("Unknown format encountered for endpoint certificate metadata!"); } private static EndpointCertificateMetadata.Provider providerOf(String name) { ; } private static String serializedValue(EndpointCertificateMetadata.Provider provider) { return switch (provider) { case digicert -> "digicert"; case globalsign -> "globalsign"; case zerossl -> "zerossl"; }; } }
:++++++++++1:
protected void registerTests(Map<Integer, RunnableTest> out) { out.put(DocumentProtocol.MESSAGE_CREATEVISITOR, new testCreateVisitorMessage()); out.put(DocumentProtocol.MESSAGE_DESTROYVISITOR, new testDestroyVisitorMessage()); out.put(DocumentProtocol.MESSAGE_DOCUMENTLIST, new testDocumentListMessage()); out.put(DocumentProtocol.MESSAGE_EMPTYBUCKETS, new testEmptyBucketsMessage()); out.put(DocumentProtocol.MESSAGE_GETBUCKETLIST, new testGetBucketListMessage()); out.put(DocumentProtocol.MESSAGE_GETBUCKETSTATE, new testGetBucketStateMessage()); out.put(DocumentProtocol.MESSAGE_GETDOCUMENT, new testGetDocumentMessage()); out.put(DocumentProtocol.MESSAGE_MAPVISITOR, new testMapVisitorMessage()); out.put(DocumentProtocol.MESSAGE_PUTDOCUMENT, new testPutDocumentMessage()); out.put(DocumentProtocol.MESSAGE_QUERYRESULT, new testQueryResultMessage()); out.put(DocumentProtocol.MESSAGE_REMOVEDOCUMENT, new testRemoveDocumentMessage()); out.put(DocumentProtocol.MESSAGE_REMOVELOCATION, new testRemoveLocationMessage()); out.put(DocumentProtocol.MESSAGE_STATBUCKET, new testStatBucketMessage()); out.put(DocumentProtocol.MESSAGE_UPDATEDOCUMENT, new testUpdateDocumentMessage()); out.put(DocumentProtocol.MESSAGE_VISITORINFO, new testVisitorInfoMessage()); out.put(DocumentProtocol.REPLY_CREATEVISITOR, new testCreateVisitorReply()); out.put(DocumentProtocol.REPLY_DESTROYVISITOR, new testDestroyVisitorReply()); out.put(DocumentProtocol.REPLY_DOCUMENTIGNORED, new testDocumentIgnoredReply()); out.put(DocumentProtocol.REPLY_DOCUMENTLIST, new testDocumentListReply()); out.put(DocumentProtocol.REPLY_EMPTYBUCKETS, new testEmptyBucketsReply()); out.put(DocumentProtocol.REPLY_GETBUCKETLIST, new testGetBucketListReply()); out.put(DocumentProtocol.REPLY_GETBUCKETSTATE, new testGetBucketStateReply()); out.put(DocumentProtocol.REPLY_GETDOCUMENT, new testGetDocumentReply()); out.put(DocumentProtocol.REPLY_MAPVISITOR, new testMapVisitorReply()); out.put(DocumentProtocol.REPLY_PUTDOCUMENT, new 
testPutDocumentReply()); out.put(DocumentProtocol.REPLY_QUERYRESULT, new testQueryResultReply()); out.put(DocumentProtocol.REPLY_REMOVEDOCUMENT, new testRemoveDocumentReply()); out.put(DocumentProtocol.REPLY_REMOVELOCATION, new testRemoveLocationReply()); out.put(DocumentProtocol.REPLY_STATBUCKET, new testStatBucketReply()); out.put(DocumentProtocol.REPLY_UPDATEDOCUMENT, new testUpdateDocumentReply()); out.put(DocumentProtocol.REPLY_VISITORINFO, new testVisitorInfoReply()); out.put(DocumentProtocol.REPLY_WRONGDISTRIBUTION, new testWrongDistributionReply()); }
out.put(DocumentProtocol.MESSAGE_GETBUCKETSTATE, new testGetBucketStateMessage());
protected void registerTests(Map<Integer, RunnableTest> out) { out.put(DocumentProtocol.MESSAGE_CREATEVISITOR, new testCreateVisitorMessage()); out.put(DocumentProtocol.MESSAGE_DESTROYVISITOR, new testDestroyVisitorMessage()); out.put(DocumentProtocol.MESSAGE_DOCUMENTLIST, new testDocumentListMessage()); out.put(DocumentProtocol.MESSAGE_EMPTYBUCKETS, new testEmptyBucketsMessage()); out.put(DocumentProtocol.MESSAGE_GETBUCKETLIST, new testGetBucketListMessage()); out.put(DocumentProtocol.MESSAGE_GETBUCKETSTATE, new testGetBucketStateMessage()); out.put(DocumentProtocol.MESSAGE_GETDOCUMENT, new testGetDocumentMessage()); out.put(DocumentProtocol.MESSAGE_MAPVISITOR, new testMapVisitorMessage()); out.put(DocumentProtocol.MESSAGE_PUTDOCUMENT, new testPutDocumentMessage()); out.put(DocumentProtocol.MESSAGE_QUERYRESULT, new testQueryResultMessage()); out.put(DocumentProtocol.MESSAGE_REMOVEDOCUMENT, new testRemoveDocumentMessage()); out.put(DocumentProtocol.MESSAGE_REMOVELOCATION, new testRemoveLocationMessage()); out.put(DocumentProtocol.MESSAGE_STATBUCKET, new testStatBucketMessage()); out.put(DocumentProtocol.MESSAGE_UPDATEDOCUMENT, new testUpdateDocumentMessage()); out.put(DocumentProtocol.MESSAGE_VISITORINFO, new testVisitorInfoMessage()); out.put(DocumentProtocol.REPLY_CREATEVISITOR, new testCreateVisitorReply()); out.put(DocumentProtocol.REPLY_DESTROYVISITOR, new testDestroyVisitorReply()); out.put(DocumentProtocol.REPLY_DOCUMENTIGNORED, new testDocumentIgnoredReply()); out.put(DocumentProtocol.REPLY_DOCUMENTLIST, new testDocumentListReply()); out.put(DocumentProtocol.REPLY_EMPTYBUCKETS, new testEmptyBucketsReply()); out.put(DocumentProtocol.REPLY_GETBUCKETLIST, new testGetBucketListReply()); out.put(DocumentProtocol.REPLY_GETBUCKETSTATE, new testGetBucketStateReply()); out.put(DocumentProtocol.REPLY_GETDOCUMENT, new testGetDocumentReply()); out.put(DocumentProtocol.REPLY_MAPVISITOR, new testMapVisitorReply()); out.put(DocumentProtocol.REPLY_PUTDOCUMENT, new 
testPutDocumentReply()); out.put(DocumentProtocol.REPLY_QUERYRESULT, new testQueryResultReply()); out.put(DocumentProtocol.REPLY_REMOVEDOCUMENT, new testRemoveDocumentReply()); out.put(DocumentProtocol.REPLY_REMOVELOCATION, new testRemoveLocationReply()); out.put(DocumentProtocol.REPLY_STATBUCKET, new testStatBucketReply()); out.put(DocumentProtocol.REPLY_UPDATEDOCUMENT, new testUpdateDocumentReply()); out.put(DocumentProtocol.REPLY_VISITORINFO, new testVisitorInfoReply()); out.put(DocumentProtocol.REPLY_WRONGDISTRIBUTION, new testWrongDistributionReply()); }
/**
 * Round-trip serialization conformance tests for document-protocol routables at
 * protocol version 6.221.0. Each inner {@code test*} class serializes a message
 * or reply, asserts the exact serialized length, then deserializes the files
 * produced by every supported language implementation and asserts field
 * equality.
 *
 * Fixes over the previous revision: removed the duplicate {@code @Override}
 * annotation on {@code version()} (duplicate non-repeatable annotations do not
 * compile), and made {@code testUpdateDocumentReply} assert on the
 * deserialized {@code obj} rather than the original {@code reply}, so the
 * round-tripped timestamp is actually verified.
 */
class Messages60TestCase extends MessagesTestBase {

    @Override
    protected Version version() {
        return new Version(6, 221, 0);
    }

    @Override
    protected boolean shouldTestCoverage() {
        return true;
    }

    // Size in bytes of the fixed header prepended to every serialized routable.
    protected static final int BASE_MESSAGE_LENGTH = 5;

    // Test-and-set condition used by the put/remove/update document tests.
    private static final String CONDITION_STRING = "There's just one condition";

    public class testRemoveLocationMessage implements RunnableTest {
        @Override
        public void run() {
            RemoveLocationMessage msg = new RemoveLocationMessage("id.group == \"mygroup\"");
            assertEquals(BASE_MESSAGE_LENGTH + 29, serialize("RemoveLocationMessage", msg));
            for (Language lang : LANGUAGES) {
                msg = (RemoveLocationMessage)deserialize("RemoveLocationMessage", DocumentProtocol.MESSAGE_REMOVELOCATION, lang);
                assertEquals("id.group == \"mygroup\"", msg.getDocumentSelection());
            }
        }
    }

    public class testGetBucketListMessage implements RunnableTest {
        private static final String BUCKET_SPACE = "beartato";
        @Override
        public void run() {
            GetBucketListMessage msg = new GetBucketListMessage(new BucketId(16, 123));
            msg.setBucketSpace(BUCKET_SPACE);
            assertEquals(BASE_MESSAGE_LENGTH + 12 + serializedLength(BUCKET_SPACE), serialize("GetBucketListMessage", msg));
            for (Language lang : LANGUAGES) {
                msg = (GetBucketListMessage)deserialize("GetBucketListMessage", DocumentProtocol.MESSAGE_GETBUCKETLIST, lang);
                assertEquals(new BucketId(16, 123), msg.getBucketId());
                assertEquals(BUCKET_SPACE, msg.getBucketSpace());
            }
        }
    }

    public class testStatBucketMessage implements RunnableTest {
        private static final String BUCKET_SPACE = "andrei";
        @Override
        public void run() {
            StatBucketMessage msg = new StatBucketMessage(new BucketId(16, 123), "id.user=123");
            msg.setBucketSpace(BUCKET_SPACE);
            assertEquals(BASE_MESSAGE_LENGTH + 27 + serializedLength(BUCKET_SPACE), serialize("StatBucketMessage", msg));
            for (Language lang : LANGUAGES) {
                msg = (StatBucketMessage)deserialize("StatBucketMessage", DocumentProtocol.MESSAGE_STATBUCKET, lang);
                assertEquals(new BucketId(16, 123), msg.getBucketId());
                assertEquals("id.user=123", msg.getDocumentSelection());
                assertEquals(BUCKET_SPACE, msg.getBucketSpace());
            }
        }
    }

    public class testGetBucketStateMessage implements RunnableTest {
        @Override
        public void run() {
            GetBucketStateMessage msg = new GetBucketStateMessage(new BucketId(16, 666));
            assertEquals(BASE_MESSAGE_LENGTH + 12, serialize("GetBucketStateMessage", msg));
            for (Language lang : LANGUAGES) {
                msg = (GetBucketStateMessage)deserialize("GetBucketStateMessage", DocumentProtocol.MESSAGE_GETBUCKETSTATE, lang);
                assertEquals(16, msg.getBucketId().getUsedBits());
                // 0x400000000000029a = used-bits marker for 16 bits plus the raw id 666.
                assertEquals(4611686018427388570L, msg.getBucketId().getId());
            }
        }
    }

    public class testCreateVisitorMessage implements RunnableTest {
        private static final String BUCKET_SPACE = "bjarne";
        @Override
        public void run() {
            CreateVisitorMessage msg = new CreateVisitorMessage("SomeLibrary", "myvisitor", "newyork", "london");
            msg.setDocumentSelection("true and false or true");
            msg.getParameters().put("myvar", Utf8.toBytes("somevalue"));
            msg.getParameters().put("anothervar", Utf8.toBytes("34"));
            msg.getBuckets().add(new BucketId(16, 1234));
            msg.setVisitRemoves(true);
            msg.setFieldSet("foo bar");
            msg.setMaxBucketsPerVisitor(2);
            msg.setBucketSpace(BUCKET_SPACE);
            assertEquals(BASE_MESSAGE_LENGTH + 178 + serializedLength(BUCKET_SPACE), serialize("CreateVisitorMessage", msg));
            for (Language lang : LANGUAGES) {
                msg = (CreateVisitorMessage)deserialize("CreateVisitorMessage", DocumentProtocol.MESSAGE_CREATEVISITOR, lang);
                assertEquals("SomeLibrary", msg.getLibraryName());
                assertEquals("myvisitor", msg.getInstanceId());
                assertEquals("newyork", msg.getControlDestination());
                assertEquals("london", msg.getDataDestination());
                assertEquals("true and false or true", msg.getDocumentSelection());
                assertEquals(8, msg.getMaxPendingReplyCount());
                assertTrue(msg.getVisitRemoves());
                assertEquals("foo bar", msg.getFieldSet());
                assertFalse(msg.getVisitInconsistentBuckets());
                assertEquals(1, msg.getBuckets().size());
                assertEquals(new BucketId(16, 1234), msg.getBuckets().iterator().next());
                assertEquals("somevalue", Utf8.toString(msg.getParameters().get("myvar")));
                assertEquals("34", Utf8.toString(msg.getParameters().get("anothervar")));
                assertEquals(2, msg.getMaxBucketsPerVisitor());
                assertEquals(BUCKET_SPACE, msg.getBucketSpace());
            }
            // toString() rendering: no buckets, one bucket, then a truncated multi-bucket listing.
            msg.getBuckets().clear();
            assertEquals("CreateVisitorMessage(" +
                         "No buckets, " +
                         "selection 'true and false or true', " +
                         "bucket space 'bjarne', " +
                         "library SomeLibrary, including removes, " +
                         "get fields: foo bar" +
                         ")",
                         msg.toString());
            msg.getBuckets().add(new BucketId(16, 1234));
            assertEquals("CreateVisitorMessage(" +
                         "Bucket BucketId(0x40000000000004d2), " +
                         "selection 'true and false or true', " +
                         "bucket space 'bjarne', " +
                         "library SomeLibrary, including removes, " +
                         "get fields: foo bar" +
                         ")",
                         msg.toString());
            msg.getBuckets().add(new BucketId(16, 1235));
            msg.getBuckets().add(new BucketId(16, 1236));
            msg.getBuckets().add(new BucketId(16, 1237));
            msg.getBuckets().add(new BucketId(16, 1238));
            msg.setFromTimestamp(10001);
            msg.setToTimestamp(20002);
            msg.setVisitInconsistentBuckets(true);
            assertEquals("CreateVisitorMessage(" +
                         "5 buckets: BucketId(0x40000000000004d2) BucketId(0x40000000000004d3) BucketId(0x40000000000004d4) ..., " +
                         "time 10001-20002, " +
                         "selection 'true and false or true', " +
                         "bucket space 'bjarne', " +
                         "library SomeLibrary, including removes, " +
                         "get fields: foo bar, " +
                         "visit inconsistent buckets" +
                         ")",
                         msg.toString());
        }
    }

    public class testCreateVisitorReply implements RunnableTest {
        @Override
        public void run() {
            CreateVisitorReply reply = new CreateVisitorReply(DocumentProtocol.REPLY_CREATEVISITOR);
            reply.setLastBucket(new BucketId(16, 123));
            reply.getVisitorStatistics().setBucketsVisited(3);
            reply.getVisitorStatistics().setDocumentsVisited(1000);
            reply.getVisitorStatistics().setBytesVisited(1024000);
            reply.getVisitorStatistics().setDocumentsReturned(123);
            reply.getVisitorStatistics().setBytesReturned(512000);
            assertEquals(65, serialize("CreateVisitorReply", reply));
            for (Language lang : LANGUAGES) {
                reply = (CreateVisitorReply)deserialize("CreateVisitorReply", DocumentProtocol.REPLY_CREATEVISITOR, lang);
                assertNotNull(reply);
                assertEquals(new BucketId(16, 123), reply.getLastBucket());
                assertEquals(3, reply.getVisitorStatistics().getBucketsVisited());
                assertEquals(1000, reply.getVisitorStatistics().getDocumentsVisited());
                assertEquals(1024000, reply.getVisitorStatistics().getBytesVisited());
                assertEquals(123, reply.getVisitorStatistics().getDocumentsReturned());
                assertEquals(512000, reply.getVisitorStatistics().getBytesReturned());
            }
        }
    }

    public class testDestroyVisitorReply implements RunnableTest {
        @Override
        public void run() {
            testVisitorReply("DestroyVisitorReply", DocumentProtocol.REPLY_DESTROYVISITOR);
        }
    }

    public class testDocumentIgnoredReply implements RunnableTest {
        @Override
        public void run() {
            DocumentIgnoredReply reply = new DocumentIgnoredReply();
            assertEquals(BASE_MESSAGE_LENGTH, serialize("DocumentIgnoredReply", reply));
            for (Language lang : LANGUAGES) {
                reply = (DocumentIgnoredReply)deserialize("DocumentIgnoredReply", DocumentProtocol.REPLY_DOCUMENTIGNORED, lang);
            }
        }
    }

    public class testDocumentListReply implements RunnableTest {
        @Override
        public void run() {
            testVisitorReply("DocumentListReply", DocumentProtocol.REPLY_DOCUMENTLIST);
        }
    }

    public class testEmptyBucketsReply implements RunnableTest {
        @Override
        public void run() {
            testVisitorReply("EmptyBucketsReply", DocumentProtocol.REPLY_EMPTYBUCKETS);
        }
    }

    public class testDestroyVisitorMessage implements RunnableTest {
        @Override
        public void run() {
            DestroyVisitorMessage msg = new DestroyVisitorMessage("myvisitor");
            assertEquals(BASE_MESSAGE_LENGTH + 17, serialize("DestroyVisitorMessage", msg));
            for (Language lang : LANGUAGES) {
                msg = (DestroyVisitorMessage)deserialize("DestroyVisitorMessage", DocumentProtocol.MESSAGE_DESTROYVISITOR, lang);
                assertEquals("myvisitor", msg.getInstanceId());
            }
        }
    }

    public class testDocumentListMessage implements RunnableTest {
        @Override
        public void run() {
            // C++-produced file is deserialized first, re-serialized, then the Java file is checked.
            DocumentListMessage msg = (DocumentListMessage)deserialize("DocumentListMessage", DocumentProtocol.MESSAGE_DOCUMENTLIST, Language.CPP);
            assertEquals("id:scheme:testdoc:n=1234:1", msg.getDocuments().get(0).getDocument().getId().toString());
            assertEquals(1234, msg.getDocuments().get(0).getTimestamp());
            assertFalse(msg.getDocuments().get(0).isRemoveEntry());
            assertEquals(BASE_MESSAGE_LENGTH + 69, serialize("DocumentListMessage", msg));
            msg = (DocumentListMessage)deserialize("DocumentListMessage", DocumentProtocol.MESSAGE_DOCUMENTLIST, Language.JAVA);
            assertEquals("id:scheme:testdoc:n=1234:1", msg.getDocuments().get(0).getDocument().getId().toString());
            assertEquals(1234, msg.getDocuments().get(0).getTimestamp());
            assertFalse(msg.getDocuments().get(0).isRemoveEntry());
        }
    }

    public class testEmptyBucketsMessage implements RunnableTest {
        @Override
        public void run() {
            List<BucketId> bids = new ArrayList<>();
            for (int i = 0; i < 13; ++i) {
                bids.add(new BucketId(16, i));
            }
            EmptyBucketsMessage ebm = new EmptyBucketsMessage(bids);
            assertEquals(BASE_MESSAGE_LENGTH + 112, serialize("EmptyBucketsMessage", ebm));
            for (Language lang : LANGUAGES) {
                ebm = (EmptyBucketsMessage)deserialize("EmptyBucketsMessage", DocumentProtocol.MESSAGE_EMPTYBUCKETS, lang);
                for (int i = 0; i < 13; ++i) {
                    assertEquals(new BucketId(16, i), ebm.getBucketIds().get(i));
                }
            }
        }
    }

    public class testGetDocumentMessage implements RunnableTest {
        @Override
        public void run() {
            GetDocumentMessage msg = new GetDocumentMessage(new DocumentId("id:ns:testdoc::"), "foo bar");
            assertEquals(BASE_MESSAGE_LENGTH + 31, serialize("GetDocumentMessage", msg));
            for (Language lang : LANGUAGES) {
                msg = (GetDocumentMessage)deserialize("GetDocumentMessage", DocumentProtocol.MESSAGE_GETDOCUMENT, lang);
                assertEquals("id:ns:testdoc::", msg.getDocumentId().toString());
                assertEquals("foo bar", msg.getFieldSet());
            }
        }
    }

    public class testRemoveDocumentMessage implements RunnableTest {
        @Override
        public void run() {
            final RemoveDocumentMessage msg = new RemoveDocumentMessage(new DocumentId("id:ns:testdoc::"));
            msg.setCondition(new TestAndSetCondition(CONDITION_STRING));
            assertEquals(BASE_MESSAGE_LENGTH + 20 + serializedLength(msg.getCondition().getSelection()), serialize("RemoveDocumentMessage", msg));
            for (Language lang : LANGUAGES) {
                final RemoveDocumentMessage deserializedMsg = (RemoveDocumentMessage)deserialize("RemoveDocumentMessage", DocumentProtocol.MESSAGE_REMOVEDOCUMENT, lang);
                assertEquals(deserializedMsg.getDocumentId().toString(), msg.getDocumentId().toString());
            }
        }
    }

    public class testMapVisitorMessage implements RunnableTest {
        @Override
        public void run() {
            MapVisitorMessage msg = (MapVisitorMessage)deserialize("MapVisitorMessage", DocumentProtocol.MESSAGE_MAPVISITOR, Language.CPP);
            assertEquals("3", msg.getData().get("foo"));
            assertEquals("5", msg.getData().get("bar"));
            assertEquals(BASE_MESSAGE_LENGTH + 32, serialize("MapVisitorMessage", msg));
            msg = (MapVisitorMessage)deserialize("MapVisitorMessage", DocumentProtocol.MESSAGE_MAPVISITOR, Language.JAVA);
            assertEquals("3", msg.getData().get("foo"));
            assertEquals("5", msg.getData().get("bar"));
        }
    }

    public class testVisitorInfoMessage implements RunnableTest {
        @Override
        public void run() {
            VisitorInfoMessage msg = new VisitorInfoMessage();
            msg.getFinishedBuckets().add(new BucketId(16, 1));
            msg.getFinishedBuckets().add(new BucketId(16, 2));
            msg.getFinishedBuckets().add(new BucketId(16, 4));
            // Non-ASCII error message exercises UTF-8 string serialization.
            msg.setErrorMessage("error message: \u00e6\u00c6\u00f8\u00d8\u00e5\u00c5\u00f6\u00d6");
            assertEquals(BASE_MESSAGE_LENGTH + 67, serialize("VisitorInfoMessage", msg));
            for (Language lang : LANGUAGES) {
                msg = (VisitorInfoMessage)deserialize("VisitorInfoMessage", DocumentProtocol.MESSAGE_VISITORINFO, lang);
                assertTrue(msg.getFinishedBuckets().contains(new BucketId(16, 1)));
                assertTrue(msg.getFinishedBuckets().contains(new BucketId(16, 2)));
                assertTrue(msg.getFinishedBuckets().contains(new BucketId(16, 4)));
                assertEquals("error message: \u00e6\u00c6\u00f8\u00d8\u00e5\u00c5\u00f6\u00d6", msg.getErrorMessage());
            }
        }
    }

    public class testPutDocumentMessage implements RunnableTest {

        // Verifies that the trailing create-if-non-existent flag round-trips, and that
        // truncated/padded payloads decode with the flag absent/present respectively.
        void verifyCreateIfNonExistentFlag() {
            var msg = new PutDocumentMessage(new DocumentPut(new Document(protocol.getDocumentTypeManager().getDocumentType("testdoc"), "id:ns:testdoc::")));
            msg.setCreateIfNonExistent(true);
            int size_of_create_if_non_existent_flag = 1;
            int expected_serialized_size = BASE_MESSAGE_LENGTH + 45 + serializedLength(msg.getCondition().getSelection()) + size_of_create_if_non_existent_flag;
            assertEquals(expected_serialized_size, serialize("PutDocumentMessage-create", msg));
            assertEquals(expected_serialized_size - 1, serialize("PutDocumentMessage-create-truncate", msg, data -> DataTamper.truncate(data, 1)));
            assertEquals(expected_serialized_size + 1, serialize("PutDocumentMessage-create-pad", msg, data -> DataTamper.pad(data, 1)));
            for (Language lang : LANGUAGES) {
                var decoded = (PutDocumentMessage)deserialize("PutDocumentMessage-create", DocumentProtocol.MESSAGE_PUTDOCUMENT, lang);
                var decoded_trunc = (PutDocumentMessage)deserialize("PutDocumentMessage-create-truncate", DocumentProtocol.MESSAGE_PUTDOCUMENT, lang);
                var decoded_pad = (PutDocumentMessage)deserialize("PutDocumentMessage-create-pad", DocumentProtocol.MESSAGE_PUTDOCUMENT, lang);
                assertTrue(decoded.getCreateIfNonExistent());
                assertFalse(decoded_trunc.getCreateIfNonExistent());
                assertTrue(decoded_pad.getCreateIfNonExistent());
                assertTrue(decoded.getDocumentPut().equals(decoded_pad.getDocumentPut()));
                assertFalse(decoded.getDocumentPut().equals(decoded_trunc.getDocumentPut()));
            }
        }

        @Override
        public void run() {
            PutDocumentMessage msg = new PutDocumentMessage(new DocumentPut(new Document(protocol.getDocumentTypeManager().getDocumentType("testdoc"), "id:ns:testdoc::")));
            msg.setTimestamp(666);
            msg.setCondition(new TestAndSetCondition(CONDITION_STRING));
            int size_of_create_if_non_existent_flag = 1;
            int expected_serialized_size = BASE_MESSAGE_LENGTH + 45 + serializedLength(msg.getCondition().getSelection()) + size_of_create_if_non_existent_flag;
            assertEquals(expected_serialized_size, serialize("PutDocumentMessage", msg));
            for (Language lang : LANGUAGES) {
                final PutDocumentMessage deserializedMsg = (PutDocumentMessage)deserialize("PutDocumentMessage", DocumentProtocol.MESSAGE_PUTDOCUMENT, lang);
                assertEquals(msg.getDocumentPut().getDocument().getDataType().getName(), deserializedMsg.getDocumentPut().getDocument().getDataType().getName());
                assertEquals(msg.getDocumentPut().getDocument().getId().toString(), deserializedMsg.getDocumentPut().getDocument().getId().toString());
                assertEquals(msg.getTimestamp(), deserializedMsg.getTimestamp());
                assertEquals(msg.getCondition().getSelection(), deserializedMsg.getCondition().getSelection());
                assertFalse(deserializedMsg.getCreateIfNonExistent());
            }
            verifyCreateIfNonExistentFlag();
        }
    }

    public class testPutDocumentReply implements RunnableTest {
        @Override
        public void run() {
            WriteDocumentReply reply = new WriteDocumentReply(DocumentProtocol.REPLY_PUTDOCUMENT);
            reply.setHighestModificationTimestamp(30);
            assertEquals(13, serialize("PutDocumentReply", reply));
            for (Language lang : LANGUAGES) {
                WriteDocumentReply obj = (WriteDocumentReply)deserialize("PutDocumentReply", DocumentProtocol.REPLY_PUTDOCUMENT, lang);
                assertNotNull(obj);
                assertEquals(30, obj.getHighestModificationTimestamp());
            }
        }
    }

    public class testUpdateDocumentMessage implements RunnableTest {
        @Override
        public void run() {
            DocumentType docType = protocol.getDocumentTypeManager().getDocumentType("testdoc");
            DocumentUpdate update = new DocumentUpdate(docType, new DocumentId("id:ns:testdoc::"));
            update.addFieldPathUpdate(new RemoveFieldPathUpdate(docType, "intfield", "testdoc.intfield > 0"));
            final UpdateDocumentMessage msg = new UpdateDocumentMessage(update);
            msg.setNewTimestamp(777);
            msg.setOldTimestamp(666);
            msg.setCondition(new TestAndSetCondition(CONDITION_STRING));
            assertEquals(BASE_MESSAGE_LENGTH + 93 + serializedLength(msg.getCondition().getSelection()), serialize("UpdateDocumentMessage", msg));
            for (Language lang : LANGUAGES) {
                final UpdateDocumentMessage deserializedMsg = (UpdateDocumentMessage)deserialize("UpdateDocumentMessage", DocumentProtocol.MESSAGE_UPDATEDOCUMENT, lang);
                assertEquals(msg.getDocumentUpdate(), deserializedMsg.getDocumentUpdate());
                assertEquals(msg.getNewTimestamp(), deserializedMsg.getNewTimestamp());
                assertEquals(msg.getOldTimestamp(), deserializedMsg.getOldTimestamp());
                assertEquals(msg.getCondition().getSelection(), deserializedMsg.getCondition().getSelection());
            }
        }
    }

    public class testUpdateDocumentReply implements RunnableTest {
        @Override
        public void run() {
            UpdateDocumentReply reply = new UpdateDocumentReply();
            reply.setHighestModificationTimestamp(30);
            reply.setWasFound(false);
            assertEquals(14, serialize("UpdateDocumentReply", reply));
            for (Language lang : LANGUAGES) {
                UpdateDocumentReply obj = (UpdateDocumentReply)deserialize("UpdateDocumentReply", DocumentProtocol.REPLY_UPDATEDOCUMENT, lang);
                assertNotNull(obj);
                // Was asserting on 'reply' (the pre-serialization object); must check the deserialized 'obj'.
                assertEquals(30, obj.getHighestModificationTimestamp());
                assertFalse(obj.wasFound());
            }
        }
    }

    public class testVisitorInfoReply implements RunnableTest {
        @Override
        public void run() {
            testVisitorReply("VisitorInfoReply", DocumentProtocol.REPLY_VISITORINFO);
        }
    }

    public class testWrongDistributionReply implements RunnableTest {
        @Override
        public void run() {
            WrongDistributionReply reply = new WrongDistributionReply("distributor:3 storage:2");
            assertEquals(32, serialize("WrongDistributionReply", reply));
            for (Language lang : LANGUAGES) {
                reply = (WrongDistributionReply)deserialize("WrongDistributionReply", DocumentProtocol.REPLY_WRONGDISTRIBUTION, lang);
                assertEquals("distributor:3 storage:2", reply.getSystemState());
            }
        }
    }

    public class testRemoveDocumentReply implements RunnableTest {
        @Override
        public void run() {
            RemoveDocumentReply reply = new RemoveDocumentReply();
            reply.setHighestModificationTimestamp(30);
            reply.setWasFound(false);
            assertEquals(14, serialize("RemoveDocumentReply", reply));
            for (Language lang : LANGUAGES) {
                RemoveDocumentReply obj = (RemoveDocumentReply)deserialize("RemoveDocumentReply", DocumentProtocol.REPLY_REMOVEDOCUMENT, lang);
                assertNotNull(obj);
                assertEquals(30, obj.getHighestModificationTimestamp());
                assertFalse(obj.wasFound());
            }
        }
    }

    public class testRemoveLocationReply implements RunnableTest {
        @Override
        public void run() {
            testDocumentReply("RemoveLocationReply", DocumentProtocol.REPLY_REMOVELOCATION);
        }
    }

    public class testStatBucketReply implements RunnableTest {
        @Override
        public void run() {
            StatBucketReply msg = new StatBucketReply();
            msg.setResults("These are the votes of the Norwegian jury");
            assertEquals(50, serialize("StatBucketReply", msg));
            for (Language lang : LANGUAGES) {
                msg = (StatBucketReply)deserialize("StatBucketReply", DocumentProtocol.REPLY_STATBUCKET, lang);
                assertEquals("These are the votes of the Norwegian jury", msg.getResults());
            }
        }
    }

    public class testQueryResultReply implements RunnableTest {
        @Override
        public void run() {
            testVisitorReply("QueryResultReply", DocumentProtocol.REPLY_QUERYRESULT);
        }
    }

    public class testQueryResultMessage implements RunnableTest {
        @Override
        public void run() throws Exception {
            test_result_with_match_features();
            // QueryResultMessage-1: empty result set.
            Routable routable = deserialize("QueryResultMessage-1", DocumentProtocol.MESSAGE_QUERYRESULT, Language.CPP);
            assertTrue(routable instanceof QueryResultMessage);
            QueryResultMessage msg = (QueryResultMessage)routable;
            assertEquals(0, msg.getResult().getHitCount());
            // QueryResultMessage-2: two plain hits.
            routable = deserialize("QueryResultMessage-2", DocumentProtocol.MESSAGE_QUERYRESULT, Language.CPP);
            assertTrue(routable instanceof QueryResultMessage);
            msg = (QueryResultMessage)routable;
            assertEquals(2, msg.getResult().getHitCount());
            com.yahoo.vdslib.SearchResult.Hit h = msg.getResult().getHit(0);
            assertEquals(89.0, h.getRank(), 1E-6);
            assertEquals("doc1", h.getDocId());
            assertFalse(h.getMatchFeatures().isPresent());
            h = msg.getResult().getHit(1);
            assertEquals(109.0, h.getRank(), 1E-6);
            assertEquals("doc17", h.getDocId());
            assertFalse(h.getMatchFeatures().isPresent());
            // QueryResultMessage-3: same hits in the opposite order.
            routable = deserialize("QueryResultMessage-3", DocumentProtocol.MESSAGE_QUERYRESULT, Language.CPP);
            assertTrue(routable instanceof QueryResultMessage);
            msg = (QueryResultMessage)routable;
            assertEquals(2, msg.getResult().getHitCount());
            h = msg.getResult().getHit(0);
            assertEquals(109.0, h.getRank(), 1E-6);
            assertEquals("doc17", h.getDocId());
            assertFalse(h.getMatchFeatures().isPresent());
            h = msg.getResult().getHit(1);
            assertEquals(89.0, h.getRank(), 1E-6);
            assertEquals("doc1", h.getDocId());
            assertFalse(h.getMatchFeatures().isPresent());
            // QueryResultMessage-4: hits carrying sort blobs.
            routable = deserialize("QueryResultMessage-4", DocumentProtocol.MESSAGE_QUERYRESULT, Language.CPP);
            assertTrue(routable instanceof QueryResultMessage);
            msg = (QueryResultMessage)routable;
            assertEquals(3, msg.getResult().getHitCount());
            h = msg.getResult().getHit(0);
            assertTrue(h instanceof SearchResult.HitWithSortBlob);
            assertEquals(89.0, h.getRank(), 1E-6);
            assertEquals("doc1", h.getDocId());
            byte[] b = ((SearchResult.HitWithSortBlob)h).getSortBlob();
            assertEqualsData(new byte[] { 's', 'o', 'r', 't', 'd', 'a', 't', 'a', '2' }, b);
            h = msg.getResult().getHit(1);
            assertTrue(h instanceof SearchResult.HitWithSortBlob);
            assertEquals(109.0, h.getRank(), 1E-6);
            assertEquals("doc17", h.getDocId());
            b = ((SearchResult.HitWithSortBlob)h).getSortBlob();
            assertEqualsData(new byte[] { 's', 'o', 'r', 't', 'd', 'a', 't', 'a', '1' }, b);
            h = msg.getResult().getHit(2);
            assertTrue(h instanceof SearchResult.HitWithSortBlob);
            assertEquals(90.0, h.getRank(), 1E-6);
            assertEquals("doc18", h.getDocId());
            b = ((SearchResult.HitWithSortBlob)h).getSortBlob();
            assertEqualsData(new byte[] { 's', 'o', 'r', 't', 'd', 'a', 't', 'a', '3' }, b);
        }

        // Element-wise equality for raw byte arrays.
        void assertEqualsData(byte[] exp, byte[] act) {
            assertEquals(exp.length, act.length);
            for (int i = 0; i < exp.length; ++i) {
                assertEquals(exp[i], act[i]);
            }
        }

        // QueryResultMessage-6: hits carrying per-hit match features (double and data fields).
        void test_result_with_match_features() {
            Routable routable = deserialize("QueryResultMessage-6", DocumentProtocol.MESSAGE_QUERYRESULT, Language.CPP);
            assertTrue(routable instanceof QueryResultMessage);
            var msg = (QueryResultMessage)routable;
            assertEquals(2, msg.getResult().getHitCount());
            var h = msg.getResult().getHit(0);
            assertTrue(h instanceof SearchResult.Hit);
            assertEquals(7.0, h.getRank(), 1E-6);
            assertEquals("doc2", h.getDocId());
            assertTrue(h.getMatchFeatures().isPresent());
            var mf = h.getMatchFeatures().get();
            assertEquals(12.0, mf.field("foo").asDouble(), 1E-6);
            assertEqualsData(new byte[] { 'T', 'h', 'e', 'r', 'e' }, mf.field("bar").asData());
            h = msg.getResult().getHit(1);
            assertTrue(h instanceof SearchResult.Hit);
            assertEquals(5.0, h.getRank(), 1E-6);
            assertEquals("doc1", h.getDocId());
            assertTrue(h.getMatchFeatures().isPresent());
            mf = h.getMatchFeatures().get();
            assertEquals(1.0, mf.field("foo").asDouble(), 1E-6);
            assertEqualsData(new byte[] { 'H', 'i' }, mf.field("bar").asData());
        }
    }

    public class testGetBucketListReply implements RunnableTest {
        @Override
        public void run() {
            GetBucketListReply reply = new GetBucketListReply();
            reply.getBuckets().add(new GetBucketListReply.BucketInfo(new BucketId(16, 123), "foo"));
            reply.getBuckets().add(new GetBucketListReply.BucketInfo(new BucketId(17, 1123), "bar"));
            reply.getBuckets().add(new GetBucketListReply.BucketInfo(new BucketId(18, 11123), "zoink"));
            assertEquals(56, serialize("GetBucketListReply", reply));
            for (Language lang : LANGUAGES) {
                reply = (GetBucketListReply)deserialize("GetBucketListReply", DocumentProtocol.REPLY_GETBUCKETLIST, lang);
                assertEquals(reply.getBuckets().get(0), new GetBucketListReply.BucketInfo(new BucketId(16, 123), "foo"));
                assertEquals(reply.getBuckets().get(1), new GetBucketListReply.BucketInfo(new BucketId(17, 1123), "bar"));
                assertEquals(reply.getBuckets().get(2), new GetBucketListReply.BucketInfo(new BucketId(18, 11123), "zoink"));
            }
        }
    }

    public class testGetBucketStateReply implements RunnableTest {
        @Override
        public void run() {
            GlobalId foo = new GlobalId(IdString.createIdString("id:ns:testdoc::foo"));
            GlobalId bar = new GlobalId(IdString.createIdString("id:ns:testdoc::bar"));
            GetBucketStateReply reply = new GetBucketStateReply();
            List<DocumentState> state = new ArrayList<>(2);
            state.add(new DocumentState(foo, 777, false));
            state.add(new DocumentState(bar, 888, true));
            reply.setBucketState(state);
            assertEquals(53, serialize("GetBucketStateReply", reply));
            for (Language lang : LANGUAGES) {
                reply = (GetBucketStateReply)deserialize("GetBucketStateReply", DocumentProtocol.REPLY_GETBUCKETSTATE, lang);
                assertEquals(777, reply.getBucketState().get(0).getTimestamp());
                assertEquals(foo, reply.getBucketState().get(0).getGid());
                assertFalse(reply.getBucketState().get(0).isRemoveEntry());
                assertEquals(888, reply.getBucketState().get(1).getTimestamp());
                assertEquals(bar, reply.getBucketState().get(1).getGid());
                assertTrue(reply.getBucketState().get(1).isRemoveEntry());
            }
        }
    }

    public class testGetDocumentReply implements RunnableTest {
        @Override
        public void run() {
            GetDocumentReply reply = new GetDocumentReply(new Document(protocol.getDocumentTypeManager().getDocumentType("testdoc"), "id:ns:testdoc::"));
            assertEquals(47, serialize("GetDocumentReply", reply));
            for (Language lang : LANGUAGES) {
                reply = (GetDocumentReply)deserialize("GetDocumentReply", DocumentProtocol.REPLY_GETDOCUMENT, lang);
                assertEquals("testdoc", reply.getDocument().getDataType().getName());
                assertEquals("id:ns:testdoc::", reply.getDocument().getId().toString());
            }
        }
    }

    public class testMapVisitorReply implements RunnableTest {
        @Override
        public void run() {
            testVisitorReply("MapVisitorReply", DocumentProtocol.REPLY_MAPVISITOR);
        }
    }

    // Shared check for replies with no payload beyond the common document-reply header.
    protected void testDocumentReply(String filename, int type) {
        DocumentReply reply = new DocumentReply(type);
        assertEquals(BASE_MESSAGE_LENGTH, serialize(filename, reply));
        for (Language lang : LANGUAGES) {
            reply = (DocumentReply)deserialize(filename, type, lang);
            assertNotNull(reply);
        }
    }

    // Shared check for replies with no payload beyond the common visitor-reply header.
    protected void testVisitorReply(String filename, int type) {
        VisitorReply reply = new VisitorReply(type);
        assertEquals(BASE_MESSAGE_LENGTH, serialize(filename, reply));
        for (Language lang : LANGUAGES) {
            reply = (VisitorReply)deserialize(filename, type, lang);
            assertNotNull(reply);
        }
    }

    // Wire size of a length-prefixed string: 4-byte length + content
    // (assumes ASCII, where char count equals byte count — true for all strings used here).
    static int serializedLength(String str) {
        return 4 + str.length();
    }
}
class Messages60TestCase extends MessagesTestBase { @Override @Override protected Version version() { return new Version(6, 221, 0); } @Override protected boolean shouldTestCoverage() { return true; } protected static int BASE_MESSAGE_LENGTH = 5; public class testRemoveLocationMessage implements RunnableTest { @Override public void run() { { RemoveLocationMessage msg = new RemoveLocationMessage("id.group == \"mygroup\""); assertEquals(BASE_MESSAGE_LENGTH + 29, serialize("RemoveLocationMessage", msg)); for (Language lang : LANGUAGES) { msg = (RemoveLocationMessage)deserialize("RemoveLocationMessage", DocumentProtocol.MESSAGE_REMOVELOCATION, lang); assertEquals("id.group == \"mygroup\"", msg.getDocumentSelection()); } } } } public class testGetBucketListMessage implements RunnableTest { private static final String BUCKET_SPACE = "beartato"; @Override public void run() { GetBucketListMessage msg = new GetBucketListMessage(new BucketId(16, 123)); msg.setBucketSpace(BUCKET_SPACE); assertEquals(BASE_MESSAGE_LENGTH + 12 + serializedLength(BUCKET_SPACE), serialize("GetBucketListMessage", msg)); for (Language lang : LANGUAGES) { msg = (GetBucketListMessage)deserialize("GetBucketListMessage", DocumentProtocol.MESSAGE_GETBUCKETLIST, lang); assertEquals(new BucketId(16, 123), msg.getBucketId()); assertEquals(BUCKET_SPACE, msg.getBucketSpace()); } } } public class testStatBucketMessage implements RunnableTest { private static final String BUCKET_SPACE = "andrei"; @Override public void run() { StatBucketMessage msg = new StatBucketMessage(new BucketId(16, 123), "id.user=123"); msg.setBucketSpace(BUCKET_SPACE); assertEquals(BASE_MESSAGE_LENGTH + 27 + serializedLength(BUCKET_SPACE), serialize("StatBucketMessage", msg)); for (Language lang : LANGUAGES) { msg = (StatBucketMessage)deserialize("StatBucketMessage", DocumentProtocol.MESSAGE_STATBUCKET, lang); assertEquals(new BucketId(16, 123), msg.getBucketId()); assertEquals("id.user=123", msg.getDocumentSelection()); 
assertEquals(BUCKET_SPACE, msg.getBucketSpace()); } } } public class testGetBucketStateMessage implements RunnableTest { @Override public void run() { GetBucketStateMessage msg = new GetBucketStateMessage(new BucketId(16, 666)); assertEquals(BASE_MESSAGE_LENGTH + 12, serialize("GetBucketStateMessage", msg)); for (Language lang : LANGUAGES) { msg = (GetBucketStateMessage)deserialize("GetBucketStateMessage", DocumentProtocol.MESSAGE_GETBUCKETSTATE, lang); assertEquals(16, msg.getBucketId().getUsedBits()); assertEquals(4611686018427388570l, msg.getBucketId().getId()); } } } public class testCreateVisitorMessage implements RunnableTest { private static final String BUCKET_SPACE = "bjarne"; @Override public void run() { CreateVisitorMessage msg = new CreateVisitorMessage("SomeLibrary", "myvisitor", "newyork", "london"); msg.setDocumentSelection("true and false or true"); msg.getParameters().put("myvar", Utf8.toBytes("somevalue")); msg.getParameters().put("anothervar", Utf8.toBytes("34")); msg.getBuckets().add(new BucketId(16, 1234)); msg.setVisitRemoves(true); msg.setFieldSet("foo bar"); msg.setMaxBucketsPerVisitor(2); msg.setBucketSpace(BUCKET_SPACE); assertEquals(BASE_MESSAGE_LENGTH + 178 + serializedLength(BUCKET_SPACE), serialize("CreateVisitorMessage", msg)); for (Language lang : LANGUAGES) { msg = (CreateVisitorMessage)deserialize("CreateVisitorMessage", DocumentProtocol.MESSAGE_CREATEVISITOR, lang); assertEquals("SomeLibrary", msg.getLibraryName()); assertEquals("myvisitor", msg.getInstanceId()); assertEquals("newyork", msg.getControlDestination()); assertEquals("london", msg.getDataDestination()); assertEquals("true and false or true", msg.getDocumentSelection()); assertEquals(8, msg.getMaxPendingReplyCount()); assertEquals(true, msg.getVisitRemoves()); assertEquals("foo bar", msg.getFieldSet()); assertEquals(false, msg.getVisitInconsistentBuckets()); assertEquals(1, msg.getBuckets().size()); assertEquals(new BucketId(16, 1234), 
msg.getBuckets().iterator().next()); assertEquals("somevalue", Utf8.toString(msg.getParameters().get("myvar"))); assertEquals("34", Utf8.toString(msg.getParameters().get("anothervar"))); assertEquals(2, msg.getMaxBucketsPerVisitor()); assertEquals(BUCKET_SPACE, msg.getBucketSpace()); } msg.getBuckets().clear(); assertEquals("CreateVisitorMessage(" + "No buckets, " + "selection 'true and false or true', " + "bucket space 'bjarne', " + "library SomeLibrary, including removes, " + "get fields: foo bar" + ")", msg.toString()); msg.getBuckets().add(new BucketId(16, 1234)); assertEquals("CreateVisitorMessage(" + "Bucket BucketId(0x40000000000004d2), " + "selection 'true and false or true', " + "bucket space 'bjarne', " + "library SomeLibrary, including removes, " + "get fields: foo bar" + ")", msg.toString()); msg.getBuckets().add(new BucketId(16, 1235)); msg.getBuckets().add(new BucketId(16, 1236)); msg.getBuckets().add(new BucketId(16, 1237)); msg.getBuckets().add(new BucketId(16, 1238)); msg.setFromTimestamp(10001); msg.setToTimestamp(20002); msg.setVisitInconsistentBuckets(true); assertEquals("CreateVisitorMessage(" + "5 buckets: BucketId(0x40000000000004d2) BucketId(0x40000000000004d3) BucketId(0x40000000000004d4) ..., " + "time 10001-20002, " + "selection 'true and false or true', " + "bucket space 'bjarne', " + "library SomeLibrary, including removes, " + "get fields: foo bar, " + "visit inconsistent buckets" + ")", msg.toString()); } } public class testCreateVisitorReply implements RunnableTest { @Override public void run() { CreateVisitorReply reply = new CreateVisitorReply(DocumentProtocol.REPLY_CREATEVISITOR); reply.setLastBucket(new BucketId(16, 123)); reply.getVisitorStatistics().setBucketsVisited(3); reply.getVisitorStatistics().setDocumentsVisited(1000); reply.getVisitorStatistics().setBytesVisited(1024000); reply.getVisitorStatistics().setDocumentsReturned(123); reply.getVisitorStatistics().setBytesReturned(512000); assertEquals(65, 
serialize("CreateVisitorReply", reply)); for (Language lang : LANGUAGES) { reply = (CreateVisitorReply)deserialize("CreateVisitorReply", DocumentProtocol.REPLY_CREATEVISITOR, lang); assertNotNull(reply); assertEquals(new BucketId(16, 123), reply.getLastBucket()); assertEquals(3, reply.getVisitorStatistics().getBucketsVisited()); assertEquals(1000, reply.getVisitorStatistics().getDocumentsVisited()); assertEquals(1024000, reply.getVisitorStatistics().getBytesVisited()); assertEquals(123, reply.getVisitorStatistics().getDocumentsReturned()); assertEquals(512000, reply.getVisitorStatistics().getBytesReturned()); } } } public class testDestroyVisitorReply implements RunnableTest { @Override public void run() { testVisitorReply("DestroyVisitorReply", DocumentProtocol.REPLY_DESTROYVISITOR); } } public class testDocumentIgnoredReply implements RunnableTest { @Override public void run() { DocumentIgnoredReply reply = new DocumentIgnoredReply(); assertEquals(BASE_MESSAGE_LENGTH, serialize("DocumentIgnoredReply", reply)); for (Language lang : LANGUAGES) { reply = (DocumentIgnoredReply)deserialize("DocumentIgnoredReply", DocumentProtocol.REPLY_DOCUMENTIGNORED, lang); } } } public class testDocumentListReply implements RunnableTest { @Override public void run() { testVisitorReply("DocumentListReply", DocumentProtocol.REPLY_DOCUMENTLIST); } } public class testEmptyBucketsReply implements RunnableTest { @Override public void run() { testVisitorReply("EmptyBucketsReply", DocumentProtocol.REPLY_EMPTYBUCKETS); } } public class testDestroyVisitorMessage implements RunnableTest { @Override public void run() { DestroyVisitorMessage msg = new DestroyVisitorMessage("myvisitor"); assertEquals(BASE_MESSAGE_LENGTH + 17, serialize("DestroyVisitorMessage", msg)); for (Language lang : LANGUAGES) { msg = (DestroyVisitorMessage)deserialize("DestroyVisitorMessage", DocumentProtocol.MESSAGE_DESTROYVISITOR, lang); assertEquals("myvisitor", msg.getInstanceId()); } } } public class 
testDocumentListMessage implements RunnableTest { @Override public void run() { DocumentListMessage msg = (DocumentListMessage)deserialize("DocumentListMessage", DocumentProtocol.MESSAGE_DOCUMENTLIST, Language.CPP); assertEquals("id:scheme:testdoc:n=1234:1", msg.getDocuments().get(0).getDocument().getId().toString()); assertEquals(1234, msg.getDocuments().get(0).getTimestamp()); assertFalse(msg.getDocuments().get(0).isRemoveEntry()); assertEquals(BASE_MESSAGE_LENGTH + 69, serialize("DocumentListMessage", msg)); msg = (DocumentListMessage)deserialize("DocumentListMessage", DocumentProtocol.MESSAGE_DOCUMENTLIST, Language.JAVA); assertEquals("id:scheme:testdoc:n=1234:1", msg.getDocuments().get(0).getDocument().getId().toString()); assertEquals(1234, msg.getDocuments().get(0).getTimestamp()); assertFalse(msg.getDocuments().get(0).isRemoveEntry()); } } public class testEmptyBucketsMessage implements RunnableTest { @Override public void run() { List<BucketId> bids = new ArrayList<>(); for (int i = 0; i < 13; ++i) { bids.add(new BucketId(16, i)); } EmptyBucketsMessage ebm = new EmptyBucketsMessage(bids); assertEquals(BASE_MESSAGE_LENGTH + 112, serialize("EmptyBucketsMessage", ebm)); for (Language lang : LANGUAGES) { ebm = (EmptyBucketsMessage)deserialize("EmptyBucketsMessage", DocumentProtocol.MESSAGE_EMPTYBUCKETS, lang); for (int i = 0; i < 13; ++i) { assertEquals(new BucketId(16, i), ebm.getBucketIds().get(i)); } } } } public class testGetDocumentMessage implements RunnableTest { @Override public void run() { GetDocumentMessage msg = new GetDocumentMessage(new DocumentId("id:ns:testdoc::"), "foo bar"); assertEquals(BASE_MESSAGE_LENGTH + 31, serialize("GetDocumentMessage", msg)); for (Language lang : LANGUAGES) { msg = (GetDocumentMessage)deserialize("GetDocumentMessage", DocumentProtocol.MESSAGE_GETDOCUMENT, lang); assertEquals("id:ns:testdoc::", msg.getDocumentId().toString()); assertEquals("foo bar", msg.getFieldSet()); } } } public class testRemoveDocumentMessage 
implements RunnableTest { @Override public void run() { final RemoveDocumentMessage msg = new RemoveDocumentMessage(new DocumentId("id:ns:testdoc::")); msg.setCondition(new TestAndSetCondition(CONDITION_STRING)); assertEquals(BASE_MESSAGE_LENGTH + 20 + serializedLength(msg.getCondition().getSelection()), serialize("RemoveDocumentMessage", msg)); for (Language lang : LANGUAGES) { final RemoveDocumentMessage deserializedMsg = (RemoveDocumentMessage)deserialize("RemoveDocumentMessage", DocumentProtocol.MESSAGE_REMOVEDOCUMENT, lang); assertEquals(deserializedMsg.getDocumentId().toString(), msg.getDocumentId().toString()); } } } public class testMapVisitorMessage implements RunnableTest { @Override public void run() { MapVisitorMessage msg = (MapVisitorMessage)deserialize("MapVisitorMessage", DocumentProtocol.MESSAGE_MAPVISITOR, Language.CPP); assertEquals("3", msg.getData().get("foo")); assertEquals("5", msg.getData().get("bar")); assertEquals(BASE_MESSAGE_LENGTH + 32, serialize("MapVisitorMessage", msg)); msg = (MapVisitorMessage)deserialize("MapVisitorMessage", DocumentProtocol.MESSAGE_MAPVISITOR, Language.JAVA); assertEquals("3", msg.getData().get("foo")); assertEquals("5", msg.getData().get("bar")); } } public class testVisitorInfoMessage implements RunnableTest { @Override public void run() { VisitorInfoMessage msg = new VisitorInfoMessage(); msg.getFinishedBuckets().add(new BucketId(16, 1)); msg.getFinishedBuckets().add(new BucketId(16, 2)); msg.getFinishedBuckets().add(new BucketId(16, 4)); msg.setErrorMessage("error message: \u00e6\u00c6\u00f8\u00d8\u00e5\u00c5\u00f6\u00d6"); assertEquals(BASE_MESSAGE_LENGTH + 67, serialize("VisitorInfoMessage", msg)); for (Language lang : LANGUAGES) { msg = (VisitorInfoMessage)deserialize("VisitorInfoMessage", DocumentProtocol.MESSAGE_VISITORINFO, lang); assertTrue(msg.getFinishedBuckets().contains(new BucketId(16, 1))); assertTrue(msg.getFinishedBuckets().contains(new BucketId(16, 2))); 
assertTrue(msg.getFinishedBuckets().contains(new BucketId(16, 4))); assertEquals("error message: \u00e6\u00c6\u00f8\u00d8\u00e5\u00c5\u00f6\u00d6", msg.getErrorMessage()); } } } private static String CONDITION_STRING = "There's just one condition"; public class testPutDocumentMessage implements RunnableTest { void verifyCreateIfNonExistentFlag() { var msg = new PutDocumentMessage(new DocumentPut(new Document(protocol.getDocumentTypeManager().getDocumentType("testdoc"), "id:ns:testdoc::"))); msg.setCreateIfNonExistent(true); int size_of_create_if_non_existent_flag = 1; int expected_serialized_size = BASE_MESSAGE_LENGTH + 45 + serializedLength(msg.getCondition().getSelection()) + size_of_create_if_non_existent_flag; assertEquals(expected_serialized_size, serialize("PutDocumentMessage-create", msg)); assertEquals(expected_serialized_size - 1, serialize("PutDocumentMessage-create-truncate", msg, data -> DataTamper.truncate(data, 1))); assertEquals(expected_serialized_size + 1, serialize("PutDocumentMessage-create-pad", msg, data -> DataTamper.pad(data, 1))); for (Language lang: LANGUAGES) { var decoded = (PutDocumentMessage)deserialize("PutDocumentMessage-create", DocumentProtocol.MESSAGE_PUTDOCUMENT, lang); var decoded_trunc = (PutDocumentMessage)deserialize("PutDocumentMessage-create-truncate", DocumentProtocol.MESSAGE_PUTDOCUMENT, lang); var decoded_pad = (PutDocumentMessage)deserialize("PutDocumentMessage-create-pad", DocumentProtocol.MESSAGE_PUTDOCUMENT, lang); assertEquals(true, decoded.getCreateIfNonExistent()); assertEquals(false, decoded_trunc.getCreateIfNonExistent()); assertEquals(true, decoded_pad.getCreateIfNonExistent()); assertTrue(decoded.getDocumentPut().equals(decoded_pad.getDocumentPut())); assertFalse(decoded.getDocumentPut().equals(decoded_trunc.getDocumentPut())); } } @Override public void run() { PutDocumentMessage msg = new PutDocumentMessage(new DocumentPut(new Document(protocol.getDocumentTypeManager().getDocumentType("testdoc"), 
"id:ns:testdoc::"))); msg.setTimestamp(666); msg.setCondition(new TestAndSetCondition(CONDITION_STRING)); int size_of_create_if_non_existent_flag = 1; int expected_serialized_size = BASE_MESSAGE_LENGTH + 45 + serializedLength(msg.getCondition().getSelection()) + size_of_create_if_non_existent_flag; assertEquals(expected_serialized_size, serialize("PutDocumentMessage", msg)); for (Language lang : LANGUAGES) { final PutDocumentMessage deserializedMsg = (PutDocumentMessage)deserialize("PutDocumentMessage", DocumentProtocol.MESSAGE_PUTDOCUMENT, lang); assertEquals(msg.getDocumentPut().getDocument().getDataType().getName(), deserializedMsg.getDocumentPut().getDocument().getDataType().getName()); assertEquals(msg.getDocumentPut().getDocument().getId().toString(), deserializedMsg.getDocumentPut().getDocument().getId().toString()); assertEquals(msg.getTimestamp(), deserializedMsg.getTimestamp()); assertEquals(msg.getCondition().getSelection(), deserializedMsg.getCondition().getSelection()); assertEquals(false, deserializedMsg.getCreateIfNonExistent()); } verifyCreateIfNonExistentFlag(); } } public class testPutDocumentReply implements RunnableTest { @Override public void run() { WriteDocumentReply reply = new WriteDocumentReply(DocumentProtocol.REPLY_PUTDOCUMENT); reply.setHighestModificationTimestamp(30); assertEquals(13, serialize("PutDocumentReply", reply)); for (Language lang : LANGUAGES) { WriteDocumentReply obj = (WriteDocumentReply)deserialize("PutDocumentReply", DocumentProtocol.REPLY_PUTDOCUMENT, lang); assertNotNull(obj); assertEquals(30, obj.getHighestModificationTimestamp()); } } } public class testUpdateDocumentMessage implements RunnableTest { @Override public void run() { DocumentType docType = protocol.getDocumentTypeManager().getDocumentType("testdoc"); DocumentUpdate update = new DocumentUpdate(docType, new DocumentId("id:ns:testdoc::")); update.addFieldPathUpdate(new RemoveFieldPathUpdate(docType, "intfield", "testdoc.intfield > 0")); final 
UpdateDocumentMessage msg = new UpdateDocumentMessage(update); msg.setNewTimestamp(777); msg.setOldTimestamp(666); msg.setCondition(new TestAndSetCondition(CONDITION_STRING)); assertEquals(BASE_MESSAGE_LENGTH + 93 + serializedLength(msg.getCondition().getSelection()), serialize("UpdateDocumentMessage", msg)); for (Language lang : LANGUAGES) { final UpdateDocumentMessage deserializedMsg = (UpdateDocumentMessage) deserialize("UpdateDocumentMessage", DocumentProtocol.MESSAGE_UPDATEDOCUMENT, lang); assertEquals(msg.getDocumentUpdate(), deserializedMsg.getDocumentUpdate()); assertEquals(msg.getNewTimestamp(), deserializedMsg.getNewTimestamp()); assertEquals(msg.getOldTimestamp(), deserializedMsg.getOldTimestamp()); assertEquals(msg.getCondition().getSelection(), deserializedMsg.getCondition().getSelection()); } } } public class testUpdateDocumentReply implements RunnableTest { @Override public void run() { UpdateDocumentReply reply = new UpdateDocumentReply(); reply.setHighestModificationTimestamp(30); reply.setWasFound(false); assertEquals(14, serialize("UpdateDocumentReply", reply)); for (Language lang : LANGUAGES) { UpdateDocumentReply obj = (UpdateDocumentReply)deserialize("UpdateDocumentReply", DocumentProtocol.REPLY_UPDATEDOCUMENT, lang); assertNotNull(obj); assertEquals(30, reply.getHighestModificationTimestamp()); assertEquals(false, obj.wasFound()); } } } public class testVisitorInfoReply implements RunnableTest { @Override public void run() { testVisitorReply("VisitorInfoReply", DocumentProtocol.REPLY_VISITORINFO); } } public class testWrongDistributionReply implements RunnableTest { @Override public void run() { WrongDistributionReply reply = new WrongDistributionReply("distributor:3 storage:2"); assertEquals(32, serialize("WrongDistributionReply", reply)); for (Language lang : LANGUAGES) { reply = (WrongDistributionReply)deserialize("WrongDistributionReply", DocumentProtocol.REPLY_WRONGDISTRIBUTION, lang); assertEquals("distributor:3 storage:2", 
reply.getSystemState()); } } } public class testRemoveDocumentReply implements RunnableTest { @Override public void run() { RemoveDocumentReply reply = new RemoveDocumentReply(); reply.setHighestModificationTimestamp(30); reply.setWasFound(false); assertEquals(14, serialize("RemoveDocumentReply", reply)); for (Language lang : LANGUAGES) { RemoveDocumentReply obj = (RemoveDocumentReply)deserialize("RemoveDocumentReply", DocumentProtocol.REPLY_REMOVEDOCUMENT, lang); assertNotNull(obj); assertEquals(30, obj.getHighestModificationTimestamp()); assertEquals(false, obj.wasFound()); } } } public class testRemoveLocationReply implements RunnableTest { @Override public void run() { testDocumentReply("RemoveLocationReply", DocumentProtocol.REPLY_REMOVELOCATION); } } public class testStatBucketReply implements RunnableTest { @Override public void run() { StatBucketReply msg = new StatBucketReply(); msg.setResults("These are the votes of the Norwegian jury"); assertEquals(50, serialize("StatBucketReply", msg)); for (Language lang : LANGUAGES) { msg = (StatBucketReply)deserialize("StatBucketReply", DocumentProtocol.REPLY_STATBUCKET, lang); assertEquals("These are the votes of the Norwegian jury", msg.getResults()); } } } public class testQueryResultReply implements RunnableTest { @Override public void run() { testVisitorReply("QueryResultReply", DocumentProtocol.REPLY_QUERYRESULT); } } public class testQueryResultMessage implements RunnableTest { @Override public void run() throws Exception { test_result_with_match_features(); Routable routable = deserialize("QueryResultMessage-1", DocumentProtocol.MESSAGE_QUERYRESULT, Language.CPP); assertTrue(routable instanceof QueryResultMessage); QueryResultMessage msg = (QueryResultMessage)routable; assertEquals(0, msg.getResult().getHitCount()); routable = deserialize("QueryResultMessage-2", DocumentProtocol.MESSAGE_QUERYRESULT, Language.CPP); assertTrue(routable instanceof QueryResultMessage); msg = (QueryResultMessage)routable; 
assertEquals(2, msg.getResult().getHitCount()); com.yahoo.vdslib.SearchResult.Hit h = msg.getResult().getHit(0); assertEquals(89.0, h.getRank(), 1E-6); assertEquals("doc1", h.getDocId()); assertFalse(h.getMatchFeatures().isPresent()); h = msg.getResult().getHit(1); assertEquals(109.0, h.getRank(), 1E-6); assertEquals("doc17", h.getDocId()); assertFalse(h.getMatchFeatures().isPresent()); routable = deserialize("QueryResultMessage-3", DocumentProtocol.MESSAGE_QUERYRESULT, Language.CPP); assertTrue(routable instanceof QueryResultMessage); msg = (QueryResultMessage)routable; assertEquals(2, msg.getResult().getHitCount()); h = msg.getResult().getHit(0); assertEquals(109.0, h.getRank(), 1E-6); assertEquals("doc17", h.getDocId()); assertFalse(h.getMatchFeatures().isPresent()); h = msg.getResult().getHit(1); assertEquals(89.0, h.getRank(), 1E-6); assertEquals("doc1", h.getDocId()); assertFalse(h.getMatchFeatures().isPresent()); routable = deserialize("QueryResultMessage-4", DocumentProtocol.MESSAGE_QUERYRESULT, Language.CPP); assertTrue(routable instanceof QueryResultMessage); msg = (QueryResultMessage)routable; assertEquals(3, msg.getResult().getHitCount()); h = msg.getResult().getHit(0); assertTrue(h instanceof SearchResult.HitWithSortBlob); assertEquals(89.0, h.getRank(), 1E-6); assertEquals("doc1", h.getDocId()); byte[] b = ((SearchResult.HitWithSortBlob)h).getSortBlob(); assertEqualsData(new byte[] { 's', 'o', 'r', 't', 'd', 'a', 't', 'a', '2' }, b); h = msg.getResult().getHit(1); assertTrue(h instanceof SearchResult.HitWithSortBlob); assertEquals(109.0, h.getRank(), 1E-6); assertEquals("doc17", h.getDocId()); b = ((SearchResult.HitWithSortBlob)h).getSortBlob(); assertEqualsData(new byte[] { 's', 'o', 'r', 't', 'd', 'a', 't', 'a', '1' }, b); h = msg.getResult().getHit(2); assertTrue(h instanceof SearchResult.HitWithSortBlob); assertEquals(90.0, h.getRank(), 1E-6); assertEquals("doc18", h.getDocId()); b = ((SearchResult.HitWithSortBlob)h).getSortBlob(); 
assertEqualsData(new byte[] { 's', 'o', 'r', 't', 'd', 'a', 't', 'a', '3' }, b); } void assertEqualsData(byte[] exp, byte[] act) { assertEquals(exp.length, act.length); for (int i = 0; i < exp.length; ++i) { assertEquals(exp[i], act[i]); } } void test_result_with_match_features() { Routable routable = deserialize("QueryResultMessage-6", DocumentProtocol.MESSAGE_QUERYRESULT, Language.CPP); assertTrue(routable instanceof QueryResultMessage); var msg = (QueryResultMessage)routable; assertEquals(2, msg.getResult().getHitCount()); var h = msg.getResult().getHit(0); assertTrue(h instanceof SearchResult.Hit); assertEquals(7.0, h.getRank(), 1E-6); assertEquals("doc2", h.getDocId()); assertTrue(h.getMatchFeatures().isPresent()); var mf = h.getMatchFeatures().get(); assertEquals(12.0, mf.field("foo").asDouble(), 1E-6); assertEqualsData(new byte[] { 'T', 'h', 'e', 'r', 'e' }, mf.field("bar").asData()); h = msg.getResult().getHit(1); assertTrue(h instanceof SearchResult.Hit); assertEquals(5.0, h.getRank(), 1E-6); assertEquals("doc1", h.getDocId()); assertTrue(h.getMatchFeatures().isPresent()); mf = h.getMatchFeatures().get(); assertEquals(1.0, mf.field("foo").asDouble(), 1E-6); assertEqualsData(new byte[] { 'H', 'i' }, mf.field("bar").asData()); } } public class testGetBucketListReply implements RunnableTest { public void run() { GetBucketListReply reply = new GetBucketListReply(); reply.getBuckets().add(new GetBucketListReply.BucketInfo(new BucketId(16, 123), "foo")); reply.getBuckets().add(new GetBucketListReply.BucketInfo(new BucketId(17, 1123), "bar")); reply.getBuckets().add(new GetBucketListReply.BucketInfo(new BucketId(18, 11123), "zoink")); assertEquals(56, serialize("GetBucketListReply", reply)); for (Language lang : LANGUAGES) { reply = (GetBucketListReply)deserialize("GetBucketListReply", DocumentProtocol.REPLY_GETBUCKETLIST, lang); assertEquals(reply.getBuckets().get(0), new GetBucketListReply.BucketInfo(new BucketId(16, 123), "foo")); 
assertEquals(reply.getBuckets().get(1), new GetBucketListReply.BucketInfo(new BucketId(17, 1123), "bar")); assertEquals(reply.getBuckets().get(2), new GetBucketListReply.BucketInfo(new BucketId(18, 11123), "zoink")); } } } public class testGetBucketStateReply implements RunnableTest { public void run() { GlobalId foo = new GlobalId(IdString.createIdString("id:ns:testdoc::foo")); GlobalId bar = new GlobalId(IdString.createIdString("id:ns:testdoc::bar")); GetBucketStateReply reply = new GetBucketStateReply(); List<DocumentState> state = new ArrayList<>(2); state.add(new DocumentState(foo, 777, false)); state.add(new DocumentState(bar, 888, true)); reply.setBucketState(state); assertEquals(53, serialize("GetBucketStateReply", reply)); for (Language lang : LANGUAGES) { reply = (GetBucketStateReply)deserialize("GetBucketStateReply", DocumentProtocol.REPLY_GETBUCKETSTATE, lang); assertEquals(777, reply.getBucketState().get(0).getTimestamp()); assertEquals(foo, reply.getBucketState().get(0).getGid()); assertEquals(false, reply.getBucketState().get(0).isRemoveEntry()); assertEquals(888, reply.getBucketState().get(1).getTimestamp()); assertEquals(bar, reply.getBucketState().get(1).getGid()); assertEquals(true, reply.getBucketState().get(1).isRemoveEntry()); } } } public class testGetDocumentReply implements RunnableTest { public void run() { GetDocumentReply reply = new GetDocumentReply(new Document(protocol.getDocumentTypeManager().getDocumentType("testdoc"), "id:ns:testdoc::")); assertEquals(47, serialize("GetDocumentReply", reply)); for (Language lang : LANGUAGES) { reply = (GetDocumentReply)deserialize("GetDocumentReply", DocumentProtocol.REPLY_GETDOCUMENT, lang); assertEquals("testdoc", reply.getDocument().getDataType().getName()); assertEquals("id:ns:testdoc::", reply.getDocument().getId().toString()); } } } public class testMapVisitorReply implements RunnableTest { public void run() { testVisitorReply("MapVisitorReply", DocumentProtocol.REPLY_MAPVISITOR); } } 
protected void testDocumentReply(String filename, int type) { DocumentReply reply = new DocumentReply(type); assertEquals(5, serialize(filename, reply)); for (Language lang : LANGUAGES) { reply = (DocumentReply)deserialize(filename, type, lang); assertNotNull(reply); } } protected void testVisitorReply(String filename, int type) { VisitorReply reply = new VisitorReply(type); assertEquals(5, serialize(filename, reply)); for (Language lang : LANGUAGES) { reply = (VisitorReply)deserialize(filename, type, lang); assertNotNull(reply); } } static int serializedLength(String str) { return 4 + str.length(); } }
👹
public void run() { var msg = new PutDocumentMessage(new DocumentPut(new Document(protocol.getDocumentTypeManager().getDocumentType("testdoc"), "id:ns:testdoc::"))); msg.setTimestamp(666); msg.setCondition(new TestAndSetCondition(CONDITION_STRING)); serialize("PutDocumentMessage", msg); forEachLanguage((lang) -> { var deserializedMsg = (PutDocumentMessage)deserialize("PutDocumentMessage", DocumentProtocol.MESSAGE_PUTDOCUMENT, lang); var deserializedDoc = deserializedMsg.getDocumentPut().getDocument(); assertNotNull(deserializedDoc); assertEquals(msg.getDocumentPut().getDocument().getDataType().getName(), deserializedDoc.getDataType().getName()); assertEquals(msg.getDocumentPut().getDocument().getId().toString(), deserializedDoc.getId().toString()); assertEquals(msg.getTimestamp(), deserializedMsg.getTimestamp()); assertEquals(msg.getCondition().getSelection(), deserializedMsg.getCondition().getSelection()); assertFalse(deserializedMsg.getCreateIfNonExistent()); }); verifyCreateIfNonExistentFlag(); }
msg.setTimestamp(666);
public void run() { var msg = new GetDocumentMessage(new DocumentId("id:ns:testdoc::"), "foo bar"); serialize("GetDocumentMessage", msg); forEachLanguage((lang) -> { var msg2 = (GetDocumentMessage)deserialize("GetDocumentMessage", DocumentProtocol.MESSAGE_GETDOCUMENT, lang); assertEquals("id:ns:testdoc::", msg2.getDocumentId().toString()); assertEquals("foo bar", msg2.getFieldSet()); }); }
class GetDocumentMessageTest implements RunnableTest { @Override }
class GetDocumentMessageTest implements RunnableTest { @Override }
👼
public void run() { var docType = protocol.getDocumentTypeManager().getDocumentType("testdoc"); var update = new DocumentUpdate(docType, new DocumentId("id:ns:testdoc::")); update.addFieldPathUpdate(new RemoveFieldPathUpdate(docType, "intfield", "testdoc.intfield > 0")); var msg = new UpdateDocumentMessage(update); msg.setNewTimestamp(777); msg.setOldTimestamp(666); msg.setCondition(new TestAndSetCondition(CONDITION_STRING)); serialize("UpdateDocumentMessage", msg); forEachLanguage((lang) -> { var deserializedMsg = (UpdateDocumentMessage)deserialize("UpdateDocumentMessage", DocumentProtocol.MESSAGE_UPDATEDOCUMENT, lang); assertEquals(msg.getDocumentUpdate(), deserializedMsg.getDocumentUpdate()); assertEquals(msg.getNewTimestamp(), deserializedMsg.getNewTimestamp()); assertEquals(msg.getOldTimestamp(), deserializedMsg.getOldTimestamp()); assertEquals(msg.getCondition().getSelection(), deserializedMsg.getCondition().getSelection()); }); }
msg.setNewTimestamp(777);
public void run() { var msg = new GetDocumentMessage(new DocumentId("id:ns:testdoc::"), "foo bar"); serialize("GetDocumentMessage", msg); forEachLanguage((lang) -> { var msg2 = (GetDocumentMessage)deserialize("GetDocumentMessage", DocumentProtocol.MESSAGE_GETDOCUMENT, lang); assertEquals("id:ns:testdoc::", msg2.getDocumentId().toString()); assertEquals("foo bar", msg2.getFieldSet()); }); }
class GetDocumentMessageTest implements RunnableTest { @Override }
class GetDocumentMessageTest implements RunnableTest { @Override }
🙈
private static Version deriveSupportedProtocolVersion() { var maybeEnvVal = System.getenv("VESPA_MBUS_DOCUMENTAPI_USE_PROTOBUF"); if ("true".equals(maybeEnvVal) || "yes".equals(maybeEnvVal)) { return new Version(8, 304); } return new Version(8, 303); }
private static Version deriveSupportedProtocolVersion() { var maybeEnvVal = System.getenv("VESPA_MBUS_DOCUMENTAPI_USE_PROTOBUF"); if ("true".equals(maybeEnvVal) || "yes".equals(maybeEnvVal)) { return new Version(8, 310); } return new Version(8, 309); }
class RPCNetwork implements Network, MethodHandler { private static final Logger log = Logger.getLogger(RPCNetwork.class.getName()); private final AtomicBoolean destroyed = new AtomicBoolean(false); private final Identity identity; private final Supervisor orb; private final RPCTargetPool targetPool; private final RPCServicePool servicePool; private final Acceptor listener; private final Mirror mirror; private final Register register; private final TreeMap<Version, RPCSendAdapter> sendAdapters = new TreeMap<>(); private volatile NetworkOwner owner; private final SlobrokConfigSubscriber slobroksConfig; private final LinkedHashMap<String, Route> lruRouteMap = new LinkedHashMap<>(10000, 0.5f, true); private final ExecutorService executor = new ThreadPoolExecutor(getNumThreads(), getNumThreads(), 0L, TimeUnit.SECONDS, new LinkedBlockingQueue<>(), ThreadFactoryFactory.getDaemonThreadFactory("mbus.net")); private static int getNumThreads() { return Math.max(2, Runtime.getRuntime().availableProcessors()/2); } private static boolean shouldEnableTcpNodelay(RPCNetworkParams.Optimization optimization) { return optimization == RPCNetworkParams.Optimization.LATENCY; } /** * Create an RPCNetwork. The servicePrefix is combined with session names to create service names. 
If the service * prefix is 'a/b' and the session name is 'c', the resulting service name that identifies the session on the * message bus will be 'a/b/c' * * @param params a complete set of parameters * @param slobrokConfig subscriber for slobroks config */ private RPCNetwork(RPCNetworkParams params, SlobrokConfigSubscriber slobrokConfig) { this.slobroksConfig = slobrokConfig; identity = params.getIdentity(); orb = new Supervisor(new Transport("mbus-rpc-" + identity.getServicePrefix(), params.getNumNetworkThreads(), shouldEnableTcpNodelay(params.getOptimization()), params.getTransportEventsBeforeWakeup())); orb.setMaxInputBufferSize(params.getMaxInputBufferSize()); orb.setMaxOutputBufferSize(params.getMaxOutputBufferSize()); targetPool = new RPCTargetPool(params.getConnectionExpireSecs(), params.getNumTargetsPerSpec()); servicePool = new RPCServicePool(this, 4096); Method method = new Method("mbus.getVersion", "", "s", this); method.requireCapabilities(CapabilitySet.none()); method.methodDesc("Retrieves the message bus version."); method.returnDesc(0, "version", "The message bus version."); orb.addMethod(method); try { listener = orb.listen(new Spec(params.getListenPort())); } catch (ListenFailedException e) { orb.transport().shutdown().join(); throw new RuntimeException(e); } TargetPoolTask task = new TargetPoolTask(targetPool, orb); task.jrtTask.scheduleNow(); register = new Register(orb, slobrokConfig.getSlobroks(), identity.getHostname(), listener.port()); mirror = new Mirror(orb, slobrokConfig.getSlobroks()); } /** * Create an RPCNetwork. The servicePrefix is combined with session names to create service names. If the service * prefix is 'a/b' and the session name is 'c', the resulting service name that identifies the session on the * message bus will be 'a/b/c' * * @param params a complete set of parameters */ public RPCNetwork(RPCNetworkParams params) { this(params, params.getSlobroksConfig() != null ? 
new SlobrokConfigSubscriber(params.getSlobroksConfig()) : new SlobrokConfigSubscriber(params.getSlobrokConfigId())); } /** * The network uses a cache of RPC targets (see {@link RPCTargetPool}) that allows it to save time by reusing open * connections. It works by keeping a set of the most recently used targets open. Calling this method forces all * unused connections to close immediately. */ protected void flushTargetPool() { targetPool.flushTargets(true); } final Route getRoute(String routeString) { Route route = lruRouteMap.get(routeString); if (route == null) { route = Route.parse(routeString); lruRouteMap.put(routeString, route); } return new Route(route); } @Override public boolean waitUntilReady(double seconds) { int millis = (int) seconds * 1000; int i = 0; do { if (mirror.ready()) { if (i > 200) { log.log(Level.INFO, "network became ready (at "+i+" ms)"); } return true; } if ((i == 200) || ((i > 200) && ((i % 1000) == 0))) { log.log(Level.INFO, "waiting for network to become ready ("+i+" of "+millis+" ms)"); mirror.dumpState(); } try { i += 10; Thread.sleep(10); } catch (InterruptedException e) { } } while (i < millis); return false; } @Override public boolean allocServiceAddress(RoutingNode recipient) { Hop hop = recipient.getRoute().getHop(0); String service = hop.getServiceName(); Error error = resolveServiceAddress(recipient, service); if (error == null) { return true; } recipient.setError(error); return false; } @Override public void freeServiceAddress(RoutingNode recipient) { RPCTarget target = ((RPCServiceAddress)recipient.getServiceAddress()).getTarget(); if (target != null) { target.subRef(); } recipient.setServiceAddress(null); } @Override public void attach(NetworkOwner owner) { if (this.owner != null) { throw new IllegalStateException("Network is already attached to another owner."); } this.owner = owner; sendAdapters.put(new Version(6,149), new RPCSendV2(this)); } @Override public void registerSession(String session) { 
register.registerName(identity.getServicePrefix() + "/" + session); } @Override public void unregisterSession(String session) { register.unregisterName(identity.getServicePrefix() + "/" + session); } @Override public void sync() { orb.transport().sync(); } @Override public void shutdown() { destroy(); } @Override public String getConnectionSpec() { return "tcp/" + identity.getHostname() + ":" + listener.port(); } @Override public IMirror getMirror() { return mirror; } @Override public void invoke(Request request) { request.returnValues().add(new StringValue(getVersion().toString())); } @Override public void send(Message msg, List<RoutingNode> recipients) { SendContext ctx = new SendContext(this, msg, recipients); Duration timeout = Duration.ofMillis(ctx.msg.getTimeRemainingNow()); for (RoutingNode recipient : ctx.recipients) { RPCServiceAddress address = (RPCServiceAddress)recipient.getServiceAddress(); address.getTarget().resolveVersion(timeout, ctx); } } private static String buildRecipientListString(SendContext ctx) { return ctx.recipients.stream().map(r -> { if (!(r.getServiceAddress() instanceof RPCServiceAddress)) { return "<non-RPC service address>"; } RPCServiceAddress addr = (RPCServiceAddress)r.getServiceAddress(); return String.format("%s at %s", addr.getServiceName(), addr.getConnectionSpec()); }).collect(Collectors.joining(", ")); } /** * This method is a callback invoked after {@link * resolved. If all versions were resolved ahead of time, this method is invoked by the same thread as the former. * If not, this method is invoked by the network thread during the version callback. 
* * @param ctx all the required send-data */ private void send(SendContext ctx) { if (destroyed.get()) { replyError(ctx, ErrorCode.NETWORK_SHUTDOWN, "Network layer has performed shutdown."); } else if (ctx.hasError) { replyError(ctx, ErrorCode.HANDSHAKE_FAILED, String.format("An error occurred while resolving version of recipient(s) [%s] from host '%s'.", buildRecipientListString(ctx), identity.getHostname())); } else { new SendTask(owner.getProtocol(ctx.msg.getProtocol()), ctx).run(); } } /** * Sets the destroyed flag to true. The very first time this method is called, it cleans up all its dependencies. * Even if you retain a reference to this object, all of its content is allowed to be garbage collected. * * @return true if content existed and was destroyed */ public boolean destroy() { if (!destroyed.getAndSet(true)) { if (slobroksConfig != null) { slobroksConfig.shutdown(); } register.shutdown(); mirror.shutdown(); listener.shutdown().join(); orb.transport().shutdown().join(); targetPool.flushTargets(true); executor.shutdown(); return true; } return false; } private static final Version REPORTED_VERSION = deriveSupportedProtocolVersion(); /** * Returns the (protocol) version of this network. This gets called when the "mbus.getVersion" method is invoked * on this network, and is separated into its own function so that unit tests can override it to simulate other * versions than current. * * Note that this version reflects the highest supported <em>protocol</em> version, and is not necessarily * 1-1 with the actual Vespa release version of the underlying binary. * * @return the version to claim to be */ protected Version getVersion() { return REPORTED_VERSION; } /** * Resolves and assigns a service address for the given recipient using the given address. 
This is called by the * {@link * calls {@link * * @param recipient the recipient to assign the service address to * @param serviceName the name of the service to resolve * @return any error encountered, or null */ public Error resolveServiceAddress(RoutingNode recipient, String serviceName) { RPCServiceAddress ret = servicePool.resolve(serviceName); if (ret == null) { return new Error(ErrorCode.NO_ADDRESS_FOR_SERVICE, String.format("The address of service '%s' could not be resolved. It is not currently " + "registered with the Vespa name server. " + "The service must be having problems, or the routing configuration is wrong. " + "Address resolution attempted from host '%s'", serviceName, identity.getHostname())); } RPCTarget target = targetPool.getTarget(orb, ret); if (target == null) { return new Error(ErrorCode.CONNECTION_ERROR, String.format("Failed to connect to service '%s' from host '%s'.", serviceName, identity.getHostname())); } ret.setTarget(target); recipient.setServiceAddress(ret); return null; } /** * Determines and returns the send adapter that is compatible with the given version. If no adapter can be found, * this method returns null. * * @param version the version for which to return an adapter * @return the compatible adapter */ public RPCSendAdapter getSendAdapter(Version version) { Map.Entry<Version, RPCSendAdapter> lower = sendAdapters.floorEntry(version); return (lower != null) ? lower.getValue() : null; } /** * Deliver an error reply to the recipients of a {@link SendContext} in a way that avoids entanglement. 
* * @param ctx the send context that contains the recipient data * @param errCode the error code to return * @param errMsg the error string to return */ private void replyError(SendContext ctx, int errCode, String errMsg) { for (RoutingNode recipient : ctx.recipients) { Reply reply = new EmptyReply(); reply.getTrace().setLevel(ctx.traceLevel); reply.addError(new Error(errCode, errMsg)); recipient.handleReply(reply); } } /** Returns the owner of this network. */ NetworkOwner getOwner() { return owner; } /** Returns the identity of this network. */ public Identity getIdentity() { return identity; } /** Returns the port number this network listens to. */ public int getPort() { return listener.port(); } /** Returns the JRT supervisor. */ Supervisor getSupervisor() { return orb; } ExecutorService getExecutor() { return executor; } private class SendTask implements Runnable { final Protocol protocol; final SendContext ctx; SendTask(Protocol protocol, SendContext ctx) { this.protocol = protocol; this.ctx = ctx; } public void run() { long timeRemaining = ctx.msg.getTimeRemainingNow(); if (timeRemaining <= 0) { replyError(ctx, ErrorCode.TIMEOUT, "Aborting transmission because zero time remains."); return; } byte[] payload; try { payload = protocol.encode(ctx.version, ctx.msg); } catch (Exception e) { StringWriter out = new StringWriter(); e.printStackTrace(new PrintWriter(out)); replyError(ctx, ErrorCode.ENCODE_ERROR, out.toString()); return; } if (payload == null || payload.length == 0) { replyError(ctx, ErrorCode.ENCODE_ERROR, "Protocol '" + ctx.msg.getProtocol() + "' failed to encode message."); return; } RPCSendAdapter adapter = getSendAdapter(ctx.version); if (adapter == null) { replyError(ctx, ErrorCode.INCOMPATIBLE_VERSION, "Can not send to version '" + ctx.version + "' recipient."); return; } for (RoutingNode recipient : ctx.recipients) { adapter.send(recipient, ctx.version, payload, timeRemaining); } } } /** * Implements a helper class for {@link RPCNetwork * 
encapsulating all the data required for sending a message, but postponing the call to {@link * RPCNetwork * been resolved. */ private static class SendContext implements RPCTarget.VersionHandler { final RPCNetwork net; final Message msg; final int traceLevel; final List<RoutingNode> recipients = new LinkedList<>(); boolean hasError = false; int pending; Version version; SendContext(RPCNetwork net, Message msg, List<RoutingNode> recipients) { this.net = net; this.msg = msg; this.traceLevel = this.msg.getTrace().getLevel(); this.recipients.addAll(recipients); this.pending = this.recipients.size(); this.version = this.net.getVersion(); } @Override public void handleVersion(Version version) { boolean shouldSend = false; synchronized (this) { if (version == null) { hasError = true; } else if (version.isBefore(this.version)) { this.version = version; } if (--pending == 0) { shouldSend = true; } } if (shouldSend) { net.send(this); } } } /** * Implements a helper class to invoke {@link RPCTargetPool * This is to untangle the target pool from the scheduler. */ private static class TargetPoolTask implements Runnable { final RPCTargetPool pool; final Task jrtTask; TargetPoolTask(RPCTargetPool pool, Supervisor orb) { this.pool = pool; this.jrtTask = orb.transport().selectThread().createTask(this); this.jrtTask.schedule(1.0); } @Override public void run() { pool.flushTargets(false); jrtTask.schedule(1.0); } } }
class RPCNetwork implements Network, MethodHandler { private static final Logger log = Logger.getLogger(RPCNetwork.class.getName()); private final AtomicBoolean destroyed = new AtomicBoolean(false); private final Identity identity; private final Supervisor orb; private final RPCTargetPool targetPool; private final RPCServicePool servicePool; private final Acceptor listener; private final Mirror mirror; private final Register register; private final TreeMap<Version, RPCSendAdapter> sendAdapters = new TreeMap<>(); private volatile NetworkOwner owner; private final SlobrokConfigSubscriber slobroksConfig; private final LinkedHashMap<String, Route> lruRouteMap = new LinkedHashMap<>(10000, 0.5f, true); private final ExecutorService executor = new ThreadPoolExecutor(getNumThreads(), getNumThreads(), 0L, TimeUnit.SECONDS, new LinkedBlockingQueue<>(), ThreadFactoryFactory.getDaemonThreadFactory("mbus.net")); private static int getNumThreads() { return Math.max(2, Runtime.getRuntime().availableProcessors()/2); } private static boolean shouldEnableTcpNodelay(RPCNetworkParams.Optimization optimization) { return optimization == RPCNetworkParams.Optimization.LATENCY; } /** * Create an RPCNetwork. The servicePrefix is combined with session names to create service names. 
If the service * prefix is 'a/b' and the session name is 'c', the resulting service name that identifies the session on the * message bus will be 'a/b/c' * * @param params a complete set of parameters * @param slobrokConfig subscriber for slobroks config */ private RPCNetwork(RPCNetworkParams params, SlobrokConfigSubscriber slobrokConfig) { this.slobroksConfig = slobrokConfig; identity = params.getIdentity(); orb = new Supervisor(new Transport("mbus-rpc-" + identity.getServicePrefix(), params.getNumNetworkThreads(), shouldEnableTcpNodelay(params.getOptimization()), params.getTransportEventsBeforeWakeup())); orb.setMaxInputBufferSize(params.getMaxInputBufferSize()); orb.setMaxOutputBufferSize(params.getMaxOutputBufferSize()); targetPool = new RPCTargetPool(params.getConnectionExpireSecs(), params.getNumTargetsPerSpec()); servicePool = new RPCServicePool(this, 4096); Method method = new Method("mbus.getVersion", "", "s", this); method.requireCapabilities(CapabilitySet.none()); method.methodDesc("Retrieves the message bus version."); method.returnDesc(0, "version", "The message bus version."); orb.addMethod(method); try { listener = orb.listen(new Spec(params.getListenPort())); } catch (ListenFailedException e) { orb.transport().shutdown().join(); throw new RuntimeException(e); } TargetPoolTask task = new TargetPoolTask(targetPool, orb); task.jrtTask.scheduleNow(); register = new Register(orb, slobrokConfig.getSlobroks(), identity.getHostname(), listener.port()); mirror = new Mirror(orb, slobrokConfig.getSlobroks()); } /** * Create an RPCNetwork. The servicePrefix is combined with session names to create service names. If the service * prefix is 'a/b' and the session name is 'c', the resulting service name that identifies the session on the * message bus will be 'a/b/c' * * @param params a complete set of parameters */ public RPCNetwork(RPCNetworkParams params) { this(params, params.getSlobroksConfig() != null ? 
new SlobrokConfigSubscriber(params.getSlobroksConfig()) : new SlobrokConfigSubscriber(params.getSlobrokConfigId())); } /** * The network uses a cache of RPC targets (see {@link RPCTargetPool}) that allows it to save time by reusing open * connections. It works by keeping a set of the most recently used targets open. Calling this method forces all * unused connections to close immediately. */ protected void flushTargetPool() { targetPool.flushTargets(true); } final Route getRoute(String routeString) { Route route = lruRouteMap.get(routeString); if (route == null) { route = Route.parse(routeString); lruRouteMap.put(routeString, route); } return new Route(route); } @Override public boolean waitUntilReady(double seconds) { int millis = (int) seconds * 1000; int i = 0; do { if (mirror.ready()) { if (i > 200) { log.log(Level.INFO, "network became ready (at "+i+" ms)"); } return true; } if ((i == 200) || ((i > 200) && ((i % 1000) == 0))) { log.log(Level.INFO, "waiting for network to become ready ("+i+" of "+millis+" ms)"); mirror.dumpState(); } try { i += 10; Thread.sleep(10); } catch (InterruptedException e) { } } while (i < millis); return false; } @Override public boolean allocServiceAddress(RoutingNode recipient) { Hop hop = recipient.getRoute().getHop(0); String service = hop.getServiceName(); Error error = resolveServiceAddress(recipient, service); if (error == null) { return true; } recipient.setError(error); return false; } @Override public void freeServiceAddress(RoutingNode recipient) { RPCTarget target = ((RPCServiceAddress)recipient.getServiceAddress()).getTarget(); if (target != null) { target.subRef(); } recipient.setServiceAddress(null); } @Override public void attach(NetworkOwner owner) { if (this.owner != null) { throw new IllegalStateException("Network is already attached to another owner."); } this.owner = owner; sendAdapters.put(new Version(6,149), new RPCSendV2(this)); } @Override public void registerSession(String session) { 
register.registerName(identity.getServicePrefix() + "/" + session); } @Override public void unregisterSession(String session) { register.unregisterName(identity.getServicePrefix() + "/" + session); } @Override public void sync() { orb.transport().sync(); } @Override public void shutdown() { destroy(); } @Override public String getConnectionSpec() { return "tcp/" + identity.getHostname() + ":" + listener.port(); } @Override public IMirror getMirror() { return mirror; } @Override public void invoke(Request request) { request.returnValues().add(new StringValue(getVersion().toString())); } @Override public void send(Message msg, List<RoutingNode> recipients) { SendContext ctx = new SendContext(this, msg, recipients); Duration timeout = Duration.ofMillis(ctx.msg.getTimeRemainingNow()); for (RoutingNode recipient : ctx.recipients) { RPCServiceAddress address = (RPCServiceAddress)recipient.getServiceAddress(); address.getTarget().resolveVersion(timeout, ctx); } } private static String buildRecipientListString(SendContext ctx) { return ctx.recipients.stream().map(r -> { if (!(r.getServiceAddress() instanceof RPCServiceAddress)) { return "<non-RPC service address>"; } RPCServiceAddress addr = (RPCServiceAddress)r.getServiceAddress(); return String.format("%s at %s", addr.getServiceName(), addr.getConnectionSpec()); }).collect(Collectors.joining(", ")); } /** * This method is a callback invoked after {@link * resolved. If all versions were resolved ahead of time, this method is invoked by the same thread as the former. * If not, this method is invoked by the network thread during the version callback. 
* * @param ctx all the required send-data */ private void send(SendContext ctx) { if (destroyed.get()) { replyError(ctx, ErrorCode.NETWORK_SHUTDOWN, "Network layer has performed shutdown."); } else if (ctx.hasError) { replyError(ctx, ErrorCode.HANDSHAKE_FAILED, String.format("An error occurred while resolving version of recipient(s) [%s] from host '%s'.", buildRecipientListString(ctx), identity.getHostname())); } else { new SendTask(owner.getProtocol(ctx.msg.getProtocol()), ctx).run(); } } /** * Sets the destroyed flag to true. The very first time this method is called, it cleans up all its dependencies. * Even if you retain a reference to this object, all of its content is allowed to be garbage collected. * * @return true if content existed and was destroyed */ public boolean destroy() { if (!destroyed.getAndSet(true)) { if (slobroksConfig != null) { slobroksConfig.shutdown(); } register.shutdown(); mirror.shutdown(); listener.shutdown().join(); orb.transport().shutdown().join(); targetPool.flushTargets(true); executor.shutdown(); return true; } return false; } private static final Version REPORTED_VERSION = deriveSupportedProtocolVersion(); /** * Returns the (protocol) version of this network. This gets called when the "mbus.getVersion" method is invoked * on this network, and is separated into its own function so that unit tests can override it to simulate other * versions than current. * * Note that this version reflects the highest supported <em>protocol</em> version, and is not necessarily * 1-1 with the actual Vespa release version of the underlying binary. * * @return the version to claim to be */ protected Version getVersion() { return REPORTED_VERSION; } /** * Resolves and assigns a service address for the given recipient using the given address. 
This is called by the * {@link * calls {@link * * @param recipient the recipient to assign the service address to * @param serviceName the name of the service to resolve * @return any error encountered, or null */ public Error resolveServiceAddress(RoutingNode recipient, String serviceName) { RPCServiceAddress ret = servicePool.resolve(serviceName); if (ret == null) { return new Error(ErrorCode.NO_ADDRESS_FOR_SERVICE, String.format("The address of service '%s' could not be resolved. It is not currently " + "registered with the Vespa name server. " + "The service must be having problems, or the routing configuration is wrong. " + "Address resolution attempted from host '%s'", serviceName, identity.getHostname())); } RPCTarget target = targetPool.getTarget(orb, ret); if (target == null) { return new Error(ErrorCode.CONNECTION_ERROR, String.format("Failed to connect to service '%s' from host '%s'.", serviceName, identity.getHostname())); } ret.setTarget(target); recipient.setServiceAddress(ret); return null; } /** * Determines and returns the send adapter that is compatible with the given version. If no adapter can be found, * this method returns null. * * @param version the version for which to return an adapter * @return the compatible adapter */ public RPCSendAdapter getSendAdapter(Version version) { Map.Entry<Version, RPCSendAdapter> lower = sendAdapters.floorEntry(version); return (lower != null) ? lower.getValue() : null; } /** * Deliver an error reply to the recipients of a {@link SendContext} in a way that avoids entanglement. 
* * @param ctx the send context that contains the recipient data * @param errCode the error code to return * @param errMsg the error string to return */ private void replyError(SendContext ctx, int errCode, String errMsg) { for (RoutingNode recipient : ctx.recipients) { Reply reply = new EmptyReply(); reply.getTrace().setLevel(ctx.traceLevel); reply.addError(new Error(errCode, errMsg)); recipient.handleReply(reply); } } /** Returns the owner of this network. */ NetworkOwner getOwner() { return owner; } /** Returns the identity of this network. */ public Identity getIdentity() { return identity; } /** Returns the port number this network listens to. */ public int getPort() { return listener.port(); } /** Returns the JRT supervisor. */ Supervisor getSupervisor() { return orb; } ExecutorService getExecutor() { return executor; } private class SendTask implements Runnable { final Protocol protocol; final SendContext ctx; SendTask(Protocol protocol, SendContext ctx) { this.protocol = protocol; this.ctx = ctx; } public void run() { long timeRemaining = ctx.msg.getTimeRemainingNow(); if (timeRemaining <= 0) { replyError(ctx, ErrorCode.TIMEOUT, "Aborting transmission because zero time remains."); return; } byte[] payload; try { payload = protocol.encode(ctx.version, ctx.msg); } catch (Exception e) { StringWriter out = new StringWriter(); e.printStackTrace(new PrintWriter(out)); replyError(ctx, ErrorCode.ENCODE_ERROR, out.toString()); return; } if (payload == null || payload.length == 0) { replyError(ctx, ErrorCode.ENCODE_ERROR, "Protocol '" + ctx.msg.getProtocol() + "' failed to encode message."); return; } RPCSendAdapter adapter = getSendAdapter(ctx.version); if (adapter == null) { replyError(ctx, ErrorCode.INCOMPATIBLE_VERSION, "Can not send to version '" + ctx.version + "' recipient."); return; } for (RoutingNode recipient : ctx.recipients) { adapter.send(recipient, ctx.version, payload, timeRemaining); } } } /** * Implements a helper class for {@link RPCNetwork * 
encapsulating all the data required for sending a message, but postponing the call to {@link * RPCNetwork * been resolved. */ private static class SendContext implements RPCTarget.VersionHandler { final RPCNetwork net; final Message msg; final int traceLevel; final List<RoutingNode> recipients = new LinkedList<>(); boolean hasError = false; int pending; Version version; SendContext(RPCNetwork net, Message msg, List<RoutingNode> recipients) { this.net = net; this.msg = msg; this.traceLevel = this.msg.getTrace().getLevel(); this.recipients.addAll(recipients); this.pending = this.recipients.size(); this.version = this.net.getVersion(); } @Override public void handleVersion(Version version) { boolean shouldSend = false; synchronized (this) { if (version == null) { hasError = true; } else if (version.isBefore(this.version)) { this.version = version; } if (--pending == 0) { shouldSend = true; } } if (shouldSend) { net.send(this); } } } /** * Implements a helper class to invoke {@link RPCTargetPool * This is to untangle the target pool from the scheduler. */ private static class TargetPoolTask implements Runnable { final RPCTargetPool pool; final Task jrtTask; TargetPoolTask(RPCTargetPool pool, Supervisor orb) { this.pool = pool; this.jrtTask = orb.transport().selectThread().createTask(this); this.jrtTask.schedule(1.0); } @Override public void run() { pool.flushTargets(false); jrtTask.schedule(1.0); } } }
👀 💦
private static Version deriveSupportedProtocolVersion() {
    // Opt in to the newer protocol via environment flag; any value other than
    // "true"/"yes" — including an unset variable — keeps the legacy version.
    String flag = System.getenv("VESPA_MBUS_DOCUMENTAPI_USE_PROTOBUF");
    boolean useProtobuf = "true".equals(flag) || "yes".equals(flag);
    return useProtobuf ? new Version(8, 304) : new Version(8, 303);
}
private static Version deriveSupportedProtocolVersion() {
    // Opt in to the newer protocol via environment flag; any value other than
    // "true"/"yes" — including an unset variable — keeps the legacy version.
    String flag = System.getenv("VESPA_MBUS_DOCUMENTAPI_USE_PROTOBUF");
    boolean useProtobuf = "true".equals(flag) || "yes".equals(flag);
    return useProtobuf ? new Version(8, 310) : new Version(8, 309);
}
class RPCNetwork implements Network, MethodHandler { private static final Logger log = Logger.getLogger(RPCNetwork.class.getName()); private final AtomicBoolean destroyed = new AtomicBoolean(false); private final Identity identity; private final Supervisor orb; private final RPCTargetPool targetPool; private final RPCServicePool servicePool; private final Acceptor listener; private final Mirror mirror; private final Register register; private final TreeMap<Version, RPCSendAdapter> sendAdapters = new TreeMap<>(); private volatile NetworkOwner owner; private final SlobrokConfigSubscriber slobroksConfig; private final LinkedHashMap<String, Route> lruRouteMap = new LinkedHashMap<>(10000, 0.5f, true); private final ExecutorService executor = new ThreadPoolExecutor(getNumThreads(), getNumThreads(), 0L, TimeUnit.SECONDS, new LinkedBlockingQueue<>(), ThreadFactoryFactory.getDaemonThreadFactory("mbus.net")); private static int getNumThreads() { return Math.max(2, Runtime.getRuntime().availableProcessors()/2); } private static boolean shouldEnableTcpNodelay(RPCNetworkParams.Optimization optimization) { return optimization == RPCNetworkParams.Optimization.LATENCY; } /** * Create an RPCNetwork. The servicePrefix is combined with session names to create service names. 
If the service * prefix is 'a/b' and the session name is 'c', the resulting service name that identifies the session on the * message bus will be 'a/b/c' * * @param params a complete set of parameters * @param slobrokConfig subscriber for slobroks config */ private RPCNetwork(RPCNetworkParams params, SlobrokConfigSubscriber slobrokConfig) { this.slobroksConfig = slobrokConfig; identity = params.getIdentity(); orb = new Supervisor(new Transport("mbus-rpc-" + identity.getServicePrefix(), params.getNumNetworkThreads(), shouldEnableTcpNodelay(params.getOptimization()), params.getTransportEventsBeforeWakeup())); orb.setMaxInputBufferSize(params.getMaxInputBufferSize()); orb.setMaxOutputBufferSize(params.getMaxOutputBufferSize()); targetPool = new RPCTargetPool(params.getConnectionExpireSecs(), params.getNumTargetsPerSpec()); servicePool = new RPCServicePool(this, 4096); Method method = new Method("mbus.getVersion", "", "s", this); method.requireCapabilities(CapabilitySet.none()); method.methodDesc("Retrieves the message bus version."); method.returnDesc(0, "version", "The message bus version."); orb.addMethod(method); try { listener = orb.listen(new Spec(params.getListenPort())); } catch (ListenFailedException e) { orb.transport().shutdown().join(); throw new RuntimeException(e); } TargetPoolTask task = new TargetPoolTask(targetPool, orb); task.jrtTask.scheduleNow(); register = new Register(orb, slobrokConfig.getSlobroks(), identity.getHostname(), listener.port()); mirror = new Mirror(orb, slobrokConfig.getSlobroks()); } /** * Create an RPCNetwork. The servicePrefix is combined with session names to create service names. If the service * prefix is 'a/b' and the session name is 'c', the resulting service name that identifies the session on the * message bus will be 'a/b/c' * * @param params a complete set of parameters */ public RPCNetwork(RPCNetworkParams params) { this(params, params.getSlobroksConfig() != null ? 
new SlobrokConfigSubscriber(params.getSlobroksConfig()) : new SlobrokConfigSubscriber(params.getSlobrokConfigId())); } /** * The network uses a cache of RPC targets (see {@link RPCTargetPool}) that allows it to save time by reusing open * connections. It works by keeping a set of the most recently used targets open. Calling this method forces all * unused connections to close immediately. */ protected void flushTargetPool() { targetPool.flushTargets(true); } final Route getRoute(String routeString) { Route route = lruRouteMap.get(routeString); if (route == null) { route = Route.parse(routeString); lruRouteMap.put(routeString, route); } return new Route(route); } @Override public boolean waitUntilReady(double seconds) { int millis = (int) seconds * 1000; int i = 0; do { if (mirror.ready()) { if (i > 200) { log.log(Level.INFO, "network became ready (at "+i+" ms)"); } return true; } if ((i == 200) || ((i > 200) && ((i % 1000) == 0))) { log.log(Level.INFO, "waiting for network to become ready ("+i+" of "+millis+" ms)"); mirror.dumpState(); } try { i += 10; Thread.sleep(10); } catch (InterruptedException e) { } } while (i < millis); return false; } @Override public boolean allocServiceAddress(RoutingNode recipient) { Hop hop = recipient.getRoute().getHop(0); String service = hop.getServiceName(); Error error = resolveServiceAddress(recipient, service); if (error == null) { return true; } recipient.setError(error); return false; } @Override public void freeServiceAddress(RoutingNode recipient) { RPCTarget target = ((RPCServiceAddress)recipient.getServiceAddress()).getTarget(); if (target != null) { target.subRef(); } recipient.setServiceAddress(null); } @Override public void attach(NetworkOwner owner) { if (this.owner != null) { throw new IllegalStateException("Network is already attached to another owner."); } this.owner = owner; sendAdapters.put(new Version(6,149), new RPCSendV2(this)); } @Override public void registerSession(String session) { 
register.registerName(identity.getServicePrefix() + "/" + session); } @Override public void unregisterSession(String session) { register.unregisterName(identity.getServicePrefix() + "/" + session); } @Override public void sync() { orb.transport().sync(); } @Override public void shutdown() { destroy(); } @Override public String getConnectionSpec() { return "tcp/" + identity.getHostname() + ":" + listener.port(); } @Override public IMirror getMirror() { return mirror; } @Override public void invoke(Request request) { request.returnValues().add(new StringValue(getVersion().toString())); } @Override public void send(Message msg, List<RoutingNode> recipients) { SendContext ctx = new SendContext(this, msg, recipients); Duration timeout = Duration.ofMillis(ctx.msg.getTimeRemainingNow()); for (RoutingNode recipient : ctx.recipients) { RPCServiceAddress address = (RPCServiceAddress)recipient.getServiceAddress(); address.getTarget().resolveVersion(timeout, ctx); } } private static String buildRecipientListString(SendContext ctx) { return ctx.recipients.stream().map(r -> { if (!(r.getServiceAddress() instanceof RPCServiceAddress)) { return "<non-RPC service address>"; } RPCServiceAddress addr = (RPCServiceAddress)r.getServiceAddress(); return String.format("%s at %s", addr.getServiceName(), addr.getConnectionSpec()); }).collect(Collectors.joining(", ")); } /** * This method is a callback invoked after {@link * resolved. If all versions were resolved ahead of time, this method is invoked by the same thread as the former. * If not, this method is invoked by the network thread during the version callback. 
* * @param ctx all the required send-data */ private void send(SendContext ctx) { if (destroyed.get()) { replyError(ctx, ErrorCode.NETWORK_SHUTDOWN, "Network layer has performed shutdown."); } else if (ctx.hasError) { replyError(ctx, ErrorCode.HANDSHAKE_FAILED, String.format("An error occurred while resolving version of recipient(s) [%s] from host '%s'.", buildRecipientListString(ctx), identity.getHostname())); } else { new SendTask(owner.getProtocol(ctx.msg.getProtocol()), ctx).run(); } } /** * Sets the destroyed flag to true. The very first time this method is called, it cleans up all its dependencies. * Even if you retain a reference to this object, all of its content is allowed to be garbage collected. * * @return true if content existed and was destroyed */ public boolean destroy() { if (!destroyed.getAndSet(true)) { if (slobroksConfig != null) { slobroksConfig.shutdown(); } register.shutdown(); mirror.shutdown(); listener.shutdown().join(); orb.transport().shutdown().join(); targetPool.flushTargets(true); executor.shutdown(); return true; } return false; } private static final Version REPORTED_VERSION = deriveSupportedProtocolVersion(); /** * Returns the (protocol) version of this network. This gets called when the "mbus.getVersion" method is invoked * on this network, and is separated into its own function so that unit tests can override it to simulate other * versions than current. * * Note that this version reflects the highest supported <em>protocol</em> version, and is not necessarily * 1-1 with the actual Vespa release version of the underlying binary. * * @return the version to claim to be */ protected Version getVersion() { return REPORTED_VERSION; } /** * Resolves and assigns a service address for the given recipient using the given address. 
This is called by the * {@link * calls {@link * * @param recipient the recipient to assign the service address to * @param serviceName the name of the service to resolve * @return any error encountered, or null */ public Error resolveServiceAddress(RoutingNode recipient, String serviceName) { RPCServiceAddress ret = servicePool.resolve(serviceName); if (ret == null) { return new Error(ErrorCode.NO_ADDRESS_FOR_SERVICE, String.format("The address of service '%s' could not be resolved. It is not currently " + "registered with the Vespa name server. " + "The service must be having problems, or the routing configuration is wrong. " + "Address resolution attempted from host '%s'", serviceName, identity.getHostname())); } RPCTarget target = targetPool.getTarget(orb, ret); if (target == null) { return new Error(ErrorCode.CONNECTION_ERROR, String.format("Failed to connect to service '%s' from host '%s'.", serviceName, identity.getHostname())); } ret.setTarget(target); recipient.setServiceAddress(ret); return null; } /** * Determines and returns the send adapter that is compatible with the given version. If no adapter can be found, * this method returns null. * * @param version the version for which to return an adapter * @return the compatible adapter */ public RPCSendAdapter getSendAdapter(Version version) { Map.Entry<Version, RPCSendAdapter> lower = sendAdapters.floorEntry(version); return (lower != null) ? lower.getValue() : null; } /** * Deliver an error reply to the recipients of a {@link SendContext} in a way that avoids entanglement. 
* * @param ctx the send context that contains the recipient data * @param errCode the error code to return * @param errMsg the error string to return */ private void replyError(SendContext ctx, int errCode, String errMsg) { for (RoutingNode recipient : ctx.recipients) { Reply reply = new EmptyReply(); reply.getTrace().setLevel(ctx.traceLevel); reply.addError(new Error(errCode, errMsg)); recipient.handleReply(reply); } } /** Returns the owner of this network. */ NetworkOwner getOwner() { return owner; } /** Returns the identity of this network. */ public Identity getIdentity() { return identity; } /** Returns the port number this network listens to. */ public int getPort() { return listener.port(); } /** Returns the JRT supervisor. */ Supervisor getSupervisor() { return orb; } ExecutorService getExecutor() { return executor; } private class SendTask implements Runnable { final Protocol protocol; final SendContext ctx; SendTask(Protocol protocol, SendContext ctx) { this.protocol = protocol; this.ctx = ctx; } public void run() { long timeRemaining = ctx.msg.getTimeRemainingNow(); if (timeRemaining <= 0) { replyError(ctx, ErrorCode.TIMEOUT, "Aborting transmission because zero time remains."); return; } byte[] payload; try { payload = protocol.encode(ctx.version, ctx.msg); } catch (Exception e) { StringWriter out = new StringWriter(); e.printStackTrace(new PrintWriter(out)); replyError(ctx, ErrorCode.ENCODE_ERROR, out.toString()); return; } if (payload == null || payload.length == 0) { replyError(ctx, ErrorCode.ENCODE_ERROR, "Protocol '" + ctx.msg.getProtocol() + "' failed to encode message."); return; } RPCSendAdapter adapter = getSendAdapter(ctx.version); if (adapter == null) { replyError(ctx, ErrorCode.INCOMPATIBLE_VERSION, "Can not send to version '" + ctx.version + "' recipient."); return; } for (RoutingNode recipient : ctx.recipients) { adapter.send(recipient, ctx.version, payload, timeRemaining); } } } /** * Implements a helper class for {@link RPCNetwork * 
encapsulating all the data required for sending a message, but postponing the call to {@link * RPCNetwork * been resolved. */ private static class SendContext implements RPCTarget.VersionHandler { final RPCNetwork net; final Message msg; final int traceLevel; final List<RoutingNode> recipients = new LinkedList<>(); boolean hasError = false; int pending; Version version; SendContext(RPCNetwork net, Message msg, List<RoutingNode> recipients) { this.net = net; this.msg = msg; this.traceLevel = this.msg.getTrace().getLevel(); this.recipients.addAll(recipients); this.pending = this.recipients.size(); this.version = this.net.getVersion(); } @Override public void handleVersion(Version version) { boolean shouldSend = false; synchronized (this) { if (version == null) { hasError = true; } else if (version.isBefore(this.version)) { this.version = version; } if (--pending == 0) { shouldSend = true; } } if (shouldSend) { net.send(this); } } } /** * Implements a helper class to invoke {@link RPCTargetPool * This is to untangle the target pool from the scheduler. */ private static class TargetPoolTask implements Runnable { final RPCTargetPool pool; final Task jrtTask; TargetPoolTask(RPCTargetPool pool, Supervisor orb) { this.pool = pool; this.jrtTask = orb.transport().selectThread().createTask(this); this.jrtTask.schedule(1.0); } @Override public void run() { pool.flushTargets(false); jrtTask.schedule(1.0); } } }
class RPCNetwork implements Network, MethodHandler { private static final Logger log = Logger.getLogger(RPCNetwork.class.getName()); private final AtomicBoolean destroyed = new AtomicBoolean(false); private final Identity identity; private final Supervisor orb; private final RPCTargetPool targetPool; private final RPCServicePool servicePool; private final Acceptor listener; private final Mirror mirror; private final Register register; private final TreeMap<Version, RPCSendAdapter> sendAdapters = new TreeMap<>(); private volatile NetworkOwner owner; private final SlobrokConfigSubscriber slobroksConfig; private final LinkedHashMap<String, Route> lruRouteMap = new LinkedHashMap<>(10000, 0.5f, true); private final ExecutorService executor = new ThreadPoolExecutor(getNumThreads(), getNumThreads(), 0L, TimeUnit.SECONDS, new LinkedBlockingQueue<>(), ThreadFactoryFactory.getDaemonThreadFactory("mbus.net")); private static int getNumThreads() { return Math.max(2, Runtime.getRuntime().availableProcessors()/2); } private static boolean shouldEnableTcpNodelay(RPCNetworkParams.Optimization optimization) { return optimization == RPCNetworkParams.Optimization.LATENCY; } /** * Create an RPCNetwork. The servicePrefix is combined with session names to create service names. 
If the service * prefix is 'a/b' and the session name is 'c', the resulting service name that identifies the session on the * message bus will be 'a/b/c' * * @param params a complete set of parameters * @param slobrokConfig subscriber for slobroks config */ private RPCNetwork(RPCNetworkParams params, SlobrokConfigSubscriber slobrokConfig) { this.slobroksConfig = slobrokConfig; identity = params.getIdentity(); orb = new Supervisor(new Transport("mbus-rpc-" + identity.getServicePrefix(), params.getNumNetworkThreads(), shouldEnableTcpNodelay(params.getOptimization()), params.getTransportEventsBeforeWakeup())); orb.setMaxInputBufferSize(params.getMaxInputBufferSize()); orb.setMaxOutputBufferSize(params.getMaxOutputBufferSize()); targetPool = new RPCTargetPool(params.getConnectionExpireSecs(), params.getNumTargetsPerSpec()); servicePool = new RPCServicePool(this, 4096); Method method = new Method("mbus.getVersion", "", "s", this); method.requireCapabilities(CapabilitySet.none()); method.methodDesc("Retrieves the message bus version."); method.returnDesc(0, "version", "The message bus version."); orb.addMethod(method); try { listener = orb.listen(new Spec(params.getListenPort())); } catch (ListenFailedException e) { orb.transport().shutdown().join(); throw new RuntimeException(e); } TargetPoolTask task = new TargetPoolTask(targetPool, orb); task.jrtTask.scheduleNow(); register = new Register(orb, slobrokConfig.getSlobroks(), identity.getHostname(), listener.port()); mirror = new Mirror(orb, slobrokConfig.getSlobroks()); } /** * Create an RPCNetwork. The servicePrefix is combined with session names to create service names. If the service * prefix is 'a/b' and the session name is 'c', the resulting service name that identifies the session on the * message bus will be 'a/b/c' * * @param params a complete set of parameters */ public RPCNetwork(RPCNetworkParams params) { this(params, params.getSlobroksConfig() != null ? 
new SlobrokConfigSubscriber(params.getSlobroksConfig()) : new SlobrokConfigSubscriber(params.getSlobrokConfigId())); } /** * The network uses a cache of RPC targets (see {@link RPCTargetPool}) that allows it to save time by reusing open * connections. It works by keeping a set of the most recently used targets open. Calling this method forces all * unused connections to close immediately. */ protected void flushTargetPool() { targetPool.flushTargets(true); } final Route getRoute(String routeString) { Route route = lruRouteMap.get(routeString); if (route == null) { route = Route.parse(routeString); lruRouteMap.put(routeString, route); } return new Route(route); } @Override public boolean waitUntilReady(double seconds) { int millis = (int) seconds * 1000; int i = 0; do { if (mirror.ready()) { if (i > 200) { log.log(Level.INFO, "network became ready (at "+i+" ms)"); } return true; } if ((i == 200) || ((i > 200) && ((i % 1000) == 0))) { log.log(Level.INFO, "waiting for network to become ready ("+i+" of "+millis+" ms)"); mirror.dumpState(); } try { i += 10; Thread.sleep(10); } catch (InterruptedException e) { } } while (i < millis); return false; } @Override public boolean allocServiceAddress(RoutingNode recipient) { Hop hop = recipient.getRoute().getHop(0); String service = hop.getServiceName(); Error error = resolveServiceAddress(recipient, service); if (error == null) { return true; } recipient.setError(error); return false; } @Override public void freeServiceAddress(RoutingNode recipient) { RPCTarget target = ((RPCServiceAddress)recipient.getServiceAddress()).getTarget(); if (target != null) { target.subRef(); } recipient.setServiceAddress(null); } @Override public void attach(NetworkOwner owner) { if (this.owner != null) { throw new IllegalStateException("Network is already attached to another owner."); } this.owner = owner; sendAdapters.put(new Version(6,149), new RPCSendV2(this)); } @Override public void registerSession(String session) { 
register.registerName(identity.getServicePrefix() + "/" + session); } @Override public void unregisterSession(String session) { register.unregisterName(identity.getServicePrefix() + "/" + session); } @Override public void sync() { orb.transport().sync(); } @Override public void shutdown() { destroy(); } @Override public String getConnectionSpec() { return "tcp/" + identity.getHostname() + ":" + listener.port(); } @Override public IMirror getMirror() { return mirror; } @Override public void invoke(Request request) { request.returnValues().add(new StringValue(getVersion().toString())); } @Override public void send(Message msg, List<RoutingNode> recipients) { SendContext ctx = new SendContext(this, msg, recipients); Duration timeout = Duration.ofMillis(ctx.msg.getTimeRemainingNow()); for (RoutingNode recipient : ctx.recipients) { RPCServiceAddress address = (RPCServiceAddress)recipient.getServiceAddress(); address.getTarget().resolveVersion(timeout, ctx); } } private static String buildRecipientListString(SendContext ctx) { return ctx.recipients.stream().map(r -> { if (!(r.getServiceAddress() instanceof RPCServiceAddress)) { return "<non-RPC service address>"; } RPCServiceAddress addr = (RPCServiceAddress)r.getServiceAddress(); return String.format("%s at %s", addr.getServiceName(), addr.getConnectionSpec()); }).collect(Collectors.joining(", ")); } /** * This method is a callback invoked after {@link * resolved. If all versions were resolved ahead of time, this method is invoked by the same thread as the former. * If not, this method is invoked by the network thread during the version callback. 
* * @param ctx all the required send-data */ private void send(SendContext ctx) { if (destroyed.get()) { replyError(ctx, ErrorCode.NETWORK_SHUTDOWN, "Network layer has performed shutdown."); } else if (ctx.hasError) { replyError(ctx, ErrorCode.HANDSHAKE_FAILED, String.format("An error occurred while resolving version of recipient(s) [%s] from host '%s'.", buildRecipientListString(ctx), identity.getHostname())); } else { new SendTask(owner.getProtocol(ctx.msg.getProtocol()), ctx).run(); } } /** * Sets the destroyed flag to true. The very first time this method is called, it cleans up all its dependencies. * Even if you retain a reference to this object, all of its content is allowed to be garbage collected. * * @return true if content existed and was destroyed */ public boolean destroy() { if (!destroyed.getAndSet(true)) { if (slobroksConfig != null) { slobroksConfig.shutdown(); } register.shutdown(); mirror.shutdown(); listener.shutdown().join(); orb.transport().shutdown().join(); targetPool.flushTargets(true); executor.shutdown(); return true; } return false; } private static final Version REPORTED_VERSION = deriveSupportedProtocolVersion(); /** * Returns the (protocol) version of this network. This gets called when the "mbus.getVersion" method is invoked * on this network, and is separated into its own function so that unit tests can override it to simulate other * versions than current. * * Note that this version reflects the highest supported <em>protocol</em> version, and is not necessarily * 1-1 with the actual Vespa release version of the underlying binary. * * @return the version to claim to be */ protected Version getVersion() { return REPORTED_VERSION; } /** * Resolves and assigns a service address for the given recipient using the given address. 
This is called by the * {@link * calls {@link * * @param recipient the recipient to assign the service address to * @param serviceName the name of the service to resolve * @return any error encountered, or null */ public Error resolveServiceAddress(RoutingNode recipient, String serviceName) { RPCServiceAddress ret = servicePool.resolve(serviceName); if (ret == null) { return new Error(ErrorCode.NO_ADDRESS_FOR_SERVICE, String.format("The address of service '%s' could not be resolved. It is not currently " + "registered with the Vespa name server. " + "The service must be having problems, or the routing configuration is wrong. " + "Address resolution attempted from host '%s'", serviceName, identity.getHostname())); } RPCTarget target = targetPool.getTarget(orb, ret); if (target == null) { return new Error(ErrorCode.CONNECTION_ERROR, String.format("Failed to connect to service '%s' from host '%s'.", serviceName, identity.getHostname())); } ret.setTarget(target); recipient.setServiceAddress(ret); return null; } /** * Determines and returns the send adapter that is compatible with the given version. If no adapter can be found, * this method returns null. * * @param version the version for which to return an adapter * @return the compatible adapter */ public RPCSendAdapter getSendAdapter(Version version) { Map.Entry<Version, RPCSendAdapter> lower = sendAdapters.floorEntry(version); return (lower != null) ? lower.getValue() : null; } /** * Deliver an error reply to the recipients of a {@link SendContext} in a way that avoids entanglement. 
* * @param ctx the send context that contains the recipient data * @param errCode the error code to return * @param errMsg the error string to return */ private void replyError(SendContext ctx, int errCode, String errMsg) { for (RoutingNode recipient : ctx.recipients) { Reply reply = new EmptyReply(); reply.getTrace().setLevel(ctx.traceLevel); reply.addError(new Error(errCode, errMsg)); recipient.handleReply(reply); } } /** Returns the owner of this network. */ NetworkOwner getOwner() { return owner; } /** Returns the identity of this network. */ public Identity getIdentity() { return identity; } /** Returns the port number this network listens to. */ public int getPort() { return listener.port(); } /** Returns the JRT supervisor. */ Supervisor getSupervisor() { return orb; } ExecutorService getExecutor() { return executor; } private class SendTask implements Runnable { final Protocol protocol; final SendContext ctx; SendTask(Protocol protocol, SendContext ctx) { this.protocol = protocol; this.ctx = ctx; } public void run() { long timeRemaining = ctx.msg.getTimeRemainingNow(); if (timeRemaining <= 0) { replyError(ctx, ErrorCode.TIMEOUT, "Aborting transmission because zero time remains."); return; } byte[] payload; try { payload = protocol.encode(ctx.version, ctx.msg); } catch (Exception e) { StringWriter out = new StringWriter(); e.printStackTrace(new PrintWriter(out)); replyError(ctx, ErrorCode.ENCODE_ERROR, out.toString()); return; } if (payload == null || payload.length == 0) { replyError(ctx, ErrorCode.ENCODE_ERROR, "Protocol '" + ctx.msg.getProtocol() + "' failed to encode message."); return; } RPCSendAdapter adapter = getSendAdapter(ctx.version); if (adapter == null) { replyError(ctx, ErrorCode.INCOMPATIBLE_VERSION, "Can not send to version '" + ctx.version + "' recipient."); return; } for (RoutingNode recipient : ctx.recipients) { adapter.send(recipient, ctx.version, payload, timeRemaining); } } } /** * Implements a helper class for {@link RPCNetwork * 
encapsulating all the data required for sending a message, but postponing the call to {@link * RPCNetwork * been resolved. */ private static class SendContext implements RPCTarget.VersionHandler { final RPCNetwork net; final Message msg; final int traceLevel; final List<RoutingNode> recipients = new LinkedList<>(); boolean hasError = false; int pending; Version version; SendContext(RPCNetwork net, Message msg, List<RoutingNode> recipients) { this.net = net; this.msg = msg; this.traceLevel = this.msg.getTrace().getLevel(); this.recipients.addAll(recipients); this.pending = this.recipients.size(); this.version = this.net.getVersion(); } @Override public void handleVersion(Version version) { boolean shouldSend = false; synchronized (this) { if (version == null) { hasError = true; } else if (version.isBefore(this.version)) { this.version = version; } if (--pending == 0) { shouldSend = true; } } if (shouldSend) { net.send(this); } } } /** * Implements a helper class to invoke {@link RPCTargetPool * This is to untangle the target pool from the scheduler. */ private static class TargetPoolTask implements Runnable { final RPCTargetPool pool; final Task jrtTask; TargetPoolTask(RPCTargetPool pool, Supervisor orb) { this.pool = pool; this.jrtTask = orb.transport().selectThread().createTask(this); this.jrtTask.schedule(1.0); } @Override public void run() { pool.flushTargets(false); jrtTask.schedule(1.0); } } }
Will this usage of `protobuf` be sufficient to never need the "version negotiation" again? I'm not super-familiar with this glorious new technology. I mean, where does _it_ check protocol versions, or whatever it does.
private static Version deriveSupportedProtocolVersion() { var maybeEnvVal = System.getenv("VESPA_MBUS_DOCUMENTAPI_USE_PROTOBUF"); if ("true".equals(maybeEnvVal) || "yes".equals(maybeEnvVal)) { return new Version(8, 304); } return new Version(8, 303); }
private static Version deriveSupportedProtocolVersion() { var maybeEnvVal = System.getenv("VESPA_MBUS_DOCUMENTAPI_USE_PROTOBUF"); if ("true".equals(maybeEnvVal) || "yes".equals(maybeEnvVal)) { return new Version(8, 310); } return new Version(8, 309); }
class RPCNetwork implements Network, MethodHandler { private static final Logger log = Logger.getLogger(RPCNetwork.class.getName()); private final AtomicBoolean destroyed = new AtomicBoolean(false); private final Identity identity; private final Supervisor orb; private final RPCTargetPool targetPool; private final RPCServicePool servicePool; private final Acceptor listener; private final Mirror mirror; private final Register register; private final TreeMap<Version, RPCSendAdapter> sendAdapters = new TreeMap<>(); private volatile NetworkOwner owner; private final SlobrokConfigSubscriber slobroksConfig; private final LinkedHashMap<String, Route> lruRouteMap = new LinkedHashMap<>(10000, 0.5f, true); private final ExecutorService executor = new ThreadPoolExecutor(getNumThreads(), getNumThreads(), 0L, TimeUnit.SECONDS, new LinkedBlockingQueue<>(), ThreadFactoryFactory.getDaemonThreadFactory("mbus.net")); private static int getNumThreads() { return Math.max(2, Runtime.getRuntime().availableProcessors()/2); } private static boolean shouldEnableTcpNodelay(RPCNetworkParams.Optimization optimization) { return optimization == RPCNetworkParams.Optimization.LATENCY; } /** * Create an RPCNetwork. The servicePrefix is combined with session names to create service names. 
If the service * prefix is 'a/b' and the session name is 'c', the resulting service name that identifies the session on the * message bus will be 'a/b/c' * * @param params a complete set of parameters * @param slobrokConfig subscriber for slobroks config */ private RPCNetwork(RPCNetworkParams params, SlobrokConfigSubscriber slobrokConfig) { this.slobroksConfig = slobrokConfig; identity = params.getIdentity(); orb = new Supervisor(new Transport("mbus-rpc-" + identity.getServicePrefix(), params.getNumNetworkThreads(), shouldEnableTcpNodelay(params.getOptimization()), params.getTransportEventsBeforeWakeup())); orb.setMaxInputBufferSize(params.getMaxInputBufferSize()); orb.setMaxOutputBufferSize(params.getMaxOutputBufferSize()); targetPool = new RPCTargetPool(params.getConnectionExpireSecs(), params.getNumTargetsPerSpec()); servicePool = new RPCServicePool(this, 4096); Method method = new Method("mbus.getVersion", "", "s", this); method.requireCapabilities(CapabilitySet.none()); method.methodDesc("Retrieves the message bus version."); method.returnDesc(0, "version", "The message bus version."); orb.addMethod(method); try { listener = orb.listen(new Spec(params.getListenPort())); } catch (ListenFailedException e) { orb.transport().shutdown().join(); throw new RuntimeException(e); } TargetPoolTask task = new TargetPoolTask(targetPool, orb); task.jrtTask.scheduleNow(); register = new Register(orb, slobrokConfig.getSlobroks(), identity.getHostname(), listener.port()); mirror = new Mirror(orb, slobrokConfig.getSlobroks()); } /** * Create an RPCNetwork. The servicePrefix is combined with session names to create service names. If the service * prefix is 'a/b' and the session name is 'c', the resulting service name that identifies the session on the * message bus will be 'a/b/c' * * @param params a complete set of parameters */ public RPCNetwork(RPCNetworkParams params) { this(params, params.getSlobroksConfig() != null ? 
new SlobrokConfigSubscriber(params.getSlobroksConfig()) : new SlobrokConfigSubscriber(params.getSlobrokConfigId())); } /** * The network uses a cache of RPC targets (see {@link RPCTargetPool}) that allows it to save time by reusing open * connections. It works by keeping a set of the most recently used targets open. Calling this method forces all * unused connections to close immediately. */ protected void flushTargetPool() { targetPool.flushTargets(true); } final Route getRoute(String routeString) { Route route = lruRouteMap.get(routeString); if (route == null) { route = Route.parse(routeString); lruRouteMap.put(routeString, route); } return new Route(route); } @Override public boolean waitUntilReady(double seconds) { int millis = (int) seconds * 1000; int i = 0; do { if (mirror.ready()) { if (i > 200) { log.log(Level.INFO, "network became ready (at "+i+" ms)"); } return true; } if ((i == 200) || ((i > 200) && ((i % 1000) == 0))) { log.log(Level.INFO, "waiting for network to become ready ("+i+" of "+millis+" ms)"); mirror.dumpState(); } try { i += 10; Thread.sleep(10); } catch (InterruptedException e) { } } while (i < millis); return false; } @Override public boolean allocServiceAddress(RoutingNode recipient) { Hop hop = recipient.getRoute().getHop(0); String service = hop.getServiceName(); Error error = resolveServiceAddress(recipient, service); if (error == null) { return true; } recipient.setError(error); return false; } @Override public void freeServiceAddress(RoutingNode recipient) { RPCTarget target = ((RPCServiceAddress)recipient.getServiceAddress()).getTarget(); if (target != null) { target.subRef(); } recipient.setServiceAddress(null); } @Override public void attach(NetworkOwner owner) { if (this.owner != null) { throw new IllegalStateException("Network is already attached to another owner."); } this.owner = owner; sendAdapters.put(new Version(6,149), new RPCSendV2(this)); } @Override public void registerSession(String session) { 
register.registerName(identity.getServicePrefix() + "/" + session); } @Override public void unregisterSession(String session) { register.unregisterName(identity.getServicePrefix() + "/" + session); } @Override public void sync() { orb.transport().sync(); } @Override public void shutdown() { destroy(); } @Override public String getConnectionSpec() { return "tcp/" + identity.getHostname() + ":" + listener.port(); } @Override public IMirror getMirror() { return mirror; } @Override public void invoke(Request request) { request.returnValues().add(new StringValue(getVersion().toString())); } @Override public void send(Message msg, List<RoutingNode> recipients) { SendContext ctx = new SendContext(this, msg, recipients); Duration timeout = Duration.ofMillis(ctx.msg.getTimeRemainingNow()); for (RoutingNode recipient : ctx.recipients) { RPCServiceAddress address = (RPCServiceAddress)recipient.getServiceAddress(); address.getTarget().resolveVersion(timeout, ctx); } } private static String buildRecipientListString(SendContext ctx) { return ctx.recipients.stream().map(r -> { if (!(r.getServiceAddress() instanceof RPCServiceAddress)) { return "<non-RPC service address>"; } RPCServiceAddress addr = (RPCServiceAddress)r.getServiceAddress(); return String.format("%s at %s", addr.getServiceName(), addr.getConnectionSpec()); }).collect(Collectors.joining(", ")); } /** * This method is a callback invoked after {@link * resolved. If all versions were resolved ahead of time, this method is invoked by the same thread as the former. * If not, this method is invoked by the network thread during the version callback. 
* * @param ctx all the required send-data */ private void send(SendContext ctx) { if (destroyed.get()) { replyError(ctx, ErrorCode.NETWORK_SHUTDOWN, "Network layer has performed shutdown."); } else if (ctx.hasError) { replyError(ctx, ErrorCode.HANDSHAKE_FAILED, String.format("An error occurred while resolving version of recipient(s) [%s] from host '%s'.", buildRecipientListString(ctx), identity.getHostname())); } else { new SendTask(owner.getProtocol(ctx.msg.getProtocol()), ctx).run(); } } /** * Sets the destroyed flag to true. The very first time this method is called, it cleans up all its dependencies. * Even if you retain a reference to this object, all of its content is allowed to be garbage collected. * * @return true if content existed and was destroyed */ public boolean destroy() { if (!destroyed.getAndSet(true)) { if (slobroksConfig != null) { slobroksConfig.shutdown(); } register.shutdown(); mirror.shutdown(); listener.shutdown().join(); orb.transport().shutdown().join(); targetPool.flushTargets(true); executor.shutdown(); return true; } return false; } private static final Version REPORTED_VERSION = deriveSupportedProtocolVersion(); /** * Returns the (protocol) version of this network. This gets called when the "mbus.getVersion" method is invoked * on this network, and is separated into its own function so that unit tests can override it to simulate other * versions than current. * * Note that this version reflects the highest supported <em>protocol</em> version, and is not necessarily * 1-1 with the actual Vespa release version of the underlying binary. * * @return the version to claim to be */ protected Version getVersion() { return REPORTED_VERSION; } /** * Resolves and assigns a service address for the given recipient using the given address. 
This is called by the * {@link * calls {@link * * @param recipient the recipient to assign the service address to * @param serviceName the name of the service to resolve * @return any error encountered, or null */ public Error resolveServiceAddress(RoutingNode recipient, String serviceName) { RPCServiceAddress ret = servicePool.resolve(serviceName); if (ret == null) { return new Error(ErrorCode.NO_ADDRESS_FOR_SERVICE, String.format("The address of service '%s' could not be resolved. It is not currently " + "registered with the Vespa name server. " + "The service must be having problems, or the routing configuration is wrong. " + "Address resolution attempted from host '%s'", serviceName, identity.getHostname())); } RPCTarget target = targetPool.getTarget(orb, ret); if (target == null) { return new Error(ErrorCode.CONNECTION_ERROR, String.format("Failed to connect to service '%s' from host '%s'.", serviceName, identity.getHostname())); } ret.setTarget(target); recipient.setServiceAddress(ret); return null; } /** * Determines and returns the send adapter that is compatible with the given version. If no adapter can be found, * this method returns null. * * @param version the version for which to return an adapter * @return the compatible adapter */ public RPCSendAdapter getSendAdapter(Version version) { Map.Entry<Version, RPCSendAdapter> lower = sendAdapters.floorEntry(version); return (lower != null) ? lower.getValue() : null; } /** * Deliver an error reply to the recipients of a {@link SendContext} in a way that avoids entanglement. 
* * @param ctx the send context that contains the recipient data * @param errCode the error code to return * @param errMsg the error string to return */ private void replyError(SendContext ctx, int errCode, String errMsg) { for (RoutingNode recipient : ctx.recipients) { Reply reply = new EmptyReply(); reply.getTrace().setLevel(ctx.traceLevel); reply.addError(new Error(errCode, errMsg)); recipient.handleReply(reply); } } /** Returns the owner of this network. */ NetworkOwner getOwner() { return owner; } /** Returns the identity of this network. */ public Identity getIdentity() { return identity; } /** Returns the port number this network listens to. */ public int getPort() { return listener.port(); } /** Returns the JRT supervisor. */ Supervisor getSupervisor() { return orb; } ExecutorService getExecutor() { return executor; } private class SendTask implements Runnable { final Protocol protocol; final SendContext ctx; SendTask(Protocol protocol, SendContext ctx) { this.protocol = protocol; this.ctx = ctx; } public void run() { long timeRemaining = ctx.msg.getTimeRemainingNow(); if (timeRemaining <= 0) { replyError(ctx, ErrorCode.TIMEOUT, "Aborting transmission because zero time remains."); return; } byte[] payload; try { payload = protocol.encode(ctx.version, ctx.msg); } catch (Exception e) { StringWriter out = new StringWriter(); e.printStackTrace(new PrintWriter(out)); replyError(ctx, ErrorCode.ENCODE_ERROR, out.toString()); return; } if (payload == null || payload.length == 0) { replyError(ctx, ErrorCode.ENCODE_ERROR, "Protocol '" + ctx.msg.getProtocol() + "' failed to encode message."); return; } RPCSendAdapter adapter = getSendAdapter(ctx.version); if (adapter == null) { replyError(ctx, ErrorCode.INCOMPATIBLE_VERSION, "Can not send to version '" + ctx.version + "' recipient."); return; } for (RoutingNode recipient : ctx.recipients) { adapter.send(recipient, ctx.version, payload, timeRemaining); } } } /** * Implements a helper class for {@link RPCNetwork * 
encapsulating all the data required for sending a message, but postponing the call to {@link * RPCNetwork * been resolved. */ private static class SendContext implements RPCTarget.VersionHandler { final RPCNetwork net; final Message msg; final int traceLevel; final List<RoutingNode> recipients = new LinkedList<>(); boolean hasError = false; int pending; Version version; SendContext(RPCNetwork net, Message msg, List<RoutingNode> recipients) { this.net = net; this.msg = msg; this.traceLevel = this.msg.getTrace().getLevel(); this.recipients.addAll(recipients); this.pending = this.recipients.size(); this.version = this.net.getVersion(); } @Override public void handleVersion(Version version) { boolean shouldSend = false; synchronized (this) { if (version == null) { hasError = true; } else if (version.isBefore(this.version)) { this.version = version; } if (--pending == 0) { shouldSend = true; } } if (shouldSend) { net.send(this); } } } /** * Implements a helper class to invoke {@link RPCTargetPool * This is to untangle the target pool from the scheduler. */ private static class TargetPoolTask implements Runnable { final RPCTargetPool pool; final Task jrtTask; TargetPoolTask(RPCTargetPool pool, Supervisor orb) { this.pool = pool; this.jrtTask = orb.transport().selectThread().createTask(this); this.jrtTask.schedule(1.0); } @Override public void run() { pool.flushTargets(false); jrtTask.schedule(1.0); } } }
class RPCNetwork implements Network, MethodHandler { private static final Logger log = Logger.getLogger(RPCNetwork.class.getName()); private final AtomicBoolean destroyed = new AtomicBoolean(false); private final Identity identity; private final Supervisor orb; private final RPCTargetPool targetPool; private final RPCServicePool servicePool; private final Acceptor listener; private final Mirror mirror; private final Register register; private final TreeMap<Version, RPCSendAdapter> sendAdapters = new TreeMap<>(); private volatile NetworkOwner owner; private final SlobrokConfigSubscriber slobroksConfig; private final LinkedHashMap<String, Route> lruRouteMap = new LinkedHashMap<>(10000, 0.5f, true); private final ExecutorService executor = new ThreadPoolExecutor(getNumThreads(), getNumThreads(), 0L, TimeUnit.SECONDS, new LinkedBlockingQueue<>(), ThreadFactoryFactory.getDaemonThreadFactory("mbus.net")); private static int getNumThreads() { return Math.max(2, Runtime.getRuntime().availableProcessors()/2); } private static boolean shouldEnableTcpNodelay(RPCNetworkParams.Optimization optimization) { return optimization == RPCNetworkParams.Optimization.LATENCY; } /** * Create an RPCNetwork. The servicePrefix is combined with session names to create service names. 
If the service * prefix is 'a/b' and the session name is 'c', the resulting service name that identifies the session on the * message bus will be 'a/b/c' * * @param params a complete set of parameters * @param slobrokConfig subscriber for slobroks config */ private RPCNetwork(RPCNetworkParams params, SlobrokConfigSubscriber slobrokConfig) { this.slobroksConfig = slobrokConfig; identity = params.getIdentity(); orb = new Supervisor(new Transport("mbus-rpc-" + identity.getServicePrefix(), params.getNumNetworkThreads(), shouldEnableTcpNodelay(params.getOptimization()), params.getTransportEventsBeforeWakeup())); orb.setMaxInputBufferSize(params.getMaxInputBufferSize()); orb.setMaxOutputBufferSize(params.getMaxOutputBufferSize()); targetPool = new RPCTargetPool(params.getConnectionExpireSecs(), params.getNumTargetsPerSpec()); servicePool = new RPCServicePool(this, 4096); Method method = new Method("mbus.getVersion", "", "s", this); method.requireCapabilities(CapabilitySet.none()); method.methodDesc("Retrieves the message bus version."); method.returnDesc(0, "version", "The message bus version."); orb.addMethod(method); try { listener = orb.listen(new Spec(params.getListenPort())); } catch (ListenFailedException e) { orb.transport().shutdown().join(); throw new RuntimeException(e); } TargetPoolTask task = new TargetPoolTask(targetPool, orb); task.jrtTask.scheduleNow(); register = new Register(orb, slobrokConfig.getSlobroks(), identity.getHostname(), listener.port()); mirror = new Mirror(orb, slobrokConfig.getSlobroks()); } /** * Create an RPCNetwork. The servicePrefix is combined with session names to create service names. If the service * prefix is 'a/b' and the session name is 'c', the resulting service name that identifies the session on the * message bus will be 'a/b/c' * * @param params a complete set of parameters */ public RPCNetwork(RPCNetworkParams params) { this(params, params.getSlobroksConfig() != null ? 
new SlobrokConfigSubscriber(params.getSlobroksConfig()) : new SlobrokConfigSubscriber(params.getSlobrokConfigId())); } /** * The network uses a cache of RPC targets (see {@link RPCTargetPool}) that allows it to save time by reusing open * connections. It works by keeping a set of the most recently used targets open. Calling this method forces all * unused connections to close immediately. */ protected void flushTargetPool() { targetPool.flushTargets(true); } final Route getRoute(String routeString) { Route route = lruRouteMap.get(routeString); if (route == null) { route = Route.parse(routeString); lruRouteMap.put(routeString, route); } return new Route(route); } @Override public boolean waitUntilReady(double seconds) { int millis = (int) seconds * 1000; int i = 0; do { if (mirror.ready()) { if (i > 200) { log.log(Level.INFO, "network became ready (at "+i+" ms)"); } return true; } if ((i == 200) || ((i > 200) && ((i % 1000) == 0))) { log.log(Level.INFO, "waiting for network to become ready ("+i+" of "+millis+" ms)"); mirror.dumpState(); } try { i += 10; Thread.sleep(10); } catch (InterruptedException e) { } } while (i < millis); return false; } @Override public boolean allocServiceAddress(RoutingNode recipient) { Hop hop = recipient.getRoute().getHop(0); String service = hop.getServiceName(); Error error = resolveServiceAddress(recipient, service); if (error == null) { return true; } recipient.setError(error); return false; } @Override public void freeServiceAddress(RoutingNode recipient) { RPCTarget target = ((RPCServiceAddress)recipient.getServiceAddress()).getTarget(); if (target != null) { target.subRef(); } recipient.setServiceAddress(null); } @Override public void attach(NetworkOwner owner) { if (this.owner != null) { throw new IllegalStateException("Network is already attached to another owner."); } this.owner = owner; sendAdapters.put(new Version(6,149), new RPCSendV2(this)); } @Override public void registerSession(String session) { 
register.registerName(identity.getServicePrefix() + "/" + session); } @Override public void unregisterSession(String session) { register.unregisterName(identity.getServicePrefix() + "/" + session); } @Override public void sync() { orb.transport().sync(); } @Override public void shutdown() { destroy(); } @Override public String getConnectionSpec() { return "tcp/" + identity.getHostname() + ":" + listener.port(); } @Override public IMirror getMirror() { return mirror; } @Override public void invoke(Request request) { request.returnValues().add(new StringValue(getVersion().toString())); } @Override public void send(Message msg, List<RoutingNode> recipients) { SendContext ctx = new SendContext(this, msg, recipients); Duration timeout = Duration.ofMillis(ctx.msg.getTimeRemainingNow()); for (RoutingNode recipient : ctx.recipients) { RPCServiceAddress address = (RPCServiceAddress)recipient.getServiceAddress(); address.getTarget().resolveVersion(timeout, ctx); } } private static String buildRecipientListString(SendContext ctx) { return ctx.recipients.stream().map(r -> { if (!(r.getServiceAddress() instanceof RPCServiceAddress)) { return "<non-RPC service address>"; } RPCServiceAddress addr = (RPCServiceAddress)r.getServiceAddress(); return String.format("%s at %s", addr.getServiceName(), addr.getConnectionSpec()); }).collect(Collectors.joining(", ")); } /** * This method is a callback invoked after {@link * resolved. If all versions were resolved ahead of time, this method is invoked by the same thread as the former. * If not, this method is invoked by the network thread during the version callback. 
* * @param ctx all the required send-data */ private void send(SendContext ctx) { if (destroyed.get()) { replyError(ctx, ErrorCode.NETWORK_SHUTDOWN, "Network layer has performed shutdown."); } else if (ctx.hasError) { replyError(ctx, ErrorCode.HANDSHAKE_FAILED, String.format("An error occurred while resolving version of recipient(s) [%s] from host '%s'.", buildRecipientListString(ctx), identity.getHostname())); } else { new SendTask(owner.getProtocol(ctx.msg.getProtocol()), ctx).run(); } } /** * Sets the destroyed flag to true. The very first time this method is called, it cleans up all its dependencies. * Even if you retain a reference to this object, all of its content is allowed to be garbage collected. * * @return true if content existed and was destroyed */ public boolean destroy() { if (!destroyed.getAndSet(true)) { if (slobroksConfig != null) { slobroksConfig.shutdown(); } register.shutdown(); mirror.shutdown(); listener.shutdown().join(); orb.transport().shutdown().join(); targetPool.flushTargets(true); executor.shutdown(); return true; } return false; } private static final Version REPORTED_VERSION = deriveSupportedProtocolVersion(); /** * Returns the (protocol) version of this network. This gets called when the "mbus.getVersion" method is invoked * on this network, and is separated into its own function so that unit tests can override it to simulate other * versions than current. * * Note that this version reflects the highest supported <em>protocol</em> version, and is not necessarily * 1-1 with the actual Vespa release version of the underlying binary. * * @return the version to claim to be */ protected Version getVersion() { return REPORTED_VERSION; } /** * Resolves and assigns a service address for the given recipient using the given address. 
This is called by the * {@link * calls {@link * * @param recipient the recipient to assign the service address to * @param serviceName the name of the service to resolve * @return any error encountered, or null */ public Error resolveServiceAddress(RoutingNode recipient, String serviceName) { RPCServiceAddress ret = servicePool.resolve(serviceName); if (ret == null) { return new Error(ErrorCode.NO_ADDRESS_FOR_SERVICE, String.format("The address of service '%s' could not be resolved. It is not currently " + "registered with the Vespa name server. " + "The service must be having problems, or the routing configuration is wrong. " + "Address resolution attempted from host '%s'", serviceName, identity.getHostname())); } RPCTarget target = targetPool.getTarget(orb, ret); if (target == null) { return new Error(ErrorCode.CONNECTION_ERROR, String.format("Failed to connect to service '%s' from host '%s'.", serviceName, identity.getHostname())); } ret.setTarget(target); recipient.setServiceAddress(ret); return null; } /** * Determines and returns the send adapter that is compatible with the given version. If no adapter can be found, * this method returns null. * * @param version the version for which to return an adapter * @return the compatible adapter */ public RPCSendAdapter getSendAdapter(Version version) { Map.Entry<Version, RPCSendAdapter> lower = sendAdapters.floorEntry(version); return (lower != null) ? lower.getValue() : null; } /** * Deliver an error reply to the recipients of a {@link SendContext} in a way that avoids entanglement. 
* * @param ctx the send context that contains the recipient data * @param errCode the error code to return * @param errMsg the error string to return */ private void replyError(SendContext ctx, int errCode, String errMsg) { for (RoutingNode recipient : ctx.recipients) { Reply reply = new EmptyReply(); reply.getTrace().setLevel(ctx.traceLevel); reply.addError(new Error(errCode, errMsg)); recipient.handleReply(reply); } } /** Returns the owner of this network. */ NetworkOwner getOwner() { return owner; } /** Returns the identity of this network. */ public Identity getIdentity() { return identity; } /** Returns the port number this network listens to. */ public int getPort() { return listener.port(); } /** Returns the JRT supervisor. */ Supervisor getSupervisor() { return orb; } ExecutorService getExecutor() { return executor; } private class SendTask implements Runnable { final Protocol protocol; final SendContext ctx; SendTask(Protocol protocol, SendContext ctx) { this.protocol = protocol; this.ctx = ctx; } public void run() { long timeRemaining = ctx.msg.getTimeRemainingNow(); if (timeRemaining <= 0) { replyError(ctx, ErrorCode.TIMEOUT, "Aborting transmission because zero time remains."); return; } byte[] payload; try { payload = protocol.encode(ctx.version, ctx.msg); } catch (Exception e) { StringWriter out = new StringWriter(); e.printStackTrace(new PrintWriter(out)); replyError(ctx, ErrorCode.ENCODE_ERROR, out.toString()); return; } if (payload == null || payload.length == 0) { replyError(ctx, ErrorCode.ENCODE_ERROR, "Protocol '" + ctx.msg.getProtocol() + "' failed to encode message."); return; } RPCSendAdapter adapter = getSendAdapter(ctx.version); if (adapter == null) { replyError(ctx, ErrorCode.INCOMPATIBLE_VERSION, "Can not send to version '" + ctx.version + "' recipient."); return; } for (RoutingNode recipient : ctx.recipients) { adapter.send(recipient, ctx.version, payload, timeRemaining); } } } /** * Implements a helper class for {@link RPCNetwork * 
encapsulating all the data required for sending a message, but postponing the call to {@link * RPCNetwork * been resolved. */ private static class SendContext implements RPCTarget.VersionHandler { final RPCNetwork net; final Message msg; final int traceLevel; final List<RoutingNode> recipients = new LinkedList<>(); boolean hasError = false; int pending; Version version; SendContext(RPCNetwork net, Message msg, List<RoutingNode> recipients) { this.net = net; this.msg = msg; this.traceLevel = this.msg.getTrace().getLevel(); this.recipients.addAll(recipients); this.pending = this.recipients.size(); this.version = this.net.getVersion(); } @Override public void handleVersion(Version version) { boolean shouldSend = false; synchronized (this) { if (version == null) { hasError = true; } else if (version.isBefore(this.version)) { this.version = version; } if (--pending == 0) { shouldSend = true; } } if (shouldSend) { net.send(this); } } } /** * Implements a helper class to invoke {@link RPCTargetPool * This is to untangle the target pool from the scheduler. */ private static class TargetPoolTask implements Runnable { final RPCTargetPool pool; final Task jrtTask; TargetPoolTask(RPCTargetPool pool, Supervisor orb) { this.pool = pool; this.jrtTask = orb.transport().selectThread().createTask(this); this.jrtTask.schedule(1.0); } @Override public void run() { pool.flushTargets(false); jrtTask.schedule(1.0); } } }
It's entirely possible (though much less so _after_ this change than before) that we will require some future protocol bump, at which point we will need some form of negotiation. In fact, I think we'll likely want to _retain_ the versioning semantics introduced in this PR for that reason alone. This particular TODO is mostly lamenting the need for punching a hole through protocol abstraction layers (DocumentProtocol constraints leak down to the underlying MessageBus RPC code), rather than being about the change in versioning semantics itself. The way versioning works today is that all message serialization towards a peer over a specific RPC transport is sequenced after an initial `getVersion` RPC call to that peer (initiated by MessageBus). The _effective_ version used for encoding and decoding is then deterministically chosen on both sides as `min(my_version, their_version)`, with the assumption that both sides will have identical semantics for that version. A suitable `DocumentProtocol` implementation is selected for a message by picking the highest versioned factory that is lower than or equal to the shared version. This design choice complicates protocol transitions, as it requires us to have a notion of Vespa minor release versions within the code base. What we'd instead _ideally_ have is an exchange of explicit, protocol sets (`"v6"`, `"v8"` etc) (with some notion of protocol priority) where the chosen protocol is the highest priority protocol in the intersection of the peers' sets. But this would require some rather fundamental changes to the underlying MessageBus RPC networking code (and plenty of backwards compatibility handling), and this PR is large enough as it is 🙈
private static Version deriveSupportedProtocolVersion() { var maybeEnvVal = System.getenv("VESPA_MBUS_DOCUMENTAPI_USE_PROTOBUF"); if ("true".equals(maybeEnvVal) || "yes".equals(maybeEnvVal)) { return new Version(8, 304); } return new Version(8, 303); }
private static Version deriveSupportedProtocolVersion() { var maybeEnvVal = System.getenv("VESPA_MBUS_DOCUMENTAPI_USE_PROTOBUF"); if ("true".equals(maybeEnvVal) || "yes".equals(maybeEnvVal)) { return new Version(8, 310); } return new Version(8, 309); }
/**
 * An RPC-based implementation of the MessageBus {@code Network} interface. Owns the JRT
 * supervisor ({@code orb}), registers sessions in Slobrok, performs the {@code mbus.getVersion}
 * handshake with peers, and routes encoded message payloads to recipients through a
 * version-appropriate {@link RPCSendAdapter}.
 */
class RPCNetwork implements Network, MethodHandler {

    private static final Logger log = Logger.getLogger(RPCNetwork.class.getName());

    // Flips exactly once in destroy(); checked on every send to fail fast after shutdown.
    private final AtomicBoolean destroyed = new AtomicBoolean(false);
    private final Identity identity;
    private final Supervisor orb;
    private final RPCTargetPool targetPool;
    private final RPCServicePool servicePool;
    private final Acceptor listener;
    private final Mirror mirror;
    private final Register register;
    // Keyed by the lowest version each adapter supports; resolved with floorEntry() in getSendAdapter().
    private final TreeMap<Version, RPCSendAdapter> sendAdapters = new TreeMap<>();
    private volatile NetworkOwner owner;
    private final SlobrokConfigSubscriber slobroksConfig;
    // Access-ordered map used as a route-parse cache.
    // NOTE(review): no removeEldestEntry override is visible here, so 10000 is only an initial
    // capacity hint, not an eviction bound — confirm whether unbounded growth is intended.
    private final LinkedHashMap<String, Route> lruRouteMap = new LinkedHashMap<>(10000, 0.5f, true);
    private final ExecutorService executor =
            new ThreadPoolExecutor(getNumThreads(), getNumThreads(), 0L, TimeUnit.SECONDS,
                                   new LinkedBlockingQueue<>(),
                                   ThreadFactoryFactory.getDaemonThreadFactory("mbus.net"));

    /** Thread pool size: half the available processors, but never fewer than 2. */
    private static int getNumThreads() { return Math.max(2, Runtime.getRuntime().availableProcessors()/2); }

    /** TCP_NODELAY is only enabled when the network is tuned for latency rather than throughput. */
    private static boolean shouldEnableTcpNodelay(RPCNetworkParams.Optimization optimization) {
        return optimization == RPCNetworkParams.Optimization.LATENCY;
    }

    /**
     * Create an RPCNetwork. The servicePrefix is combined with session names to create service names.
     * If the service prefix is 'a/b' and the session name is 'c', the resulting service name that
     * identifies the session on the message bus will be 'a/b/c'.
     *
     * @param params a complete set of parameters
     * @param slobrokConfig subscriber for slobroks config
     */
    private RPCNetwork(RPCNetworkParams params, SlobrokConfigSubscriber slobrokConfig) {
        this.slobroksConfig = slobrokConfig;
        identity = params.getIdentity();
        orb = new Supervisor(new Transport("mbus-rpc-" + identity.getServicePrefix(),
                                           params.getNumNetworkThreads(),
                                           shouldEnableTcpNodelay(params.getOptimization()),
                                           params.getTransportEventsBeforeWakeup()));
        orb.setMaxInputBufferSize(params.getMaxInputBufferSize());
        orb.setMaxOutputBufferSize(params.getMaxOutputBufferSize());
        targetPool = new RPCTargetPool(params.getConnectionExpireSecs(), params.getNumTargetsPerSpec());
        servicePool = new RPCServicePool(this, 4096);
        // The version handshake must be callable by any peer, so it requires no capabilities.
        Method method = new Method("mbus.getVersion", "", "s", this);
        method.requireCapabilities(CapabilitySet.none());
        method.methodDesc("Retrieves the message bus version.");
        method.returnDesc(0, "version", "The message bus version.");
        orb.addMethod(method);
        try {
            listener = orb.listen(new Spec(params.getListenPort()));
        } catch (ListenFailedException e) {
            // Listening failed: tear down the transport before propagating, so no threads leak.
            orb.transport().shutdown().join();
            throw new RuntimeException(e);
        }
        TargetPoolTask task = new TargetPoolTask(targetPool, orb);
        task.jrtTask.scheduleNow();
        register = new Register(orb, slobrokConfig.getSlobroks(), identity.getHostname(), listener.port());
        mirror = new Mirror(orb, slobrokConfig.getSlobroks());
    }

    /**
     * Create an RPCNetwork. The servicePrefix is combined with session names to create service names.
     * If the service prefix is 'a/b' and the session name is 'c', the resulting service name that
     * identifies the session on the message bus will be 'a/b/c'.
     *
     * @param params a complete set of parameters
     */
    public RPCNetwork(RPCNetworkParams params) {
        this(params, params.getSlobroksConfig() != null ?
                     new SlobrokConfigSubscriber(params.getSlobroksConfig()) :
                     new SlobrokConfigSubscriber(params.getSlobrokConfigId()));
    }

    /**
     * The network uses a cache of RPC targets (see {@link RPCTargetPool}) that allows it to save time
     * by reusing open connections. It works by keeping a set of the most recently used targets open.
     * Calling this method forces all unused connections to close immediately.
     */
    protected void flushTargetPool() {
        targetPool.flushTargets(true);
    }

    /** Returns a copy of the route for the given string, parsing and caching it on first use. */
    final Route getRoute(String routeString) {
        Route route = lruRouteMap.get(routeString);
        if (route == null) {
            route = Route.parse(routeString);
            lruRouteMap.put(routeString, route);
        }
        // Defensive copy: callers may mutate the route they receive.
        return new Route(route);
    }

    @Override
    public boolean waitUntilReady(double seconds) {
        // NOTE(review): the cast binds to 'seconds' alone, so fractional seconds are truncated
        // BEFORE multiplying (e.g. 0.5 becomes 0 ms) — confirm this is intended.
        int millis = (int) seconds * 1000;
        int i = 0;
        do {
            if (mirror.ready()) {
                if (i > 200) {
                    log.log(Level.INFO, "network became ready (at "+i+" ms)");
                }
                return true;
            }
            // Log at 200 ms, then once every second, to avoid flooding the log while waiting.
            if ((i == 200) || ((i > 200) && ((i % 1000) == 0))) {
                log.log(Level.INFO, "waiting for network to become ready ("+i+" of "+millis+" ms)");
                mirror.dumpState();
            }
            try {
                i += 10;
                Thread.sleep(10);
            } catch (InterruptedException e) {
                // NOTE(review): interrupt is swallowed without re-interrupting the thread;
                // the wait continues until the deadline.
            }
        } while (i < millis);
        return false;
    }

    @Override
    public boolean allocServiceAddress(RoutingNode recipient) {
        Hop hop = recipient.getRoute().getHop(0);
        String service = hop.getServiceName();
        Error error = resolveServiceAddress(recipient, service);
        if (error == null) {
            return true;
        }
        recipient.setError(error);
        return false;
    }

    @Override
    public void freeServiceAddress(RoutingNode recipient) {
        // Release the ref-counted target acquired in resolveServiceAddress().
        RPCTarget target = ((RPCServiceAddress)recipient.getServiceAddress()).getTarget();
        if (target != null) {
            target.subRef();
        }
        recipient.setServiceAddress(null);
    }

    @Override
    public void attach(NetworkOwner owner) {
        if (this.owner != null) {
            throw new IllegalStateException("Network is already attached to another owner.");
        }
        this.owner = owner;
        // Single wire-format adapter; serves every peer version >= 6.149 via floorEntry() lookup.
        sendAdapters.put(new Version(6,149), new RPCSendV2(this));
    }

    @Override
    public void registerSession(String session) {
        register.registerName(identity.getServicePrefix() + "/" + session);
    }

    @Override
    public void unregisterSession(String session) {
        register.unregisterName(identity.getServicePrefix() + "/" + session);
    }

    @Override
    public void sync() {
        orb.transport().sync();
    }

    @Override
    public void shutdown() {
        destroy();
    }

    @Override
    public String getConnectionSpec() {
        return "tcp/" + identity.getHostname() + ":" + listener.port();
    }

    @Override
    public IMirror getMirror() {
        return mirror;
    }

    /** JRT handler for the "mbus.getVersion" method: returns this node's reported protocol version. */
    @Override
    public void invoke(Request request) {
        request.returnValues().add(new StringValue(getVersion().toString()));
    }

    @Override
    public void send(Message msg, List<RoutingNode> recipients) {
        SendContext ctx = new SendContext(this, msg, recipients);
        Duration timeout = Duration.ofMillis(ctx.msg.getTimeRemainingNow());
        // Kick off (possibly asynchronous) version resolution towards every recipient; the
        // SendContext callback performs the actual send once all versions are known.
        for (RoutingNode recipient : ctx.recipients) {
            RPCServiceAddress address = (RPCServiceAddress)recipient.getServiceAddress();
            address.getTarget().resolveVersion(timeout, ctx);
        }
    }

    /** Builds a human-readable "service at spec" list of the context's recipients, for error messages. */
    private static String buildRecipientListString(SendContext ctx) {
        return ctx.recipients.stream().map(r -> {
            if (!(r.getServiceAddress() instanceof RPCServiceAddress)) {
                return "<non-RPC service address>";
            }
            RPCServiceAddress addr = (RPCServiceAddress)r.getServiceAddress();
            return String.format("%s at %s", addr.getServiceName(), addr.getConnectionSpec());
        }).collect(Collectors.joining(", "));
    }

    /**
     * Callback invoked once the version of every recipient has been resolved. If all versions were
     * resolved ahead of time, this is invoked by the same thread as {@code send(Message, List)};
     * otherwise it is invoked by the network thread during the version callback.
     *
     * @param ctx all the required send-data
     */
    private void send(SendContext ctx) {
        if (destroyed.get()) {
            replyError(ctx, ErrorCode.NETWORK_SHUTDOWN,
                       "Network layer has performed shutdown.");
        } else if (ctx.hasError) {
            replyError(ctx, ErrorCode.HANDSHAKE_FAILED,
                       String.format("An error occurred while resolving version of recipient(s) [%s] from host '%s'.",
                                     buildRecipientListString(ctx), identity.getHostname()));
        } else {
            new SendTask(owner.getProtocol(ctx.msg.getProtocol()), ctx).run();
        }
    }

    /**
     * Sets the destroyed flag to true. The very first time this method is called, it cleans up all
     * its dependencies. Even if you retain a reference to this object, all of its content is allowed
     * to be garbage collected.
     *
     * @return true if content existed and was destroyed
     */
    public boolean destroy() {
        if (!destroyed.getAndSet(true)) {
            if (slobroksConfig != null) {
                slobroksConfig.shutdown();
            }
            register.shutdown();
            mirror.shutdown();
            listener.shutdown().join();
            orb.transport().shutdown().join();
            targetPool.flushTargets(true);
            executor.shutdown();
            return true;
        }
        return false;
    }

    private static final Version REPORTED_VERSION = deriveSupportedProtocolVersion();

    /**
     * Returns the (protocol) version of this network. This gets called when the "mbus.getVersion"
     * method is invoked on this network, and is separated into its own function so that unit tests
     * can override it to simulate other versions than current.
     *
     * Note that this version reflects the highest supported <em>protocol</em> version, and is not
     * necessarily 1-1 with the actual Vespa release version of the underlying binary.
     *
     * @return the version to claim to be
     */
    protected Version getVersion() {
        return REPORTED_VERSION;
    }

    /**
     * Resolves and assigns a service address for the given recipient using the given service name.
     *
     * @param recipient the recipient to assign the service address to
     * @param serviceName the name of the service to resolve
     * @return any error encountered, or null on success
     */
    public Error resolveServiceAddress(RoutingNode recipient, String serviceName) {
        RPCServiceAddress ret = servicePool.resolve(serviceName);
        if (ret == null) {
            return new Error(ErrorCode.NO_ADDRESS_FOR_SERVICE,
                             String.format("The address of service '%s' could not be resolved. It is not currently " +
                                           "registered with the Vespa name server. " +
                                           "The service must be having problems, or the routing configuration is wrong. " +
                                           "Address resolution attempted from host '%s'",
                                           serviceName, identity.getHostname()));
        }
        RPCTarget target = targetPool.getTarget(orb, ret);
        if (target == null) {
            return new Error(ErrorCode.CONNECTION_ERROR,
                             String.format("Failed to connect to service '%s' from host '%s'.",
                                           serviceName, identity.getHostname()));
        }
        ret.setTarget(target);
        recipient.setServiceAddress(ret);
        return null;
    }

    /**
     * Determines and returns the send adapter that is compatible with the given version. If no
     * adapter can be found, this method returns null.
     *
     * @param version the version for which to return an adapter
     * @return the compatible adapter, or null
     */
    public RPCSendAdapter getSendAdapter(Version version) {
        Map.Entry<Version, RPCSendAdapter> lower = sendAdapters.floorEntry(version);
        return (lower != null) ? lower.getValue() : null;
    }

    /**
     * Delivers an error reply to every recipient of a {@link SendContext}.
     *
     * @param ctx the send context that contains the recipient data
     * @param errCode the error code to return
     * @param errMsg the error string to return
     */
    private void replyError(SendContext ctx, int errCode, String errMsg) {
        for (RoutingNode recipient : ctx.recipients) {
            Reply reply = new EmptyReply();
            reply.getTrace().setLevel(ctx.traceLevel);
            reply.addError(new Error(errCode, errMsg));
            recipient.handleReply(reply);
        }
    }

    /** Returns the owner of this network. */
    NetworkOwner getOwner() {
        return owner;
    }

    /** Returns the identity of this network. */
    public Identity getIdentity() {
        return identity;
    }

    /** Returns the port number this network listens to. */
    public int getPort() {
        return listener.port();
    }

    /** Returns the JRT supervisor. */
    Supervisor getSupervisor() {
        return orb;
    }

    ExecutorService getExecutor() {
        return executor;
    }

    /** Encodes a message for the context's negotiated version and hands it to the send adapter. */
    private class SendTask implements Runnable {

        final Protocol protocol;
        final SendContext ctx;

        SendTask(Protocol protocol, SendContext ctx) {
            this.protocol = protocol;
            this.ctx = ctx;
        }

        public void run() {
            long timeRemaining = ctx.msg.getTimeRemainingNow();
            if (timeRemaining <= 0) {
                replyError(ctx, ErrorCode.TIMEOUT,
                           "Aborting transmission because zero time remains.");
                return;
            }
            byte[] payload;
            try {
                payload = protocol.encode(ctx.version, ctx.msg);
            } catch (Exception e) {
                // Surface the full stack trace to the sender via the error reply.
                StringWriter out = new StringWriter();
                e.printStackTrace(new PrintWriter(out));
                replyError(ctx, ErrorCode.ENCODE_ERROR, out.toString());
                return;
            }
            if (payload == null || payload.length == 0) {
                replyError(ctx, ErrorCode.ENCODE_ERROR,
                           "Protocol '" + ctx.msg.getProtocol() + "' failed to encode message.");
                return;
            }
            RPCSendAdapter adapter = getSendAdapter(ctx.version);
            if (adapter == null) {
                replyError(ctx, ErrorCode.INCOMPATIBLE_VERSION,
                           "Can not send to version '" + ctx.version + "' recipient.");
                return;
            }
            for (RoutingNode recipient : ctx.recipients) {
                adapter.send(recipient, ctx.version, payload, timeRemaining);
            }
        }
    }

    /**
     * Collects the data required for sending one message to a set of recipients, postponing the
     * actual send until the version of every recipient has been resolved. The effective version is
     * the minimum of this network's own version and every recipient's version.
     */
    private static class SendContext implements RPCTarget.VersionHandler {

        final RPCNetwork net;
        final Message msg;
        final int traceLevel;
        final List<RoutingNode> recipients = new LinkedList<>();
        boolean hasError = false;
        int pending;    // number of recipients whose version is still unresolved
        Version version; // running minimum of resolved versions; starts at our own version

        SendContext(RPCNetwork net, Message msg, List<RoutingNode> recipients) {
            this.net = net;
            this.msg = msg;
            this.traceLevel = this.msg.getTrace().getLevel();
            this.recipients.addAll(recipients);
            this.pending = this.recipients.size();
            this.version = this.net.getVersion();
        }

        @Override
        public void handleVersion(Version version) {
            boolean shouldSend = false;
            // Callbacks may arrive concurrently from multiple network threads; only the thread
            // that observes pending hitting zero performs the send, outside the lock.
            synchronized (this) {
                if (version == null) {
                    hasError = true;
                } else if (version.isBefore(this.version)) {
                    this.version = version;
                }
                if (--pending == 0) {
                    shouldSend = true;
                }
            }
            if (shouldSend) {
                net.send(this);
            }
        }
    }

    /**
     * Periodically flushes expired targets from a {@link RPCTargetPool} on a JRT scheduler thread.
     * This untangles the target pool from the scheduler.
     */
    private static class TargetPoolTask implements Runnable {

        final RPCTargetPool pool;
        final Task jrtTask;

        TargetPoolTask(RPCTargetPool pool, Supervisor orb) {
            this.pool = pool;
            this.jrtTask = orb.transport().selectThread().createTask(this);
            this.jrtTask.schedule(1.0);
        }

        @Override
        public void run() {
            pool.flushTargets(false);
            jrtTask.schedule(1.0); // reschedule: runs once per second
        }
    }
}
/**
 * An RPC-based implementation of the MessageBus {@code Network} interface. Owns the JRT
 * supervisor ({@code orb}), registers sessions in Slobrok, performs the {@code mbus.getVersion}
 * handshake with peers, and routes encoded message payloads to recipients through a
 * version-appropriate {@link RPCSendAdapter}.
 */
class RPCNetwork implements Network, MethodHandler {

    private static final Logger log = Logger.getLogger(RPCNetwork.class.getName());

    // Flips exactly once in destroy(); checked on every send to fail fast after shutdown.
    private final AtomicBoolean destroyed = new AtomicBoolean(false);
    private final Identity identity;
    private final Supervisor orb;
    private final RPCTargetPool targetPool;
    private final RPCServicePool servicePool;
    private final Acceptor listener;
    private final Mirror mirror;
    private final Register register;
    // Keyed by the lowest version each adapter supports; resolved with floorEntry() in getSendAdapter().
    private final TreeMap<Version, RPCSendAdapter> sendAdapters = new TreeMap<>();
    private volatile NetworkOwner owner;
    private final SlobrokConfigSubscriber slobroksConfig;
    // Access-ordered map used as a route-parse cache.
    // NOTE(review): no removeEldestEntry override is visible here, so 10000 is only an initial
    // capacity hint, not an eviction bound — confirm whether unbounded growth is intended.
    private final LinkedHashMap<String, Route> lruRouteMap = new LinkedHashMap<>(10000, 0.5f, true);
    private final ExecutorService executor =
            new ThreadPoolExecutor(getNumThreads(), getNumThreads(), 0L, TimeUnit.SECONDS,
                                   new LinkedBlockingQueue<>(),
                                   ThreadFactoryFactory.getDaemonThreadFactory("mbus.net"));

    /** Thread pool size: half the available processors, but never fewer than 2. */
    private static int getNumThreads() { return Math.max(2, Runtime.getRuntime().availableProcessors()/2); }

    /** TCP_NODELAY is only enabled when the network is tuned for latency rather than throughput. */
    private static boolean shouldEnableTcpNodelay(RPCNetworkParams.Optimization optimization) {
        return optimization == RPCNetworkParams.Optimization.LATENCY;
    }

    /**
     * Create an RPCNetwork. The servicePrefix is combined with session names to create service names.
     * If the service prefix is 'a/b' and the session name is 'c', the resulting service name that
     * identifies the session on the message bus will be 'a/b/c'.
     *
     * @param params a complete set of parameters
     * @param slobrokConfig subscriber for slobroks config
     */
    private RPCNetwork(RPCNetworkParams params, SlobrokConfigSubscriber slobrokConfig) {
        this.slobroksConfig = slobrokConfig;
        identity = params.getIdentity();
        orb = new Supervisor(new Transport("mbus-rpc-" + identity.getServicePrefix(),
                                           params.getNumNetworkThreads(),
                                           shouldEnableTcpNodelay(params.getOptimization()),
                                           params.getTransportEventsBeforeWakeup()));
        orb.setMaxInputBufferSize(params.getMaxInputBufferSize());
        orb.setMaxOutputBufferSize(params.getMaxOutputBufferSize());
        targetPool = new RPCTargetPool(params.getConnectionExpireSecs(), params.getNumTargetsPerSpec());
        servicePool = new RPCServicePool(this, 4096);
        // The version handshake must be callable by any peer, so it requires no capabilities.
        Method method = new Method("mbus.getVersion", "", "s", this);
        method.requireCapabilities(CapabilitySet.none());
        method.methodDesc("Retrieves the message bus version.");
        method.returnDesc(0, "version", "The message bus version.");
        orb.addMethod(method);
        try {
            listener = orb.listen(new Spec(params.getListenPort()));
        } catch (ListenFailedException e) {
            // Listening failed: tear down the transport before propagating, so no threads leak.
            orb.transport().shutdown().join();
            throw new RuntimeException(e);
        }
        TargetPoolTask task = new TargetPoolTask(targetPool, orb);
        task.jrtTask.scheduleNow();
        register = new Register(orb, slobrokConfig.getSlobroks(), identity.getHostname(), listener.port());
        mirror = new Mirror(orb, slobrokConfig.getSlobroks());
    }

    /**
     * Create an RPCNetwork. The servicePrefix is combined with session names to create service names.
     * If the service prefix is 'a/b' and the session name is 'c', the resulting service name that
     * identifies the session on the message bus will be 'a/b/c'.
     *
     * @param params a complete set of parameters
     */
    public RPCNetwork(RPCNetworkParams params) {
        this(params, params.getSlobroksConfig() != null ?
                     new SlobrokConfigSubscriber(params.getSlobroksConfig()) :
                     new SlobrokConfigSubscriber(params.getSlobrokConfigId()));
    }

    /**
     * The network uses a cache of RPC targets (see {@link RPCTargetPool}) that allows it to save time
     * by reusing open connections. It works by keeping a set of the most recently used targets open.
     * Calling this method forces all unused connections to close immediately.
     */
    protected void flushTargetPool() {
        targetPool.flushTargets(true);
    }

    /** Returns a copy of the route for the given string, parsing and caching it on first use. */
    final Route getRoute(String routeString) {
        Route route = lruRouteMap.get(routeString);
        if (route == null) {
            route = Route.parse(routeString);
            lruRouteMap.put(routeString, route);
        }
        // Defensive copy: callers may mutate the route they receive.
        return new Route(route);
    }

    @Override
    public boolean waitUntilReady(double seconds) {
        // NOTE(review): the cast binds to 'seconds' alone, so fractional seconds are truncated
        // BEFORE multiplying (e.g. 0.5 becomes 0 ms) — confirm this is intended.
        int millis = (int) seconds * 1000;
        int i = 0;
        do {
            if (mirror.ready()) {
                if (i > 200) {
                    log.log(Level.INFO, "network became ready (at "+i+" ms)");
                }
                return true;
            }
            // Log at 200 ms, then once every second, to avoid flooding the log while waiting.
            if ((i == 200) || ((i > 200) && ((i % 1000) == 0))) {
                log.log(Level.INFO, "waiting for network to become ready ("+i+" of "+millis+" ms)");
                mirror.dumpState();
            }
            try {
                i += 10;
                Thread.sleep(10);
            } catch (InterruptedException e) {
                // NOTE(review): interrupt is swallowed without re-interrupting the thread;
                // the wait continues until the deadline.
            }
        } while (i < millis);
        return false;
    }

    @Override
    public boolean allocServiceAddress(RoutingNode recipient) {
        Hop hop = recipient.getRoute().getHop(0);
        String service = hop.getServiceName();
        Error error = resolveServiceAddress(recipient, service);
        if (error == null) {
            return true;
        }
        recipient.setError(error);
        return false;
    }

    @Override
    public void freeServiceAddress(RoutingNode recipient) {
        // Release the ref-counted target acquired in resolveServiceAddress().
        RPCTarget target = ((RPCServiceAddress)recipient.getServiceAddress()).getTarget();
        if (target != null) {
            target.subRef();
        }
        recipient.setServiceAddress(null);
    }

    @Override
    public void attach(NetworkOwner owner) {
        if (this.owner != null) {
            throw new IllegalStateException("Network is already attached to another owner.");
        }
        this.owner = owner;
        // Single wire-format adapter; serves every peer version >= 6.149 via floorEntry() lookup.
        sendAdapters.put(new Version(6,149), new RPCSendV2(this));
    }

    @Override
    public void registerSession(String session) {
        register.registerName(identity.getServicePrefix() + "/" + session);
    }

    @Override
    public void unregisterSession(String session) {
        register.unregisterName(identity.getServicePrefix() + "/" + session);
    }

    @Override
    public void sync() {
        orb.transport().sync();
    }

    @Override
    public void shutdown() {
        destroy();
    }

    @Override
    public String getConnectionSpec() {
        return "tcp/" + identity.getHostname() + ":" + listener.port();
    }

    @Override
    public IMirror getMirror() {
        return mirror;
    }

    /** JRT handler for the "mbus.getVersion" method: returns this node's reported protocol version. */
    @Override
    public void invoke(Request request) {
        request.returnValues().add(new StringValue(getVersion().toString()));
    }

    @Override
    public void send(Message msg, List<RoutingNode> recipients) {
        SendContext ctx = new SendContext(this, msg, recipients);
        Duration timeout = Duration.ofMillis(ctx.msg.getTimeRemainingNow());
        // Kick off (possibly asynchronous) version resolution towards every recipient; the
        // SendContext callback performs the actual send once all versions are known.
        for (RoutingNode recipient : ctx.recipients) {
            RPCServiceAddress address = (RPCServiceAddress)recipient.getServiceAddress();
            address.getTarget().resolveVersion(timeout, ctx);
        }
    }

    /** Builds a human-readable "service at spec" list of the context's recipients, for error messages. */
    private static String buildRecipientListString(SendContext ctx) {
        return ctx.recipients.stream().map(r -> {
            if (!(r.getServiceAddress() instanceof RPCServiceAddress)) {
                return "<non-RPC service address>";
            }
            RPCServiceAddress addr = (RPCServiceAddress)r.getServiceAddress();
            return String.format("%s at %s", addr.getServiceName(), addr.getConnectionSpec());
        }).collect(Collectors.joining(", "));
    }

    /**
     * Callback invoked once the version of every recipient has been resolved. If all versions were
     * resolved ahead of time, this is invoked by the same thread as {@code send(Message, List)};
     * otherwise it is invoked by the network thread during the version callback.
     *
     * @param ctx all the required send-data
     */
    private void send(SendContext ctx) {
        if (destroyed.get()) {
            replyError(ctx, ErrorCode.NETWORK_SHUTDOWN,
                       "Network layer has performed shutdown.");
        } else if (ctx.hasError) {
            replyError(ctx, ErrorCode.HANDSHAKE_FAILED,
                       String.format("An error occurred while resolving version of recipient(s) [%s] from host '%s'.",
                                     buildRecipientListString(ctx), identity.getHostname()));
        } else {
            new SendTask(owner.getProtocol(ctx.msg.getProtocol()), ctx).run();
        }
    }

    /**
     * Sets the destroyed flag to true. The very first time this method is called, it cleans up all
     * its dependencies. Even if you retain a reference to this object, all of its content is allowed
     * to be garbage collected.
     *
     * @return true if content existed and was destroyed
     */
    public boolean destroy() {
        if (!destroyed.getAndSet(true)) {
            if (slobroksConfig != null) {
                slobroksConfig.shutdown();
            }
            register.shutdown();
            mirror.shutdown();
            listener.shutdown().join();
            orb.transport().shutdown().join();
            targetPool.flushTargets(true);
            executor.shutdown();
            return true;
        }
        return false;
    }

    private static final Version REPORTED_VERSION = deriveSupportedProtocolVersion();

    /**
     * Returns the (protocol) version of this network. This gets called when the "mbus.getVersion"
     * method is invoked on this network, and is separated into its own function so that unit tests
     * can override it to simulate other versions than current.
     *
     * Note that this version reflects the highest supported <em>protocol</em> version, and is not
     * necessarily 1-1 with the actual Vespa release version of the underlying binary.
     *
     * @return the version to claim to be
     */
    protected Version getVersion() {
        return REPORTED_VERSION;
    }

    /**
     * Resolves and assigns a service address for the given recipient using the given service name.
     *
     * @param recipient the recipient to assign the service address to
     * @param serviceName the name of the service to resolve
     * @return any error encountered, or null on success
     */
    public Error resolveServiceAddress(RoutingNode recipient, String serviceName) {
        RPCServiceAddress ret = servicePool.resolve(serviceName);
        if (ret == null) {
            return new Error(ErrorCode.NO_ADDRESS_FOR_SERVICE,
                             String.format("The address of service '%s' could not be resolved. It is not currently " +
                                           "registered with the Vespa name server. " +
                                           "The service must be having problems, or the routing configuration is wrong. " +
                                           "Address resolution attempted from host '%s'",
                                           serviceName, identity.getHostname()));
        }
        RPCTarget target = targetPool.getTarget(orb, ret);
        if (target == null) {
            return new Error(ErrorCode.CONNECTION_ERROR,
                             String.format("Failed to connect to service '%s' from host '%s'.",
                                           serviceName, identity.getHostname()));
        }
        ret.setTarget(target);
        recipient.setServiceAddress(ret);
        return null;
    }

    /**
     * Determines and returns the send adapter that is compatible with the given version. If no
     * adapter can be found, this method returns null.
     *
     * @param version the version for which to return an adapter
     * @return the compatible adapter, or null
     */
    public RPCSendAdapter getSendAdapter(Version version) {
        Map.Entry<Version, RPCSendAdapter> lower = sendAdapters.floorEntry(version);
        return (lower != null) ? lower.getValue() : null;
    }

    /**
     * Delivers an error reply to every recipient of a {@link SendContext}.
     *
     * @param ctx the send context that contains the recipient data
     * @param errCode the error code to return
     * @param errMsg the error string to return
     */
    private void replyError(SendContext ctx, int errCode, String errMsg) {
        for (RoutingNode recipient : ctx.recipients) {
            Reply reply = new EmptyReply();
            reply.getTrace().setLevel(ctx.traceLevel);
            reply.addError(new Error(errCode, errMsg));
            recipient.handleReply(reply);
        }
    }

    /** Returns the owner of this network. */
    NetworkOwner getOwner() {
        return owner;
    }

    /** Returns the identity of this network. */
    public Identity getIdentity() {
        return identity;
    }

    /** Returns the port number this network listens to. */
    public int getPort() {
        return listener.port();
    }

    /** Returns the JRT supervisor. */
    Supervisor getSupervisor() {
        return orb;
    }

    ExecutorService getExecutor() {
        return executor;
    }

    /** Encodes a message for the context's negotiated version and hands it to the send adapter. */
    private class SendTask implements Runnable {

        final Protocol protocol;
        final SendContext ctx;

        SendTask(Protocol protocol, SendContext ctx) {
            this.protocol = protocol;
            this.ctx = ctx;
        }

        public void run() {
            long timeRemaining = ctx.msg.getTimeRemainingNow();
            if (timeRemaining <= 0) {
                replyError(ctx, ErrorCode.TIMEOUT,
                           "Aborting transmission because zero time remains.");
                return;
            }
            byte[] payload;
            try {
                payload = protocol.encode(ctx.version, ctx.msg);
            } catch (Exception e) {
                // Surface the full stack trace to the sender via the error reply.
                StringWriter out = new StringWriter();
                e.printStackTrace(new PrintWriter(out));
                replyError(ctx, ErrorCode.ENCODE_ERROR, out.toString());
                return;
            }
            if (payload == null || payload.length == 0) {
                replyError(ctx, ErrorCode.ENCODE_ERROR,
                           "Protocol '" + ctx.msg.getProtocol() + "' failed to encode message.");
                return;
            }
            RPCSendAdapter adapter = getSendAdapter(ctx.version);
            if (adapter == null) {
                replyError(ctx, ErrorCode.INCOMPATIBLE_VERSION,
                           "Can not send to version '" + ctx.version + "' recipient.");
                return;
            }
            for (RoutingNode recipient : ctx.recipients) {
                adapter.send(recipient, ctx.version, payload, timeRemaining);
            }
        }
    }

    /**
     * Collects the data required for sending one message to a set of recipients, postponing the
     * actual send until the version of every recipient has been resolved. The effective version is
     * the minimum of this network's own version and every recipient's version.
     */
    private static class SendContext implements RPCTarget.VersionHandler {

        final RPCNetwork net;
        final Message msg;
        final int traceLevel;
        final List<RoutingNode> recipients = new LinkedList<>();
        boolean hasError = false;
        int pending;    // number of recipients whose version is still unresolved
        Version version; // running minimum of resolved versions; starts at our own version

        SendContext(RPCNetwork net, Message msg, List<RoutingNode> recipients) {
            this.net = net;
            this.msg = msg;
            this.traceLevel = this.msg.getTrace().getLevel();
            this.recipients.addAll(recipients);
            this.pending = this.recipients.size();
            this.version = this.net.getVersion();
        }

        @Override
        public void handleVersion(Version version) {
            boolean shouldSend = false;
            // Callbacks may arrive concurrently from multiple network threads; only the thread
            // that observes pending hitting zero performs the send, outside the lock.
            synchronized (this) {
                if (version == null) {
                    hasError = true;
                } else if (version.isBefore(this.version)) {
                    this.version = version;
                }
                if (--pending == 0) {
                    shouldSend = true;
                }
            }
            if (shouldSend) {
                net.send(this);
            }
        }
    }

    /**
     * Periodically flushes expired targets from a {@link RPCTargetPool} on a JRT scheduler thread.
     * This untangles the target pool from the scheduler.
     */
    private static class TargetPoolTask implements Runnable {

        final RPCTargetPool pool;
        final Task jrtTask;

        TargetPoolTask(RPCTargetPool pool, Supervisor orb) {
            this.pool = pool;
            this.jrtTask = orb.transport().selectThread().createTask(this);
            this.jrtTask.schedule(1.0);
        }

        @Override
        public void run() {
            pool.flushTargets(false);
            jrtTask.schedule(1.0); // reschedule: runs once per second
        }
    }
}
Also, enabling the Protobuf protocol by default would just change the reported version to always be one that is >= the version that triggers both sides to use the correct codecs. The same dance could then be done again later if we wanted to add a new protocol with a version > the Protobuf protocol version.
/**
 * Computes the protocol version this node reports to peers via the {@code mbus.getVersion} handshake.
 * <p>
 * Setting the environment variable {@code VESPA_MBUS_DOCUMENTAPI_USE_PROTOBUF} to {@code true} or
 * {@code yes} opts this node in to reporting the higher protobuf-capable protocol version (8.304);
 * otherwise the legacy version (8.303) is reported. Peers use the minimum of the two reported
 * versions, so the new protocol only takes effect once both sides opt in.
 *
 * @return the protocol version to report to peers
 */
private static Version deriveSupportedProtocolVersion() {
    var maybeEnvVal = System.getenv("VESPA_MBUS_DOCUMENTAPI_USE_PROTOBUF");
    // Robustness: tolerate surrounding whitespace and arbitrary casing of the opt-in values
    // ("TRUE", " yes " etc.); any other value, or an unset variable, keeps the legacy version.
    var normalized = (maybeEnvVal == null) ? "" : maybeEnvVal.trim();
    if ("true".equalsIgnoreCase(normalized) || "yes".equalsIgnoreCase(normalized)) {
        return new Version(8, 304);
    }
    return new Version(8, 303);
}
/**
 * Computes the protocol version this node reports to peers via the {@code mbus.getVersion} handshake.
 * <p>
 * Setting the environment variable {@code VESPA_MBUS_DOCUMENTAPI_USE_PROTOBUF} to {@code true} or
 * {@code yes} opts this node in to reporting the higher protobuf-capable protocol version (8.310);
 * otherwise the legacy version (8.309) is reported. Peers use the minimum of the two reported
 * versions, so the new protocol only takes effect once both sides opt in.
 *
 * @return the protocol version to report to peers
 */
private static Version deriveSupportedProtocolVersion() {
    var maybeEnvVal = System.getenv("VESPA_MBUS_DOCUMENTAPI_USE_PROTOBUF");
    // Robustness: tolerate surrounding whitespace and arbitrary casing of the opt-in values
    // ("TRUE", " yes " etc.); any other value, or an unset variable, keeps the legacy version.
    var normalized = (maybeEnvVal == null) ? "" : maybeEnvVal.trim();
    if ("true".equalsIgnoreCase(normalized) || "yes".equalsIgnoreCase(normalized)) {
        return new Version(8, 310);
    }
    return new Version(8, 309);
}
class RPCNetwork implements Network, MethodHandler { private static final Logger log = Logger.getLogger(RPCNetwork.class.getName()); private final AtomicBoolean destroyed = new AtomicBoolean(false); private final Identity identity; private final Supervisor orb; private final RPCTargetPool targetPool; private final RPCServicePool servicePool; private final Acceptor listener; private final Mirror mirror; private final Register register; private final TreeMap<Version, RPCSendAdapter> sendAdapters = new TreeMap<>(); private volatile NetworkOwner owner; private final SlobrokConfigSubscriber slobroksConfig; private final LinkedHashMap<String, Route> lruRouteMap = new LinkedHashMap<>(10000, 0.5f, true); private final ExecutorService executor = new ThreadPoolExecutor(getNumThreads(), getNumThreads(), 0L, TimeUnit.SECONDS, new LinkedBlockingQueue<>(), ThreadFactoryFactory.getDaemonThreadFactory("mbus.net")); private static int getNumThreads() { return Math.max(2, Runtime.getRuntime().availableProcessors()/2); } private static boolean shouldEnableTcpNodelay(RPCNetworkParams.Optimization optimization) { return optimization == RPCNetworkParams.Optimization.LATENCY; } /** * Create an RPCNetwork. The servicePrefix is combined with session names to create service names. 
If the service * prefix is 'a/b' and the session name is 'c', the resulting service name that identifies the session on the * message bus will be 'a/b/c' * * @param params a complete set of parameters * @param slobrokConfig subscriber for slobroks config */ private RPCNetwork(RPCNetworkParams params, SlobrokConfigSubscriber slobrokConfig) { this.slobroksConfig = slobrokConfig; identity = params.getIdentity(); orb = new Supervisor(new Transport("mbus-rpc-" + identity.getServicePrefix(), params.getNumNetworkThreads(), shouldEnableTcpNodelay(params.getOptimization()), params.getTransportEventsBeforeWakeup())); orb.setMaxInputBufferSize(params.getMaxInputBufferSize()); orb.setMaxOutputBufferSize(params.getMaxOutputBufferSize()); targetPool = new RPCTargetPool(params.getConnectionExpireSecs(), params.getNumTargetsPerSpec()); servicePool = new RPCServicePool(this, 4096); Method method = new Method("mbus.getVersion", "", "s", this); method.requireCapabilities(CapabilitySet.none()); method.methodDesc("Retrieves the message bus version."); method.returnDesc(0, "version", "The message bus version."); orb.addMethod(method); try { listener = orb.listen(new Spec(params.getListenPort())); } catch (ListenFailedException e) { orb.transport().shutdown().join(); throw new RuntimeException(e); } TargetPoolTask task = new TargetPoolTask(targetPool, orb); task.jrtTask.scheduleNow(); register = new Register(orb, slobrokConfig.getSlobroks(), identity.getHostname(), listener.port()); mirror = new Mirror(orb, slobrokConfig.getSlobroks()); } /** * Create an RPCNetwork. The servicePrefix is combined with session names to create service names. If the service * prefix is 'a/b' and the session name is 'c', the resulting service name that identifies the session on the * message bus will be 'a/b/c' * * @param params a complete set of parameters */ public RPCNetwork(RPCNetworkParams params) { this(params, params.getSlobroksConfig() != null ? 
new SlobrokConfigSubscriber(params.getSlobroksConfig()) : new SlobrokConfigSubscriber(params.getSlobrokConfigId())); } /** * The network uses a cache of RPC targets (see {@link RPCTargetPool}) that allows it to save time by reusing open * connections. It works by keeping a set of the most recently used targets open. Calling this method forces all * unused connections to close immediately. */ protected void flushTargetPool() { targetPool.flushTargets(true); } final Route getRoute(String routeString) { Route route = lruRouteMap.get(routeString); if (route == null) { route = Route.parse(routeString); lruRouteMap.put(routeString, route); } return new Route(route); } @Override public boolean waitUntilReady(double seconds) { int millis = (int) seconds * 1000; int i = 0; do { if (mirror.ready()) { if (i > 200) { log.log(Level.INFO, "network became ready (at "+i+" ms)"); } return true; } if ((i == 200) || ((i > 200) && ((i % 1000) == 0))) { log.log(Level.INFO, "waiting for network to become ready ("+i+" of "+millis+" ms)"); mirror.dumpState(); } try { i += 10; Thread.sleep(10); } catch (InterruptedException e) { } } while (i < millis); return false; } @Override public boolean allocServiceAddress(RoutingNode recipient) { Hop hop = recipient.getRoute().getHop(0); String service = hop.getServiceName(); Error error = resolveServiceAddress(recipient, service); if (error == null) { return true; } recipient.setError(error); return false; } @Override public void freeServiceAddress(RoutingNode recipient) { RPCTarget target = ((RPCServiceAddress)recipient.getServiceAddress()).getTarget(); if (target != null) { target.subRef(); } recipient.setServiceAddress(null); } @Override public void attach(NetworkOwner owner) { if (this.owner != null) { throw new IllegalStateException("Network is already attached to another owner."); } this.owner = owner; sendAdapters.put(new Version(6,149), new RPCSendV2(this)); } @Override public void registerSession(String session) { 
register.registerName(identity.getServicePrefix() + "/" + session); } @Override public void unregisterSession(String session) { register.unregisterName(identity.getServicePrefix() + "/" + session); } @Override public void sync() { orb.transport().sync(); } @Override public void shutdown() { destroy(); } @Override public String getConnectionSpec() { return "tcp/" + identity.getHostname() + ":" + listener.port(); } @Override public IMirror getMirror() { return mirror; } @Override public void invoke(Request request) { request.returnValues().add(new StringValue(getVersion().toString())); } @Override public void send(Message msg, List<RoutingNode> recipients) { SendContext ctx = new SendContext(this, msg, recipients); Duration timeout = Duration.ofMillis(ctx.msg.getTimeRemainingNow()); for (RoutingNode recipient : ctx.recipients) { RPCServiceAddress address = (RPCServiceAddress)recipient.getServiceAddress(); address.getTarget().resolveVersion(timeout, ctx); } } private static String buildRecipientListString(SendContext ctx) { return ctx.recipients.stream().map(r -> { if (!(r.getServiceAddress() instanceof RPCServiceAddress)) { return "<non-RPC service address>"; } RPCServiceAddress addr = (RPCServiceAddress)r.getServiceAddress(); return String.format("%s at %s", addr.getServiceName(), addr.getConnectionSpec()); }).collect(Collectors.joining(", ")); } /** * This method is a callback invoked after {@link * resolved. If all versions were resolved ahead of time, this method is invoked by the same thread as the former. * If not, this method is invoked by the network thread during the version callback. 
* * @param ctx all the required send-data */ private void send(SendContext ctx) { if (destroyed.get()) { replyError(ctx, ErrorCode.NETWORK_SHUTDOWN, "Network layer has performed shutdown."); } else if (ctx.hasError) { replyError(ctx, ErrorCode.HANDSHAKE_FAILED, String.format("An error occurred while resolving version of recipient(s) [%s] from host '%s'.", buildRecipientListString(ctx), identity.getHostname())); } else { new SendTask(owner.getProtocol(ctx.msg.getProtocol()), ctx).run(); } } /** * Sets the destroyed flag to true. The very first time this method is called, it cleans up all its dependencies. * Even if you retain a reference to this object, all of its content is allowed to be garbage collected. * * @return true if content existed and was destroyed */ public boolean destroy() { if (!destroyed.getAndSet(true)) { if (slobroksConfig != null) { slobroksConfig.shutdown(); } register.shutdown(); mirror.shutdown(); listener.shutdown().join(); orb.transport().shutdown().join(); targetPool.flushTargets(true); executor.shutdown(); return true; } return false; } private static final Version REPORTED_VERSION = deriveSupportedProtocolVersion(); /** * Returns the (protocol) version of this network. This gets called when the "mbus.getVersion" method is invoked * on this network, and is separated into its own function so that unit tests can override it to simulate other * versions than current. * * Note that this version reflects the highest supported <em>protocol</em> version, and is not necessarily * 1-1 with the actual Vespa release version of the underlying binary. * * @return the version to claim to be */ protected Version getVersion() { return REPORTED_VERSION; } /** * Resolves and assigns a service address for the given recipient using the given address. 
This is called by the * {@link * calls {@link * * @param recipient the recipient to assign the service address to * @param serviceName the name of the service to resolve * @return any error encountered, or null */ public Error resolveServiceAddress(RoutingNode recipient, String serviceName) { RPCServiceAddress ret = servicePool.resolve(serviceName); if (ret == null) { return new Error(ErrorCode.NO_ADDRESS_FOR_SERVICE, String.format("The address of service '%s' could not be resolved. It is not currently " + "registered with the Vespa name server. " + "The service must be having problems, or the routing configuration is wrong. " + "Address resolution attempted from host '%s'", serviceName, identity.getHostname())); } RPCTarget target = targetPool.getTarget(orb, ret); if (target == null) { return new Error(ErrorCode.CONNECTION_ERROR, String.format("Failed to connect to service '%s' from host '%s'.", serviceName, identity.getHostname())); } ret.setTarget(target); recipient.setServiceAddress(ret); return null; } /** * Determines and returns the send adapter that is compatible with the given version. If no adapter can be found, * this method returns null. * * @param version the version for which to return an adapter * @return the compatible adapter */ public RPCSendAdapter getSendAdapter(Version version) { Map.Entry<Version, RPCSendAdapter> lower = sendAdapters.floorEntry(version); return (lower != null) ? lower.getValue() : null; } /** * Deliver an error reply to the recipients of a {@link SendContext} in a way that avoids entanglement. 
* * @param ctx the send context that contains the recipient data * @param errCode the error code to return * @param errMsg the error string to return */ private void replyError(SendContext ctx, int errCode, String errMsg) { for (RoutingNode recipient : ctx.recipients) { Reply reply = new EmptyReply(); reply.getTrace().setLevel(ctx.traceLevel); reply.addError(new Error(errCode, errMsg)); recipient.handleReply(reply); } } /** Returns the owner of this network. */ NetworkOwner getOwner() { return owner; } /** Returns the identity of this network. */ public Identity getIdentity() { return identity; } /** Returns the port number this network listens to. */ public int getPort() { return listener.port(); } /** Returns the JRT supervisor. */ Supervisor getSupervisor() { return orb; } ExecutorService getExecutor() { return executor; } private class SendTask implements Runnable { final Protocol protocol; final SendContext ctx; SendTask(Protocol protocol, SendContext ctx) { this.protocol = protocol; this.ctx = ctx; } public void run() { long timeRemaining = ctx.msg.getTimeRemainingNow(); if (timeRemaining <= 0) { replyError(ctx, ErrorCode.TIMEOUT, "Aborting transmission because zero time remains."); return; } byte[] payload; try { payload = protocol.encode(ctx.version, ctx.msg); } catch (Exception e) { StringWriter out = new StringWriter(); e.printStackTrace(new PrintWriter(out)); replyError(ctx, ErrorCode.ENCODE_ERROR, out.toString()); return; } if (payload == null || payload.length == 0) { replyError(ctx, ErrorCode.ENCODE_ERROR, "Protocol '" + ctx.msg.getProtocol() + "' failed to encode message."); return; } RPCSendAdapter adapter = getSendAdapter(ctx.version); if (adapter == null) { replyError(ctx, ErrorCode.INCOMPATIBLE_VERSION, "Can not send to version '" + ctx.version + "' recipient."); return; } for (RoutingNode recipient : ctx.recipients) { adapter.send(recipient, ctx.version, payload, timeRemaining); } } } /** * Implements a helper class for {@link RPCNetwork * 
encapsulating all the data required for sending a message, but postponing the call to {@link * RPCNetwork * been resolved. */ private static class SendContext implements RPCTarget.VersionHandler { final RPCNetwork net; final Message msg; final int traceLevel; final List<RoutingNode> recipients = new LinkedList<>(); boolean hasError = false; int pending; Version version; SendContext(RPCNetwork net, Message msg, List<RoutingNode> recipients) { this.net = net; this.msg = msg; this.traceLevel = this.msg.getTrace().getLevel(); this.recipients.addAll(recipients); this.pending = this.recipients.size(); this.version = this.net.getVersion(); } @Override public void handleVersion(Version version) { boolean shouldSend = false; synchronized (this) { if (version == null) { hasError = true; } else if (version.isBefore(this.version)) { this.version = version; } if (--pending == 0) { shouldSend = true; } } if (shouldSend) { net.send(this); } } } /** * Implements a helper class to invoke {@link RPCTargetPool * This is to untangle the target pool from the scheduler. */ private static class TargetPoolTask implements Runnable { final RPCTargetPool pool; final Task jrtTask; TargetPoolTask(RPCTargetPool pool, Supervisor orb) { this.pool = pool; this.jrtTask = orb.transport().selectThread().createTask(this); this.jrtTask.schedule(1.0); } @Override public void run() { pool.flushTargets(false); jrtTask.schedule(1.0); } } }
class RPCNetwork implements Network, MethodHandler { private static final Logger log = Logger.getLogger(RPCNetwork.class.getName()); private final AtomicBoolean destroyed = new AtomicBoolean(false); private final Identity identity; private final Supervisor orb; private final RPCTargetPool targetPool; private final RPCServicePool servicePool; private final Acceptor listener; private final Mirror mirror; private final Register register; private final TreeMap<Version, RPCSendAdapter> sendAdapters = new TreeMap<>(); private volatile NetworkOwner owner; private final SlobrokConfigSubscriber slobroksConfig; private final LinkedHashMap<String, Route> lruRouteMap = new LinkedHashMap<>(10000, 0.5f, true); private final ExecutorService executor = new ThreadPoolExecutor(getNumThreads(), getNumThreads(), 0L, TimeUnit.SECONDS, new LinkedBlockingQueue<>(), ThreadFactoryFactory.getDaemonThreadFactory("mbus.net")); private static int getNumThreads() { return Math.max(2, Runtime.getRuntime().availableProcessors()/2); } private static boolean shouldEnableTcpNodelay(RPCNetworkParams.Optimization optimization) { return optimization == RPCNetworkParams.Optimization.LATENCY; } /** * Create an RPCNetwork. The servicePrefix is combined with session names to create service names. 
If the service * prefix is 'a/b' and the session name is 'c', the resulting service name that identifies the session on the * message bus will be 'a/b/c' * * @param params a complete set of parameters * @param slobrokConfig subscriber for slobroks config */ private RPCNetwork(RPCNetworkParams params, SlobrokConfigSubscriber slobrokConfig) { this.slobroksConfig = slobrokConfig; identity = params.getIdentity(); orb = new Supervisor(new Transport("mbus-rpc-" + identity.getServicePrefix(), params.getNumNetworkThreads(), shouldEnableTcpNodelay(params.getOptimization()), params.getTransportEventsBeforeWakeup())); orb.setMaxInputBufferSize(params.getMaxInputBufferSize()); orb.setMaxOutputBufferSize(params.getMaxOutputBufferSize()); targetPool = new RPCTargetPool(params.getConnectionExpireSecs(), params.getNumTargetsPerSpec()); servicePool = new RPCServicePool(this, 4096); Method method = new Method("mbus.getVersion", "", "s", this); method.requireCapabilities(CapabilitySet.none()); method.methodDesc("Retrieves the message bus version."); method.returnDesc(0, "version", "The message bus version."); orb.addMethod(method); try { listener = orb.listen(new Spec(params.getListenPort())); } catch (ListenFailedException e) { orb.transport().shutdown().join(); throw new RuntimeException(e); } TargetPoolTask task = new TargetPoolTask(targetPool, orb); task.jrtTask.scheduleNow(); register = new Register(orb, slobrokConfig.getSlobroks(), identity.getHostname(), listener.port()); mirror = new Mirror(orb, slobrokConfig.getSlobroks()); } /** * Create an RPCNetwork. The servicePrefix is combined with session names to create service names. If the service * prefix is 'a/b' and the session name is 'c', the resulting service name that identifies the session on the * message bus will be 'a/b/c' * * @param params a complete set of parameters */ public RPCNetwork(RPCNetworkParams params) { this(params, params.getSlobroksConfig() != null ? 
new SlobrokConfigSubscriber(params.getSlobroksConfig()) : new SlobrokConfigSubscriber(params.getSlobrokConfigId())); } /** * The network uses a cache of RPC targets (see {@link RPCTargetPool}) that allows it to save time by reusing open * connections. It works by keeping a set of the most recently used targets open. Calling this method forces all * unused connections to close immediately. */ protected void flushTargetPool() { targetPool.flushTargets(true); } final Route getRoute(String routeString) { Route route = lruRouteMap.get(routeString); if (route == null) { route = Route.parse(routeString); lruRouteMap.put(routeString, route); } return new Route(route); } @Override public boolean waitUntilReady(double seconds) { int millis = (int) seconds * 1000; int i = 0; do { if (mirror.ready()) { if (i > 200) { log.log(Level.INFO, "network became ready (at "+i+" ms)"); } return true; } if ((i == 200) || ((i > 200) && ((i % 1000) == 0))) { log.log(Level.INFO, "waiting for network to become ready ("+i+" of "+millis+" ms)"); mirror.dumpState(); } try { i += 10; Thread.sleep(10); } catch (InterruptedException e) { } } while (i < millis); return false; } @Override public boolean allocServiceAddress(RoutingNode recipient) { Hop hop = recipient.getRoute().getHop(0); String service = hop.getServiceName(); Error error = resolveServiceAddress(recipient, service); if (error == null) { return true; } recipient.setError(error); return false; } @Override public void freeServiceAddress(RoutingNode recipient) { RPCTarget target = ((RPCServiceAddress)recipient.getServiceAddress()).getTarget(); if (target != null) { target.subRef(); } recipient.setServiceAddress(null); } @Override public void attach(NetworkOwner owner) { if (this.owner != null) { throw new IllegalStateException("Network is already attached to another owner."); } this.owner = owner; sendAdapters.put(new Version(6,149), new RPCSendV2(this)); } @Override public void registerSession(String session) { 
register.registerName(identity.getServicePrefix() + "/" + session); } @Override public void unregisterSession(String session) { register.unregisterName(identity.getServicePrefix() + "/" + session); } @Override public void sync() { orb.transport().sync(); } @Override public void shutdown() { destroy(); } @Override public String getConnectionSpec() { return "tcp/" + identity.getHostname() + ":" + listener.port(); } @Override public IMirror getMirror() { return mirror; } @Override public void invoke(Request request) { request.returnValues().add(new StringValue(getVersion().toString())); } @Override public void send(Message msg, List<RoutingNode> recipients) { SendContext ctx = new SendContext(this, msg, recipients); Duration timeout = Duration.ofMillis(ctx.msg.getTimeRemainingNow()); for (RoutingNode recipient : ctx.recipients) { RPCServiceAddress address = (RPCServiceAddress)recipient.getServiceAddress(); address.getTarget().resolveVersion(timeout, ctx); } } private static String buildRecipientListString(SendContext ctx) { return ctx.recipients.stream().map(r -> { if (!(r.getServiceAddress() instanceof RPCServiceAddress)) { return "<non-RPC service address>"; } RPCServiceAddress addr = (RPCServiceAddress)r.getServiceAddress(); return String.format("%s at %s", addr.getServiceName(), addr.getConnectionSpec()); }).collect(Collectors.joining(", ")); } /** * This method is a callback invoked after {@link * resolved. If all versions were resolved ahead of time, this method is invoked by the same thread as the former. * If not, this method is invoked by the network thread during the version callback. 
* * @param ctx all the required send-data */ private void send(SendContext ctx) { if (destroyed.get()) { replyError(ctx, ErrorCode.NETWORK_SHUTDOWN, "Network layer has performed shutdown."); } else if (ctx.hasError) { replyError(ctx, ErrorCode.HANDSHAKE_FAILED, String.format("An error occurred while resolving version of recipient(s) [%s] from host '%s'.", buildRecipientListString(ctx), identity.getHostname())); } else { new SendTask(owner.getProtocol(ctx.msg.getProtocol()), ctx).run(); } } /** * Sets the destroyed flag to true. The very first time this method is called, it cleans up all its dependencies. * Even if you retain a reference to this object, all of its content is allowed to be garbage collected. * * @return true if content existed and was destroyed */ public boolean destroy() { if (!destroyed.getAndSet(true)) { if (slobroksConfig != null) { slobroksConfig.shutdown(); } register.shutdown(); mirror.shutdown(); listener.shutdown().join(); orb.transport().shutdown().join(); targetPool.flushTargets(true); executor.shutdown(); return true; } return false; } private static final Version REPORTED_VERSION = deriveSupportedProtocolVersion(); /** * Returns the (protocol) version of this network. This gets called when the "mbus.getVersion" method is invoked * on this network, and is separated into its own function so that unit tests can override it to simulate other * versions than current. * * Note that this version reflects the highest supported <em>protocol</em> version, and is not necessarily * 1-1 with the actual Vespa release version of the underlying binary. * * @return the version to claim to be */ protected Version getVersion() { return REPORTED_VERSION; } /** * Resolves and assigns a service address for the given recipient using the given address. 
This is called by the * {@link * calls {@link * * @param recipient the recipient to assign the service address to * @param serviceName the name of the service to resolve * @return any error encountered, or null */ public Error resolveServiceAddress(RoutingNode recipient, String serviceName) { RPCServiceAddress ret = servicePool.resolve(serviceName); if (ret == null) { return new Error(ErrorCode.NO_ADDRESS_FOR_SERVICE, String.format("The address of service '%s' could not be resolved. It is not currently " + "registered with the Vespa name server. " + "The service must be having problems, or the routing configuration is wrong. " + "Address resolution attempted from host '%s'", serviceName, identity.getHostname())); } RPCTarget target = targetPool.getTarget(orb, ret); if (target == null) { return new Error(ErrorCode.CONNECTION_ERROR, String.format("Failed to connect to service '%s' from host '%s'.", serviceName, identity.getHostname())); } ret.setTarget(target); recipient.setServiceAddress(ret); return null; } /** * Determines and returns the send adapter that is compatible with the given version. If no adapter can be found, * this method returns null. * * @param version the version for which to return an adapter * @return the compatible adapter */ public RPCSendAdapter getSendAdapter(Version version) { Map.Entry<Version, RPCSendAdapter> lower = sendAdapters.floorEntry(version); return (lower != null) ? lower.getValue() : null; } /** * Deliver an error reply to the recipients of a {@link SendContext} in a way that avoids entanglement. 
* * @param ctx the send context that contains the recipient data * @param errCode the error code to return * @param errMsg the error string to return */ private void replyError(SendContext ctx, int errCode, String errMsg) { for (RoutingNode recipient : ctx.recipients) { Reply reply = new EmptyReply(); reply.getTrace().setLevel(ctx.traceLevel); reply.addError(new Error(errCode, errMsg)); recipient.handleReply(reply); } } /** Returns the owner of this network. */ NetworkOwner getOwner() { return owner; } /** Returns the identity of this network. */ public Identity getIdentity() { return identity; } /** Returns the port number this network listens to. */ public int getPort() { return listener.port(); } /** Returns the JRT supervisor. */ Supervisor getSupervisor() { return orb; } ExecutorService getExecutor() { return executor; } private class SendTask implements Runnable { final Protocol protocol; final SendContext ctx; SendTask(Protocol protocol, SendContext ctx) { this.protocol = protocol; this.ctx = ctx; } public void run() { long timeRemaining = ctx.msg.getTimeRemainingNow(); if (timeRemaining <= 0) { replyError(ctx, ErrorCode.TIMEOUT, "Aborting transmission because zero time remains."); return; } byte[] payload; try { payload = protocol.encode(ctx.version, ctx.msg); } catch (Exception e) { StringWriter out = new StringWriter(); e.printStackTrace(new PrintWriter(out)); replyError(ctx, ErrorCode.ENCODE_ERROR, out.toString()); return; } if (payload == null || payload.length == 0) { replyError(ctx, ErrorCode.ENCODE_ERROR, "Protocol '" + ctx.msg.getProtocol() + "' failed to encode message."); return; } RPCSendAdapter adapter = getSendAdapter(ctx.version); if (adapter == null) { replyError(ctx, ErrorCode.INCOMPATIBLE_VERSION, "Can not send to version '" + ctx.version + "' recipient."); return; } for (RoutingNode recipient : ctx.recipients) { adapter.send(recipient, ctx.version, payload, timeRemaining); } } } /** * Implements a helper class for {@link RPCNetwork * 
encapsulating all the data required for sending a message, but postponing the call to {@link * RPCNetwork * been resolved. */ private static class SendContext implements RPCTarget.VersionHandler { final RPCNetwork net; final Message msg; final int traceLevel; final List<RoutingNode> recipients = new LinkedList<>(); boolean hasError = false; int pending; Version version; SendContext(RPCNetwork net, Message msg, List<RoutingNode> recipients) { this.net = net; this.msg = msg; this.traceLevel = this.msg.getTrace().getLevel(); this.recipients.addAll(recipients); this.pending = this.recipients.size(); this.version = this.net.getVersion(); } @Override public void handleVersion(Version version) { boolean shouldSend = false; synchronized (this) { if (version == null) { hasError = true; } else if (version.isBefore(this.version)) { this.version = version; } if (--pending == 0) { shouldSend = true; } } if (shouldSend) { net.send(this); } } } /** * Implements a helper class to invoke {@link RPCTargetPool * This is to untangle the target pool from the scheduler. */ private static class TargetPoolTask implements Runnable { final RPCTargetPool pool; final Task jrtTask; TargetPoolTask(RPCTargetPool pool, Supervisor orb) { this.pool = pool; this.jrtTask = orb.transport().selectThread().createTask(this); this.jrtTask.schedule(1.0); } @Override public void run() { pool.flushTargets(false); jrtTask.schedule(1.0); } } }
Remove printlns, and throw ...
public void set(CompoundName name, Object value, Map<String, String> context) { if (RankFeatures.isFeatureName(name.toString())) { try { var expectedType = typeOf(name); System.err.println("setting rank feature '" + name + "' -> " + value + " :: " + value.getClass()); if (expectedType != null && expectedType.declaredString()) { System.err.println("expected type: declared string"); var e = new IllegalArgumentException("foo"); e.fillInStackTrace(); e.printStackTrace(); } if (expectedType != null && ! expectedType.declaredString()) { value = tensorConverter.convertTo(expectedType.tensorType(), name.last(), value, query.getModel().getLanguage(), context, this); } } catch (IllegalArgumentException e) { throw new IllegalInputException("Could not set '" + name + "' to '" + value + "'", e); } } super.set(name, value, context); }
e.printStackTrace();
public void set(CompoundName name, Object value, Map<String, String> context) { if (RankFeatures.isFeatureName(name.toString())) { try { var expectedType = typeOf(name); if (expectedType != null && ! expectedType.declaredString()) { value = tensorConverter.convertTo(expectedType.tensorType(), name.last(), value, query.getModel().getLanguage(), context, this); } } catch (IllegalArgumentException e) { throw new IllegalInputException("Could not set '" + name + "' to '" + value + "'", e); } } super.set(name, value, context); }
class RankProfileInputProperties extends Properties { private final SchemaInfo schemaInfo; private final Query query; private final TensorConverter tensorConverter; private SchemaInfo.Session session = null; public RankProfileInputProperties(SchemaInfo schemaInfo, Query query, Map<String, Embedder> embedders) { this.schemaInfo = schemaInfo; this.query = query; this.tensorConverter = new TensorConverter(embedders); } @Override @Override public void requireSettable(CompoundName name, Object value, Map<String, String> context) { if (RankFeatures.isFeatureName(name.toString())) { var expectedType = typeOf(name); if (expectedType != null && ! expectedType.declaredString()) verifyType(name, value, expectedType.tensorType()); } super.requireSettable(name, value, context); } private RankProfile.InputType typeOf(CompoundName name) { if (session == null) session = schemaInfo.newSession(query); return session.rankProfileInput(name.last(), query.getRanking().getProfile()); } private void verifyType(CompoundName name, Object value, TensorType expectedType) { if (value instanceof Tensor) { TensorType valueType = ((Tensor)value).type(); if ( ! valueType.isAssignableTo(expectedType)) throwIllegalInput(name, value, expectedType); } else if (expectedType.rank() > 0) { throwIllegalInput(name, value, expectedType); } } private void throwIllegalInput(CompoundName name, Object value, TensorType expectedType) { throw new IllegalInputException("Could not set '" + name + "' to '" + value + "': " + "This input is declared in rank profile '" + query.getRanking().getProfile() + "' as " + expectedType); } }
class RankProfileInputProperties extends Properties { private final SchemaInfo schemaInfo; private final Query query; private final TensorConverter tensorConverter; private SchemaInfo.Session session = null; public RankProfileInputProperties(SchemaInfo schemaInfo, Query query, Map<String, Embedder> embedders) { this.schemaInfo = schemaInfo; this.query = query; this.tensorConverter = new TensorConverter(embedders); } @Override @Override public void requireSettable(CompoundName name, Object value, Map<String, String> context) { if (RankFeatures.isFeatureName(name.toString())) { var expectedType = typeOf(name); if (expectedType != null && ! expectedType.declaredString()) verifyType(name, value, expectedType.tensorType()); } super.requireSettable(name, value, context); } private RankProfile.InputType typeOf(CompoundName name) { if (session == null) session = schemaInfo.newSession(query); return session.rankProfileInput(name.last(), query.getRanking().getProfile()); } private void verifyType(CompoundName name, Object value, TensorType expectedType) { if (value instanceof Tensor) { TensorType valueType = ((Tensor)value).type(); if ( ! valueType.isAssignableTo(expectedType)) throwIllegalInput(name, value, expectedType); } else if (expectedType.rank() > 0) { throwIllegalInput(name, value, expectedType); } } private void throwIllegalInput(CompoundName name, Object value, TensorType expectedType) { throw new IllegalInputException("Could not set '" + name + "' to '" + value + "': " + "This input is declared in rank profile '" + query.getRanking().getProfile() + "' as " + expectedType); } }
ouch, will fix.
/**
 * Sets a query property, converting the value to the declared rank profile input type
 * (embedding text into a tensor where applicable) when the name is a rank feature.
 *
 * Fix: removed leftover debug scaffolding — System.err prints, a throwaway
 * IllegalArgumentException("foo") created only to print its stack trace, and
 * e.printStackTrace(); errors must instead surface through IllegalInputException.
 */
public void set(CompoundName name, Object value, Map<String, String> context) {
    if (RankFeatures.isFeatureName(name.toString())) {
        try {
            var expectedType = typeOf(name);
            if (expectedType != null && ! expectedType.declaredString()) {
                value = tensorConverter.convertTo(expectedType.tensorType(),
                                                  name.last(),
                                                  value,
                                                  query.getModel().getLanguage(),
                                                  context,
                                                  this);
            }
        }
        catch (IllegalArgumentException e) {
            // Wrap with the property name so the caller sees which input failed; cause preserved.
            throw new IllegalInputException("Could not set '" + name + "' to '" + value + "'", e);
        }
    }
    super.set(name, value, context);
}
e.printStackTrace();
/**
 * Sets a query property. Rank feature values are first converted to the type
 * declared for that input in the rank profile; conversion failures are reported
 * as IllegalInputException naming the offending property.
 */
public void set(CompoundName name, Object value, Map<String, String> context) {
    if ( ! RankFeatures.isFeatureName(name.toString())) { // not a rank feature: plain pass-through
        super.set(name, value, context);
        return;
    }
    try {
        var expectedType = typeOf(name);
        boolean needsConversion = expectedType != null && ! expectedType.declaredString();
        if (needsConversion)
            value = tensorConverter.convertTo(expectedType.tensorType(), name.last(), value,
                                              query.getModel().getLanguage(), context, this);
    }
    catch (IllegalArgumentException e) {
        throw new IllegalInputException("Could not set '" + name + "' to '" + value + "'", e);
    }
    super.set(name, value, context);
}
class RankProfileInputProperties extends Properties { private final SchemaInfo schemaInfo; private final Query query; private final TensorConverter tensorConverter; private SchemaInfo.Session session = null; public RankProfileInputProperties(SchemaInfo schemaInfo, Query query, Map<String, Embedder> embedders) { this.schemaInfo = schemaInfo; this.query = query; this.tensorConverter = new TensorConverter(embedders); } @Override @Override public void requireSettable(CompoundName name, Object value, Map<String, String> context) { if (RankFeatures.isFeatureName(name.toString())) { var expectedType = typeOf(name); if (expectedType != null && ! expectedType.declaredString()) verifyType(name, value, expectedType.tensorType()); } super.requireSettable(name, value, context); } private RankProfile.InputType typeOf(CompoundName name) { if (session == null) session = schemaInfo.newSession(query); return session.rankProfileInput(name.last(), query.getRanking().getProfile()); } private void verifyType(CompoundName name, Object value, TensorType expectedType) { if (value instanceof Tensor) { TensorType valueType = ((Tensor)value).type(); if ( ! valueType.isAssignableTo(expectedType)) throwIllegalInput(name, value, expectedType); } else if (expectedType.rank() > 0) { throwIllegalInput(name, value, expectedType); } } private void throwIllegalInput(CompoundName name, Object value, TensorType expectedType) { throw new IllegalInputException("Could not set '" + name + "' to '" + value + "': " + "This input is declared in rank profile '" + query.getRanking().getProfile() + "' as " + expectedType); } }
class RankProfileInputProperties extends Properties { private final SchemaInfo schemaInfo; private final Query query; private final TensorConverter tensorConverter; private SchemaInfo.Session session = null; public RankProfileInputProperties(SchemaInfo schemaInfo, Query query, Map<String, Embedder> embedders) { this.schemaInfo = schemaInfo; this.query = query; this.tensorConverter = new TensorConverter(embedders); } @Override @Override public void requireSettable(CompoundName name, Object value, Map<String, String> context) { if (RankFeatures.isFeatureName(name.toString())) { var expectedType = typeOf(name); if (expectedType != null && ! expectedType.declaredString()) verifyType(name, value, expectedType.tensorType()); } super.requireSettable(name, value, context); } private RankProfile.InputType typeOf(CompoundName name) { if (session == null) session = schemaInfo.newSession(query); return session.rankProfileInput(name.last(), query.getRanking().getProfile()); } private void verifyType(CompoundName name, Object value, TensorType expectedType) { if (value instanceof Tensor) { TensorType valueType = ((Tensor)value).type(); if ( ! valueType.isAssignableTo(expectedType)) throwIllegalInput(name, value, expectedType); } else if (expectedType.rank() > 0) { throwIllegalInput(name, value, expectedType); } } private void throwIllegalInput(CompoundName name, Object value, TensorType expectedType) { throw new IllegalInputException("Could not set '" + name + "' to '" + value + "': " + "This input is declared in rank profile '" + query.getRanking().getProfile() + "' as " + expectedType); } }
:laughing:
/**
 * Parses a cloud account on the form '&lt;cloud-name&gt;:&lt;account&gt;'.
 * Also accepts '' and 'default' (the unspecified account), and — for legacy
 * reasons — a bare account value matching the AWS or GCP pattern.
 */
public static CloudAccount from(String cloudAccount) {
    int separator = cloudAccount.indexOf(':');
    if (separator < 0) {
        // No cloud prefix: unspecified account, or a bare AWS/GCP account value
        if (cloudAccount.isEmpty() || cloudAccount.equals("default"))
            return empty;
        if (META_BY_CLOUD.get("aws").matches(cloudAccount))
            return new CloudAccount(cloudAccount, CloudName.AWS);
        if (META_BY_CLOUD.get("gcp").matches(cloudAccount))
            return new CloudAccount(cloudAccount, CloudName.GCP);
        throw illegal(cloudAccount, "Must be on format '<cloud-name>:<account>' or 'default'");
    }
    String cloud = cloudAccount.substring(0, separator);
    String account = cloudAccount.substring(separator + 1);
    CloudMeta meta = META_BY_CLOUD.get(cloud);
    if (meta == null)
        throw illegal(cloudAccount, "Cloud name must be one of: " +
                                    META_BY_CLOUD.keySet().stream().sorted().collect(Collectors.joining(", ")));
    if ( ! meta.matches(account))
        throw illegal(cloudAccount, meta.accountType + " must match '" + meta.pattern.pattern() + "'");
    return new CloudAccount(account, CloudName.from(cloud));
}
if (META_BY_CLOUD.get("gcp").matches(cloudAccount))
/**
 * Parses a cloud account on the form '&lt;cloud-name&gt;:&lt;account&gt;'.
 * Also accepts '' and 'default' (the unspecified account), and — for legacy
 * reasons — a bare account value matching the AWS or GCP pattern.
 */
public static CloudAccount from(String cloudAccount) {
    int separator = cloudAccount.indexOf(':');
    if (separator < 0) {
        // No cloud prefix: unspecified account, or a bare AWS/GCP account value
        if (cloudAccount.isEmpty() || cloudAccount.equals("default"))
            return empty;
        if (META_BY_CLOUD.get("aws").matches(cloudAccount))
            return new CloudAccount(cloudAccount, CloudName.AWS);
        if (META_BY_CLOUD.get("gcp").matches(cloudAccount))
            return new CloudAccount(cloudAccount, CloudName.GCP);
        throw illegal(cloudAccount, "Must be on format '<cloud-name>:<account>' or 'default'");
    }
    String cloud = cloudAccount.substring(0, separator);
    String account = cloudAccount.substring(separator + 1);
    CloudMeta meta = META_BY_CLOUD.get(cloud);
    if (meta == null)
        throw illegal(cloudAccount, "Cloud name must be one of: " +
                                    META_BY_CLOUD.keySet().stream().sorted().collect(Collectors.joining(", ")));
    if ( ! meta.matches(account))
        throw illegal(cloudAccount, meta.accountType + " must match '" + meta.pattern.pattern() + "'");
    return new CloudAccount(account, CloudName.from(cloud));
}
/**
 * Identifies an account in a public cloud, consisting of a cloud name and a
 * cloud-specific account identifier. Instances are immutable and value-comparable.
 */
class CloudAccount implements Comparable<CloudAccount> {

    /** Per-cloud validation metadata: the human name of the account field and its legal pattern. */
    private record CloudMeta(String accountType, Pattern pattern) {
        private boolean matches(String account) { return pattern.matcher(account).matches(); }
    }

    private static final Map<String, CloudMeta> META_BY_CLOUD = Map.of(
            "aws", new CloudMeta("Account ID", Pattern.compile("[0-9]{12}")),
            "azure", new CloudMeta("Subscription ID", Pattern.compile("[0-9a-f]{8}-([0-9a-f]{4}-){3}[0-9a-f]{12}")),
            "gcp", new CloudMeta("Project ID", Pattern.compile("[a-z][a-z0-9-]{4,28}[a-z0-9]")));

    /** Empty value. When this is used, either implicitly or explicitly, the zone will use its default account */
    public static final CloudAccount empty = new CloudAccount("", CloudName.DEFAULT);

    private final String account;
    private final CloudName cloudName;

    private CloudAccount(String account, CloudName cloudName) {
        this.account = account;
        this.cloudName = cloudName;
    }

    public String account() { return account; }

    public CloudName cloudName() { return cloudName; }

    /**
     * Returns the serialized value of this account that can be deserialized with
     * {@link CloudAccount#from(String)}.
     * (Fix: the original javadoc was truncated after '{@code {@link CloudAccount}}',
     * leaving an unterminated inline tag.)
     */
    public final String value() {
        if (isUnspecified()) return account;
        return cloudName.value() + ':' + account;
    }

    public boolean isUnspecified() {
        return this.equals(empty);
    }

    /** Returns true if this is an exclave account. */
    public boolean isExclave(Zone zone) {
        return !isUnspecified() && zone.system().isPublic() && !equals(zone.cloud().account());
    }

    /** Returns true if this is an enclave account. */
    public boolean isEnclave(Zone zone) {
        return !isUnspecified() && !equals(zone.cloud().account());
    }

    @Override
    public String toString() {
        return isUnspecified() ? "unspecified account" : "account '" + account + "' in " + cloudName;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        CloudAccount that = (CloudAccount) o;
        return account.equals(that.account) && cloudName.equals(that.cloudName);
    }

    @Override
    public int hashCode() {
        return Objects.hash(account, cloudName);
    }

    @Override
    public int compareTo(CloudAccount o) {
        return this.value().compareTo(o.value());
    }

    private static IllegalArgumentException illegal(String cloudAccount, String details) {
        return new IllegalArgumentException("Invalid cloud account '" + cloudAccount + "': " + details);
    }

}
/**
 * Identifies an account in a public cloud, consisting of a cloud name and a
 * cloud-specific account identifier. Instances are immutable and value-comparable.
 */
class CloudAccount implements Comparable<CloudAccount> {

    /** Per-cloud validation metadata: the human name of the account field and its legal pattern. */
    private record CloudMeta(String accountType, Pattern pattern) {
        private boolean matches(String account) { return pattern.matcher(account).matches(); }
    }

    private static final Map<String, CloudMeta> META_BY_CLOUD = Map.of(
            "aws", new CloudMeta("Account ID", Pattern.compile("[0-9]{12}")),
            "azure", new CloudMeta("Subscription ID", Pattern.compile("[0-9a-f]{8}-([0-9a-f]{4}-){3}[0-9a-f]{12}")),
            "gcp", new CloudMeta("Project ID", Pattern.compile("[a-z][a-z0-9-]{4,28}[a-z0-9]")));

    /** Empty value. When this is used, either implicitly or explicitly, the zone will use its default account */
    public static final CloudAccount empty = new CloudAccount("", CloudName.DEFAULT);

    private final String account;
    private final CloudName cloudName;

    private CloudAccount(String account, CloudName cloudName) {
        this.account = account;
        this.cloudName = cloudName;
    }

    public String account() { return account; }

    public CloudName cloudName() { return cloudName; }

    /**
     * Returns the serialized value of this account that can be deserialized with
     * {@link CloudAccount#from(String)}.
     * (Fix: the original javadoc was truncated after '{@code {@link CloudAccount}}',
     * leaving an unterminated inline tag.)
     */
    public final String value() {
        if (isUnspecified()) return account;
        return cloudName.value() + ':' + account;
    }

    public boolean isUnspecified() {
        return this.equals(empty);
    }

    /** Returns true if this is an exclave account. */
    public boolean isExclave(Zone zone) {
        return !isUnspecified() && zone.system().isPublic() && !equals(zone.cloud().account());
    }

    /** Returns true if this is an enclave account. */
    public boolean isEnclave(Zone zone) {
        return !isUnspecified() && !equals(zone.cloud().account());
    }

    @Override
    public String toString() {
        return isUnspecified() ? "unspecified account" : "account '" + account + "' in " + cloudName;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        CloudAccount that = (CloudAccount) o;
        return account.equals(that.account) && cloudName.equals(that.cloudName);
    }

    @Override
    public int hashCode() {
        return Objects.hash(account, cloudName);
    }

    @Override
    public int compareTo(CloudAccount o) {
        return this.value().compareTo(o.value());
    }

    private static IllegalArgumentException illegal(String cloudAccount, String details) {
        return new IllegalArgumentException("Invalid cloud account '" + cloudAccount + "': " + details);
    }

}
Is something wrong with the indentation here? The same applies to the functions below.
/** Retrieves combined configuration by delegating, in order, to each contained document database. */
private record Join(List<DocumentDatabase> docDbs) {

    public void getConfig(IndexInfoConfig.Builder builder) {
        docDbs.forEach(docDb -> docDb.getConfig(builder));
    }

    public void getConfig(SchemaInfoConfig.Builder builder) {
        docDbs.forEach(docDb -> docDb.getConfig(builder));
    }

    public void getConfig(IlscriptsConfig.Builder builder) {
        docDbs.forEach(docDb -> docDb.getConfig(builder));
    }

    public void getConfig(AttributesConfig.Builder builder) {
        docDbs.forEach(docDb -> docDb.getConfig(builder));
    }

    public void getConfig(RankProfilesConfig.Builder builder) {
        docDbs.forEach(docDb -> docDb.getConfig(builder));
    }

}
public void getConfig(SchemaInfoConfig.Builder builder) {
/** Retrieves combined configuration by delegating, in order, to each contained document database. */
private record Join(List<DocumentDatabase> docDbs) {

    public void getConfig(IndexInfoConfig.Builder builder) {
        docDbs.forEach(docDb -> docDb.getConfig(builder));
    }

    public void getConfig(SchemaInfoConfig.Builder builder) {
        docDbs.forEach(docDb -> docDb.getConfig(builder));
    }

    public void getConfig(IlscriptsConfig.Builder builder) {
        docDbs.forEach(docDb -> docDb.getConfig(builder));
    }

    public void getConfig(AttributesConfig.Builder builder) {
        docDbs.forEach(docDb -> docDb.getConfig(builder));
    }

    public void getConfig(RankProfilesConfig.Builder builder) {
        docDbs.forEach(docDb -> docDb.getConfig(builder));
    }

}
class IndexedSearchCluster extends SearchCluster implements DocumentdbInfoConfig.Producer, IndexInfoConfig.Producer, SchemaInfoConfig.Producer, IlscriptsConfig.Producer, DispatchConfig.Producer, DispatchNodesConfig.Producer, ConfigInstance.Producer { private Tuning tuning; private SearchCoverage searchCoverage; private final List<DocumentDatabase> documentDbs = new LinkedList<>(); private final Redundancy.Provider redundancyProvider; private final List<SearchNode> searchNodes = new ArrayList<>(); private final DispatchTuning.DispatchPolicy defaultDispatchPolicy; private final double dispatchWarmup; private final String summaryDecodePolicy; public IndexedSearchCluster(TreeConfigProducer<AnyConfigProducer> parent, String clusterName, int index, Redundancy.Provider redundancyProvider, ModelContext.FeatureFlags featureFlags) { super(parent, clusterName, index); this.redundancyProvider = redundancyProvider; defaultDispatchPolicy = DispatchTuning.Builder.toDispatchPolicy(featureFlags.queryDispatchPolicy()); dispatchWarmup = featureFlags.queryDispatchWarmup(); summaryDecodePolicy = featureFlags.summaryDecodePolicy(); } @Override protected IndexingMode getIndexingMode() { return IndexingMode.REALTIME; } public void addSearcher(SearchNode searcher) { searchNodes.add(searcher); } public List<SearchNode> getSearchNodes() { return Collections.unmodifiableList(searchNodes); } public int getSearchNodeCount() { return searchNodes.size(); } public SearchNode getSearchNode(int index) { return searchNodes.get(index); } public void setTuning(Tuning tuning) { this.tuning = tuning; } public Tuning getTuning() { return tuning; } public void fillDocumentDBConfig(String documentType, ProtonConfig.Documentdb.Builder builder) { for (DocumentDatabase sdoc : documentDbs) { if (sdoc.getName().equals(documentType)) { fillDocumentDBConfig(sdoc, builder); return; } } } private void fillDocumentDBConfig(DocumentDatabase sdoc, ProtonConfig.Documentdb.Builder ddbB) { 
ddbB.inputdoctypename(sdoc.getSchemaName()) .configid(sdoc.getConfigId()); } @Override public void deriveFromSchemas(DeployState deployState) { for (SchemaInfo spec : schemas().values()) { if (spec.fullSchema() instanceof DocumentOnlySchema) continue; DocumentDatabase db = new DocumentDatabase(this, spec.fullSchema().getName(), new DerivedConfiguration(spec.fullSchema(), deployState, false)); documentDbs.add(db); } } @Override public List<DocumentDatabase> getDocumentDbs() { return documentDbs; } public boolean hasDocumentDB(String name) { for (DocumentDatabase db : documentDbs) { if (db.getName().equals(name)) { return true; } } return false; } public void setSearchCoverage(SearchCoverage searchCoverage) { this.searchCoverage = searchCoverage; } @Override public void getConfig(DocumentdbInfoConfig.Builder builder) { for (DocumentDatabase db : documentDbs) { DocumentdbInfoConfig.Documentdb.Builder docDb = new DocumentdbInfoConfig.Documentdb.Builder(); docDb.name(db.getName()); builder.documentdb(docDb); } } @Override public void getConfig(IndexInfoConfig.Builder builder) { new Join(documentDbs).getConfig(builder); } @Override public void getConfig(SchemaInfoConfig.Builder builder) { new Join(documentDbs).getConfig(builder); } @Override public void getConfig(IlscriptsConfig.Builder builder) { new Join(documentDbs).getConfig(builder); } public void getConfig(AttributesConfig.Builder builder) { new Join(documentDbs).getConfig(builder); } public void getConfig(RankProfilesConfig.Builder builder) { new Join(documentDbs).getConfig(builder); } private static DistributionPolicy.Enum toDistributionPolicy(DispatchTuning.DispatchPolicy tuning) { return switch (tuning) { case ADAPTIVE: yield DistributionPolicy.ADAPTIVE; case ROUNDROBIN: yield DistributionPolicy.ROUNDROBIN; case BEST_OF_RANDOM_2: yield DistributionPolicy.BEST_OF_RANDOM_2; case LATENCY_AMORTIZED_OVER_REQUESTS: yield DistributionPolicy.LATENCY_AMORTIZED_OVER_REQUESTS; case LATENCY_AMORTIZED_OVER_TIME: yield 
DistributionPolicy.LATENCY_AMORTIZED_OVER_TIME; }; } @Override public void getConfig(DispatchNodesConfig.Builder builder) { for (SearchNode node : getSearchNodes()) { DispatchNodesConfig.Node.Builder nodeBuilder = new DispatchNodesConfig.Node.Builder(); nodeBuilder.key(node.getDistributionKey()); nodeBuilder.group(node.getNodeSpec().groupIndex()); nodeBuilder.host(node.getHostName()); nodeBuilder.port(node.getRpcPort()); builder.node(nodeBuilder); } } @Override public void getConfig(DispatchConfig.Builder builder) { if (tuning.dispatch.getTopkProbability() != null) { builder.topKProbability(tuning.dispatch.getTopkProbability()); } if (tuning.dispatch.getMinActiveDocsCoverage() != null) builder.minActivedocsPercentage(tuning.dispatch.getMinActiveDocsCoverage()); if (tuning.dispatch.getDispatchPolicy() != null) { builder.distributionPolicy(toDistributionPolicy(tuning.dispatch.getDispatchPolicy())); } else { builder.distributionPolicy(toDistributionPolicy(defaultDispatchPolicy)); } if (tuning.dispatch.getMaxHitsPerPartition() != null) builder.maxHitsPerNode(tuning.dispatch.getMaxHitsPerPartition()); builder.redundancy(redundancyProvider.redundancy().finalRedundancy()); if (searchCoverage != null) { if (searchCoverage.getMinimum() != null) builder.minSearchCoverage(searchCoverage.getMinimum() * 100.0); if (searchCoverage.getMinWaitAfterCoverageFactor() != null) builder.minWaitAfterCoverageFactor(searchCoverage.getMinWaitAfterCoverageFactor()); if (searchCoverage.getMaxWaitAfterCoverageFactor() != null) builder.maxWaitAfterCoverageFactor(searchCoverage.getMaxWaitAfterCoverageFactor()); } builder.warmuptime(dispatchWarmup); builder.summaryDecodePolicy(toSummaryDecoding(summaryDecodePolicy)); } private DispatchConfig.SummaryDecodePolicy.Enum toSummaryDecoding(String summaryDecodeType) { return switch (summaryDecodeType.toLowerCase()) { case "eager" -> DispatchConfig.SummaryDecodePolicy.EAGER; case "ondemand","on-demand" -> DispatchConfig.SummaryDecodePolicy.Enum.ONDEMAND; 
default -> DispatchConfig.SummaryDecodePolicy.Enum.EAGER; }; } @Override public String toString() { return "Indexing cluster '" + getClusterName() + "'"; } /** * Class used to retrieve combined configuration from multiple document databases. * It is not a direct {@link ConfigInstance.Producer} of those configs, * that is handled (by delegating to this) by the {@link IndexedSearchCluster} * which is the parent to this. This avoids building the config multiple times. */ }
class IndexedSearchCluster extends SearchCluster implements DispatchConfig.Producer, DispatchNodesConfig.Producer { private Tuning tuning; private SearchCoverage searchCoverage; private final List<DocumentDatabase> documentDbs = new LinkedList<>(); private final Redundancy.Provider redundancyProvider; private final List<SearchNode> searchNodes = new ArrayList<>(); private final DispatchTuning.DispatchPolicy defaultDispatchPolicy; private final double dispatchWarmup; private final String summaryDecodePolicy; public IndexedSearchCluster(TreeConfigProducer<AnyConfigProducer> parent, String clusterName, int index, Redundancy.Provider redundancyProvider, ModelContext.FeatureFlags featureFlags) { super(parent, clusterName, index); this.redundancyProvider = redundancyProvider; defaultDispatchPolicy = DispatchTuning.Builder.toDispatchPolicy(featureFlags.queryDispatchPolicy()); dispatchWarmup = featureFlags.queryDispatchWarmup(); summaryDecodePolicy = featureFlags.summaryDecodePolicy(); } @Override protected IndexingMode getIndexingMode() { return IndexingMode.REALTIME; } public void addSearcher(SearchNode searcher) { searchNodes.add(searcher); } public List<SearchNode> getSearchNodes() { return Collections.unmodifiableList(searchNodes); } public int getSearchNodeCount() { return searchNodes.size(); } public SearchNode getSearchNode(int index) { return searchNodes.get(index); } public void setTuning(Tuning tuning) { this.tuning = tuning; } public Tuning getTuning() { return tuning; } public void fillDocumentDBConfig(String documentType, ProtonConfig.Documentdb.Builder builder) { for (DocumentDatabase sdoc : documentDbs) { if (sdoc.getName().equals(documentType)) { fillDocumentDBConfig(sdoc, builder); return; } } } private void fillDocumentDBConfig(DocumentDatabase sdoc, ProtonConfig.Documentdb.Builder ddbB) { ddbB.inputdoctypename(sdoc.getSchemaName()) .configid(sdoc.getConfigId()); } @Override public void deriveFromSchemas(DeployState deployState) { for (SchemaInfo spec : 
schemas().values()) { if (spec.fullSchema() instanceof DocumentOnlySchema) continue; DocumentDatabase db = new DocumentDatabase(this, spec.fullSchema().getName(), new DerivedConfiguration(spec.fullSchema(), deployState, false)); documentDbs.add(db); } } @Override public List<DocumentDatabase> getDocumentDbs() { return documentDbs; } public boolean hasDocumentDB(String name) { for (DocumentDatabase db : documentDbs) { if (db.getName().equals(name)) { return true; } } return false; } public void setSearchCoverage(SearchCoverage searchCoverage) { this.searchCoverage = searchCoverage; } @Override public void getConfig(DocumentdbInfoConfig.Builder builder) { for (DocumentDatabase db : documentDbs) { DocumentdbInfoConfig.Documentdb.Builder docDb = new DocumentdbInfoConfig.Documentdb.Builder(); docDb.name(db.getName()); builder.documentdb(docDb); } } @Override public void getConfig(IndexInfoConfig.Builder builder) { new Join(documentDbs).getConfig(builder); } @Override public void getConfig(SchemaInfoConfig.Builder builder) { new Join(documentDbs).getConfig(builder); } @Override public void getConfig(IlscriptsConfig.Builder builder) { new Join(documentDbs).getConfig(builder); } public void getConfig(AttributesConfig.Builder builder) { new Join(documentDbs).getConfig(builder); } public void getConfig(RankProfilesConfig.Builder builder) { new Join(documentDbs).getConfig(builder); } private static DistributionPolicy.Enum toDistributionPolicy(DispatchTuning.DispatchPolicy tuning) { return switch (tuning) { case ADAPTIVE: yield DistributionPolicy.ADAPTIVE; case ROUNDROBIN: yield DistributionPolicy.ROUNDROBIN; case BEST_OF_RANDOM_2: yield DistributionPolicy.BEST_OF_RANDOM_2; case LATENCY_AMORTIZED_OVER_REQUESTS: yield DistributionPolicy.LATENCY_AMORTIZED_OVER_REQUESTS; case LATENCY_AMORTIZED_OVER_TIME: yield DistributionPolicy.LATENCY_AMORTIZED_OVER_TIME; }; } @Override public void getConfig(DispatchNodesConfig.Builder builder) { for (SearchNode node : getSearchNodes()) { 
DispatchNodesConfig.Node.Builder nodeBuilder = new DispatchNodesConfig.Node.Builder(); nodeBuilder.key(node.getDistributionKey()); nodeBuilder.group(node.getNodeSpec().groupIndex()); nodeBuilder.host(node.getHostName()); nodeBuilder.port(node.getRpcPort()); builder.node(nodeBuilder); } } @Override public void getConfig(DispatchConfig.Builder builder) { if (tuning.dispatch.getTopkProbability() != null) { builder.topKProbability(tuning.dispatch.getTopkProbability()); } if (tuning.dispatch.getMinActiveDocsCoverage() != null) builder.minActivedocsPercentage(tuning.dispatch.getMinActiveDocsCoverage()); if (tuning.dispatch.getDispatchPolicy() != null) { builder.distributionPolicy(toDistributionPolicy(tuning.dispatch.getDispatchPolicy())); } else { builder.distributionPolicy(toDistributionPolicy(defaultDispatchPolicy)); } if (tuning.dispatch.getMaxHitsPerPartition() != null) builder.maxHitsPerNode(tuning.dispatch.getMaxHitsPerPartition()); builder.redundancy(redundancyProvider.redundancy().finalRedundancy()); if (searchCoverage != null) { if (searchCoverage.getMinimum() != null) builder.minSearchCoverage(searchCoverage.getMinimum() * 100.0); if (searchCoverage.getMinWaitAfterCoverageFactor() != null) builder.minWaitAfterCoverageFactor(searchCoverage.getMinWaitAfterCoverageFactor()); if (searchCoverage.getMaxWaitAfterCoverageFactor() != null) builder.maxWaitAfterCoverageFactor(searchCoverage.getMaxWaitAfterCoverageFactor()); } builder.warmuptime(dispatchWarmup); builder.summaryDecodePolicy(toSummaryDecoding(summaryDecodePolicy)); } private DispatchConfig.SummaryDecodePolicy.Enum toSummaryDecoding(String summaryDecodeType) { return switch (summaryDecodeType.toLowerCase()) { case "eager" -> DispatchConfig.SummaryDecodePolicy.EAGER; case "ondemand","on-demand" -> DispatchConfig.SummaryDecodePolicy.Enum.ONDEMAND; default -> DispatchConfig.SummaryDecodePolicy.Enum.EAGER; }; } @Override public String toString() { return "Indexing cluster '" + getClusterName() + "'"; } /** * 
Class used to retrieve combined configuration from multiple document databases. * It is not a direct {@link ConfigInstance.Producer} of those configs, * that is handled (by delegating to this) by the {@link IndexedSearchCluster} * which is the parent to this. This avoids building the config multiple times. */ }
Good point — I wonder if this also happened the last time IntelliJ converted this class to a record.
/** Retrieves combined configuration by delegating, in order, to each contained document database. */
private record Join(List<DocumentDatabase> docDbs) {

    public void getConfig(IndexInfoConfig.Builder builder) {
        docDbs.forEach(docDb -> docDb.getConfig(builder));
    }

    public void getConfig(SchemaInfoConfig.Builder builder) {
        docDbs.forEach(docDb -> docDb.getConfig(builder));
    }

    public void getConfig(IlscriptsConfig.Builder builder) {
        docDbs.forEach(docDb -> docDb.getConfig(builder));
    }

    public void getConfig(AttributesConfig.Builder builder) {
        docDbs.forEach(docDb -> docDb.getConfig(builder));
    }

    public void getConfig(RankProfilesConfig.Builder builder) {
        docDbs.forEach(docDb -> docDb.getConfig(builder));
    }

}
public void getConfig(SchemaInfoConfig.Builder builder) {
/** Retrieves combined configuration by delegating, in order, to each contained document database. */
private record Join(List<DocumentDatabase> docDbs) {

    public void getConfig(IndexInfoConfig.Builder builder) {
        docDbs.forEach(docDb -> docDb.getConfig(builder));
    }

    public void getConfig(SchemaInfoConfig.Builder builder) {
        docDbs.forEach(docDb -> docDb.getConfig(builder));
    }

    public void getConfig(IlscriptsConfig.Builder builder) {
        docDbs.forEach(docDb -> docDb.getConfig(builder));
    }

    public void getConfig(AttributesConfig.Builder builder) {
        docDbs.forEach(docDb -> docDb.getConfig(builder));
    }

    public void getConfig(RankProfilesConfig.Builder builder) {
        docDbs.forEach(docDb -> docDb.getConfig(builder));
    }

}
class IndexedSearchCluster extends SearchCluster implements DocumentdbInfoConfig.Producer, IndexInfoConfig.Producer, SchemaInfoConfig.Producer, IlscriptsConfig.Producer, DispatchConfig.Producer, DispatchNodesConfig.Producer, ConfigInstance.Producer { private Tuning tuning; private SearchCoverage searchCoverage; private final List<DocumentDatabase> documentDbs = new LinkedList<>(); private final Redundancy.Provider redundancyProvider; private final List<SearchNode> searchNodes = new ArrayList<>(); private final DispatchTuning.DispatchPolicy defaultDispatchPolicy; private final double dispatchWarmup; private final String summaryDecodePolicy; public IndexedSearchCluster(TreeConfigProducer<AnyConfigProducer> parent, String clusterName, int index, Redundancy.Provider redundancyProvider, ModelContext.FeatureFlags featureFlags) { super(parent, clusterName, index); this.redundancyProvider = redundancyProvider; defaultDispatchPolicy = DispatchTuning.Builder.toDispatchPolicy(featureFlags.queryDispatchPolicy()); dispatchWarmup = featureFlags.queryDispatchWarmup(); summaryDecodePolicy = featureFlags.summaryDecodePolicy(); } @Override protected IndexingMode getIndexingMode() { return IndexingMode.REALTIME; } public void addSearcher(SearchNode searcher) { searchNodes.add(searcher); } public List<SearchNode> getSearchNodes() { return Collections.unmodifiableList(searchNodes); } public int getSearchNodeCount() { return searchNodes.size(); } public SearchNode getSearchNode(int index) { return searchNodes.get(index); } public void setTuning(Tuning tuning) { this.tuning = tuning; } public Tuning getTuning() { return tuning; } public void fillDocumentDBConfig(String documentType, ProtonConfig.Documentdb.Builder builder) { for (DocumentDatabase sdoc : documentDbs) { if (sdoc.getName().equals(documentType)) { fillDocumentDBConfig(sdoc, builder); return; } } } private void fillDocumentDBConfig(DocumentDatabase sdoc, ProtonConfig.Documentdb.Builder ddbB) { 
ddbB.inputdoctypename(sdoc.getSchemaName()) .configid(sdoc.getConfigId()); } @Override public void deriveFromSchemas(DeployState deployState) { for (SchemaInfo spec : schemas().values()) { if (spec.fullSchema() instanceof DocumentOnlySchema) continue; DocumentDatabase db = new DocumentDatabase(this, spec.fullSchema().getName(), new DerivedConfiguration(spec.fullSchema(), deployState, false)); documentDbs.add(db); } } @Override public List<DocumentDatabase> getDocumentDbs() { return documentDbs; } public boolean hasDocumentDB(String name) { for (DocumentDatabase db : documentDbs) { if (db.getName().equals(name)) { return true; } } return false; } public void setSearchCoverage(SearchCoverage searchCoverage) { this.searchCoverage = searchCoverage; } @Override public void getConfig(DocumentdbInfoConfig.Builder builder) { for (DocumentDatabase db : documentDbs) { DocumentdbInfoConfig.Documentdb.Builder docDb = new DocumentdbInfoConfig.Documentdb.Builder(); docDb.name(db.getName()); builder.documentdb(docDb); } } @Override public void getConfig(IndexInfoConfig.Builder builder) { new Join(documentDbs).getConfig(builder); } @Override public void getConfig(SchemaInfoConfig.Builder builder) { new Join(documentDbs).getConfig(builder); } @Override public void getConfig(IlscriptsConfig.Builder builder) { new Join(documentDbs).getConfig(builder); } public void getConfig(AttributesConfig.Builder builder) { new Join(documentDbs).getConfig(builder); } public void getConfig(RankProfilesConfig.Builder builder) { new Join(documentDbs).getConfig(builder); } private static DistributionPolicy.Enum toDistributionPolicy(DispatchTuning.DispatchPolicy tuning) { return switch (tuning) { case ADAPTIVE: yield DistributionPolicy.ADAPTIVE; case ROUNDROBIN: yield DistributionPolicy.ROUNDROBIN; case BEST_OF_RANDOM_2: yield DistributionPolicy.BEST_OF_RANDOM_2; case LATENCY_AMORTIZED_OVER_REQUESTS: yield DistributionPolicy.LATENCY_AMORTIZED_OVER_REQUESTS; case LATENCY_AMORTIZED_OVER_TIME: yield 
DistributionPolicy.LATENCY_AMORTIZED_OVER_TIME; }; } @Override public void getConfig(DispatchNodesConfig.Builder builder) { for (SearchNode node : getSearchNodes()) { DispatchNodesConfig.Node.Builder nodeBuilder = new DispatchNodesConfig.Node.Builder(); nodeBuilder.key(node.getDistributionKey()); nodeBuilder.group(node.getNodeSpec().groupIndex()); nodeBuilder.host(node.getHostName()); nodeBuilder.port(node.getRpcPort()); builder.node(nodeBuilder); } } @Override public void getConfig(DispatchConfig.Builder builder) { if (tuning.dispatch.getTopkProbability() != null) { builder.topKProbability(tuning.dispatch.getTopkProbability()); } if (tuning.dispatch.getMinActiveDocsCoverage() != null) builder.minActivedocsPercentage(tuning.dispatch.getMinActiveDocsCoverage()); if (tuning.dispatch.getDispatchPolicy() != null) { builder.distributionPolicy(toDistributionPolicy(tuning.dispatch.getDispatchPolicy())); } else { builder.distributionPolicy(toDistributionPolicy(defaultDispatchPolicy)); } if (tuning.dispatch.getMaxHitsPerPartition() != null) builder.maxHitsPerNode(tuning.dispatch.getMaxHitsPerPartition()); builder.redundancy(redundancyProvider.redundancy().finalRedundancy()); if (searchCoverage != null) { if (searchCoverage.getMinimum() != null) builder.minSearchCoverage(searchCoverage.getMinimum() * 100.0); if (searchCoverage.getMinWaitAfterCoverageFactor() != null) builder.minWaitAfterCoverageFactor(searchCoverage.getMinWaitAfterCoverageFactor()); if (searchCoverage.getMaxWaitAfterCoverageFactor() != null) builder.maxWaitAfterCoverageFactor(searchCoverage.getMaxWaitAfterCoverageFactor()); } builder.warmuptime(dispatchWarmup); builder.summaryDecodePolicy(toSummaryDecoding(summaryDecodePolicy)); } private DispatchConfig.SummaryDecodePolicy.Enum toSummaryDecoding(String summaryDecodeType) { return switch (summaryDecodeType.toLowerCase()) { case "eager" -> DispatchConfig.SummaryDecodePolicy.EAGER; case "ondemand","on-demand" -> DispatchConfig.SummaryDecodePolicy.Enum.ONDEMAND; 
default -> DispatchConfig.SummaryDecodePolicy.Enum.EAGER; }; } @Override public String toString() { return "Indexing cluster '" + getClusterName() + "'"; } /** * Class used to retrieve combined configuration from multiple document databases. * It is not a direct {@link ConfigInstance.Producer} of those configs, * that is handled (by delegating to this) by the {@link IndexedSearchCluster} * which is the parent to this. This avoids building the config multiple times. */ }
class IndexedSearchCluster extends SearchCluster implements DispatchConfig.Producer, DispatchNodesConfig.Producer { private Tuning tuning; private SearchCoverage searchCoverage; private final List<DocumentDatabase> documentDbs = new LinkedList<>(); private final Redundancy.Provider redundancyProvider; private final List<SearchNode> searchNodes = new ArrayList<>(); private final DispatchTuning.DispatchPolicy defaultDispatchPolicy; private final double dispatchWarmup; private final String summaryDecodePolicy; public IndexedSearchCluster(TreeConfigProducer<AnyConfigProducer> parent, String clusterName, int index, Redundancy.Provider redundancyProvider, ModelContext.FeatureFlags featureFlags) { super(parent, clusterName, index); this.redundancyProvider = redundancyProvider; defaultDispatchPolicy = DispatchTuning.Builder.toDispatchPolicy(featureFlags.queryDispatchPolicy()); dispatchWarmup = featureFlags.queryDispatchWarmup(); summaryDecodePolicy = featureFlags.summaryDecodePolicy(); } @Override protected IndexingMode getIndexingMode() { return IndexingMode.REALTIME; } public void addSearcher(SearchNode searcher) { searchNodes.add(searcher); } public List<SearchNode> getSearchNodes() { return Collections.unmodifiableList(searchNodes); } public int getSearchNodeCount() { return searchNodes.size(); } public SearchNode getSearchNode(int index) { return searchNodes.get(index); } public void setTuning(Tuning tuning) { this.tuning = tuning; } public Tuning getTuning() { return tuning; } public void fillDocumentDBConfig(String documentType, ProtonConfig.Documentdb.Builder builder) { for (DocumentDatabase sdoc : documentDbs) { if (sdoc.getName().equals(documentType)) { fillDocumentDBConfig(sdoc, builder); return; } } } private void fillDocumentDBConfig(DocumentDatabase sdoc, ProtonConfig.Documentdb.Builder ddbB) { ddbB.inputdoctypename(sdoc.getSchemaName()) .configid(sdoc.getConfigId()); } @Override public void deriveFromSchemas(DeployState deployState) { for (SchemaInfo spec : 
schemas().values()) { if (spec.fullSchema() instanceof DocumentOnlySchema) continue; DocumentDatabase db = new DocumentDatabase(this, spec.fullSchema().getName(), new DerivedConfiguration(spec.fullSchema(), deployState, false)); documentDbs.add(db); } } @Override public List<DocumentDatabase> getDocumentDbs() { return documentDbs; } public boolean hasDocumentDB(String name) { for (DocumentDatabase db : documentDbs) { if (db.getName().equals(name)) { return true; } } return false; } public void setSearchCoverage(SearchCoverage searchCoverage) { this.searchCoverage = searchCoverage; } @Override public void getConfig(DocumentdbInfoConfig.Builder builder) { for (DocumentDatabase db : documentDbs) { DocumentdbInfoConfig.Documentdb.Builder docDb = new DocumentdbInfoConfig.Documentdb.Builder(); docDb.name(db.getName()); builder.documentdb(docDb); } } @Override public void getConfig(IndexInfoConfig.Builder builder) { new Join(documentDbs).getConfig(builder); } @Override public void getConfig(SchemaInfoConfig.Builder builder) { new Join(documentDbs).getConfig(builder); } @Override public void getConfig(IlscriptsConfig.Builder builder) { new Join(documentDbs).getConfig(builder); } public void getConfig(AttributesConfig.Builder builder) { new Join(documentDbs).getConfig(builder); } public void getConfig(RankProfilesConfig.Builder builder) { new Join(documentDbs).getConfig(builder); } private static DistributionPolicy.Enum toDistributionPolicy(DispatchTuning.DispatchPolicy tuning) { return switch (tuning) { case ADAPTIVE: yield DistributionPolicy.ADAPTIVE; case ROUNDROBIN: yield DistributionPolicy.ROUNDROBIN; case BEST_OF_RANDOM_2: yield DistributionPolicy.BEST_OF_RANDOM_2; case LATENCY_AMORTIZED_OVER_REQUESTS: yield DistributionPolicy.LATENCY_AMORTIZED_OVER_REQUESTS; case LATENCY_AMORTIZED_OVER_TIME: yield DistributionPolicy.LATENCY_AMORTIZED_OVER_TIME; }; } @Override public void getConfig(DispatchNodesConfig.Builder builder) { for (SearchNode node : getSearchNodes()) { 
DispatchNodesConfig.Node.Builder nodeBuilder = new DispatchNodesConfig.Node.Builder(); nodeBuilder.key(node.getDistributionKey()); nodeBuilder.group(node.getNodeSpec().groupIndex()); nodeBuilder.host(node.getHostName()); nodeBuilder.port(node.getRpcPort()); builder.node(nodeBuilder); } } @Override public void getConfig(DispatchConfig.Builder builder) { if (tuning.dispatch.getTopkProbability() != null) { builder.topKProbability(tuning.dispatch.getTopkProbability()); } if (tuning.dispatch.getMinActiveDocsCoverage() != null) builder.minActivedocsPercentage(tuning.dispatch.getMinActiveDocsCoverage()); if (tuning.dispatch.getDispatchPolicy() != null) { builder.distributionPolicy(toDistributionPolicy(tuning.dispatch.getDispatchPolicy())); } else { builder.distributionPolicy(toDistributionPolicy(defaultDispatchPolicy)); } if (tuning.dispatch.getMaxHitsPerPartition() != null) builder.maxHitsPerNode(tuning.dispatch.getMaxHitsPerPartition()); builder.redundancy(redundancyProvider.redundancy().finalRedundancy()); if (searchCoverage != null) { if (searchCoverage.getMinimum() != null) builder.minSearchCoverage(searchCoverage.getMinimum() * 100.0); if (searchCoverage.getMinWaitAfterCoverageFactor() != null) builder.minWaitAfterCoverageFactor(searchCoverage.getMinWaitAfterCoverageFactor()); if (searchCoverage.getMaxWaitAfterCoverageFactor() != null) builder.maxWaitAfterCoverageFactor(searchCoverage.getMaxWaitAfterCoverageFactor()); } builder.warmuptime(dispatchWarmup); builder.summaryDecodePolicy(toSummaryDecoding(summaryDecodePolicy)); } private DispatchConfig.SummaryDecodePolicy.Enum toSummaryDecoding(String summaryDecodeType) { return switch (summaryDecodeType.toLowerCase()) { case "eager" -> DispatchConfig.SummaryDecodePolicy.EAGER; case "ondemand","on-demand" -> DispatchConfig.SummaryDecodePolicy.Enum.ONDEMAND; default -> DispatchConfig.SummaryDecodePolicy.Enum.EAGER; }; } @Override public String toString() { return "Indexing cluster '" + getClusterName() + "'"; } /** * 
Class used to retrieve combined configuration from multiple document databases. * It is not a direct {@link ConfigInstance.Producer} of those configs, * that is handled (by delegating to this) by the {@link IndexedSearchCluster} * which is the parent to this. This avoids building the config multiple times. */ }
```suggestion // Cluster searcher guarantees that there will be one, and only one schema here ```
public Result doSearch2(Query query, Execution execution) { if (query.getTimeLeft() <= 0) return new Result(query, ErrorMessage.createTimeout(String.format("No time left for searching (timeout=%d)", query.getTimeout()))); initializeMissingQueryFields(query); if (documentSelectionQueryParameterCount(query) != 1) { return new Result(query, ErrorMessage.createIllegalQuery("Streaming search requires either " + "streaming.groupname or streaming.selection")); } String schema = query.getModel().getRestrict().iterator().next(); if (query.getTrace().isTraceable(4)) query.trace("Routing to search cluster " + getSearchClusterName() + " and document type " + schema, 4); long timeStartedNanos = tracingOptions.getClock().nanoTimeNow(); int effectiveTraceLevel = inferEffectiveQueryTraceLevel(query); Visitor visitor = visitorFactory.createVisitor(query, getSearchClusterName(), route, schema, effectiveTraceLevel); try { visitor.doSearch(); } catch (ParseException e) { return new Result(query, ErrorMessage.createInvalidQueryParameter("Failed to parse document selection string: " + e.getMessage())); } catch (TokenMgrException e) { return new Result(query, ErrorMessage.createInvalidQueryParameter("Failed to tokenize document selection string: " + e.getMessage())); } catch (TimeoutException e) { double elapsedMillis = durationInMillisFromNanoTime(timeStartedNanos); if ((effectiveTraceLevel > 0) && timeoutBadEnoughToBeReported(query, elapsedMillis)) { tracingOptions.getTraceExporter().maybeExport(() -> new TraceDescription(visitor.getTrace(), String.format("Trace of %s which timed out after %.3g seconds", query, elapsedMillis / 1000.0))); } return new Result(query, ErrorMessage.createTimeout(e.getMessage())); } catch (InterruptedException e) { return new Result(query, ErrorMessage.createBackendCommunicationError(e.getMessage())); } return buildResultFromCompletedVisitor(query, visitor); }
public Result doSearch2(Query query, Execution execution) { if (query.getTimeLeft() <= 0) return new Result(query, ErrorMessage.createTimeout(String.format("No time left for searching (timeout=%d)", query.getTimeout()))); initializeMissingQueryFields(query); if (documentSelectionQueryParameterCount(query) != 1) { return new Result(query, ErrorMessage.createIllegalQuery("Streaming search requires either " + "streaming.groupname or streaming.selection")); } String schema = query.getModel().getRestrict().iterator().next(); if (query.getTrace().isTraceable(4)) query.trace("Routing to search cluster " + getSearchClusterName() + " and document type " + schema, 4); long timeStartedNanos = tracingOptions.getClock().nanoTimeNow(); int effectiveTraceLevel = inferEffectiveQueryTraceLevel(query); Visitor visitor = visitorFactory.createVisitor(query, getSearchClusterName(), route, schema, effectiveTraceLevel); try { visitor.doSearch(); } catch (ParseException e) { return new Result(query, ErrorMessage.createInvalidQueryParameter("Failed to parse document selection string: " + e.getMessage())); } catch (TokenMgrException e) { return new Result(query, ErrorMessage.createInvalidQueryParameter("Failed to tokenize document selection string: " + e.getMessage())); } catch (TimeoutException e) { double elapsedMillis = durationInMillisFromNanoTime(timeStartedNanos); if ((effectiveTraceLevel > 0) && timeoutBadEnoughToBeReported(query, elapsedMillis)) { tracingOptions.getTraceExporter().maybeExport(() -> new TraceDescription(visitor.getTrace(), String.format("Trace of %s which timed out after %.3g seconds", query, elapsedMillis / 1000.0))); } return new Result(query, ErrorMessage.createTimeout(e.getMessage())); } catch (InterruptedException e) { return new Result(query, ErrorMessage.createBackendCommunicationError(e.getMessage())); } return buildResultFromCompletedVisitor(query, visitor); }
class StreamingSearcher extends VespaBackEndSearcher { private static final CompoundName streamingUserid = CompoundName.from("streaming.userid"); private static final CompoundName streamingGroupname = CompoundName.from("streaming.groupname"); private static final CompoundName streamingSelection = CompoundName.from("streaming.selection"); static final String STREAMING_STATISTICS = "streaming.statistics"; private final VisitorFactory visitorFactory; private final TracingOptions tracingOptions; private static final Logger log = Logger.getLogger(StreamingSearcher.class.getName()); private Route route; /** The configId used to access the searchcluster. */ private String searchClusterName = null; /** The route to the storage cluster. */ private String storageClusterRouteSpec = null; StreamingSearcher(VisitorFactory visitorFactory) { this.visitorFactory = visitorFactory; tracingOptions = TracingOptions.DEFAULT; } StreamingSearcher(VisitorFactory visitorFactory, TracingOptions tracingOptions) { this.visitorFactory = visitorFactory; this.tracingOptions = tracingOptions; } public StreamingSearcher(VespaDocumentAccess access) { this(new VespaVisitorFactory(access)); } private String getSearchClusterName() { return searchClusterName; } private String getStorageClusterRouteSpec() { return storageClusterRouteSpec; } public final void setSearchClusterName(String clusterName) { this.searchClusterName = clusterName; } public final void setStorageClusterRouteSpec(String storageClusterRouteSpec) { this.storageClusterRouteSpec = storageClusterRouteSpec; } @Override protected void doPartialFill(Result result, String summaryClass) { } private double durationInMillisFromNanoTime(long startTimeNanos) { return (tracingOptions.getClock().nanoTimeNow() - startTimeNanos) / (double)TimeUnit.MILLISECONDS.toNanos(1); } private boolean timeoutBadEnoughToBeReported(Query query, double durationMillis) { return (durationMillis > (query.getTimeout() * 
tracingOptions.getTraceTimeoutMultiplierThreshold())); } private static boolean queryIsLocationConstrained(Query query) { return ((query.properties().getString(streamingUserid) != null) || (query.properties().getString(streamingGroupname) != null)); } private static int documentSelectionQueryParameterCount(Query query) { int paramCount = 0; if (query.properties().getString(streamingUserid) != null) paramCount++; if (query.properties().getString(streamingGroupname) != null) paramCount++; if (query.properties().getString(streamingSelection) != null) paramCount++; return paramCount; } private boolean shouldTraceQuery(Query query) { return (queryIsLocationConstrained(query) && ((query.getTrace().getLevel() > 0) || tracingOptions.getSamplingStrategy().shouldSample())); } private int inferEffectiveQueryTraceLevel(Query query) { return ((query.getTrace().getLevel() == 0) && shouldTraceQuery(query)) ? tracingOptions.getTraceLevelOverride() : query.getTrace().getLevel(); } @Override private void initializeMissingQueryFields(Query query) { lazyTrace(query, 7, "Routing to storage cluster ", getStorageClusterRouteSpec()); if (route == null) { route = Route.parse(getStorageClusterRouteSpec()); } lazyTrace(query, 8, "Route is ", route); lazyTrace(query, 7, "doSearch2(): query docsum class=", query.getPresentation().getSummary(), ", default docsum class=", getDefaultDocsumClass()); if (query.getPresentation().getSummary() == null) { lazyTrace(query, 6, "doSearch2(): No summary class specified in query, using default: ", getDefaultDocsumClass()); query.getPresentation().setSummary(getDefaultDocsumClass()); } else { lazyTrace(query, 6, "doSearch2(): Summary class has been specified in query: ", query.getPresentation().getSummary()); } lazyTrace(query, 8, "doSearch2(): rank properties=", query.getRanking()); lazyTrace(query, 8, "doSearch2(): sort specification=", query .getRanking().getSorting() == null ? 
null : query.getRanking() .getSorting().fieldOrders()); } private Result buildResultFromCompletedVisitor(Query query, Visitor visitor) { lazyTrace(query, 8, "offset=", query.getOffset(), ", hits=", query.getHits()); Result result = new Result(query); List<SearchResult.Hit> hits = visitor.getHits(); Map<String, DocumentSummary.Summary> summaryMap = visitor.getSummaryMap(); lazyTrace(query, 7, "total hit count = ", visitor.getTotalHitCount(), ", returned hit count = ", hits.size(), ", summary count = ", summaryMap.size()); VisitorStatistics stats = visitor.getStatistics(); result.setTotalHitCount(visitor.getTotalHitCount()); result.setCoverage(new Coverage(stats.getDocumentsVisited(), stats.getDocumentsVisited(), 1, 1)); query.trace(visitor.getStatistics().toString(), false, 2); query.getContext(true).setProperty(STREAMING_STATISTICS, stats); DocsumPacket[] summaryPackets = new DocsumPacket [hits.size()]; int index = 0; boolean skippedEarlierResult = false; for (SearchResult.Hit hit : hits) { if (!verifyDocId(hit.getDocId(), query, skippedEarlierResult)) { skippedEarlierResult = true; continue; } FastHit fastHit = buildSummaryHit(query, hit); result.hits().add(fastHit); DocumentSummary.Summary summary = summaryMap.get(hit.getDocId()); if (summary != null) { DocsumPacket dp = new DocsumPacket(summary.getSummary()); summaryPackets[index] = dp; } else { return new Result(query, ErrorMessage.createBackendCommunicationError("Did not find summary for hit with document id " + hit.getDocId())); } index++; } if (result.isFilled(query.getPresentation().getSummary())) { lazyTrace(query, 8, "Result is filled for summary class ", query.getPresentation().getSummary()); } else { lazyTrace(query, 8, "Result is not filled for summary class ", query.getPresentation().getSummary()); } List<Grouping> groupingList = visitor.getGroupings(); lazyTrace(query, 8, "Grouping list=", groupingList); if ( ! 
groupingList.isEmpty() ) { GroupingListHit groupHit = new GroupingListHit(groupingList, getDocumentDatabase(query), query); result.hits().add(groupHit); } FillHitsResult fillHitsResult = fillHits(result, summaryPackets, query.getPresentation().getSummary()); int skippedHits = fillHitsResult.skippedHits; if (fillHitsResult.error != null) { result.hits().addError(ErrorMessage.createTimeout(fillHitsResult.error)); return result; } if (skippedHits == 0) { query.trace("All hits have been filled",4); } else { lazyTrace(query, 8, "Skipping some hits for query: ", result.getQuery()); } lazyTrace(query, 8, "Returning result ", result); if (skippedHits > 0) { getLogger().info("skipping " + skippedHits + " hits for query: " + result.getQuery()); result.hits().addError(ErrorMessage.createTimeout("Missing hit summary data for " + skippedHits + " hits")); } return result; } private FastHit buildSummaryHit(Query query, SearchResult.Hit hit) { FastHit fastHit = new FastHit(); fastHit.setQuery(query); fastHit.setSource(getName()); fastHit.setId(hit.getDocId()); fastHit.setRelevance(new Relevance(hit.getRank())); if (hit instanceof SearchResult.HitWithSortBlob sortedHit) { fastHit.setSortData(sortedHit.getSortBlob(), query.getRanking().getSorting()); } if (hit.getMatchFeatures().isPresent()) { fastHit.setField("matchfeatures", new FeatureData(hit.getMatchFeatures().get())); } fastHit.setFillable(); return fastHit; } private static void lazyTrace(Query query, int level, Object... 
args) { if (query.getTrace().isTraceable(level)) { StringBuilder s = new StringBuilder(); for (Object arg : args) { s.append(arg); } query.trace(s.toString(), level); } } static boolean verifyDocId(String id, Query query, boolean skippedEarlierResult) { String expectedUserId = query.properties().getString(streamingUserid); String expectedGroupName = query.properties().getString(streamingGroupname); Level logLevel = Level.SEVERE; if (skippedEarlierResult) { logLevel = Level.FINE; } DocumentId docId; try { docId = new DocumentId(id); } catch (IllegalArgumentException iae) { log.log(logLevel, "Bad result for " + query + ": " + iae.getMessage()); return false; } if (expectedUserId != null) { long userId; if (docId.getScheme().hasNumber()) { userId = docId.getScheme().getNumber(); } else { log.log(logLevel, "Got result with wrong scheme in document ID (" + id + ") for " + query); return false; } if (new BigInteger(expectedUserId).longValue() != userId) { log.log(logLevel, "Got result with wrong user ID (expected " + expectedUserId + ") in document ID (" + id + ") for " + query); return false; } } else if (expectedGroupName != null) { String groupName; if (docId.getScheme().hasGroup()) { groupName = docId.getScheme().getGroup(); } else { log.log(logLevel, "Got result with wrong scheme in document ID (" + id + ") for " + query); return false; } if (!expectedGroupName.equals(groupName)) { log.log(logLevel, "Got result with wrong group name (expected " + expectedGroupName + ") in document ID (" + id + ") for " + query); return false; } } return true; } public Pong ping(Ping ping, Execution execution) { return new Pong(); } private static class VespaVisitorFactory implements StreamingVisitor.VisitorSessionFactory, VisitorFactory { private final VespaDocumentAccess access; private VespaVisitorFactory(VespaDocumentAccess access) { this.access = access; } @Override public VisitorSession createVisitorSession(VisitorParameters params) throws ParseException { return 
access.createVisitorSession(params); } @Override public Visitor createVisitor(Query query, String searchCluster, Route route, String documentType, int traceLevelOverride) { return new StreamingVisitor(query, searchCluster, route, documentType, this, traceLevelOverride); } } }
class StreamingSearcher extends VespaBackEndSearcher { private static final CompoundName streamingUserid = CompoundName.from("streaming.userid"); private static final CompoundName streamingGroupname = CompoundName.from("streaming.groupname"); private static final CompoundName streamingSelection = CompoundName.from("streaming.selection"); static final String STREAMING_STATISTICS = "streaming.statistics"; private final VisitorFactory visitorFactory; private final TracingOptions tracingOptions; private static final Logger log = Logger.getLogger(StreamingSearcher.class.getName()); private Route route; /** The configId used to access the searchcluster. */ private String searchClusterName = null; /** The route to the storage cluster. */ private String storageClusterRouteSpec = null; StreamingSearcher(VisitorFactory visitorFactory) { this.visitorFactory = visitorFactory; tracingOptions = TracingOptions.DEFAULT; } StreamingSearcher(VisitorFactory visitorFactory, TracingOptions tracingOptions) { this.visitorFactory = visitorFactory; this.tracingOptions = tracingOptions; } public StreamingSearcher(VespaDocumentAccess access) { this(new VespaVisitorFactory(access)); } private String getSearchClusterName() { return searchClusterName; } private String getStorageClusterRouteSpec() { return storageClusterRouteSpec; } public final void setSearchClusterName(String clusterName) { this.searchClusterName = clusterName; } public final void setStorageClusterRouteSpec(String storageClusterRouteSpec) { this.storageClusterRouteSpec = storageClusterRouteSpec; } @Override protected void doPartialFill(Result result, String summaryClass) { } private double durationInMillisFromNanoTime(long startTimeNanos) { return (tracingOptions.getClock().nanoTimeNow() - startTimeNanos) / (double)TimeUnit.MILLISECONDS.toNanos(1); } private boolean timeoutBadEnoughToBeReported(Query query, double durationMillis) { return (durationMillis > (query.getTimeout() * 
tracingOptions.getTraceTimeoutMultiplierThreshold())); } private static boolean queryIsLocationConstrained(Query query) { return ((query.properties().getString(streamingUserid) != null) || (query.properties().getString(streamingGroupname) != null)); } private static int documentSelectionQueryParameterCount(Query query) { int paramCount = 0; if (query.properties().getString(streamingUserid) != null) paramCount++; if (query.properties().getString(streamingGroupname) != null) paramCount++; if (query.properties().getString(streamingSelection) != null) paramCount++; return paramCount; } private boolean shouldTraceQuery(Query query) { return (queryIsLocationConstrained(query) && ((query.getTrace().getLevel() > 0) || tracingOptions.getSamplingStrategy().shouldSample())); } private int inferEffectiveQueryTraceLevel(Query query) { return ((query.getTrace().getLevel() == 0) && shouldTraceQuery(query)) ? tracingOptions.getTraceLevelOverride() : query.getTrace().getLevel(); } @Override private void initializeMissingQueryFields(Query query) { lazyTrace(query, 7, "Routing to storage cluster ", getStorageClusterRouteSpec()); if (route == null) { route = Route.parse(getStorageClusterRouteSpec()); } lazyTrace(query, 8, "Route is ", route); lazyTrace(query, 7, "doSearch2(): query docsum class=", query.getPresentation().getSummary(), ", default docsum class=", getDefaultDocsumClass()); if (query.getPresentation().getSummary() == null) { lazyTrace(query, 6, "doSearch2(): No summary class specified in query, using default: ", getDefaultDocsumClass()); query.getPresentation().setSummary(getDefaultDocsumClass()); } else { lazyTrace(query, 6, "doSearch2(): Summary class has been specified in query: ", query.getPresentation().getSummary()); } lazyTrace(query, 8, "doSearch2(): rank properties=", query.getRanking()); lazyTrace(query, 8, "doSearch2(): sort specification=", query .getRanking().getSorting() == null ? 
null : query.getRanking() .getSorting().fieldOrders()); } private Result buildResultFromCompletedVisitor(Query query, Visitor visitor) { lazyTrace(query, 8, "offset=", query.getOffset(), ", hits=", query.getHits()); Result result = new Result(query); List<SearchResult.Hit> hits = visitor.getHits(); Map<String, DocumentSummary.Summary> summaryMap = visitor.getSummaryMap(); lazyTrace(query, 7, "total hit count = ", visitor.getTotalHitCount(), ", returned hit count = ", hits.size(), ", summary count = ", summaryMap.size()); VisitorStatistics stats = visitor.getStatistics(); result.setTotalHitCount(visitor.getTotalHitCount()); result.setCoverage(new Coverage(stats.getDocumentsVisited(), stats.getDocumentsVisited(), 1, 1)); query.trace(visitor.getStatistics().toString(), false, 2); query.getContext(true).setProperty(STREAMING_STATISTICS, stats); DocsumPacket[] summaryPackets = new DocsumPacket [hits.size()]; int index = 0; boolean skippedEarlierResult = false; for (SearchResult.Hit hit : hits) { if (!verifyDocId(hit.getDocId(), query, skippedEarlierResult)) { skippedEarlierResult = true; continue; } FastHit fastHit = buildSummaryHit(query, hit); result.hits().add(fastHit); DocumentSummary.Summary summary = summaryMap.get(hit.getDocId()); if (summary != null) { DocsumPacket dp = new DocsumPacket(summary.getSummary()); summaryPackets[index] = dp; } else { return new Result(query, ErrorMessage.createBackendCommunicationError("Did not find summary for hit with document id " + hit.getDocId())); } index++; } if (result.isFilled(query.getPresentation().getSummary())) { lazyTrace(query, 8, "Result is filled for summary class ", query.getPresentation().getSummary()); } else { lazyTrace(query, 8, "Result is not filled for summary class ", query.getPresentation().getSummary()); } List<Grouping> groupingList = visitor.getGroupings(); lazyTrace(query, 8, "Grouping list=", groupingList); if ( ! 
groupingList.isEmpty() ) { GroupingListHit groupHit = new GroupingListHit(groupingList, getDocumentDatabase(query), query); result.hits().add(groupHit); } FillHitsResult fillHitsResult = fillHits(result, summaryPackets, query.getPresentation().getSummary()); int skippedHits = fillHitsResult.skippedHits; if (fillHitsResult.error != null) { result.hits().addError(ErrorMessage.createTimeout(fillHitsResult.error)); return result; } if (skippedHits == 0) { query.trace("All hits have been filled",4); } else { lazyTrace(query, 8, "Skipping some hits for query: ", result.getQuery()); } lazyTrace(query, 8, "Returning result ", result); if (skippedHits > 0) { getLogger().info("skipping " + skippedHits + " hits for query: " + result.getQuery()); result.hits().addError(ErrorMessage.createTimeout("Missing hit summary data for " + skippedHits + " hits")); } return result; } private FastHit buildSummaryHit(Query query, SearchResult.Hit hit) { FastHit fastHit = new FastHit(); fastHit.setQuery(query); fastHit.setSource(getName()); fastHit.setId(hit.getDocId()); fastHit.setRelevance(new Relevance(hit.getRank())); if (hit instanceof SearchResult.HitWithSortBlob sortedHit) { fastHit.setSortData(sortedHit.getSortBlob(), query.getRanking().getSorting()); } if (hit.getMatchFeatures().isPresent()) { fastHit.setField("matchfeatures", new FeatureData(hit.getMatchFeatures().get())); } fastHit.setFillable(); return fastHit; } private static void lazyTrace(Query query, int level, Object... 
args) { if (query.getTrace().isTraceable(level)) { StringBuilder s = new StringBuilder(); for (Object arg : args) { s.append(arg); } query.trace(s.toString(), level); } } static boolean verifyDocId(String id, Query query, boolean skippedEarlierResult) { String expectedUserId = query.properties().getString(streamingUserid); String expectedGroupName = query.properties().getString(streamingGroupname); Level logLevel = Level.SEVERE; if (skippedEarlierResult) { logLevel = Level.FINE; } DocumentId docId; try { docId = new DocumentId(id); } catch (IllegalArgumentException iae) { log.log(logLevel, "Bad result for " + query + ": " + iae.getMessage()); return false; } if (expectedUserId != null) { long userId; if (docId.getScheme().hasNumber()) { userId = docId.getScheme().getNumber(); } else { log.log(logLevel, "Got result with wrong scheme in document ID (" + id + ") for " + query); return false; } if (new BigInteger(expectedUserId).longValue() != userId) { log.log(logLevel, "Got result with wrong user ID (expected " + expectedUserId + ") in document ID (" + id + ") for " + query); return false; } } else if (expectedGroupName != null) { String groupName; if (docId.getScheme().hasGroup()) { groupName = docId.getScheme().getGroup(); } else { log.log(logLevel, "Got result with wrong scheme in document ID (" + id + ") for " + query); return false; } if (!expectedGroupName.equals(groupName)) { log.log(logLevel, "Got result with wrong group name (expected " + expectedGroupName + ") in document ID (" + id + ") for " + query); return false; } } return true; } public Pong ping(Ping ping, Execution execution) { return new Pong(); } private static class VespaVisitorFactory implements StreamingVisitor.VisitorSessionFactory, VisitorFactory { private final VespaDocumentAccess access; private VespaVisitorFactory(VespaDocumentAccess access) { this.access = access; } @Override public VisitorSession createVisitorSession(VisitorParameters params) throws ParseException { return 
access.createVisitorSession(params); } @Override public Visitor createVisitor(Query query, String searchCluster, Route route, String documentType, int traceLevelOverride) { return new StreamingVisitor(query, searchCluster, route, documentType, this, traceLevelOverride); } } }
Thanks
public Result doSearch2(Query query, Execution execution) { if (query.getTimeLeft() <= 0) return new Result(query, ErrorMessage.createTimeout(String.format("No time left for searching (timeout=%d)", query.getTimeout()))); initializeMissingQueryFields(query); if (documentSelectionQueryParameterCount(query) != 1) { return new Result(query, ErrorMessage.createIllegalQuery("Streaming search requires either " + "streaming.groupname or streaming.selection")); } String schema = query.getModel().getRestrict().iterator().next(); if (query.getTrace().isTraceable(4)) query.trace("Routing to search cluster " + getSearchClusterName() + " and document type " + schema, 4); long timeStartedNanos = tracingOptions.getClock().nanoTimeNow(); int effectiveTraceLevel = inferEffectiveQueryTraceLevel(query); Visitor visitor = visitorFactory.createVisitor(query, getSearchClusterName(), route, schema, effectiveTraceLevel); try { visitor.doSearch(); } catch (ParseException e) { return new Result(query, ErrorMessage.createInvalidQueryParameter("Failed to parse document selection string: " + e.getMessage())); } catch (TokenMgrException e) { return new Result(query, ErrorMessage.createInvalidQueryParameter("Failed to tokenize document selection string: " + e.getMessage())); } catch (TimeoutException e) { double elapsedMillis = durationInMillisFromNanoTime(timeStartedNanos); if ((effectiveTraceLevel > 0) && timeoutBadEnoughToBeReported(query, elapsedMillis)) { tracingOptions.getTraceExporter().maybeExport(() -> new TraceDescription(visitor.getTrace(), String.format("Trace of %s which timed out after %.3g seconds", query, elapsedMillis / 1000.0))); } return new Result(query, ErrorMessage.createTimeout(e.getMessage())); } catch (InterruptedException e) { return new Result(query, ErrorMessage.createBackendCommunicationError(e.getMessage())); } return buildResultFromCompletedVisitor(query, visitor); }
public Result doSearch2(Query query, Execution execution) { if (query.getTimeLeft() <= 0) return new Result(query, ErrorMessage.createTimeout(String.format("No time left for searching (timeout=%d)", query.getTimeout()))); initializeMissingQueryFields(query); if (documentSelectionQueryParameterCount(query) != 1) { return new Result(query, ErrorMessage.createIllegalQuery("Streaming search requires either " + "streaming.groupname or streaming.selection")); } String schema = query.getModel().getRestrict().iterator().next(); if (query.getTrace().isTraceable(4)) query.trace("Routing to search cluster " + getSearchClusterName() + " and document type " + schema, 4); long timeStartedNanos = tracingOptions.getClock().nanoTimeNow(); int effectiveTraceLevel = inferEffectiveQueryTraceLevel(query); Visitor visitor = visitorFactory.createVisitor(query, getSearchClusterName(), route, schema, effectiveTraceLevel); try { visitor.doSearch(); } catch (ParseException e) { return new Result(query, ErrorMessage.createInvalidQueryParameter("Failed to parse document selection string: " + e.getMessage())); } catch (TokenMgrException e) { return new Result(query, ErrorMessage.createInvalidQueryParameter("Failed to tokenize document selection string: " + e.getMessage())); } catch (TimeoutException e) { double elapsedMillis = durationInMillisFromNanoTime(timeStartedNanos); if ((effectiveTraceLevel > 0) && timeoutBadEnoughToBeReported(query, elapsedMillis)) { tracingOptions.getTraceExporter().maybeExport(() -> new TraceDescription(visitor.getTrace(), String.format("Trace of %s which timed out after %.3g seconds", query, elapsedMillis / 1000.0))); } return new Result(query, ErrorMessage.createTimeout(e.getMessage())); } catch (InterruptedException e) { return new Result(query, ErrorMessage.createBackendCommunicationError(e.getMessage())); } return buildResultFromCompletedVisitor(query, visitor); }
class StreamingSearcher extends VespaBackEndSearcher { private static final CompoundName streamingUserid = CompoundName.from("streaming.userid"); private static final CompoundName streamingGroupname = CompoundName.from("streaming.groupname"); private static final CompoundName streamingSelection = CompoundName.from("streaming.selection"); static final String STREAMING_STATISTICS = "streaming.statistics"; private final VisitorFactory visitorFactory; private final TracingOptions tracingOptions; private static final Logger log = Logger.getLogger(StreamingSearcher.class.getName()); private Route route; /** The configId used to access the searchcluster. */ private String searchClusterName = null; /** The route to the storage cluster. */ private String storageClusterRouteSpec = null; StreamingSearcher(VisitorFactory visitorFactory) { this.visitorFactory = visitorFactory; tracingOptions = TracingOptions.DEFAULT; } StreamingSearcher(VisitorFactory visitorFactory, TracingOptions tracingOptions) { this.visitorFactory = visitorFactory; this.tracingOptions = tracingOptions; } public StreamingSearcher(VespaDocumentAccess access) { this(new VespaVisitorFactory(access)); } private String getSearchClusterName() { return searchClusterName; } private String getStorageClusterRouteSpec() { return storageClusterRouteSpec; } public final void setSearchClusterName(String clusterName) { this.searchClusterName = clusterName; } public final void setStorageClusterRouteSpec(String storageClusterRouteSpec) { this.storageClusterRouteSpec = storageClusterRouteSpec; } @Override protected void doPartialFill(Result result, String summaryClass) { } private double durationInMillisFromNanoTime(long startTimeNanos) { return (tracingOptions.getClock().nanoTimeNow() - startTimeNanos) / (double)TimeUnit.MILLISECONDS.toNanos(1); } private boolean timeoutBadEnoughToBeReported(Query query, double durationMillis) { return (durationMillis > (query.getTimeout() * 
tracingOptions.getTraceTimeoutMultiplierThreshold())); } private static boolean queryIsLocationConstrained(Query query) { return ((query.properties().getString(streamingUserid) != null) || (query.properties().getString(streamingGroupname) != null)); } private static int documentSelectionQueryParameterCount(Query query) { int paramCount = 0; if (query.properties().getString(streamingUserid) != null) paramCount++; if (query.properties().getString(streamingGroupname) != null) paramCount++; if (query.properties().getString(streamingSelection) != null) paramCount++; return paramCount; } private boolean shouldTraceQuery(Query query) { return (queryIsLocationConstrained(query) && ((query.getTrace().getLevel() > 0) || tracingOptions.getSamplingStrategy().shouldSample())); } private int inferEffectiveQueryTraceLevel(Query query) { return ((query.getTrace().getLevel() == 0) && shouldTraceQuery(query)) ? tracingOptions.getTraceLevelOverride() : query.getTrace().getLevel(); } @Override private void initializeMissingQueryFields(Query query) { lazyTrace(query, 7, "Routing to storage cluster ", getStorageClusterRouteSpec()); if (route == null) { route = Route.parse(getStorageClusterRouteSpec()); } lazyTrace(query, 8, "Route is ", route); lazyTrace(query, 7, "doSearch2(): query docsum class=", query.getPresentation().getSummary(), ", default docsum class=", getDefaultDocsumClass()); if (query.getPresentation().getSummary() == null) { lazyTrace(query, 6, "doSearch2(): No summary class specified in query, using default: ", getDefaultDocsumClass()); query.getPresentation().setSummary(getDefaultDocsumClass()); } else { lazyTrace(query, 6, "doSearch2(): Summary class has been specified in query: ", query.getPresentation().getSummary()); } lazyTrace(query, 8, "doSearch2(): rank properties=", query.getRanking()); lazyTrace(query, 8, "doSearch2(): sort specification=", query .getRanking().getSorting() == null ? 
null : query.getRanking() .getSorting().fieldOrders()); } private Result buildResultFromCompletedVisitor(Query query, Visitor visitor) { lazyTrace(query, 8, "offset=", query.getOffset(), ", hits=", query.getHits()); Result result = new Result(query); List<SearchResult.Hit> hits = visitor.getHits(); Map<String, DocumentSummary.Summary> summaryMap = visitor.getSummaryMap(); lazyTrace(query, 7, "total hit count = ", visitor.getTotalHitCount(), ", returned hit count = ", hits.size(), ", summary count = ", summaryMap.size()); VisitorStatistics stats = visitor.getStatistics(); result.setTotalHitCount(visitor.getTotalHitCount()); result.setCoverage(new Coverage(stats.getDocumentsVisited(), stats.getDocumentsVisited(), 1, 1)); query.trace(visitor.getStatistics().toString(), false, 2); query.getContext(true).setProperty(STREAMING_STATISTICS, stats); DocsumPacket[] summaryPackets = new DocsumPacket [hits.size()]; int index = 0; boolean skippedEarlierResult = false; for (SearchResult.Hit hit : hits) { if (!verifyDocId(hit.getDocId(), query, skippedEarlierResult)) { skippedEarlierResult = true; continue; } FastHit fastHit = buildSummaryHit(query, hit); result.hits().add(fastHit); DocumentSummary.Summary summary = summaryMap.get(hit.getDocId()); if (summary != null) { DocsumPacket dp = new DocsumPacket(summary.getSummary()); summaryPackets[index] = dp; } else { return new Result(query, ErrorMessage.createBackendCommunicationError("Did not find summary for hit with document id " + hit.getDocId())); } index++; } if (result.isFilled(query.getPresentation().getSummary())) { lazyTrace(query, 8, "Result is filled for summary class ", query.getPresentation().getSummary()); } else { lazyTrace(query, 8, "Result is not filled for summary class ", query.getPresentation().getSummary()); } List<Grouping> groupingList = visitor.getGroupings(); lazyTrace(query, 8, "Grouping list=", groupingList); if ( ! 
groupingList.isEmpty() ) { GroupingListHit groupHit = new GroupingListHit(groupingList, getDocumentDatabase(query), query); result.hits().add(groupHit); } FillHitsResult fillHitsResult = fillHits(result, summaryPackets, query.getPresentation().getSummary()); int skippedHits = fillHitsResult.skippedHits; if (fillHitsResult.error != null) { result.hits().addError(ErrorMessage.createTimeout(fillHitsResult.error)); return result; } if (skippedHits == 0) { query.trace("All hits have been filled",4); } else { lazyTrace(query, 8, "Skipping some hits for query: ", result.getQuery()); } lazyTrace(query, 8, "Returning result ", result); if (skippedHits > 0) { getLogger().info("skipping " + skippedHits + " hits for query: " + result.getQuery()); result.hits().addError(ErrorMessage.createTimeout("Missing hit summary data for " + skippedHits + " hits")); } return result; } private FastHit buildSummaryHit(Query query, SearchResult.Hit hit) { FastHit fastHit = new FastHit(); fastHit.setQuery(query); fastHit.setSource(getName()); fastHit.setId(hit.getDocId()); fastHit.setRelevance(new Relevance(hit.getRank())); if (hit instanceof SearchResult.HitWithSortBlob sortedHit) { fastHit.setSortData(sortedHit.getSortBlob(), query.getRanking().getSorting()); } if (hit.getMatchFeatures().isPresent()) { fastHit.setField("matchfeatures", new FeatureData(hit.getMatchFeatures().get())); } fastHit.setFillable(); return fastHit; } private static void lazyTrace(Query query, int level, Object... 
args) { if (query.getTrace().isTraceable(level)) { StringBuilder s = new StringBuilder(); for (Object arg : args) { s.append(arg); } query.trace(s.toString(), level); } } static boolean verifyDocId(String id, Query query, boolean skippedEarlierResult) { String expectedUserId = query.properties().getString(streamingUserid); String expectedGroupName = query.properties().getString(streamingGroupname); Level logLevel = Level.SEVERE; if (skippedEarlierResult) { logLevel = Level.FINE; } DocumentId docId; try { docId = new DocumentId(id); } catch (IllegalArgumentException iae) { log.log(logLevel, "Bad result for " + query + ": " + iae.getMessage()); return false; } if (expectedUserId != null) { long userId; if (docId.getScheme().hasNumber()) { userId = docId.getScheme().getNumber(); } else { log.log(logLevel, "Got result with wrong scheme in document ID (" + id + ") for " + query); return false; } if (new BigInteger(expectedUserId).longValue() != userId) { log.log(logLevel, "Got result with wrong user ID (expected " + expectedUserId + ") in document ID (" + id + ") for " + query); return false; } } else if (expectedGroupName != null) { String groupName; if (docId.getScheme().hasGroup()) { groupName = docId.getScheme().getGroup(); } else { log.log(logLevel, "Got result with wrong scheme in document ID (" + id + ") for " + query); return false; } if (!expectedGroupName.equals(groupName)) { log.log(logLevel, "Got result with wrong group name (expected " + expectedGroupName + ") in document ID (" + id + ") for " + query); return false; } } return true; } public Pong ping(Ping ping, Execution execution) { return new Pong(); } private static class VespaVisitorFactory implements StreamingVisitor.VisitorSessionFactory, VisitorFactory { private final VespaDocumentAccess access; private VespaVisitorFactory(VespaDocumentAccess access) { this.access = access; } @Override public VisitorSession createVisitorSession(VisitorParameters params) throws ParseException { return 
access.createVisitorSession(params); } @Override public Visitor createVisitor(Query query, String searchCluster, Route route, String documentType, int traceLevelOverride) { return new StreamingVisitor(query, searchCluster, route, documentType, this, traceLevelOverride); } } }
class StreamingSearcher extends VespaBackEndSearcher { private static final CompoundName streamingUserid = CompoundName.from("streaming.userid"); private static final CompoundName streamingGroupname = CompoundName.from("streaming.groupname"); private static final CompoundName streamingSelection = CompoundName.from("streaming.selection"); static final String STREAMING_STATISTICS = "streaming.statistics"; private final VisitorFactory visitorFactory; private final TracingOptions tracingOptions; private static final Logger log = Logger.getLogger(StreamingSearcher.class.getName()); private Route route; /** The configId used to access the searchcluster. */ private String searchClusterName = null; /** The route to the storage cluster. */ private String storageClusterRouteSpec = null; StreamingSearcher(VisitorFactory visitorFactory) { this.visitorFactory = visitorFactory; tracingOptions = TracingOptions.DEFAULT; } StreamingSearcher(VisitorFactory visitorFactory, TracingOptions tracingOptions) { this.visitorFactory = visitorFactory; this.tracingOptions = tracingOptions; } public StreamingSearcher(VespaDocumentAccess access) { this(new VespaVisitorFactory(access)); } private String getSearchClusterName() { return searchClusterName; } private String getStorageClusterRouteSpec() { return storageClusterRouteSpec; } public final void setSearchClusterName(String clusterName) { this.searchClusterName = clusterName; } public final void setStorageClusterRouteSpec(String storageClusterRouteSpec) { this.storageClusterRouteSpec = storageClusterRouteSpec; } @Override protected void doPartialFill(Result result, String summaryClass) { } private double durationInMillisFromNanoTime(long startTimeNanos) { return (tracingOptions.getClock().nanoTimeNow() - startTimeNanos) / (double)TimeUnit.MILLISECONDS.toNanos(1); } private boolean timeoutBadEnoughToBeReported(Query query, double durationMillis) { return (durationMillis > (query.getTimeout() * 
tracingOptions.getTraceTimeoutMultiplierThreshold())); } private static boolean queryIsLocationConstrained(Query query) { return ((query.properties().getString(streamingUserid) != null) || (query.properties().getString(streamingGroupname) != null)); } private static int documentSelectionQueryParameterCount(Query query) { int paramCount = 0; if (query.properties().getString(streamingUserid) != null) paramCount++; if (query.properties().getString(streamingGroupname) != null) paramCount++; if (query.properties().getString(streamingSelection) != null) paramCount++; return paramCount; } private boolean shouldTraceQuery(Query query) { return (queryIsLocationConstrained(query) && ((query.getTrace().getLevel() > 0) || tracingOptions.getSamplingStrategy().shouldSample())); } private int inferEffectiveQueryTraceLevel(Query query) { return ((query.getTrace().getLevel() == 0) && shouldTraceQuery(query)) ? tracingOptions.getTraceLevelOverride() : query.getTrace().getLevel(); } @Override private void initializeMissingQueryFields(Query query) { lazyTrace(query, 7, "Routing to storage cluster ", getStorageClusterRouteSpec()); if (route == null) { route = Route.parse(getStorageClusterRouteSpec()); } lazyTrace(query, 8, "Route is ", route); lazyTrace(query, 7, "doSearch2(): query docsum class=", query.getPresentation().getSummary(), ", default docsum class=", getDefaultDocsumClass()); if (query.getPresentation().getSummary() == null) { lazyTrace(query, 6, "doSearch2(): No summary class specified in query, using default: ", getDefaultDocsumClass()); query.getPresentation().setSummary(getDefaultDocsumClass()); } else { lazyTrace(query, 6, "doSearch2(): Summary class has been specified in query: ", query.getPresentation().getSummary()); } lazyTrace(query, 8, "doSearch2(): rank properties=", query.getRanking()); lazyTrace(query, 8, "doSearch2(): sort specification=", query .getRanking().getSorting() == null ? 
null : query.getRanking() .getSorting().fieldOrders()); } private Result buildResultFromCompletedVisitor(Query query, Visitor visitor) { lazyTrace(query, 8, "offset=", query.getOffset(), ", hits=", query.getHits()); Result result = new Result(query); List<SearchResult.Hit> hits = visitor.getHits(); Map<String, DocumentSummary.Summary> summaryMap = visitor.getSummaryMap(); lazyTrace(query, 7, "total hit count = ", visitor.getTotalHitCount(), ", returned hit count = ", hits.size(), ", summary count = ", summaryMap.size()); VisitorStatistics stats = visitor.getStatistics(); result.setTotalHitCount(visitor.getTotalHitCount()); result.setCoverage(new Coverage(stats.getDocumentsVisited(), stats.getDocumentsVisited(), 1, 1)); query.trace(visitor.getStatistics().toString(), false, 2); query.getContext(true).setProperty(STREAMING_STATISTICS, stats); DocsumPacket[] summaryPackets = new DocsumPacket [hits.size()]; int index = 0; boolean skippedEarlierResult = false; for (SearchResult.Hit hit : hits) { if (!verifyDocId(hit.getDocId(), query, skippedEarlierResult)) { skippedEarlierResult = true; continue; } FastHit fastHit = buildSummaryHit(query, hit); result.hits().add(fastHit); DocumentSummary.Summary summary = summaryMap.get(hit.getDocId()); if (summary != null) { DocsumPacket dp = new DocsumPacket(summary.getSummary()); summaryPackets[index] = dp; } else { return new Result(query, ErrorMessage.createBackendCommunicationError("Did not find summary for hit with document id " + hit.getDocId())); } index++; } if (result.isFilled(query.getPresentation().getSummary())) { lazyTrace(query, 8, "Result is filled for summary class ", query.getPresentation().getSummary()); } else { lazyTrace(query, 8, "Result is not filled for summary class ", query.getPresentation().getSummary()); } List<Grouping> groupingList = visitor.getGroupings(); lazyTrace(query, 8, "Grouping list=", groupingList); if ( ! 
groupingList.isEmpty() ) { GroupingListHit groupHit = new GroupingListHit(groupingList, getDocumentDatabase(query), query); result.hits().add(groupHit); } FillHitsResult fillHitsResult = fillHits(result, summaryPackets, query.getPresentation().getSummary()); int skippedHits = fillHitsResult.skippedHits; if (fillHitsResult.error != null) { result.hits().addError(ErrorMessage.createTimeout(fillHitsResult.error)); return result; } if (skippedHits == 0) { query.trace("All hits have been filled",4); } else { lazyTrace(query, 8, "Skipping some hits for query: ", result.getQuery()); } lazyTrace(query, 8, "Returning result ", result); if (skippedHits > 0) { getLogger().info("skipping " + skippedHits + " hits for query: " + result.getQuery()); result.hits().addError(ErrorMessage.createTimeout("Missing hit summary data for " + skippedHits + " hits")); } return result; } private FastHit buildSummaryHit(Query query, SearchResult.Hit hit) { FastHit fastHit = new FastHit(); fastHit.setQuery(query); fastHit.setSource(getName()); fastHit.setId(hit.getDocId()); fastHit.setRelevance(new Relevance(hit.getRank())); if (hit instanceof SearchResult.HitWithSortBlob sortedHit) { fastHit.setSortData(sortedHit.getSortBlob(), query.getRanking().getSorting()); } if (hit.getMatchFeatures().isPresent()) { fastHit.setField("matchfeatures", new FeatureData(hit.getMatchFeatures().get())); } fastHit.setFillable(); return fastHit; } private static void lazyTrace(Query query, int level, Object... 
args) { if (query.getTrace().isTraceable(level)) { StringBuilder s = new StringBuilder(); for (Object arg : args) { s.append(arg); } query.trace(s.toString(), level); } } static boolean verifyDocId(String id, Query query, boolean skippedEarlierResult) { String expectedUserId = query.properties().getString(streamingUserid); String expectedGroupName = query.properties().getString(streamingGroupname); Level logLevel = Level.SEVERE; if (skippedEarlierResult) { logLevel = Level.FINE; } DocumentId docId; try { docId = new DocumentId(id); } catch (IllegalArgumentException iae) { log.log(logLevel, "Bad result for " + query + ": " + iae.getMessage()); return false; } if (expectedUserId != null) { long userId; if (docId.getScheme().hasNumber()) { userId = docId.getScheme().getNumber(); } else { log.log(logLevel, "Got result with wrong scheme in document ID (" + id + ") for " + query); return false; } if (new BigInteger(expectedUserId).longValue() != userId) { log.log(logLevel, "Got result with wrong user ID (expected " + expectedUserId + ") in document ID (" + id + ") for " + query); return false; } } else if (expectedGroupName != null) { String groupName; if (docId.getScheme().hasGroup()) { groupName = docId.getScheme().getGroup(); } else { log.log(logLevel, "Got result with wrong scheme in document ID (" + id + ") for " + query); return false; } if (!expectedGroupName.equals(groupName)) { log.log(logLevel, "Got result with wrong group name (expected " + expectedGroupName + ") in document ID (" + id + ") for " + query); return false; } } return true; } public Pong ping(Ping ping, Execution execution) { return new Pong(); } private static class VespaVisitorFactory implements StreamingVisitor.VisitorSessionFactory, VisitorFactory { private final VespaDocumentAccess access; private VespaVisitorFactory(VespaDocumentAccess access) { this.access = access; } @Override public VisitorSession createVisitorSession(VisitorParameters params) throws ParseException { return 
access.createVisitorSession(params); } @Override public Visitor createVisitor(Query query, String searchCluster, Route route, String documentType, int traceLevelOverride) { return new StreamingVisitor(query, searchCluster, route, documentType, this, traceLevelOverride); } } }
Consider preserving the information that this is in mode streaming. E.g. _"', streaming schema '"_. The same applies for the other warnings changed in this PR.
private static void warnStreamingGramMatching(String cluster, Schema schema, DeployLogger logger) { for (ImmutableSDField sd : schema.allConcreteFields()) { if (sd.getMatching().getType() == MatchType.GRAM) { logger.logApplicationPackage(Level.WARNING, "For search cluster '" + cluster + "', schema '" + schema.getName() + "', SD field '" + sd.getName() + "': n-gram matching is not supported for streaming search."); } } }
logger.logApplicationPackage(Level.WARNING, "For search cluster '" + cluster + "', schema '" + schema.getName() +
private static void warnStreamingGramMatching(String cluster, Schema schema, DeployLogger logger) { for (ImmutableSDField sd : schema.allConcreteFields()) { if (sd.getMatching().getType() == MatchType.GRAM) { logger.logApplicationPackage(Level.WARNING, "For search cluster '" + cluster + "', streaming schema '" + schema.getName() + "', SD field '" + sd.getName() + "': n-gram matching is not supported for streaming search."); } } }
class StreamingValidator implements Validator { @Override public void validate(Context context) { List<SearchCluster> searchClusters = context.model().getSearchClusters(); for (SearchCluster cluster : searchClusters) { for (SchemaInfo schemaInfo : cluster.schemas().values()) { if (schemaInfo.getIndexMode() == SchemaInfo.IndexMode.STREAMING) { var deployLogger = context.deployState().getDeployLogger(); warnStreamingAttributes(cluster.getClusterName(), schemaInfo.fullSchema(), deployLogger); warnStreamingGramMatching(cluster.getClusterName(), schemaInfo.fullSchema(), deployLogger); failStreamingDocumentReferences(cluster.getClusterName(), cluster.getDocumentDB(schemaInfo.name()).getDerivedConfiguration(), context); } } } } /** * Warn if one or more attributes are defined in a streaming search cluster SD. */ private static void warnStreamingAttributes(String cluster, Schema schema, DeployLogger logger) { for (ImmutableSDField sd : schema.allConcreteFields()) { if (sd.doesAttributing()) { warnStreamingAttribute(cluster, schema.getName(), sd, logger); } } } private static void warnStreamingAttribute(String cluster, String schema, ImmutableSDField sd, DeployLogger logger) { if (sd.getDataType() instanceof NumericDataType) return; if (sd.getDataType() instanceof TensorDataType) { for (var fieldAttribute : sd.getAttributes().values()) { if (fieldAttribute.hnswIndexParams().isPresent()) { logger.logApplicationPackage(Level.WARNING, "For search cluster '" + cluster + "', schema '" + schema + "', SD field '" + sd.getName() + "': hnsw index is not relevant and not supported, ignoring setting"); } } return; } logger.logApplicationPackage(Level.WARNING, "For search cluster '" + cluster + "', SD field '" + sd.getName() + "': 'attribute' has same match semantics as 'index'."); } private static void failStreamingDocumentReferences(String cluster, DerivedConfiguration derived, Context context) { for (Attribute attribute : derived.getAttributeFields().attributes()) { DataType 
dataType = attribute.getDataType(); if (dataType instanceof NewDocumentReferenceDataType) { String errorMessage = String.format("For search cluster '%s', schema '%s': Attribute '%s' has type '%s'. " + "Document references and imported fields are not allowed in streaming search.", cluster, derived.getSchema().getName(), attribute.getName(), dataType.getName()); context.illegal(errorMessage); } } } }
class StreamingValidator implements Validator { @Override public void validate(Context context) { List<SearchCluster> searchClusters = context.model().getSearchClusters(); for (SearchCluster cluster : searchClusters) { for (SchemaInfo schemaInfo : cluster.schemas().values()) { if (schemaInfo.getIndexMode() == SchemaInfo.IndexMode.STREAMING) { var deployLogger = context.deployState().getDeployLogger(); warnStreamingAttributes(cluster.getClusterName(), schemaInfo.fullSchema(), deployLogger); warnStreamingGramMatching(cluster.getClusterName(), schemaInfo.fullSchema(), deployLogger); failStreamingDocumentReferences(cluster.getClusterName(), cluster.getDocumentDB(schemaInfo.name()).getDerivedConfiguration(), context); } } } } /** * Warn if one or more attributes are defined in a streaming search cluster SD. */ private static void warnStreamingAttributes(String cluster, Schema schema, DeployLogger logger) { for (ImmutableSDField sd : schema.allConcreteFields()) { if (sd.doesAttributing()) { warnStreamingAttribute(cluster, schema.getName(), sd, logger); } } } private static void warnStreamingAttribute(String cluster, String schema, ImmutableSDField sd, DeployLogger logger) { if (sd.getDataType() instanceof NumericDataType) return; if (sd.getDataType() instanceof TensorDataType) { for (var fieldAttribute : sd.getAttributes().values()) { if (fieldAttribute.hnswIndexParams().isPresent()) { logger.logApplicationPackage(Level.WARNING, "For search cluster '" + cluster + "', streaming schema '" + schema + "', SD field '" + sd.getName() + "': hnsw index is not relevant and not supported, ignoring setting"); } } return; } } private static void failStreamingDocumentReferences(String cluster, DerivedConfiguration derived, Context context) { for (Attribute attribute : derived.getAttributeFields().attributes()) { DataType dataType = attribute.getDataType(); if (dataType instanceof NewDocumentReferenceDataType) { String errorMessage = String.format("For search cluster '%s', streaming 
schema '%s': Attribute '%s' has type '%s'. " + "Document references and imported fields are not allowed in streaming search.", cluster, derived.getSchema().getName(), attribute.getName(), dataType.getName()); context.illegal(errorMessage); } } } }
Remove this check because it has already been guaranteed.
private void validate(Map<String, String> properties) throws DdlException { if (properties == null) { throw new DdlException("Please set properties of hive table, " + "they are: database, table and resource"); } Map<String, String> copiedProps = Maps.newHashMap(properties); hiveDb = copiedProps.get(HIVE_DB); if (Strings.isNullOrEmpty(hiveDb)) { throw new DdlException(String.format(PROPERTY_MISSING_MSG, HIVE_DB, HIVE_DB)); } copiedProps.remove(HIVE_DB); hiveTable = copiedProps.get(HIVE_TABLE); if (Strings.isNullOrEmpty(hiveTable)) { throw new DdlException(String.format(PROPERTY_MISSING_MSG, HIVE_TABLE, HIVE_TABLE)); } copiedProps.remove(HIVE_TABLE); String hiveMetastoreUris = copiedProps.get(HIVE_METASTORE_URIS); String resourceName = copiedProps.get(HIVE_RESOURCE); if (Strings.isNullOrEmpty(resourceName)) { throw new DdlException("property " + HIVE_RESOURCE + " must be set"); } if (!Strings.isNullOrEmpty(hiveMetastoreUris)) { copiedProps.remove(HIVE_METASTORE_URIS); LOG.warn("property " + HIVE_METASTORE_URIS + " will be ignored " + "and hive table will be created by using property " + HIVE_RESOURCE + " only."); } if (!Strings.isNullOrEmpty(resourceName)) { copiedProps.remove(HIVE_RESOURCE); Resource resource = Catalog.getCurrentCatalog().getResourceMgr().getResource(resourceName); if (resource == null) { throw new DdlException("hive resource [" + resourceName + "] not exists"); } if (resource.getType() != ResourceType.HIVE) { throw new DdlException("resource [" + resourceName + "] is not hive resource"); } HiveResource hiveResource = (HiveResource) resource; hiveProperties.put(HIVE_METASTORE_URIS, hiveResource.getHiveMetastoreURIs()); this.resourceName = resourceName; org.apache.hadoop.hive.metastore.api.Table hiveTable = Catalog.getCurrentCatalog().getHiveRepository() .getTable(resourceName, this.hiveDb, this.hiveTable); String hiveTableType = hiveTable.getTableType(); if (hiveTableType == null) { throw new DdlException("Unknown hive table type."); } switch 
(hiveTableType) { case "VIRTUAL_VIEW": throw new DdlException("Hive view table is not supported."); case "EXTERNAL_TABLE": case "MANAGED_TABLE": break; default: throw new DdlException("unsupported hive table type [" + hiveTableType + "]."); } List<FieldSchema> unPartHiveColumns = hiveTable.getSd().getCols(); List<FieldSchema> partHiveColumns = hiveTable.getPartitionKeys(); Map<String, FieldSchema> allHiveColumns = unPartHiveColumns.stream() .collect(Collectors.toMap(FieldSchema::getName, fieldSchema -> fieldSchema)); for (FieldSchema hiveColumn : partHiveColumns) { allHiveColumns.put(hiveColumn.getName(), hiveColumn); } for (Column column : this.fullSchema) { FieldSchema hiveColumn = allHiveColumns.get(column.getName()); if (hiveColumn == null) { throw new DdlException("column [" + column.getName() + "] not exists in hive"); } Set<PrimitiveType> validColumnTypes = getValidColumnType(hiveColumn.getType()); if (!validColumnTypes.contains(column.getPrimitiveType())) { throw new DdlException("can not convert hive column type [" + hiveColumn.getType() + "] to " + "starrocks type [" + column.getPrimitiveType() + "]"); } if (!column.isAllowNull() && !isTypeRead) { throw new DdlException( "hive extern table not support no-nullable column: [" + hiveColumn.getName() + "]"); } } for (FieldSchema partHiveColumn : partHiveColumns) { String columnName = partHiveColumn.getName(); Column partColumn = this.nameToColumn.get(columnName); if (partColumn == null) { throw new DdlException("partition column [" + columnName + "] must exist in column list"); } else { this.partColumnNames.add(columnName); } } for (FieldSchema s : unPartHiveColumns) { this.dataColumnNames.add(s.getName()); } this.hdfsPath = hiveTable.getSd().getLocation(); } if (!copiedProps.isEmpty()) { throw new DdlException("Unknown table properties: " + copiedProps.toString()); } }
if (!Strings.isNullOrEmpty(resourceName)) {
private void validate(Map<String, String> properties) throws DdlException { if (properties == null) { throw new DdlException("Please set properties of hive table, " + "they are: database, table and resource"); } Map<String, String> copiedProps = Maps.newHashMap(properties); hiveDb = copiedProps.get(HIVE_DB); if (Strings.isNullOrEmpty(hiveDb)) { throw new DdlException(String.format(PROPERTY_MISSING_MSG, HIVE_DB, HIVE_DB)); } copiedProps.remove(HIVE_DB); hiveTable = copiedProps.get(HIVE_TABLE); if (Strings.isNullOrEmpty(hiveTable)) { throw new DdlException(String.format(PROPERTY_MISSING_MSG, HIVE_TABLE, HIVE_TABLE)); } copiedProps.remove(HIVE_TABLE); String hiveMetastoreUris = copiedProps.get(HIVE_METASTORE_URIS); String resourceName = copiedProps.get(HIVE_RESOURCE); if (Strings.isNullOrEmpty(resourceName)) { throw new DdlException("property " + HIVE_RESOURCE + " must be set"); } if (!Strings.isNullOrEmpty(hiveMetastoreUris)) { copiedProps.remove(HIVE_METASTORE_URIS); LOG.warn("property " + HIVE_METASTORE_URIS + " will be ignored " + "and hive table will be created by using property " + HIVE_RESOURCE + " only."); } copiedProps.remove(HIVE_RESOURCE); Resource resource = Catalog.getCurrentCatalog().getResourceMgr().getResource(resourceName); if (resource == null) { throw new DdlException("hive resource [" + resourceName + "] not exists"); } if (resource.getType() != ResourceType.HIVE) { throw new DdlException("resource [" + resourceName + "] is not hive resource"); } HiveResource hiveResource = (HiveResource) resource; hiveProperties.put(HIVE_METASTORE_URIS, hiveResource.getHiveMetastoreURIs()); this.resourceName = resourceName; org.apache.hadoop.hive.metastore.api.Table hiveTable = Catalog.getCurrentCatalog().getHiveRepository() .getTable(resourceName, this.hiveDb, this.hiveTable); String hiveTableType = hiveTable.getTableType(); if (hiveTableType == null) { throw new DdlException("Unknown hive table type."); } switch (hiveTableType) { case "VIRTUAL_VIEW": throw new 
DdlException("Hive view table is not supported."); case "EXTERNAL_TABLE": case "MANAGED_TABLE": break; default: throw new DdlException("unsupported hive table type [" + hiveTableType + "]."); } List<FieldSchema> unPartHiveColumns = hiveTable.getSd().getCols(); List<FieldSchema> partHiveColumns = hiveTable.getPartitionKeys(); Map<String, FieldSchema> allHiveColumns = unPartHiveColumns.stream() .collect(Collectors.toMap(FieldSchema::getName, fieldSchema -> fieldSchema)); for (FieldSchema hiveColumn : partHiveColumns) { allHiveColumns.put(hiveColumn.getName(), hiveColumn); } for (Column column : this.fullSchema) { FieldSchema hiveColumn = allHiveColumns.get(column.getName()); if (hiveColumn == null) { throw new DdlException("column [" + column.getName() + "] not exists in hive"); } Set<PrimitiveType> validColumnTypes = getValidColumnType(hiveColumn.getType()); if (!validColumnTypes.contains(column.getPrimitiveType())) { throw new DdlException("can not convert hive column type [" + hiveColumn.getType() + "] to " + "starrocks type [" + column.getPrimitiveType() + "]"); } if (!column.isAllowNull() && !isTypeRead) { throw new DdlException( "hive extern table not support no-nullable column: [" + hiveColumn.getName() + "]"); } } for (FieldSchema partHiveColumn : partHiveColumns) { String columnName = partHiveColumn.getName(); Column partColumn = this.nameToColumn.get(columnName); if (partColumn == null) { throw new DdlException("partition column [" + columnName + "] must exist in column list"); } else { this.partColumnNames.add(columnName); } } for (FieldSchema s : unPartHiveColumns) { this.dataColumnNames.add(s.getName()); } this.hdfsPath = hiveTable.getSd().getLocation(); if (!copiedProps.isEmpty()) { throw new DdlException("Unknown table properties: " + copiedProps.toString()); } }
class HiveTable extends Table { private static final Logger LOG = LogManager.getLogger(HiveTable.class); private static final String PROPERTY_MISSING_MSG = "Hive %s is null. Please add properties('%s'='xxx') when create table"; private static final String JSON_KEY_HIVE_DB = "hiveDb"; private static final String JSON_KEY_HIVE_TABLE = "hiveTable"; private static final String JSON_KEY_RESOURCE_NAME = "resourceName"; private static final String JSON_KEY_HDFS_PATH = "hdfsPath"; private static final String JSON_KEY_PART_COLUMN_NAMES = "partColumnNames"; private static final String JSON_KEY_DATA_COLUMN_NAMES = "dataColumnNames"; private static final String JSON_KEY_HIVE_PROPERTIES = "hiveProperties"; private static final String HIVE_DB = "database"; private static final String HIVE_TABLE = "table"; private static final String HIVE_METASTORE_URIS = "hive.metastore.uris"; private static final String HIVE_RESOURCE = "resource"; private String hiveDb; private String hiveTable; private String resourceName; private String hdfsPath; private List<String> partColumnNames = Lists.newArrayList(); private List<String> dataColumnNames = Lists.newArrayList(); private Map<String, String> hiveProperties = Maps.newHashMap(); public HiveTable() { super(TableType.HIVE); } public HiveTable(long id, String name, List<Column> schema, Map<String, String> properties) throws DdlException { super(id, name, TableType.HIVE, schema); validate(properties); } public String getHiveDbTable() { return String.format("%s.%s", hiveDb, hiveTable); } public String getResourceName() { return resourceName; } public String getHiveDb() { return hiveDb; } public String getHiveTable() { return hiveTable; } public List<Column> getPartitionColumns() { List<Column> partColumns = Lists.newArrayList(); for (String columnName : partColumnNames) { partColumns.add(nameToColumn.get(columnName)); } return partColumns; } public List<String> getPartitionColumnNames() { return partColumnNames; } public List<String> 
getDataColumnNames() { return dataColumnNames; } public String getHdfsPath() { return this.hdfsPath; } public Map<String, String> getHiveProperties() { return hiveProperties; } public Map<PartitionKey, Long> getPartitionKeys() throws DdlException { List<Column> partColumns = getPartitionColumns(); return Catalog.getCurrentCatalog().getHiveRepository() .getPartitionKeys(resourceName, hiveDb, hiveTable, partColumns); } public HivePartition getPartition(PartitionKey partitionKey) throws DdlException { return Catalog.getCurrentCatalog().getHiveRepository() .getPartition(resourceName, hiveDb, hiveTable, partitionKey); } public List<HivePartition> getPartitions(List<PartitionKey> partitionKeys) throws DdlException { return Catalog.getCurrentCatalog().getHiveRepository() .getPartitions(resourceName, hiveDb, hiveTable, partitionKeys); } public HiveTableStats getTableStats() throws DdlException { return Catalog.getCurrentCatalog().getHiveRepository().getTableStats(resourceName, hiveDb, hiveTable); } public List<HivePartitionStats> getPartitionsStats(List<PartitionKey> partitionKeys) throws DdlException { return Catalog.getCurrentCatalog().getHiveRepository() .getPartitionsStats(resourceName, hiveDb, hiveTable, partitionKeys); } public Map<String, HiveColumnStats> getTableLevelColumnStats(List<String> columnNames) throws DdlException { List<String> allColumnNames = new ArrayList<>(this.nameToColumn.keySet()); Map<String, HiveColumnStats> allColumnStats = Catalog.getCurrentCatalog().getHiveRepository() .getTableLevelColumnStats(resourceName, hiveDb, hiveTable, getPartitionColumns(), allColumnNames); Map<String, HiveColumnStats> result = Maps.newHashMapWithExpectedSize(columnNames.size()); for (String columnName : columnNames) { result.put(columnName, allColumnStats.get(columnName)); } return result; } public void refreshTableCache() throws DdlException { Catalog.getCurrentCatalog().getHiveRepository() .refreshTableCache(resourceName, hiveDb, hiveTable, getPartitionColumns(), 
new ArrayList<>(nameToColumn.keySet())); } public void refreshPartCache(List<String> partNames) throws DdlException { Catalog.getCurrentCatalog().getHiveRepository() .refreshPartitionCache(resourceName, hiveDb, hiveTable, partNames); } /** * 1. get from table stats * 2. get from partition stats if table stats is missing */ public long getRowCount() { HiveTableStats tableStats = null; try { tableStats = getTableStats(); } catch (DdlException e) { LOG.warn("table {} gets stats failed", name, e); return 0; } long numRows = tableStats.getNumRows(); long tableTotalFileBytes = tableStats.getTotalFileBytes(); if (numRows < 0 || tableTotalFileBytes <= 0 || (numRows == 0 && tableTotalFileBytes != 0)) { numRows = -1; } if (numRows != -1) { return numRows; } numRows = getPartitionStatsRowCount(null); if (numRows != -1) { return numRows; } return 0; } /** * Returns an estimated row count for the given number of file bytes. The row count is * extrapolated using the table-level row count and file bytes statistics. 
*/ public long getExtrapolatedRowCount(long totalPartitionFileBytes) { if (totalPartitionFileBytes == 0) { return 0; } if (totalPartitionFileBytes < 0) { return -1; } HiveTableStats tableStats = null; try { tableStats = getTableStats(); } catch (DdlException e) { LOG.warn("table {} gets stats failed", name, e); return -1; } long numRows = tableStats.getNumRows(); long totalFileBytes = tableStats.getTotalFileBytes(); if (numRows < 0 || totalFileBytes <= 0 || (numRows == 0 && totalFileBytes != 0)) { return -1; } double bytesPerRow = totalFileBytes / (double) numRows; double extrapolatedNumRows = totalPartitionFileBytes / bytesPerRow; return Math.max(1, Math.round(extrapolatedNumRows)); } /** * Computes and returns the number of rows scanned based on the per-partition row count stats * TODO: consider missing or corrupted partition stats */ public long getPartitionStatsRowCount(List<PartitionKey> partitions) { if (partitions == null) { try { partitions = Lists.newArrayList(getPartitionKeys().keySet()); } catch (DdlException e) { LOG.warn("table {} gets partitions failed.", name, e); return -1; } } if (partitions.isEmpty()) { return 0; } long numRows = -1; List<HivePartitionStats> partitionsStats = Lists.newArrayList(); try { partitionsStats = getPartitionsStats(partitions); } catch (DdlException e) { LOG.warn("table {} gets partitions stats failed.", name, e); } for (int i = 0; i < partitionsStats.size(); i++) { long partNumRows = partitionsStats.get(i).getNumRows(); long partTotalFileBytes = partitionsStats.get(i).getTotalFileBytes(); if (partNumRows > -1) { if (numRows == -1) { numRows = 0; } numRows += partNumRows; } else { LOG.debug("table {} partition {} stats abnormal. 
num rows: {}, total file bytes: {}", name, partitions.get(i), partNumRows, partTotalFileBytes); } } return numRows; } private Set<PrimitiveType> getValidColumnType(String hiveType) { if (hiveType == null) { return Sets.newHashSet(); } hiveType = Utils.getTypeKeyword(hiveType); String typeUpperCase = hiveType.toUpperCase(); switch (typeUpperCase) { case "TINYINT": return Sets.newHashSet(PrimitiveType.TINYINT); case "SMALLINT": return Sets.newHashSet(PrimitiveType.SMALLINT); case "INT": case "INTEGER": return Sets.newHashSet(PrimitiveType.INT); case "BIGINT": return Sets.newHashSet(PrimitiveType.BIGINT); case "FLOAT": return Sets.newHashSet(PrimitiveType.FLOAT); case "DOUBLE": case "DOUBLE PRECISION": return Sets.newHashSet(PrimitiveType.DOUBLE); case "DECIMAL": case "NUMERIC": return Sets.newHashSet(PrimitiveType.DECIMALV2, PrimitiveType.DECIMAL32, PrimitiveType.DECIMAL64, PrimitiveType.DECIMAL128); case "TIMESTAMP": return Sets.newHashSet(PrimitiveType.DATETIME); case "DATE": return Sets.newHashSet(PrimitiveType.DATE); case "STRING": case "VARCHAR": case "BINARY": return Sets.newHashSet(PrimitiveType.VARCHAR); case "CHAR": return Sets.newHashSet(PrimitiveType.CHAR, PrimitiveType.VARCHAR); case "BOOLEAN": return Sets.newHashSet(PrimitiveType.BOOLEAN); default: return Sets.newHashSet(); } } @Override public TTableDescriptor toThrift(List<ReferencedPartitionInfo> partitions) { Preconditions.checkNotNull(partitions); THdfsTable tHdfsTable = new THdfsTable(); tHdfsTable.setHdfs_base_dir(hdfsPath); Set<String> partitionColumnNames = Sets.newHashSet(); List<TColumn> tPartitionColumns = Lists.newArrayList(); List<TColumn> tColumns = Lists.newArrayList(); for (Column column : getPartitionColumns()) { tPartitionColumns.add(column.toThrift()); partitionColumnNames.add(column.getName()); } for (Column column : getBaseSchema()) { if (partitionColumnNames.contains(column.getName())) { continue; } tColumns.add(column.toThrift()); } tHdfsTable.setColumns(tColumns); if 
(!tPartitionColumns.isEmpty()) { tHdfsTable.setPartition_columns(tPartitionColumns); } List<PartitionKey> partitionKeys = Lists.newArrayList(); for (ReferencedPartitionInfo partition : partitions) { partitionKeys.add(partition.getKey()); } List<HivePartition> hivePartitions; try { hivePartitions = getPartitions(partitionKeys); } catch (DdlException e) { LOG.warn("table {} gets partition info failed.", name, e); return null; } for (int i = 0; i < hivePartitions.size(); i++) { ReferencedPartitionInfo info = partitions.get(i); PartitionKey key = info.getKey(); long partitionId = info.getId(); THdfsPartition tPartition = new THdfsPartition(); tPartition.setFile_format(hivePartitions.get(i).getFormat().toThrift()); List<LiteralExpr> keys = key.getKeys(); keys.forEach(v -> v.setUseVectorized(true)); tPartition.setPartition_key_exprs(keys.stream().map(Expr::treeToThrift).collect(Collectors.toList())); THdfsPartitionLocation tPartitionLocation = new THdfsPartitionLocation(); tPartitionLocation.setPrefix_index(-1); tPartitionLocation.setSuffix(hivePartitions.get(i).getFullPath()); tPartition.setLocation(tPartitionLocation); tHdfsTable.putToPartitions(partitionId, tPartition); } TTableDescriptor tTableDescriptor = new TTableDescriptor(id, TTableType.HDFS_TABLE, fullSchema.size(), 0, hiveTable, hiveDb); tTableDescriptor.setHdfsTable(tHdfsTable); return tTableDescriptor; } @Override public void write(DataOutput out) throws IOException { super.write(out); JsonObject jsonObject = new JsonObject(); jsonObject.addProperty(JSON_KEY_HIVE_DB, hiveDb); jsonObject.addProperty(JSON_KEY_HIVE_TABLE, hiveTable); if (!Strings.isNullOrEmpty(resourceName)) { jsonObject.addProperty(JSON_KEY_RESOURCE_NAME, resourceName); } if (!Strings.isNullOrEmpty(hdfsPath)) { jsonObject.addProperty(JSON_KEY_HDFS_PATH, hdfsPath); } if (!partColumnNames.isEmpty()) { JsonArray jPartColumnNames = new JsonArray(); for (String partColName : partColumnNames) { jPartColumnNames.add(partColName); } 
jsonObject.add(JSON_KEY_PART_COLUMN_NAMES, jPartColumnNames); } if (!dataColumnNames.isEmpty()) { JsonArray jDataColumnNames = new JsonArray(); for (String dataColumnName : dataColumnNames) { jDataColumnNames.add(dataColumnName); } jsonObject.add(JSON_KEY_DATA_COLUMN_NAMES, jDataColumnNames); } if (!hiveProperties.isEmpty()) { JsonObject jHiveProperties = new JsonObject(); for (Map.Entry<String, String> entry : hiveProperties.entrySet()) { jHiveProperties.addProperty(entry.getKey(), entry.getValue()); } jsonObject.add(JSON_KEY_HIVE_PROPERTIES, jHiveProperties); } Text.writeString(out, jsonObject.toString()); } public void readFields(DataInput in) throws IOException { super.readFields(in); if (Catalog.getCurrentCatalogStarRocksJournalVersion() >= StarRocksFEMetaVersion.VERSION_3) { String json = Text.readString(in); JsonObject jsonObject = JsonParser.parseString(json).getAsJsonObject(); hiveDb = jsonObject.getAsJsonPrimitive(JSON_KEY_HIVE_DB).getAsString(); hiveTable = jsonObject.getAsJsonPrimitive(JSON_KEY_HIVE_TABLE).getAsString(); if (jsonObject.has(JSON_KEY_RESOURCE_NAME)) { resourceName = jsonObject.getAsJsonPrimitive(JSON_KEY_RESOURCE_NAME).getAsString(); } if (jsonObject.has(JSON_KEY_HDFS_PATH)) { hdfsPath = jsonObject.getAsJsonPrimitive(JSON_KEY_HDFS_PATH).getAsString(); } if (jsonObject.has(JSON_KEY_PART_COLUMN_NAMES)) { JsonArray jPartColumnNames = jsonObject.getAsJsonArray(JSON_KEY_PART_COLUMN_NAMES); for (int i = 0; i < jPartColumnNames.size(); i++) { partColumnNames.add(jPartColumnNames.get(i).getAsString()); } } if (jsonObject.has(JSON_KEY_HIVE_PROPERTIES)) { JsonObject jHiveProperties = jsonObject.getAsJsonObject(JSON_KEY_HIVE_PROPERTIES); for (Map.Entry<String, JsonElement> entry : jHiveProperties.entrySet()) { hiveProperties.put(entry.getKey(), entry.getValue().getAsString()); } } if (jsonObject.has(JSON_KEY_DATA_COLUMN_NAMES)) { JsonArray jDataColumnNames = jsonObject.getAsJsonArray(JSON_KEY_DATA_COLUMN_NAMES); for (int i = 0; i < 
jDataColumnNames.size(); i++) { dataColumnNames.add(jDataColumnNames.get(i).getAsString()); } } else { HashSet<String> partColumnSet = new HashSet<>(partColumnNames); for (Column col : fullSchema) { if (!partColumnSet.contains(col.getName())) { dataColumnNames.add(col.getName()); } } } } else { hiveDb = Text.readString(in); hiveTable = Text.readString(in); int size = in.readInt(); for (int i = 0; i < size; i++) { String key = Text.readString(in); String val = Text.readString(in); hiveProperties.put(key, val); } } } @Override public void onDrop() { if (this.resourceName != null) { Catalog.getCurrentCatalog().getHiveRepository(). clearCache(this.resourceName, this.hiveDb, this.hiveTable); } } }
class HiveTable extends Table { private static final Logger LOG = LogManager.getLogger(HiveTable.class); private static final String PROPERTY_MISSING_MSG = "Hive %s is null. Please add properties('%s'='xxx') when create table"; private static final String JSON_KEY_HIVE_DB = "hiveDb"; private static final String JSON_KEY_HIVE_TABLE = "hiveTable"; private static final String JSON_KEY_RESOURCE_NAME = "resourceName"; private static final String JSON_KEY_HDFS_PATH = "hdfsPath"; private static final String JSON_KEY_PART_COLUMN_NAMES = "partColumnNames"; private static final String JSON_KEY_DATA_COLUMN_NAMES = "dataColumnNames"; private static final String JSON_KEY_HIVE_PROPERTIES = "hiveProperties"; private static final String HIVE_DB = "database"; private static final String HIVE_TABLE = "table"; private static final String HIVE_METASTORE_URIS = "hive.metastore.uris"; private static final String HIVE_RESOURCE = "resource"; private String hiveDb; private String hiveTable; private String resourceName; private String hdfsPath; private List<String> partColumnNames = Lists.newArrayList(); private List<String> dataColumnNames = Lists.newArrayList(); private Map<String, String> hiveProperties = Maps.newHashMap(); public HiveTable() { super(TableType.HIVE); } public HiveTable(long id, String name, List<Column> schema, Map<String, String> properties) throws DdlException { super(id, name, TableType.HIVE, schema); validate(properties); } public String getHiveDbTable() { return String.format("%s.%s", hiveDb, hiveTable); } public String getResourceName() { return resourceName; } public String getHiveDb() { return hiveDb; } public String getHiveTable() { return hiveTable; } public List<Column> getPartitionColumns() { List<Column> partColumns = Lists.newArrayList(); for (String columnName : partColumnNames) { partColumns.add(nameToColumn.get(columnName)); } return partColumns; } public List<String> getPartitionColumnNames() { return partColumnNames; } public List<String> 
getDataColumnNames() { return dataColumnNames; } public String getHdfsPath() { return this.hdfsPath; } public Map<String, String> getHiveProperties() { return hiveProperties; } public Map<PartitionKey, Long> getPartitionKeys() throws DdlException { List<Column> partColumns = getPartitionColumns(); return Catalog.getCurrentCatalog().getHiveRepository() .getPartitionKeys(resourceName, hiveDb, hiveTable, partColumns); } public HivePartition getPartition(PartitionKey partitionKey) throws DdlException { return Catalog.getCurrentCatalog().getHiveRepository() .getPartition(resourceName, hiveDb, hiveTable, partitionKey); } public List<HivePartition> getPartitions(List<PartitionKey> partitionKeys) throws DdlException { return Catalog.getCurrentCatalog().getHiveRepository() .getPartitions(resourceName, hiveDb, hiveTable, partitionKeys); } public HiveTableStats getTableStats() throws DdlException { return Catalog.getCurrentCatalog().getHiveRepository().getTableStats(resourceName, hiveDb, hiveTable); } public List<HivePartitionStats> getPartitionsStats(List<PartitionKey> partitionKeys) throws DdlException { return Catalog.getCurrentCatalog().getHiveRepository() .getPartitionsStats(resourceName, hiveDb, hiveTable, partitionKeys); } public Map<String, HiveColumnStats> getTableLevelColumnStats(List<String> columnNames) throws DdlException { List<String> allColumnNames = new ArrayList<>(this.nameToColumn.keySet()); Map<String, HiveColumnStats> allColumnStats = Catalog.getCurrentCatalog().getHiveRepository() .getTableLevelColumnStats(resourceName, hiveDb, hiveTable, getPartitionColumns(), allColumnNames); Map<String, HiveColumnStats> result = Maps.newHashMapWithExpectedSize(columnNames.size()); for (String columnName : columnNames) { result.put(columnName, allColumnStats.get(columnName)); } return result; } public void refreshTableCache() throws DdlException { Catalog.getCurrentCatalog().getHiveRepository() .refreshTableCache(resourceName, hiveDb, hiveTable, getPartitionColumns(), 
new ArrayList<>(nameToColumn.keySet())); } public void refreshPartCache(List<String> partNames) throws DdlException { Catalog.getCurrentCatalog().getHiveRepository() .refreshPartitionCache(resourceName, hiveDb, hiveTable, partNames); } /** * 1. get from table stats * 2. get from partition stats if table stats is missing */ public long getRowCount() { HiveTableStats tableStats = null; try { tableStats = getTableStats(); } catch (DdlException e) { LOG.warn("table {} gets stats failed", name, e); return 0; } long numRows = tableStats.getNumRows(); long tableTotalFileBytes = tableStats.getTotalFileBytes(); if (numRows < 0 || tableTotalFileBytes <= 0 || (numRows == 0 && tableTotalFileBytes != 0)) { numRows = -1; } if (numRows != -1) { return numRows; } numRows = getPartitionStatsRowCount(null); if (numRows != -1) { return numRows; } return 0; } /** * Returns an estimated row count for the given number of file bytes. The row count is * extrapolated using the table-level row count and file bytes statistics. 
*/ public long getExtrapolatedRowCount(long totalPartitionFileBytes) { if (totalPartitionFileBytes == 0) { return 0; } if (totalPartitionFileBytes < 0) { return -1; } HiveTableStats tableStats = null; try { tableStats = getTableStats(); } catch (DdlException e) { LOG.warn("table {} gets stats failed", name, e); return -1; } long numRows = tableStats.getNumRows(); long totalFileBytes = tableStats.getTotalFileBytes(); if (numRows < 0 || totalFileBytes <= 0 || (numRows == 0 && totalFileBytes != 0)) { return -1; } double bytesPerRow = totalFileBytes / (double) numRows; double extrapolatedNumRows = totalPartitionFileBytes / bytesPerRow; return Math.max(1, Math.round(extrapolatedNumRows)); } /** * Computes and returns the number of rows scanned based on the per-partition row count stats * TODO: consider missing or corrupted partition stats */ public long getPartitionStatsRowCount(List<PartitionKey> partitions) { if (partitions == null) { try { partitions = Lists.newArrayList(getPartitionKeys().keySet()); } catch (DdlException e) { LOG.warn("table {} gets partitions failed.", name, e); return -1; } } if (partitions.isEmpty()) { return 0; } long numRows = -1; List<HivePartitionStats> partitionsStats = Lists.newArrayList(); try { partitionsStats = getPartitionsStats(partitions); } catch (DdlException e) { LOG.warn("table {} gets partitions stats failed.", name, e); } for (int i = 0; i < partitionsStats.size(); i++) { long partNumRows = partitionsStats.get(i).getNumRows(); long partTotalFileBytes = partitionsStats.get(i).getTotalFileBytes(); if (partNumRows > -1) { if (numRows == -1) { numRows = 0; } numRows += partNumRows; } else { LOG.debug("table {} partition {} stats abnormal. 
num rows: {}, total file bytes: {}", name, partitions.get(i), partNumRows, partTotalFileBytes); } } return numRows; } private Set<PrimitiveType> getValidColumnType(String hiveType) { if (hiveType == null) { return Sets.newHashSet(); } hiveType = Utils.getTypeKeyword(hiveType); String typeUpperCase = hiveType.toUpperCase(); switch (typeUpperCase) { case "TINYINT": return Sets.newHashSet(PrimitiveType.TINYINT); case "SMALLINT": return Sets.newHashSet(PrimitiveType.SMALLINT); case "INT": case "INTEGER": return Sets.newHashSet(PrimitiveType.INT); case "BIGINT": return Sets.newHashSet(PrimitiveType.BIGINT); case "FLOAT": return Sets.newHashSet(PrimitiveType.FLOAT); case "DOUBLE": case "DOUBLE PRECISION": return Sets.newHashSet(PrimitiveType.DOUBLE); case "DECIMAL": case "NUMERIC": return Sets.newHashSet(PrimitiveType.DECIMALV2, PrimitiveType.DECIMAL32, PrimitiveType.DECIMAL64, PrimitiveType.DECIMAL128); case "TIMESTAMP": return Sets.newHashSet(PrimitiveType.DATETIME); case "DATE": return Sets.newHashSet(PrimitiveType.DATE); case "STRING": case "VARCHAR": case "BINARY": return Sets.newHashSet(PrimitiveType.VARCHAR); case "CHAR": return Sets.newHashSet(PrimitiveType.CHAR, PrimitiveType.VARCHAR); case "BOOLEAN": return Sets.newHashSet(PrimitiveType.BOOLEAN); default: return Sets.newHashSet(); } } @Override public TTableDescriptor toThrift(List<ReferencedPartitionInfo> partitions) { Preconditions.checkNotNull(partitions); THdfsTable tHdfsTable = new THdfsTable(); tHdfsTable.setHdfs_base_dir(hdfsPath); Set<String> partitionColumnNames = Sets.newHashSet(); List<TColumn> tPartitionColumns = Lists.newArrayList(); List<TColumn> tColumns = Lists.newArrayList(); for (Column column : getPartitionColumns()) { tPartitionColumns.add(column.toThrift()); partitionColumnNames.add(column.getName()); } for (Column column : getBaseSchema()) { if (partitionColumnNames.contains(column.getName())) { continue; } tColumns.add(column.toThrift()); } tHdfsTable.setColumns(tColumns); if 
(!tPartitionColumns.isEmpty()) { tHdfsTable.setPartition_columns(tPartitionColumns); } List<PartitionKey> partitionKeys = Lists.newArrayList(); for (ReferencedPartitionInfo partition : partitions) { partitionKeys.add(partition.getKey()); } List<HivePartition> hivePartitions; try { hivePartitions = getPartitions(partitionKeys); } catch (DdlException e) { LOG.warn("table {} gets partition info failed.", name, e); return null; } for (int i = 0; i < hivePartitions.size(); i++) { ReferencedPartitionInfo info = partitions.get(i); PartitionKey key = info.getKey(); long partitionId = info.getId(); THdfsPartition tPartition = new THdfsPartition(); tPartition.setFile_format(hivePartitions.get(i).getFormat().toThrift()); List<LiteralExpr> keys = key.getKeys(); keys.forEach(v -> v.setUseVectorized(true)); tPartition.setPartition_key_exprs(keys.stream().map(Expr::treeToThrift).collect(Collectors.toList())); THdfsPartitionLocation tPartitionLocation = new THdfsPartitionLocation(); tPartitionLocation.setPrefix_index(-1); tPartitionLocation.setSuffix(hivePartitions.get(i).getFullPath()); tPartition.setLocation(tPartitionLocation); tHdfsTable.putToPartitions(partitionId, tPartition); } TTableDescriptor tTableDescriptor = new TTableDescriptor(id, TTableType.HDFS_TABLE, fullSchema.size(), 0, hiveTable, hiveDb); tTableDescriptor.setHdfsTable(tHdfsTable); return tTableDescriptor; } @Override public void write(DataOutput out) throws IOException { super.write(out); JsonObject jsonObject = new JsonObject(); jsonObject.addProperty(JSON_KEY_HIVE_DB, hiveDb); jsonObject.addProperty(JSON_KEY_HIVE_TABLE, hiveTable); if (!Strings.isNullOrEmpty(resourceName)) { jsonObject.addProperty(JSON_KEY_RESOURCE_NAME, resourceName); } if (!Strings.isNullOrEmpty(hdfsPath)) { jsonObject.addProperty(JSON_KEY_HDFS_PATH, hdfsPath); } if (!partColumnNames.isEmpty()) { JsonArray jPartColumnNames = new JsonArray(); for (String partColName : partColumnNames) { jPartColumnNames.add(partColName); } 
jsonObject.add(JSON_KEY_PART_COLUMN_NAMES, jPartColumnNames); } if (!dataColumnNames.isEmpty()) { JsonArray jDataColumnNames = new JsonArray(); for (String dataColumnName : dataColumnNames) { jDataColumnNames.add(dataColumnName); } jsonObject.add(JSON_KEY_DATA_COLUMN_NAMES, jDataColumnNames); } if (!hiveProperties.isEmpty()) { JsonObject jHiveProperties = new JsonObject(); for (Map.Entry<String, String> entry : hiveProperties.entrySet()) { jHiveProperties.addProperty(entry.getKey(), entry.getValue()); } jsonObject.add(JSON_KEY_HIVE_PROPERTIES, jHiveProperties); } Text.writeString(out, jsonObject.toString()); } public void readFields(DataInput in) throws IOException { super.readFields(in); if (Catalog.getCurrentCatalogStarRocksJournalVersion() >= StarRocksFEMetaVersion.VERSION_3) { String json = Text.readString(in); JsonObject jsonObject = JsonParser.parseString(json).getAsJsonObject(); hiveDb = jsonObject.getAsJsonPrimitive(JSON_KEY_HIVE_DB).getAsString(); hiveTable = jsonObject.getAsJsonPrimitive(JSON_KEY_HIVE_TABLE).getAsString(); if (jsonObject.has(JSON_KEY_RESOURCE_NAME)) { resourceName = jsonObject.getAsJsonPrimitive(JSON_KEY_RESOURCE_NAME).getAsString(); } if (jsonObject.has(JSON_KEY_HDFS_PATH)) { hdfsPath = jsonObject.getAsJsonPrimitive(JSON_KEY_HDFS_PATH).getAsString(); } if (jsonObject.has(JSON_KEY_PART_COLUMN_NAMES)) { JsonArray jPartColumnNames = jsonObject.getAsJsonArray(JSON_KEY_PART_COLUMN_NAMES); for (int i = 0; i < jPartColumnNames.size(); i++) { partColumnNames.add(jPartColumnNames.get(i).getAsString()); } } if (jsonObject.has(JSON_KEY_HIVE_PROPERTIES)) { JsonObject jHiveProperties = jsonObject.getAsJsonObject(JSON_KEY_HIVE_PROPERTIES); for (Map.Entry<String, JsonElement> entry : jHiveProperties.entrySet()) { hiveProperties.put(entry.getKey(), entry.getValue().getAsString()); } } if (jsonObject.has(JSON_KEY_DATA_COLUMN_NAMES)) { JsonArray jDataColumnNames = jsonObject.getAsJsonArray(JSON_KEY_DATA_COLUMN_NAMES); for (int i = 0; i < 
jDataColumnNames.size(); i++) { dataColumnNames.add(jDataColumnNames.get(i).getAsString()); } } else { HashSet<String> partColumnSet = new HashSet<>(partColumnNames); for (Column col : fullSchema) { if (!partColumnSet.contains(col.getName())) { dataColumnNames.add(col.getName()); } } } } else { hiveDb = Text.readString(in); hiveTable = Text.readString(in); int size = in.readInt(); for (int i = 0; i < size; i++) { String key = Text.readString(in); String val = Text.readString(in); hiveProperties.put(key, val); } } } @Override public void onDrop() { if (this.resourceName != null) { Catalog.getCurrentCatalog().getHiveRepository(). clearCache(this.resourceName, this.hiveDb, this.hiveTable); } } }
Should this be `text/plain; version=0.0.4` for Prometheus (c.f. the [exposition format docs](https://prometheus.io/docs/instrumenting/exposition_formats/#basic-info)) or is this not necessary for a consumer?
private String resolveContentType(URI requestUri) { if (resolvePath(requestUri).equals(HISTOGRAMS_PATH) || isPrometheusRequest(requestUri.getQuery())) { return "text/plain; charset=utf-8"; } else { return "application/json"; } }
if (resolvePath(requestUri).equals(HISTOGRAMS_PATH) || isPrometheusRequest(requestUri.getQuery())) {
private String resolveContentType(URI requestUri) { if (resolvePath(requestUri).equals(HISTOGRAMS_PATH) || isPrometheusRequest(requestUri.getQuery())) { return "text/plain; charset=utf-8"; } else { return "application/json"; } }
class MyContentChannel implements ContentChannel { private final List<ByteBuffer> buffers; private final Runnable trigger; @Override public void write(ByteBuffer buf, CompletionHandler handler) { buffers.add(buf); if (handler != null) handler.completed(); } @Override public void close(CompletionHandler handler) { trigger.run(); if (handler != null) handler.completed(); } MyContentChannel(List<ByteBuffer> buffers, Runnable trigger) { this.buffers = buffers; this.trigger = trigger; } }
class MyContentChannel implements ContentChannel { private final List<ByteBuffer> buffers; private final Runnable trigger; @Override public void write(ByteBuffer buf, CompletionHandler handler) { buffers.add(buf); if (handler != null) handler.completed(); } @Override public void close(CompletionHandler handler) { trigger.run(); if (handler != null) handler.completed(); } MyContentChannel(List<ByteBuffer> buffers, Runnable trigger) { this.buffers = buffers; this.trigger = trigger; } }
Just a side note (as it's not something changed or introduced by this PR), but our legacy naming format is a bit at odds with the Prometheus [naming convention](https://prometheus.io/docs/instrumenting/writing_exporters/#naming): > The `_sum`, `_count`, `_bucket` and `_total` suffixes are used by Summaries, Histograms and Counters. Unless you’re producing one of those, avoid these suffixes. > > `_total` is a convention for counters, you should use it if you’re using the COUNTER type. Something we should keep in mind when we move beyond this stop-gap solution.
private byte[] buildPrometheusForConsumer(String consumer) { var snapshot = getSnapshot(); if (snapshot == null) return new byte[0]; var timestamp = snapshot.getToTime(TimeUnit.MILLISECONDS); var builder = new StringBuilder(); builder.append(" for (var tuple : collapseMetrics(snapshot, consumer)) { var dims = toPrometheusDimensions(tuple.dim); var metricName = prometheusSanitizedName(tuple.key) + "_"; if (tuple.val instanceof GaugeMetric gauge) { appendPrometheusEntry(builder, metricName + "max", dims, gauge.getMax(), timestamp); appendPrometheusEntry(builder, metricName + "sum", dims, gauge.getSum(), timestamp); appendPrometheusEntry(builder, metricName + "count", dims, gauge.getCount(), timestamp); if (gauge.getPercentiles().isPresent()) { for (Tuple2<String, Double> prefixAndValue : gauge.getPercentiles().get()) { appendPrometheusEntry(builder, metricName + prefixAndValue.first + "percentile", dims, prefixAndValue.second, timestamp); } } } else if (tuple.val instanceof CountMetric count) { appendPrometheusEntry(builder, metricName + "count", dims, count.getCount(), timestamp); } } return builder.toString().getBytes(UTF_8); }
appendPrometheusEntry(builder, metricName + "count", dims, gauge.getCount(), timestamp);
private byte[] buildPrometheusForConsumer(String consumer) { var snapshot = getSnapshot(); if (snapshot == null) return new byte[0]; var timestamp = snapshot.getToTime(TimeUnit.MILLISECONDS); var builder = new StringBuilder(); builder.append(" for (var tuple : collapseMetrics(snapshot, consumer)) { var dims = toPrometheusDimensions(tuple.dim); var metricName = prometheusSanitizedName(tuple.key) + "_"; if (tuple.val instanceof GaugeMetric gauge) { appendPrometheusEntry(builder, metricName + "max", dims, gauge.getMax(), timestamp); appendPrometheusEntry(builder, metricName + "sum", dims, gauge.getSum(), timestamp); appendPrometheusEntry(builder, metricName + "count", dims, gauge.getCount(), timestamp); if (gauge.getPercentiles().isPresent()) { for (Tuple2<String, Double> prefixAndValue : gauge.getPercentiles().get()) { appendPrometheusEntry(builder, metricName + prefixAndValue.first + "percentile", dims, prefixAndValue.second, timestamp); } } } else if (tuple.val instanceof CountMetric count) { appendPrometheusEntry(builder, metricName + "count", dims, count.getCount(), timestamp); } } return builder.toString().getBytes(UTF_8); }
class MyContentChannel implements ContentChannel { private final List<ByteBuffer> buffers; private final Runnable trigger; @Override public void write(ByteBuffer buf, CompletionHandler handler) { buffers.add(buf); if (handler != null) handler.completed(); } @Override public void close(CompletionHandler handler) { trigger.run(); if (handler != null) handler.completed(); } MyContentChannel(List<ByteBuffer> buffers, Runnable trigger) { this.buffers = buffers; this.trigger = trigger; } }
class MyContentChannel implements ContentChannel { private final List<ByteBuffer> buffers; private final Runnable trigger; @Override public void write(ByteBuffer buf, CompletionHandler handler) { buffers.add(buf); if (handler != null) handler.completed(); } @Override public void close(CompletionHandler handler) { trigger.run(); if (handler != null) handler.completed(); } MyContentChannel(List<ByteBuffer> buffers, Runnable trigger) { this.buffers = buffers; this.trigger = trigger; } }