comment
stringlengths
22
3.02k
method_body
stringlengths
46
368k
target_code
stringlengths
0
181
method_body_after
stringlengths
12
368k
context_before
stringlengths
11
634k
context_after
stringlengths
11
632k
Do we already have positive tests for this?
public void setup() { compileResult = BCompileUtil.compile("test-src/annotations/annot_attachments_negative.bal"); Assert.assertEquals(compileResult.getErrorCount(), 266); }
Assert.assertEquals(compileResult.getErrorCount(), 266);
public void setup() { compileResult = BCompileUtil.compile("test-src/annotations/annot_attachments_negative.bal"); Assert.assertEquals(compileResult.getErrorCount(), 266); }
class AnnotationAttachmentNegativeTest { private CompileResult compileResult; @BeforeClass @Test public void testInvalidAttachmentOnType() { int index = 0; int line = 39; validateError(compileResult, index++, "annotation 'v2' is not allowed on type", line, 1); validateError(compileResult, index++, "annotation 'v3' is not allowed on type", line += 3, 1); validateError(compileResult, index++, "annotation 'v4' is not allowed on type", line += 3, 1); validateError(compileResult, index++, "annotation 'v5' is not allowed on type", line += 3, 1); validateError(compileResult, index++, "annotation 'v6' is not allowed on type", line += 3, 1); validateError(compileResult, index++, "annotation 'v7' is not allowed on type", line += 3, 1); validateError(compileResult, index++, "annotation 'v8' is not allowed on type", ++line, 1); validateError(compileResult, index++, "annotation 'v9' is not allowed on type", line += 3, 1); validateError(compileResult, index++, "annotation 'v10' is not allowed on type", line += 3, 1); validateError(compileResult, index++, "annotation 'v11' is not allowed on type", line += 3, 1); validateError(compileResult, index++, "annotation 'v12' is not allowed on type", line += 3, 1); validateError(compileResult, index++, "annotation 'v13' is not allowed on type", line += 3, 1); validateError(compileResult, index, "annotation 'v15' is not allowed on type", line + 3, 1); } @Test public void testInvalidAttachmentOnObjectType() { int index = 13; int line = 80; validateError(compileResult, index++, "annotation 'v3' is not allowed on class", line, 1); validateError(compileResult, index++, "annotation 'v4' is not allowed on class", line += 3, 1); validateError(compileResult, index++, "annotation 'v5' is not allowed on class", line += 3, 1); validateError(compileResult, index++, "annotation 'v6' is not allowed on class", line += 3, 1); validateError(compileResult, index++, "annotation 'v7' is not allowed on class", line += 3, 1); validateError(compileResult, 
index++, "annotation 'v8' is not allowed on class", ++line, 1); validateError(compileResult, index++, "annotation 'v9' is not allowed on class", line += 3, 1); validateError(compileResult, index++, "annotation 'v10' is not allowed on class", line += 3, 1); validateError(compileResult, index++, "annotation 'v11' is not allowed on class", line += 3, 1); validateError(compileResult, index++, "annotation 'v12' is not allowed on class", line += 3, 1); validateError(compileResult, index++, "annotation 'v13' is not allowed on class", line += 3, 1); validateError(compileResult, index, "annotation 'v15' is not allowed on class", line + 3, 1); } @Test public void testInvalidAttachmentOnObjectMethodDefinition() { int index = 25; int line = 117; validateError(compileResult, index++, "annotation 'v1' is not allowed on object_method, function", line, 5); validateError(compileResult, index++, "annotation 'v2' is not allowed on object_method, function", line += 3, 5); validateError(compileResult, index++, "annotation 'v6' is not allowed on object_method, function", line += 6, 5); validateError(compileResult, index++, "annotation 'v7' is not allowed on object_method, function", line += 3, 5); validateError(compileResult, index++, "annotation 'v8' is not allowed on object_method, function", ++line, 5); validateError(compileResult, index++, "annotation 'v9' is not allowed on object_method, function", line += 3, 5); validateError(compileResult, index++, "annotation 'v10' is not allowed on object_method, function", line += 3, 5); validateError(compileResult, index++, "annotation 'v11' is not allowed on object_method, function", line += 3, 5); validateError(compileResult, index++, "annotation 'v12' is not allowed on object_method, function", line += 3, 5); validateError(compileResult, index++, "annotation 'v13' is not allowed on object_method, function", line += 3, 5); validateError(compileResult, index, "annotation 'v15' is not allowed on object_method, function", line + 3, 5); } @Test 
public void testInvalidAttachmentOnObjectMethodDeclaration() { int index = 36; int line = 155; validateError(compileResult, index++, "annotation 'v1' is not allowed on object_method, function", line, 5); validateError(compileResult, index++, "annotation 'v2' is not allowed on object_method, function", line += 3, 5); validateError(compileResult, index++, "annotation 'v6' is not allowed on object_method, function", line += 6, 5); validateError(compileResult, index++, "annotation 'v7' is not allowed on object_method, function", line += 3, 5); validateError(compileResult, index++, "annotation 'v8' is not allowed on object_method, function", ++line, 5); validateError(compileResult, index++, "annotation 'v9' is not allowed on object_method, function", line += 3, 5); validateError(compileResult, index++, "annotation 'v10' is not allowed on object_method, function", line += 3, 5); validateError(compileResult, index++, "annotation 'v11' is not allowed on object_method, function", line += 3, 5); validateError(compileResult, index++, "annotation 'v12' is not allowed on object_method, function", line += 3, 5); validateError(compileResult, index++, "annotation 'v13' is not allowed on object_method, function", line += 3, 5); validateError(compileResult, index, "annotation 'v15' is not allowed on object_method, function", line + 3, 5); } @Test public void testInvalidAttachmentOnFunction() { int index = 47; int line = 230; validateError(compileResult, index++, "annotation 'v1' is not allowed on function", line, 1); validateError(compileResult, index++, "annotation 'v2' is not allowed on function", line += 3, 1); validateError(compileResult, index++, "annotation 'v4' is not allowed on function", line += 3, 1); validateError(compileResult, index++, "annotation 'v5' is not allowed on function", line += 3, 1); validateError(compileResult, index++, "annotation 'v6' is not allowed on function", line += 3, 1); validateError(compileResult, index++, "annotation 'v7' is not allowed on 
function", line += 3, 1); validateError(compileResult, index++, "annotation 'v8' is not allowed on function", ++line, 1); validateError(compileResult, index++, "annotation 'v9' is not allowed on function", line += 3, 1); validateError(compileResult, index++, "annotation 'v10' is not allowed on function", line += 3, 1); validateError(compileResult, index++, "annotation 'v11' is not allowed on function", line += 3, 1); validateError(compileResult, index++, "annotation 'v12' is not allowed on function", line += 3, 1); validateError(compileResult, index++, "annotation 'v13' is not allowed on function", line += 3, 1); validateError(compileResult, index, "annotation 'v15' is not allowed on function", line + 3, 1); } @Test public void testInvalidAttachmentOnParam() { int index = 60; int line = 271; validateError(compileResult, index++, "annotation 'v1' is not allowed on parameter", line, 31); validateError(compileResult, index++, "annotation 'v2' is not allowed on parameter", line += 3, 29); validateError(compileResult, index++, "annotation 'v3' is not allowed on parameter", line += 3, 29); validateError(compileResult, index++, "annotation 'v4' is not allowed on parameter", line += 3, 29); validateError(compileResult, index++, "annotation 'v5' is not allowed on parameter", line += 3, 29); validateError(compileResult, index++, "annotation 'v7' is not allowed on parameter", line += 3, 29); validateError(compileResult, index++, "annotation 'v8' is not allowed on parameter", ++line, 29); validateError(compileResult, index++, "annotation 'v9' is not allowed on parameter", line += 3, 29); validateError(compileResult, index++, "annotation 'v10' is not allowed on parameter", line += 3, 29); validateError(compileResult, index++, "annotation 'v11' is not allowed on parameter", line += 3, 29); validateError(compileResult, index++, "annotation 'v12' is not allowed on parameter", line += 3, 29); validateError(compileResult, index++, "annotation 'v13' is not allowed on parameter", line 
+= 3, 29); validateError(compileResult, index, "annotation 'v15' is not allowed on parameter", line + 3, 29); } @Test public void testInvalidAttachmentOnReturn() { int index = 73; int line = 307; validateError(compileResult, index++, "annotation 'v1' is not allowed on return", line, 53); validateError(compileResult, index++, "annotation 'v2' is not allowed on return", line += 3, 53); validateError(compileResult, index++, "annotation 'v3' is not allowed on return", line += 3, 53); validateError(compileResult, index++, "annotation 'v4' is not allowed on return", line += 3, 53); validateError(compileResult, index++, "annotation 'v5' is not allowed on return", line += 3, 53); validateError(compileResult, index++, "annotation 'v6' is not allowed on return", line += 3, 53); validateError(compileResult, index++, "annotation 'v8' is not allowed on return", line += 3, 53); validateError(compileResult, index++, "annotation 'v9' is not allowed on return", line += 3, 53); validateError(compileResult, index++, "annotation 'v10' is not allowed on return", line += 3, 53); validateError(compileResult, index++, "annotation 'v11' is not allowed on return", line += 3, 53); validateError(compileResult, index++, "annotation 'v12' is not allowed on return", line += 3, 53); validateError(compileResult, index++, "annotation 'v13' is not allowed on return", line += 3, 53); validateError(compileResult, index, "annotation 'v15' is not allowed on return", line + 3, 53); } @Test public void testInvalidAttachmentOnListener() { int index = 86; int line = 349; validateError(compileResult, index++, "annotation 'v1' is not allowed on listener", line, 1); validateError(compileResult, index++, "annotation 'v2' is not allowed on listener", line += 3, 1); validateError(compileResult, index++, "annotation 'v3' is not allowed on listener", line += 3, 1); validateError(compileResult, index++, "annotation 'v4' is not allowed on listener", line += 3, 1); validateError(compileResult, index++, "annotation 
'v5' is not allowed on listener", line += 3, 1); validateError(compileResult, index++, "annotation 'v6' is not allowed on listener", line += 3, 1); validateError(compileResult, index++, "annotation 'v7' is not allowed on listener", line += 3, 1); validateError(compileResult, index++, "annotation 'v8' is not allowed on listener", ++line, 1); validateError(compileResult, index++, "annotation 'v10' is not allowed on listener", line += 3, 1); validateError(compileResult, index++, "annotation 'v11' is not allowed on listener", line += 3, 1); validateError(compileResult, index++, "annotation 'v12' is not allowed on listener", line += 3, 1); validateError(compileResult, index++, "annotation 'v13' is not allowed on listener", line += 3, 1); validateError(compileResult, index, "annotation 'v15' is not allowed on listener", line + 3, 1); } @Test public void testInvalidAttachmentOnService() { int index = 99; int line = 388; validateError(compileResult, index++, "annotation 'v1' is not allowed on service", line, 1); validateError(compileResult, index++, "annotation 'v2' is not allowed on service", line += 3, 1); validateError(compileResult, index++, "annotation 'v3' is not allowed on service", line += 3, 1); validateError(compileResult, index++, "annotation 'v4' is not allowed on service", line += 3, 1); validateError(compileResult, index++, "annotation 'v5' is not allowed on service", line += 3, 1); validateError(compileResult, index++, "annotation 'v6' is not allowed on service", line += 3, 1); validateError(compileResult, index++, "annotation 'v7' is not allowed on service", line += 3, 1); validateError(compileResult, index++, "annotation 'v9' is not allowed on service", ++line, 1); validateError(compileResult, index++, "annotation 'v10' is not allowed on service", line += 3, 1); validateError(compileResult, index++, "annotation 'v11' is not allowed on service", line += 3, 1); validateError(compileResult, index++, "annotation 'v12' is not allowed on service", line += 3, 1); 
validateError(compileResult, index++, "annotation 'v13' is not allowed on service", line += 3, 1); validateError(compileResult, index, "annotation 'v15' is not allowed on service", line + 3, 1); } @Test public void testInvalidAttachmentOnResource() { int index = 112; int line = 427; validateError(compileResult, index++, "annotation 'v1' is not allowed on object_method, function", line, 5); validateError(compileResult, index++, "annotation 'v2' is not allowed on object_method, function", line += 3, 5); validateError(compileResult, index++, "annotation 'v6' is not allowed on object_method, function", line += 6, 5); validateError(compileResult, index++, "annotation 'v7' is not allowed on object_method, function", line += 3, 5); validateError(compileResult, index++, "annotation 'v8' is not allowed on object_method, function", ++line, 5); validateError(compileResult, index++, "annotation 'v9' is not allowed on object_method, function", line += 3, 5); validateError(compileResult, index++, "annotation 'v10' is not allowed on object_method, function", line += 3, 5); validateError(compileResult, index++, "annotation 'v11' is not allowed on object_method, function", line += 3, 5); validateError(compileResult, index++, "annotation 'v12' is not allowed on object_method, function", line += 3, 5); validateError(compileResult, index++, "annotation 'v13' is not allowed on object_method, function", line += 3, 5); validateError(compileResult, index, "annotation 'v15' is not allowed on object_method, function", line + 3, 5); } @Test public void testInvalidAttachmentOnAnnotation() { int index = 123; int line = 490; validateError(compileResult, index++, "annotation 'v1' is not allowed on annotation", line, 1); validateError(compileResult, index++, "annotation 'v2' is not allowed on annotation", line += 3, 1); validateError(compileResult, index++, "annotation 'v3' is not allowed on annotation", line += 3, 1); validateError(compileResult, index++, "annotation 'v4' is not allowed on 
annotation", line += 3, 1); validateError(compileResult, index++, "annotation 'v5' is not allowed on annotation", line += 3, 1); validateError(compileResult, index++, "annotation 'v6' is not allowed on annotation", line += 3, 1); validateError(compileResult, index++, "annotation 'v7' is not allowed on annotation", line += 3, 1); validateError(compileResult, index++, "annotation 'v8' is not allowed on annotation", ++line, 1); validateError(compileResult, index++, "annotation 'v9' is not allowed on annotation", line += 3, 1); validateError(compileResult, index++, "annotation 'v11' is not allowed on annotation", line += 3, 1); validateError(compileResult, index++, "annotation 'v12' is not allowed on annotation", line += 3, 1); validateError(compileResult, index++, "annotation 'v13' is not allowed on annotation", line += 3, 1); validateError(compileResult, index, "annotation 'v15' is not allowed on annotation", line + 3, 1); } @Test public void testInvalidAttachmentOnVar() { int index = 136; int line = 529; validateError(compileResult, index++, "annotation 'v1' is not allowed on var", line, 1); validateError(compileResult, index++, "annotation 'v2' is not allowed on var", line += 3, 1); validateError(compileResult, index++, "annotation 'v3' is not allowed on var", line += 3, 1); validateError(compileResult, index++, "annotation 'v4' is not allowed on var", line += 3, 1); validateError(compileResult, index++, "annotation 'v5' is not allowed on var", line += 3, 1); validateError(compileResult, index++, "annotation 'v6' is not allowed on var", line += 3, 1); validateError(compileResult, index++, "annotation 'v7' is not allowed on var", line += 3, 1); validateError(compileResult, index++, "annotation 'v8' is not allowed on var", ++line, 1); validateError(compileResult, index++, "annotation 'v9' is not allowed on var", line += 3, 1); validateError(compileResult, index++, "annotation 'v10' is not allowed on var", line += 3, 1); validateError(compileResult, index++, 
"annotation 'v12' is not allowed on var", line += 3, 1); validateError(compileResult, index++, "annotation 'v13' is not allowed on var", line += 3, 1); validateError(compileResult, index, "annotation 'v15' is not allowed on var", line + 3, 1); } @Test public void testInvalidAttachmentOnLetVar() { int index = 149; int line = 568; validateError(compileResult, index++, "annotation 'v1' is not allowed on var", line, 13); validateError(compileResult, index++, "annotation 'v2' is not allowed on var", line += 3, 13); validateError(compileResult, index++, "annotation 'v3' is not allowed on var", line += 3, 13); validateError(compileResult, index++, "annotation 'v4' is not allowed on var", line += 3, 13); validateError(compileResult, index++, "annotation 'v5' is not allowed on var", line += 3, 13); validateError(compileResult, index++, "annotation 'v6' is not allowed on var", line += 3, 13); validateError(compileResult, index++, "annotation 'v7' is not allowed on var", line += 3, 13); validateError(compileResult, index++, "annotation 'v8' is not allowed on var", ++line, 13); validateError(compileResult, index++, "annotation 'v9' is not allowed on var", line += 3, 13); validateError(compileResult, index++, "annotation 'v10' is not allowed on var", line += 3, 13); validateError(compileResult, index++, "annotation 'v12' is not allowed on var", line += 3, 13); validateError(compileResult, index++, "annotation 'v13' is not allowed on var", line += 3, 13); validateError(compileResult, index, "annotation 'v15' is not allowed on var", line + 3, 13); } @Test public void testInvalidAttachmentOnConst() { int index = 162; int line = 606; validateError(compileResult, index++, "annotation 'v1' is not allowed on const", line, 1); validateError(compileResult, index++, "annotation 'v2' is not allowed on const", line += 3, 1); validateError(compileResult, index++, "annotation 'v3' is not allowed on const", line += 3, 1); validateError(compileResult, index++, "annotation 'v4' is not allowed 
on const", line += 3, 1); validateError(compileResult, index++, "annotation 'v5' is not allowed on const", line += 3, 1); validateError(compileResult, index++, "annotation 'v6' is not allowed on const", line += 3, 1); validateError(compileResult, index++, "annotation 'v7' is not allowed on const", line += 3, 1); validateError(compileResult, index++, "annotation 'v8' is not allowed on const", ++line, 1); validateError(compileResult, index++, "annotation 'v9' is not allowed on const", line += 3, 1); validateError(compileResult, index++, "annotation 'v10' is not allowed on const", line += 3, 1); validateError(compileResult, index++, "annotation 'v11' is not allowed on const", line += 3, 1); validateError(compileResult, index++, "annotation 'v13' is not allowed on const", line += 3, 1); validateError(compileResult, index, "annotation 'v15' is not allowed on const", line + 3, 1); } @Test public void testInvalidAttachmentOnExternal() { int index = 175; int line = 645; validateError(compileResult, index++, "annotation 'v1' is not allowed on external", line, 62); validateError(compileResult, index++, "annotation 'v2' is not allowed on external", line += 3, 61); validateError(compileResult, index++, "annotation 'v3' is not allowed on external", line += 3, 61); validateError(compileResult, index++, "annotation 'v4' is not allowed on external", line += 3, 61); validateError(compileResult, index++, "annotation 'v5' is not allowed on external", line += 3, 61); validateError(compileResult, index++, "annotation 'v6' is not allowed on external", line += 3, 61); validateError(compileResult, index++, "annotation 'v7' is not allowed on external", line += 3, 61); validateError(compileResult, index++, "annotation 'v8' is not allowed on external", ++line, 61); validateError(compileResult, index++, "annotation 'v9' is not allowed on external", line += 3, 61); validateError(compileResult, index++, "annotation 'v10' is not allowed on external", line += 3, 61); validateError(compileResult, 
index++, "annotation 'v11' is not allowed on external", line += 3, 61); validateError(compileResult, index++, "annotation 'v12' is not allowed on external", line += 3, 61); validateError(compileResult, index, "annotation 'v15' is not allowed on external", line + 3, 61); } @Test public void testInvalidAttachmentOnServiceVariable() { int index = 188; int line = 683; validateError(compileResult, index++, "annotation 'v8' is not allowed on var", line, 1); validateError(compileResult, index++, "annotation 'v1' is not allowed on service", line += 4, 1); validateError(compileResult, index++, "annotation 'v2' is not allowed on service", line += 3, 1); validateError(compileResult, index++, "annotation 'v3' is not allowed on service", line += 3, 1); validateError(compileResult, index++, "annotation 'v4' is not allowed on service", line += 3, 1); validateError(compileResult, index++, "annotation 'v5' is not allowed on service", line += 3, 1); validateError(compileResult, index++, "annotation 'v6' is not allowed on service", line += 3, 1); validateError(compileResult, index++, "annotation 'v7' is not allowed on service", line += 3, 1); validateError(compileResult, index++, "annotation 'v9' is not allowed on service", ++line, 1); validateError(compileResult, index++, "annotation 'v10' is not allowed on service", line += 3, 1); validateError(compileResult, index++, "annotation 'v11' is not allowed on service", line += 3, 1); validateError(compileResult, index++, "annotation 'v12' is not allowed on service", line += 3, 1); validateError(compileResult, index++, "annotation 'v13' is not allowed on service", line += 3, 1); validateError(compileResult, index, "annotation 'v15' is not allowed on service", line + 3, 1); } @Test public void testInvalidAttachmentOnWorker() { int index = 202; int line = 733; validateError(compileResult, index++, "annotation 'v1' is not allowed on worker", line, 5); validateError(compileResult, index++, "annotation 'v2' is not allowed on worker", line += 
3, 5); validateError(compileResult, index++, "annotation 'v3' is not allowed on worker", line += 3, 5); validateError(compileResult, index++, "annotation 'v4' is not allowed on worker", line += 3, 5); validateError(compileResult, index++, "annotation 'v5' is not allowed on worker", line += 3, 5); validateError(compileResult, index++, "annotation 'v6' is not allowed on worker", line += 3, 5); validateError(compileResult, index++, "annotation 'v7' is not allowed on worker", line += 3, 5); validateError(compileResult, index++, "annotation 'v8' is not allowed on worker", ++line, 5); validateError(compileResult, index++, "annotation 'v9' is not allowed on worker", line += 3, 5); validateError(compileResult, index++, "annotation 'v10' is not allowed on worker", line += 3, 5); validateError(compileResult, index++, "annotation 'v11' is not allowed on worker", line += 3, 5); validateError(compileResult, index++, "annotation 'v12' is not allowed on worker", line += 3, 5); validateError(compileResult, index, "annotation 'v13' is not allowed on worker", line + 3, 5); } @Test public void testInvalidAttachmentOnStart() { int index = 215; int line = 776; validateError(compileResult, index++, "action invocation as an expression not allowed here", line, 1); validateError(compileResult, index++, "annotation 'v1' is not allowed on worker", line, 1); validateError(compileResult, index++, "annotation 'v2' is not allowed on worker", line += 3, 1); validateError(compileResult, index++, "annotation 'v3' is not allowed on worker", line += 3, 1); validateError(compileResult, index++, "annotation 'v4' is not allowed on worker", line += 3, 1); validateError(compileResult, index++, "annotation 'v5' is not allowed on worker", line += 3, 1); validateError(compileResult, index++, "annotation 'v6' is not allowed on worker", line += 3, 1); validateError(compileResult, index++, "annotation 'v7' is not allowed on worker", line += 3, 1); validateError(compileResult, index++, "annotation 'v8' is not 
allowed on worker", ++line, 1); validateError(compileResult, index++, "annotation 'v9' is not allowed on worker", line += 3, 1); validateError(compileResult, index++, "annotation 'v10' is not allowed on worker", line += 3, 1); validateError(compileResult, index++, "annotation 'v11' is not allowed on worker", line += 3, 1); validateError(compileResult, index++, "annotation 'v12' is not allowed on worker", line += 3, 1); validateError(compileResult, index, "annotation 'v13' is not allowed on worker", line + 3, 1); } @Test public void testInvalidAttachmentForField() { int index = 229; validateError(compileResult, index++, "annotation 'v16' is not allowed on var", 819, 1); validateError(compileResult, index++, "annotation 'v16' is not allowed on function", 821, 1); validateError(compileResult, index++, "annotation 'v17' is not allowed on function", 822, 1); validateError(compileResult, index++, "annotation 'v18' is not allowed on function", 823, 1); validateError(compileResult, index++, "annotation 'v16' is not allowed on type", 828, 1); validateError(compileResult, index++, "annotation 'v17' is not allowed on type", 829, 1); validateError(compileResult, index++, "annotation 'v18' is not allowed on type", 830, 1); validateError(compileResult, index++, "annotation 'v17' is not allowed on record_field, field", 832, 5); validateError(compileResult, index++, "annotation 'v16' is not allowed on class", 835, 1); validateError(compileResult, index++, "annotation 'v17' is not allowed on class", 836, 1); validateError(compileResult, index++, "annotation 'v18' is not allowed on class", 837, 1); validateError(compileResult, index, "annotation 'v18' is not allowed on object_field, field", 839, 5); } @Test public void testInvalidAttachmentForTypeConversionExpr() { int index = 241; validateError(compileResult, index++, "annotation 'v16' is not allowed on type", 847, 17); } @Test public void testInvalidAttachmentForClass() { int index = 242; validateError(compileResult, index++, 
"annotation 'v19' is not allowed on class", 852, 6); } @Test public void testQualifiedNameInInvalidAttachmentError() { validateError(compileResult, 243, "annotation 'ballerina/lang.annotations:1.0.0:tainted' is not allowed on class", 859, 1); } @Test public void testInvalidAttachmentWithValue() { validateError(compileResult, 244, "no annotation value expected for annotation 'ballerina/lang.annotations:1.0.0:tainted'", 864, 10); validateError(compileResult, 245, "no annotation value expected for annotation 'v7'", 869, 35); } @Test public void testInvalidAttachmentWithoutValue() { validateError(compileResult, 246, "annotation value expected for annotation of " + "record type 'Annot' with required fields", 871, 1); validateError(compileResult, 247, "annotation value expected for annotation of " + "record type 'Annot' with required fields", 874, 1); validateError(compileResult, 248, "annotation value expected for annotation of " + "record type 'Annot' with required fields", 875, 22); validateError(compileResult, 249, "annotation value expected for annotation of " + "record type 'Annot' with required fields", 881, 1); validateError(compileResult, 250, "annotation value expected for annotation of " + "record type 'Annot' with required fields", 882, 1); } @Test public void testInvalidAttachmentCount() { validateError(compileResult, 251, "cannot specify more than one annotation value for " + "annotation 'ballerina/lang.annotations:1.0.0:tainted'", 886, 1); validateError(compileResult, 252, "cannot specify more than one annotation value for annotation 'v1'", 888, 1); } @Test public void testInvalidAttachmentOnServiceClass() { int index = 253; int line = 892; validateError(compileResult, index++, "annotation 'v1' is not allowed on class", line, 1); validateError(compileResult, index++, "annotation 'v3' is not allowed on class", line += 6, 1); validateError(compileResult, index++, "annotation 'v4' is not allowed on class", line += 3, 1); validateError(compileResult, index++, 
"annotation 'v5' is not allowed on class", line += 3, 1); validateError(compileResult, index++, "annotation 'v6' is not allowed on class", line += 3, 1); validateError(compileResult, index++, "annotation 'v7' is not allowed on class", line += 3, 1); validateError(compileResult, index++, "annotation 'v8' is not allowed on class", ++line, 1); validateError(compileResult, index++, "annotation 'v9' is not allowed on class", line += 3, 1); validateError(compileResult, index++, "annotation 'v10' is not allowed on class", line += 3, 1); validateError(compileResult, index++, "annotation 'v11' is not allowed on class", line += 3, 1); validateError(compileResult, index++, "annotation 'v12' is not allowed on class", line += 3, 1); validateError(compileResult, index++, "annotation 'v13' is not allowed on class", line += 3, 1); validateError(compileResult, index, "annotation 'v15' is not allowed on class", line + 3, 1); } }
class AnnotationAttachmentNegativeTest { private CompileResult compileResult; @BeforeClass @Test public void testInvalidAttachmentOnType() { int index = 0; int line = 39; validateError(compileResult, index++, "annotation 'v2' is not allowed on type", line, 1); validateError(compileResult, index++, "annotation 'v3' is not allowed on type", line += 3, 1); validateError(compileResult, index++, "annotation 'v4' is not allowed on type", line += 3, 1); validateError(compileResult, index++, "annotation 'v5' is not allowed on type", line += 3, 1); validateError(compileResult, index++, "annotation 'v6' is not allowed on type", line += 3, 1); validateError(compileResult, index++, "annotation 'v7' is not allowed on type", line += 3, 1); validateError(compileResult, index++, "annotation 'v8' is not allowed on type", ++line, 1); validateError(compileResult, index++, "annotation 'v9' is not allowed on type", line += 3, 1); validateError(compileResult, index++, "annotation 'v10' is not allowed on type", line += 3, 1); validateError(compileResult, index++, "annotation 'v11' is not allowed on type", line += 3, 1); validateError(compileResult, index++, "annotation 'v12' is not allowed on type", line += 3, 1); validateError(compileResult, index++, "annotation 'v13' is not allowed on type", line += 3, 1); validateError(compileResult, index, "annotation 'v15' is not allowed on type", line + 3, 1); } @Test public void testInvalidAttachmentOnObjectType() { int index = 13; int line = 80; validateError(compileResult, index++, "annotation 'v3' is not allowed on class", line, 1); validateError(compileResult, index++, "annotation 'v4' is not allowed on class", line += 3, 1); validateError(compileResult, index++, "annotation 'v5' is not allowed on class", line += 3, 1); validateError(compileResult, index++, "annotation 'v6' is not allowed on class", line += 3, 1); validateError(compileResult, index++, "annotation 'v7' is not allowed on class", line += 3, 1); validateError(compileResult, 
index++, "annotation 'v8' is not allowed on class", ++line, 1); validateError(compileResult, index++, "annotation 'v9' is not allowed on class", line += 3, 1); validateError(compileResult, index++, "annotation 'v10' is not allowed on class", line += 3, 1); validateError(compileResult, index++, "annotation 'v11' is not allowed on class", line += 3, 1); validateError(compileResult, index++, "annotation 'v12' is not allowed on class", line += 3, 1); validateError(compileResult, index++, "annotation 'v13' is not allowed on class", line += 3, 1); validateError(compileResult, index, "annotation 'v15' is not allowed on class", line + 3, 1); } @Test public void testInvalidAttachmentOnObjectMethodDefinition() { int index = 25; int line = 117; validateError(compileResult, index++, "annotation 'v1' is not allowed on object_method, function", line, 5); validateError(compileResult, index++, "annotation 'v2' is not allowed on object_method, function", line += 3, 5); validateError(compileResult, index++, "annotation 'v6' is not allowed on object_method, function", line += 6, 5); validateError(compileResult, index++, "annotation 'v7' is not allowed on object_method, function", line += 3, 5); validateError(compileResult, index++, "annotation 'v8' is not allowed on object_method, function", ++line, 5); validateError(compileResult, index++, "annotation 'v9' is not allowed on object_method, function", line += 3, 5); validateError(compileResult, index++, "annotation 'v10' is not allowed on object_method, function", line += 3, 5); validateError(compileResult, index++, "annotation 'v11' is not allowed on object_method, function", line += 3, 5); validateError(compileResult, index++, "annotation 'v12' is not allowed on object_method, function", line += 3, 5); validateError(compileResult, index++, "annotation 'v13' is not allowed on object_method, function", line += 3, 5); validateError(compileResult, index, "annotation 'v15' is not allowed on object_method, function", line + 3, 5); } @Test 
// Verifies errors for annotations attached to an object method declaration (no body).
// All 'index' values are offsets into the diagnostic list of annot_attachments_negative.bal
// (compiled in setup()); 'line' tracks the source line of each attachment in that file.
public void testInvalidAttachmentOnObjectMethodDeclaration() {
    int index = 36;
    int line = 155;
    validateError(compileResult, index++, "annotation 'v1' is not allowed on object_method, function", line, 5);
    validateError(compileResult, index++, "annotation 'v2' is not allowed on object_method, function", line += 3, 5);
    validateError(compileResult, index++, "annotation 'v6' is not allowed on object_method, function", line += 6, 5);
    validateError(compileResult, index++, "annotation 'v7' is not allowed on object_method, function", line += 3, 5);
    validateError(compileResult, index++, "annotation 'v8' is not allowed on object_method, function", ++line, 5);
    validateError(compileResult, index++, "annotation 'v9' is not allowed on object_method, function", line += 3, 5);
    validateError(compileResult, index++, "annotation 'v10' is not allowed on object_method, function", line += 3, 5);
    validateError(compileResult, index++, "annotation 'v11' is not allowed on object_method, function", line += 3, 5);
    validateError(compileResult, index++, "annotation 'v12' is not allowed on object_method, function", line += 3, 5);
    validateError(compileResult, index++, "annotation 'v13' is not allowed on object_method, function", line += 3, 5);
    validateError(compileResult, index, "annotation 'v15' is not allowed on object_method, function", line + 3, 5);
}

// Verifies errors for annotations that may not be attached to a module-level function.
@Test
public void testInvalidAttachmentOnFunction() {
    int index = 47;
    int line = 230;
    validateError(compileResult, index++, "annotation 'v1' is not allowed on function", line, 1);
    validateError(compileResult, index++, "annotation 'v2' is not allowed on function", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v4' is not allowed on function", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v5' is not allowed on function", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v6' is not allowed on function", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v7' is not allowed on function", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v8' is not allowed on function", ++line, 1);
    validateError(compileResult, index++, "annotation 'v9' is not allowed on function", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v10' is not allowed on function", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v11' is not allowed on function", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v12' is not allowed on function", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v13' is not allowed on function", line += 3, 1);
    validateError(compileResult, index, "annotation 'v15' is not allowed on function", line + 3, 1);
}

// Verifies errors for annotations that may not be attached to a function parameter.
@Test
public void testInvalidAttachmentOnParam() {
    int index = 60;
    int line = 271;
    validateError(compileResult, index++, "annotation 'v1' is not allowed on parameter", line, 31);
    validateError(compileResult, index++, "annotation 'v2' is not allowed on parameter", line += 3, 29);
    validateError(compileResult, index++, "annotation 'v3' is not allowed on parameter", line += 3, 29);
    validateError(compileResult, index++, "annotation 'v4' is not allowed on parameter", line += 3, 29);
    validateError(compileResult, index++, "annotation 'v5' is not allowed on parameter", line += 3, 29);
    validateError(compileResult, index++, "annotation 'v7' is not allowed on parameter", line += 3, 29);
    validateError(compileResult, index++, "annotation 'v8' is not allowed on parameter", ++line, 29);
    validateError(compileResult, index++, "annotation 'v9' is not allowed on parameter", line += 3, 29);
    validateError(compileResult, index++, "annotation 'v10' is not allowed on parameter", line += 3, 29);
    validateError(compileResult, index++, "annotation 'v11' is not allowed on parameter", line += 3, 29);
    validateError(compileResult, index++, "annotation 'v12' is not allowed on parameter", line += 3, 29);
    validateError(compileResult, index++, "annotation 'v13' is not allowed on parameter", line += 3, 29);
    validateError(compileResult, index, "annotation 'v15' is not allowed on parameter", line + 3, 29);
}

// Verifies errors for annotations that may not be attached to a return type descriptor.
@Test
public void testInvalidAttachmentOnReturn() {
    int index = 73;
    int line = 307;
    validateError(compileResult, index++, "annotation 'v1' is not allowed on return", line, 53);
    validateError(compileResult, index++, "annotation 'v2' is not allowed on return", line += 3, 53);
    validateError(compileResult, index++, "annotation 'v3' is not allowed on return", line += 3, 53);
    validateError(compileResult, index++, "annotation 'v4' is not allowed on return", line += 3, 53);
    validateError(compileResult, index++, "annotation 'v5' is not allowed on return", line += 3, 53);
    validateError(compileResult, index++, "annotation 'v6' is not allowed on return", line += 3, 53);
    validateError(compileResult, index++, "annotation 'v8' is not allowed on return", line += 3, 53);
    validateError(compileResult, index++, "annotation 'v9' is not allowed on return", line += 3, 53);
    validateError(compileResult, index++, "annotation 'v10' is not allowed on return", line += 3, 53);
    validateError(compileResult, index++, "annotation 'v11' is not allowed on return", line += 3, 53);
    validateError(compileResult, index++, "annotation 'v12' is not allowed on return", line += 3, 53);
    validateError(compileResult, index++, "annotation 'v13' is not allowed on return", line += 3, 53);
    validateError(compileResult, index, "annotation 'v15' is not allowed on return", line + 3, 53);
}

// Verifies errors for annotations that may not be attached to a listener declaration.
@Test
public void testInvalidAttachmentOnListener() {
    int index = 86;
    int line = 349;
    validateError(compileResult, index++, "annotation 'v1' is not allowed on listener", line, 1);
    validateError(compileResult, index++, "annotation 'v2' is not allowed on listener", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v3' is not allowed on listener", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v4' is not allowed on listener", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v5' is not allowed on listener", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v6' is not allowed on listener", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v7' is not allowed on listener", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v8' is not allowed on listener", ++line, 1);
    validateError(compileResult, index++, "annotation 'v10' is not allowed on listener", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v11' is not allowed on listener", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v12' is not allowed on listener", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v13' is not allowed on listener", line += 3, 1);
    validateError(compileResult, index, "annotation 'v15' is not allowed on listener", line + 3, 1);
}

// Verifies errors for annotations that may not be attached to a service declaration.
@Test
public void testInvalidAttachmentOnService() {
    int index = 99;
    int line = 388;
    validateError(compileResult, index++, "annotation 'v1' is not allowed on service", line, 1);
    validateError(compileResult, index++, "annotation 'v2' is not allowed on service", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v3' is not allowed on service", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v4' is not allowed on service", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v5' is not allowed on service", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v6' is not allowed on service", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v7' is not allowed on service", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v9' is not allowed on service", ++line, 1);
    validateError(compileResult, index++, "annotation 'v10' is not allowed on service", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v11' is not allowed on service", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v12' is not allowed on service", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v13' is not allowed on service", line += 3, 1);
    validateError(compileResult, index, "annotation 'v15' is not allowed on service", line + 3, 1);
}

// Verifies errors for annotations that may not be attached to a resource method
// (reported with the object_method/function attach-point wording).
@Test
public void testInvalidAttachmentOnResource() {
    int index = 112;
    int line = 427;
    validateError(compileResult, index++, "annotation 'v1' is not allowed on object_method, function", line, 5);
    validateError(compileResult, index++, "annotation 'v2' is not allowed on object_method, function", line += 3, 5);
    validateError(compileResult, index++, "annotation 'v6' is not allowed on object_method, function", line += 6, 5);
    validateError(compileResult, index++, "annotation 'v7' is not allowed on object_method, function", line += 3, 5);
    validateError(compileResult, index++, "annotation 'v8' is not allowed on object_method, function", ++line, 5);
    validateError(compileResult, index++, "annotation 'v9' is not allowed on object_method, function", line += 3, 5);
    validateError(compileResult, index++, "annotation 'v10' is not allowed on object_method, function", line += 3, 5);
    validateError(compileResult, index++, "annotation 'v11' is not allowed on object_method, function", line += 3, 5);
    validateError(compileResult, index++, "annotation 'v12' is not allowed on object_method, function", line += 3, 5);
    validateError(compileResult, index++, "annotation 'v13' is not allowed on object_method, function", line += 3, 5);
    validateError(compileResult, index, "annotation 'v15' is not allowed on object_method, function", line + 3, 5);
}

// Verifies errors for annotations that may not be attached to an annotation declaration.
@Test
public void testInvalidAttachmentOnAnnotation() {
    int index = 123;
    int line = 490;
    validateError(compileResult, index++, "annotation 'v1' is not allowed on annotation", line, 1);
    validateError(compileResult, index++, "annotation 'v2' is not allowed on annotation", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v3' is not allowed on annotation", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v4' is not allowed on annotation", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v5' is not allowed on annotation", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v6' is not allowed on annotation", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v7' is not allowed on annotation", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v8' is not allowed on annotation", ++line, 1);
    validateError(compileResult, index++, "annotation 'v9' is not allowed on annotation", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v11' is not allowed on annotation", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v12' is not allowed on annotation", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v13' is not allowed on annotation", line += 3, 1);
    validateError(compileResult, index, "annotation 'v15' is not allowed on annotation", line + 3, 1);
}

// Verifies errors for annotations that may not be attached to a module-level variable.
@Test
public void testInvalidAttachmentOnVar() {
    int index = 136;
    int line = 529;
    validateError(compileResult, index++, "annotation 'v1' is not allowed on var", line, 1);
    validateError(compileResult, index++, "annotation 'v2' is not allowed on var", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v3' is not allowed on var", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v4' is not allowed on var", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v5' is not allowed on var", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v6' is not allowed on var", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v7' is not allowed on var", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v8' is not allowed on var", ++line, 1);
    validateError(compileResult, index++, "annotation 'v9' is not allowed on var", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v10' is not allowed on var", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v12' is not allowed on var", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v13' is not allowed on var", line += 3, 1);
    validateError(compileResult, index, "annotation 'v15' is not allowed on var", line + 3, 1);
}

// Verifies errors for annotations attached to a let-expression variable
// (reported with the 'var' attach-point wording, column 13).
@Test
public void testInvalidAttachmentOnLetVar() {
    int index = 149;
    int line = 568;
    validateError(compileResult, index++, "annotation 'v1' is not allowed on var", line, 13);
    validateError(compileResult, index++, "annotation 'v2' is not allowed on var", line += 3, 13);
    validateError(compileResult, index++, "annotation 'v3' is not allowed on var", line += 3, 13);
    validateError(compileResult, index++, "annotation 'v4' is not allowed on var", line += 3, 13);
    validateError(compileResult, index++, "annotation 'v5' is not allowed on var", line += 3, 13);
    validateError(compileResult, index++, "annotation 'v6' is not allowed on var", line += 3, 13);
    validateError(compileResult, index++, "annotation 'v7' is not allowed on var", line += 3, 13);
    validateError(compileResult, index++, "annotation 'v8' is not allowed on var", ++line, 13);
    validateError(compileResult, index++, "annotation 'v9' is not allowed on var", line += 3, 13);
    validateError(compileResult, index++, "annotation 'v10' is not allowed on var", line += 3, 13);
    validateError(compileResult, index++, "annotation 'v12' is not allowed on var", line += 3, 13);
    validateError(compileResult, index++, "annotation 'v13' is not allowed on var", line += 3, 13);
    validateError(compileResult, index, "annotation 'v15' is not allowed on var", line + 3, 13);
}

// Verifies errors for annotations that may not be attached to a constant declaration.
@Test
public void testInvalidAttachmentOnConst() {
    int index = 162;
    int line = 606;
    validateError(compileResult, index++, "annotation 'v1' is not allowed on const", line, 1);
    validateError(compileResult, index++, "annotation 'v2' is not allowed on const", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v3' is not allowed on const", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v4' is not allowed on const", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v5' is not allowed on const", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v6' is not allowed on const", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v7' is not allowed on const", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v8' is not allowed on const", ++line, 1);
    validateError(compileResult, index++, "annotation 'v9' is not allowed on const", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v10' is not allowed on const", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v11' is not allowed on const", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v13' is not allowed on const", line += 3, 1);
    validateError(compileResult, index, "annotation 'v15' is not allowed on const", line + 3, 1);
}

// Verifies errors for annotations that may not be attached to an external function body.
@Test
public void testInvalidAttachmentOnExternal() {
    int index = 175;
    int line = 645;
    validateError(compileResult, index++, "annotation 'v1' is not allowed on external", line, 62);
    validateError(compileResult, index++, "annotation 'v2' is not allowed on external", line += 3, 61);
    validateError(compileResult, index++, "annotation 'v3' is not allowed on external", line += 3, 61);
    validateError(compileResult, index++, "annotation 'v4' is not allowed on external", line += 3, 61);
    validateError(compileResult, index++, "annotation 'v5' is not allowed on external", line += 3, 61);
    validateError(compileResult, index++, "annotation 'v6' is not allowed on external", line += 3, 61);
    validateError(compileResult, index++, "annotation 'v7' is not allowed on external", line += 3, 61);
    validateError(compileResult, index++, "annotation 'v8' is not allowed on external", ++line, 61);
    validateError(compileResult, index++, "annotation 'v9' is not allowed on external", line += 3, 61);
    validateError(compileResult, index++, "annotation 'v10' is not allowed on external", line += 3, 61);
    validateError(compileResult, index++, "annotation 'v11' is not allowed on external", line += 3, 61);
    validateError(compileResult, index++, "annotation 'v12' is not allowed on external", line += 3, 61);
    validateError(compileResult, index, "annotation 'v15' is not allowed on external", line + 3, 61);
}

// Verifies errors for annotations attached to a service value bound to a variable:
// the first error is reported against the var, the rest against the service.
@Test
public void testInvalidAttachmentOnServiceVariable() {
    int index = 188;
    int line = 683;
    validateError(compileResult, index++, "annotation 'v8' is not allowed on var", line, 1);
    validateError(compileResult, index++, "annotation 'v1' is not allowed on service", line += 4, 1);
    validateError(compileResult, index++, "annotation 'v2' is not allowed on service", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v3' is not allowed on service", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v4' is not allowed on service", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v5' is not allowed on service", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v6' is not allowed on service", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v7' is not allowed on service", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v9' is not allowed on service", ++line, 1);
    validateError(compileResult, index++, "annotation 'v10' is not allowed on service", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v11' is not allowed on service", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v12' is not allowed on service", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v13' is not allowed on service", line += 3, 1);
    validateError(compileResult, index, "annotation 'v15' is not allowed on service", line + 3, 1);
}

// Verifies errors for annotations that may not be attached to a named worker.
@Test
public void testInvalidAttachmentOnWorker() {
    int index = 202;
    int line = 733;
    validateError(compileResult, index++, "annotation 'v1' is not allowed on worker", line, 5);
    validateError(compileResult, index++, "annotation 'v2' is not allowed on worker", line += 3, 5);
    validateError(compileResult, index++, "annotation 'v3' is not allowed on worker", line += 3, 5);
    validateError(compileResult, index++, "annotation 'v4' is not allowed on worker", line += 3, 5);
    validateError(compileResult, index++, "annotation 'v5' is not allowed on worker", line += 3, 5);
    validateError(compileResult, index++, "annotation 'v6' is not allowed on worker", line += 3, 5);
    validateError(compileResult, index++, "annotation 'v7' is not allowed on worker", line += 3, 5);
    validateError(compileResult, index++, "annotation 'v8' is not allowed on worker", ++line, 5);
    validateError(compileResult, index++, "annotation 'v9' is not allowed on worker", line += 3, 5);
    validateError(compileResult, index++, "annotation 'v10' is not allowed on worker", line += 3, 5);
    validateError(compileResult, index++, "annotation 'v11' is not allowed on worker", line += 3, 5);
    validateError(compileResult, index++, "annotation 'v12' is not allowed on worker", line += 3, 5);
    validateError(compileResult, index, "annotation 'v13' is not allowed on worker", line + 3, 5);
}

// Verifies errors for annotations on start actions; the first attachment additionally
// triggers an 'action invocation as an expression not allowed here' error on the same line.
@Test
public void testInvalidAttachmentOnStart() {
    int index = 215;
    int line = 776;
    validateError(compileResult, index++, "action invocation as an expression not allowed here", line, 1);
    validateError(compileResult, index++, "annotation 'v1' is not allowed on worker", line, 1);
    validateError(compileResult, index++, "annotation 'v2' is not allowed on worker", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v3' is not allowed on worker", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v4' is not allowed on worker", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v5' is not allowed on worker", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v6' is not allowed on worker", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v7' is not allowed on worker", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v8' is not allowed on worker", ++line, 1);
    validateError(compileResult, index++, "annotation 'v9' is not allowed on worker", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v10' is not allowed on worker", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v11' is not allowed on worker", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v12' is not allowed on worker", line += 3, 1);
    validateError(compileResult, index, "annotation 'v13' is not allowed on worker", line + 3, 1);
}

// Verifies errors for field-only annotations (v16/v17/v18) used on non-field constructs;
// source positions are hard-coded because the attachments are scattered in the test file.
@Test
public void testInvalidAttachmentForField() {
    int index = 229;
    validateError(compileResult, index++, "annotation 'v16' is not allowed on var", 819, 1);
    validateError(compileResult, index++, "annotation 'v16' is not allowed on function", 821, 1);
    validateError(compileResult, index++, "annotation 'v17' is not allowed on function", 822, 1);
    validateError(compileResult, index++, "annotation 'v18' is not allowed on function", 823, 1);
    validateError(compileResult, index++, "annotation 'v16' is not allowed on type", 828, 1);
    validateError(compileResult, index++, "annotation 'v17' is not allowed on type", 829, 1);
    validateError(compileResult, index++, "annotation 'v18' is not allowed on type", 830, 1);
    validateError(compileResult, index++, "annotation 'v17' is not allowed on record_field, field", 832, 5);
    validateError(compileResult, index++, "annotation 'v16' is not allowed on class", 835, 1);
    validateError(compileResult, index++, "annotation 'v17' is not allowed on class", 836, 1);
    validateError(compileResult, index++, "annotation 'v18' is not allowed on class", 837, 1);
    validateError(compileResult, index, "annotation 'v18' is not allowed on object_field, field", 839, 5);
}

// Verifies the error for a field-only annotation used in a type conversion expression.
@Test
public void testInvalidAttachmentForTypeConversionExpr() {
    int index = 241;
    validateError(compileResult, index++, "annotation 'v16' is not allowed on type", 847, 17);
}

// Verifies the error for a service-class-only annotation used on a plain class.
@Test
public void testInvalidAttachmentForClass() {
    int index = 242;
    validateError(compileResult, index++, "annotation 'v19' is not allowed on class", 852, 6);
}

// Verifies that the error message uses the fully qualified annotation name
// when the annotation comes from another module.
@Test
public void testQualifiedNameInInvalidAttachmentError() {
    validateError(compileResult, 243, "annotation 'ballerina/lang.annotations:1.0.0:tainted' is not allowed on class", 859, 1);
}

// Verifies errors when a value is provided for annotations that take no value.
@Test
public void testInvalidAttachmentWithValue() {
    validateError(compileResult, 244, "no annotation value expected for annotation 'ballerina/lang.annotations:1.0.0:tainted'", 864, 10);
    validateError(compileResult, 245, "no annotation value expected for annotation 'v7'", 869, 35);
}

// Verifies errors when no value is provided for annotations whose record type has required fields.
@Test
public void testInvalidAttachmentWithoutValue() {
    validateError(compileResult, 246, "annotation value expected for annotation of " + "record type 'Annot' with required fields", 871, 1);
    validateError(compileResult, 247, "annotation value expected for annotation of " + "record type 'Annot' with required fields", 874, 1);
    validateError(compileResult, 248, "annotation value expected for annotation of " + "record type 'Annot' with required fields", 875, 22);
    validateError(compileResult, 249, "annotation value expected for annotation of " + "record type 'Annot' with required fields", 881, 1);
    validateError(compileResult, 250, "annotation value expected for annotation of " + "record type 'Annot' with required fields", 882, 1);
}

// Verifies errors when the same annotation is attached more than once to a construct.
@Test
public void testInvalidAttachmentCount() {
    validateError(compileResult, 251, "cannot specify more than one annotation value for " + "annotation 'ballerina/lang.annotations:1.0.0:tainted'", 886, 1);
    validateError(compileResult, 252, "cannot specify more than one annotation value for annotation 'v1'", 888, 1);
}

// Verifies errors for annotations that may not be attached to a service class.
@Test
public void testInvalidAttachmentOnServiceClass() {
    int index = 253;
    int line = 892;
    validateError(compileResult, index++, "annotation 'v1' is not allowed on class", line, 1);
    validateError(compileResult, index++, "annotation 'v3' is not allowed on class", line += 6, 1);
    validateError(compileResult, index++, "annotation 'v4' is not allowed on class", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v5' is not allowed on class", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v6' is not allowed on class", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v7' is not allowed on class", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v8' is not allowed on class", ++line, 1);
    validateError(compileResult, index++, "annotation 'v9' is not allowed on class", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v10' is not allowed on class", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v11' is not allowed on class", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v12' is not allowed on class", line += 3, 1);
    validateError(compileResult, index++, "annotation 'v13' is not allowed on class", line += 3, 1);
    validateError(compileResult, index, "annotation 'v15' is not allowed on class", line + 3, 1);
}
}
// NOTE(review): the duplicated getClassLoader() helper below appears to be no longer valid — confirm whether it is still referenced before removing it.
/**
 * Returns the first available class loader, trying the thread context
 * loader, then this class's loader, then {@code Object}'s loader.
 */
private ClassLoader getClassLoader() {
    ClassLoader loader = Thread.currentThread().getContextClassLoader();
    if (loader != null) {
        return loader;
    }
    loader = getClass().getClassLoader();
    if (loader != null) {
        return loader;
    }
    return Object.class.getClassLoader();
}
/**
 * Resolves a usable class loader by falling through the candidates:
 * thread context loader, this class's loader, then {@code Object}'s loader.
 */
private ClassLoader getClassLoader() {
    ClassLoader result = Thread.currentThread().getContextClassLoader();
    if (result == null) {
        result = getClass().getClassLoader();
        if (result == null) {
            result = Object.class.getClassLoader();
        }
    }
    return result;
}
/**
 * Vert.x route handler that serves a fixed set of generated static resources from the
 * classpath (under {@code GeneratedStaticResourcesRecorder.META_INF_RESOURCES}).
 * Only GET, HEAD and OPTIONS requests are served; anything else — and any path not in
 * {@code generatedResources} — is delegated to the next handler after restoring the
 * class loader captured at construction time.
 *
 * NOTE(review): {@link #getClasspathResourceContent} performs blocking I/O directly in
 * {@code doHandle}; presumably acceptable for dev mode only — confirm this never runs
 * on a production event loop.
 */
class DevClasspathStaticHandler implements Handler<RoutingContext> {
    private static final Logger LOG = Logger.getLogger(DevClasspathStaticHandler.class);
    // The only HTTP methods this handler answers itself; others fall through to the next handler.
    private static final Set<HttpMethod> ALLOWED_HTTP_METHODS = Set.of(HttpMethod.GET, HttpMethod.HEAD, HttpMethod.OPTIONS);
    private static final int HTTP_STATUS_OK = 200;
    private static final int HTTP_STATUS_NO_CONTENT = 204;
    private static final String ALLOW_HEADER = "Allow";
    private static final String ALLOW_HEADER_VALUE = "HEAD,GET,OPTIONS";
    // Resource paths this handler is allowed to serve (stored without leading slashes).
    private final Set<String> generatedResources;
    // Media types for which a pre-set Content-Encoding header should be kept off the response.
    private final Set<String> compressedMediaTypes;
    // Class loader captured at construction; restored before delegating to the next handler.
    private final ClassLoader currentClassLoader;
    private final boolean enableCompression;
    // File name appended when a request targets a directory path ("/").
    private final String indexPage;
    // Charset advertised for text/* responses on HEAD requests.
    private final Charset defaultEncoding;

    public DevClasspathStaticHandler(Set<String> generatedResources, DevClasspathStaticHandlerOptions options) {
        this.generatedResources = generatedResources;
        this.compressedMediaTypes = options.getCompressMediaTypes();
        this.currentClassLoader = Thread.currentThread().getContextClassLoader();
        this.enableCompression = options.isEnableCompression();
        this.indexPage = options.getIndexPage();
        this.defaultEncoding = options.getDefaultEncoding();
    }

    @Override
    public void handle(RoutingContext context) {
        // Reject unsupported methods by passing the request on, not by failing it.
        if (!ALLOWED_HTTP_METHODS.contains(context.request().method())) {
            LOG.debugf("HTTP method '%s' not allowed, this static handler accepts only GET, OPTIONS and HEAD", context.request().method().name());
            beforeNextHandler(this.currentClassLoader, context);
        } else {
            doHandle(context);
        }
    }

    // Strips every leading '/' so the path matches the normalized keys in generatedResources.
    private String removeStartSlashes(String value) {
        return value.replaceAll("^/+", "");
    }

    // Core dispatch: resolve the path, check membership, then answer OPTIONS/HEAD/GET.
    private void doHandle(RoutingContext context) {
        String resolvedPath = resolvePath(context);
        // Directory requests are mapped to the configured index page.
        String path = resolvedPath.endsWith("/") ? resolvedPath.concat(this.indexPage) : resolvedPath;
        if (LOG.isDebugEnabled()) {
            LOG.debugf("Handling request for path '%s'", path);
        }
        boolean containsGeneratedResource = this.generatedResources.contains(removeStartSlashes(path));
        if (!containsGeneratedResource) {
            // Not ours to serve — hand off to the next route handler.
            beforeNextHandler(this.currentClassLoader, context);
            return;
        }
        if (context.request().method().equals(HttpMethod.OPTIONS)) {
            context.response().putHeader(ALLOW_HEADER, ALLOW_HEADER_VALUE).setStatusCode(HTTP_STATUS_NO_CONTENT).send();
            return;
        }
        compressIfNeeded(context, path);
        // NOTE(review): blocking classpath read on the calling thread — see class note.
        byte[] content = getClasspathResourceContent(path);
        if (content != null) {
            if (context.request().method().equals(HttpMethod.HEAD)) {
                handleHeadMethod(context, path, content);
            } else {
                // NOTE(review): the GET branch sets no Content-Type, unlike HEAD — confirm
                // whether an upstream handler supplies it or whether this is an oversight.
                context.response().setStatusCode(HTTP_STATUS_OK).send(Buffer.buffer(content));
            }
        } else {
            LOG.warnf("The '%s' file does not contain any content. Proceeding to the next handler if it exists", path);
            beforeNextHandler(this.currentClassLoader, context);
        }
    }

    // Answers a HEAD request: Content-Type (with charset for text/*) and Content-Length, no body.
    private void handleHeadMethod(RoutingContext context, String path, byte[] content) {
        String contentType = MimeMapping.getMimeTypeForFilename(path);
        if (contentType != null) {
            if (contentType.startsWith("text")) {
                context.response().putHeader(HttpHeaders.CONTENT_TYPE, contentType + ";charset=" + defaultEncoding);
            } else {
                context.response().putHeader(HttpHeaders.CONTENT_TYPE, contentType);
            }
        }
        context.response().putHeader(HttpHeaders.CONTENT_LENGTH, Long.toString(content.length));
        context.response().setStatusCode(HTTP_STATUS_OK).end();
    }

    /**
     * Reads the resource's bytes from the classpath, or returns {@code null} when the
     * resource is missing or unreadable (both cases are logged, not thrown).
     */
    private byte[] getClasspathResourceContent(String name) {
        String resourceName = GeneratedStaticResourcesRecorder.META_INF_RESOURCES + name;
        URL resource = getClassLoader().getResource(resourceName);
        if (resource == null) {
            LOG.warnf("The resource '%s' does not exist on classpath", resourceName);
            return null;
        }
        try {
            try (InputStream inputStream = resource.openStream()) {
                return inputStream.readAllBytes();
            }
        } catch (IOException e) {
            LOG.error("Error while reading file from Classpath for path " + resourceName, e);
            return null;
        }
    }

    // Strips the route mount point from the normalized path, keeping a leading '/'.
    private static String resolvePath(RoutingContext ctx) {
        return (ctx.mountPoint() == null) ? ctx.normalizedPath() : ctx.normalizedPath().substring(ctx.mountPoint().endsWith("/") ? ctx.mountPoint().length() - 1 : ctx.mountPoint().length());
    }

    // Restores the captured class loader before delegating, so later handlers see it.
    private static void beforeNextHandler(ClassLoader cl, RoutingContext ctx) {
        Thread.currentThread().setContextClassLoader(cl);
        ctx.next();
    }

    // Drops any pre-set Content-Encoding header for media types configured as compressible.
    private void compressIfNeeded(RoutingContext ctx, String path) {
        if (enableCompression && isCompressed(path)) {
            ctx.response().headers().remove(HttpHeaders.CONTENT_ENCODING);
        }
    }

    // True when the path's MIME type is one of the configured compressible media types.
    private boolean isCompressed(String path) {
        if (this.compressedMediaTypes.isEmpty()) {
            return false;
        }
        final String resourcePath = path.endsWith("/") ? path + this.indexPage : path;
        String contentType = MimeMapping.getMimeTypeForFilename(resourcePath);
        return contentType != null && this.compressedMediaTypes.contains(contentType);
    }
}
/**
 * Dev-mode Vert.x handler that serves generated static resources from the classpath.
 * Supports GET, HEAD and OPTIONS; any path not in {@code generatedResources} is
 * delegated to the next handler in the chain. Resource bytes are read off the
 * event loop via {@code executeBlocking}.
 */
class DevClasspathStaticHandler implements Handler<RoutingContext> {

    private static final Logger LOG = Logger.getLogger(DevClasspathStaticHandler.class);
    private static final int HTTP_STATUS_OK = 200;
    private static final int HTTP_STATUS_NO_CONTENT = 204;
    private static final String ALLOW_HEADER = "Allow";
    private static final String ALLOW_HEADER_VALUE = "HEAD,GET,OPTIONS";

    // Paths (relative, no leading slash handling shown here) this handler may serve.
    private final Set<String> generatedResources;
    // MIME types for which the Content-Encoding header is stripped so compression applies.
    private final Set<String> compressedMediaTypes;
    // Captured at construction; restored on the thread before delegating to the next handler.
    private final ClassLoader currentClassLoader;
    private final boolean enableCompression;
    // File name appended when a request path ends with '/' (e.g. "index.html").
    private final String indexPage;
    // Charset appended to text/* Content-Type headers.
    private final Charset defaultEncoding;

    public DevClasspathStaticHandler(Set<String> generatedResources, DevClasspathStaticHandlerOptions options) {
        this.generatedResources = generatedResources;
        this.compressedMediaTypes = options.getCompressMediaTypes();
        this.currentClassLoader = Thread.currentThread().getContextClassLoader();
        this.enableCompression = options.isEnableCompression();
        this.indexPage = options.getIndexPage();
        this.defaultEncoding = options.getDefaultEncoding();
    }

    /**
     * Entry point per request: resolves the effective path, short-circuits
     * unknown paths and OPTIONS requests, then loads the resource bytes on a
     * worker thread and responds from the result callback.
     */
    @Override
    public void handle(RoutingContext context) {
        String resolvedPath = resolvePath(context);
        // Directory-style request: serve the configured index page instead.
        String path = resolvedPath.endsWith("/") ? resolvedPath.concat(this.indexPage) : resolvedPath;
        if (LOG.isDebugEnabled()) {
            LOG.debugf("Handling request for path '%s'", path);
        }
        boolean containsGeneratedResource = this.generatedResources.contains(path);
        if (!containsGeneratedResource) {
            // Not ours to serve: restore the class loader and pass along the chain.
            beforeNextHandler(this.currentClassLoader, context);
            return;
        }
        if (context.request().method().equals(HttpMethod.OPTIONS)) {
            // Advertise the supported verbs; no body (204).
            context.response().putHeader(ALLOW_HEADER, ALLOW_HEADER_VALUE).setStatusCode(HTTP_STATUS_NO_CONTENT)
                .send();
            return;
        }
        compressIfNeeded(context, path);
        // Blocking classpath I/O runs on a worker thread; the response is written
        // back on the event loop in the result callback below.
        context.vertx().executeBlocking(future -> {
            try {
                byte[] content = getClasspathResourceContent(path);
                future.complete(content);
            } catch (Exception e) {
                future.fail(e);
            }
        }, asyncResult -> {
            if (asyncResult.succeeded()) {
                byte[] result = (byte[]) asyncResult.result();
                handleAsyncResultSucceeded(context, result, path);
            } else {
                context.fail(asyncResult.cause());
            }
        });
    }

    /**
     * Writes the response once the resource bytes are available. A null result
     * means the resource could not be read; in that case delegate onward.
     */
    private void handleAsyncResultSucceeded(RoutingContext context, byte[] result, String path) {
        if (result == null) {
            LOG.warnf("The '%s' file does not contain any content. Proceeding to the next handler if it exists", path);
            beforeNextHandler(this.currentClassLoader, context);
            return;
        }
        String contentType = MimeMapping.getMimeTypeForFilename(path);
        if (contentType != null) {
            if (contentType.startsWith("text")) {
                // Text types get an explicit charset so browsers decode correctly.
                context.response().putHeader(HttpHeaders.CONTENT_TYPE, contentType + ";charset=" + defaultEncoding);
            } else {
                context.response().putHeader(HttpHeaders.CONTENT_TYPE, contentType);
            }
        }
        if (context.request().method().equals(HttpMethod.HEAD)) {
            handleHeadMethod(context, result);
        } else {
            context.response().send(Buffer.buffer(result));
        }
    }

    // HEAD: headers only — report the length the body would have had, no payload.
    private void handleHeadMethod(RoutingContext context, byte[] content) {
        context.response().putHeader(HttpHeaders.CONTENT_LENGTH, Long.toString(content.length));
        context.response().setStatusCode(HTTP_STATUS_OK).end();
    }

    /**
     * Reads the resource bytes from META-INF/resources on the classpath.
     * Returns null when the resource is missing or unreadable (logged).
     * NOTE(review): getClassLoader() is defined outside this excerpt — confirm
     * which loader it returns (deployment vs. runtime).
     */
    private byte[] getClasspathResourceContent(String name) {
        String resourceName = GeneratedStaticResourcesRecorder.META_INF_RESOURCES + name;
        URL resource = getClassLoader().getResource(resourceName);
        if (resource == null) {
            LOG.warnf("The resource '%s' does not exist on classpath", resourceName);
            return null;
        }
        try {
            try (InputStream inputStream = resource.openStream()) {
                return inputStream.readAllBytes();
            }
        } catch (IOException e) {
            LOG.error("Error while reading file from Classpath for path " + resourceName, e);
            return null;
        }
    }

    // Strips the route mount point (if any) off the normalized request path.
    private static String resolvePath(RoutingContext ctx) {
        return (ctx.mountPoint() == null) ? ctx.normalizedPath()
            : ctx.normalizedPath().substring(
                ctx.mountPoint().endsWith("/") ? ctx.mountPoint().length() - 1 : ctx.mountPoint().length());
    }

    // Restores the captured TCCL before handing control to the next handler,
    // so downstream handlers observe the loader this handler was created with.
    private static void beforeNextHandler(ClassLoader cl, RoutingContext ctx) {
        Thread.currentThread().setContextClassLoader(cl);
        ctx.next();
    }

    // Removing Content-Encoding lets the compression layer (when enabled)
    // compress the response for eligible media types.
    private void compressIfNeeded(RoutingContext ctx, String path) {
        if (enableCompression && isCompressed(path)) {
            ctx.response().headers().remove(HttpHeaders.CONTENT_ENCODING);
        }
    }

    // True when the path's MIME type is in the configured compressible set.
    private boolean isCompressed(String path) {
        if (this.compressedMediaTypes.isEmpty()) {
            return false;
        }
        final String resourcePath = path.endsWith("/") ? path + this.indexPage : path;
        String contentType = MimeMapping.getMimeTypeForFilename(resourcePath);
        return contentType != null && this.compressedMediaTypes.contains(contentType);
    }
}
Okay, got it :) So we can remove the OS check and keep only the check for the file name.
/**
 * Returns true if the directory is empty, treating a lone {@code .DS_Store}
 * entry as "empty".
 *
 * FIX: dropped the {@code os.name} check — a {@code .DS_Store} file can be
 * present on any OS (e.g. in a directory copied from macOS), so only the file
 * name matters. Also uses the typed iterator from DirectoryStream&lt;Path&gt;
 * instead of a raw Iterator with an unchecked cast.
 *
 * @param directory directory to inspect
 * @return true when the directory has no entries, or its only entry is .DS_Store
 * @throws IOException if the directory cannot be opened or read
 */
private static boolean isDirEmpty(final Path directory) throws IOException {
    try (DirectoryStream<Path> dirStream = Files.newDirectoryStream(directory)) {
        Iterator<Path> pathIterator = dirStream.iterator();
        if (!pathIterator.hasNext()) {
            // No entries at all.
            return true;
        }
        Path fileName = pathIterator.next().getFileName();
        // Empty iff the single entry is the macOS metadata file and nothing follows it.
        return fileName != null
                && fileName.toString().equals(ProjectDirConstants.MAC_OS_DS_STORE_FILE)
                && !pathIterator.hasNext();
    }
}
!pathIterator.hasNext();
/**
 * Returns true if the directory is empty, treating a lone {@code .DS_Store}
 * entry (macOS Finder metadata, which may also survive in copied directories)
 * as "empty".
 *
 * FIX: uses the typed {@code Iterator<Path>} that DirectoryStream&lt;Path&gt;
 * already provides instead of a raw Iterator plus an unchecked cast.
 *
 * @param directory directory to inspect
 * @return true when the directory has no entries, or its only entry is .DS_Store
 * @throws IOException if the directory cannot be opened or read
 */
private static boolean isDirEmpty(final Path directory) throws IOException {
    try (DirectoryStream<Path> dirStream = Files.newDirectoryStream(directory)) {
        Iterator<Path> pathIterator = dirStream.iterator();
        if (!pathIterator.hasNext()) {
            // No entries at all.
            return true;
        }
        Path fileName = pathIterator.next().getFileName();
        // Empty iff the single entry is the .DS_Store file and nothing follows it.
        return fileName != null
                && fileName.toString().equals(ProjectDirConstants.DS_STORE_FILE)
                && !pathIterator.hasNext();
    }
}
class InitCommand implements BLauncherCmd { public static final String DEFAULT_VERSION = "0.0.1"; private static final String USER_DIR = "user.dir"; private static final PrintStream errStream = System.err; private final Path homePath = RepoUtils.createAndGetHomeReposPath(); private boolean alreadyInitializedProject = false; private boolean manifestExistInProject = false; private PrintStream out = System.out; @CommandLine.Option(names = {"--interactive", "-i"}) private boolean interactiveFlag; @CommandLine.Option(names = {"--help", "-h"}, hidden = true) private boolean helpFlag; @Override public void execute() { Path projectPath = Paths.get(System.getProperty(USER_DIR)); try { boolean isProject = Files.exists(projectPath.resolve(ProjectDirConstants.DOT_BALLERINA_DIR_NAME)); if (isProject) { alreadyInitializedProject = true; manifestExistInProject = Files.exists(projectPath.resolve(ProjectDirConstants.MANIFEST_FILE_NAME)); } if (!alreadyInitializedProject) { Optional<Path> childDotBallerina = Files.walk(projectPath) .filter(path -> Files.isDirectory(path) && path.toFile().getName().equals(ProjectDirConstants.DOT_BALLERINA_DIR_NAME)) .findFirst(); if (childDotBallerina.isPresent()) { errStream.println("A ballerina project is already initialized in " + childDotBallerina.get().toFile().getParent()); return; } Path projectRoot = findProjectRoot(projectPath); if (projectRoot != null) { errStream.println("Directory is already within a ballerina project :" + projectRoot.toString()); return; } } } catch (IOException ignore) { } Scanner scanner = new Scanner(System.in, Charset.defaultCharset().name()); try { Manifest manifest = null; if (helpFlag) { String commandUsageInfo = BLauncherCmd.getCommandUsageInfo(INIT_COMMAND); errStream.println(commandUsageInfo); return; } List<SrcFile> sourceFiles = new ArrayList<>(); List<ModuleMdFile> moduleMdFiles = new ArrayList<>(); boolean validInput = false; boolean firstPrompt = true; if (interactiveFlag) { if (!manifestExistInProject) { 
out.print("Create Ballerina.toml [yes/y, no/n]: (y) "); String createToml = scanner.nextLine().trim(); manifest = createManifest(scanner, createToml); } if (alreadyInitializedProject) { out.print("Create modules [yes/y, no/n]: (y) "); firstPrompt = false; String input = scanner.nextLine().trim(); if (input.equalsIgnoreCase("n")) { out.println("Ballerina project not reinitialized"); return; } } String srcInput; do { if (firstPrompt) { out.print("Ballerina source [service/s, main/m, finish/f]: (s) "); } else { out.print("Ballerina source [service/s, main/m, finish/f]: (f) "); } srcInput = scanner.nextLine().trim(); if (srcInput.equalsIgnoreCase("service") || srcInput.equalsIgnoreCase("s") || (srcInput.isEmpty() && firstPrompt)) { String packageName; do { out.print("Module for the service: (no module) "); packageName = scanner.nextLine().trim(); } while (!validatePkgName(projectPath, packageName)); SrcFile srcFile = new SrcFile(packageName, FileType.SERVICE); sourceFiles.add(srcFile); SrcFile srcTestFile = new SrcFile(packageName, FileType.SERVICE_TEST); sourceFiles.add(srcTestFile); if (!packageName.isEmpty()) { ModuleMdFile moduleMdFile = new ModuleMdFile(packageName, FileType.SERVICE); moduleMdFiles.add(moduleMdFile); } firstPrompt = false; } else if (srcInput.equalsIgnoreCase("main") || srcInput.equalsIgnoreCase("m")) { String packageName; do { out.print("Module for the main: (no module) "); packageName = scanner.nextLine().trim(); } while (!validatePkgName(projectPath, packageName)); SrcFile srcFile = new SrcFile(packageName, FileType.MAIN); sourceFiles.add(srcFile); SrcFile srcTestFile = new SrcFile(packageName, FileType.MAIN_TEST); sourceFiles.add(srcTestFile); if (!packageName.isEmpty()) { ModuleMdFile moduleMdFile = new ModuleMdFile(packageName, FileType.MAIN); moduleMdFiles.add(moduleMdFile); } firstPrompt = false; } else if (srcInput.isEmpty() || srcInput.equalsIgnoreCase("f") || srcInput.equalsIgnoreCase("finish")) { validInput = true; firstPrompt = false; 
} else { out.println("Invalid input"); } } while (!validInput); out.print("\n"); } else { manifest = new Manifest(); manifest.setName(guessOrgName()); manifest.setVersion(DEFAULT_VERSION); if (isDirEmpty(projectPath)) { SrcFile srcFile = new SrcFile("", FileType.SERVICE); sourceFiles.add(srcFile); } } InitHandler.initialize(projectPath, manifest, sourceFiles, moduleMdFiles); if (!alreadyInitializedProject) { out.println("Ballerina project initialized"); } else { out.println("Ballerina project reinitialized"); } } catch (IOException e) { out.println("Error occurred while creating project: " + e.getMessage()); } } /** * Create a manifest object. * * @param scanner scanner object * @param createToml create toml or not * @return manifest object */ private Manifest createManifest(Scanner scanner, String createToml) { Manifest manifest = new Manifest(); if (createToml.equalsIgnoreCase("yes") || createToml.equalsIgnoreCase("y") || createToml.isEmpty()) { String defaultOrg = guessOrgName(); String orgName; do { out.print("Organization name: (" + defaultOrg + ") "); orgName = scanner.nextLine().trim(); } while (!validateOrgName(orgName)); manifest.setName(orgName.isEmpty() ? defaultOrg : orgName); String version; do { out.print("Version: (" + DEFAULT_VERSION + ") "); version = scanner.nextLine().trim(); version = version.isEmpty() ? DEFAULT_VERSION : version; } while (!validateVersion(out, version)); manifest.setVersion(version); } return manifest; } /** * {@inheritDoc} */ @Override public String getName() { return INIT_COMMAND; } /** * {@inheritDoc} */ @Override public void printLongDesc(StringBuilder out) { out.append("Initializes a Ballerina Project. 
\n"); out.append("\n"); out.append("Use --interactive or -i to create a ballerina project in interactive mode.\n"); } /** * {@inheritDoc} */ @Override public void printUsage(StringBuilder out) { out.append(" ballerina init [-i] \n"); } /** * {@inheritDoc} */ @Override public void setParentCmdParser(CommandLine parentCmdParser) { } /** * {@inheritDoc} */ @Override public void setSelfCmdParser(CommandLine selfCmdParser) { } /** * Validates the version is a semver version. * * @param versionAsString The version. * @return True if valid version, else false. */ private boolean validateVersion(PrintStream out, String versionAsString) { String semverRegex = "((?:0|[1-9]\\d*)\\.(?:0|[1-9]\\d*)\\.(?:0|[1-9]\\d*))"; boolean matches = Pattern.matches(semverRegex, versionAsString); if (!matches) { out.println("--Invalid version: \"" + versionAsString + "\""); } return matches; } private String guessOrgName() { String guessOrgName = System.getProperty("user.name"); if (guessOrgName == null) { guessOrgName = "my_org"; } else { guessOrgName = guessOrgName.toLowerCase(Locale.getDefault()); } return guessOrgName; } /** * Validates the org-name. * * @param orgName The org-name. * @return True if valid org-name, else false. */ private boolean validateOrgName(String orgName) { if (RepoUtils.isReservedOrgName(orgName)) { out.println("--Invalid organization name: \'" + orgName + "\'. 'ballerina' and 'ballerinax' are reserved " + "organization names that are used by Ballerina"); return false; } boolean matches = RepoUtils.validateOrg(orgName); if (!matches) { out.println("--Invalid organization name: \'" + orgName + "\'. Organization name can only contain " + "lowercase alphanumerics and underscores and the maximum length is 256 characters"); } return matches; } /** * Validates the module name. * * @param projectPath * @param pkgName The module name. * @return True if valid module name, else false. 
*/ private boolean validatePkgName(Path projectPath, String pkgName) { if (validateExistingModules(projectPath, pkgName)) { return false; } if (pkgName.isEmpty()) { return true; } boolean matches = RepoUtils.validatePkg(pkgName); if (!matches) { out.println("--Invalid module name: \'" + pkgName + "\'. Module name can only contain " + "alphanumerics, underscores and periods and the maximum length is 256 characters"); } return matches; } /** * Find the project root by recursively up to the root. * * @param projectDir project path * @return project root */ private Path findProjectRoot(Path projectDir) { Path path = projectDir.resolve(ProjectDirConstants.DOT_BALLERINA_DIR_NAME); if (!path.equals(homePath) && java.nio.file.Files.exists(path, LinkOption.NOFOLLOW_LINKS)) { return projectDir; } Path parentsParent = projectDir.getParent(); if (null != parentsParent) { return findProjectRoot(parentsParent); } return null; } /** * Validate existing modules. * * @param projectPath project path * @param moduleNames modules name * @return if the module name already exists */ private boolean validateExistingModules(Path projectPath, String moduleNames) { if (alreadyInitializedProject) { List<Path> modules = new ArrayList<>(); try { modules = Files.list(projectPath).map(Path::getFileName).collect(Collectors.toList()); } catch (IOException ignore) { } if (modules.contains(Paths.get(moduleNames))) { out.println("Module already exists"); return true; } } return false; } }
class InitCommand implements BLauncherCmd { public static final String DEFAULT_VERSION = "0.0.1"; private static final String USER_DIR = "user.dir"; private static final PrintStream errStream = System.err; private final Path homePath = RepoUtils.createAndGetHomeReposPath(); private boolean alreadyInitializedProject = false; private boolean manifestExistInProject = false; private PrintStream out = System.out; @CommandLine.Option(names = {"--interactive", "-i"}) private boolean interactiveFlag; @CommandLine.Option(names = {"--help", "-h"}, hidden = true) private boolean helpFlag; @Override public void execute() { Path projectPath = Paths.get(System.getProperty(USER_DIR)); try { boolean isProject = Files.exists(projectPath.resolve(ProjectDirConstants.DOT_BALLERINA_DIR_NAME)); if (isProject) { alreadyInitializedProject = true; manifestExistInProject = Files.exists(projectPath.resolve(ProjectDirConstants.MANIFEST_FILE_NAME)); } if (!alreadyInitializedProject) { Optional<Path> childDotBallerina = Files.walk(projectPath) .filter(path -> Files.isDirectory(path) && path.toFile().getName().equals(ProjectDirConstants.DOT_BALLERINA_DIR_NAME)) .findFirst(); if (childDotBallerina.isPresent()) { errStream.println("A ballerina project is already initialized in " + childDotBallerina.get().toFile().getParent()); return; } Path projectRoot = findProjectRoot(projectPath); if (projectRoot != null) { errStream.println("Directory is already within a ballerina project :" + projectRoot.toString()); return; } } } catch (IOException ignore) { } Scanner scanner = new Scanner(System.in, Charset.defaultCharset().name()); try { Manifest manifest = null; if (helpFlag) { String commandUsageInfo = BLauncherCmd.getCommandUsageInfo(INIT_COMMAND); errStream.println(commandUsageInfo); return; } List<SrcFile> sourceFiles = new ArrayList<>(); List<ModuleMdFile> moduleMdFiles = new ArrayList<>(); boolean validInput = false; boolean firstPrompt = true; if (interactiveFlag) { if (!manifestExistInProject) { 
out.print("Create Ballerina.toml [yes/y, no/n]: (y) "); String createToml = scanner.nextLine().trim(); manifest = createManifest(scanner, createToml); } if (alreadyInitializedProject) { out.print("Create modules [yes/y, no/n]: (y) "); firstPrompt = false; String input = scanner.nextLine().trim(); if (input.equalsIgnoreCase("n")) { out.println("Ballerina project not reinitialized"); return; } } String srcInput; do { if (firstPrompt) { out.print("Ballerina source [service/s, main/m, finish/f]: (s) "); } else { out.print("Ballerina source [service/s, main/m, finish/f]: (f) "); } srcInput = scanner.nextLine().trim(); if (srcInput.equalsIgnoreCase("service") || srcInput.equalsIgnoreCase("s") || (srcInput.isEmpty() && firstPrompt)) { String packageName; do { out.print("Module for the service: (no module) "); packageName = scanner.nextLine().trim(); } while (!validatePkgName(projectPath, packageName)); SrcFile srcFile = new SrcFile(packageName, FileType.SERVICE); sourceFiles.add(srcFile); SrcFile srcTestFile = new SrcFile(packageName, FileType.SERVICE_TEST); sourceFiles.add(srcTestFile); if (!packageName.isEmpty()) { ModuleMdFile moduleMdFile = new ModuleMdFile(packageName, FileType.SERVICE); moduleMdFiles.add(moduleMdFile); } firstPrompt = false; } else if (srcInput.equalsIgnoreCase("main") || srcInput.equalsIgnoreCase("m")) { String packageName; do { out.print("Module for the main: (no module) "); packageName = scanner.nextLine().trim(); } while (!validatePkgName(projectPath, packageName)); SrcFile srcFile = new SrcFile(packageName, FileType.MAIN); sourceFiles.add(srcFile); SrcFile srcTestFile = new SrcFile(packageName, FileType.MAIN_TEST); sourceFiles.add(srcTestFile); if (!packageName.isEmpty()) { ModuleMdFile moduleMdFile = new ModuleMdFile(packageName, FileType.MAIN); moduleMdFiles.add(moduleMdFile); } firstPrompt = false; } else if (srcInput.isEmpty() || srcInput.equalsIgnoreCase("f") || srcInput.equalsIgnoreCase("finish")) { validInput = true; firstPrompt = false; 
} else { out.println("Invalid input"); } } while (!validInput); out.print("\n"); } else { manifest = new Manifest(); manifest.setName(guessOrgName()); manifest.setVersion(DEFAULT_VERSION); if (isDirEmpty(projectPath)) { SrcFile srcFile = new SrcFile("", FileType.SERVICE); sourceFiles.add(srcFile); } } InitHandler.initialize(projectPath, manifest, sourceFiles, moduleMdFiles); if (!alreadyInitializedProject) { out.println("Ballerina project initialized"); } else { out.println("Ballerina project reinitialized"); } } catch (IOException e) { out.println("Error occurred while creating project: " + e.getMessage()); } } /** * Create a manifest object. * * @param scanner scanner object * @param createToml create toml or not * @return manifest object */ private Manifest createManifest(Scanner scanner, String createToml) { Manifest manifest = new Manifest(); if (createToml.equalsIgnoreCase("yes") || createToml.equalsIgnoreCase("y") || createToml.isEmpty()) { String defaultOrg = guessOrgName(); String orgName; do { out.print("Organization name: (" + defaultOrg + ") "); orgName = scanner.nextLine().trim(); } while (!validateOrgName(orgName)); manifest.setName(orgName.isEmpty() ? defaultOrg : orgName); String version; do { out.print("Version: (" + DEFAULT_VERSION + ") "); version = scanner.nextLine().trim(); version = version.isEmpty() ? DEFAULT_VERSION : version; } while (!validateVersion(out, version)); manifest.setVersion(version); } return manifest; } /** * {@inheritDoc} */ @Override public String getName() { return INIT_COMMAND; } /** * {@inheritDoc} */ @Override public void printLongDesc(StringBuilder out) { out.append("Initializes a Ballerina Project. 
\n"); out.append("\n"); out.append("Use --interactive or -i to create a ballerina project in interactive mode.\n"); } /** * {@inheritDoc} */ @Override public void printUsage(StringBuilder out) { out.append(" ballerina init [-i] \n"); } /** * {@inheritDoc} */ @Override public void setParentCmdParser(CommandLine parentCmdParser) { } /** * {@inheritDoc} */ @Override public void setSelfCmdParser(CommandLine selfCmdParser) { } /** * Validates the version is a semver version. * * @param versionAsString The version. * @return True if valid version, else false. */ private boolean validateVersion(PrintStream out, String versionAsString) { String semverRegex = "((?:0|[1-9]\\d*)\\.(?:0|[1-9]\\d*)\\.(?:0|[1-9]\\d*))"; boolean matches = Pattern.matches(semverRegex, versionAsString); if (!matches) { out.println("--Invalid version: \"" + versionAsString + "\""); } return matches; } private String guessOrgName() { String guessOrgName = System.getProperty("user.name"); if (guessOrgName == null) { guessOrgName = "my_org"; } else { guessOrgName = guessOrgName.toLowerCase(Locale.getDefault()); } return guessOrgName; } /** * Validates the org-name. * * @param orgName The org-name. * @return True if valid org-name, else false. */ private boolean validateOrgName(String orgName) { if (RepoUtils.isReservedOrgName(orgName)) { out.println("--Invalid organization name: \'" + orgName + "\'. 'ballerina' and 'ballerinax' are reserved " + "organization names that are used by Ballerina"); return false; } boolean matches = RepoUtils.validateOrg(orgName); if (!matches) { out.println("--Invalid organization name: \'" + orgName + "\'. Organization name can only contain " + "lowercase alphanumerics and underscores and the maximum length is 256 characters"); } return matches; } /** * Validates the module name. * * @param projectPath * @param pkgName The module name. * @return True if valid module name, else false. 
*/ private boolean validatePkgName(Path projectPath, String pkgName) { if (validateExistingModules(projectPath, pkgName)) { return false; } if (pkgName.isEmpty()) { return true; } boolean matches = RepoUtils.validatePkg(pkgName); if (!matches) { out.println("--Invalid module name: \'" + pkgName + "\'. Module name can only contain " + "alphanumerics, underscores and periods and the maximum length is 256 characters"); } return matches; } /** * Find the project root by recursively up to the root. * * @param projectDir project path * @return project root */ private Path findProjectRoot(Path projectDir) { Path path = projectDir.resolve(ProjectDirConstants.DOT_BALLERINA_DIR_NAME); if (!path.equals(homePath) && java.nio.file.Files.exists(path, LinkOption.NOFOLLOW_LINKS)) { return projectDir; } Path parentsParent = projectDir.getParent(); if (null != parentsParent) { return findProjectRoot(parentsParent); } return null; } /** * Validate existing modules. * * @param projectPath project path * @param moduleNames modules name * @return if the module name already exists */ private boolean validateExistingModules(Path projectPath, String moduleNames) { if (alreadyInitializedProject) { List<Path> modules = new ArrayList<>(); try { modules = Files.list(projectPath).map(Path::getFileName).collect(Collectors.toList()); } catch (IOException ignore) { } if (modules.contains(Paths.get(moduleNames))) { out.println("Module already exists"); return true; } } return false; } }
If, in the future, we need to support functions that are not of the zero-argument form `xx()` but take arguments, such as `fun(A, B)`, how would this code be extended to stay compatible?
/**
 * Materializes this textual default expression into an executable builtin
 * function-call {@link Expr}.
 *
 * @return the resolved zero-argument builtin call, or null when the stored
 *         expression is not in the supported allow-list (caller must handle null)
 */
public Expr obtainExpr() {
    // Only the fixed allow-list of zero-argument builtin calls is supported.
    if (SUPPORTED_DEFAULT_FNS.contains(expr)) {
        // Strip the trailing "()" to recover the bare function name.
        // NOTE(review): this assumes the zero-arg form "name()"; supporting
        // argument-taking defaults such as fun(A, B) would require real
        // parsing of the expression text instead of this string replace.
        String functionName = expr.replace("()", "");
        FunctionCallExpr functionCallExpr = new FunctionCallExpr(new FunctionName(functionName), Lists.newArrayList());
        // Resolve the builtin with an empty argument signature and propagate its return type.
        Function fn = Expr.getBuiltinFunction(functionName, new Type[] {}, Function.CompareMode.IS_IDENTICAL);
        functionCallExpr.setFn(fn);
        functionCallExpr.setType(fn.getReturnType());
        return functionCallExpr;
    }
    return null;
}
String functionName = expr.replace("()", "");
/**
 * Materializes this textual default expression into an executable builtin
 * function-call {@link Expr}.
 *
 * @return the resolved zero-argument builtin call, or null when the stored
 *         expression is not in the supported allow-list (caller must handle null)
 */
public Expr obtainExpr() {
    // Anything outside the zero-argument builtin allow-list is rejected.
    if (!SUPPORTED_DEFAULT_FNS.contains(expr)) {
        return null;
    }
    // "uuid()" -> "uuid": drop the call parentheses to obtain the bare name.
    String fnName = expr.replace("()", "");
    // Resolve the builtin with an empty argument signature.
    Function builtin = Expr.getBuiltinFunction(fnName, new Type[] {}, Function.CompareMode.IS_IDENTICAL);
    FunctionCallExpr call = new FunctionCallExpr(new FunctionName(fnName), Lists.newArrayList());
    call.setFn(builtin);
    call.setType(builtin.getReturnType());
    return call;
}
/**
 * Holds the textual form of a column default expression, e.g. "uuid()".
 * Only the builtin calls listed in {@link #SUPPORTED_DEFAULT_FNS} are valid.
 */
class DefaultExpr {
    // Allow-list of builtin function calls accepted as default expressions.
    public static final Set<String> SUPPORTED_DEFAULT_FNS = ImmutableSet.of("uuid()", "uuid_numeric()");

    // Raw expression text; serialized under the key "expr".
    @SerializedName("expr")
    private String expr;

    public DefaultExpr(String expr) {
        this.expr = expr;
    }

    /** Returns the raw textual default expression. */
    public String getExpr() {
        return expr;
    }

    /** Replaces the raw textual default expression. */
    public void setExpr(String expr) {
        this.expr = expr;
    }
}
/**
 * Wrapper around the textual form of a column default expression, for example
 * "uuid()". Only the calls in {@link #SUPPORTED_DEFAULT_FNS} are accepted.
 */
class DefaultExpr {

    /** Allow-list of builtin function calls usable as default expressions. */
    public static final Set<String> SUPPORTED_DEFAULT_FNS = ImmutableSet.of("uuid()", "uuid_numeric()");

    /** Raw expression text, serialized under the JSON key "expr". */
    @SerializedName("expr")
    private String expr;

    public DefaultExpr(String expr) {
        this.expr = expr;
    }

    /** Replaces the raw textual default expression. */
    public void setExpr(String expr) {
        this.expr = expr;
    }

    /** Returns the raw textual default expression. */
    public String getExpr() {
        return expr;
    }
}
Why are we setting a default page size here? Shouldn't this be handled by the creator of this type?
/**
 * Creates an instance backed by the given {@link ContinuablePagedFlux}.
 *
 * FIX: no page-size preference is imposed here anymore. The previous
 * hard-coded {@code defaultPageSize = 1} was an arbitrary default this type
 * should not choose; the creator decides via the streamByPage/iterableByPage
 * overloads that accept a {@code preferredPageSize}, so the field stays null.
 *
 * @param pagedFlux the paged flux to use as iterable
 * @param batchSize the bounded capacity to prefetch from the flux
 */
public ContinuablePagedIterable(ContinuablePagedFlux<C, T, P> pagedFlux, int batchSize) {
    super(pagedFlux);
    this.pagedFlux = pagedFlux;
    this.batchSize = batchSize;
    this.defaultPageSize = null;
    // The sync-retriever collaborators are unused on this async construction path.
    this.continuationPredicate = null;
    this.syncPageRetrieverProvider = null;
}
this.defaultPageSize = 1;
/**
 * Creates an instance backed by the given {@link ContinuablePagedFlux}.
 *
 * @param pagedFlux the paged flux to use as iterable
 * @param batchSize the bounded capacity to prefetch from the flux
 */
public ContinuablePagedIterable(ContinuablePagedFlux<C, T, P> pagedFlux, int batchSize) {
    super(pagedFlux);
    this.pagedFlux = pagedFlux;
    this.batchSize = batchSize;
    // No default page-size preference is imposed here; callers choose one via
    // the streamByPage/iterableByPage overloads that take preferredPageSize.
    this.defaultPageSize = null;
    // The fields below back the synchronous page-retriever construction path
    // and are deliberately unset: the async pagedFlux drives paging itself.
    this.continuationPredicate = null;
    this.pageRetrieverSyncProvider = null;
}
/**
 * Synchronous, iterable view over continuable pages of {@code T}. An instance
 * is backed either by an async {@link ContinuablePagedFlux} or by a
 * synchronous {@link SyncPageRetriever} provider; exactly one of the two is
 * non-null and the internal methods dispatch on that.
 *
 * @param <C> continuation token type
 * @param <T> element type
 * @param <P> page type
 */
class ContinuablePagedIterable<C, T, P extends ContinuablePage<C, T>> extends IterableStream<T> {
    private static final ClientLogger LOGGER = new ClientLogger(ContinuablePagedIterable.class);

    // Async backing; null when constructed from a sync page retriever.
    private final ContinuablePagedFlux<C, T, P> pagedFlux;
    // Prefetch capacity used when draining the flux.
    private final int batchSize;
    // Sync backing; null when constructed from a paged flux.
    private final Supplier<SyncPageRetriever<C, P>> syncPageRetrieverProvider;
    // Preferred page size for the sync path; may be null (no preference).
    final Integer defaultPageSize;
    // Decides whether a continuation token warrants fetching another page (sync path).
    private final Predicate<C> continuationPredicate;

    /**
     * Creates an instance with the given {@link ContinuablePagedFlux} and a
     * prefetch batch size of 1.
     *
     * NOTE(review): the two-argument (pagedFlux, batchSize) constructor this
     * delegates to is defined outside this excerpt.
     *
     * @param pagedFlux the paged flux to use as iterable
     */
    public ContinuablePagedIterable(ContinuablePagedFlux<C, T, P> pagedFlux) {
        this(pagedFlux, 1);
    }

    /**
     * Creates an instance with the given {@link SyncPageRetriever} provider.
     *
     * @param syncPageRetrieverProvider A provider that returns {@link SyncPageRetriever}.
     * @param pageSize The preferred page size, or null for no preference.
     * @param continuationPredicate A predicate which determines if paging should continue.
     * @throws NullPointerException If {@code syncPageRetrieverProvider} is null.
     * @throws IllegalArgumentException If {@code pageSize} is not null and is less than or equal to zero.
     */
    public ContinuablePagedIterable(Supplier<SyncPageRetriever<C, P>> syncPageRetrieverProvider, Integer pageSize,
            Predicate<C> continuationPredicate) {
        // The super iterable consumes the provider once for item-wise iteration.
        super(new ContinuablePagedByItemIterable<>(syncPageRetrieverProvider.get(), null, continuationPredicate,
            pageSize));
        this.syncPageRetrieverProvider = Objects.requireNonNull(syncPageRetrieverProvider,
            "'syncPageRetrieverProvider' function cannot be null.");
        if (pageSize != null && pageSize <= 0) {
            throw LOGGER.logExceptionAsError(
                new IllegalArgumentException("'pageSize' must be greater than 0 required but provided: " + pageSize));
        }
        this.continuationPredicate = continuationPredicate;
        this.defaultPageSize = pageSize;
        // Async backing unused on this construction path.
        this.pagedFlux = null;
        this.batchSize = 1;
    }

    @Override
    public Stream<T> stream() {
        return StreamSupport.stream(iterableByItemInternal().spliterator(), false);
    }

    /**
     * Retrieves the {@link Stream}, one page at a time. It will provide the same
     * {@link Stream} of pages from the start if called multiple times.
     *
     * @return {@link Stream} of pages
     */
    public Stream<P> streamByPage() {
        return streamByPageInternal(null, null, () -> this.pagedFlux.byPage().toStream(batchSize));
    }

    /**
     * Retrieves the {@link Stream}, one page at a time, starting from the next
     * page associated with the given continuation token. To start from the first
     * page, use {@link #streamByPage()}.
     *
     * @param continuationToken The continuation token used to fetch the next page
     * @return {@link Stream} of pages
     */
    public Stream<P> streamByPage(C continuationToken) {
        return streamByPageInternal(continuationToken, null,
            () -> this.pagedFlux.byPage(continuationToken).toStream(batchSize));
    }

    /**
     * Retrieves the {@link Stream}, one page at a time, with each page containing
     * {@code preferredPageSize} items.
     *
     * @param preferredPageSize the preferred page size; the service may or may not honor the
     * preference, so clients MUST be prepared to handle pages of a different size.
     * @return {@link Stream} of pages
     */
    public Stream<P> streamByPage(int preferredPageSize) {
        return streamByPageInternal(null, preferredPageSize,
            () -> this.pagedFlux.byPage(preferredPageSize).toStream(batchSize));
    }

    /**
     * Retrieves the {@link Stream}, one page at a time, with each page containing
     * {@code preferredPageSize} items, starting from the next page associated with
     * the given continuation token.
     *
     * @param continuationToken The continuation token used to fetch the next page
     * @param preferredPageSize the preferred page size; the service may or may not honor the
     * preference, so clients MUST be prepared to handle pages of a different size.
     * @return {@link Stream} of pages
     */
    public Stream<P> streamByPage(C continuationToken, int preferredPageSize) {
        return streamByPageInternal(continuationToken, preferredPageSize,
            () -> this.pagedFlux.byPage(continuationToken, preferredPageSize).toStream(batchSize));
    }

    @Override
    public Iterator<T> iterator() {
        return iterableByItemInternal().iterator();
    }

    /**
     * Retrieves the {@link Iterable}, one page at a time. It will provide the same
     * {@link Iterable} of pages from the start if called multiple times.
     *
     * @return {@link Iterable} of pages
     */
    public Iterable<P> iterableByPage() {
        return iterableByPageInternal(null, null, () -> this.pagedFlux.byPage().toIterable(batchSize));
    }

    /**
     * Retrieves the {@link Iterable}, one page at a time, starting from the next
     * page associated with the given continuation token. To start from the first
     * page, use {@link #iterableByPage()}.
     *
     * @param continuationToken The continuation token used to fetch the next page
     * @return {@link Iterable} of pages
     */
    public Iterable<P> iterableByPage(C continuationToken) {
        return iterableByPageInternal(continuationToken, null,
            () -> this.pagedFlux.byPage(continuationToken).toIterable(batchSize));
    }

    /**
     * Retrieves the {@link Iterable}, one page at a time, with each page containing
     * {@code preferredPageSize} items.
     *
     * @param preferredPageSize the preferred page size; the service may or may not honor the
     * preference, so clients MUST be prepared to handle pages of a different size.
     * @return {@link Iterable} of pages
     */
    public Iterable<P> iterableByPage(int preferredPageSize) {
        return iterableByPageInternal(null, preferredPageSize,
            () -> this.pagedFlux.byPage(preferredPageSize).toIterable(batchSize));
    }

    /**
     * Retrieves the {@link Iterable}, one page at a time, with each page containing
     * {@code preferredPageSize} items, starting from the next page associated with
     * the given continuation token.
     *
     * @param continuationToken The continuation token used to fetch the next page
     * @param preferredPageSize the preferred page size; the service may or may not honor the
     * preference, so clients MUST be prepared to handle pages of a different size.
     * @return {@link Iterable} of pages
     */
    public Iterable<P> iterableByPage(C continuationToken, int preferredPageSize) {
        return iterableByPageInternal(continuationToken, preferredPageSize,
            () -> this.pagedFlux.byPage(continuationToken, preferredPageSize).toIterable(batchSize));
    }

    // Dispatch: sync retriever and ContinuablePagedFluxCore both go through the
    // page-iterable path; any other flux falls back to draining the flux itself.
    private Stream<P> streamByPageInternal(C continuationToken, Integer preferredPageSize,
            Supplier<Stream<P>> nonPagedFluxCoreIterableSupplier) {
        if (pagedFlux == null) {
            // Sync construction path.
            return StreamSupport.stream(iterableByPageInternal(continuationToken, preferredPageSize, null)
                .spliterator(), false);
        }
        if (pagedFlux instanceof ContinuablePagedFluxCore) {
            return StreamSupport.stream(iterableByPageInternal(continuationToken, preferredPageSize, null)
                .spliterator(), false);
        } else {
            return nonPagedFluxCoreIterableSupplier.get();
        }
    }

    // Same three-way dispatch as above, producing an Iterable of pages.
    private Iterable<P> iterableByPageInternal(C continuationToken, Integer preferredPageSize,
            Supplier<Iterable<P>> nonPagedFluxCoreIterableSupplier) {
        if (pagedFlux == null) {
            // Sync construction path: iterate directly over the sync retriever.
            return new ContinuablePagedByPageIterable<>(syncPageRetrieverProvider.get(), continuationToken,
                this.continuationPredicate, preferredPageSize);
        }
        if (pagedFlux instanceof ContinuablePagedFluxCore) {
            // Core flux exposes its retriever; reuse it to avoid a reactive round trip.
            ContinuablePagedFluxCore<C, T, P> pagedFluxCore = (ContinuablePagedFluxCore<C, T, P>) pagedFlux;
            return new ContinuablePagedByPageIterable<>(pagedFluxCore.pageRetrieverProvider.get(), continuationToken,
                pagedFluxCore.getContinuationPredicate(), preferredPageSize);
        } else {
            return nonPagedFluxCoreIterableSupplier.get();
        }
    }

    // Item-wise equivalent of the dispatch above (no token, no page-size preference).
    private Iterable<T> iterableByItemInternal() {
        if (pagedFlux == null) {
            return new ContinuablePagedByItemIterable<>(this.syncPageRetrieverProvider.get(), null,
                this.continuationPredicate, null);
        }
        if (pagedFlux instanceof ContinuablePagedFluxCore) {
            ContinuablePagedFluxCore<C, T, P> pagedFluxCore = (ContinuablePagedFluxCore<C, T, P>) pagedFlux;
            return new ContinuablePagedByItemIterable<>(pagedFluxCore.pageRetrieverProvider.get(), null,
                pagedFluxCore.getContinuationPredicate(), null);
        } else {
            return this.pagedFlux.toIterable(this.batchSize);
        }
    }
}
class ContinuablePagedIterable<C, T, P extends ContinuablePage<C, T>> extends IterableStream<T> { private static final ClientLogger LOGGER = new ClientLogger(ContinuablePagedIterable.class); private final ContinuablePagedFlux<C, T, P> pagedFlux; private final int batchSize; private final Supplier<PageRetrieverSync<C, P>> pageRetrieverSyncProvider; final Integer defaultPageSize; private final Predicate<C> continuationPredicate; /** * Creates instance with the given {@link ContinuablePagedFlux}. * * @param pagedFlux the paged flux use as iterable */ public ContinuablePagedIterable(ContinuablePagedFlux<C, T, P> pagedFlux) { this(pagedFlux, 1); } /** * Creates instance with the given {@link ContinuablePagedFlux}. * * @param pagedFlux the paged flux use as iterable * @param batchSize the bounded capacity to prefetch from the {@link ContinuablePagedFlux} */ /** * Creates instance with the given {@link PageRetrieverSync provider}. * * @param pageRetrieverSyncProvider A provider that returns {@link PageRetrieverSync}. * @param pageSize The preferred page size. * @param continuationPredicate A predicate which determines if paging should continue. * @throws NullPointerException If {@code pageRetrieverSyncProvider} is null. * @throws IllegalArgumentException If {@code pageSize} is not null and is less than or equal to zero. */ public ContinuablePagedIterable(Supplier<PageRetrieverSync<C, P>> pageRetrieverSyncProvider, Integer pageSize, Predicate<C> continuationPredicate) { super(new ContinuablePagedByItemIterable<>(pageRetrieverSyncProvider.get(), null, continuationPredicate, pageSize)); this.pageRetrieverSyncProvider = Objects.requireNonNull(pageRetrieverSyncProvider, "'pageRetrieverSyncProvider' function cannot be null."); if (pageSize != null && pageSize <= 0) { throw LOGGER.logExceptionAsError( new IllegalArgumentException("'pageSize' must be greater than 0 required but provided: " + pageSize)); } this.continuationPredicate = (continuationPredicate == null) ? 
Objects::nonNull : continuationPredicate; this.defaultPageSize = pageSize; this.batchSize = 1; this.pagedFlux = null; } @Override public Stream<T> stream() { return StreamSupport.stream(iterableByItemInternal().spliterator(), false); } /** * Retrieve the {@link Stream}, one page at a time. It will provide same {@link Stream} of T values from starting if * called multiple times. * * @return {@link Stream} of a pages */ public Stream<P> streamByPage() { return streamByPageInternal(null, null, () -> this.pagedFlux.byPage().toStream(batchSize)); } /** * Retrieve the {@link Stream}, one page at a time, starting from the next page associated with the given * continuation token. To start from first page, use {@link * * @param continuationToken The continuation token used to fetch the next page * @return {@link Stream} of a pages */ public Stream<P> streamByPage(C continuationToken) { return streamByPageInternal(continuationToken, null, () -> this.pagedFlux.byPage(continuationToken).toStream(batchSize)); } /** * Retrieve the {@link Stream}, one page at a time, with each page containing {@code preferredPageSize} items. * * It will provide same {@link Stream} of T values from starting if called multiple times. * * @param preferredPageSize the preferred page size, service may or may not honor the page size preference hence * client MUST be prepared to handle pages with different page size. * @return {@link Stream} of a pages */ public Stream<P> streamByPage(int preferredPageSize) { return streamByPageInternal(null, preferredPageSize, () -> this.pagedFlux.byPage(preferredPageSize).toStream(batchSize)); } /** * Retrieve the {@link Stream}, one page at a time, with each page containing {@code preferredPageSize} items, * starting from the next page associated with the given continuation token. 
To start from first page, use {@link * * * @param preferredPageSize the preferred page size, service may or may not honor the page size preference hence * client MUST be prepared to handle pages with different page size. * @param continuationToken The continuation token used to fetch the next page * @return {@link Stream} of a pages */ public Stream<P> streamByPage(C continuationToken, int preferredPageSize) { return streamByPageInternal(continuationToken, preferredPageSize, () -> this.pagedFlux.byPage(continuationToken, preferredPageSize).toStream(batchSize)); } @Override public Iterator<T> iterator() { return iterableByItemInternal().iterator(); } /** * Retrieve the {@link Iterable}, one page at a time. It will provide same {@link Iterable} of T values from * starting if called multiple times. * * @return {@link Stream} of a pages */ public Iterable<P> iterableByPage() { return iterableByPageInternal(null, null, () -> this.pagedFlux.byPage().toIterable(batchSize)); } /** * Retrieve the {@link Iterable}, one page at a time, starting from the next page associated with the given * continuation token. To start from first page, use {@link * * @param continuationToken The continuation token used to fetch the next page * @return {@link Iterable} of a pages */ public Iterable<P> iterableByPage(C continuationToken) { return iterableByPageInternal(continuationToken, null, () -> this.pagedFlux.byPage(continuationToken).toIterable(batchSize)); } /** * Retrieve the {@link Iterable}, one page at a time, with each page containing {@code preferredPageSize} items. * * It will provide same {@link Iterable} of T values from starting if called multiple times. * * @param preferredPageSize the preferred page size, service may or may not honor the page size preference hence * client MUST be prepared to handle pages with different page size. 
* @return {@link Iterable} of a pages */ public Iterable<P> iterableByPage(int preferredPageSize) { return iterableByPageInternal(null, preferredPageSize, () -> this.pagedFlux.byPage(preferredPageSize).toIterable(batchSize)); } /** * Retrieve the {@link Iterable}, one page at a time, with each page containing {@code preferredPageSize} items, * starting from the next page associated with the given continuation token. To start from first page, use {@link * * * @param preferredPageSize the preferred page size, service may or may not honor the page size preference hence * client MUST be prepared to handle pages with different page size. * @param continuationToken The continuation token used to fetch the next page * @return {@link Iterable} of a pages */ public Iterable<P> iterableByPage(C continuationToken, int preferredPageSize) { return iterableByPageInternal(continuationToken, preferredPageSize, () -> this.pagedFlux.byPage(continuationToken, preferredPageSize).toIterable(batchSize)); } private Stream<P> streamByPageInternal(C continuationToken, Integer preferredPageSize, Supplier<Stream<P>> nonPagedFluxCoreIterableSupplier) { if (pagedFlux == null) { return StreamSupport.stream(iterableByPageInternal(continuationToken, preferredPageSize, null) .spliterator(), false); } if (pagedFlux instanceof ContinuablePagedFluxCore) { return StreamSupport.stream(iterableByPageInternal(continuationToken, preferredPageSize, null) .spliterator(), false); } else { return nonPagedFluxCoreIterableSupplier.get(); } } private Iterable<P> iterableByPageInternal(C continuationToken, Integer preferredPageSize, Supplier<Iterable<P>> nonPagedFluxCoreIterableSupplier) { if (pagedFlux == null) { return new ContinuablePagedByPageIterable<>(pageRetrieverSyncProvider.get(), continuationToken, this.continuationPredicate, preferredPageSize); } if (pagedFlux instanceof ContinuablePagedFluxCore) { ContinuablePagedFluxCore<C, T, P> pagedFluxCore = (ContinuablePagedFluxCore<C, T, P>) pagedFlux; return 
new ContinuablePagedByPageIterable<>(pagedFluxCore.pageRetrieverProvider.get(), continuationToken, pagedFluxCore.getContinuationPredicate(), preferredPageSize); } else { return nonPagedFluxCoreIterableSupplier.get(); } } private Iterable<T> iterableByItemInternal() { if (pagedFlux == null) { return new ContinuablePagedByItemIterable<>(this.pageRetrieverSyncProvider.get(), null, this.continuationPredicate, null); } if (pagedFlux instanceof ContinuablePagedFluxCore) { ContinuablePagedFluxCore<C, T, P> pagedFluxCore = (ContinuablePagedFluxCore<C, T, P>) pagedFlux; return new ContinuablePagedByItemIterable<>(pagedFluxCore.pageRetrieverProvider.get(), null, pagedFluxCore.getContinuationPredicate(), null); } else { return this.pagedFlux.toIterable(this.batchSize); } } }
@pubudu538 At the moment, the type values are all capitalized, e.g. `ARRAY`. Shall we make them like `Array` instead?
/**
 * Resolves the diagnostic message emitted when the type of a TOML node does
 * not match the type declared in the schema for the current key.
 *
 * @param found the type that was actually encountered in the TOML document
 * @return the user-configured message for type violations if the schema
 *         defines one, otherwise a default message built from the current
 *         key, the expected schema type and the encountered type
 */
private String getTypeErrorMessage(Type found) {
    // A schema author may override the default wording through the message map.
    String customMessage = this.schema.message().get(SchemaDeserializer.TYPE);
    if (customMessage != null) {
        return customMessage;
    }
    return String.format("key '%s' expects %s . found %s", this.key, schema.type(), found);
}
return String.format("key '%s' expects %s . found %s", this.key, schema.type(), found);
/**
 * Resolves the diagnostic message emitted when the type of a TOML node does
 * not match the type declared in the schema for the current key.
 *
 * @param found the type that was actually encountered in the TOML document
 * @return the user-configured message for type violations if the schema
 *         defines one, otherwise a default message built from the current
 *         key, the expected schema type and the encountered type
 */
private String getTypeErrorMessage(Type found) {
    // A schema author may override the default wording through the message map.
    String customMessage = this.schema.message().get(SchemaDeserializer.TYPE);
    if (customMessage != null) {
        return customMessage;
    }
    return String.format("incompatible type for key '%s': expected '%s', found '%s'",
            this.key, schema.type(), found);
}
class SchemaValidator extends TomlNodeVisitor { private static final String PROPERTY_HOLDER = "${property}"; private AbstractSchema schema; private String key; public SchemaValidator(Schema schema) { this.schema = schema; } @Override public void visit(TomlTableNode tomlTableNode) { if (schema.type() != Type.OBJECT) { TomlDiagnostic diagnostic = getTomlDiagnostic(tomlTableNode.location(), "TVE0002", "error.invalid.type", DiagnosticSeverity.ERROR, getTypeErrorMessage(Type.OBJECT)); tomlTableNode.addDiagnostic(diagnostic); return; } Schema objectSchema = (Schema) schema; Map<String, AbstractSchema> properties = objectSchema.properties(); List<String> requiredFields = getRequiredFields(objectSchema); Map<String, TopLevelNode> tableEntries = tomlTableNode.entries(); for (Map.Entry<String, TopLevelNode> tableEntry : tableEntries.entrySet()) { String key = tableEntry.getKey(); requiredFields.remove(key); TopLevelNode value = tableEntry.getValue(); AbstractSchema schema = properties.get(key); if (schema != null) { visitNode(value, schema, key); continue; } if (!objectSchema.hasAdditionalProperties()) { DiagnosticInfo diagnosticInfo = new DiagnosticInfo("TVE0001", "error.unexpected.property", DiagnosticSeverity.ERROR); TomlDiagnostic diagnostic = new TomlDiagnostic(value.location(), diagnosticInfo, getUnexpectedPropertyErrorMessage(key)); tomlTableNode.addDiagnostic(diagnostic); } } for (String field : requiredFields) { DiagnosticInfo diagnosticInfo = new DiagnosticInfo("TVE0006", "error.required.field.missing", DiagnosticSeverity.ERROR); TomlDiagnostic diagnostic = new TomlDiagnostic(tomlTableNode.location(), diagnosticInfo, getRequiredErrorMessage(field)); tomlTableNode.addDiagnostic(diagnostic); } } private String getRequiredErrorMessage(String field) { Map<String, String> message = this.schema.message(); String typeCustomMessage = message.get(SchemaDeserializer.REQUIRED); if (typeCustomMessage == null) { return "missing required field '" + field + "'"; } return 
typeCustomMessage.replace(PROPERTY_HOLDER, field); } private String getUnexpectedPropertyErrorMessage(String key) { Map<String, String> message = this.schema.message(); String typeCustomMessage = message.get(SchemaDeserializer.ADDITIONAL_PROPERTIES); if (typeCustomMessage == null) { return "unexpected property '" + key + "'"; } return typeCustomMessage.replace(PROPERTY_HOLDER, key); } @Override public void visit(TomlTableArrayNode tomlTableArrayNode) { if (schema.type() != Type.ARRAY) { TomlDiagnostic diagnostic = getTomlDiagnostic(tomlTableArrayNode.location(), "TVE0002", "error.invalid.type", DiagnosticSeverity.ERROR, getTypeErrorMessage(Type.ARRAY)); tomlTableArrayNode.addDiagnostic(diagnostic); return; } ArraySchema arraySchema = (ArraySchema) schema; AbstractSchema memberSchema = arraySchema.items(); List<TomlTableNode> children = tomlTableArrayNode.children(); for (TomlTableNode child : children) { visitNode(child, memberSchema); } } @Override public void visit(TomlKeyValueNode keyValue) { TomlValueNode value = keyValue.value(); visitNode(value); } @Override public void visit(TomlValueNode tomlValue) { visitNode(tomlValue); } private String getPatternErrorMessage(String pattern) { Map<String, String> message = this.schema.message(); String typeCustomMessage = message.get(SchemaDeserializer.PATTERN); if (typeCustomMessage == null) { return String.format("key '%s' value does not match the regex provided in schema %s", this.key, pattern); } return typeCustomMessage; } @Override public void visit(TomlStringValueNode tomlStringValueNode) { if (schema.type() != Type.STRING) { TomlDiagnostic diagnostic = getTomlDiagnostic(tomlStringValueNode.location(), "TVE0002", "error.invalid.type", DiagnosticSeverity.ERROR, getTypeErrorMessage(Type.STRING)); tomlStringValueNode.addDiagnostic(diagnostic); return; } StringSchema stringSchema = (StringSchema) this.schema; if (stringSchema.pattern().isPresent()) { String pattern = stringSchema.pattern().get(); if 
(!Pattern.compile(pattern).matcher(tomlStringValueNode.getValue()).matches()) { TomlDiagnostic diagnostic = getTomlDiagnostic(tomlStringValueNode.location(), "TVE0003", "error.regex.mismatch", DiagnosticSeverity.ERROR, getPatternErrorMessage(pattern)); tomlStringValueNode.addDiagnostic(diagnostic); } } } @Override public void visit(TomlDoubleValueNodeNode tomlDoubleValueNodeNode) { if (schema.type() != Type.NUMBER) { TomlDiagnostic diagnostic = getTomlDiagnostic(tomlDoubleValueNodeNode.location(), "TVE0002", "error.invalid.type", DiagnosticSeverity.ERROR, getTypeErrorMessage(Type.NUMBER)); tomlDoubleValueNodeNode.addDiagnostic(diagnostic); return; } List<Diagnostic> diagnostics = validateMinMaxValues((NumericSchema) schema, tomlDoubleValueNodeNode.getValue(), tomlDoubleValueNodeNode.location()); tomlDoubleValueNodeNode.addDiagnostics(diagnostics); } @Override public void visit(TomlLongValueNode tomlLongValueNode) { if (schema.type() != Type.INTEGER) { TomlDiagnostic diagnostic = getTomlDiagnostic(tomlLongValueNode.location(), "TVE0002", "error.invalid.type", DiagnosticSeverity.ERROR, getTypeErrorMessage(Type.INTEGER)); tomlLongValueNode.addDiagnostic(diagnostic); return; } List<Diagnostic> diagnostics = validateMinMaxValues((NumericSchema) schema, Double.valueOf(tomlLongValueNode.getValue()), tomlLongValueNode.location()); for (Diagnostic diagnostic : diagnostics) { tomlLongValueNode.addDiagnostic(diagnostic); } } @Override public void visit(TomlArrayValueNode tomlArrayValueNode) { if (schema.type() != Type.ARRAY) { TomlDiagnostic diagnostic = getTomlDiagnostic(tomlArrayValueNode.location(), "TVE0002", "error.invalid.type", DiagnosticSeverity.ERROR, getTypeErrorMessage(Type.ARRAY)); tomlArrayValueNode.addDiagnostic(diagnostic); return; } ArraySchema arraySchema = (ArraySchema) schema; AbstractSchema items = arraySchema.items(); for (TomlValueNode valueNode : tomlArrayValueNode.elements()) { visitNode(valueNode, items); } } private String 
getMaxValueExceedErrorMessage(Double max) { Map<String, String> message = this.schema.message(); String maxCustomMessage = message.get(SchemaDeserializer.MAXIMUM); if (maxCustomMessage == null) { return String.format("key '%s' value can't be higher than %f", this.key, max); } return maxCustomMessage; } private String getMinValueDeceedErrorMessage(Double min) { Map<String, String> message = this.schema.message(); String minCustomMessage = message.get(SchemaDeserializer.MINIMUM); if (minCustomMessage == null) { return String.format("key '%s' value can't be lower than %f", this.key, min); } return minCustomMessage; } private List<Diagnostic> validateMinMaxValues(NumericSchema numericSchema, Double value, TomlNodeLocation location) { List<Diagnostic> diagnostics = new ArrayList<>(); if (numericSchema.maximum().isPresent()) { Double max = numericSchema.maximum().get(); if (value > max) { TomlDiagnostic diagnostic = getTomlDiagnostic(location, "TVE0005", "error" + ".maximum.value.exceed", DiagnosticSeverity.ERROR, getMaxValueExceedErrorMessage(max)); diagnostics.add(diagnostic); } } if (numericSchema.minimum().isPresent()) { Double min = numericSchema.minimum().get(); if (value < min) { TomlDiagnostic diagnostic = getTomlDiagnostic(location, "TVE0004", "error.minimum.value.deceed", DiagnosticSeverity.ERROR, getMinValueDeceedErrorMessage(min)); diagnostics.add(diagnostic); } } return diagnostics; } @Override public void visit(TomlBooleanValueNode tomlBooleanValueNode) { if (schema.type() != Type.BOOLEAN) { TomlDiagnostic diagnostic = getTomlDiagnostic(tomlBooleanValueNode.location(), "TVE0002", "error.invalid.type", DiagnosticSeverity.ERROR, getTypeErrorMessage(Type.BOOLEAN)); tomlBooleanValueNode.addDiagnostic(diagnostic); } } private void visitNode(TomlNode node) { AbstractSchema previousSchema = this.schema; String previousKey = this.key; node.accept(this); this.schema = previousSchema; this.key = previousKey; } private void visitNode(TomlNode node, AbstractSchema 
schema) { AbstractSchema previousSchema = this.schema; this.schema = schema; node.accept(this); this.schema = previousSchema; } private void visitNode(TomlNode node, AbstractSchema schema, String key) { AbstractSchema previousSchema = this.schema; String previousKey = this.key; this.schema = schema; this.key = key; node.accept(this); this.schema = previousSchema; this.key = previousKey; } private TomlDiagnostic getTomlDiagnostic(TomlNodeLocation location, String code, String template, DiagnosticSeverity severity, String message) { DiagnosticInfo diagnosticInfo = new DiagnosticInfo(code, template, severity); return new TomlDiagnostic(location, diagnosticInfo, message); } private List<String> getRequiredFields(Schema objectSchema) { if (objectSchema.required() == null) { return new ArrayList<>(); } return objectSchema.required(); } }
class SchemaValidator extends TomlNodeVisitor { private static final String PROPERTY_HOLDER = "${property}"; private AbstractSchema schema; private String key; private String schemaTitle; public SchemaValidator(Schema schema) { this.schema = schema; this.schemaTitle = schema.title(); } @Override public void visit(TomlTableNode tomlTableNode) { if (schema.type() != Type.OBJECT) { TomlDiagnostic diagnostic = getTomlDiagnostic(tomlTableNode.location(), "TVE0002", "error.invalid.type", DiagnosticSeverity.ERROR, getTypeErrorMessage(Type.OBJECT)); tomlTableNode.addDiagnostic(diagnostic); return; } Schema objectSchema = (Schema) schema; Map<String, AbstractSchema> properties = objectSchema.properties(); List<String> requiredFields = getRequiredFields(objectSchema); Map<String, TopLevelNode> tableEntries = tomlTableNode.entries(); for (Map.Entry<String, TopLevelNode> tableEntry : tableEntries.entrySet()) { String key = tableEntry.getKey(); requiredFields.remove(key); TopLevelNode value = tableEntry.getValue(); AbstractSchema schema = properties.get(key); if (schema != null) { visitNode(value, schema, key); continue; } if (!objectSchema.hasAdditionalProperties()) { DiagnosticInfo diagnosticInfo = new DiagnosticInfo("TVE0001", "error.unexpected.property", DiagnosticSeverity.ERROR); TomlDiagnostic diagnostic = new TomlDiagnostic(value.location(), diagnosticInfo, getUnexpectedPropertyErrorMessage(key)); tomlTableNode.addDiagnostic(diagnostic); } } for (String field : requiredFields) { DiagnosticInfo diagnosticInfo = new DiagnosticInfo("TVE0006", "error.required.field.missing", DiagnosticSeverity.ERROR); TomlDiagnostic diagnostic = new TomlDiagnostic(tomlTableNode.location(), diagnosticInfo, getRequiredErrorMessage(field)); tomlTableNode.addDiagnostic(diagnostic); } } private String getRequiredErrorMessage(String field) { Map<String, String> message = this.schema.message(); String typeCustomMessage = message.get(SchemaDeserializer.REQUIRED); if (typeCustomMessage == null) { 
return "missing required field '" + field + "'"; } return typeCustomMessage.replace(PROPERTY_HOLDER, field); } private String getUnexpectedPropertyErrorMessage(String key) { Map<String, String> message = this.schema.message(); String typeCustomMessage = message.get(SchemaDeserializer.ADDITIONAL_PROPERTIES); if (typeCustomMessage == null) { return String.format("key '%s' not supported in schema '%s'", key, schemaTitle); } return typeCustomMessage.replace(PROPERTY_HOLDER, key); } @Override public void visit(TomlTableArrayNode tomlTableArrayNode) { if (schema.type() != Type.ARRAY) { TomlDiagnostic diagnostic = getTomlDiagnostic(tomlTableArrayNode.location(), "TVE0002", "error.invalid.type", DiagnosticSeverity.ERROR, getTypeErrorMessage(Type.ARRAY)); tomlTableArrayNode.addDiagnostic(diagnostic); return; } ArraySchema arraySchema = (ArraySchema) schema; AbstractSchema memberSchema = arraySchema.items(); List<TomlTableNode> children = tomlTableArrayNode.children(); for (TomlTableNode child : children) { visitNode(child, memberSchema); } } @Override public void visit(TomlKeyValueNode keyValue) { TomlValueNode value = keyValue.value(); visitNode(value); } @Override public void visit(TomlValueNode tomlValue) { visitNode(tomlValue); } private String getPatternErrorMessage(String pattern) { Map<String, String> message = this.schema.message(); String typeCustomMessage = message.get(SchemaDeserializer.PATTERN); if (typeCustomMessage == null) { return String.format("value for key '%s' expected to match the regex: %s", this.key, pattern); } return typeCustomMessage; } @Override public void visit(TomlStringValueNode tomlStringValueNode) { if (schema.type() != Type.STRING) { TomlDiagnostic diagnostic = getTomlDiagnostic(tomlStringValueNode.location(), "TVE0002", "error.invalid.type", DiagnosticSeverity.ERROR, getTypeErrorMessage(Type.STRING)); tomlStringValueNode.addDiagnostic(diagnostic); return; } StringSchema stringSchema = (StringSchema) this.schema; if 
(stringSchema.pattern().isPresent()) { String pattern = stringSchema.pattern().get(); if (!Pattern.compile(pattern).matcher(tomlStringValueNode.getValue()).matches()) { TomlDiagnostic diagnostic = getTomlDiagnostic(tomlStringValueNode.location(), "TVE0003", "error.regex.mismatch", DiagnosticSeverity.ERROR, getPatternErrorMessage(pattern)); tomlStringValueNode.addDiagnostic(diagnostic); } } } @Override public void visit(TomlDoubleValueNodeNode tomlDoubleValueNodeNode) { if (schema.type() != Type.NUMBER) { TomlDiagnostic diagnostic = getTomlDiagnostic(tomlDoubleValueNodeNode.location(), "TVE0002", "error.invalid.type", DiagnosticSeverity.ERROR, getTypeErrorMessage(Type.NUMBER)); tomlDoubleValueNodeNode.addDiagnostic(diagnostic); return; } List<Diagnostic> diagnostics = validateMinMaxValues((NumericSchema) schema, tomlDoubleValueNodeNode.getValue(), tomlDoubleValueNodeNode.location()); tomlDoubleValueNodeNode.addDiagnostics(diagnostics); } @Override public void visit(TomlLongValueNode tomlLongValueNode) { if (schema.type() != Type.INTEGER) { TomlDiagnostic diagnostic = getTomlDiagnostic(tomlLongValueNode.location(), "TVE0002", "error.invalid.type", DiagnosticSeverity.ERROR, getTypeErrorMessage(Type.INTEGER)); tomlLongValueNode.addDiagnostic(diagnostic); return; } List<Diagnostic> diagnostics = validateMinMaxValues((NumericSchema) schema, Double.valueOf(tomlLongValueNode.getValue()), tomlLongValueNode.location()); for (Diagnostic diagnostic : diagnostics) { tomlLongValueNode.addDiagnostic(diagnostic); } } @Override public void visit(TomlArrayValueNode tomlArrayValueNode) { if (schema.type() != Type.ARRAY) { TomlDiagnostic diagnostic = getTomlDiagnostic(tomlArrayValueNode.location(), "TVE0002", "error.invalid.type", DiagnosticSeverity.ERROR, getTypeErrorMessage(Type.ARRAY)); tomlArrayValueNode.addDiagnostic(diagnostic); return; } ArraySchema arraySchema = (ArraySchema) schema; AbstractSchema items = arraySchema.items(); for (TomlValueNode valueNode : 
tomlArrayValueNode.elements()) { visitNode(valueNode, items); } } private String getMaxValueExceedErrorMessage(Double max) { Map<String, String> message = this.schema.message(); String maxCustomMessage = message.get(SchemaDeserializer.MAXIMUM); if (maxCustomMessage == null) { return String.format("value for key '%s' can't be higher than %f", this.key, max); } return maxCustomMessage; } private String getMinValueDeceedErrorMessage(Double min) { Map<String, String> message = this.schema.message(); String minCustomMessage = message.get(SchemaDeserializer.MINIMUM); if (minCustomMessage == null) { return String.format("value for key '%s' can't be lower than %f", this.key, min); } return minCustomMessage; } private List<Diagnostic> validateMinMaxValues(NumericSchema numericSchema, Double value, TomlNodeLocation location) { List<Diagnostic> diagnostics = new ArrayList<>(); if (numericSchema.maximum().isPresent()) { Double max = numericSchema.maximum().get(); if (value > max) { TomlDiagnostic diagnostic = getTomlDiagnostic(location, "TVE0005", "error" + ".maximum.value.exceed", DiagnosticSeverity.ERROR, getMaxValueExceedErrorMessage(max)); diagnostics.add(diagnostic); } } if (numericSchema.minimum().isPresent()) { Double min = numericSchema.minimum().get(); if (value < min) { TomlDiagnostic diagnostic = getTomlDiagnostic(location, "TVE0004", "error.minimum.value.deceed", DiagnosticSeverity.ERROR, getMinValueDeceedErrorMessage(min)); diagnostics.add(diagnostic); } } return diagnostics; } @Override public void visit(TomlBooleanValueNode tomlBooleanValueNode) { if (schema.type() != Type.BOOLEAN) { TomlDiagnostic diagnostic = getTomlDiagnostic(tomlBooleanValueNode.location(), "TVE0002", "error.invalid.type", DiagnosticSeverity.ERROR, getTypeErrorMessage(Type.BOOLEAN)); tomlBooleanValueNode.addDiagnostic(diagnostic); } } private void visitNode(TomlNode node) { AbstractSchema previousSchema = this.schema; String previousKey = this.key; node.accept(this); this.schema = 
previousSchema; this.key = previousKey; } private void visitNode(TomlNode node, AbstractSchema schema) { AbstractSchema previousSchema = this.schema; this.schema = schema; node.accept(this); this.schema = previousSchema; } private void visitNode(TomlNode node, AbstractSchema schema, String key) { AbstractSchema previousSchema = this.schema; String previousKey = this.key; this.schema = schema; this.key = key; node.accept(this); this.schema = previousSchema; this.key = previousKey; } private TomlDiagnostic getTomlDiagnostic(TomlNodeLocation location, String code, String template, DiagnosticSeverity severity, String message) { DiagnosticInfo diagnosticInfo = new DiagnosticInfo(code, template, severity); return new TomlDiagnostic(location, diagnosticInfo, message); } private List<String> getRequiredFields(Schema objectSchema) { if (objectSchema.required() == null) { return new ArrayList<>(); } return objectSchema.required(); } }
`totalCount` has been removed from the accumulator in the latest commit; it is now calculated in `getValue()` only when needed.
/**
 * Computes the requested percentiles over the values accumulated in
 * {@code valueCount}, using linear interpolation between the two closest
 * ranks when a percentile position falls between buckets.
 *
 * @return one percentile per entry of {@code percentages}, in the same
 *         order, or {@code null} when no values have been accumulated
 * @throws FlinkRuntimeException if reading the backing map view fails
 */
public Double[] getValue() {
    // Snapshot the (value -> occurrence count) entries; MapView access may fail at runtime.
    List<Map.Entry<Double, Long>> sortedList = new ArrayList<>();
    try {
        for (Map.Entry<Double, Long> entry : valueCount.entries()) {
            sortedList.add(entry);
        }
    } catch (Exception e) {
        throw new FlinkRuntimeException(e);
    }
    // Fix: an empty accumulator previously caused an IndexOutOfBoundsException
    // at sortedList.get(0); return null (SQL NULL) instead.
    if (sortedList.isEmpty() || totalCount <= 0) {
        return null;
    }
    sortedList.sort(Map.Entry.comparingByKey());
    // Translate each requested percentage into a 1-based rank position over totalCount values.
    List<Pair<Double, Integer>> sortedPercentages = new ArrayList<>();
    for (int index = 0; index < percentages.length; index++) {
        sortedPercentages.add(new Pair<>(percentages[index] * (totalCount - 1) + 1, index));
    }
    sortedPercentages.sort(Comparator.comparing(Pair::getKey));
    Double[] percentiles = new Double[percentages.length];
    long preCnt = sortedList.get(0).getValue();
    for (int i = 0, j = 0; i < sortedPercentages.size(); i++) {
        Pair<Double, Integer> entry = sortedPercentages.get(i);
        double position = entry.getKey();
        long lower = (long) Math.floor(position);
        long higher = (long) Math.ceil(position);
        // Advance to the bucket whose cumulative count covers the lower rank.
        while (preCnt < lower) {
            j++;
            preCnt += sortedList.get(j).getValue();
        }
        percentiles[entry.getValue()] = preCnt >= higher
                ? sortedList.get(j).getKey()
                : (higher - position) * sortedList.get(j).getKey()
                        + (position - lower) * sortedList.get(j + 1).getKey();
    }
    return percentiles;
}
/**
 * Computes the requested percentiles over the values accumulated in
 * {@code valueCount}, interpolating linearly between the two closest ranks
 * when a percentile position falls between buckets.
 *
 * @return one percentile per entry of {@code percentages}, in the same
 *         order, or {@code null} when no values have been accumulated
 * @throws FlinkRuntimeException if reading the backing map view fails
 */
public Double[] getValue() {
    // Materialise the (value -> occurrence count) entries and tally the total.
    List<Map.Entry<Double, Long>> entries = new ArrayList<>();
    long total = 0L;
    try {
        for (Map.Entry<Double, Long> entry : valueCount.entries()) {
            entries.add(entry);
            total += entry.getValue();
        }
    } catch (Exception ex) {
        throw new FlinkRuntimeException(ex);
    }
    // No accumulated values -> SQL NULL.
    if (total <= 0) {
        return null;
    }
    entries.sort(Map.Entry.comparingByKey());
    // Translate each requested percentage into a 1-based rank position.
    List<Pair<Double, Integer>> rankedPositions = new ArrayList<>();
    for (int idx = 0; idx < percentages.length; idx++) {
        rankedPositions.add(new Pair<>(percentages[idx] * (total - 1) + 1, idx));
    }
    rankedPositions.sort(Comparator.comparing(Pair::getKey));
    Double[] result = new Double[percentages.length];
    long cumulative = entries.get(0).getValue();
    int bucket = 0;
    for (Pair<Double, Integer> ranked : rankedPositions) {
        double position = ranked.getKey();
        long lower = (long) Math.floor(position);
        long higher = (long) Math.ceil(position);
        // Advance until the cumulative count covers the lower rank.
        while (cumulative < lower) {
            bucket++;
            cumulative += entries.get(bucket).getValue();
        }
        result[ranked.getValue()] = cumulative >= higher
                ? entries.get(bucket).getKey()
                : (higher - position) * entries.get(bucket).getKey()
                        + (position - lower) * entries.get(bucket + 1).getKey();
    }
    return result;
}
/**
 * Accumulator for the PERCENTILE aggregate function: stores the requested
 * percentages, the total number of accumulated values and a
 * (value -> occurrence count) map view of the observed inputs.
 */
class PercentileAccumulator {
    /** Requested percentages, each within [0.0, 1.0]. */
    public double[] percentages;
    /** Total number of values accumulated so far. */
    public long totalCount;
    /** Occurrence count per distinct accumulated value. */
    public MapView<Double, Long> valueCount;

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        PercentileAccumulator that = (PercentileAccumulator) o;
        return Arrays.equals(percentages, that.percentages)
                && totalCount == that.totalCount
                && Objects.equals(valueCount, that.valueCount);
    }

    @Override
    public int hashCode() {
        // Fix: equals() is null-safe for valueCount, but the previous
        // valueCount.hashCode() call threw an NPE when valueCount was null.
        // Objects.hash yields the same value for non-null views.
        return Objects.hash(Arrays.hashCode(percentages), totalCount, valueCount);
    }

    /**
     * Sets a single requested percentage.
     *
     * @param percentage the percentage, must be within [0.0, 1.0]
     * @throws IllegalArgumentException if the percentage is out of range
     */
    public void setPercentages(Double percentage) {
        checkPercentage(percentage);
        percentages = new double[] {percentage};
    }

    /**
     * Sets multiple requested percentages.
     *
     * @param percentage the percentages, each within [0.0, 1.0]
     * @throws IllegalArgumentException if any percentage is out of range
     */
    public void setPercentages(Double[] percentage) {
        percentages = new double[percentage.length];
        for (int i = 0; i < percentages.length; i++) {
            checkPercentage(percentage[i]);
            percentages[i] = percentage[i];
        }
    }

    /** Range validation shared by both setters (was duplicated inline). */
    private static void checkPercentage(Double percentage) {
        if (percentage < 0.0 || percentage > 1.0) {
            throw new IllegalArgumentException(
                    String.format(
                            "Percentage of PERCENTILE should be between [0.0, 1.0], but was '%s'.",
                            percentage));
        }
    }
}
/**
 * Accumulator for the PERCENTILE aggregate function: stores the requested
 * percentages and a (value -> occurrence count) map view of the observed
 * inputs. The total count is derived in {@code getValue()} when needed.
 */
class PercentileAccumulator {
    /** Requested percentages, each within [0.0, 1.0]. */
    public double[] percentages;
    /** Occurrence count per distinct accumulated value. */
    public MapView<Double, Long> valueCount;

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        PercentileAccumulator that = (PercentileAccumulator) o;
        return Arrays.equals(percentages, that.percentages)
                && Objects.equals(valueCount, that.valueCount);
    }

    @Override
    public int hashCode() {
        // Fix: equals() is null-safe for valueCount, but the previous
        // valueCount.hashCode() call threw an NPE when valueCount was null.
        // Objects.hash yields the same value for non-null views.
        return Objects.hash(Arrays.hashCode(percentages), valueCount);
    }

    /**
     * Sets a single requested percentage.
     *
     * @param percentage the percentage, must be within [0.0, 1.0]
     * @throws IllegalArgumentException if the percentage is out of range
     */
    public void setPercentages(Double percentage) {
        checkPercentage(percentage);
        percentages = new double[] {percentage};
    }

    /**
     * Sets multiple requested percentages.
     *
     * @param percentage the percentages, each within [0.0, 1.0]
     * @throws IllegalArgumentException if any percentage is out of range
     */
    public void setPercentages(Double[] percentage) {
        percentages = new double[percentage.length];
        for (int i = 0; i < percentages.length; i++) {
            checkPercentage(percentage[i]);
            percentages[i] = percentage[i];
        }
    }

    /** Range validation shared by both setters (was duplicated inline). */
    private static void checkPercentage(Double percentage) {
        if (percentage < 0.0 || percentage > 1.0) {
            throw new IllegalArgumentException(
                    String.format(
                            "Percentage of PERCENTILE should be between [0.0, 1.0], but was '%s'.",
                            percentage));
        }
    }
}
IMO it just shouldn't print anything unless debug logging is enabled. Why would we want users behind firewalls to get an annoying message on every build?
public void close() { try { CompletableFuture.allOf(postFutures.toArray(new CompletableFuture[0])).get( PropertyUtils.getProperty("quarkus.analytics.timeout", DEFAULT_TIMEOUT), TimeUnit.MILLISECONDS); if (log.isDebugEnabled() && !postFutures.isEmpty()) { log.debug("[Quarkus build analytics] Build analytics sent successfully. Sent event can be seen at .../target/" + fileLocations.lastTrackFileName()); } } catch (ExecutionException | TimeoutException e) { if (log.isDebugEnabled()) { log.debug("[Quarkus build analytics] Failed to send build analytics to Segment: " + e.getClass().getName() + ": " + (e.getMessage() == null ? "(no message)" : e.getMessage())); } log.info("[Quarkus build analytics] Failed to send build analytics to Segment. " + "Connection might not be available or is too slow."); } catch (Exception e) { log.warn("[Quarkus build analytics] Failed to send build analytics to Segment: " + e.getClass().getName() + ": " + (e.getMessage() == null ? "(no message)" : e.getMessage())); } }
log.info("[Quarkus build analytics] Failed to send build analytics to Segment. " +
public void close() { try { CompletableFuture.allOf(postFutures.toArray(new CompletableFuture[0])).get( PropertyUtils.getProperty("quarkus.analytics.timeout", DEFAULT_TIMEOUT), TimeUnit.MILLISECONDS); if (log.isDebugEnabled() && !postFutures.isEmpty()) { log.debug("[Quarkus build analytics] Build analytics sent successfully. Sent event can be seen at .../target/" + fileLocations.lastTrackFileName()); } } catch (ExecutionException | TimeoutException e) { if (log.isDebugEnabled()) { log.debug("[Quarkus build analytics] Failed to send build analytics to Segment. " + "Connection might not be available or is too slow: " + e.getClass().getName() + ": " + (e.getMessage() == null ? "(no message)" : e.getMessage())); } } catch (Exception e) { if (log.isDebugEnabled()) { log.debug("[Quarkus build analytics] Failed to send build analytics to Segment: " + e.getClass().getName() + ": " + (e.getMessage() == null ? "(no message)" : e.getMessage())); } } }
class AnalyticsService implements AutoCloseable { private final Queue<CompletableFuture<HttpResponse<String>>> postFutures; final private RestClient restClient; final private ConfigService config; final private AnonymousUserId userId; final private MessageWriter log; final FileLocations fileLocations; public AnalyticsService(final FileLocations fileLocations, MessageWriter log) { this.fileLocations = fileLocations; this.log = log; this.postFutures = new ConcurrentLinkedQueue<>(); this.restClient = new RestClient(log); this.userId = AnonymousUserId.getInstance(fileLocations, log); this.config = new ConfigService(this.restClient, this.userId, fileLocations, log); } public void buildAnalyticsUserInput(Function<String, String> analyticsEnabledSupplier) { this.config.userAcceptance(analyticsEnabledSupplier); } public void sendAnalytics(final TrackEventType trackEventType, ApplicationModel applicationModel, final Map<String, Object> buildInfo, final File localBuildDir) { if (this.config.isActive() && this.config.isArtifactActive( applicationModel.getAppArtifact().getGroupId(), getQuarkusVersion(applicationModel))) { final Map<String, Object> context = createContextMap(applicationModel, buildInfo); sendIdentity(context); Track trackEvent = Track.builder() .userId(userId.getUuid()) .context(context) .event(trackEventType) .properties(TrackProperties.builder() .appExtensions(createExtensionsPropertyValue(applicationModel)) .build()) .timestamp(Instant.now()) .build(); postFutures.add(this.restClient.postTrack(trackEvent)); try { FileUtils.overwrite(trackEvent, Path.of(localBuildDir.getAbsolutePath(), fileLocations.lastTrackFileName())); } catch (IOException e) { log.warn("[Quarkus build analytics] Failed to write the last analytics file. 
" + e.getMessage()); } } } @Override List<TrackProperties.AppExtension> createExtensionsPropertyValue(ApplicationModel applicationModel) { return applicationModel.getDependencies().stream() .filter(dep -> dep.isResolved() && dep.isFlagSet(TOP_LEVEL_RUNTIME_EXTENSION_ARTIFACT) && onlyPublic(dep.getGroupId(), log)) .map(dep -> TrackProperties.AppExtension.builder() .groupId(dep.getGroupId()) .artifactId(dep.getArtifactId()) .version(dep.getVersion()) .build()) .collect(Collectors.toList()); } void sendIdentity(final Map<String, Object> context) { if (this.userId.isNew()) { this.restClient.postIdentity(Identity.builder() .userId(this.userId.getUuid()) .context(context) .timestamp(Instant.now()) .build()); } } Map<String, Object> createContextMap(ApplicationModel applicationModel, Map<String, Object> buildInfo) { ArtifactCoords moduleId = applicationModel.getAppArtifact(); return new ContextBuilder() .mapPair(PROP_APP) .pair(PROP_NAME, hashSHA256(moduleId.getGroupId() + ":" + moduleId.getArtifactId())) .pair(PROP_VERSION, hashSHA256(moduleId.getArtifactId() + ":" + moduleId.getVersion())) .build() .mapPair(PROP_JAVA) .pair(PROP_VENDOR, getProperty("java.vendor", "N/A")) .pair(PROP_VERSION, getProperty("java.version", "N/A")) .build() .mapPair(PROP_GRAALVM) .pair(PROP_VENDOR, ofNullable(buildInfo.get(GRAALVM_VERSION_DISTRIBUTION)).orElse("N/A")) .pair(PROP_VERSION, ofNullable(buildInfo.get(GRAALVM_VERSION_VERSION)).orElse("N/A")) .pair(PROP_JAVA_VERSION, ofNullable(buildInfo.get(GRAALVM_VERSION_JAVA)).orElse("N/A")) .build() .mapPair(PROP_BUILD) .pair(PROP_MAVEN_VERSION, ofNullable(buildInfo.get(MAVEN_VERSION)).orElse("N/A")) .pair(PROP_GRADLE_VERSION, ofNullable(buildInfo.get(GRADLE_VERSION)).orElse("N/A")) .build() .mapPair(PROP_QUARKUS) .pair(PROP_VERSION, getQuarkusVersion(applicationModel)) .build() .pair(PROP_IP, VALUE_NULL_IP) .mapPair(PROP_LOCATION) .pair(PROP_LOCALE_COUNTRY, Locale.getDefault().getCountry()) .build() .mapPair(PROP_OS) .pair(PROP_NAME, 
getProperty("os.name", "N/A")) .pair(PROP_VERSION, getProperty("os.version", "N/A")) .pair(PROP_OS_ARCH, getProperty("os.arch", "N/A")) .build() .mapPair(PROP_CI) .pair(PROP_CI_NAME, getBuildSystemName()) .build() .mapPair(PROP_KUBERNETES) .pair(PROP_DETECTED, isKubernetesDetected()) .build() .pair(PROP_TIMEZONE, ZoneId.systemDefault().getDisplayName(TextStyle.NARROW, Locale.ENGLISH)) .build(); } private String isKubernetesDetected() { return Boolean.toString(allEnvSet( "KUBERNETES_SERVICE_HOST", "KUBERNETES_SERVICE_PORT")); } private String getBuildSystemName() { String travis = getenv("TRAVIS"); String user = getenv("USER"); if ("true".equals(travis) && "travis".equals(user)) { return "travis"; } if (allEnvSet("JENKINS_URL", "JENKINS_HOME", "WORKSPACE")) { return "jenkins"; } if (allEnvSet("GITHUB_WORKFLOW", "GITHUB_WORKSPACE", "GITHUB_RUN_ID")) { return "github-actions"; } if (allEnvSet("BUILD_REASON", "AGENT_JOBSTATUS")) { return "azure-pipelines"; } return "unknown"; } private boolean allEnvSet(String... names) { for (String name : names) { if (getenv(name) == null) { return false; } } return true; } private String getQuarkusVersion(ApplicationModel applicationModel) { return applicationModel.getPlatforms().getImportedPlatformBoms().stream() .filter(artifactCoords -> artifactCoords.getArtifactId().equals("quarkus-bom")) .map(ArtifactCoords::getVersion) .findFirst() .orElse("N/A"); } }
class AnalyticsService implements AutoCloseable { private final Queue<CompletableFuture<HttpResponse<String>>> postFutures; final private RestClient restClient; final private ConfigService config; final private AnonymousUserId userId; final private MessageWriter log; final FileLocations fileLocations; public AnalyticsService(final FileLocations fileLocations, MessageWriter log) { this.fileLocations = fileLocations; this.log = log; this.postFutures = new ConcurrentLinkedQueue<>(); this.restClient = new RestClient(log); this.userId = AnonymousUserId.getInstance(fileLocations, log); this.config = new ConfigService(this.restClient, this.userId, fileLocations, log); } public void buildAnalyticsUserInput(Function<String, String> analyticsEnabledSupplier) { this.config.userAcceptance(analyticsEnabledSupplier); } public void sendAnalytics(final TrackEventType trackEventType, ApplicationModel applicationModel, final Map<String, Object> buildInfo, final File localBuildDir) { if (this.config.isActive() && this.config.isArtifactActive( applicationModel.getAppArtifact().getGroupId(), getQuarkusVersion(applicationModel))) { final Map<String, Object> context = createContextMap(applicationModel, buildInfo); sendIdentity(context); Track trackEvent = Track.builder() .userId(userId.getUuid()) .context(context) .event(trackEventType) .properties(TrackProperties.builder() .appExtensions(createExtensionsPropertyValue(applicationModel)) .build()) .timestamp(Instant.now()) .build(); postFutures.add(this.restClient.postTrack(trackEvent)); try { FileUtils.overwrite(trackEvent, Path.of(localBuildDir.getAbsolutePath(), fileLocations.lastTrackFileName())); } catch (IOException e) { log.warn("[Quarkus build analytics] Failed to write the last analytics file. 
" + e.getMessage()); } } } @Override List<TrackProperties.AppExtension> createExtensionsPropertyValue(ApplicationModel applicationModel) { return applicationModel.getDependencies().stream() .filter(dep -> dep.isResolved() && dep.isFlagSet(TOP_LEVEL_RUNTIME_EXTENSION_ARTIFACT) && onlyPublic(dep.getGroupId(), log)) .map(dep -> TrackProperties.AppExtension.builder() .groupId(dep.getGroupId()) .artifactId(dep.getArtifactId()) .version(dep.getVersion()) .build()) .collect(Collectors.toList()); } void sendIdentity(final Map<String, Object> context) { if (this.userId.isNew()) { this.restClient.postIdentity(Identity.builder() .userId(this.userId.getUuid()) .context(context) .timestamp(Instant.now()) .build()); } } Map<String, Object> createContextMap(ApplicationModel applicationModel, Map<String, Object> buildInfo) { ArtifactCoords moduleId = applicationModel.getAppArtifact(); return new ContextBuilder() .mapPair(PROP_APP) .pair(PROP_NAME, hashSHA256(moduleId.getGroupId() + ":" + moduleId.getArtifactId())) .pair(PROP_VERSION, hashSHA256(moduleId.getArtifactId() + ":" + moduleId.getVersion())) .build() .mapPair(PROP_JAVA) .pair(PROP_VENDOR, getProperty("java.vendor", "N/A")) .pair(PROP_VERSION, getProperty("java.version", "N/A")) .build() .mapPair(PROP_GRAALVM) .pair(PROP_VENDOR, ofNullable(buildInfo.get(GRAALVM_VERSION_DISTRIBUTION)).orElse("N/A")) .pair(PROP_VERSION, ofNullable(buildInfo.get(GRAALVM_VERSION_VERSION)).orElse("N/A")) .pair(PROP_JAVA_VERSION, ofNullable(buildInfo.get(GRAALVM_VERSION_JAVA)).orElse("N/A")) .build() .mapPair(PROP_BUILD) .pair(PROP_MAVEN_VERSION, ofNullable(buildInfo.get(MAVEN_VERSION)).orElse("N/A")) .pair(PROP_GRADLE_VERSION, ofNullable(buildInfo.get(GRADLE_VERSION)).orElse("N/A")) .build() .mapPair(PROP_QUARKUS) .pair(PROP_VERSION, getQuarkusVersion(applicationModel)) .build() .pair(PROP_IP, VALUE_NULL_IP) .mapPair(PROP_LOCATION) .pair(PROP_LOCALE_COUNTRY, Locale.getDefault().getCountry()) .build() .mapPair(PROP_OS) .pair(PROP_NAME, 
getProperty("os.name", "N/A")) .pair(PROP_VERSION, getProperty("os.version", "N/A")) .pair(PROP_OS_ARCH, getProperty("os.arch", "N/A")) .build() .mapPair(PROP_CI) .pair(PROP_CI_NAME, getBuildSystemName()) .build() .mapPair(PROP_KUBERNETES) .pair(PROP_DETECTED, isKubernetesDetected()) .build() .pair(PROP_TIMEZONE, ZoneId.systemDefault().getDisplayName(TextStyle.NARROW, Locale.ENGLISH)) .build(); } private String isKubernetesDetected() { return Boolean.toString(allEnvSet( "KUBERNETES_SERVICE_HOST", "KUBERNETES_SERVICE_PORT")); } private String getBuildSystemName() { String travis = getenv("TRAVIS"); String user = getenv("USER"); if ("true".equals(travis) && "travis".equals(user)) { return "travis"; } if (allEnvSet("JENKINS_URL", "JENKINS_HOME", "WORKSPACE")) { return "jenkins"; } if (allEnvSet("GITHUB_WORKFLOW", "GITHUB_WORKSPACE", "GITHUB_RUN_ID")) { return "github-actions"; } if (allEnvSet("BUILD_REASON", "AGENT_JOBSTATUS")) { return "azure-pipelines"; } return "unknown"; } private boolean allEnvSet(String... names) { for (String name : names) { if (getenv(name) == null) { return false; } } return true; } private String getQuarkusVersion(ApplicationModel applicationModel) { return applicationModel.getPlatforms().getImportedPlatformBoms().stream() .filter(artifactCoords -> artifactCoords.getArtifactId().equals("quarkus-bom")) .map(ArtifactCoords::getVersion) .findFirst() .orElse("N/A"); } }
Should we also add this change to the changelog?
Mono<PiiEntityCollection> recognizePiiEntities(String document, String language) { try { Objects.requireNonNull(document, "'document' cannot be null."); return recognizePiiEntitiesBatch( Collections.singletonList(new TextDocumentInput("0", document).setLanguage(language)), null) .map(resultCollectionResponse -> { PiiEntityCollection entityCollection = null; for (RecognizePiiEntitiesResult entitiesResult : resultCollectionResponse.getValue()) { if (entitiesResult.isError()) { throw logger.logExceptionAsError(toTextAnalyticsException(entitiesResult.getError())); } entityCollection = new PiiEntityCollection(entitiesResult.getEntities(), entitiesResult.getEntities().getRedactedText(), entitiesResult.getEntities().getWarnings()); } return entityCollection; }); } catch (RuntimeException ex) { return monoError(logger, ex); } }
throw logger.logExceptionAsError(toTextAnalyticsException(entitiesResult.getError()));
Mono<PiiEntityCollection> recognizePiiEntities(String document, String language) { try { Objects.requireNonNull(document, "'document' cannot be null."); return recognizePiiEntitiesBatch( Collections.singletonList(new TextDocumentInput("0", document).setLanguage(language)), null) .map(resultCollectionResponse -> { PiiEntityCollection entityCollection = null; for (RecognizePiiEntitiesResult entitiesResult : resultCollectionResponse.getValue()) { if (entitiesResult.isError()) { throw logger.logExceptionAsError(toTextAnalyticsException(entitiesResult.getError())); } entityCollection = new PiiEntityCollection(entitiesResult.getEntities(), entitiesResult.getEntities().getRedactedText(), entitiesResult.getEntities().getWarnings()); } return entityCollection; }); } catch (RuntimeException ex) { return monoError(logger, ex); } }
class RecognizePiiEntityAsyncClient { private final ClientLogger logger = new ClientLogger(RecognizePiiEntityAsyncClient.class); private final TextAnalyticsClientImpl service; /** * Create a {@link RecognizePiiEntityAsyncClient} that sends requests to the Text Analytics services's * recognize Personally Identifiable Information entity endpoint. * * @param service The proxy service used to perform REST calls. */ RecognizePiiEntityAsyncClient(TextAnalyticsClientImpl service) { this.service = service; } /** * Helper function for calling service with max overloaded parameters that returns a {@link Mono} * which contains {@link PiiEntityCollection}. * * @param document A single document. * @param language The language code. * * @return The {@link Mono} of {@link PiiEntityCollection}. */ /** * Helper function for calling service with max overloaded parameters. * * @param documents The list of documents to recognize Personally Identifiable Information entities for. * @param options The {@link TextAnalyticsRequestOptions} request options. * * @return A mono {@link Response} that contains {@link RecognizePiiEntitiesResultCollection}. */ Mono<Response<RecognizePiiEntitiesResultCollection>> recognizePiiEntitiesBatch( Iterable<TextDocumentInput> documents, TextAnalyticsRequestOptions options) { try { inputDocumentsValidation(documents); return withContext(context -> getRecognizePiiEntitiesResponse(documents, options, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } /** * Helper function for calling service with max overloaded parameters with {@link Context} is given. * * @param documents The list of documents to recognize Personally Identifiable Information entities for. * @param options The {@link TextAnalyticsRequestOptions} request options. * @param context Additional context that is passed through the Http pipeline during the service call. * * @return A mono {@link Response} that contains {@link RecognizePiiEntitiesResultCollection}. 
*/ Mono<Response<RecognizePiiEntitiesResultCollection>> recognizePiiEntitiesBatchWithContext( Iterable<TextDocumentInput> documents, TextAnalyticsRequestOptions options, Context context) { try { inputDocumentsValidation(documents); return getRecognizePiiEntitiesResponse(documents, options, context); } catch (RuntimeException ex) { return monoError(logger, ex); } } /** * Helper method to convert the service response of {@link EntitiesResult} to {@link Response} which contains * {@link RecognizePiiEntitiesResultCollection}. * * @param response the {@link Response} of {@link EntitiesResult} returned by the service. * * @return A {@link Response} that contains {@link RecognizePiiEntitiesResultCollection}. */ private Response<RecognizePiiEntitiesResultCollection> toRecognizePiiEntitiesResultCollectionResponse( final Response<PiiEntitiesResult> response) { final PiiEntitiesResult piiEntitiesResult = response.getValue(); final List<RecognizePiiEntitiesResult> recognizeEntitiesResults = new ArrayList<>(); piiEntitiesResult.getDocuments().forEach(documentEntities -> { final List<PiiEntity> piiEntities = documentEntities.getEntities().stream().map(entity -> new PiiEntity(entity.getText(), EntityCategory.fromString(entity.getCategory()), entity.getSubcategory(), entity.getConfidenceScore(), entity.getOffset(), entity.getLength())) .collect(Collectors.toList()); final List<TextAnalyticsWarning> warnings = documentEntities.getWarnings().stream() .map(warning -> { final WarningCodeValue warningCodeValue = warning.getCode(); return new TextAnalyticsWarning( WarningCode.fromString(warningCodeValue == null ? null : warningCodeValue.toString()), warning.getMessage()); }).collect(Collectors.toList()); recognizeEntitiesResults.add(new RecognizePiiEntitiesResult( documentEntities.getId(), documentEntities.getStatistics() == null ? 
null : toTextDocumentStatistics(documentEntities.getStatistics()), null, new PiiEntityCollection(new IterableStream<>(piiEntities), documentEntities.getRedactedText(), new IterableStream<>(warnings)) )); }); for (DocumentError documentError : piiEntitiesResult.getErrors()) { recognizeEntitiesResults.add(new RecognizePiiEntitiesResult(documentError.getId(), null, toTextAnalyticsError(documentError.getError()), null)); } return new SimpleResponse<>(response, new RecognizePiiEntitiesResultCollection(recognizeEntitiesResults, piiEntitiesResult.getModelVersion(), piiEntitiesResult.getStatistics() == null ? null : toBatchStatistics(piiEntitiesResult.getStatistics()) )); } /** * Call the service with REST response, convert to a {@link Mono} of {@link Response} that contains * {@link RecognizePiiEntitiesResultCollection} from a {@link SimpleResponse} of {@link EntitiesResult}. * * @param documents The list of documents to recognize Personally Identifiable Information entities for. * @param options The {@link TextAnalyticsRequestOptions} request options. * @param context Additional context that is passed through the Http pipeline during the service call. * * @return A mono {@link Response} that contains {@link RecognizePiiEntitiesResultCollection}. */ private Mono<Response<RecognizePiiEntitiesResultCollection>> getRecognizePiiEntitiesResponse( Iterable<TextDocumentInput> documents, TextAnalyticsRequestOptions options, Context context) { return service.entitiesRecognitionPiiWithResponseAsync( new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents)), options == null ? null : options.getModelVersion(), options == null ? 
null : options.isIncludeStatistics(), null, null, context.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE)) .doOnSubscribe(ignoredValue -> logger.info( "Start recognizing Personally Identifiable Information entities for a batch of documents.")) .doOnSuccess(response -> logger.info( "Successfully recognized Personally Identifiable Information entities for a batch of documents.")) .doOnError(error -> logger.warning("Failed to recognize Personally Identifiable Information entities - {}", error)) .map(this::toRecognizePiiEntitiesResultCollectionResponse) .onErrorMap(throwable -> mapToHttpResponseExceptionIfExist(throwable)); } }
class RecognizePiiEntityAsyncClient { private final ClientLogger logger = new ClientLogger(RecognizePiiEntityAsyncClient.class); private final TextAnalyticsClientImpl service; /** * Create a {@link RecognizePiiEntityAsyncClient} that sends requests to the Text Analytics services's * recognize Personally Identifiable Information entity endpoint. * * @param service The proxy service used to perform REST calls. */ RecognizePiiEntityAsyncClient(TextAnalyticsClientImpl service) { this.service = service; } /** * Helper function for calling service with max overloaded parameters that returns a {@link Mono} * which contains {@link PiiEntityCollection}. * * @param document A single document. * @param language The language code. * * @return The {@link Mono} of {@link PiiEntityCollection}. */ /** * Helper function for calling service with max overloaded parameters. * * @param documents The list of documents to recognize Personally Identifiable Information entities for. * @param options The {@link TextAnalyticsRequestOptions} request options. * * @return A mono {@link Response} that contains {@link RecognizePiiEntitiesResultCollection}. */ Mono<Response<RecognizePiiEntitiesResultCollection>> recognizePiiEntitiesBatch( Iterable<TextDocumentInput> documents, TextAnalyticsRequestOptions options) { try { inputDocumentsValidation(documents); return withContext(context -> getRecognizePiiEntitiesResponse(documents, options, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } /** * Helper function for calling service with max overloaded parameters with {@link Context} is given. * * @param documents The list of documents to recognize Personally Identifiable Information entities for. * @param options The {@link TextAnalyticsRequestOptions} request options. * @param context Additional context that is passed through the Http pipeline during the service call. * * @return A mono {@link Response} that contains {@link RecognizePiiEntitiesResultCollection}. 
*/ Mono<Response<RecognizePiiEntitiesResultCollection>> recognizePiiEntitiesBatchWithContext( Iterable<TextDocumentInput> documents, TextAnalyticsRequestOptions options, Context context) { try { inputDocumentsValidation(documents); return getRecognizePiiEntitiesResponse(documents, options, context); } catch (RuntimeException ex) { return monoError(logger, ex); } } /** * Helper method to convert the service response of {@link EntitiesResult} to {@link Response} which contains * {@link RecognizePiiEntitiesResultCollection}. * * @param response the {@link Response} of {@link EntitiesResult} returned by the service. * * @return A {@link Response} that contains {@link RecognizePiiEntitiesResultCollection}. */ private Response<RecognizePiiEntitiesResultCollection> toRecognizePiiEntitiesResultCollectionResponse( final Response<PiiEntitiesResult> response) { final PiiEntitiesResult piiEntitiesResult = response.getValue(); final List<RecognizePiiEntitiesResult> recognizeEntitiesResults = new ArrayList<>(); piiEntitiesResult.getDocuments().forEach(documentEntities -> { final List<PiiEntity> piiEntities = documentEntities.getEntities().stream().map(entity -> new PiiEntity(entity.getText(), EntityCategory.fromString(entity.getCategory()), entity.getSubcategory(), entity.getConfidenceScore(), entity.getOffset(), entity.getLength())) .collect(Collectors.toList()); final List<TextAnalyticsWarning> warnings = documentEntities.getWarnings().stream() .map(warning -> { final WarningCodeValue warningCodeValue = warning.getCode(); return new TextAnalyticsWarning( WarningCode.fromString(warningCodeValue == null ? null : warningCodeValue.toString()), warning.getMessage()); }).collect(Collectors.toList()); recognizeEntitiesResults.add(new RecognizePiiEntitiesResult( documentEntities.getId(), documentEntities.getStatistics() == null ? 
null : toTextDocumentStatistics(documentEntities.getStatistics()), null, new PiiEntityCollection(new IterableStream<>(piiEntities), documentEntities.getRedactedText(), new IterableStream<>(warnings)) )); }); for (DocumentError documentError : piiEntitiesResult.getErrors()) { recognizeEntitiesResults.add(new RecognizePiiEntitiesResult(documentError.getId(), null, toTextAnalyticsError(documentError.getError()), null)); } return new SimpleResponse<>(response, new RecognizePiiEntitiesResultCollection(recognizeEntitiesResults, piiEntitiesResult.getModelVersion(), piiEntitiesResult.getStatistics() == null ? null : toBatchStatistics(piiEntitiesResult.getStatistics()) )); } /** * Call the service with REST response, convert to a {@link Mono} of {@link Response} that contains * {@link RecognizePiiEntitiesResultCollection} from a {@link SimpleResponse} of {@link EntitiesResult}. * * @param documents The list of documents to recognize Personally Identifiable Information entities for. * @param options The {@link TextAnalyticsRequestOptions} request options. * @param context Additional context that is passed through the Http pipeline during the service call. * * @return A mono {@link Response} that contains {@link RecognizePiiEntitiesResultCollection}. */ private Mono<Response<RecognizePiiEntitiesResultCollection>> getRecognizePiiEntitiesResponse( Iterable<TextDocumentInput> documents, TextAnalyticsRequestOptions options, Context context) { return service.entitiesRecognitionPiiWithResponseAsync( new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents)), options == null ? null : options.getModelVersion(), options == null ? 
null : options.isIncludeStatistics(), null, null, context.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE)) .doOnSubscribe(ignoredValue -> logger.info( "Start recognizing Personally Identifiable Information entities for a batch of documents.")) .doOnSuccess(response -> logger.info( "Successfully recognized Personally Identifiable Information entities for a batch of documents.")) .doOnError(error -> logger.warning("Failed to recognize Personally Identifiable Information entities - {}", error)) .map(this::toRecognizePiiEntitiesResultCollectionResponse) .onErrorMap(throwable -> mapToHttpResponseExceptionIfExist(throwable)); } }
Let's open a separate issue for that, it's a bigger piece of work
public String getId() { String id = this.title.toLowerCase().replaceAll(SPACE, DASH); try { id = URLEncoder.encode(id, StandardCharsets.UTF_8.toString()); } catch (UnsupportedEncodingException ex) { throw new RuntimeException(ex); } if (!this.isInternal() && this.namespace != null) { id = this.namespace.toLowerCase() + SLASH + id; } else if (this.isInternal() && this.namespace != null) { String d = "devui-" + id; if (d.equals(this.namespace)) { return id; } else { int i = this.namespace.indexOf(DASH) + 1; String stripDevui = this.namespace.substring(i); return stripDevui + DASH + id; } } return id; }
id = URLEncoder.encode(id, StandardCharsets.UTF_8.toString());
public String getId() { String id = this.title.toLowerCase().replaceAll(SPACE, DASH); try { id = URLEncoder.encode(id, StandardCharsets.UTF_8.toString()); } catch (UnsupportedEncodingException ex) { throw new RuntimeException(ex); } if (!this.isInternal() && this.namespace != null) { id = this.namespace.toLowerCase() + SLASH + id; } else if (this.isInternal() && this.namespace != null) { String d = "devui-" + id; if (d.equals(this.namespace)) { return id; } else { int i = this.namespace.indexOf(DASH) + 1; String stripDevui = this.namespace.substring(i); return stripDevui + DASH + id; } } return id; }
class Page { private final String icon; private final String title; private final String staticLabel; private final String dynamicLabel; private final String streamingLabel; private final String componentName; private final String componentLink; private final Map<String, String> metadata; private final boolean embed; private final boolean includeInMenu; private final boolean internalComponent; private String namespace = null; private String namespaceLabel = null; private String extensionId = null; protected Page(String icon, String title, String staticLabel, String dynamicLabel, String streamingLabel, String componentName, String componentLink, Map<String, String> metadata, boolean embed, boolean includeInMenu, boolean internalComponent, String namespace, String namespaceLabel, String extensionId) { this.icon = icon; this.title = title; this.staticLabel = staticLabel; this.dynamicLabel = dynamicLabel; this.streamingLabel = streamingLabel; this.componentName = componentName; this.componentLink = componentLink; this.metadata = metadata; this.embed = embed; this.includeInMenu = includeInMenu; this.internalComponent = internalComponent; this.namespace = namespace; this.namespaceLabel = namespaceLabel; this.extensionId = extensionId; } public String getComponentRef() { if (internalComponent) { return DOT + SLASH + DOT + DOT + SLASH + "qwc" + SLASH + this.componentLink; } else if (this.namespace != null) { return DOT + SLASH + DOT + DOT + SLASH + this.namespace + SLASH + this.componentLink; } throw new RuntimeException("Could not find component reference"); } public String getNamespace() { return this.namespace; } public String getNamespaceLabel() { return this.namespaceLabel; } public String getIcon() { return icon; } public String getTitle() { return title; } public String getStaticLabel() { return staticLabel; } public String getDynamicLabel() { return dynamicLabel; } public String getStreamingLabel() { return streamingLabel; } public String getComponentName() { 
return componentName; } public String getComponentLink() { return componentLink; } public boolean isEmbed() { return embed; } public boolean isIncludeInMenu() { return includeInMenu; } public boolean isInternal() { return this.internalComponent && this.extensionId == null; } public String getExtensionId() { return extensionId; } public Map<String, String> getMetadata() { return metadata; } @Override public String toString() { return "Page {\n\tid=" + getId() + ", \n\ticon=" + icon + ", \n\ttitle=" + title + ", \n\tstaticLabel=" + staticLabel + ", \n\tdynamicLabel=" + dynamicLabel + ", \n\tstreamingLabel=" + streamingLabel + ", \n\tnamespace=" + namespace + ", \n\tnamespaceLabel=" + namespaceLabel + ", \n\tcomponentName=" + componentName + ", \n\tcomponentLink=" + componentLink + ", \n\tembed=" + embed + ", \n\tincludeInMenu=" + includeInMenu + "\n}"; } /** * Here you provide the Web Component that should be rendered. You have full control over the page. * You can use build time data if you made it available */ public static WebComponentPageBuilder webComponentPageBuilder() { return new WebComponentPageBuilder(); } /** * Here you provide a url to an external resource. 
When code/markup, if can be displayed in a code view, when HTML it can * render the HTML */ public static ExternalPageBuilder externalPageBuilder(String name) { return new ExternalPageBuilder(name); } /** * Here you provide the data that should be rendered in raw json format */ public static RawDataPageBuilder rawDataPageBuilder(String name) { return new RawDataPageBuilder(name); } /** * Here you can render the data with a qute template */ public static QuteDataPageBuilder quteDataPageBuilder(String name) { return new QuteDataPageBuilder(name); } /** * Here you provide the data that should be rendered in a table */ public static TableDataPageBuilder tableDataPageBuilder(String name) { return new TableDataPageBuilder(name); } private static final String SPACE = " "; private static final String DASH = "-"; private static final String SLASH = "/"; private static final String DOT = "."; }
class Page { private final String icon; private final String title; private final String staticLabel; private final String dynamicLabel; private final String streamingLabel; private final String componentName; private final String componentLink; private final Map<String, String> metadata; private final boolean embed; private final boolean includeInMenu; private final boolean internalComponent; private String namespace = null; private String namespaceLabel = null; private String extensionId = null; protected Page(String icon, String title, String staticLabel, String dynamicLabel, String streamingLabel, String componentName, String componentLink, Map<String, String> metadata, boolean embed, boolean includeInMenu, boolean internalComponent, String namespace, String namespaceLabel, String extensionId) { this.icon = icon; this.title = title; this.staticLabel = staticLabel; this.dynamicLabel = dynamicLabel; this.streamingLabel = streamingLabel; this.componentName = componentName; this.componentLink = componentLink; this.metadata = metadata; this.embed = embed; this.includeInMenu = includeInMenu; this.internalComponent = internalComponent; this.namespace = namespace; this.namespaceLabel = namespaceLabel; this.extensionId = extensionId; } public String getComponentRef() { if (internalComponent) { return DOT + SLASH + DOT + DOT + SLASH + "qwc" + SLASH + this.componentLink; } else if (this.namespace != null) { return DOT + SLASH + DOT + DOT + SLASH + this.namespace + SLASH + this.componentLink; } throw new RuntimeException("Could not find component reference"); } public String getNamespace() { return this.namespace; } public String getNamespaceLabel() { return this.namespaceLabel; } public String getIcon() { return icon; } public String getTitle() { return title; } public String getStaticLabel() { return staticLabel; } public String getDynamicLabel() { return dynamicLabel; } public String getStreamingLabel() { return streamingLabel; } public String getComponentName() { 
return componentName; } public String getComponentLink() { return componentLink; } public boolean isEmbed() { return embed; } public boolean isIncludeInMenu() { return includeInMenu; } public boolean isInternal() { return this.internalComponent && this.extensionId == null; } public String getExtensionId() { return extensionId; } public Map<String, String> getMetadata() { return metadata; } @Override public String toString() { return "Page {\n\tid=" + getId() + ", \n\ticon=" + icon + ", \n\ttitle=" + title + ", \n\tstaticLabel=" + staticLabel + ", \n\tdynamicLabel=" + dynamicLabel + ", \n\tstreamingLabel=" + streamingLabel + ", \n\tnamespace=" + namespace + ", \n\tnamespaceLabel=" + namespaceLabel + ", \n\tcomponentName=" + componentName + ", \n\tcomponentLink=" + componentLink + ", \n\tembed=" + embed + ", \n\tincludeInMenu=" + includeInMenu + "\n}"; } /** * Here you provide the Web Component that should be rendered. You have full control over the page. * You can use build time data if you made it available */ public static WebComponentPageBuilder webComponentPageBuilder() { return new WebComponentPageBuilder(); } /** * Here you provide a url to an external resource. 
When code/markup, if can be displayed in a code view, when HTML it can * render the HTML */ public static ExternalPageBuilder externalPageBuilder(String name) { return new ExternalPageBuilder(name); } /** * Here you provide the data that should be rendered in raw json format */ public static RawDataPageBuilder rawDataPageBuilder(String name) { return new RawDataPageBuilder(name); } /** * Here you can render the data with a qute template */ public static QuteDataPageBuilder quteDataPageBuilder(String name) { return new QuteDataPageBuilder(name); } /** * Here you provide the data that should be rendered in a table */ public static TableDataPageBuilder tableDataPageBuilder(String name) { return new TableDataPageBuilder(name); } private static final String SPACE = " "; private static final String DASH = "-"; private static final String SLASH = "/"; private static final String DOT = "."; }
Also, if we can, don't forget to update `AtomicCtasITCase`.
protected TableEnvironment getTableEnvironment() { EnvironmentSettings settings = EnvironmentSettings.newInstance().inStreamingMode().build(); return StreamTableEnvironment.create( StreamExecutionEnvironment.getExecutionEnvironment(), settings); }
return StreamTableEnvironment.create(
protected TableEnvironment getTableEnvironment() { EnvironmentSettings settings = EnvironmentSettings.newInstance().inStreamingMode().build(); return StreamTableEnvironment.create( StreamExecutionEnvironment.getExecutionEnvironment(), settings); }
class AtomicRtasITCase extends AtomicRtasITCaseBase { @Override }
class AtomicRtasITCase extends AtomicRtasITCaseBase { @Override }
Let's keep this since there are multiple variables assigned inside the logic.
public void visit(TypeDefinitionNode typeDefinitionNode) { int type = TokenTypes.TYPE.getId(); int modifiers = 0; int refModifiers = 0; Node typeDescriptor = typeDefinitionNode.typeDescriptor(); switch (typeDescriptor.kind()) { case OBJECT_TYPE_DESC: type = TokenTypes.INTERFACE.getId(); modifiers = TokenTypeModifiers.DECLARATION.getId(); break; case RECORD_TYPE_DESC: type = TokenTypes.STRUCT.getId(); modifiers = TokenTypeModifiers.DECLARATION.getId(); break; case INTERSECTION_TYPE_DESC: if (typeDescriptor instanceof IntersectionTypeDescriptorNode) { IntersectionTypeDescriptorNode intSecDescriptor = (IntersectionTypeDescriptorNode) typeDescriptor; SyntaxKind left = intSecDescriptor.leftTypeDesc().kind(); SyntaxKind right = intSecDescriptor.rightTypeDesc().kind(); if (left == SyntaxKind.RECORD_TYPE_DESC || right == SyntaxKind.RECORD_TYPE_DESC) { type = TokenTypes.STRUCT.getId(); } if (left == SyntaxKind.READONLY_TYPE_DESC || right == SyntaxKind.READONLY_TYPE_DESC) { modifiers = TokenTypeModifiers.DECLARATION.getId() | TokenTypeModifiers.READONLY.getId(); refModifiers = TokenTypeModifiers.READONLY.getId(); } else { modifiers = TokenTypeModifiers.DECLARATION.getId(); } } break; default: type = TokenTypes.TYPE.getId(); modifiers = TokenTypeModifiers.DECLARATION.getId(); break; } this.addSemanticToken(typeDefinitionNode.typeName(), type, modifiers, true, type, refModifiers); visitSyntaxNode(typeDefinitionNode); }
refModifiers = TokenTypeModifiers.READONLY.getId();
public void visit(TypeDefinitionNode typeDefinitionNode) { int type = TokenTypes.TYPE.getId(); int modifiers = 0; int refModifiers = 0; Node typeDescriptor = typeDefinitionNode.typeDescriptor(); switch (typeDescriptor.kind()) { case OBJECT_TYPE_DESC: type = TokenTypes.INTERFACE.getId(); modifiers = TokenTypeModifiers.DECLARATION.getId(); break; case RECORD_TYPE_DESC: type = TokenTypes.STRUCT.getId(); modifiers = TokenTypeModifiers.DECLARATION.getId(); break; case INTERSECTION_TYPE_DESC: if (typeDescriptor instanceof IntersectionTypeDescriptorNode) { IntersectionTypeDescriptorNode intSecDescriptor = (IntersectionTypeDescriptorNode) typeDescriptor; SyntaxKind left = intSecDescriptor.leftTypeDesc().kind(); SyntaxKind right = intSecDescriptor.rightTypeDesc().kind(); if (left == SyntaxKind.RECORD_TYPE_DESC || right == SyntaxKind.RECORD_TYPE_DESC) { type = TokenTypes.STRUCT.getId(); } if (left == SyntaxKind.READONLY_TYPE_DESC || right == SyntaxKind.READONLY_TYPE_DESC) { modifiers = TokenTypeModifiers.DECLARATION.getId() | TokenTypeModifiers.READONLY.getId(); refModifiers = TokenTypeModifiers.READONLY.getId(); } else { modifiers = TokenTypeModifiers.DECLARATION.getId(); } } break; default: type = TokenTypes.TYPE.getId(); modifiers = TokenTypeModifiers.DECLARATION.getId(); break; } this.addSemanticToken(typeDefinitionNode.typeName(), type, modifiers, true, type, refModifiers); visitSyntaxNode(typeDefinitionNode); }
class SemanticTokensVisitor extends NodeVisitor { private final Set<SemanticToken> semanticTokens; private final SemanticTokensContext semanticTokensContext; public SemanticTokensVisitor(SemanticTokensContext semanticTokensContext) { this.semanticTokens = new TreeSet<>(SemanticToken.semanticTokenComparator); this.semanticTokensContext = semanticTokensContext; } /** * Collects semantic tokens while traversing the semantic tress and returns the processed list of semantic tokens * for highlighting. * * @param node Root node * @return {@link SemanticTokens} */ public SemanticTokens getSemanticTokens(Node node) { List<Integer> data = new ArrayList<>(); visitSyntaxNode(node); SemanticToken previousToken = null; for (SemanticToken semanticToken : this.semanticTokens) { previousToken = semanticToken.processSemanticToken(data, previousToken); } return new SemanticTokens(data); } public void visit(ImportDeclarationNode importDeclarationNode) { Optional<ImportPrefixNode> importPrefixNode = importDeclarationNode.prefix(); importPrefixNode.ifPresent(prefixNode -> this.addSemanticToken(prefixNode.prefix(), TokenTypes.NAMESPACE.getId(), TokenTypeModifiers.DECLARATION.getId(), true, TokenTypes.NAMESPACE.getId(), 0)); } public void visit(FunctionDefinitionNode functionDefinitionNode) { int type = functionDefinitionNode.kind() == SyntaxKind.OBJECT_METHOD_DEFINITION ? 
TokenTypes.METHOD.getId() : TokenTypes.FUNCTION.getId(); if (functionDefinitionNode.kind() == SyntaxKind.RESOURCE_ACCESSOR_DEFINITION) { this.addSemanticToken(functionDefinitionNode.functionName(), type, TokenTypeModifiers.DECLARATION.getId(), false, -1, -1); functionDefinitionNode.relativeResourcePath().forEach(resourcePath -> { if (resourcePath.kind() == SyntaxKind.IDENTIFIER_TOKEN) { this.addSemanticToken(resourcePath, type, TokenTypeModifiers.DECLARATION.getId(), false, -1, -1); } }); } else { this.addSemanticToken(functionDefinitionNode.functionName(), type, TokenTypeModifiers.DECLARATION.getId(), true, type, 0); } visitSyntaxNode(functionDefinitionNode); } public void visit(MethodDeclarationNode methodDeclarationNode) { this.addSemanticToken(methodDeclarationNode.methodName(), TokenTypes.METHOD.getId(), TokenTypeModifiers.DECLARATION.getId(), false, -1, -1); visitSyntaxNode(methodDeclarationNode); } public void visit(FunctionCallExpressionNode functionCallExpressionNode) { Node functionName = functionCallExpressionNode.functionName(); if (functionName instanceof QualifiedNameReferenceNode) { functionName = ((QualifiedNameReferenceNode) functionName).identifier(); } this.addSemanticToken(functionName, TokenTypes.FUNCTION.getId(), 0, false, -1, -1); visitSyntaxNode(functionCallExpressionNode); } public void visit(MethodCallExpressionNode methodCallExpressionNode) { this.addSemanticToken(methodCallExpressionNode.methodName(), TokenTypes.METHOD.getId(), TokenTypeModifiers.DECLARATION.getId(), false, -1, -1); visitSyntaxNode(methodCallExpressionNode); } public void visit(RequiredParameterNode requiredParameterNode) { boolean isReadonly = isReadonly(requiredParameterNode.typeName()); requiredParameterNode.paramName().ifPresent(token -> this.addSemanticToken(token, TokenTypes.PARAMETER.getId(), isReadonly ? 
TokenTypeModifiers.DECLARATION.getId() | TokenTypeModifiers.READONLY.getId() : TokenTypeModifiers.DECLARATION.getId(), true, TokenTypes.PARAMETER.getId(), isReadonly ? TokenTypeModifiers.READONLY.getId() : 0)); visitSyntaxNode(requiredParameterNode); } @Override public void visit(TypedBindingPatternNode typedBindingPatternNode) { TypeDescriptorNode typeDescriptorNode = typedBindingPatternNode.typeDescriptor(); processSymbols(typeDescriptorNode, typeDescriptorNode.lineRange().startLine()); visitSyntaxNode(typedBindingPatternNode); } public void visit(CaptureBindingPatternNode captureBindingPatternNode) { boolean readonly = false; if (captureBindingPatternNode.parent() instanceof TypedBindingPatternNode) { readonly = this.isReadonly(((TypedBindingPatternNode) captureBindingPatternNode.parent()).typeDescriptor()); } this.addSemanticToken(captureBindingPatternNode, TokenTypes.VARIABLE.getId(), readonly ? TokenTypeModifiers.DECLARATION.getId() | TokenTypeModifiers.READONLY.getId() : TokenTypeModifiers.DECLARATION.getId(), true, TokenTypes.VARIABLE.getId(), readonly ? 
TokenTypeModifiers.READONLY.getId() : 0); visitSyntaxNode(captureBindingPatternNode); } public void visit(SimpleNameReferenceNode simpleNameReferenceNode) { if (!SemanticTokensConstants.SELF.equals(simpleNameReferenceNode.name().text())) { processSymbols(simpleNameReferenceNode, simpleNameReferenceNode.lineRange().startLine()); } visitSyntaxNode(simpleNameReferenceNode); } public void visit(QualifiedNameReferenceNode qualifiedNameReferenceNode) { this.addSemanticToken(qualifiedNameReferenceNode.modulePrefix(), TokenTypes.NAMESPACE.getId(), 0, false, -1, -1); Token identifier = qualifiedNameReferenceNode.identifier(); processSymbols(identifier, identifier.lineRange().startLine()); visitSyntaxNode(qualifiedNameReferenceNode); } public void visit(ConstantDeclarationNode constantDeclarationNode) { this.addSemanticToken(constantDeclarationNode.variableName(), TokenTypes.VARIABLE.getId(), TokenTypeModifiers.DECLARATION.getId() | TokenTypeModifiers.READONLY.getId(), true, TokenTypes.VARIABLE.getId(), TokenTypeModifiers.READONLY.getId()); visitSyntaxNode(constantDeclarationNode); } public void visit(ClassDefinitionNode classDefinitionNode) { boolean isReadonly = false; if (!classDefinitionNode.classTypeQualifiers().isEmpty() && classDefinitionNode.classTypeQualifiers().stream().anyMatch(qualifier -> qualifier.text().equals(SemanticTokensConstants.READONLY))) { isReadonly = true; } this.addSemanticToken(classDefinitionNode.className(), TokenTypes.CLASS.getId(), isReadonly ? TokenTypeModifiers.DECLARATION.getId() | TokenTypeModifiers.READONLY.getId() : TokenTypeModifiers.DECLARATION.getId(), true, TokenTypes.CLASS.getId(), isReadonly ? 
TokenTypeModifiers.READONLY.getId() : 0); visitSyntaxNode(classDefinitionNode); } public void visit(ServiceDeclarationNode serviceDeclarationNode) { serviceDeclarationNode.absoluteResourcePath().forEach(serviceName -> { LinePosition startLine = serviceName.lineRange().startLine(); SemanticToken semanticToken = new SemanticToken(startLine.line(), startLine.offset(), serviceName.textRange().length(), TokenTypes.TYPE.getId(), TokenTypeModifiers.DECLARATION.getId()); semanticTokens.add(semanticToken); }); visitSyntaxNode(serviceDeclarationNode); } public void visit(EnumDeclarationNode enumDeclarationNode) { this.addSemanticToken(enumDeclarationNode.identifier(), TokenTypes.ENUM.getId(), TokenTypeModifiers.DECLARATION.getId(), true, TokenTypes.ENUM.getId(), 0); visitSyntaxNode(enumDeclarationNode); } public void visit(EnumMemberNode enumMemberNode) { this.addSemanticToken(enumMemberNode.identifier(), TokenTypes.ENUM_MEMBER.getId(), TokenTypeModifiers.DECLARATION.getId() | TokenTypeModifiers.READONLY.getId(), true, TokenTypes.ENUM_MEMBER.getId(), TokenTypeModifiers.READONLY.getId()); visitSyntaxNode(enumMemberNode); } public void visit(AnnotationNode annotationNode) { this.addSemanticToken(annotationNode.atToken(), TokenTypes.NAMESPACE.getId(), 0, false, -1, -1); visitSyntaxNode(annotationNode); } public void visit(MarkdownParameterDocumentationLineNode markdownParameterDocumentationLineNode) { if (!markdownParameterDocumentationLineNode.parameterName().text().equals(SemanticTokensConstants.RETURN)) { int type; switch (markdownParameterDocumentationLineNode.parent().parent().parent().kind()) { case RECORD_FIELD: case OBJECT_FIELD: type = TokenTypes.PROPERTY.getId(); break; case TYPE_DEFINITION: Node node = markdownParameterDocumentationLineNode.parent().parent().parent(); type = TokenTypes.TYPE_PARAMETER.getId(); if (node instanceof TypeDefinitionNode) { SyntaxKind kind = ((TypeDefinitionNode) node).typeDescriptor().kind(); if (kind == SyntaxKind.OBJECT_TYPE_DESC || kind 
== SyntaxKind.RECORD_TYPE_DESC) { type = TokenTypes.PROPERTY.getId(); } } break; default: type = TokenTypes.PARAMETER.getId(); break; } this.addSemanticToken(markdownParameterDocumentationLineNode.parameterName(), type, TokenTypeModifiers.DOCUMENTATION.getId(), false, -1, -1); } visitSyntaxNode(markdownParameterDocumentationLineNode); } public void visit(RecordFieldNode recordFieldNode) { Token token = recordFieldNode.fieldName(); LinePosition startLine = token.lineRange().startLine(); SemanticToken semanticToken = new SemanticToken(startLine.line(), startLine.offset()); if (!semanticTokens.contains(semanticToken)) { int length = token.text().trim().length(); int modifiers; int refModifiers; if (recordFieldNode.readonlyKeyword().isPresent()) { modifiers = TokenTypeModifiers.DECLARATION.getId() | TokenTypeModifiers.READONLY.getId(); refModifiers = TokenTypeModifiers.READONLY.getId(); } else { modifiers = TokenTypeModifiers.DECLARATION.getId(); refModifiers = 0; } semanticToken.setProperties(length, TokenTypes.PROPERTY.getId(), modifiers); semanticTokens.add(semanticToken); handleReferences(startLine, length, TokenTypes.PROPERTY.getId(), refModifiers); } visitSyntaxNode(recordFieldNode); } public void visit(RecordFieldWithDefaultValueNode recordFieldWithDefaultValueNode) { Token token = recordFieldWithDefaultValueNode.fieldName(); LinePosition startLine = token.lineRange().startLine(); SemanticToken semanticToken = new SemanticToken(startLine.line(), startLine.offset()); if (!semanticTokens.contains(semanticToken)) { int length = token.text().trim().length(); int modifiers; int refModifiers; if (recordFieldWithDefaultValueNode.readonlyKeyword().isPresent()) { modifiers = TokenTypeModifiers.DECLARATION.getId() | TokenTypeModifiers.READONLY.getId(); refModifiers = TokenTypeModifiers.READONLY.getId(); } else { modifiers = TokenTypeModifiers.DECLARATION.getId(); refModifiers = 0; } semanticToken.setProperties(length, TokenTypes.PROPERTY.getId(), modifiers); 
semanticTokens.add(semanticToken); handleReferences(startLine, length, TokenTypes.PROPERTY.getId(), refModifiers); } visitSyntaxNode(recordFieldWithDefaultValueNode); } public void visit(KeySpecifierNode keySpecifierNode) { keySpecifierNode.fieldNames().forEach(field -> this.addSemanticToken(field, TokenTypes.PROPERTY.getId(), TokenTypeModifiers.DECLARATION.getId(), false, -1, -1)); visitSyntaxNode(keySpecifierNode); } public void visit(SpecificFieldNode specificFieldNode) { processSymbols(specificFieldNode.fieldName(), specificFieldNode.fieldName().location().lineRange().startLine()); visitSyntaxNode(specificFieldNode); } public void visit(ObjectFieldNode objectFieldNode) { SyntaxKind kind = objectFieldNode.parent().kind(); int type = kind == SyntaxKind.CLASS_DEFINITION || kind == SyntaxKind.OBJECT_TYPE_DESC || kind == SyntaxKind.RECORD_TYPE_DESC || kind == SyntaxKind.OBJECT_CONSTRUCTOR ? TokenTypes.PROPERTY.getId() : TokenTypes.TYPE_PARAMETER.getId(); boolean isReadOnly = isReadonly(objectFieldNode.typeName()); this.addSemanticToken(objectFieldNode.fieldName(), type, isReadOnly ? TokenTypeModifiers.DECLARATION.getId() | TokenTypeModifiers.READONLY.getId() : TokenTypeModifiers.DECLARATION.getId(), true, type, isReadOnly ? 
TokenTypeModifiers.READONLY.getId() : 0); visitSyntaxNode(objectFieldNode); } public void visit(AnnotationDeclarationNode annotationDeclarationNode) { this.addSemanticToken(annotationDeclarationNode.annotationTag(), TokenTypes.TYPE.getId(), TokenTypeModifiers.DECLARATION.getId(), true, TokenTypes.TYPE.getId(), 0); visitSyntaxNode(annotationDeclarationNode); } public void visit(DefaultableParameterNode defaultableParameterNode) { defaultableParameterNode.paramName().ifPresent(token -> this.addSemanticToken(token, TokenTypes.PARAMETER.getId(), TokenTypeModifiers.DECLARATION.getId(), true, TokenTypes.PARAMETER.getId(), 0)); visitSyntaxNode(defaultableParameterNode); } public void visit(IncludedRecordParameterNode includedRecordParameterNode) { includedRecordParameterNode.paramName().ifPresent(token -> this.addSemanticToken(token, TokenTypes.PARAMETER.getId(), TokenTypeModifiers.DECLARATION.getId(), true, TokenTypes.PARAMETER.getId(), 0)); visitSyntaxNode(includedRecordParameterNode); } public void visit(RestParameterNode restParameterNode) { restParameterNode.paramName().ifPresent(token -> this.addSemanticToken(token, TokenTypes.PARAMETER.getId(), TokenTypeModifiers.DECLARATION.getId(), true, TokenTypes.PARAMETER.getId(), 0)); visitSyntaxNode(restParameterNode); } public void visit(NamedArgumentNode namedArgumentNode) { this.addSemanticToken(namedArgumentNode.argumentName(), TokenTypes.VARIABLE.getId(), TokenTypeModifiers.DECLARATION.getId(), false, -1, -1); visitSyntaxNode(namedArgumentNode); } /** * Returns if the given IntersectionTypeDescriptorNode has a readonly typeDescriptor. * * @param node Current node * @return True if a readonly typeDescriptor is present, false otherwise. 
*/ private boolean isReadonly(Node node) { if (node instanceof IntersectionTypeDescriptorNode) { IntersectionTypeDescriptorNode intSecDescriptor = (IntersectionTypeDescriptorNode) node; SyntaxKind left = intSecDescriptor.leftTypeDesc().kind(); SyntaxKind right = intSecDescriptor.rightTypeDesc().kind(); return left == SyntaxKind.READONLY_TYPE_DESC || right == SyntaxKind.READONLY_TYPE_DESC; } return false; } /** * Get the symbol of the given node and process the semantic tokens for the symbol and it's references. * * @param node Current node * @param startLine Start line position */ private void processSymbols(Node node, LinePosition startLine) { if (semanticTokens.contains(new SemanticToken(startLine.line(), startLine.offset()))) { return; } Optional<SemanticModel> semanticModel = this.semanticTokensContext.currentSemanticModel(); if (semanticModel.isEmpty()) { return; } Optional<Symbol> symbol = semanticModel.get().symbol(node); if (symbol.isEmpty() || symbol.get().getLocation().isEmpty()) { return; } LineRange symbolLineRange = symbol.get().getLocation().get().lineRange(); LinePosition linePosition = symbolLineRange.startLine(); SymbolKind kind = symbol.get().kind(); String nodeName = node.toString().trim(); if (nodeName.equals(SemanticTokensConstants.SELF)) { return; } int declarationType = -1, declarationModifiers = -1, referenceType = -1, referenceModifiers = -1; switch (kind) { case CLASS: declarationType = TokenTypes.CLASS.getId(); declarationModifiers = TokenTypeModifiers.DECLARATION.getId(); referenceType = TokenTypes.CLASS.getId(); break; case CLASS_FIELD: declarationType = TokenTypes.PROPERTY.getId(); declarationModifiers = TokenTypeModifiers.DECLARATION.getId(); referenceType = TokenTypes.PROPERTY.getId(); break; case CONSTANT: declarationType = TokenTypes.VARIABLE.getId(); declarationModifiers = TokenTypeModifiers.DECLARATION.getId() | TokenTypeModifiers.READONLY.getId(); referenceType = TokenTypes.VARIABLE.getId(); referenceModifiers = 
TokenTypeModifiers.READONLY.getId(); break; case VARIABLE: boolean isReadonly = ((VariableSymbol) symbol.get()).typeDescriptor().typeKind() == TypeDescKind.INTERSECTION && ((IntersectionTypeSymbol) ((VariableSymbol) symbol.get()) .typeDescriptor()).memberTypeDescriptors().stream() .anyMatch(desc -> desc.typeKind() == TypeDescKind.READONLY); declarationType = TokenTypes.VARIABLE.getId(); declarationModifiers = isReadonly ? TokenTypeModifiers.DECLARATION.getId() | TokenTypeModifiers.READONLY.getId() : TokenTypeModifiers.DECLARATION.getId(); referenceType = TokenTypes.VARIABLE.getId(); referenceModifiers = isReadonly ? TokenTypeModifiers.READONLY.getId() : 0; break; case TYPE: if (symbol.get() instanceof TypeReferenceTypeSymbol) { TypeSymbol typeDescriptor = ((TypeReferenceTypeSymbol) symbol.get()).typeDescriptor(); int type = TokenTypes.TYPE.getId(); switch (typeDescriptor.kind()) { case CLASS: type = TokenTypes.CLASS.getId(); if (typeDescriptor instanceof ClassSymbol && ((ClassSymbol) typeDescriptor).qualifiers().contains(Qualifier.READONLY)) { declarationModifiers = TokenTypeModifiers.DECLARATION.getId() | TokenTypeModifiers.READONLY.getId(); referenceModifiers = TokenTypeModifiers.READONLY.getId(); } else { declarationModifiers = TokenTypeModifiers.DECLARATION.getId(); } break; case TYPE: switch (typeDescriptor.typeKind()) { case RECORD: type = TokenTypes.STRUCT.getId(); declarationModifiers = TokenTypeModifiers.DECLARATION.getId(); break; case OBJECT: type = TokenTypes.INTERFACE.getId(); break; case INTERSECTION: IntersectionTypeSymbol intSecSymbol = (IntersectionTypeSymbol) typeDescriptor; if (intSecSymbol.effectiveTypeDescriptor().typeKind() == TypeDescKind.RECORD) { type = TokenTypes.STRUCT.getId(); if (intSecSymbol.memberTypeDescriptors().stream().anyMatch(desc -> desc.typeKind() == TypeDescKind.READONLY)) { declarationModifiers = TokenTypeModifiers.DECLARATION.getId() | TokenTypeModifiers.READONLY.getId(); referenceModifiers = 
TokenTypeModifiers.READONLY.getId(); } else { declarationModifiers = TokenTypeModifiers.DECLARATION.getId(); } } break; case UNION: if (((TypeReferenceTypeSymbol) symbol.get()).definition().kind() == SymbolKind.ENUM) { type = TokenTypes.ENUM.getId(); declarationModifiers = TokenTypeModifiers.DECLARATION.getId(); } break; default: type = TokenTypes.TYPE.getId(); break; } break; default: type = TokenTypes.TYPE.getId(); break; } declarationType = type; referenceType = type; } else { declarationType = TokenTypes.TYPE.getId(); referenceType = TokenTypes.TYPE.getId(); declarationModifiers = TokenTypeModifiers.DECLARATION.getId(); } break; case RECORD_FIELD: declarationType = TokenTypes.PROPERTY.getId(); referenceType = TokenTypes.PROPERTY.getId(); if (symbol.get() instanceof RecordFieldSymbol && ((RecordFieldSymbol) symbol.get()).qualifiers().contains(Qualifier.READONLY)) { declarationModifiers = TokenTypeModifiers.DECLARATION.getId() | TokenTypeModifiers.READONLY.getId(); referenceModifiers = TokenTypeModifiers.READONLY.getId(); } else { declarationModifiers = TokenTypeModifiers.DECLARATION.getId(); } break; case ENUM_MEMBER: declarationType = TokenTypes.ENUM_MEMBER.getId(); declarationModifiers = TokenTypeModifiers.DECLARATION.getId() | TokenTypeModifiers.READONLY.getId(); referenceType = TokenTypes.ENUM_MEMBER.getId(); referenceModifiers = TokenTypeModifiers.READONLY.getId(); break; case FUNCTION: declarationType = TokenTypes.FUNCTION.getId(); declarationModifiers = TokenTypeModifiers.DECLARATION.getId(); referenceType = TokenTypes.FUNCTION.getId(); break; case METHOD: declarationType = TokenTypes.METHOD.getId(); declarationModifiers = TokenTypeModifiers.DECLARATION.getId(); referenceType = TokenTypes.METHOD.getId(); break; case ANNOTATION: this.addSemanticToken(node, TokenTypes.TYPE.getId(), 0, false, -1, -1); break; default: break; } int length = node.textRange().length(); if (declarationType != -1) { Optional<ModuleSymbol> moduleSymbol = symbol.get().getModule(); 
if (symbolLineRange.filePath().equals(this.semanticTokensContext.currentDocument().get().name()) && moduleSymbol.isPresent() && moduleSymbol.get().getName().isPresent() && this.semanticTokensContext.currentModule().isPresent() && moduleSymbol.get().getName().get() .equals(this.semanticTokensContext.currentModule().get().moduleId().moduleName())) { SemanticToken semanticToken = new SemanticToken(linePosition.line(), linePosition.offset()); if (!semanticTokens.contains(semanticToken)) { semanticToken.setProperties(length, declarationType, declarationModifiers == -1 ? 0 : declarationModifiers); semanticTokens.add(semanticToken); } } } if (referenceType != -1) { final int type = referenceType; final int modifiers = referenceModifiers == -1 ? 0 : referenceModifiers; if (node.kind() == SyntaxKind.STRING_LITERAL) { return; } List<Location> locations = semanticModel.get().references(symbol.get(), this.semanticTokensContext.currentDocument().get(), false); locations.stream().filter(location -> location.lineRange().filePath() .equals(this.semanticTokensContext.currentDocument().get().name())).forEach(location -> { LinePosition position = location.lineRange().startLine(); SemanticToken semanticToken = new SemanticToken(position.line(), position.offset()); if (!semanticTokens.contains(semanticToken) && location.textRange().length() == length) { semanticToken.setProperties(length, type, modifiers); semanticTokens.add(semanticToken); } }); } } /** * Adds a semantic token instance into the semanticTokens set for the given node. 
* * @param node Current node * @param type Semantic token type's index * @param modifiers Semantic token type modifiers' index * @param processReferences True if node references should be processed, false otherwise * @param refType Reference's semantic token type's index * @param refModifiers Reference's semantic token type modifiers' index */ private void addSemanticToken(Node node, int type, int modifiers, boolean processReferences, int refType, int refModifiers) { LinePosition startLine = node.lineRange().startLine(); SemanticToken semanticToken = new SemanticToken(startLine.line(), startLine.offset()); if (!semanticTokens.contains(semanticToken)) { int length = node instanceof Token ? ((Token) node).text().trim().length() : node.textRange().length(); semanticToken.setProperties(length, type, modifiers); semanticTokens.add(semanticToken); if (processReferences) { handleReferences(startLine, length, refType, refModifiers); } } } /** * Handles references of the node that is located in the given position. 
* * @param linePosition Start position of the node * @param length Length to highlight * @param type Semantic token type's index * @param modifiers Semantic token type modifiers' index */ private void handleReferences(LinePosition linePosition, int length, int type, int modifiers) { Optional<SemanticModel> semanticModel = this.semanticTokensContext.currentSemanticModel(); if (semanticModel.isEmpty()) { return; } Document document = this.semanticTokensContext.currentDocument().get(); List<Location> locations = semanticModel.get().references(document, document, linePosition, false); locations.stream().filter(location -> location.lineRange().filePath().equals(document.name())).forEach(location -> { LinePosition position = location.lineRange().startLine(); SemanticToken semanticToken = new SemanticToken(position.line(), position.offset()); if (!semanticTokens.contains(semanticToken) && location.textRange().length() == length) { semanticToken.setProperties(length, type, modifiers); semanticTokens.add(semanticToken); } }); } /** * Represents semantic token data for a node. 
*/ static class SemanticToken implements Comparable<SemanticToken> { private final int line; private final int column; private int length; private int type; private int modifiers; private SemanticToken(int line, int column) { this.line = line; this.column = column; } private SemanticToken(int line, int column, int length, int type, int modifiers) { this.line = line; this.column = column; this.length = length; this.type = type; this.modifiers = modifiers; } private int getLine() { return line; } private int getColumn() { return column; } private int getLength() { return length; } private int getType() { return type; } private int getModifiers() { return modifiers; } public void setProperties(int length, int type, int modifiers) { this.length = length; this.type = type; this.modifiers = modifiers; } public SemanticToken processSemanticToken(List<Integer> data, SemanticToken previousToken) { int line = this.getLine(); int column = this.getColumn(); int prevTokenLine = line; int prevTokenColumn = column; if (previousToken != null) { if (line == previousToken.getLine()) { column -= previousToken.getColumn(); } line -= previousToken.getLine(); } data.add(line); data.add(column); data.add(this.getLength()); data.add(this.getType()); data.add(this.getModifiers()); return new SemanticToken(prevTokenLine, prevTokenColumn); } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null || getClass() != obj.getClass()) { return false; } SemanticToken semanticToken = (SemanticToken) obj; return line == semanticToken.line && column == semanticToken.column; } @Override public int hashCode() { return Objects.hash(line, column); } @Override public int compareTo(SemanticToken semanticToken) { if (this.line == semanticToken.line) { return this.column - semanticToken.column; } return this.line - semanticToken.line; } public static Comparator<SemanticToken> semanticTokenComparator = SemanticToken::compareTo; } }
/**
 * Syntax-tree visitor that collects {@link SemanticToken}s for a document so the language server
 * can answer LSP semantic-token requests (semantic highlighting).
 */
class SemanticTokensVisitor extends NodeVisitor {
    // Kept sorted by (line, column) so the tokens can be delta-encoded in document order.
    private final Set<SemanticToken> semanticTokens;
    private final SemanticTokensContext semanticTokensContext;

    public SemanticTokensVisitor(SemanticTokensContext semanticTokensContext) {
        this.semanticTokens = new TreeSet<>(SemanticToken.semanticTokenComparator);
        this.semanticTokensContext = semanticTokensContext;
    }

    /**
     * Collects semantic tokens while traversing the semantic trees and returns the processed list of semantic tokens
     * for highlighting.
     *
     * @param node Root node
     * @return {@link SemanticTokens}
     */
    public SemanticTokens getSemanticTokens(Node node) {
        List<Integer> data = new ArrayList<>();
        visitSyntaxNode(node);
        // Convert absolute (line, column) positions into the LSP delta-encoded integer list.
        SemanticToken previousToken = null;
        for (SemanticToken semanticToken : this.semanticTokens) {
            previousToken = semanticToken.processSemanticToken(data, previousToken);
        }
        return new SemanticTokens(data);
    }

    // Highlights the prefix of a prefixed import as a namespace declaration (references included).
    public void visit(ImportDeclarationNode importDeclarationNode) {
        Optional<ImportPrefixNode> importPrefixNode = importDeclarationNode.prefix();
        importPrefixNode.ifPresent(prefixNode -> this.addSemanticToken(prefixNode.prefix(),
                TokenTypes.NAMESPACE.getId(), TokenTypeModifiers.DECLARATION.getId(), true,
                TokenTypes.NAMESPACE.getId(), 0));
    }

    // Function/method definitions; resource accessors get only declaration tokens (no reference lookup).
    public void visit(FunctionDefinitionNode functionDefinitionNode) {
        int type = functionDefinitionNode.kind() == SyntaxKind.OBJECT_METHOD_DEFINITION ?
                TokenTypes.METHOD.getId() : TokenTypes.FUNCTION.getId();
        if (functionDefinitionNode.kind() == SyntaxKind.RESOURCE_ACCESSOR_DEFINITION) {
            this.addSemanticToken(functionDefinitionNode.functionName(), type,
                    TokenTypeModifiers.DECLARATION.getId(), false, -1, -1);
            // Identifier segments of the resource path are highlighted like the accessor itself.
            functionDefinitionNode.relativeResourcePath().stream()
                    .filter(resourcePath -> resourcePath.kind() == SyntaxKind.IDENTIFIER_TOKEN)
                    .forEach(resourcePath -> {
                        this.addSemanticToken(resourcePath, type, TokenTypeModifiers.DECLARATION.getId(),
                                false, -1, -1);
                    });
        } else {
            this.addSemanticToken(functionDefinitionNode.functionName(), type,
                    TokenTypeModifiers.DECLARATION.getId(), true, type, 0);
        }
        visitSyntaxNode(functionDefinitionNode);
    }

    public void visit(MethodDeclarationNode methodDeclarationNode) {
        this.addSemanticToken(methodDeclarationNode.methodName(), TokenTypes.METHOD.getId(),
                TokenTypeModifiers.DECLARATION.getId(), false, -1, -1);
        visitSyntaxNode(methodDeclarationNode);
    }

    public void visit(FunctionCallExpressionNode functionCallExpressionNode) {
        Node functionName = functionCallExpressionNode.functionName();
        // For qualified calls (mod:fn()) only the identifier part is highlighted as a function here.
        if (functionName instanceof QualifiedNameReferenceNode) {
            functionName = ((QualifiedNameReferenceNode) functionName).identifier();
        }
        this.addSemanticToken(functionName, TokenTypes.FUNCTION.getId(), 0, false, -1, -1);
        visitSyntaxNode(functionCallExpressionNode);
    }

    public void visit(MethodCallExpressionNode methodCallExpressionNode) {
        // NOTE(review): passes the DECLARATION modifier for a call site — confirm this is intended.
        this.addSemanticToken(methodCallExpressionNode.methodName(), TokenTypes.METHOD.getId(),
                TokenTypeModifiers.DECLARATION.getId(), false, -1, -1);
        visitSyntaxNode(methodCallExpressionNode);
    }

    public void visit(RequiredParameterNode requiredParameterNode) {
        boolean isReadonly = isReadonly(requiredParameterNode.typeName());
        requiredParameterNode.paramName().ifPresent(token -> this.addSemanticToken(token,
                TokenTypes.PARAMETER.getId(), isReadonly ?
                        TokenTypeModifiers.DECLARATION.getId() | TokenTypeModifiers.READONLY.getId() :
                        TokenTypeModifiers.DECLARATION.getId(), true, TokenTypes.PARAMETER.getId(),
                isReadonly ? TokenTypeModifiers.READONLY.getId() : 0));
        visitSyntaxNode(requiredParameterNode);
    }

    @Override
    public void visit(TypedBindingPatternNode typedBindingPatternNode) {
        TypeDescriptorNode typeDescriptorNode = typedBindingPatternNode.typeDescriptor();
        processSymbols(typeDescriptorNode, typeDescriptorNode.lineRange().startLine());
        visitSyntaxNode(typedBindingPatternNode);
    }

    public void visit(CaptureBindingPatternNode captureBindingPatternNode) {
        // readonly-ness comes from the enclosing typed binding pattern's type descriptor, if any.
        boolean readonly = false;
        if (captureBindingPatternNode.parent() instanceof TypedBindingPatternNode) {
            readonly = this.isReadonly(((TypedBindingPatternNode) captureBindingPatternNode.parent()).typeDescriptor());
        }
        this.addSemanticToken(captureBindingPatternNode, TokenTypes.VARIABLE.getId(), readonly ?
                TokenTypeModifiers.DECLARATION.getId() | TokenTypeModifiers.READONLY.getId() :
                TokenTypeModifiers.DECLARATION.getId(), true, TokenTypes.VARIABLE.getId(), readonly ?
                TokenTypeModifiers.READONLY.getId() : 0);
        visitSyntaxNode(captureBindingPatternNode);
    }

    public void visit(SimpleNameReferenceNode simpleNameReferenceNode) {
        // `self` is intentionally never highlighted through symbol processing.
        if (!SemanticTokensConstants.SELF.equals(simpleNameReferenceNode.name().text())) {
            processSymbols(simpleNameReferenceNode, simpleNameReferenceNode.lineRange().startLine());
        }
        visitSyntaxNode(simpleNameReferenceNode);
    }

    public void visit(QualifiedNameReferenceNode qualifiedNameReferenceNode) {
        this.addSemanticToken(qualifiedNameReferenceNode.modulePrefix(), TokenTypes.NAMESPACE.getId(), 0,
                false, -1, -1);
        Token identifier = qualifiedNameReferenceNode.identifier();
        processSymbols(identifier, identifier.lineRange().startLine());
        visitSyntaxNode(qualifiedNameReferenceNode);
    }

    public void visit(ConstantDeclarationNode constantDeclarationNode) {
        this.addSemanticToken(constantDeclarationNode.variableName(), TokenTypes.VARIABLE.getId(),
                TokenTypeModifiers.DECLARATION.getId() | TokenTypeModifiers.READONLY.getId(), true,
                TokenTypes.VARIABLE.getId(), TokenTypeModifiers.READONLY.getId());
        visitSyntaxNode(constantDeclarationNode);
    }

    public void visit(ClassDefinitionNode classDefinitionNode) {
        // A class is readonly when any of its qualifiers is the `readonly` keyword.
        boolean isReadonly = false;
        if (!classDefinitionNode.classTypeQualifiers().isEmpty() &&
                classDefinitionNode.classTypeQualifiers().stream().anyMatch(qualifier ->
                        qualifier.text().equals(SemanticTokensConstants.READONLY))) {
            isReadonly = true;
        }
        this.addSemanticToken(classDefinitionNode.className(), TokenTypes.CLASS.getId(), isReadonly ?
                TokenTypeModifiers.DECLARATION.getId() | TokenTypeModifiers.READONLY.getId() :
                TokenTypeModifiers.DECLARATION.getId(), true, TokenTypes.CLASS.getId(), isReadonly ?
                TokenTypeModifiers.READONLY.getId() : 0);
        visitSyntaxNode(classDefinitionNode);
    }

    public void visit(ServiceDeclarationNode serviceDeclarationNode) {
        // Each segment of the absolute resource path gets its own declaration token.
        serviceDeclarationNode.absoluteResourcePath().forEach(serviceName -> {
            LinePosition startLine = serviceName.lineRange().startLine();
            SemanticToken semanticToken = new SemanticToken(startLine.line(), startLine.offset(),
                    serviceName.textRange().length(), TokenTypes.TYPE.getId(),
                    TokenTypeModifiers.DECLARATION.getId());
            semanticTokens.add(semanticToken);
        });
        visitSyntaxNode(serviceDeclarationNode);
    }

    public void visit(EnumDeclarationNode enumDeclarationNode) {
        this.addSemanticToken(enumDeclarationNode.identifier(), TokenTypes.ENUM.getId(),
                TokenTypeModifiers.DECLARATION.getId(), true, TokenTypes.ENUM.getId(), 0);
        visitSyntaxNode(enumDeclarationNode);
    }

    public void visit(EnumMemberNode enumMemberNode) {
        // Enum members are implicitly readonly.
        this.addSemanticToken(enumMemberNode.identifier(), TokenTypes.ENUM_MEMBER.getId(),
                TokenTypeModifiers.DECLARATION.getId() | TokenTypeModifiers.READONLY.getId(), true,
                TokenTypes.ENUM_MEMBER.getId(), TokenTypeModifiers.READONLY.getId());
        visitSyntaxNode(enumMemberNode);
    }

    public void visit(AnnotationNode annotationNode) {
        // Only the `@` token is handled here; the annotation reference is handled via processSymbols.
        this.addSemanticToken(annotationNode.atToken(), TokenTypes.NAMESPACE.getId(), 0, false, -1, -1);
        visitSyntaxNode(annotationNode);
    }

    // Parameter names inside markdown documentation lines (`# + name - ...`), except the `return` line.
    public void visit(MarkdownParameterDocumentationLineNode markdownParameterDocumentationLineNode) {
        if (!markdownParameterDocumentationLineNode.parameterName().text().equals(SemanticTokensConstants.RETURN)) {
            int type;
            // Token type depends on what the documented construct is (field, type param or parameter).
            switch (markdownParameterDocumentationLineNode.parent().parent().parent().kind()) {
                case RECORD_FIELD:
                case OBJECT_FIELD:
                    type = TokenTypes.PROPERTY.getId();
                    break;
                case TYPE_DEFINITION:
                    Node node = markdownParameterDocumentationLineNode.parent().parent().parent();
                    type = TokenTypes.TYPE_PARAMETER.getId();
                    if (node instanceof TypeDefinitionNode) {
                        SyntaxKind kind = ((TypeDefinitionNode) node).typeDescriptor().kind();
                        if (kind == SyntaxKind.OBJECT_TYPE_DESC || kind == SyntaxKind.RECORD_TYPE_DESC) {
                            type = TokenTypes.PROPERTY.getId();
                        }
                    }
                    break;
                default:
                    type = TokenTypes.PARAMETER.getId();
                    break;
            }
            this.addSemanticToken(markdownParameterDocumentationLineNode.parameterName(), type,
                    TokenTypeModifiers.DOCUMENTATION.getId(), false, -1, -1);
        }
        visitSyntaxNode(markdownParameterDocumentationLineNode);
    }

    public void visit(RecordFieldNode recordFieldNode) {
        Token token = recordFieldNode.fieldName();
        LinePosition startLine = token.lineRange().startLine();
        SemanticToken semanticToken = new SemanticToken(startLine.line(), startLine.offset());
        if (!semanticTokens.contains(semanticToken)) {
            int length = token.text().trim().length();
            int modifiers;
            int refModifiers;
            if (recordFieldNode.readonlyKeyword().isPresent()) {
                modifiers = TokenTypeModifiers.DECLARATION.getId() | TokenTypeModifiers.READONLY.getId();
                refModifiers = TokenTypeModifiers.READONLY.getId();
            } else {
                modifiers = TokenTypeModifiers.DECLARATION.getId();
                refModifiers = 0;
            }
            semanticToken.setProperties(length, TokenTypes.PROPERTY.getId(), modifiers);
            semanticTokens.add(semanticToken);
            handleReferences(startLine, length, TokenTypes.PROPERTY.getId(), refModifiers);
        }
        visitSyntaxNode(recordFieldNode);
    }

    // Same handling as RecordFieldNode, for fields declared with a default value.
    public void visit(RecordFieldWithDefaultValueNode recordFieldWithDefaultValueNode) {
        Token token = recordFieldWithDefaultValueNode.fieldName();
        LinePosition startLine = token.lineRange().startLine();
        SemanticToken semanticToken = new SemanticToken(startLine.line(), startLine.offset());
        if (!semanticTokens.contains(semanticToken)) {
            int length = token.text().trim().length();
            int modifiers;
            int refModifiers;
            if (recordFieldWithDefaultValueNode.readonlyKeyword().isPresent()) {
                modifiers = TokenTypeModifiers.DECLARATION.getId() | TokenTypeModifiers.READONLY.getId();
                refModifiers = TokenTypeModifiers.READONLY.getId();
            } else {
                modifiers = TokenTypeModifiers.DECLARATION.getId();
                refModifiers = 0;
            }
            semanticToken.setProperties(length, TokenTypes.PROPERTY.getId(), modifiers);
            semanticTokens.add(semanticToken);
            handleReferences(startLine, length, TokenTypes.PROPERTY.getId(), refModifiers);
        }
        visitSyntaxNode(recordFieldWithDefaultValueNode);
    }

    public void visit(KeySpecifierNode keySpecifierNode) {
        keySpecifierNode.fieldNames().forEach(field -> this.addSemanticToken(field, TokenTypes.PROPERTY.getId(),
                TokenTypeModifiers.DECLARATION.getId(), false, -1, -1));
        visitSyntaxNode(keySpecifierNode);
    }

    public void visit(SpecificFieldNode specificFieldNode) {
        processSymbols(specificFieldNode.fieldName(), specificFieldNode.fieldName().location().lineRange().startLine());
        visitSyntaxNode(specificFieldNode);
    }

    public void visit(ObjectFieldNode objectFieldNode) {
        // Token type depends on the construct the field belongs to (class/object/record vs service vs other).
        SyntaxKind kind = objectFieldNode.parent().kind();
        int type = kind == SyntaxKind.CLASS_DEFINITION || kind == SyntaxKind.OBJECT_TYPE_DESC ||
                kind == SyntaxKind.RECORD_TYPE_DESC || kind == SyntaxKind.OBJECT_CONSTRUCTOR ?
                TokenTypes.PROPERTY.getId() : kind == SyntaxKind.SERVICE_DECLARATION ?
                TokenTypes.VARIABLE.getId() : TokenTypes.TYPE_PARAMETER.getId();
        boolean isReadOnly = isReadonly(objectFieldNode.typeName());
        this.addSemanticToken(objectFieldNode.fieldName(), type, isReadOnly ?
                TokenTypeModifiers.DECLARATION.getId() | TokenTypeModifiers.READONLY.getId() :
                TokenTypeModifiers.DECLARATION.getId(), true, type, isReadOnly ?
                TokenTypeModifiers.READONLY.getId() : 0);
        visitSyntaxNode(objectFieldNode);
    }

    public void visit(AnnotationDeclarationNode annotationDeclarationNode) {
        this.addSemanticToken(annotationDeclarationNode.annotationTag(), TokenTypes.TYPE.getId(),
                TokenTypeModifiers.DECLARATION.getId(), true, TokenTypes.TYPE.getId(), 0);
        visitSyntaxNode(annotationDeclarationNode);
    }

    public void visit(DefaultableParameterNode defaultableParameterNode) {
        defaultableParameterNode.paramName().ifPresent(token -> this.addSemanticToken(token,
                TokenTypes.PARAMETER.getId(), TokenTypeModifiers.DECLARATION.getId(), true,
                TokenTypes.PARAMETER.getId(), 0));
        visitSyntaxNode(defaultableParameterNode);
    }

    public void visit(IncludedRecordParameterNode includedRecordParameterNode) {
        includedRecordParameterNode.paramName().ifPresent(token -> this.addSemanticToken(token,
                TokenTypes.PARAMETER.getId(), TokenTypeModifiers.DECLARATION.getId(), true,
                TokenTypes.PARAMETER.getId(), 0));
        visitSyntaxNode(includedRecordParameterNode);
    }

    public void visit(RestParameterNode restParameterNode) {
        restParameterNode.paramName().ifPresent(token -> this.addSemanticToken(token,
                TokenTypes.PARAMETER.getId(), TokenTypeModifiers.DECLARATION.getId(), true,
                TokenTypes.PARAMETER.getId(), 0));
        visitSyntaxNode(restParameterNode);
    }

    public void visit(NamedArgumentNode namedArgumentNode) {
        this.addSemanticToken(namedArgumentNode.argumentName(), TokenTypes.VARIABLE.getId(),
                TokenTypeModifiers.DECLARATION.getId(), false, -1, -1);
        visitSyntaxNode(namedArgumentNode);
    }

    /**
     * Returns if the given IntersectionTypeDescriptorNode has a readonly typeDescriptor.
     *
     * @param node Current node
     * @return True if a readonly typeDescriptor is present, false otherwise.
     */
    private boolean isReadonly(Node node) {
        if (node instanceof IntersectionTypeDescriptorNode) {
            IntersectionTypeDescriptorNode intSecDescriptor = (IntersectionTypeDescriptorNode) node;
            SyntaxKind left = intSecDescriptor.leftTypeDesc().kind();
            SyntaxKind right = intSecDescriptor.rightTypeDesc().kind();
            return left == SyntaxKind.READONLY_TYPE_DESC || right == SyntaxKind.READONLY_TYPE_DESC;
        }
        return false;
    }

    /**
     * Get the symbol of the given node and process the semantic tokens for the symbol and it's references.
     *
     * @param node      Current node
     * @param startLine Start line position
     */
    private void processSymbols(Node node, LinePosition startLine) {
        // Skip string literals and positions already tokenized (set membership is by position only).
        if (node.kind() == SyntaxKind.STRING_LITERAL ||
                semanticTokens.contains(new SemanticToken(startLine.line(), startLine.offset()))) {
            return;
        }
        Optional<SemanticModel> semanticModel = this.semanticTokensContext.currentSemanticModel();
        if (semanticModel.isEmpty()) {
            return;
        }
        Optional<Symbol> symbol = semanticModel.get().symbol(node);
        if (symbol.isEmpty() || symbol.get().getLocation().isEmpty()) {
            return;
        }
        LineRange symbolLineRange = symbol.get().getLocation().get().lineRange();
        LinePosition linePosition = symbolLineRange.startLine();
        SymbolKind kind = symbol.get().kind();
        String nodeName = node.toString().trim();
        if (nodeName.equals(SemanticTokensConstants.SELF)) {
            return;
        }
        // -1 means "do not emit" for the corresponding declaration/reference token.
        int declarationType = -1, declarationModifiers = -1, referenceType = -1, referenceModifiers = -1;
        switch (kind) {
            case CLASS:
                declarationType = TokenTypes.CLASS.getId();
                declarationModifiers = TokenTypeModifiers.DECLARATION.getId();
                referenceType = TokenTypes.CLASS.getId();
                break;
            case CLASS_FIELD:
                declarationType = TokenTypes.PROPERTY.getId();
                declarationModifiers = TokenTypeModifiers.DECLARATION.getId();
                referenceType = TokenTypes.PROPERTY.getId();
                break;
            case CONSTANT:
                declarationType = TokenTypes.VARIABLE.getId();
                declarationModifiers = TokenTypeModifiers.DECLARATION.getId() | TokenTypeModifiers.READONLY.getId();
                referenceType = TokenTypes.VARIABLE.getId();
                referenceModifiers = TokenTypeModifiers.READONLY.getId();
                break;
            case VARIABLE:
                // A variable is readonly when its type is an intersection containing `readonly`.
                boolean isReadonly = ((VariableSymbol) symbol.get()).typeDescriptor().typeKind() ==
                        TypeDescKind.INTERSECTION && ((IntersectionTypeSymbol) ((VariableSymbol) symbol.get())
                        .typeDescriptor()).memberTypeDescriptors().stream()
                        .anyMatch(desc -> desc.typeKind() == TypeDescKind.READONLY);
                declarationType = TokenTypes.VARIABLE.getId();
                declarationModifiers = isReadonly ?
                        TokenTypeModifiers.DECLARATION.getId() | TokenTypeModifiers.READONLY.getId() :
                        TokenTypeModifiers.DECLARATION.getId();
                referenceType = TokenTypes.VARIABLE.getId();
                referenceModifiers = isReadonly ? TokenTypeModifiers.READONLY.getId() : 0;
                break;
            case TYPE:
                // Type references are refined to class/struct/interface/enum based on the referenced descriptor.
                if (symbol.get() instanceof TypeReferenceTypeSymbol) {
                    TypeSymbol typeDescriptor = ((TypeReferenceTypeSymbol) symbol.get()).typeDescriptor();
                    int type = TokenTypes.TYPE.getId();
                    switch (typeDescriptor.kind()) {
                        case CLASS:
                            type = TokenTypes.CLASS.getId();
                            if (typeDescriptor instanceof ClassSymbol &&
                                    ((ClassSymbol) typeDescriptor).qualifiers().contains(Qualifier.READONLY)) {
                                declarationModifiers = TokenTypeModifiers.DECLARATION.getId() |
                                        TokenTypeModifiers.READONLY.getId();
                                referenceModifiers = TokenTypeModifiers.READONLY.getId();
                            } else {
                                declarationModifiers = TokenTypeModifiers.DECLARATION.getId();
                            }
                            break;
                        case TYPE:
                            switch (typeDescriptor.typeKind()) {
                                case RECORD:
                                    type = TokenTypes.STRUCT.getId();
                                    declarationModifiers = TokenTypeModifiers.DECLARATION.getId();
                                    break;
                                case OBJECT:
                                    type = TokenTypes.INTERFACE.getId();
                                    break;
                                case INTERSECTION:
                                    // e.g. `readonly & record {...}` — treat as a (possibly readonly) struct.
                                    IntersectionTypeSymbol intSecSymbol = (IntersectionTypeSymbol) typeDescriptor;
                                    if (intSecSymbol.effectiveTypeDescriptor().typeKind() == TypeDescKind.RECORD) {
                                        type = TokenTypes.STRUCT.getId();
                                        if (intSecSymbol.memberTypeDescriptors().stream().anyMatch(desc ->
                                                desc.typeKind() == TypeDescKind.READONLY)) {
                                            declarationModifiers = TokenTypeModifiers.DECLARATION.getId() |
                                                    TokenTypeModifiers.READONLY.getId();
                                            referenceModifiers = TokenTypeModifiers.READONLY.getId();
                                        } else {
                                            declarationModifiers = TokenTypeModifiers.DECLARATION.getId();
                                        }
                                    }
                                    break;
                                case UNION:
                                    // Enums surface as unions; check the defining symbol kind.
                                    if (((TypeReferenceTypeSymbol) symbol.get()).definition().kind() ==
                                            SymbolKind.ENUM) {
                                        type = TokenTypes.ENUM.getId();
                                        declarationModifiers = TokenTypeModifiers.DECLARATION.getId();
                                    }
                                    break;
                                default:
                                    type = TokenTypes.TYPE.getId();
                                    break;
                            }
                            break;
                        default:
                            type = TokenTypes.TYPE.getId();
                            break;
                    }
                    declarationType = type;
                    referenceType = type;
                } else {
                    declarationType = TokenTypes.TYPE.getId();
                    referenceType = TokenTypes.TYPE.getId();
                    declarationModifiers = TokenTypeModifiers.DECLARATION.getId();
                }
                break;
            case RECORD_FIELD:
                declarationType = TokenTypes.PROPERTY.getId();
                referenceType = TokenTypes.PROPERTY.getId();
                if (symbol.get() instanceof RecordFieldSymbol &&
                        ((RecordFieldSymbol) symbol.get()).qualifiers().contains(Qualifier.READONLY)) {
                    declarationModifiers = TokenTypeModifiers.DECLARATION.getId() |
                            TokenTypeModifiers.READONLY.getId();
                    referenceModifiers = TokenTypeModifiers.READONLY.getId();
                } else {
                    declarationModifiers = TokenTypeModifiers.DECLARATION.getId();
                }
                break;
            case ENUM_MEMBER:
                declarationType = TokenTypes.ENUM_MEMBER.getId();
                declarationModifiers = TokenTypeModifiers.DECLARATION.getId() | TokenTypeModifiers.READONLY.getId();
                referenceType = TokenTypes.ENUM_MEMBER.getId();
                referenceModifiers = TokenTypeModifiers.READONLY.getId();
                break;
            case FUNCTION:
                declarationType = TokenTypes.FUNCTION.getId();
                declarationModifiers = TokenTypeModifiers.DECLARATION.getId();
                referenceType = TokenTypes.FUNCTION.getId();
                break;
            case METHOD:
                declarationType = TokenTypes.METHOD.getId();
                declarationModifiers = TokenTypeModifiers.DECLARATION.getId();
                referenceType = TokenTypes.METHOD.getId();
                break;
            case ANNOTATION:
                // Annotations are tokenized directly; no declaration/reference processing below.
                this.addSemanticToken(node, TokenTypes.TYPE.getId(), 0, false, -1, -1);
                break;
            default:
                break;
        }
        int length = node.textRange().length();
        if (declarationType != -1) {
            // Only emit the declaration token when the symbol is declared in this document and module.
            Optional<ModuleSymbol> moduleSymbol = symbol.get().getModule();
            if (symbolLineRange.filePath().equals(this.semanticTokensContext.currentDocument().get().name())
                    && moduleSymbol.isPresent() && moduleSymbol.get().getName().isPresent()
                    && this.semanticTokensContext.currentModule().isPresent()
                    && moduleSymbol.get().getName().get()
                    .equals(this.semanticTokensContext.currentModule().get().moduleId().moduleName())) {
                SemanticToken semanticToken = new SemanticToken(linePosition.line(), linePosition.offset());
                if (!semanticTokens.contains(semanticToken)) {
                    semanticToken.setProperties(length, declarationType,
                            declarationModifiers == -1 ? 0 : declarationModifiers);
                    semanticTokens.add(semanticToken);
                }
            }
        }
        if (referenceType != -1) {
            final int type = referenceType;
            final int modifiers = referenceModifiers == -1 ? 0 : referenceModifiers;
            List<Location> locations = semanticModel.get().references(symbol.get(),
                    this.semanticTokensContext.currentDocument().get(), false);
            // Only references inside the current document are highlighted.
            locations.stream().filter(location -> location.lineRange().filePath()
                    .equals(this.semanticTokensContext.currentDocument().get().name())).forEach(location -> {
                LinePosition position = location.lineRange().startLine();
                SemanticToken semanticToken = new SemanticToken(position.line(), position.offset());
                if (!semanticTokens.contains(semanticToken) && location.textRange().length() == length) {
                    semanticToken.setProperties(length, type, modifiers);
                    semanticTokens.add(semanticToken);
                }
            });
        }
    }

    /**
     * Adds a semantic token instance into the semanticTokens set for the given node.
     *
     * @param node              Current node
     * @param type              Semantic token type's index
     * @param modifiers         Semantic token type modifiers' index
     * @param processReferences True if node references should be processed, false otherwise
     * @param refType           Reference's semantic token type's index
     * @param refModifiers      Reference's semantic token type modifiers' index
     */
    private void addSemanticToken(Node node, int type, int modifiers, boolean processReferences, int refType,
                                  int refModifiers) {
        LinePosition startLine = node.lineRange().startLine();
        SemanticToken semanticToken = new SemanticToken(startLine.line(), startLine.offset());
        if (!semanticTokens.contains(semanticToken)) {
            // Tokens use the trimmed token text length; other nodes use the raw text-range length.
            int length = node instanceof Token ? ((Token) node).text().trim().length() : node.textRange().length();
            semanticToken.setProperties(length, type, modifiers);
            semanticTokens.add(semanticToken);
            if (processReferences) {
                handleReferences(startLine, length, refType, refModifiers);
            }
        }
    }

    /**
     * Handles references of the node that is located in the given position.
     *
     * @param linePosition Start position of the node
     * @param length       Length to highlight
     * @param type         Semantic token type's index
     * @param modifiers    Semantic token type modifiers' index
     */
    private void handleReferences(LinePosition linePosition, int length, int type, int modifiers) {
        Optional<SemanticModel> semanticModel = this.semanticTokensContext.currentSemanticModel();
        if (semanticModel.isEmpty()) {
            return;
        }
        Document document = this.semanticTokensContext.currentDocument().get();
        List<Location> locations = semanticModel.get().references(document, document, linePosition, false);
        // Length equality guards against partial/renamed matches at the same position.
        locations.stream().filter(location -> location.lineRange().filePath().equals(document.name()))
                .forEach(location -> {
                    LinePosition position = location.lineRange().startLine();
                    SemanticToken semanticToken = new SemanticToken(position.line(), position.offset());
                    if (!semanticTokens.contains(semanticToken) && location.textRange().length() == length) {
                        semanticToken.setProperties(length, type, modifiers);
                        semanticTokens.add(semanticToken);
                    }
                });
    }

    /**
     * Represents semantic token data for a node.
     */
    static class SemanticToken implements Comparable<SemanticToken> {
        // Identity is (line, column) only; length/type/modifiers are mutable payload.
        private final int line;
        private final int column;
        private int length;
        private int type;
        private int modifiers;

        private SemanticToken(int line, int column) {
            this.line = line;
            this.column = column;
        }

        private SemanticToken(int line, int column, int length, int type, int modifiers) {
            this.line = line;
            this.column = column;
            this.length = length;
            this.type = type;
            this.modifiers = modifiers;
        }

        private int getLine() {
            return line;
        }

        private int getColumn() {
            return column;
        }

        private int getLength() {
            return length;
        }

        private int getType() {
            return type;
        }

        private int getModifiers() {
            return modifiers;
        }

        public void setProperties(int length, int type, int modifiers) {
            this.length = length;
            this.type = type;
            this.modifiers = modifiers;
        }

        /**
         * Appends this token to {@code data} in the LSP delta-encoded form (line and column are
         * relative to {@code previousToken}) and returns a token holding this token's absolute
         * position for use as the next iteration's predecessor.
         */
        public SemanticToken processSemanticToken(List<Integer> data, SemanticToken previousToken) {
            int line = this.getLine();
            int column = this.getColumn();
            int prevTokenLine = line;
            int prevTokenColumn = column;
            if (previousToken != null) {
                // Column is relative only when both tokens are on the same line.
                if (line == previousToken.getLine()) {
                    column -= previousToken.getColumn();
                }
                line -= previousToken.getLine();
            }
            data.add(line);
            data.add(column);
            data.add(this.getLength());
            data.add(this.getType());
            data.add(this.getModifiers());
            return new SemanticToken(prevTokenLine, prevTokenColumn);
        }

        @Override
        public boolean equals(Object obj) {
            if (this == obj) {
                return true;
            }
            if (obj == null || getClass() != obj.getClass()) {
                return false;
            }
            SemanticToken semanticToken = (SemanticToken) obj;
            return line == semanticToken.line && column == semanticToken.column;
        }

        @Override
        public int hashCode() {
            return Objects.hash(line, column);
        }

        @Override
        public int compareTo(SemanticToken semanticToken) {
            // Document order: by line, then by column.
            if (this.line == semanticToken.line) {
                return this.column - semanticToken.column;
            }
            return this.line - semanticToken.line;
        }

        public static Comparator<SemanticToken> semanticTokenComparator = SemanticToken::compareTo;
    }
}
That's not the most important thing this block is trying to accomplish: it's making sure an up-to-date node is fetched from the node repository, mutated, and saved back, all under the application lock. This avoids overwriting fields that others may have modified on the node since it was fetched above.
/**
 * Retires allocated tenant nodes that match the retirement policy, bounded by the number of
 * spare nodes available per flavor and by {@code MAX_SIMULTANEOUS_RETIRES_PER_APPLICATION}
 * per application. Selected nodes are marked wantToRetire/wantToDeprovision and the owning
 * application is redeployed so replacements are provisioned.
 */
void retireAllocated() {
    List<Node> allNodes = nodeRepository().getNodes(NodeType.tenant);
    List<ApplicationId> activeApplications = getActiveApplicationIds(allNodes);
    Map<Flavor, Map<Node.State, Long>> numSpareNodesByFlavorByState = getNumberOfNodesByFlavorByNodeState(allNodes);
    flavorSpareChecker.updateReadyAndActiveCountsByFlavor(numSpareNodesByFlavorByState);

    Map<Deployment, Set<Node>> nodesToRetireByDeployment = new HashMap<>();
    for (ApplicationId applicationId : activeApplications) {
        List<Node> applicationNodes = getNodesBelongingToApplication(allNodes, applicationId);
        Set<Node> retireableNodes = getRetireableNodesForApplication(applicationNodes);
        long numNodesAllowedToRetire = getNumberNodesAllowToRetireForApplication(applicationNodes,
                MAX_SIMULTANEOUS_RETIRES_PER_APPLICATION);
        if (retireableNodes.isEmpty() || numNodesAllowedToRetire == 0) continue;

        Optional<Deployment> deployment = deployer.deployFromLocalActive(applicationId, Duration.ofMinutes(30));
        if ( ! deployment.isPresent()) continue;

        // Only retire nodes whose flavor still has spare capacity, up to the per-application cap.
        Set<Node> replaceableNodes = retireableNodes.stream()
                .filter(node -> flavorSpareChecker.canRetireAllocatedNodeWithFlavor(node.flavor()))
                .limit(numNodesAllowedToRetire)
                .collect(Collectors.toSet());
        if (! replaceableNodes.isEmpty()) nodesToRetireByDeployment.put(deployment.get(), replaceableNodes);
    }

    nodesToRetireByDeployment.forEach(((deployment, nodes) -> {
        ApplicationId app = nodes.iterator().next().allocation().get().owner();
        Set<Node> nodesToRetire;

        // Re-fetch each node under the application lock and mutate the up-to-date copy, so we
        // do not overwrite fields others may have modified since the nodes were read above.
        try (Mutex lock = nodeRepository().lock(app)) {
            nodesToRetire = nodes.stream()
                    .map(node -> nodeRepository().getNode(node.hostname())
                            // Fix: validate the re-fetched node's state (the stale copy was
                            // already known to be active, so checking it was a no-op).
                            .filter(upToDateNode -> upToDateNode.state() == Node.State.active)
                            // Guard against nodes that lost their allocation since the scan.
                            .filter(upToDateNode -> upToDateNode.allocation().isPresent())
                            .filter(upToDateNode -> node.allocation().get().owner()
                                    .equals(upToDateNode.allocation().get().owner())))
                    .flatMap(node -> node.map(Stream::of).orElseGet(Stream::empty))
                    .collect(Collectors.toSet());

            nodesToRetire.forEach(node -> {
                log.info("Setting wantToRetire and wantToDeprovision for host " + node.hostname() +
                        " with flavor " + node.flavor().name() +
                        " allocated to " + node.allocation().get().owner() + ". Policy: " +
                        retirementPolicy.getClass().getSimpleName());
                Node updatedNode = node.with(node.status()
                        .withWantToRetire(true)
                        .withWantToDeprovision(true));
                nodeRepository().write(updatedNode);
            });
        }

        // Redeploy outside the lock so replacements for the retiring nodes get provisioned.
        if (! nodesToRetire.isEmpty())
            deployment.activate();
    }));
}
/**
 * Retires allocated tenant nodes that match the retirement policy, bounded by the number of
 * spare nodes available per flavor and by {@code MAX_SIMULTANEOUS_RETIRES_PER_APPLICATION}
 * per application. Selected nodes are marked wantToRetire/wantToDeprovision and the owning
 * application is redeployed so replacements are provisioned.
 */
void retireAllocated() {
    List<Node> allNodes = nodeRepository().getNodes(NodeType.tenant);
    List<ApplicationId> activeApplications = getActiveApplicationIds(allNodes);
    Map<Flavor, Map<Node.State, Long>> numSpareNodesByFlavorByState = getNumberOfNodesByFlavorByNodeState(allNodes);
    flavorSpareChecker.updateReadyAndActiveCountsByFlavor(numSpareNodesByFlavorByState);

    Map<Deployment, Set<Node>> nodesToRetireByDeployment = new HashMap<>();
    for (ApplicationId applicationId : activeApplications) {
        List<Node> applicationNodes = getNodesBelongingToApplication(allNodes, applicationId);
        Set<Node> retireableNodes = getRetireableNodesForApplication(applicationNodes);
        long numNodesAllowedToRetire = getNumberNodesAllowToRetireForApplication(applicationNodes,
                MAX_SIMULTANEOUS_RETIRES_PER_APPLICATION);
        if (retireableNodes.isEmpty() || numNodesAllowedToRetire == 0) continue;

        Optional<Deployment> deployment = deployer.deployFromLocalActive(applicationId, Duration.ofMinutes(30));
        if ( ! deployment.isPresent()) continue;

        // Only retire nodes whose flavor still has spare capacity, up to the per-application cap.
        Set<Node> replaceableNodes = retireableNodes.stream()
                .filter(node -> flavorSpareChecker.canRetireAllocatedNodeWithFlavor(node.flavor()))
                .limit(numNodesAllowedToRetire)
                .collect(Collectors.toSet());
        if (! replaceableNodes.isEmpty()) nodesToRetireByDeployment.put(deployment.get(), replaceableNodes);
    }

    nodesToRetireByDeployment.forEach(((deployment, nodes) -> {
        ApplicationId app = nodes.iterator().next().allocation().get().owner();
        Set<Node> nodesToRetire;

        // Re-fetch each node under the application lock and mutate the up-to-date copy, so we
        // do not overwrite fields others may have modified since the nodes were read above.
        try (Mutex lock = nodeRepository().lock(app)) {
            nodesToRetire = nodes.stream()
                    .map(node -> nodeRepository().getNode(node.hostname())
                            // Fix: validate the re-fetched node's state (the stale copy was
                            // already known to be active, so checking it was a no-op).
                            .filter(upToDateNode -> upToDateNode.state() == Node.State.active)
                            // Guard against nodes that lost their allocation since the scan.
                            .filter(upToDateNode -> upToDateNode.allocation().isPresent())
                            .filter(upToDateNode -> node.allocation().get().owner()
                                    .equals(upToDateNode.allocation().get().owner())))
                    .flatMap(node -> node.map(Stream::of).orElseGet(Stream::empty))
                    .collect(Collectors.toSet());

            nodesToRetire.forEach(node -> {
                log.info("Setting wantToRetire and wantToDeprovision for host " + node.hostname() +
                        " with flavor " + node.flavor().name() +
                        " allocated to " + node.allocation().get().owner() + ". Policy: " +
                        retirementPolicy.getClass().getSimpleName());
                Node updatedNode = node.with(node.status()
                        .withWantToRetire(true)
                        .withWantToDeprovision(true));
                nodeRepository().write(updatedNode);
            });
        }

        // Redeploy outside the lock so replacements for the retiring nodes get provisioned.
        if (! nodesToRetire.isEmpty())
            deployment.activate();
    }));
}
/**
 * Maintainer that retires tenant nodes matching the configured {@link RetirementPolicy},
 * constrained by per-flavor spare capacity tracked by {@link FlavorSpareChecker}.
 * Runs only in the zones passed as {@code applies}.
 */
class NodeRetirer extends Maintainer {
    // A flavor may lose a node only while >2 ready nodes remain among its replacement flavors.
    public static final FlavorSpareChecker.SpareNodesPolicy SPARE_NODES_POLICY = flavorSpareCount ->
            flavorSpareCount.getNumReadyAmongReplacees() > 2;

    private static final long MAX_SIMULTANEOUS_RETIRES_PER_APPLICATION = 1;
    private static final Logger log = Logger.getLogger(NodeRetirer.class.getName());

    private final Deployer deployer;
    private final FlavorSpareChecker flavorSpareChecker;
    private final RetirementPolicy retirementPolicy;

    public NodeRetirer(NodeRepository nodeRepository, Zone zone, FlavorSpareChecker flavorSpareChecker,
                       Duration interval, Deployer deployer, JobControl jobControl,
                       RetirementPolicy retirementPolicy, Zone... applies) {
        super(nodeRepository, interval, jobControl);
        if (! Arrays.asList(applies).contains(zone)) {
            // Not an applicable zone: log and stop this maintainer (presumably deconstruct()
            // cancels the scheduled runs — TODO confirm against Maintainer).
            String targetZones = Arrays.stream(applies).map(Zone::toString).collect(Collectors.joining(", "));
            log.info("NodeRetirer should only run in " + targetZones + " and not in " + zone + ", stopping.");
            deconstruct();
        }

        this.deployer = deployer;
        this.retirementPolicy = retirementPolicy;
        this.flavorSpareChecker = flavorSpareChecker;
    }

    @Override
    protected void maintain() {
        // Allocated nodes are only retired once all retireable unallocated nodes are handled.
        if (retireUnallocated()) {
            retireAllocated();
        }
    }

    /**
     * Retires unallocated nodes by moving them directly to parked.
     *
     * @return true iff there are no remaining unallocated nodes that match the retirement policy
     */
    boolean retireUnallocated() {
        try (Mutex lock = nodeRepository().lockUnallocated()) {
            List<Node> allNodes = nodeRepository().getNodes(NodeType.tenant);
            Map<Flavor, Map<Node.State, Long>> numSpareNodesByFlavorByState = getNumberOfNodesByFlavorByNodeState(allNodes);
            flavorSpareChecker.updateReadyAndActiveCountsByFlavor(numSpareNodesByFlavorByState);

            // NOTE(review): this filter performs side effects (write/park) while counting the
            // flavors that could not be fully retired — consider refactoring to an explicit loop.
            long numFlavorsWithUnsuccessfullyRetiredNodes = allNodes.stream()
                    .filter(node -> node.state() == Node.State.ready)
                    .filter(retirementPolicy::shouldRetire)
                    .collect(Collectors.groupingBy(
                            Node::flavor,
                            Collectors.toSet()))
                    .entrySet().stream()
                    .filter(entry -> {
                        Set<Node> nodesThatShouldBeRetiredForFlavor = entry.getValue();
                        for (Iterator<Node> iter = nodesThatShouldBeRetiredForFlavor.iterator(); iter.hasNext(); ) {
                            Node nodeToRetire = iter.next();
                            // Stop once the flavor runs out of spare capacity.
                            if (! flavorSpareChecker.canRetireUnallocatedNodeWithFlavor(nodeToRetire.flavor())) break;

                            nodeRepository().write(nodeToRetire.with(nodeToRetire.status().withWantToDeprovision(true)));
                            nodeRepository().park(nodeToRetire.hostname(), Agent.NodeRetirer,
                                    "Policy: " + retirementPolicy.getClass().getSimpleName());
                            iter.remove();
                        }

                        if (! nodesThatShouldBeRetiredForFlavor.isEmpty()) {
                            String commaSeparatedHostnames = nodesThatShouldBeRetiredForFlavor.stream().map(Node::hostname)
                                    .collect(Collectors.joining(", "));
                            log.info(String.format("Failed to retire %s, wanted to retire %d nodes (%s), but there are no spare nodes left.",
                                    entry.getKey(), nodesThatShouldBeRetiredForFlavor.size(), commaSeparatedHostnames));
                        }
                        // A flavor counts as unsuccessful when some of its nodes remain unretired.
                        return ! nodesThatShouldBeRetiredForFlavor.isEmpty();
                    }).count();

            return numFlavorsWithUnsuccessfullyRetiredNodes == 0;
        }
    }

    // Returns the nodes (with an allocation) owned by the given application.
    private List<Node> getNodesBelongingToApplication(List<Node> allNodes, ApplicationId applicationId) {
        return allNodes.stream()
                .filter(node -> node.allocation().isPresent())
                .filter(node -> node.allocation().get().owner().equals(applicationId))
                .collect(Collectors.toList());
    }

    /**
     * Returns a list of ApplicationIds sorted by number of active nodes the application has allocated to it
     * (largest first).
     */
    List<ApplicationId> getActiveApplicationIds(List<Node> nodes) {
        return nodes.stream()
                .filter(node -> node.state() == Node.State.active)
                .collect(Collectors.groupingBy(
                        node -> node.allocation().get().owner(),
                        Collectors.counting()))
                .entrySet().stream()
                .sorted((c1, c2) -> c2.getValue().compareTo(c1.getValue()))
                .map(Map.Entry::getKey)
                .collect(Collectors.toList());
    }

    /**
     * @param applicationNodes All the nodes allocated to an application
     * @return Set of nodes that all should eventually be retired
     */
    Set<Node> getRetireableNodesForApplication(List<Node> applicationNodes) {
        return applicationNodes.stream()
                .filter(node -> node.state() == Node.State.active)
                .filter(node -> !node.status().wantToRetire())
                .filter(retirementPolicy::shouldRetire)
                .collect(Collectors.toSet());
    }

    /**
     * @param applicationNodes All the nodes allocated to an application
     * @return number of nodes we can safely start retiring
     */
    long getNumberNodesAllowToRetireForApplication(List<Node> applicationNodes, long maxSimultaneousRetires) {
        // Nodes already retiring (but not yet parked) count against the per-application budget.
        long numNodesInWantToRetire = applicationNodes.stream()
                .filter(node -> node.status().wantToRetire())
                .filter(node -> node.state() != Node.State.parked)
                .count();
        return Math.max(0, maxSimultaneousRetires - numNodesInWantToRetire);
    }

    // Counts nodes per flavor, broken down by node state.
    private Map<Flavor, Map<Node.State, Long>> getNumberOfNodesByFlavorByNodeState(List<Node> allNodes) {
        return allNodes.stream()
                .collect(Collectors.groupingBy(
                        Node::flavor,
                        Collectors.groupingBy(Node::state, Collectors.counting())));
    }
}
class NodeRetirer extends Maintainer { public static final FlavorSpareChecker.SpareNodesPolicy SPARE_NODES_POLICY = flavorSpareCount -> flavorSpareCount.getNumReadyAmongReplacees() > 2; private static final long MAX_SIMULTANEOUS_RETIRES_PER_APPLICATION = 1; private static final Logger log = Logger.getLogger(NodeRetirer.class.getName()); private final Deployer deployer; private final FlavorSpareChecker flavorSpareChecker; private final RetirementPolicy retirementPolicy; public NodeRetirer(NodeRepository nodeRepository, Zone zone, FlavorSpareChecker flavorSpareChecker, Duration interval, Deployer deployer, JobControl jobControl, RetirementPolicy retirementPolicy, Zone... applies) { super(nodeRepository, interval, jobControl); if (! Arrays.asList(applies).contains(zone)) { String targetZones = Arrays.stream(applies).map(Zone::toString).collect(Collectors.joining(", ")); log.info("NodeRetirer should only run in " + targetZones + " and not in " + zone + ", stopping."); deconstruct(); } this.deployer = deployer; this.retirementPolicy = retirementPolicy; this.flavorSpareChecker = flavorSpareChecker; } @Override protected void maintain() { if (retireUnallocated()) { retireAllocated(); } } /** * Retires unallocated nodes by moving them directly to parked. 
* Returns true iff all there are no unallocated nodes that match the retirement policy */ boolean retireUnallocated() { try (Mutex lock = nodeRepository().lockUnallocated()) { List<Node> allNodes = nodeRepository().getNodes(NodeType.tenant); Map<Flavor, Map<Node.State, Long>> numSpareNodesByFlavorByState = getNumberOfNodesByFlavorByNodeState(allNodes); flavorSpareChecker.updateReadyAndActiveCountsByFlavor(numSpareNodesByFlavorByState); long numFlavorsWithUnsuccessfullyRetiredNodes = allNodes.stream() .filter(node -> node.state() == Node.State.ready) .filter(retirementPolicy::shouldRetire) .collect(Collectors.groupingBy( Node::flavor, Collectors.toSet())) .entrySet().stream() .filter(entry -> { Set<Node> nodesThatShouldBeRetiredForFlavor = entry.getValue(); for (Iterator<Node> iter = nodesThatShouldBeRetiredForFlavor.iterator(); iter.hasNext(); ) { Node nodeToRetire = iter.next(); if (! flavorSpareChecker.canRetireUnallocatedNodeWithFlavor(nodeToRetire.flavor())) break; nodeRepository().write(nodeToRetire.with(nodeToRetire.status().withWantToDeprovision(true))); nodeRepository().park(nodeToRetire.hostname(), Agent.NodeRetirer, "Policy: " + retirementPolicy.getClass().getSimpleName()); iter.remove(); } if (! nodesThatShouldBeRetiredForFlavor.isEmpty()) { String commaSeparatedHostnames = nodesThatShouldBeRetiredForFlavor.stream().map(Node::hostname) .collect(Collectors.joining(", ")); log.info(String.format("Failed to retire %s, wanted to retire %d nodes (%s), but there are no spare nodes left.", entry.getKey(), nodesThatShouldBeRetiredForFlavor.size(), commaSeparatedHostnames)); } return ! 
nodesThatShouldBeRetiredForFlavor.isEmpty(); }).count(); return numFlavorsWithUnsuccessfullyRetiredNodes == 0; } } private List<Node> getNodesBelongingToApplication(List<Node> allNodes, ApplicationId applicationId) { return allNodes.stream() .filter(node -> node.allocation().isPresent()) .filter(node -> node.allocation().get().owner().equals(applicationId)) .collect(Collectors.toList()); } /** * Returns a list of ApplicationIds sorted by number of active nodes the application has allocated to it */ List<ApplicationId> getActiveApplicationIds(List<Node> nodes) { return nodes.stream() .filter(node -> node.state() == Node.State.active) .collect(Collectors.groupingBy( node -> node.allocation().get().owner(), Collectors.counting())) .entrySet().stream() .sorted((c1, c2) -> c2.getValue().compareTo(c1.getValue())) .map(Map.Entry::getKey) .collect(Collectors.toList()); } /** * @param applicationNodes All the nodes allocated to an application * @return Set of nodes that all should eventually be retired */ Set<Node> getRetireableNodesForApplication(List<Node> applicationNodes) { return applicationNodes.stream() .filter(node -> node.state() == Node.State.active) .filter(node -> !node.status().wantToRetire()) .filter(retirementPolicy::shouldRetire) .collect(Collectors.toSet()); } /** * @param applicationNodes All the nodes allocated to an application * @return number of nodes we can safely start retiring */ long getNumberNodesAllowToRetireForApplication(List<Node> applicationNodes, long maxSimultaneousRetires) { long numNodesInWantToRetire = applicationNodes.stream() .filter(node -> node.status().wantToRetire()) .filter(node -> node.state() != Node.State.parked) .count(); return Math.max(0, maxSimultaneousRetires - numNodesInWantToRetire); } private Map<Flavor, Map<Node.State, Long>> getNumberOfNodesByFlavorByNodeState(List<Node> allNodes) { return allNodes.stream() .collect(Collectors.groupingBy( Node::flavor, Collectors.groupingBy(Node::state, Collectors.counting()))); } }
We can set it, but we don't need to use it.
public List<OptExpression> transform(OptExpression input, OptimizerContext context) { LogicalScanOperator scanOperator = (LogicalScanOperator) input.getOp(); ColumnRefSet requiredOutputColumns = context.getTaskContext().getRequiredColumns(); Set<ColumnRefOperator> outputColumns = scanOperator.getColRefToColumnMetaMap().keySet().stream().filter(requiredOutputColumns::contains) .collect(Collectors.toSet()); outputColumns.addAll(Utils.extractColumnRef(scanOperator.getPredicate())); boolean canUseAnyColumn = false; if (outputColumns.size() == 0) { outputColumns.add(Utils.findSmallestColumnRef( new ArrayList<>(scanOperator.getColRefToColumnMetaMap().keySet()))); canUseAnyColumn = true; } if (!context.getSessionVariable().isEnableCountStarOptimization()) { canUseAnyColumn = false; } if (scanOperator.getColRefToColumnMetaMap().keySet().equals(outputColumns)) { scanOperator.setCanUseAnyColumn(canUseAnyColumn); return Collections.emptyList(); } else { Map<ColumnRefOperator, Column> newColumnRefMap = outputColumns.stream() .collect(Collectors.toMap(identity(), scanOperator.getColRefToColumnMetaMap()::get)); if (scanOperator instanceof LogicalOlapScanOperator) { LogicalOlapScanOperator olapScanOperator = (LogicalOlapScanOperator) scanOperator; LogicalOlapScanOperator.Builder builder = new LogicalOlapScanOperator.Builder(); LogicalOlapScanOperator newScanOperator = builder.withOperator(olapScanOperator) .setColRefToColumnMetaMap(newColumnRefMap).build(); newScanOperator.setCanUseAnyColumn(canUseAnyColumn); return Lists.newArrayList(new OptExpression(newScanOperator)); } else { LogicalScanOperator.Builder builder = OperatorBuilderFactory.build(scanOperator); scanOperator.setCanUseAnyColumn(canUseAnyColumn); Operator newScanOperator = builder.withOperator(scanOperator).setColRefToColumnMetaMap(newColumnRefMap).build(); return Lists.newArrayList(new OptExpression(newScanOperator)); } } }
boolean canUseAnyColumn = false;
public List<OptExpression> transform(OptExpression input, OptimizerContext context) { LogicalScanOperator scanOperator = (LogicalScanOperator) input.getOp(); ColumnRefSet requiredOutputColumns = context.getTaskContext().getRequiredColumns(); Set<ColumnRefOperator> outputColumns = scanOperator.getColRefToColumnMetaMap().keySet().stream().filter(requiredOutputColumns::contains) .collect(Collectors.toSet()); outputColumns.addAll(Utils.extractColumnRef(scanOperator.getPredicate())); boolean canUseAnyColumn = false; if (outputColumns.size() == 0) { outputColumns.add(Utils.findSmallestColumnRef( new ArrayList<>(scanOperator.getColRefToColumnMetaMap().keySet()))); canUseAnyColumn = true; } if (!context.getSessionVariable().isEnableCountStarOptimization()) { canUseAnyColumn = false; } if (scanOperator.getColRefToColumnMetaMap().keySet().equals(outputColumns)) { scanOperator.setCanUseAnyColumn(canUseAnyColumn); return Collections.emptyList(); } else { Map<ColumnRefOperator, Column> newColumnRefMap = outputColumns.stream() .collect(Collectors.toMap(identity(), scanOperator.getColRefToColumnMetaMap()::get)); if (scanOperator instanceof LogicalOlapScanOperator) { LogicalOlapScanOperator olapScanOperator = (LogicalOlapScanOperator) scanOperator; LogicalOlapScanOperator.Builder builder = new LogicalOlapScanOperator.Builder(); LogicalOlapScanOperator newScanOperator = builder.withOperator(olapScanOperator) .setColRefToColumnMetaMap(newColumnRefMap).build(); newScanOperator.setCanUseAnyColumn(canUseAnyColumn); return Lists.newArrayList(new OptExpression(newScanOperator)); } else { LogicalScanOperator.Builder builder = OperatorBuilderFactory.build(scanOperator); scanOperator.setCanUseAnyColumn(canUseAnyColumn); Operator newScanOperator = builder.withOperator(scanOperator).setColRefToColumnMetaMap(newColumnRefMap).build(); return Lists.newArrayList(new OptExpression(newScanOperator)); } } }
class PruneScanColumnRule extends TransformationRule { public static final PruneScanColumnRule OLAP_SCAN = new PruneScanColumnRule(OperatorType.LOGICAL_OLAP_SCAN); public static final PruneScanColumnRule SCHEMA_SCAN = new PruneScanColumnRule(OperatorType.LOGICAL_SCHEMA_SCAN); public static final PruneScanColumnRule MYSQL_SCAN = new PruneScanColumnRule(OperatorType.LOGICAL_MYSQL_SCAN); public static final PruneScanColumnRule ES_SCAN = new PruneScanColumnRule(OperatorType.LOGICAL_ES_SCAN); public static final PruneScanColumnRule JDBC_SCAN = new PruneScanColumnRule(OperatorType.LOGICAL_JDBC_SCAN); public static final PruneScanColumnRule BINLOG_SCAN = new PruneScanColumnRule(OperatorType.LOGICAL_BINLOG_SCAN); public PruneScanColumnRule(OperatorType logicalOperatorType) { super(RuleType.TF_PRUNE_OLAP_SCAN_COLUMNS, Pattern.create(logicalOperatorType)); } @Override }
class PruneScanColumnRule extends TransformationRule { public static final PruneScanColumnRule OLAP_SCAN = new PruneScanColumnRule(OperatorType.LOGICAL_OLAP_SCAN); public static final PruneScanColumnRule SCHEMA_SCAN = new PruneScanColumnRule(OperatorType.LOGICAL_SCHEMA_SCAN); public static final PruneScanColumnRule MYSQL_SCAN = new PruneScanColumnRule(OperatorType.LOGICAL_MYSQL_SCAN); public static final PruneScanColumnRule ES_SCAN = new PruneScanColumnRule(OperatorType.LOGICAL_ES_SCAN); public static final PruneScanColumnRule JDBC_SCAN = new PruneScanColumnRule(OperatorType.LOGICAL_JDBC_SCAN); public static final PruneScanColumnRule BINLOG_SCAN = new PruneScanColumnRule(OperatorType.LOGICAL_BINLOG_SCAN); public PruneScanColumnRule(OperatorType logicalOperatorType) { super(RuleType.TF_PRUNE_OLAP_SCAN_COLUMNS, Pattern.create(logicalOperatorType)); } @Override }
I initialized vals[1] with an empty string by default so that the NPE wouldn't arise. But ideally, the default value provided in the mock annotation should be applied in this case.
public void process(FunctionNode functionNode, List<AnnotationAttachmentNode> annotations) { parent = (BLangPackage) ((BLangFunction) functionNode).parent; String packageName = getPackageName(parent); annotations = annotations.stream().distinct().collect(Collectors.toList()); for (AnnotationAttachmentNode attachmentNode : annotations) { String annotationName = attachmentNode.getAnnotationName().getValue(); String functionName = functionNode.getName().getValue(); if (MOCK_ANNOTATION_NAME.equals(annotationName)) { String[] vals = new String[2]; vals[0] = packageName; if (attachmentNode.getExpression() instanceof BLangRecordLiteral) { List<RecordLiteralNode.RecordField> attributes = ((BLangRecordLiteral) attachmentNode .getExpression()).getFields(); attributes.forEach(field -> { String name; BLangExpression valueExpr; if (field.isKeyValueField()) { BLangRecordLiteral.BLangRecordKeyValueField attributeNode = (BLangRecordLiteral.BLangRecordKeyValueField) field; name = attributeNode.getKey().toString(); valueExpr = attributeNode.getValue(); } else { BLangRecordLiteral.BLangRecordVarNameField varNameField = (BLangRecordLiteral.BLangRecordVarNameField) field; name = varNameField.variableName.value; valueExpr = varNameField; } String value = valueExpr.toString(); if (MODULE.equals(name)) { value = formatPackageName(value, parent); vals[0] = value; } else if (FUNCTION.equals(name)) { vals[1] = value; } }); if (vals[1].isEmpty()) { diagnosticLog.logDiagnostic(DiagnosticSeverity.ERROR, attachmentNode.getPosition(), "function name cannot be empty"); break; } PackageID functionToMockID = getPackageID(vals[0]); if (functionToMockID == null) { diagnosticLog.logDiagnostic(DiagnosticSeverity.ERROR, attachmentNode.getPosition(), "could not find module specified "); } BType functionToMockType = getFunctionType(packageEnvironmentMap, functionToMockID, vals[1]); BType mockFunctionType = getFunctionType(packageEnvironmentMap, parent.packageID, ((BLangFunction) 
functionNode).name.toString()); if (functionToMockType != null && mockFunctionType != null) { if (!typeChecker.isAssignable(mockFunctionType, functionToMockType)) { diagnosticLog.logDiagnostic(DiagnosticSeverity.ERROR, ((BLangFunction) functionNode).pos, "incompatible types: expected " + functionToMockType.toString() + " but found " + mockFunctionType.toString()); } } else { diagnosticLog.logDiagnostic(DiagnosticSeverity.ERROR, attachmentNode.getPosition(), "could not find functions in module"); } BLangTestablePackage bLangTestablePackage = (BLangTestablePackage) ((BLangFunction) functionNode).parent; bLangTestablePackage.addMockFunction(functionToMockID + MOCK_FN_DELIMITER + vals[1], functionName); } } } }
public void process(FunctionNode functionNode, List<AnnotationAttachmentNode> annotations) { BLangPackage parent = (BLangPackage) ((BLangFunction) functionNode).parent; String packageName = getPackageName(parent); annotations = annotations.stream().distinct().collect(Collectors.toList()); for (AnnotationAttachmentNode attachmentNode : annotations) { String annotationName = attachmentNode.getAnnotationName().getValue(); String functionName = functionNode.getName().getValue(); if (MOCK_ANNOTATION_NAME.equals(annotationName)) { String[] vals = new String[2]; vals[0] = packageName; vals[1] = ""; if (attachmentNode.getExpression() instanceof BLangRecordLiteral) { List<RecordLiteralNode.RecordField> attributes = ((BLangRecordLiteral) attachmentNode .getExpression()).getFields(); attributes.forEach(field -> { String name; BLangExpression valueExpr; if (field.isKeyValueField()) { BLangRecordLiteral.BLangRecordKeyValueField attributeNode = (BLangRecordLiteral.BLangRecordKeyValueField) field; name = attributeNode.getKey().toString(); valueExpr = attributeNode.getValue(); } else { BLangRecordLiteral.BLangRecordVarNameField varNameField = (BLangRecordLiteral.BLangRecordVarNameField) field; name = varNameField.variableName.value; valueExpr = varNameField; } String value = valueExpr.toString(); if (MODULE.equals(name)) { value = formatPackageName(value, parent); vals[0] = value; } else if (FUNCTION.equals(name)) { vals[1] = value; } }); if (vals[1].isEmpty()) { diagnosticLog.logDiagnostic(DiagnosticSeverity.ERROR, attachmentNode.getPosition(), "function name cannot be empty"); break; } PackageID functionToMockID = getPackageID(vals[0]); if (functionToMockID == null) { diagnosticLog.logDiagnostic(DiagnosticSeverity.ERROR, attachmentNode.getPosition(), "could not find module specified "); } BType functionToMockType = getFunctionType(packageEnvironmentMap, functionToMockID, vals[1]); BType mockFunctionType = getFunctionType(packageEnvironmentMap, parent.packageID, ((BLangFunction) 
functionNode).name.toString()); if (functionToMockType != null && mockFunctionType != null) { if (!typeChecker.isAssignable(mockFunctionType, functionToMockType)) { diagnosticLog.logDiagnostic(DiagnosticSeverity.ERROR, ((BLangFunction) functionNode).pos, "incompatible types: expected " + functionToMockType.toString() + " but found " + mockFunctionType.toString()); } } else { diagnosticLog.logDiagnostic(DiagnosticSeverity.ERROR, attachmentNode.getPosition(), "could not find functions in module"); } BLangTestablePackage bLangTestablePackage = (BLangTestablePackage) ((BLangFunction) functionNode).parent; bLangTestablePackage.addMockFunction(functionToMockID + MOCK_FN_DELIMITER + vals[1], functionName); } } } }
class MockAnnotationProcessor extends AbstractCompilerPlugin { private static final String MOCK_ANNOTATION_NAME = "Mock"; private static final String MODULE = "moduleName"; private static final String FUNCTION = "functionName"; private static final String MOCK_ANNOTATION_DELIMITER = " private static final String MOCK_FN_DELIMITER = "~"; private CompilerContext compilerContext; private DiagnosticLog diagnosticLog; private PackageCache packageCache; private Map<BPackageSymbol, SymbolEnv> packageEnvironmentMap; private BLangPackage parent; private SymbolResolver symbolResolver; private Types typeChecker; /** * this property is used as a work-around to initialize test suites only once for a package as Compiler * Annotation currently emits package import events too to the process method. */ @Override public void init(DiagnosticLog diagnosticLog) { this.diagnosticLog = diagnosticLog; this.packageEnvironmentMap = SymbolTable.getInstance(compilerContext).pkgEnvMap; this.packageCache = PackageCache.getInstance(compilerContext); this.symbolResolver = SymbolResolver.getInstance(compilerContext); this.typeChecker = Types.getInstance(compilerContext); } @Override public void setCompilerContext(CompilerContext context) { this.compilerContext = context; } @Override public void process(SimpleVariableNode simpleVariableNode, List<AnnotationAttachmentNode> annotations) { BLangPackage parent = (BLangPackage) ((BLangSimpleVariable) simpleVariableNode).parent; String packageName = getPackageName(parent); annotations = annotations.stream().distinct().collect(Collectors.toList()); for (AnnotationAttachmentNode attachmentNode : annotations) { String annotationName = attachmentNode.getAnnotationName().getValue(); if (MOCK_ANNOTATION_NAME.equals(annotationName)) { String type = ((BLangUserDefinedType) ((BLangSimpleVariable) simpleVariableNode).typeNode). 
typeName.getValue(); if (type.equals("MockFunction")) { String mockFnObjectName = simpleVariableNode.getName().getValue(); String[] annotationValues = new String[2]; annotationValues[0] = packageName; if (attachmentNode.getExpression().getKind() == NodeKind.RECORD_LITERAL_EXPR) { List<RecordLiteralNode.RecordField> fields = ((BLangRecordLiteral) attachmentNode.getExpression()).getFields(); setAnnotationValues(fields, annotationValues, attachmentNode, parent); PackageID functionToMockID = getPackageID(annotationValues[0]); validateFunctionName(annotationValues[1], functionToMockID, attachmentNode); BLangTestablePackage bLangTestablePackage = (BLangTestablePackage) ((BLangSimpleVariable) simpleVariableNode).parent; bLangTestablePackage.addMockFunction( functionToMockID + MOCK_ANNOTATION_DELIMITER + annotationValues[1], mockFnObjectName); } } else { diagnosticLog.logDiagnostic(DiagnosticSeverity.ERROR, attachmentNode.getPosition(), "Annotation can only be attached to a test:MockFunction object"); } } } } @Override /** * Iterate through each field and assign the annotation values for moduleName and functionName. 
* * @param fields list of fields * @param annotationValues Array of annotation values * @param attachmentNode AnnotationAttachmentNode * @param parent BLangPackage */ private void setAnnotationValues(List<RecordLiteralNode.RecordField> fields, String[] annotationValues, AnnotationAttachmentNode attachmentNode, BLangPackage parent) { fields.forEach(field -> { String name; BLangExpression valueExpr; if (field.isKeyValueField()) { BLangRecordLiteral.BLangRecordKeyValueField attributeNode = (BLangRecordLiteral.BLangRecordKeyValueField) field; name = attributeNode.getKey().toString(); valueExpr = attributeNode.getValue(); String value = valueExpr.toString(); if (MODULE.equals(name)) { value = formatPackageName(value, parent); annotationValues[0] = value; } else if (FUNCTION.equals(name)) { annotationValues[1] = value; } } else { diagnosticLog.logDiagnostic(DiagnosticSeverity.ERROR, attachmentNode.getPosition(), "Annotation fields must be key-value pairs"); } }); } /** * Returns a PackageID for the passed moduleName. * * @param moduleName Module name passed via function annotation * @return Module packageID */ private PackageID getPackageID(String moduleName) { if (packageCache.getSymbol(moduleName) != null) { return packageCache.getSymbol(moduleName).pkgID; } else { return null; } } /** * Formats the package name obtained from the mock annotation. * Checks for empty, '.', or single module names and replaces them. 
* Ballerina modules and fully qualified packages are simply returned * * @param value package name * @return formatted package name */ private String formatPackageName(String value, BLangPackage parent) { if (value.isEmpty() || value.equals(Names.DOT.value)) { value = parent.packageID.toString(); } else if (!value.contains(Names.ORG_NAME_SEPARATOR.value) && !value.contains(Names.VERSION_SEPARATOR.value)) { value = new PackageID(parent.packageID.orgName, new Name(value), parent.packageID.version).toString(); } return value; } /** * Validates the function name provided in the annotation. * * @param functionName Name of the function to mock * @param attachmentNode MockFunction object attachment node */ private void validateFunctionName(String functionName, PackageID functionToMockID, AnnotationAttachmentNode attachmentNode) { if (functionToMockID == null) { diagnosticLog.logDiagnostic(DiagnosticSeverity.ERROR, attachmentNode.getPosition(), "could not find module specified "); } else { if (functionName == null) { diagnosticLog.logDiagnostic(DiagnosticSeverity.ERROR, attachmentNode.getPosition(), "Function name cannot be empty"); } else { for (Map.Entry<BPackageSymbol, SymbolEnv> entry : this.packageEnvironmentMap.entrySet()) { if (entry.getKey().pkgID.equals(functionToMockID)) { if (entry.getValue().scope.entries.containsKey(new Name(functionName))) { return; } } } diagnosticLog.logDiagnostic(DiagnosticSeverity.ERROR, attachmentNode.getPosition(), "Function \'" + functionName + "\' cannot be found in the package \'" + functionToMockID.toString()); } } } /** * Get Package Name. * @param packageNode PackageNode instance * @return package name */ private String getPackageName(PackageNode packageNode) { BLangPackage bLangPackage = ((BLangPackage) packageNode); return bLangPackage.packageID.toString(); } /** * Get the function type by iterating through the packageEnvironmentMap. 
* * @param pkgEnvMap map of BPackageSymbol and its respective SymbolEnv * @param packageID Fully qualified package ID of the respective function * @param functionName Name of the function * @return Function type if found, null if not found */ private BType getFunctionType(Map<BPackageSymbol, SymbolEnv> pkgEnvMap, PackageID packageID, String functionName) { for (Map.Entry<BPackageSymbol, SymbolEnv> entry : pkgEnvMap.entrySet()) { if (entry.getKey().pkgID.equals(packageID)) { BSymbol symbol = symbolResolver.lookupSymbolInMainSpace(entry.getValue(), new Name(functionName)); if (!symbol.getType().toString().equals("other")) { return symbol.getType(); } } } return null; } }
class MockAnnotationProcessor extends AbstractCompilerPlugin { private static final String MOCK_ANNOTATION_NAME = "Mock"; private static final String MODULE = "moduleName"; private static final String FUNCTION = "functionName"; private static final String MOCK_ANNOTATION_DELIMITER = " private static final String MOCK_FN_DELIMITER = "~"; private CompilerContext compilerContext; private DiagnosticLog diagnosticLog; private PackageCache packageCache; private Map<BPackageSymbol, SymbolEnv> packageEnvironmentMap; private SymbolResolver symbolResolver; private Types typeChecker; /** * this property is used as a work-around to initialize test suites only once for a package as Compiler * Annotation currently emits package import events too to the process method. */ @Override public void init(DiagnosticLog diagnosticLog) { this.diagnosticLog = diagnosticLog; this.packageEnvironmentMap = SymbolTable.getInstance(compilerContext).pkgEnvMap; this.packageCache = PackageCache.getInstance(compilerContext); this.symbolResolver = SymbolResolver.getInstance(compilerContext); this.typeChecker = Types.getInstance(compilerContext); } @Override public void setCompilerContext(CompilerContext context) { this.compilerContext = context; } @Override public void process(SimpleVariableNode simpleVariableNode, List<AnnotationAttachmentNode> annotations) { BLangPackage parent = (BLangPackage) ((BLangSimpleVariable) simpleVariableNode).parent; String packageName = getPackageName(parent); annotations = annotations.stream().distinct().collect(Collectors.toList()); for (AnnotationAttachmentNode attachmentNode : annotations) { String annotationName = attachmentNode.getAnnotationName().getValue(); if (MOCK_ANNOTATION_NAME.equals(annotationName)) { String type = ((BLangUserDefinedType) ((BLangSimpleVariable) simpleVariableNode).typeNode). 
typeName.getValue(); if (type.equals("MockFunction")) { String mockFnObjectName = simpleVariableNode.getName().getValue(); String[] annotationValues = new String[2]; annotationValues[0] = packageName; if (attachmentNode.getExpression().getKind() == NodeKind.RECORD_LITERAL_EXPR) { List<RecordLiteralNode.RecordField> fields = ((BLangRecordLiteral) attachmentNode.getExpression()).getFields(); setAnnotationValues(fields, annotationValues, attachmentNode, parent); PackageID functionToMockID = getPackageID(annotationValues[0]); validateFunctionName(annotationValues[1], functionToMockID, attachmentNode); BLangTestablePackage bLangTestablePackage = (BLangTestablePackage) ((BLangSimpleVariable) simpleVariableNode).parent; bLangTestablePackage.addMockFunction( functionToMockID + MOCK_ANNOTATION_DELIMITER + annotationValues[1], mockFnObjectName); } } else { diagnosticLog.logDiagnostic(DiagnosticSeverity.ERROR, attachmentNode.getPosition(), "Annotation can only be attached to a test:MockFunction object"); } } } } @Override /** * Iterate through each field and assign the annotation values for moduleName and functionName. 
* * @param fields list of fields * @param annotationValues Array of annotation values * @param attachmentNode AnnotationAttachmentNode * @param parent BLangPackage */ private void setAnnotationValues(List<RecordLiteralNode.RecordField> fields, String[] annotationValues, AnnotationAttachmentNode attachmentNode, BLangPackage parent) { fields.forEach(field -> { String name; BLangExpression valueExpr; if (field.isKeyValueField()) { BLangRecordLiteral.BLangRecordKeyValueField attributeNode = (BLangRecordLiteral.BLangRecordKeyValueField) field; name = attributeNode.getKey().toString(); valueExpr = attributeNode.getValue(); String value = valueExpr.toString(); if (MODULE.equals(name)) { value = formatPackageName(value, parent); annotationValues[0] = value; } else if (FUNCTION.equals(name)) { annotationValues[1] = value; } } else { diagnosticLog.logDiagnostic(DiagnosticSeverity.ERROR, attachmentNode.getPosition(), "Annotation fields must be key-value pairs"); } }); } /** * Returns a PackageID for the passed moduleName. * * @param moduleName Module name passed via function annotation * @return Module packageID */ private PackageID getPackageID(String moduleName) { if (packageCache.getSymbol(moduleName) != null) { return packageCache.getSymbol(moduleName).pkgID; } else { return null; } } /** * Formats the package name obtained from the mock annotation. * Checks for empty, '.', or single module names and replaces them. 
* Ballerina modules and fully qualified packages are simply returned * * @param value package name * @return formatted package name */ private String formatPackageName(String value, BLangPackage parent) { if (value.isEmpty() || value.equals(Names.DOT.value)) { value = parent.packageID.toString(); } else if (!value.contains(Names.ORG_NAME_SEPARATOR.value) && !value.contains(Names.VERSION_SEPARATOR.value)) { value = new PackageID(parent.packageID.orgName, new Name(value), parent.packageID.version).toString(); } return value; } /** * Validates the function name provided in the annotation. * * @param functionName Name of the function to mock * @param attachmentNode MockFunction object attachment node */ private void validateFunctionName(String functionName, PackageID functionToMockID, AnnotationAttachmentNode attachmentNode) { if (functionToMockID == null) { diagnosticLog.logDiagnostic(DiagnosticSeverity.ERROR, attachmentNode.getPosition(), "could not find module specified "); } else { if (functionName == null) { diagnosticLog.logDiagnostic(DiagnosticSeverity.ERROR, attachmentNode.getPosition(), "Function name cannot be empty"); } else { for (Map.Entry<BPackageSymbol, SymbolEnv> entry : this.packageEnvironmentMap.entrySet()) { if (entry.getKey().pkgID.equals(functionToMockID)) { if (entry.getValue().scope.entries.containsKey(new Name(functionName))) { return; } } } diagnosticLog.logDiagnostic(DiagnosticSeverity.ERROR, attachmentNode.getPosition(), "Function \'" + functionName + "\' cannot be found in the package \'" + functionToMockID.toString()); } } } /** * Get Package Name. * @param packageNode PackageNode instance * @return package name */ private String getPackageName(PackageNode packageNode) { BLangPackage bLangPackage = ((BLangPackage) packageNode); return bLangPackage.packageID.toString(); } /** * Get the function type by iterating through the packageEnvironmentMap. 
* * @param pkgEnvMap map of BPackageSymbol and its respective SymbolEnv * @param packageID Fully qualified package ID of the respective function * @param functionName Name of the function * @return Function type if found, null if not found */ private BType getFunctionType(Map<BPackageSymbol, SymbolEnv> pkgEnvMap, PackageID packageID, String functionName) { for (Map.Entry<BPackageSymbol, SymbolEnv> entry : pkgEnvMap.entrySet()) { if (entry.getKey().pkgID.equals(packageID)) { BSymbol symbol = symbolResolver.lookupSymbolInMainSpace(entry.getValue(), new Name(functionName)); if (!symbol.getType().toString().equals("other")) { return symbol.getType(); } } } return null; } }
@maxandersen No, this PR will affect neither `http://127.0.0.1:8080/q/dev/` nor `0.0.0.0:8080/q/dev/` since DevUI pages are not implemented as an SPA using XHR or Fetch scripts, so `Origin` will not even be produced by the browser. For the next version of DevUI the Origin check may need to be adjusted.
public void handle(RoutingContext event) { HttpServerRequest request = event.request(); HttpServerResponse response = event.response(); String origin = request.getHeader(HttpHeaders.ORIGIN); if (origin == null) { event.next(); } else { if (!origin.contains(LOCAL_HOST)) { response.end(); } else { super.handle(event); } } }
response.end();
public void handle(RoutingContext event) { HttpServerRequest request = event.request(); HttpServerResponse response = event.response(); String origin = request.getHeader(HttpHeaders.ORIGIN); if (origin == null) { corsFilter().handle(event); } else { if (origin.startsWith(HTTP_LOCAL_HOST) || origin.startsWith(HTTPS_LOCAL_HOST) || origin.startsWith(HTTP_LOCAL_HOST_IP) || origin.startsWith(HTTPS_LOCAL_HOST_IP)) { corsFilter().handle(event); } else { LOG.errorf("Only localhost origin is allowed, but Origin header value is: %s", origin); response.setStatusCode(403); response.setStatusMessage("CORS Rejected - Invalid origin"); response.end(); } } }
class DevConsoleCORSFilter extends CORSFilter { private static final String LOCAL_HOST = "localhost"; private static final String HTTP_LOCAL_HOST = "http: private static final String HTTPS_LOCAL_HOST = "https: public DevConsoleCORSFilter() { super(corsConfig()); } private static CORSConfig corsConfig() { CORSConfig config = new CORSConfig(); config.origins = Optional.of(List.of(HTTP_LOCAL_HOST, HTTPS_LOCAL_HOST)); return config; } @Override }
// NOTE(review): dataset context excerpt of the post-fix DevConsoleCORSFilter,
// collapsed onto one physical line by the extraction tooling. The four
// HTTP(S)_LOCAL_HOST(_IP) string literals are truncated (presumably
// "http://localhost", "https://localhost", "http://127.0.0.1",
// "https://127.0.0.1" — TODO confirm against the upstream source), and the
// trailing @Override marks where the focal handle(RoutingContext) method was
// excised. corsFilter() builds a per-request CORSFilter whose allowed origins
// include the configured HTTP/HTTPS ports. Kept verbatim.
class DevConsoleCORSFilter implements Handler<RoutingContext> { private static final Logger LOG = Logger.getLogger(DevConsoleCORSFilter.class); private static final String HTTP_PORT_CONFIG_PROP = "quarkus.http.port"; private static final String HTTPS_PORT_CONFIG_PROP = "quarkus.http.ssl-port"; private static final String LOCAL_HOST = "localhost"; private static final String LOCAL_HOST_IP = "127.0.0.1"; private static final String HTTP_LOCAL_HOST = "http: private static final String HTTPS_LOCAL_HOST = "https: private static final String HTTP_LOCAL_HOST_IP = "http: private static final String HTTPS_LOCAL_HOST_IP = "https: public DevConsoleCORSFilter() { } private static CORSFilter corsFilter() { int httpPort = ConfigProvider.getConfig().getValue(HTTP_PORT_CONFIG_PROP, int.class); int httpsPort = ConfigProvider.getConfig().getValue(HTTPS_PORT_CONFIG_PROP, int.class); CORSConfig config = new CORSConfig(); config.origins = Optional.of(List.of( HTTP_LOCAL_HOST + ":" + httpPort, HTTP_LOCAL_HOST_IP + ":" + httpPort, HTTPS_LOCAL_HOST + ":" + httpsPort, HTTPS_LOCAL_HOST_IP + ":" + httpsPort)); return new CORSFilter(config); } @Override }
Why not simply use an `AsyncCache`? Then you can write this as, ```java boolean[] isCurrentThreadComputation = { false }; var future = cache.get(key, (k, executor) -> { isCurrentThreadComputation[0] = true; return CompletableFuture.supplyAsync(() -> toCacheValue(valueLoader.call()), executor); }); try { Object value = isCurrentThreadComputation[0] ? future.get() : future.get(lockTimeout, TimeUnit.MILLISECONDS); return fromCacheValue(value); } catch (TimeoutException e) { return valueLoader.call(); } ```
/**
 * Returns the cached value for {@code key}, computing it with
 * {@code valueLoader} on a miss. When {@code lockTimeout > 0}, a caller
 * waiting on a computation started by another thread gives up after the
 * timeout and loads the value directly instead.
 *
 * @param key         the cache key
 * @param valueLoader invoked to compute the value on a cache miss
 * @param lockTimeout maximum wait in milliseconds for a computation owned by
 *                    another thread; {@code <= 0} means wait without a timeout
 * @return the cached or freshly loaded value
 * @throws Exception if the value computation or the wait on it fails
 */
public Object get(Object key, Callable<Object> valueLoader, long lockTimeout) throws Exception {
    if (lockTimeout <= 0) {
        // No timeout requested: plain synchronous lookup/compute.
        return fromCacheValue(cache.get(key, new MappingFunction(valueLoader)));
    }
    /*
     * If the current key is not already associated with a value in the Caffeine cache, there's no way to know if the
     * current thread or another one is responsible for computing the missing value. While we want to wait at most for the
     * lock timeout delay if the value is being computed by another thread, we also want to ignore the timeout if the value
     * is being computed from the current thread. The following variable will be used to make sure we don't interrupt a
     * current-thread computation.
     */
    AtomicBoolean isCurrentThreadComputation = new AtomicBoolean();
    // NOTE(review): every timed lookup dispatches to the common ForkJoinPool even
    // when this call ends up owning the computation; Caffeine's AsyncCache
    // (cache.get(key, (k, executor) -> ...)) would avoid the extra hop — see the
    // reviewer's suggestion. TODO confirm before relying on current threading.
    CompletableFuture<Object> future = CompletableFuture.supplyAsync(() -> {
        return fromCacheValue(cache.get(key, new MappingFunction(valueLoader, isCurrentThreadComputation)));
    });
    try {
        return future.get(lockTimeout, TimeUnit.MILLISECONDS);
    } catch (TimeoutException e) {
        if (!isCurrentThreadComputation.get()) {
            // Another thread owns the computation: fall back to a direct load.
            return valueLoader.call();
        }
    }
    // Our own computation outlived the timed wait above: keep waiting for it.
    return future.get();
}
return fromCacheValue(cache.get(key, new MappingFunction(valueLoader, isCurrentThreadComputation)));
/**
 * Returns the cached value for {@code key}, computing it with
 * {@code valueLoader} on a miss. When {@code lockTimeout > 0} and the
 * computation was started by another caller, this method waits at most
 * {@code lockTimeout} milliseconds before falling back to a direct load;
 * a computation started by the current call is always waited for in full.
 *
 * @param key         the cache key
 * @param valueLoader invoked to compute the value on a cache miss
 * @param lockTimeout maximum wait in milliseconds for another caller's
 *                    computation; {@code <= 0} means wait without a timeout
 * @return the cached or freshly loaded value
 * @throws Exception if the value computation or the wait on it fails
 */
public Object get(Object key, Callable<Object> valueLoader, long lockTimeout) throws Exception {
    if (lockTimeout <= 0) {
        // No timeout requested: plain synchronous lookup/compute.
        return fromCacheValue(cache.synchronous().get(key, k -> new MappingSupplier(valueLoader).get()));
    }
    // The mapping function below only runs when this call is the one that
    // starts the missing-value computation, so the flag tells us who owns it.
    boolean[] startedHere = { false };
    CompletableFuture<Object> future = cache.get(key, (k, executor) -> {
        startedHere[0] = true;
        return CompletableFuture.supplyAsync(new MappingSupplier(valueLoader), executor);
    });
    if (startedHere[0]) {
        // We started the computation ourselves: never time out on our own work.
        return fromCacheValue(future.get());
    }
    try {
        return fromCacheValue(future.get(lockTimeout, TimeUnit.MILLISECONDS));
    } catch (TimeoutException e) {
        // Another caller is still computing: load the value directly instead.
        return valueLoader.call();
    }
}
// NOTE(review): dataset context excerpt of the pre-fix CaffeineCache (synchronous
// Caffeine cache, same-thread executor via builder.executor(Runnable::run)),
// collapsed onto long physical lines by the extraction tooling. The duplicated
// "@Override @Override" marks where the focal get(key, valueLoader, lockTimeout)
// method was excised, so this excerpt is not compilable on its own. Kept verbatim;
// see the separate method_body/method_body_after fields for the reviewed change.
class CaffeineCache implements Cache { private com.github.benmanes.caffeine.cache.Cache<Object, Object> cache; private String name; private Integer initialCapacity; private Long maximumSize; private Duration expireAfterWrite; private Duration expireAfterAccess; public CaffeineCache(CaffeineCacheInfo cacheInfo) { this.name = cacheInfo.name; Caffeine<Object, Object> builder = Caffeine.newBuilder(); builder.executor(Runnable::run); if (cacheInfo.initialCapacity != null) { this.initialCapacity = cacheInfo.initialCapacity; builder.initialCapacity(cacheInfo.initialCapacity); } if (cacheInfo.maximumSize != null) { this.maximumSize = cacheInfo.maximumSize; builder.maximumSize(cacheInfo.maximumSize); } if (cacheInfo.expireAfterWrite != null) { this.expireAfterWrite = cacheInfo.expireAfterWrite; builder.expireAfterWrite(cacheInfo.expireAfterWrite); } if (cacheInfo.expireAfterAccess != null) { this.expireAfterAccess = cacheInfo.expireAfterAccess; builder.expireAfterAccess(cacheInfo.expireAfterAccess); } cache = builder.build(); } @Override @Override public void invalidate(Object key) { cache.invalidate(key); } @Override public void invalidateAll() { cache.invalidateAll(); } @Override public String getName() { return name; } public Integer getInitialCapacity() { return initialCapacity; } public Long getMaximumSize() { return maximumSize; } public Duration getExpireAfterWrite() { return expireAfterWrite; } public Duration getExpireAfterAccess() { return expireAfterAccess; } private static class MappingFunction implements Function<Object, Object> { private final Callable<?> valueLoader; private final AtomicBoolean isCurrentThreadComputation; public MappingFunction(Callable<?> valueLoader) { this(valueLoader, null); } public MappingFunction(Callable<?> valueLoader, AtomicBoolean isCurrentThreadComputation) { this.valueLoader = valueLoader; this.isCurrentThreadComputation = isCurrentThreadComputation; } @Override public Object apply(Object unusedArg) { if
(isCurrentThreadComputation != null) { isCurrentThreadComputation.set(true); } try { return toCacheValue(valueLoader.call()); } catch (RuntimeException e) { throw e; } catch (Exception e) { throw new RuntimeException(e); } } } }
// NOTE(review): dataset context excerpt of the post-fix CaffeineCache (rewritten
// on top of Caffeine's AsyncCache via builder.buildAsync()), collapsed onto one
// physical line by the extraction tooling. The GraalVM issue URL in the
// workaround comment is truncated ("https:" — TODO confirm against the upstream
// source), and the focal get(key, valueLoader, lockTimeout) method was excised
// between the constructor and invalidate(Object). Kept verbatim.
class CaffeineCache { private AsyncCache<Object, Object> cache; private String name; private Integer initialCapacity; private Long maximumSize; private Duration expireAfterWrite; private Duration expireAfterAccess; public CaffeineCache(CaffeineCacheInfo cacheInfo) { this.name = cacheInfo.name; Caffeine<Object, Object> builder = Caffeine.newBuilder(); /* * The following line is a workaround for a GraalVM issue: https: * TODO: Remove it as soon as a Quarkus release depends on GraalVM 19.3.0 or higher. */ builder.executor(task -> ForkJoinPool.commonPool().execute(task)); if (cacheInfo.initialCapacity != null) { this.initialCapacity = cacheInfo.initialCapacity; builder.initialCapacity(cacheInfo.initialCapacity); } if (cacheInfo.maximumSize != null) { this.maximumSize = cacheInfo.maximumSize; builder.maximumSize(cacheInfo.maximumSize); } if (cacheInfo.expireAfterWrite != null) { this.expireAfterWrite = cacheInfo.expireAfterWrite; builder.expireAfterWrite(cacheInfo.expireAfterWrite); } if (cacheInfo.expireAfterAccess != null) { this.expireAfterAccess = cacheInfo.expireAfterAccess; builder.expireAfterAccess(cacheInfo.expireAfterAccess); } cache = builder.buildAsync(); } public void invalidate(Object key) { cache.synchronous().invalidate(key); } public void invalidateAll() { cache.synchronous().invalidateAll(); } public String getName() { return name; } public Integer getInitialCapacity() { return initialCapacity; } public Long getMaximumSize() { return maximumSize; } public Duration getExpireAfterWrite() { return expireAfterWrite; } public Duration getExpireAfterAccess() { return expireAfterAccess; } private static class MappingSupplier implements Supplier<Object> { private final Callable<?> valueLoader; public MappingSupplier(Callable<?> valueLoader) { this.valueLoader = valueLoader; } @Override public Object get() { try { return toCacheValue(valueLoader.call()); } catch (RuntimeException e) { throw e; } catch (Exception e) { throw new RuntimeException(e); } } } }
stopServices may throw an exception on failure, but we want to ignore such a failure, I think. @hmusum Do we also want to ignore a stopServices() failure when the node is active? I think so. Taking down the container may be considered equivalent to rebooting the node, in which case stopping the services is first tried and then forced (KILL) as part of the reboot.
/**
 * Removes this node's container when the node-repo state or the wanted Docker
 * image says it should no longer run.
 *
 * @param nodeSpec the current node spec from the node repository
 * @return the still-valid existing container, or empty when there is no
 *         container (or it was just removed)
 */
private Optional<Container> removeContainerIfNeeded(ContainerNodeSpec nodeSpec) {
    Optional<Container> existingContainer = getContainer();
    if (!existingContainer.isPresent()) return Optional.empty();

    Optional<String> removeReason = shouldRemoveContainer(nodeSpec, existingContainer.get());
    if (removeReason.isPresent()) {
        logger.info("Will remove container " + existingContainer.get() + ": " + removeReason.get());

        if (existingContainer.get().state.isRunning()) {
            if (nodeSpec.nodeState == Node.State.active) {
                orchestratorSuspendNode();
            }
            // Stopping services is best effort: removing the container is
            // comparable to rebooting the node, so a failure to stop cleanly
            // must not abort the removal below.
            try {
                stopServices();
            } catch (Exception e) {
                logger.info("Failed stopping services, ignoring", e);
            }
        }
        vespaVersion = Optional.empty();
        dockerOperations.removeContainer(existingContainer.get());
        metricReceiver.unsetMetricsForContainer(hostname);
        containerState = ABSENT;
        return Optional.empty();
    }
    return existingContainer;
}
stopServices();
/**
 * Removes this node's container when the node-repo state or the wanted Docker
 * image says it should no longer run.
 *
 * @param nodeSpec the current node spec from the node repository
 * @return the still-valid existing container, or empty when there is no
 *         container (or it was just removed)
 */
private Optional<Container> removeContainerIfNeeded(ContainerNodeSpec nodeSpec) {
    Optional<Container> existing = getContainer();
    if (!existing.isPresent()) {
        return Optional.empty();
    }
    Container container = existing.get();

    Optional<String> removeReason = shouldRemoveContainer(nodeSpec, container);
    if (!removeReason.isPresent()) {
        // Container is still wanted: keep it.
        return existing;
    }

    logger.info("Will remove container " + container + ": " + removeReason.get());
    if (container.state.isRunning()) {
        if (nodeSpec.nodeState == Node.State.active) {
            orchestratorSuspendNode();
        }
        // Best effort only: a failure to stop services cleanly must not
        // prevent the container from being removed.
        try {
            stopServices();
        } catch (Exception e) {
            logger.info("Failed stopping services, ignoring", e);
        }
    }
    vespaVersion = Optional.empty();
    dockerOperations.removeContainer(container);
    metricReceiver.unsetMetricsForContainer(hostname);
    containerState = ABSENT;
    return Optional.empty();
}
// NOTE(review): dataset context excerpt of the "before" NodeAgentImpl, collapsed
// onto one physical line per record field by the extraction tooling. The focal
// method removeContainerIfNeeded(ContainerNodeSpec) has been excised from this
// excerpt, so it is not compilable on its own. Kept verbatim; see the separate
// method_body/method_body_after fields for the reviewed change.
class NodeAgentImpl implements NodeAgent { private final AtomicBoolean terminated = new AtomicBoolean(false); private boolean isFrozen = true; private boolean wantFrozen = false; private boolean workToDoNow = true; private final Object monitor = new Object(); private final PrefixLogger logger; private DockerImage imageBeingDownloaded = null; private final String hostname; private final ContainerName containerName; private final NodeRepository nodeRepository; private final Orchestrator orchestrator; private final DockerOperations dockerOperations; private final Optional<StorageMaintainer> storageMaintainer; private final MetricReceiverWrapper metricReceiver; private final Environment environment; private final Clock clock; private final Optional<AclMaintainer> aclMaintainer; private final SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); private final LinkedList<String> debugMessages = new LinkedList<>(); private long delaysBetweenEachConvergeMillis = 30_000; private int numberOfUnhandledException = 0; private Instant lastConverge; private Thread loopThread; enum ContainerState { ABSENT, RUNNING_HOWEVER_RESUME_SCRIPT_NOT_RUN, RUNNING } private ContainerState containerState = ABSENT; private NodeAttributes lastAttributesSet = null; private ContainerNodeSpec lastNodeSpec = null; private CpuUsageReporter lastCpuMetric; private Optional<String> vespaVersion = Optional.empty(); public NodeAgentImpl( final String hostName, final NodeRepository nodeRepository, final Orchestrator orchestrator, final DockerOperations dockerOperations, final Optional<StorageMaintainer> storageMaintainer, final MetricReceiverWrapper metricReceiver, final Environment environment, final Clock clock, final Optional<AclMaintainer> aclMaintainer) { this.nodeRepository = nodeRepository; this.orchestrator = orchestrator; this.hostname = hostName; this.containerName = ContainerName.fromHostname(hostName); this.dockerOperations = dockerOperations; this.storageMaintainer = 
storageMaintainer; this.logger = PrefixLogger.getNodeAgentLogger(NodeAgentImpl.class, containerName); this.metricReceiver = metricReceiver; this.environment = environment; this.clock = clock; this.aclMaintainer = aclMaintainer; this.lastConverge = clock.instant(); lastCpuMetric = new CpuUsageReporter(clock.instant()); dockerOperations.getContainer(containerName) .ifPresent(container -> { if (container.state.isRunning()) { vespaVersion = dockerOperations.getVespaVersion(container.name); lastCpuMetric = new CpuUsageReporter(container.created); } containerState = RUNNING_HOWEVER_RESUME_SCRIPT_NOT_RUN; }); } @Override public boolean setFrozen(boolean frozen) { synchronized (monitor) { if (wantFrozen != frozen) { wantFrozen = frozen; addDebugMessage(wantFrozen ? "Freezing" : "Unfreezing"); signalWorkToBeDone(); } return isFrozen == frozen; } } private void addDebugMessage(String message) { synchronized (debugMessages) { while (debugMessages.size() > 1000) { debugMessages.pop(); } logger.debug(message); debugMessages.add("[" + sdf.format(new Date()) + "] " + message); } } @Override public Map<String, Object> debugInfo() { Map<String, Object> debug = new LinkedHashMap<>(); debug.put("Hostname", hostname); debug.put("isFrozen", isFrozen); debug.put("wantFrozen", wantFrozen); debug.put("terminated", terminated); debug.put("workToDoNow", workToDoNow); synchronized (debugMessages) { debug.put("History", new LinkedList<>(debugMessages)); } debug.put("Node repo state", lastNodeSpec.nodeState.name()); return debug; } @Override public void start(int intervalMillis) { addDebugMessage("Starting with interval " + intervalMillis + "ms"); delaysBetweenEachConvergeMillis = intervalMillis; if (loopThread != null) { throw new RuntimeException("Can not restart a node agent."); } loopThread = new Thread(() -> { while (! 
terminated.get()) tick(); }); loopThread.setName("tick-" + hostname); loopThread.start(); } @Override public void stop() { addDebugMessage("Stopping"); if (!terminated.compareAndSet(false, true)) { throw new RuntimeException("Can not re-stop a node agent."); } signalWorkToBeDone(); try { loopThread.join(10000); if (loopThread.isAlive()) { logger.error("Could not stop host thread " + hostname); } } catch (InterruptedException e1) { logger.error("Interrupted; Could not stop host thread " + hostname); } } private void experimentalWriteFile(final ContainerNodeSpec nodeSpec) { try { FilebeatConfigProvider filebeatConfigProvider = new FilebeatConfigProvider(environment); Optional<String> config = filebeatConfigProvider.getConfig(nodeSpec); if (! config.isPresent()) { logger.error("Was not able to generate a config for filebeat, ignoring filebeat file creation." + nodeSpec.toString()); return; } Path filebeatPath = environment.pathInNodeAdminFromPathInNode(containerName, "/etc/filebeat/filebeat.yml"); Files.write(filebeatPath, config.get().getBytes()); logger.info("Wrote filebeat config."); } catch (Throwable t) { logger.error("Failed writing filebeat config; " + nodeSpec, t); } } private void runLocalResumeScriptIfNeeded(final ContainerNodeSpec nodeSpec) { if (containerState != RUNNING_HOWEVER_RESUME_SCRIPT_NOT_RUN) { return; } experimentalWriteFile(nodeSpec); addDebugMessage("Starting optional node program resume command"); logger.info("Starting optional node program resume command"); dockerOperations.resumeNode(containerName); containerState = RUNNING; } private void updateNodeRepoAndMarkNodeAsReady(ContainerNodeSpec nodeSpec) { publishStateToNodeRepoIfChanged( new NodeAttributes() .withRestartGeneration(nodeSpec.wantedRestartGeneration.orElse(null)) .withRebootGeneration(nodeSpec.wantedRebootGeneration.orElse(0L)) .withDockerImage(new DockerImage("")) .withVespaVersion("")); nodeRepository.markAsReady(nodeSpec.hostname); } private void 
updateNodeRepoWithCurrentAttributes(final ContainerNodeSpec nodeSpec) { final NodeAttributes nodeAttributes = new NodeAttributes() .withRestartGeneration(nodeSpec.wantedRestartGeneration.orElse(null)) .withRebootGeneration(nodeSpec.wantedRebootGeneration.orElse(0L)) .withDockerImage(nodeSpec.wantedDockerImage.orElse(new DockerImage(""))) .withVespaVersion(vespaVersion.orElse("")); publishStateToNodeRepoIfChanged(nodeAttributes); } private void publishStateToNodeRepoIfChanged(NodeAttributes currentAttributes) { if (!currentAttributes.equals(lastAttributesSet)) { logger.info("Publishing new set of attributes to node repo: " + lastAttributesSet + " -> " + currentAttributes); addDebugMessage("Publishing new set of attributes to node repo: {" + lastAttributesSet + "} -> {" + currentAttributes + "}"); nodeRepository.updateNodeAttributes(hostname, currentAttributes); lastAttributesSet = currentAttributes; } } private void startContainerIfNeeded(final ContainerNodeSpec nodeSpec) { if (! getContainer().isPresent()) { aclMaintainer.ifPresent(AclMaintainer::run); dockerOperations.startContainer(containerName, nodeSpec); metricReceiver.unsetMetricsForContainer(hostname); lastCpuMetric = new CpuUsageReporter(clock.instant()); vespaVersion = dockerOperations.getVespaVersion(containerName); configureContainerMetrics(nodeSpec); addDebugMessage("startContainerIfNeeded: containerState " + containerState + " -> " + RUNNING_HOWEVER_RESUME_SCRIPT_NOT_RUN); containerState = RUNNING_HOWEVER_RESUME_SCRIPT_NOT_RUN; } } private void removeContainerIfNeededUpdateContainerState(ContainerNodeSpec nodeSpec) { removeContainerIfNeeded(nodeSpec).ifPresent(existingContainer -> shouldRestartServices(nodeSpec).ifPresent(restartReason -> { logger.info("Will restart services for container " + existingContainer + ": " + restartReason); restartServices(nodeSpec, existingContainer); })); } private Optional<String> shouldRestartServices(ContainerNodeSpec nodeSpec) { if ( ! 
nodeSpec.wantedRestartGeneration.isPresent()) return Optional.empty(); if (! nodeSpec.currentRestartGeneration.isPresent() || nodeSpec.currentRestartGeneration.get() < nodeSpec.wantedRestartGeneration.get()) { return Optional.of("Restart requested - wanted restart generation has been bumped: " + nodeSpec.currentRestartGeneration.get() + " -> " + nodeSpec.wantedRestartGeneration.get()); } return Optional.empty(); } private void restartServices(ContainerNodeSpec nodeSpec, Container existingContainer) { if (existingContainer.state.isRunning() && nodeSpec.nodeState == Node.State.active) { ContainerName containerName = existingContainer.name; logger.info("Restarting services for " + containerName); orchestratorSuspendNode(); dockerOperations.restartVespaOnNode(containerName); } } @Override public void stopServices() { logger.info("Stopping services for " + containerName); dockerOperations.trySuspendNode(containerName); dockerOperations.stopServicesOnNode(containerName); } private Optional<String> shouldRemoveContainer(ContainerNodeSpec nodeSpec, Container existingContainer) { final Node.State nodeState = nodeSpec.nodeState; if (nodeState == Node.State.dirty || nodeState == Node.State.provisioned) { return Optional.of("Node in state " + nodeState + ", container should no longer be running"); } if (nodeSpec.wantedDockerImage.isPresent() && !nodeSpec.wantedDockerImage.get().equals(existingContainer.image)) { return Optional.of("The node is supposed to run a new Docker image: " + existingContainer + " -> " + nodeSpec.wantedDockerImage.get()); } if (!existingContainer.state.isRunning()) { return Optional.of("Container no longer running"); } return Optional.empty(); } private void scheduleDownLoadIfNeeded(ContainerNodeSpec nodeSpec) { if (dockerOperations.shouldScheduleDownloadOfImage(nodeSpec.wantedDockerImage.get())) { if (nodeSpec.wantedDockerImage.get().equals(imageBeingDownloaded)) { return; } imageBeingDownloaded = nodeSpec.wantedDockerImage.get(); 
dockerOperations.scheduleDownloadOfImage(containerName, nodeSpec, this::signalWorkToBeDone); } else if (imageBeingDownloaded != null) { imageBeingDownloaded = null; } } private void signalWorkToBeDone() { synchronized (monitor) { if (! workToDoNow) { workToDoNow = true; addDebugMessage("Signaling work to be done"); monitor.notifyAll(); } } } void tick() { boolean isFrozenCopy; synchronized (monitor) { while (! workToDoNow) { long remainder = delaysBetweenEachConvergeMillis - Duration.between(lastConverge, clock.instant()).toMillis(); if (remainder > 0) { try { monitor.wait(remainder); } catch (InterruptedException e) { logger.error("Interrupted, but ignoring this: " + hostname); } } else break; } lastConverge = clock.instant(); workToDoNow = false; if (isFrozen != wantFrozen) { isFrozen = wantFrozen; } isFrozenCopy = isFrozen; } if (isFrozenCopy) { addDebugMessage("tick: isFrozen"); } else { try { converge(); } catch (OrchestratorException e) { logger.info(e.getMessage()); addDebugMessage(e.getMessage()); } catch (Exception e) { numberOfUnhandledException++; logger.error("Unhandled exception, ignoring.", e); addDebugMessage(e.getMessage()); } catch (Throwable t) { logger.error("Unhandled throwable, taking down system.", t); System.exit(234); } } } void converge() { final ContainerNodeSpec nodeSpec = nodeRepository.getContainerNodeSpec(hostname) .orElseThrow(() -> new IllegalStateException(String.format("Node '%s' missing from node repository.", hostname))); if (!nodeSpec.equals(lastNodeSpec)) { addDebugMessage("Loading new node spec: " + nodeSpec.toString()); lastNodeSpec = nodeSpec; metricReceiver.unsetMetricsForContainer(hostname); } switch (nodeSpec.nodeState) { case ready: case reserved: case parked: case failed: removeContainerIfNeededUpdateContainerState(nodeSpec); updateNodeRepoWithCurrentAttributes(nodeSpec); break; case active: storageMaintainer.ifPresent(maintainer -> { maintainer.removeOldFilesFromNode(containerName); 
maintainer.handleCoreDumpsForContainer(containerName, nodeSpec, environment); }); scheduleDownLoadIfNeeded(nodeSpec); if (imageBeingDownloaded != null) { addDebugMessage("Waiting for image to download " + imageBeingDownloaded.asString()); return; } removeContainerIfNeededUpdateContainerState(nodeSpec); startContainerIfNeeded(nodeSpec); runLocalResumeScriptIfNeeded(nodeSpec); updateNodeRepoWithCurrentAttributes(nodeSpec); logger.info("Call resume against Orchestrator"); orchestrator.resume(hostname); break; case inactive: storageMaintainer.ifPresent(maintainer -> maintainer.removeOldFilesFromNode(containerName)); removeContainerIfNeededUpdateContainerState(nodeSpec); updateNodeRepoWithCurrentAttributes(nodeSpec); break; case provisioned: nodeRepository.markAsDirty(hostname); break; case dirty: storageMaintainer.ifPresent(maintainer -> maintainer.removeOldFilesFromNode(containerName)); removeContainerIfNeededUpdateContainerState(nodeSpec); logger.info("State is " + nodeSpec.nodeState + ", will delete application storage and mark node as ready"); storageMaintainer.ifPresent(maintainer -> maintainer.archiveNodeData(containerName)); updateNodeRepoAndMarkNodeAsReady(nodeSpec); break; default: throw new RuntimeException("UNKNOWN STATE " + nodeSpec.nodeState.name()); } } @SuppressWarnings("unchecked") public void updateContainerNodeMetrics(int numAllocatedContainersOnHost) { final ContainerNodeSpec nodeSpec = lastNodeSpec; if (nodeSpec == null) return; Dimensions.Builder dimensionsBuilder = new Dimensions.Builder() .add("host", hostname) .add("role", "tenants") .add("flavor", nodeSpec.nodeFlavor) .add("state", nodeSpec.nodeState.toString()) .add("zone", environment.getZone()) .add("parentHostname", environment.getParentHostHostname()); vespaVersion.ifPresent(version -> dimensionsBuilder.add("vespaVersion", version)); nodeSpec.owner.ifPresent(owner -> dimensionsBuilder .add("tenantName", owner.tenant) .add("applicationName", owner.application) .add("instanceName", 
owner.instance) .add("applicationId", owner.tenant + "." + owner.application + "." + owner.instance) .add("app", owner.application + "." + owner.instance)); nodeSpec.membership.ifPresent(membership -> dimensionsBuilder .add("clustertype", membership.clusterType) .add("clusterid", membership.clusterId)); Dimensions dimensions = dimensionsBuilder.build(); metricReceiver.declareGauge(MetricReceiverWrapper.APPLICATION_DOCKER, dimensions, "node.alive").sample(1); if (containerState == ABSENT) return; Optional<Docker.ContainerStats> containerStats = dockerOperations.getContainerStats(containerName); if ( ! containerStats.isPresent()) return; Docker.ContainerStats stats = containerStats.get(); long currentCpuContainerTotalTime = ((Number) ((Map) stats.getCpuStats().get("cpu_usage")).get("total_usage")).longValue(); long currentCpuSystemTotalTime = ((Number) stats.getCpuStats().get("system_cpu_usage")).longValue(); double cpuPercentageOfHost = lastCpuMetric.getCpuUsagePercentage(currentCpuContainerTotalTime, currentCpuSystemTotalTime); double cpuPercentageOfAllocated = numAllocatedContainersOnHost * cpuPercentageOfHost; metricReceiver.declareGauge(MetricReceiverWrapper.APPLICATION_DOCKER, dimensions, "node.cpu.busy.pct").sample(cpuPercentageOfAllocated); addIfNotNull(dimensions, "node.cpu.throttled_time", stats.getCpuStats().get("throttling_data"), "throttled_time"); addIfNotNull(dimensions, "node.memory.limit", stats.getMemoryStats(), "limit"); long memoryUsageTotal = ((Number) stats.getMemoryStats().get("usage")).longValue(); long memoryUsageCache = ((Number) ((Map) stats.getMemoryStats().get("stats")).get("cache")).longValue(); long memoryUsage = memoryUsageTotal - memoryUsageCache; metricReceiver.declareGauge(MetricReceiverWrapper.APPLICATION_DOCKER, dimensions, "node.memory.usage").sample(memoryUsage); stats.getNetworks().forEach((interfaceName, interfaceStats) -> { Dimensions netDims = dimensionsBuilder.add("interface", interfaceName).build(); addIfNotNull(netDims, 
"node.net.in.bytes", interfaceStats, "rx_bytes"); addIfNotNull(netDims, "node.net.in.errors", interfaceStats, "rx_errors"); addIfNotNull(netDims, "node.net.in.dropped", interfaceStats, "rx_dropped"); addIfNotNull(netDims, "node.net.out.bytes", interfaceStats, "tx_bytes"); addIfNotNull(netDims, "node.net.out.errors", interfaceStats, "tx_errors"); addIfNotNull(netDims, "node.net.out.dropped", interfaceStats, "tx_dropped"); }); long bytesInGB = 1 << 30; nodeSpec.minDiskAvailableGb.ifPresent(diskGB -> metricReceiver .declareGauge(MetricReceiverWrapper.APPLICATION_DOCKER, dimensions, "node.disk.limit").sample(diskGB * bytesInGB)); storageMaintainer.ifPresent(maintainer -> maintainer .updateIfNeededAndGetDiskMetricsFor(containerName) .forEach((metricName, metricValue) -> metricReceiver.declareGauge(MetricReceiverWrapper.APPLICATION_DOCKER, dimensions, metricName).sample(metricValue.doubleValue()))); metricReceiver.declareGauge(MetricReceiverWrapper.APPLICATION_HOST_LIFE, dimensions, "uptime").sample(lastCpuMetric.getUptime()); metricReceiver.declareGauge(MetricReceiverWrapper.APPLICATION_HOST_LIFE, dimensions, "alive").sample(1); } @SuppressWarnings("unchecked") private void addIfNotNull(Dimensions dimensions, String yamasName, Object metrics, String metricName) { Map<String, Object> metricsMap = (Map<String, Object>) metrics; if (metricsMap == null || !metricsMap.containsKey(metricName)) return; try { metricReceiver.declareGauge(MetricReceiverWrapper.APPLICATION_DOCKER, dimensions, yamasName) .sample(((Number) metricsMap.get(metricName)).doubleValue()); } catch (Throwable e) { logger.warning("Failed to update " + yamasName + " metric with value " + metricsMap.get(metricName), e); } } private Optional<Container> getContainer() { if (containerState == ABSENT) return Optional.empty(); return dockerOperations.getContainer(containerName); } @Override public String getHostname() { return hostname; } @Override public boolean isDownloadingImage() { return imageBeingDownloaded 
!= null; } @Override public int getAndResetNumberOfUnhandledExceptions() { int temp = numberOfUnhandledException; numberOfUnhandledException = 0; return temp; } private void configureContainerMetrics(ContainerNodeSpec nodeSpec) { if (! storageMaintainer.isPresent()) return; final Path yamasAgentFolder = environment.pathInNodeAdminFromPathInNode(containerName, "/etc/yamas-agent/"); Path vespaCheckPath = Paths.get(getDefaults().underVespaHome("libexec/yms/yms_check_vespa")); SecretAgentScheduleMaker scheduleMaker = new SecretAgentScheduleMaker("vespa", 60, vespaCheckPath, "all") .withTag("namespace", "Vespa") .withTag("role", "tenants") .withTag("flavor", nodeSpec.nodeFlavor) .withTag("state", nodeSpec.nodeState.toString()) .withTag("zone", environment.getZone()) .withTag("parentHostname", environment.getParentHostHostname()); nodeSpec.owner.ifPresent(owner -> scheduleMaker .withTag("tenantName", owner.tenant) .withTag("app", owner.application + "." + owner.instance)); nodeSpec.membership.ifPresent(membership -> scheduleMaker .withTag("clustertype", membership.clusterType) .withTag("clusterid", membership.clusterId)); vespaVersion.ifPresent(version -> scheduleMaker.withTag("vespaVersion", version)); try { scheduleMaker.writeTo(yamasAgentFolder); final String[] restartYamasAgent = new String[] {"service" , "yamas-agent", "restart"}; dockerOperations.executeCommandInContainerAsRoot(containerName, restartYamasAgent); } catch (IOException e) { throw new RuntimeException("Failed to write secret-agent schedules for " + containerName, e); } } class CpuUsageReporter { private long totalContainerUsage = 0; private long totalSystemUsage = 0; private final Instant created; CpuUsageReporter(Instant created) { this.created = created; } double getCpuUsagePercentage(long currentContainerUsage, long currentSystemUsage) { long deltaSystemUsage = currentSystemUsage - totalSystemUsage; double cpuUsagePct = (deltaSystemUsage == 0 || totalSystemUsage == 0) ? 
0 : 100.0 * (currentContainerUsage - totalContainerUsage) / deltaSystemUsage; totalContainerUsage = currentContainerUsage; totalSystemUsage = currentSystemUsage; return cpuUsagePct; } long getUptime() { return Duration.between(created, clock.instant()).getSeconds(); } } private void orchestratorSuspendNode() { logger.info("Ask Orchestrator for permission to suspend node " + hostname); orchestrator.suspend(hostname); } }
class NodeAgentImpl implements NodeAgent { private final AtomicBoolean terminated = new AtomicBoolean(false); private boolean isFrozen = true; private boolean wantFrozen = false; private boolean workToDoNow = true; private final Object monitor = new Object(); private final PrefixLogger logger; private DockerImage imageBeingDownloaded = null; private final String hostname; private final ContainerName containerName; private final NodeRepository nodeRepository; private final Orchestrator orchestrator; private final DockerOperations dockerOperations; private final Optional<StorageMaintainer> storageMaintainer; private final MetricReceiverWrapper metricReceiver; private final Environment environment; private final Clock clock; private final Optional<AclMaintainer> aclMaintainer; private final SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); private final LinkedList<String> debugMessages = new LinkedList<>(); private long delaysBetweenEachConvergeMillis = 30_000; private int numberOfUnhandledException = 0; private Instant lastConverge; private Thread loopThread; enum ContainerState { ABSENT, RUNNING_HOWEVER_RESUME_SCRIPT_NOT_RUN, RUNNING } private ContainerState containerState = ABSENT; private NodeAttributes lastAttributesSet = null; private ContainerNodeSpec lastNodeSpec = null; private CpuUsageReporter lastCpuMetric; private Optional<String> vespaVersion = Optional.empty(); public NodeAgentImpl( final String hostName, final NodeRepository nodeRepository, final Orchestrator orchestrator, final DockerOperations dockerOperations, final Optional<StorageMaintainer> storageMaintainer, final MetricReceiverWrapper metricReceiver, final Environment environment, final Clock clock, final Optional<AclMaintainer> aclMaintainer) { this.nodeRepository = nodeRepository; this.orchestrator = orchestrator; this.hostname = hostName; this.containerName = ContainerName.fromHostname(hostName); this.dockerOperations = dockerOperations; this.storageMaintainer = 
storageMaintainer; this.logger = PrefixLogger.getNodeAgentLogger(NodeAgentImpl.class, containerName); this.metricReceiver = metricReceiver; this.environment = environment; this.clock = clock; this.aclMaintainer = aclMaintainer; this.lastConverge = clock.instant(); lastCpuMetric = new CpuUsageReporter(clock.instant()); dockerOperations.getContainer(containerName) .ifPresent(container -> { if (container.state.isRunning()) { vespaVersion = dockerOperations.getVespaVersion(container.name); lastCpuMetric = new CpuUsageReporter(container.created); } containerState = RUNNING_HOWEVER_RESUME_SCRIPT_NOT_RUN; }); } @Override public boolean setFrozen(boolean frozen) { synchronized (monitor) { if (wantFrozen != frozen) { wantFrozen = frozen; addDebugMessage(wantFrozen ? "Freezing" : "Unfreezing"); signalWorkToBeDone(); } return isFrozen == frozen; } } private void addDebugMessage(String message) { synchronized (debugMessages) { while (debugMessages.size() > 1000) { debugMessages.pop(); } logger.debug(message); debugMessages.add("[" + sdf.format(new Date()) + "] " + message); } } @Override public Map<String, Object> debugInfo() { Map<String, Object> debug = new LinkedHashMap<>(); debug.put("Hostname", hostname); debug.put("isFrozen", isFrozen); debug.put("wantFrozen", wantFrozen); debug.put("terminated", terminated); debug.put("workToDoNow", workToDoNow); synchronized (debugMessages) { debug.put("History", new LinkedList<>(debugMessages)); } debug.put("Node repo state", lastNodeSpec.nodeState.name()); return debug; } @Override public void start(int intervalMillis) { addDebugMessage("Starting with interval " + intervalMillis + "ms"); delaysBetweenEachConvergeMillis = intervalMillis; if (loopThread != null) { throw new RuntimeException("Can not restart a node agent."); } loopThread = new Thread(() -> { while (! 
terminated.get()) tick(); }); loopThread.setName("tick-" + hostname); loopThread.start(); } @Override public void stop() { addDebugMessage("Stopping"); if (!terminated.compareAndSet(false, true)) { throw new RuntimeException("Can not re-stop a node agent."); } signalWorkToBeDone(); try { loopThread.join(10000); if (loopThread.isAlive()) { logger.error("Could not stop host thread " + hostname); } } catch (InterruptedException e1) { logger.error("Interrupted; Could not stop host thread " + hostname); } } private void experimentalWriteFile(final ContainerNodeSpec nodeSpec) { try { FilebeatConfigProvider filebeatConfigProvider = new FilebeatConfigProvider(environment); Optional<String> config = filebeatConfigProvider.getConfig(nodeSpec); if (! config.isPresent()) { logger.error("Was not able to generate a config for filebeat, ignoring filebeat file creation." + nodeSpec.toString()); return; } Path filebeatPath = environment.pathInNodeAdminFromPathInNode(containerName, "/etc/filebeat/filebeat.yml"); Files.write(filebeatPath, config.get().getBytes()); logger.info("Wrote filebeat config."); } catch (Throwable t) { logger.error("Failed writing filebeat config; " + nodeSpec, t); } } private void runLocalResumeScriptIfNeeded(final ContainerNodeSpec nodeSpec) { if (containerState != RUNNING_HOWEVER_RESUME_SCRIPT_NOT_RUN) { return; } experimentalWriteFile(nodeSpec); addDebugMessage("Starting optional node program resume command"); logger.info("Starting optional node program resume command"); dockerOperations.resumeNode(containerName); containerState = RUNNING; } private void updateNodeRepoAndMarkNodeAsReady(ContainerNodeSpec nodeSpec) { publishStateToNodeRepoIfChanged( new NodeAttributes() .withRestartGeneration(nodeSpec.wantedRestartGeneration.orElse(null)) .withRebootGeneration(nodeSpec.wantedRebootGeneration.orElse(0L)) .withDockerImage(new DockerImage("")) .withVespaVersion("")); nodeRepository.markAsReady(nodeSpec.hostname); } private void 
updateNodeRepoWithCurrentAttributes(final ContainerNodeSpec nodeSpec) { final NodeAttributes nodeAttributes = new NodeAttributes() .withRestartGeneration(nodeSpec.wantedRestartGeneration.orElse(null)) .withRebootGeneration(nodeSpec.wantedRebootGeneration.orElse(0L)) .withDockerImage(nodeSpec.wantedDockerImage.orElse(new DockerImage(""))) .withVespaVersion(vespaVersion.orElse("")); publishStateToNodeRepoIfChanged(nodeAttributes); } private void publishStateToNodeRepoIfChanged(NodeAttributes currentAttributes) { if (!currentAttributes.equals(lastAttributesSet)) { logger.info("Publishing new set of attributes to node repo: " + lastAttributesSet + " -> " + currentAttributes); addDebugMessage("Publishing new set of attributes to node repo: {" + lastAttributesSet + "} -> {" + currentAttributes + "}"); nodeRepository.updateNodeAttributes(hostname, currentAttributes); lastAttributesSet = currentAttributes; } } private void startContainerIfNeeded(final ContainerNodeSpec nodeSpec) { if (! getContainer().isPresent()) { aclMaintainer.ifPresent(AclMaintainer::run); dockerOperations.startContainer(containerName, nodeSpec); metricReceiver.unsetMetricsForContainer(hostname); lastCpuMetric = new CpuUsageReporter(clock.instant()); vespaVersion = dockerOperations.getVespaVersion(containerName); configureContainerMetrics(nodeSpec); addDebugMessage("startContainerIfNeeded: containerState " + containerState + " -> " + RUNNING_HOWEVER_RESUME_SCRIPT_NOT_RUN); containerState = RUNNING_HOWEVER_RESUME_SCRIPT_NOT_RUN; } } private void removeContainerIfNeededUpdateContainerState(ContainerNodeSpec nodeSpec) { removeContainerIfNeeded(nodeSpec).ifPresent(existingContainer -> shouldRestartServices(nodeSpec).ifPresent(restartReason -> { logger.info("Will restart services for container " + existingContainer + ": " + restartReason); restartServices(nodeSpec, existingContainer); })); } private Optional<String> shouldRestartServices(ContainerNodeSpec nodeSpec) { if ( ! 
nodeSpec.wantedRestartGeneration.isPresent()) return Optional.empty(); if (! nodeSpec.currentRestartGeneration.isPresent() || nodeSpec.currentRestartGeneration.get() < nodeSpec.wantedRestartGeneration.get()) { return Optional.of("Restart requested - wanted restart generation has been bumped: " + nodeSpec.currentRestartGeneration.get() + " -> " + nodeSpec.wantedRestartGeneration.get()); } return Optional.empty(); } private void restartServices(ContainerNodeSpec nodeSpec, Container existingContainer) { if (existingContainer.state.isRunning() && nodeSpec.nodeState == Node.State.active) { ContainerName containerName = existingContainer.name; logger.info("Restarting services for " + containerName); orchestratorSuspendNode(); dockerOperations.restartVespaOnNode(containerName); } } @Override public void stopServices() { logger.info("Stopping services for " + containerName); dockerOperations.trySuspendNode(containerName); dockerOperations.stopServicesOnNode(containerName); } private Optional<String> shouldRemoveContainer(ContainerNodeSpec nodeSpec, Container existingContainer) { final Node.State nodeState = nodeSpec.nodeState; if (nodeState == Node.State.dirty || nodeState == Node.State.provisioned) { return Optional.of("Node in state " + nodeState + ", container should no longer be running"); } if (nodeSpec.wantedDockerImage.isPresent() && !nodeSpec.wantedDockerImage.get().equals(existingContainer.image)) { return Optional.of("The node is supposed to run a new Docker image: " + existingContainer + " -> " + nodeSpec.wantedDockerImage.get()); } if (!existingContainer.state.isRunning()) { return Optional.of("Container no longer running"); } return Optional.empty(); } private void scheduleDownLoadIfNeeded(ContainerNodeSpec nodeSpec) { if (dockerOperations.shouldScheduleDownloadOfImage(nodeSpec.wantedDockerImage.get())) { if (nodeSpec.wantedDockerImage.get().equals(imageBeingDownloaded)) { return; } imageBeingDownloaded = nodeSpec.wantedDockerImage.get(); 
dockerOperations.scheduleDownloadOfImage(containerName, nodeSpec, this::signalWorkToBeDone); } else if (imageBeingDownloaded != null) { imageBeingDownloaded = null; } } private void signalWorkToBeDone() { synchronized (monitor) { if (! workToDoNow) { workToDoNow = true; addDebugMessage("Signaling work to be done"); monitor.notifyAll(); } } } void tick() { boolean isFrozenCopy; synchronized (monitor) { while (! workToDoNow) { long remainder = delaysBetweenEachConvergeMillis - Duration.between(lastConverge, clock.instant()).toMillis(); if (remainder > 0) { try { monitor.wait(remainder); } catch (InterruptedException e) { logger.error("Interrupted, but ignoring this: " + hostname); } } else break; } lastConverge = clock.instant(); workToDoNow = false; if (isFrozen != wantFrozen) { isFrozen = wantFrozen; } isFrozenCopy = isFrozen; } if (isFrozenCopy) { addDebugMessage("tick: isFrozen"); } else { try { converge(); } catch (OrchestratorException e) { logger.info(e.getMessage()); addDebugMessage(e.getMessage()); } catch (Exception e) { numberOfUnhandledException++; logger.error("Unhandled exception, ignoring.", e); addDebugMessage(e.getMessage()); } catch (Throwable t) { logger.error("Unhandled throwable, taking down system.", t); System.exit(234); } } } void converge() { final ContainerNodeSpec nodeSpec = nodeRepository.getContainerNodeSpec(hostname) .orElseThrow(() -> new IllegalStateException(String.format("Node '%s' missing from node repository.", hostname))); if (!nodeSpec.equals(lastNodeSpec)) { addDebugMessage("Loading new node spec: " + nodeSpec.toString()); lastNodeSpec = nodeSpec; metricReceiver.unsetMetricsForContainer(hostname); } switch (nodeSpec.nodeState) { case ready: case reserved: case parked: case failed: removeContainerIfNeededUpdateContainerState(nodeSpec); updateNodeRepoWithCurrentAttributes(nodeSpec); break; case active: storageMaintainer.ifPresent(maintainer -> { maintainer.removeOldFilesFromNode(containerName); 
maintainer.handleCoreDumpsForContainer(containerName, nodeSpec, environment); }); scheduleDownLoadIfNeeded(nodeSpec); if (imageBeingDownloaded != null) { addDebugMessage("Waiting for image to download " + imageBeingDownloaded.asString()); return; } removeContainerIfNeededUpdateContainerState(nodeSpec); startContainerIfNeeded(nodeSpec); runLocalResumeScriptIfNeeded(nodeSpec); updateNodeRepoWithCurrentAttributes(nodeSpec); logger.info("Call resume against Orchestrator"); orchestrator.resume(hostname); break; case inactive: storageMaintainer.ifPresent(maintainer -> maintainer.removeOldFilesFromNode(containerName)); removeContainerIfNeededUpdateContainerState(nodeSpec); updateNodeRepoWithCurrentAttributes(nodeSpec); break; case provisioned: nodeRepository.markAsDirty(hostname); break; case dirty: storageMaintainer.ifPresent(maintainer -> maintainer.removeOldFilesFromNode(containerName)); removeContainerIfNeededUpdateContainerState(nodeSpec); logger.info("State is " + nodeSpec.nodeState + ", will delete application storage and mark node as ready"); storageMaintainer.ifPresent(maintainer -> maintainer.archiveNodeData(containerName)); updateNodeRepoAndMarkNodeAsReady(nodeSpec); break; default: throw new RuntimeException("UNKNOWN STATE " + nodeSpec.nodeState.name()); } } @SuppressWarnings("unchecked") public void updateContainerNodeMetrics(int numAllocatedContainersOnHost) { final ContainerNodeSpec nodeSpec = lastNodeSpec; if (nodeSpec == null) return; Dimensions.Builder dimensionsBuilder = new Dimensions.Builder() .add("host", hostname) .add("role", "tenants") .add("flavor", nodeSpec.nodeFlavor) .add("state", nodeSpec.nodeState.toString()) .add("zone", environment.getZone()) .add("parentHostname", environment.getParentHostHostname()); vespaVersion.ifPresent(version -> dimensionsBuilder.add("vespaVersion", version)); nodeSpec.owner.ifPresent(owner -> dimensionsBuilder .add("tenantName", owner.tenant) .add("applicationName", owner.application) .add("instanceName", 
owner.instance) .add("applicationId", owner.tenant + "." + owner.application + "." + owner.instance) .add("app", owner.application + "." + owner.instance)); nodeSpec.membership.ifPresent(membership -> dimensionsBuilder .add("clustertype", membership.clusterType) .add("clusterid", membership.clusterId)); Dimensions dimensions = dimensionsBuilder.build(); metricReceiver.declareGauge(MetricReceiverWrapper.APPLICATION_DOCKER, dimensions, "node.alive").sample(1); if (containerState == ABSENT) return; Optional<Docker.ContainerStats> containerStats = dockerOperations.getContainerStats(containerName); if ( ! containerStats.isPresent()) return; Docker.ContainerStats stats = containerStats.get(); long currentCpuContainerTotalTime = ((Number) ((Map) stats.getCpuStats().get("cpu_usage")).get("total_usage")).longValue(); long currentCpuSystemTotalTime = ((Number) stats.getCpuStats().get("system_cpu_usage")).longValue(); double cpuPercentageOfHost = lastCpuMetric.getCpuUsagePercentage(currentCpuContainerTotalTime, currentCpuSystemTotalTime); double cpuPercentageOfAllocated = numAllocatedContainersOnHost * cpuPercentageOfHost; metricReceiver.declareGauge(MetricReceiverWrapper.APPLICATION_DOCKER, dimensions, "node.cpu.busy.pct").sample(cpuPercentageOfAllocated); addIfNotNull(dimensions, "node.cpu.throttled_time", stats.getCpuStats().get("throttling_data"), "throttled_time"); addIfNotNull(dimensions, "node.memory.limit", stats.getMemoryStats(), "limit"); long memoryUsageTotal = ((Number) stats.getMemoryStats().get("usage")).longValue(); long memoryUsageCache = ((Number) ((Map) stats.getMemoryStats().get("stats")).get("cache")).longValue(); long memoryUsage = memoryUsageTotal - memoryUsageCache; metricReceiver.declareGauge(MetricReceiverWrapper.APPLICATION_DOCKER, dimensions, "node.memory.usage").sample(memoryUsage); stats.getNetworks().forEach((interfaceName, interfaceStats) -> { Dimensions netDims = dimensionsBuilder.add("interface", interfaceName).build(); addIfNotNull(netDims, 
"node.net.in.bytes", interfaceStats, "rx_bytes"); addIfNotNull(netDims, "node.net.in.errors", interfaceStats, "rx_errors"); addIfNotNull(netDims, "node.net.in.dropped", interfaceStats, "rx_dropped"); addIfNotNull(netDims, "node.net.out.bytes", interfaceStats, "tx_bytes"); addIfNotNull(netDims, "node.net.out.errors", interfaceStats, "tx_errors"); addIfNotNull(netDims, "node.net.out.dropped", interfaceStats, "tx_dropped"); }); long bytesInGB = 1 << 30; nodeSpec.minDiskAvailableGb.ifPresent(diskGB -> metricReceiver .declareGauge(MetricReceiverWrapper.APPLICATION_DOCKER, dimensions, "node.disk.limit").sample(diskGB * bytesInGB)); storageMaintainer.ifPresent(maintainer -> maintainer .updateIfNeededAndGetDiskMetricsFor(containerName) .forEach((metricName, metricValue) -> metricReceiver.declareGauge(MetricReceiverWrapper.APPLICATION_DOCKER, dimensions, metricName).sample(metricValue.doubleValue()))); metricReceiver.declareGauge(MetricReceiverWrapper.APPLICATION_HOST_LIFE, dimensions, "uptime").sample(lastCpuMetric.getUptime()); metricReceiver.declareGauge(MetricReceiverWrapper.APPLICATION_HOST_LIFE, dimensions, "alive").sample(1); } @SuppressWarnings("unchecked") private void addIfNotNull(Dimensions dimensions, String yamasName, Object metrics, String metricName) { Map<String, Object> metricsMap = (Map<String, Object>) metrics; if (metricsMap == null || !metricsMap.containsKey(metricName)) return; try { metricReceiver.declareGauge(MetricReceiverWrapper.APPLICATION_DOCKER, dimensions, yamasName) .sample(((Number) metricsMap.get(metricName)).doubleValue()); } catch (Throwable e) { logger.warning("Failed to update " + yamasName + " metric with value " + metricsMap.get(metricName), e); } } private Optional<Container> getContainer() { if (containerState == ABSENT) return Optional.empty(); return dockerOperations.getContainer(containerName); } @Override public String getHostname() { return hostname; } @Override public boolean isDownloadingImage() { return imageBeingDownloaded 
!= null; } @Override public int getAndResetNumberOfUnhandledExceptions() { int temp = numberOfUnhandledException; numberOfUnhandledException = 0; return temp; } private void configureContainerMetrics(ContainerNodeSpec nodeSpec) { if (! storageMaintainer.isPresent()) return; final Path yamasAgentFolder = environment.pathInNodeAdminFromPathInNode(containerName, "/etc/yamas-agent/"); Path vespaCheckPath = Paths.get(getDefaults().underVespaHome("libexec/yms/yms_check_vespa")); SecretAgentScheduleMaker scheduleMaker = new SecretAgentScheduleMaker("vespa", 60, vespaCheckPath, "all") .withTag("namespace", "Vespa") .withTag("role", "tenants") .withTag("flavor", nodeSpec.nodeFlavor) .withTag("state", nodeSpec.nodeState.toString()) .withTag("zone", environment.getZone()) .withTag("parentHostname", environment.getParentHostHostname()); nodeSpec.owner.ifPresent(owner -> scheduleMaker .withTag("tenantName", owner.tenant) .withTag("app", owner.application + "." + owner.instance)); nodeSpec.membership.ifPresent(membership -> scheduleMaker .withTag("clustertype", membership.clusterType) .withTag("clusterid", membership.clusterId)); vespaVersion.ifPresent(version -> scheduleMaker.withTag("vespaVersion", version)); try { scheduleMaker.writeTo(yamasAgentFolder); final String[] restartYamasAgent = new String[] {"service" , "yamas-agent", "restart"}; dockerOperations.executeCommandInContainerAsRoot(containerName, restartYamasAgent); } catch (IOException e) { throw new RuntimeException("Failed to write secret-agent schedules for " + containerName, e); } } class CpuUsageReporter { private long totalContainerUsage = 0; private long totalSystemUsage = 0; private final Instant created; CpuUsageReporter(Instant created) { this.created = created; } double getCpuUsagePercentage(long currentContainerUsage, long currentSystemUsage) { long deltaSystemUsage = currentSystemUsage - totalSystemUsage; double cpuUsagePct = (deltaSystemUsage == 0 || totalSystemUsage == 0) ? 
0 : 100.0 * (currentContainerUsage - totalContainerUsage) / deltaSystemUsage; totalContainerUsage = currentContainerUsage; totalSystemUsage = currentSystemUsage; return cpuUsagePct; } long getUptime() { return Duration.between(created, clock.instant()).getSeconds(); } } private void orchestratorSuspendNode() { logger.info("Ask Orchestrator for permission to suspend node " + hostname); orchestrator.suspend(hostname); } }
this is the most difficult model I have ever seen :laughing:
public JsonWriter toJson(JsonWriter jsonWriter) { return null; }
return null;
public JsonWriter toJson(JsonWriter jsonWriter) { return toJsonInternal(jsonWriter, "foo"); }
class Foo implements JsonCapable<Foo> { @JsonProperty(value = "properties.bar") private String bar; @JsonProperty(value = "properties.props.baz") private List<String> baz; @JsonProperty(value = "properties.props.q.qux") private Map<String, String> qux; @JsonProperty(value = "properties.more\\.props") private String moreProps; @JsonProperty(value = "props.empty") private Integer empty; @JsonProperty(value = "") private Map<String, Object> additionalProperties; public String bar() { return bar; } public void bar(String bar) { this.bar = bar; } public List<String> baz() { return baz; } public void baz(List<String> baz) { this.baz = baz; } public Map<String, String> qux() { return qux; } public void qux(Map<String, String> qux) { this.qux = qux; } public String moreProps() { return moreProps; } public void moreProps(String moreProps) { this.moreProps = moreProps; } public Integer empty() { return empty; } public void empty(Integer empty) { this.empty = empty; } public Map<String, Object> additionalProperties() { return additionalProperties; } public void additionalProperties(Map<String, Object> additionalProperties) { this.additionalProperties = additionalProperties; } @Override }
class Foo implements JsonCapable<Foo> { private String bar; private List<String> baz; private Map<String, String> qux; private String moreProps; private Integer empty; private Map<String, Object> additionalProperties; public String bar() { return bar; } public void bar(String bar) { this.bar = bar; } public List<String> baz() { return baz; } public void baz(List<String> baz) { this.baz = baz; } public Map<String, String> qux() { return qux; } public void qux(Map<String, String> qux) { this.qux = qux; } public String moreProps() { return moreProps; } public void moreProps(String moreProps) { this.moreProps = moreProps; } public Integer empty() { return empty; } public void empty(Integer empty) { this.empty = empty; } public Map<String, Object> additionalProperties() { return additionalProperties; } public void additionalProperties(Map<String, Object> additionalProperties) { this.additionalProperties = additionalProperties; } @Override JsonWriter toJsonInternal(JsonWriter jsonWriter, String type) { jsonWriter.writeStartObject() .writeStringField("$type", type); if (bar != null || baz != null || qux != null || moreProps != null) { jsonWriter.writeFieldName("properties") .writeStartObject(); JsonUtils.writeNonNullStringField(jsonWriter, "bar", bar); if (baz != null || qux != null) { jsonWriter.writeFieldName("props") .writeStartObject(); if (baz != null) { JsonUtils.writeArray(jsonWriter, "baz", baz, JsonWriter::writeString); } if (qux != null) { jsonWriter.writeFieldName("q") .writeStartObject() .writeFieldName("qux") .writeStartObject(); qux.forEach(jsonWriter::writeStringField); jsonWriter.writeEndObject() .writeEndObject(); } jsonWriter.writeEndObject(); } JsonUtils.writeNonNullStringField(jsonWriter, "more.props", moreProps); jsonWriter.writeEndObject(); } if (empty != null) { jsonWriter.writeFieldName("props") .writeStartObject() .writeIntField("empty", empty) .writeEndObject(); } if (additionalProperties != null) { additionalProperties.forEach((key, value) -> 
JsonUtils.writeUntypedField(jsonWriter.writeFieldName(key), value)); } return jsonWriter.writeEndObject().flush(); } public static Foo fromJson(JsonReader jsonReader) { return fromJsonInternal(jsonReader, null); } static Foo fromJsonInternal(JsonReader jsonReader, String expectedType) { return JsonUtils.readObject(jsonReader, reader -> { String type = null; String bar = null; List<String> baz = null; Map<String, String> qux = null; String moreProps = null; Integer empty = null; Map<String, Object> additionalProperties = null; while (reader.nextToken() != JsonToken.END_OBJECT) { String fieldName = reader.getFieldName(); reader.nextToken(); if ("$type".equals(fieldName)) { type = reader.getStringValue(); } else if ("properties".equals(fieldName) && reader.currentToken() == JsonToken.START_OBJECT) { while (reader.nextToken() != JsonToken.END_OBJECT) { fieldName = reader.getFieldName(); reader.nextToken(); if ("bar".equals(fieldName)) { bar = reader.getStringValue(); } else if ("more.props".equals(fieldName)) { moreProps = reader.getStringValue(); } else if ("props".equals(fieldName) && reader.currentToken() == JsonToken.START_OBJECT) { while (reader.nextToken() != JsonToken.END_OBJECT) { fieldName = reader.getFieldName(); reader.nextToken(); if ("baz".equals(fieldName)) { baz = JsonUtils.readArray(reader, r -> JsonUtils.getNullableProperty(r, JsonReader::getStringValue)); } else if ("q".equals(fieldName)) { while (reader.nextToken() != JsonToken.END_OBJECT) { fieldName = reader.getFieldName(); reader.nextToken(); if ("qux".equals(fieldName) && reader.currentToken() == JsonToken.START_OBJECT) { if (qux == null) { qux = new LinkedHashMap<>(); } while (reader.nextToken() != JsonToken.END_OBJECT) { fieldName = reader.getFieldName(); reader.nextToken(); qux.put(fieldName, JsonUtils.getNullableProperty(reader, JsonReader::getStringValue)); } } else { reader.skipChildren(); } } } else { reader.skipChildren(); } } } else { reader.skipChildren(); } } } else if 
("props".equals(fieldName) && reader.currentToken() == JsonToken.START_OBJECT) { while (reader.nextToken() != JsonToken.END_OBJECT) { fieldName = reader.getFieldName(); reader.nextToken(); if ("empty".equals(fieldName)) { empty = reader.currentToken() == JsonToken.NULL ? null : reader.getIntValue(); } else { reader.skipChildren(); } } } else { if (additionalProperties == null) { additionalProperties = new LinkedHashMap<>(); } additionalProperties.put(fieldName, JsonUtils.readUntypedField(reader)); } } if (expectedType != null && type != null && !Objects.equals(expectedType, type)) { throw new IllegalStateException("Discriminator field '$type' didn't match expected value: " + "'" + expectedType + "'. It was: '" + type + "'."); } if ((expectedType == null && type == null) || "foo".equals(type)) { Foo foo = new Foo(); foo.bar(bar); foo.baz(baz); foo.qux(qux); foo.moreProps(moreProps); foo.empty(empty); foo.additionalProperties(additionalProperties); return foo; } else if ("foochild".equals(expectedType) || "foochild".equals(type)) { FooChild fooChild = new FooChild(); fooChild.bar(bar); fooChild.baz(baz); fooChild.qux(qux); fooChild.moreProps(moreProps); fooChild.empty(empty); fooChild.additionalProperties(additionalProperties); return fooChild; } else { throw new IllegalStateException("Invalid discriminator value '" + reader.getStringValue() + "', expected: 'foo' or 'foochild'."); } }); } }
I wanted to keep the declaration of a class and its features encapsulated in single class. Agree that this doesn't play well with the rest of the `DoFnSignaturesTest`. But because there is effort in the direction of superseding all this with "pipeline features", I think it is fine to keep is as is for now and drop it as part of the effort later.
public void testAllDoFnFeatures() { tests.forEach(FeatureTest::test); }
tests.forEach(FeatureTest::test);
public void testAllDoFnFeatures() { tests.forEach(FeatureTest::test); }
class Splittable extends DoFn<KV<String, Long>, String> implements FeatureTest { @ProcessElement public void process(ProcessContext c, RestrictionTracker<OffsetRange, ?> tracker) {} @GetInitialRestriction public OffsetRange getInitialRange(@Element KV<String, Long> element) { return new OffsetRange(0L, element.getValue()); } @Override public void test() { assertThat(DoFnSignatures.isSplittable(this), SerializableMatchers.equalTo(true)); assertThat(DoFnSignatures.isStateful(this), SerializableMatchers.equalTo(false)); assertThat(DoFnSignatures.usesTimers(this), SerializableMatchers.equalTo(false)); assertThat(DoFnSignatures.usesState(this), SerializableMatchers.equalTo(false)); assertThat(DoFnSignatures.usesBagState(this), SerializableMatchers.equalTo(false)); assertThat(DoFnSignatures.usesMapState(this), SerializableMatchers.equalTo(false)); assertThat(DoFnSignatures.usesSetState(this), SerializableMatchers.equalTo(false)); assertThat(DoFnSignatures.usesValueState(this), SerializableMatchers.equalTo(false)); assertThat(DoFnSignatures.usesWatermarkHold(this), SerializableMatchers.equalTo(false)); assertThat(DoFnSignatures.requiresTimeSortedInput(this), SerializableMatchers.equalTo(false)); } }
class Splittable extends DoFn<KV<String, Long>, String> implements FeatureTest { @ProcessElement public void process(ProcessContext c, RestrictionTracker<OffsetRange, ?> tracker) {} @GetInitialRestriction public OffsetRange getInitialRange(@Element KV<String, Long> element) { return new OffsetRange(0L, element.getValue()); } @Override public void test() { assertThat(DoFnSignatures.isSplittable(this), SerializableMatchers.equalTo(true)); assertThat(DoFnSignatures.isStateful(this), SerializableMatchers.equalTo(false)); assertThat(DoFnSignatures.usesTimers(this), SerializableMatchers.equalTo(false)); assertThat(DoFnSignatures.usesState(this), SerializableMatchers.equalTo(false)); assertThat(DoFnSignatures.usesBagState(this), SerializableMatchers.equalTo(false)); assertThat(DoFnSignatures.usesMapState(this), SerializableMatchers.equalTo(false)); assertThat(DoFnSignatures.usesSetState(this), SerializableMatchers.equalTo(false)); assertThat(DoFnSignatures.usesValueState(this), SerializableMatchers.equalTo(false)); assertThat(DoFnSignatures.usesWatermarkHold(this), SerializableMatchers.equalTo(false)); assertThat(DoFnSignatures.requiresTimeSortedInput(this), SerializableMatchers.equalTo(false)); } }
hostNames is supposed to contain parentHostName already (if applicable)
public void suspend(String parentHostName, List<String> hostNames) { final BatchOperationResult batchOperationResult; try { String params = String.join("&hostname=", hostNames); String url = String.format("%s/%s?hostname=%s", ORCHESTRATOR_PATH_PREFIX_HOST_SUSPENSION_API, parentHostName, params); batchOperationResult = configServerApi.put(url, Optional.empty(), BatchOperationResult.class); } catch (HttpException e) { throw new OrchestratorException("Failed to batch suspend for " + parentHostName + ": " + e.toString()); } catch (Exception e) { throw new RuntimeException("Got error on batch suspend for " + parentHostName + ", with nodes " + hostNames, e); } batchOperationResult.getFailureReason().ifPresent(reason -> { throw new OrchestratorException(reason); }); }
String url = String.format("%s/%s?hostname=%s", ORCHESTRATOR_PATH_PREFIX_HOST_SUSPENSION_API,
public void suspend(String parentHostName, List<String> hostNames) { final BatchOperationResult batchOperationResult; try { String params = String.join("&hostname=", hostNames); String url = String.format("%s/%s?hostname=%s", ORCHESTRATOR_PATH_PREFIX_HOST_SUSPENSION_API, parentHostName, params); batchOperationResult = configServerApi.put(url, Optional.empty(), BatchOperationResult.class); } catch (HttpException e) { throw new OrchestratorException("Failed to batch suspend for " + parentHostName + ": " + e.toString()); } catch (Exception e) { throw new RuntimeException("Got error on batch suspend for " + parentHostName + ", with nodes " + hostNames, e); } batchOperationResult.getFailureReason().ifPresent(reason -> { throw new OrchestratorException(reason); }); }
class OrchestratorImpl implements Orchestrator { private static final String ORCHESTRATOR_PATH_PREFIX = "/orchestrator"; static final String ORCHESTRATOR_PATH_PREFIX_HOST_API = ORCHESTRATOR_PATH_PREFIX + HostApi.PATH_PREFIX; static final String ORCHESTRATOR_PATH_PREFIX_HOST_SUSPENSION_API = ORCHESTRATOR_PATH_PREFIX + HostSuspensionApi.PATH_PREFIX; private final ConfigServerApi configServerApi; public OrchestratorImpl(ConfigServerApi configServerApi) { this.configServerApi = configServerApi; } @Override public void suspend(final String hostName) { UpdateHostResponse response; try { response = configServerApi.put(getSuspendPath(hostName), Optional.empty(), /* body */ UpdateHostResponse.class); } catch (HttpException.NotFoundException n) { throw new OrchestratorNotFoundException("Failed to suspend " + hostName + ", host not found"); } catch (HttpException e) { throw new OrchestratorException("Failed to suspend " + hostName + ": " + e.toString()); } catch (Exception e) { throw new RuntimeException("Got error on suspend", e); } Optional.ofNullable(response.reason()).ifPresent(reason -> { throw new OrchestratorException(reason.message()); }); } @Override @Override public void resume(final String hostName) { UpdateHostResponse response; try { String path = getSuspendPath(hostName); response = configServerApi.delete(path, UpdateHostResponse.class); } catch (HttpException.NotFoundException n) { throw new OrchestratorNotFoundException("Failed to resume " + hostName + ", host not found"); } catch (HttpException e) { throw new OrchestratorException("Failed to suspend " + hostName + ": " + e.toString()); } catch (Exception e) { throw new RuntimeException("Got error on resume", e); } Optional.ofNullable(response.reason()).ifPresent(reason -> { throw new OrchestratorException(reason.message()); }); } private String getSuspendPath(String hostName) { return ORCHESTRATOR_PATH_PREFIX_HOST_API + "/" + hostName + "/suspended"; } }
class OrchestratorImpl implements Orchestrator { private static final String ORCHESTRATOR_PATH_PREFIX = "/orchestrator"; static final String ORCHESTRATOR_PATH_PREFIX_HOST_API = ORCHESTRATOR_PATH_PREFIX + HostApi.PATH_PREFIX; static final String ORCHESTRATOR_PATH_PREFIX_HOST_SUSPENSION_API = ORCHESTRATOR_PATH_PREFIX + HostSuspensionApi.PATH_PREFIX; private final ConfigServerApi configServerApi; public OrchestratorImpl(ConfigServerApi configServerApi) { this.configServerApi = configServerApi; } @Override public void suspend(final String hostName) { UpdateHostResponse response; try { response = configServerApi.put(getSuspendPath(hostName), Optional.empty(), /* body */ UpdateHostResponse.class); } catch (HttpException.NotFoundException n) { throw new OrchestratorNotFoundException("Failed to suspend " + hostName + ", host not found"); } catch (HttpException e) { throw new OrchestratorException("Failed to suspend " + hostName + ": " + e.toString()); } catch (Exception e) { throw new RuntimeException("Got error on suspend", e); } Optional.ofNullable(response.reason()).ifPresent(reason -> { throw new OrchestratorException(reason.message()); }); } @Override @Override public void resume(final String hostName) { UpdateHostResponse response; try { String path = getSuspendPath(hostName); response = configServerApi.delete(path, UpdateHostResponse.class); } catch (HttpException.NotFoundException n) { throw new OrchestratorNotFoundException("Failed to resume " + hostName + ", host not found"); } catch (HttpException e) { throw new OrchestratorException("Failed to suspend " + hostName + ": " + e.toString()); } catch (Exception e) { throw new RuntimeException("Got error on resume", e); } Optional.ofNullable(response.reason()).ifPresent(reason -> { throw new OrchestratorException(reason.message()); }); } private String getSuspendPath(String hostName) { return ORCHESTRATOR_PATH_PREFIX_HOST_API + "/" + hostName + "/suspended"; } }
```suggestion
// Java uses BigDecimal, so 30 + 0.2 * 170 = 63.9...
// BigDecimal.longValue() will round down to 63 instead of the expected 64
```
// Verifies trySplitForProcess when the requested split (fraction 0.2) lands inside the middle
// window (window index 1 of 3) of a three-window element.
public void testTrySplitForProcessSplitOnMiddleWindow() throws Exception {
  List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows());
  OffsetRangeTracker tracker = new OffsetRangeTracker(currentRestriction);
  tracker.tryClaim(30L); // position 30 of restriction [0, 100) has been claimed
  KV<WindowedSplitResult, Integer> result =
      FnApiDoFnRunner.trySplitForProcess(
          currentElement,
          currentRestriction,
          window2,
          windows,
          currentWatermarkEstimatorState,
          0.2,
          tracker,
          watermarkAndState,
          1,
          3);
  // Residual processing resumes at window index 2 (window3).
  assertEquals(2, (int) result.getValue());
  // NOTE(review): remaining work is 70 in the current window plus 100 for the one unstarted
  // window, so 30 + 0.2 * 170 would suggest a split position of 64; the implementation's
  // BigDecimal arithmetic yields 63 — presumably BigDecimal.longValue() truncating downward,
  // TODO confirm against FnApiDoFnRunner.
  KV<WindowedValue, WindowedValue> expectedElementSplit =
      createSplitInWindow(new OffsetRange(0, 63), new OffsetRange(63, 100), window2);
  KV<WindowedValue, WindowedValue> expectedWindowSplit =
      createSplitAcrossWindows(ImmutableList.of(window1), ImmutableList.of(window3));
  assertEquals(expectedElementSplit.getKey(), result.getKey().getPrimarySplitRoot());
  assertEquals(expectedElementSplit.getValue(), result.getKey().getResidualSplitRoot());
  assertEquals(
      expectedWindowSplit.getKey(), result.getKey().getPrimaryInFullyProcessedWindowsRoot());
  assertEquals(
      expectedWindowSplit.getValue(), result.getKey().getResidualInUnprocessedWindowsRoot());
}
createSplitInWindow(new OffsetRange(0, 63), new OffsetRange(63, 100), window2);
// Verifies trySplitForProcess when the requested split (fraction 0.2) lands inside the middle
// window (window index 1 of 3) of a three-window element.
public void testTrySplitForProcessSplitOnMiddleWindow() throws Exception {
  List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows());
  OffsetRangeTracker tracker = new OffsetRangeTracker(currentRestriction);
  tracker.tryClaim(30L); // position 30 of restriction [0, 100) has been claimed
  KV<WindowedSplitResult, Integer> result =
      FnApiDoFnRunner.trySplitForProcess(
          currentElement,
          currentRestriction,
          window2,
          windows,
          currentWatermarkEstimatorState,
          0.2,
          tracker,
          watermarkAndState,
          1,
          3);
  // Residual processing resumes at window index 2 (window3).
  assertEquals(2, (int) result.getValue());
  // NOTE(review): remaining work is 70 in the current window plus 100 for the one unstarted
  // window, so 30 + 0.2 * 170 would suggest a split position of 64; the implementation's
  // BigDecimal arithmetic yields 63 — presumably BigDecimal.longValue() truncating downward,
  // TODO confirm against FnApiDoFnRunner.
  KV<WindowedValue, WindowedValue> expectedElementSplit =
      createSplitInWindow(new OffsetRange(0, 63), new OffsetRange(63, 100), window2);
  KV<WindowedValue, WindowedValue> expectedWindowSplit =
      createSplitAcrossWindows(ImmutableList.of(window1), ImmutableList.of(window3));
  assertEquals(expectedElementSplit.getKey(), result.getKey().getPrimarySplitRoot());
  assertEquals(expectedElementSplit.getValue(), result.getKey().getResidualSplitRoot());
  assertEquals(
      expectedWindowSplit.getKey(), result.getKey().getPrimaryInFullyProcessedWindowsRoot());
  assertEquals(
      expectedWindowSplit.getValue(), result.getKey().getResidualInUnprocessedWindowsRoot());
}
class SplitTest { private IntervalWindow window1; private IntervalWindow window2; private IntervalWindow window3; private WindowedValue<String> currentElement; private OffsetRange currentRestriction; private Instant currentWatermarkEstimatorState; KV<Instant, Instant> watermarkAndState; private KV<WindowedValue, WindowedValue> createSplitInWindow( OffsetRange primaryRestriction, OffsetRange residualRestriction, BoundedWindow window) { return KV.of( WindowedValue.of( KV.of( currentElement.getValue(), KV.of(primaryRestriction, currentWatermarkEstimatorState)), currentElement.getTimestamp(), window, currentElement.getPane()), WindowedValue.of( KV.of( currentElement.getValue(), KV.of(residualRestriction, watermarkAndState.getValue())), currentElement.getTimestamp(), window, currentElement.getPane())); } private KV<WindowedValue, WindowedValue> createSplitAcrossWindows( List<BoundedWindow> primaryWindows, List<BoundedWindow> residualWindows) { return KV.of( primaryWindows.isEmpty() ? null : WindowedValue.of( KV.of( currentElement.getValue(), KV.of(currentRestriction, currentWatermarkEstimatorState)), currentElement.getTimestamp(), primaryWindows, currentElement.getPane()), residualWindows.isEmpty() ? 
null : WindowedValue.of( KV.of( currentElement.getValue(), KV.of(currentRestriction, currentWatermarkEstimatorState)), currentElement.getTimestamp(), residualWindows, currentElement.getPane())); } @Before public void setUp() { window1 = new IntervalWindow(Instant.ofEpochMilli(0), Instant.ofEpochMilli(10)); window2 = new IntervalWindow(Instant.ofEpochMilli(10), Instant.ofEpochMilli(20)); window3 = new IntervalWindow(Instant.ofEpochMilli(20), Instant.ofEpochMilli(30)); currentElement = WindowedValue.of( "a", Instant.ofEpochMilli(57), ImmutableList.of(window1, window2, window3), PaneInfo.NO_FIRING); currentRestriction = new OffsetRange(0L, 100L); currentWatermarkEstimatorState = Instant.ofEpochMilli(21); watermarkAndState = KV.of(Instant.ofEpochMilli(42), Instant.ofEpochMilli(42)); } @Test public void testScaleProgress() throws Exception { Progress elementProgress = Progress.from(2, 8); Progress scaledResult = FnApiDoFnRunner.scaleProgress(elementProgress, 0, 1); assertEquals(2, scaledResult.getWorkCompleted(), 0.0); assertEquals(8, scaledResult.getWorkRemaining(), 0.0); scaledResult = FnApiDoFnRunner.scaleProgress(elementProgress, 0, 3); assertEquals(2, scaledResult.getWorkCompleted(), 0.0); assertEquals(28, scaledResult.getWorkRemaining(), 0.0); scaledResult = FnApiDoFnRunner.scaleProgress(elementProgress, 1, 3); assertEquals(12, scaledResult.getWorkCompleted(), 0.0); assertEquals(18, scaledResult.getWorkRemaining(), 0.0); scaledResult = FnApiDoFnRunner.scaleProgress(elementProgress, 2, 3); assertEquals(22, scaledResult.getWorkCompleted(), 0.0); assertEquals(8, scaledResult.getWorkRemaining(), 0.0); } @Test public void testTrySplitForProcessCheckpointOnFirstWindow() throws Exception { List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows()); OffsetRangeTracker tracker = new OffsetRangeTracker(currentRestriction); tracker.tryClaim(30L); KV<WindowedSplitResult, Integer> result = FnApiDoFnRunner.trySplitForProcess( currentElement, 
currentRestriction, window1, windows, currentWatermarkEstimatorState, 0.0, tracker, watermarkAndState, 0, 3); assertEquals(1, (int) result.getValue()); KV<WindowedValue, WindowedValue> expectedElementSplit = createSplitInWindow(new OffsetRange(0, 31), new OffsetRange(31, 100), window1); KV<WindowedValue, WindowedValue> expectedWindowSplit = createSplitAcrossWindows(ImmutableList.of(), ImmutableList.of(window2, window3)); assertEquals(expectedElementSplit.getKey(), result.getKey().getPrimarySplitRoot()); assertEquals(expectedElementSplit.getValue(), result.getKey().getResidualSplitRoot()); assertEquals( expectedWindowSplit.getKey(), result.getKey().getPrimaryInFullyProcessedWindowsRoot()); assertEquals( expectedWindowSplit.getValue(), result.getKey().getResidualInUnprocessedWindowsRoot()); } @Test public void testTrySplitForProcessCheckpointOnFirstWindowAfterOneSplit() throws Exception { List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows()); OffsetRangeTracker tracker = new OffsetRangeTracker(currentRestriction); tracker.tryClaim(30L); KV<WindowedSplitResult, Integer> result = FnApiDoFnRunner.trySplitForProcess( currentElement, currentRestriction, window1, windows, currentWatermarkEstimatorState, 0.0, tracker, watermarkAndState, 0, 2); assertEquals(1, (int) result.getValue()); KV<WindowedValue, WindowedValue> expectedElementSplit = createSplitInWindow(new OffsetRange(0, 31), new OffsetRange(31, 100), window1); KV<WindowedValue, WindowedValue> expectedWindowSplit = createSplitAcrossWindows(ImmutableList.of(), ImmutableList.of(window2)); assertEquals(expectedElementSplit.getKey(), result.getKey().getPrimarySplitRoot()); assertEquals(expectedElementSplit.getValue(), result.getKey().getResidualSplitRoot()); assertEquals( expectedWindowSplit.getKey(), result.getKey().getPrimaryInFullyProcessedWindowsRoot()); assertEquals( expectedWindowSplit.getValue(), result.getKey().getResidualInUnprocessedWindowsRoot()); } @Test public void 
testTrySplitForProcessSplitOnFirstWindow() throws Exception { List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows()); OffsetRangeTracker tracker = new OffsetRangeTracker(currentRestriction); tracker.tryClaim(30L); KV<WindowedSplitResult, Integer> result = FnApiDoFnRunner.trySplitForProcess( currentElement, currentRestriction, window1, windows, currentWatermarkEstimatorState, 0.2, tracker, watermarkAndState, 0, 3); assertEquals(1, (int) result.getValue()); KV<WindowedValue, WindowedValue> expectedElementSplit = createSplitInWindow(new OffsetRange(0, 84), new OffsetRange(84, 100), window1); KV<WindowedValue, WindowedValue> expectedWindowSplit = createSplitAcrossWindows(ImmutableList.of(), ImmutableList.of(window2, window3)); assertEquals(expectedElementSplit.getKey(), result.getKey().getPrimarySplitRoot()); assertEquals(expectedElementSplit.getValue(), result.getKey().getResidualSplitRoot()); assertEquals( expectedWindowSplit.getKey(), result.getKey().getPrimaryInFullyProcessedWindowsRoot()); assertEquals( expectedWindowSplit.getValue(), result.getKey().getResidualInUnprocessedWindowsRoot()); } @Test @Test public void testTrySplitForProcessSplitOnLastWindow() throws Exception { List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows()); OffsetRangeTracker tracker = new OffsetRangeTracker(currentRestriction); tracker.tryClaim(30L); KV<WindowedSplitResult, Integer> result = FnApiDoFnRunner.trySplitForProcess( currentElement, currentRestriction, window3, windows, currentWatermarkEstimatorState, 0.2, tracker, watermarkAndState, 2, 3); assertEquals(3, (int) result.getValue()); KV<WindowedValue, WindowedValue> expectedElementSplit = createSplitInWindow(new OffsetRange(0, 44), new OffsetRange(44, 100), window3); KV<WindowedValue, WindowedValue> expectedWindowSplit = createSplitAcrossWindows(ImmutableList.of(window1, window2), ImmutableList.of()); assertEquals(expectedElementSplit.getKey(), result.getKey().getPrimarySplitRoot()); 
assertEquals(expectedElementSplit.getValue(), result.getKey().getResidualSplitRoot()); assertEquals( expectedWindowSplit.getKey(), result.getKey().getPrimaryInFullyProcessedWindowsRoot()); assertEquals( expectedWindowSplit.getValue(), result.getKey().getResidualInUnprocessedWindowsRoot()); } @Test public void testTrySplitForProcessSplitOnFirstWindowFallback() throws Exception { List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows()); OffsetRangeTracker tracker = new OffsetRangeTracker(currentRestriction); tracker.tryClaim(100L); assertNull(tracker.trySplit(0.0)); KV<WindowedSplitResult, Integer> result = FnApiDoFnRunner.trySplitForProcess( currentElement, currentRestriction, window3, windows, currentWatermarkEstimatorState, 0, tracker, watermarkAndState, 0, 3); assertEquals(1, (int) result.getValue()); KV<WindowedValue, WindowedValue> expectedWindowSplit = createSplitAcrossWindows(ImmutableList.of(window1), ImmutableList.of(window2, window3)); assertNull(result.getKey().getPrimarySplitRoot()); assertNull(result.getKey().getResidualSplitRoot()); assertEquals( expectedWindowSplit.getKey(), result.getKey().getPrimaryInFullyProcessedWindowsRoot()); assertEquals( expectedWindowSplit.getValue(), result.getKey().getResidualInUnprocessedWindowsRoot()); } @Test public void testTrySplitForProcessSplitOnLastWindowWhenNoElementSplit() throws Exception { List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows()); OffsetRangeTracker tracker = new OffsetRangeTracker(currentRestriction); tracker.tryClaim(100L); assertNull(tracker.trySplit(0.0)); KV<WindowedSplitResult, Integer> result = FnApiDoFnRunner.trySplitForProcess( currentElement, currentRestriction, window3, windows, currentWatermarkEstimatorState, 0, tracker, watermarkAndState, 2, 3); assertNull(result); } @Test public void testTrySplitForProcessOnWindowBoundaryRoundUp() throws Exception { List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows()); 
OffsetRangeTracker tracker = new OffsetRangeTracker(currentRestriction); tracker.tryClaim(30L); KV<WindowedSplitResult, Integer> result = FnApiDoFnRunner.trySplitForProcess( currentElement, currentRestriction, window2, windows, currentWatermarkEstimatorState, 0.6, tracker, watermarkAndState, 0, 3); assertEquals(2, (int) result.getValue()); KV<WindowedValue, WindowedValue> expectedWindowSplit = createSplitAcrossWindows(ImmutableList.of(window1, window2), ImmutableList.of(window3)); assertNull(result.getKey().getPrimarySplitRoot()); assertNull(result.getKey().getResidualSplitRoot()); assertEquals( expectedWindowSplit.getKey(), result.getKey().getPrimaryInFullyProcessedWindowsRoot()); assertEquals( expectedWindowSplit.getValue(), result.getKey().getResidualInUnprocessedWindowsRoot()); } @Test public void testTrySplitForProcessOnWindowBoundaryRoundDown() throws Exception { List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows()); OffsetRangeTracker tracker = new OffsetRangeTracker(currentRestriction); tracker.tryClaim(30L); KV<WindowedSplitResult, Integer> result = FnApiDoFnRunner.trySplitForProcess( currentElement, currentRestriction, window2, windows, currentWatermarkEstimatorState, 0.3, tracker, watermarkAndState, 0, 3); assertEquals(1, (int) result.getValue()); KV<WindowedValue, WindowedValue> expectedWindowSplit = createSplitAcrossWindows(ImmutableList.of(window1), ImmutableList.of(window2, window3)); assertNull(result.getKey().getPrimarySplitRoot()); assertNull(result.getKey().getResidualSplitRoot()); assertEquals( expectedWindowSplit.getKey(), result.getKey().getPrimaryInFullyProcessedWindowsRoot()); assertEquals( expectedWindowSplit.getValue(), result.getKey().getResidualInUnprocessedWindowsRoot()); } @Test public void testTrySplitForProcessOnWindowBoundaryRoundDownOnLastWindow() throws Exception { List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows()); OffsetRangeTracker tracker = new 
OffsetRangeTracker(currentRestriction); tracker.tryClaim(30L); KV<WindowedSplitResult, Integer> result = FnApiDoFnRunner.trySplitForProcess( currentElement, currentRestriction, window2, windows, currentWatermarkEstimatorState, 0.9, tracker, watermarkAndState, 0, 3); assertEquals(2, (int) result.getValue()); KV<WindowedValue, WindowedValue> expectedWindowSplit = createSplitAcrossWindows(ImmutableList.of(window1, window2), ImmutableList.of(window3)); assertNull(result.getKey().getPrimarySplitRoot()); assertNull(result.getKey().getResidualSplitRoot()); assertEquals( expectedWindowSplit.getKey(), result.getKey().getPrimaryInFullyProcessedWindowsRoot()); assertEquals( expectedWindowSplit.getValue(), result.getKey().getResidualInUnprocessedWindowsRoot()); } private HandlesSplits createSplitDelegate( double progress, double expectedFraction, HandlesSplits.SplitResult result) { return new HandlesSplits() { @Override public SplitResult trySplit(double fractionOfRemainder) { checkArgument(fractionOfRemainder == expectedFraction); return result; } @Override public double getProgress() { return progress; } @Override public String getPtranformId() { return "transfrom_id"; } @Override public String getMainInputId() { return "input_id"; } @Override public Collection<String> getOutputIds() { return ImmutableSet.of("output"); } }; } @Test public void testTrySplitForTruncateCheckpointOnFirstWindow() throws Exception { List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows()); SplitResult splitResult = SplitResult.of( ImmutableList.of(BundleApplication.getDefaultInstance()), ImmutableList.of(DelayedBundleApplication.getDefaultInstance())); HandlesSplits splitDelegate = createSplitDelegate(0.3, 0.0, splitResult); KV<KV<WindowedSplitResult, SplitResult>, Integer> result = FnApiDoFnRunner.trySplitForTruncate( currentElement, currentRestriction, window1, windows, currentWatermarkEstimatorState, 0.0, splitDelegate, 0, 3); assertEquals(1, (int) result.getValue()); 
KV<WindowedValue, WindowedValue> expectedWindowSplit = createSplitAcrossWindows(ImmutableList.of(), ImmutableList.of(window2, window3)); assertEquals(splitResult, result.getKey().getValue()); assertEquals( expectedWindowSplit.getKey(), result.getKey().getKey().getPrimaryInFullyProcessedWindowsRoot()); assertEquals( expectedWindowSplit.getValue(), result.getKey().getKey().getResidualInUnprocessedWindowsRoot()); } @Test public void testTrySplitForTruncateCheckpointOnFirstWindowAfterOneSplit() throws Exception { List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows()); SplitResult splitResult = SplitResult.of( ImmutableList.of(BundleApplication.getDefaultInstance()), ImmutableList.of(DelayedBundleApplication.getDefaultInstance())); HandlesSplits splitDelegate = createSplitDelegate(0.3, 0.0, splitResult); KV<KV<WindowedSplitResult, SplitResult>, Integer> result = FnApiDoFnRunner.trySplitForTruncate( currentElement, currentRestriction, window1, windows, currentWatermarkEstimatorState, 0.0, splitDelegate, 0, 2); assertEquals(1, (int) result.getValue()); KV<WindowedValue, WindowedValue> expectedWindowSplit = createSplitAcrossWindows(ImmutableList.of(), ImmutableList.of(window2)); assertEquals(splitResult, result.getKey().getValue()); assertEquals( expectedWindowSplit.getKey(), result.getKey().getKey().getPrimaryInFullyProcessedWindowsRoot()); assertEquals( expectedWindowSplit.getValue(), result.getKey().getKey().getResidualInUnprocessedWindowsRoot()); } @Test public void testTrySplitForTruncateSplitOnFirstWindow() throws Exception { List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows()); SplitResult splitResult = SplitResult.of( ImmutableList.of(BundleApplication.getDefaultInstance()), ImmutableList.of(DelayedBundleApplication.getDefaultInstance())); HandlesSplits splitDelegate = createSplitDelegate(0.3, 0.54, splitResult); KV<KV<WindowedSplitResult, SplitResult>, Integer> result = FnApiDoFnRunner.trySplitForTruncate( 
currentElement, currentRestriction, window1, windows, currentWatermarkEstimatorState, 0.2, splitDelegate, 0, 3); assertEquals(1, (int) result.getValue()); KV<WindowedValue, WindowedValue> expectedWindowSplit = createSplitAcrossWindows(ImmutableList.of(), ImmutableList.of(window2, window3)); assertEquals(splitResult, result.getKey().getValue()); assertEquals( expectedWindowSplit.getKey(), result.getKey().getKey().getPrimaryInFullyProcessedWindowsRoot()); assertEquals( expectedWindowSplit.getValue(), result.getKey().getKey().getResidualInUnprocessedWindowsRoot()); } @Test public void testTrySplitForTruncateSplitOnMiddleWindow() throws Exception { List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows()); SplitResult splitResult = SplitResult.of( ImmutableList.of(BundleApplication.getDefaultInstance()), ImmutableList.of(DelayedBundleApplication.getDefaultInstance())); HandlesSplits splitDelegate = createSplitDelegate(0.3, 0.34, splitResult); KV<KV<WindowedSplitResult, SplitResult>, Integer> result = FnApiDoFnRunner.trySplitForTruncate( currentElement, currentRestriction, window1, windows, currentWatermarkEstimatorState, 0.2, splitDelegate, 1, 3); assertEquals(2, (int) result.getValue()); KV<WindowedValue, WindowedValue> expectedWindowSplit = createSplitAcrossWindows(ImmutableList.of(window1), ImmutableList.of(window3)); assertEquals(splitResult, result.getKey().getValue()); assertEquals( expectedWindowSplit.getKey(), result.getKey().getKey().getPrimaryInFullyProcessedWindowsRoot()); assertEquals( expectedWindowSplit.getValue(), result.getKey().getKey().getResidualInUnprocessedWindowsRoot()); } @Test public void testTrySplitForTruncateSplitOnLastWindow() throws Exception { List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows()); SplitResult splitResult = SplitResult.of( ImmutableList.of(BundleApplication.getDefaultInstance()), ImmutableList.of(DelayedBundleApplication.getDefaultInstance())); HandlesSplits splitDelegate = 
createSplitDelegate(0.3, 0.2, splitResult); KV<KV<WindowedSplitResult, SplitResult>, Integer> result = FnApiDoFnRunner.trySplitForTruncate( currentElement, currentRestriction, window1, windows, currentWatermarkEstimatorState, 0.2, splitDelegate, 2, 3); assertEquals(3, (int) result.getValue()); KV<WindowedValue, WindowedValue> expectedWindowSplit = createSplitAcrossWindows(ImmutableList.of(window1, window2), ImmutableList.of()); assertEquals(splitResult, result.getKey().getValue()); assertEquals( expectedWindowSplit.getKey(), result.getKey().getKey().getPrimaryInFullyProcessedWindowsRoot()); assertEquals( expectedWindowSplit.getValue(), result.getKey().getKey().getResidualInUnprocessedWindowsRoot()); } @Test public void testTrySplitForTruncateSplitOnFirstWindowFallback() throws Exception { List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows()); SplitResult unusedSplitResult = SplitResult.of( ImmutableList.of(BundleApplication.getDefaultInstance()), ImmutableList.of(DelayedBundleApplication.getDefaultInstance())); HandlesSplits splitDelegate = createSplitDelegate(1.0, 0.0, unusedSplitResult); KV<KV<WindowedSplitResult, SplitResult>, Integer> result = FnApiDoFnRunner.trySplitForTruncate( currentElement, currentRestriction, window1, windows, currentWatermarkEstimatorState, 0.0, splitDelegate, 0, 3); assertEquals(1, (int) result.getValue()); KV<WindowedValue, WindowedValue> expectedWindowSplit = createSplitAcrossWindows(ImmutableList.of(window1), ImmutableList.of(window2, window3)); assertNull(result.getKey().getValue()); assertEquals( expectedWindowSplit.getKey(), result.getKey().getKey().getPrimaryInFullyProcessedWindowsRoot()); assertEquals( expectedWindowSplit.getValue(), result.getKey().getKey().getResidualInUnprocessedWindowsRoot()); } @Test public void testTrySplitForTruncateSplitOnLastWindowWhenNoElementSplit() throws Exception { List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows()); HandlesSplits splitDelegate 
= createSplitDelegate(1.0, 0.0, null); KV<KV<WindowedSplitResult, SplitResult>, Integer> result = FnApiDoFnRunner.trySplitForTruncate( currentElement, currentRestriction, window1, windows, currentWatermarkEstimatorState, 0.0, splitDelegate, 2, 3); assertNull(result); } @Test public void testTrySplitForTruncateOnWindowBoundaryRoundUp() throws Exception { List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows()); SplitResult unusedSplitResult = SplitResult.of( ImmutableList.of(BundleApplication.getDefaultInstance()), ImmutableList.of(DelayedBundleApplication.getDefaultInstance())); HandlesSplits splitDelegate = createSplitDelegate(0.3, 0.0, unusedSplitResult); KV<KV<WindowedSplitResult, SplitResult>, Integer> result = FnApiDoFnRunner.trySplitForTruncate( currentElement, currentRestriction, window1, windows, currentWatermarkEstimatorState, 0.6, splitDelegate, 0, 3); assertEquals(2, (int) result.getValue()); KV<WindowedValue, WindowedValue> expectedWindowSplit = createSplitAcrossWindows(ImmutableList.of(window1, window2), ImmutableList.of(window3)); assertNull(result.getKey().getValue()); assertEquals( expectedWindowSplit.getKey(), result.getKey().getKey().getPrimaryInFullyProcessedWindowsRoot()); assertEquals( expectedWindowSplit.getValue(), result.getKey().getKey().getResidualInUnprocessedWindowsRoot()); } @Test public void testTrySplitForTruncateOnWindowBoundaryRoundDown() throws Exception { List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows()); SplitResult unusedSplitResult = SplitResult.of( ImmutableList.of(BundleApplication.getDefaultInstance()), ImmutableList.of(DelayedBundleApplication.getDefaultInstance())); HandlesSplits splitDelegate = createSplitDelegate(0.3, 0.0, unusedSplitResult); KV<KV<WindowedSplitResult, SplitResult>, Integer> result = FnApiDoFnRunner.trySplitForTruncate( currentElement, currentRestriction, window1, windows, currentWatermarkEstimatorState, 0.3, splitDelegate, 0, 3); assertEquals(1, (int) 
result.getValue()); KV<WindowedValue, WindowedValue> expectedWindowSplit = createSplitAcrossWindows(ImmutableList.of(window1), ImmutableList.of(window2, window3)); assertNull(result.getKey().getValue()); assertEquals( expectedWindowSplit.getKey(), result.getKey().getKey().getPrimaryInFullyProcessedWindowsRoot()); assertEquals( expectedWindowSplit.getValue(), result.getKey().getKey().getResidualInUnprocessedWindowsRoot()); } @Test public void testTrySplitForTruncateOnWindowBoundaryRoundDownOnLastWindow() throws Exception { List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows()); SplitResult unusedSplitResult = SplitResult.of( ImmutableList.of(BundleApplication.getDefaultInstance()), ImmutableList.of(DelayedBundleApplication.getDefaultInstance())); HandlesSplits splitDelegate = createSplitDelegate(0.3, 0.0, unusedSplitResult); KV<KV<WindowedSplitResult, SplitResult>, Integer> result = FnApiDoFnRunner.trySplitForTruncate( currentElement, currentRestriction, window1, windows, currentWatermarkEstimatorState, 0.6, splitDelegate, 0, 3); assertEquals(2, (int) result.getValue()); KV<WindowedValue, WindowedValue> expectedWindowSplit = createSplitAcrossWindows(ImmutableList.of(window1, window2), ImmutableList.of(window3)); assertNull(result.getKey().getValue()); assertEquals( expectedWindowSplit.getKey(), result.getKey().getKey().getPrimaryInFullyProcessedWindowsRoot()); assertEquals( expectedWindowSplit.getValue(), result.getKey().getKey().getResidualInUnprocessedWindowsRoot()); } }
class SplitTest { private IntervalWindow window1; private IntervalWindow window2; private IntervalWindow window3; private WindowedValue<String> currentElement; private OffsetRange currentRestriction; private Instant currentWatermarkEstimatorState; KV<Instant, Instant> watermarkAndState; private KV<WindowedValue, WindowedValue> createSplitInWindow( OffsetRange primaryRestriction, OffsetRange residualRestriction, BoundedWindow window) { return KV.of( WindowedValue.of( KV.of( currentElement.getValue(), KV.of(primaryRestriction, currentWatermarkEstimatorState)), currentElement.getTimestamp(), window, currentElement.getPane()), WindowedValue.of( KV.of( currentElement.getValue(), KV.of(residualRestriction, watermarkAndState.getValue())), currentElement.getTimestamp(), window, currentElement.getPane())); } private KV<WindowedValue, WindowedValue> createSplitAcrossWindows( List<BoundedWindow> primaryWindows, List<BoundedWindow> residualWindows) { return KV.of( primaryWindows.isEmpty() ? null : WindowedValue.of( KV.of( currentElement.getValue(), KV.of(currentRestriction, currentWatermarkEstimatorState)), currentElement.getTimestamp(), primaryWindows, currentElement.getPane()), residualWindows.isEmpty() ? 
null : WindowedValue.of( KV.of( currentElement.getValue(), KV.of(currentRestriction, currentWatermarkEstimatorState)), currentElement.getTimestamp(), residualWindows, currentElement.getPane())); } @Before public void setUp() { window1 = new IntervalWindow(Instant.ofEpochMilli(0), Instant.ofEpochMilli(10)); window2 = new IntervalWindow(Instant.ofEpochMilli(10), Instant.ofEpochMilli(20)); window3 = new IntervalWindow(Instant.ofEpochMilli(20), Instant.ofEpochMilli(30)); currentElement = WindowedValue.of( "a", Instant.ofEpochMilli(57), ImmutableList.of(window1, window2, window3), PaneInfo.NO_FIRING); currentRestriction = new OffsetRange(0L, 100L); currentWatermarkEstimatorState = Instant.ofEpochMilli(21); watermarkAndState = KV.of(Instant.ofEpochMilli(42), Instant.ofEpochMilli(42)); } @Test public void testScaleProgress() throws Exception { Progress elementProgress = Progress.from(2, 8); Progress scaledResult = FnApiDoFnRunner.scaleProgress(elementProgress, 0, 1); assertEquals(2, scaledResult.getWorkCompleted(), 0.0); assertEquals(8, scaledResult.getWorkRemaining(), 0.0); scaledResult = FnApiDoFnRunner.scaleProgress(elementProgress, 0, 3); assertEquals(2, scaledResult.getWorkCompleted(), 0.0); assertEquals(28, scaledResult.getWorkRemaining(), 0.0); scaledResult = FnApiDoFnRunner.scaleProgress(elementProgress, 1, 3); assertEquals(12, scaledResult.getWorkCompleted(), 0.0); assertEquals(18, scaledResult.getWorkRemaining(), 0.0); scaledResult = FnApiDoFnRunner.scaleProgress(elementProgress, 2, 3); assertEquals(22, scaledResult.getWorkCompleted(), 0.0); assertEquals(8, scaledResult.getWorkRemaining(), 0.0); } @Test public void testTrySplitForProcessCheckpointOnFirstWindow() throws Exception { List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows()); OffsetRangeTracker tracker = new OffsetRangeTracker(currentRestriction); tracker.tryClaim(30L); KV<WindowedSplitResult, Integer> result = FnApiDoFnRunner.trySplitForProcess( currentElement, 
currentRestriction, window1, windows, currentWatermarkEstimatorState, 0.0, tracker, watermarkAndState, 0, 3); assertEquals(1, (int) result.getValue()); KV<WindowedValue, WindowedValue> expectedElementSplit = createSplitInWindow(new OffsetRange(0, 31), new OffsetRange(31, 100), window1); KV<WindowedValue, WindowedValue> expectedWindowSplit = createSplitAcrossWindows(ImmutableList.of(), ImmutableList.of(window2, window3)); assertEquals(expectedElementSplit.getKey(), result.getKey().getPrimarySplitRoot()); assertEquals(expectedElementSplit.getValue(), result.getKey().getResidualSplitRoot()); assertEquals( expectedWindowSplit.getKey(), result.getKey().getPrimaryInFullyProcessedWindowsRoot()); assertEquals( expectedWindowSplit.getValue(), result.getKey().getResidualInUnprocessedWindowsRoot()); } @Test public void testTrySplitForProcessCheckpointOnFirstWindowAfterOneSplit() throws Exception { List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows()); OffsetRangeTracker tracker = new OffsetRangeTracker(currentRestriction); tracker.tryClaim(30L); KV<WindowedSplitResult, Integer> result = FnApiDoFnRunner.trySplitForProcess( currentElement, currentRestriction, window1, windows, currentWatermarkEstimatorState, 0.0, tracker, watermarkAndState, 0, 2); assertEquals(1, (int) result.getValue()); KV<WindowedValue, WindowedValue> expectedElementSplit = createSplitInWindow(new OffsetRange(0, 31), new OffsetRange(31, 100), window1); KV<WindowedValue, WindowedValue> expectedWindowSplit = createSplitAcrossWindows(ImmutableList.of(), ImmutableList.of(window2)); assertEquals(expectedElementSplit.getKey(), result.getKey().getPrimarySplitRoot()); assertEquals(expectedElementSplit.getValue(), result.getKey().getResidualSplitRoot()); assertEquals( expectedWindowSplit.getKey(), result.getKey().getPrimaryInFullyProcessedWindowsRoot()); assertEquals( expectedWindowSplit.getValue(), result.getKey().getResidualInUnprocessedWindowsRoot()); } @Test public void 
testTrySplitForProcessSplitOnFirstWindow() throws Exception { List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows()); OffsetRangeTracker tracker = new OffsetRangeTracker(currentRestriction); tracker.tryClaim(30L); KV<WindowedSplitResult, Integer> result = FnApiDoFnRunner.trySplitForProcess( currentElement, currentRestriction, window1, windows, currentWatermarkEstimatorState, 0.2, tracker, watermarkAndState, 0, 3); assertEquals(1, (int) result.getValue()); KV<WindowedValue, WindowedValue> expectedElementSplit = createSplitInWindow(new OffsetRange(0, 84), new OffsetRange(84, 100), window1); KV<WindowedValue, WindowedValue> expectedWindowSplit = createSplitAcrossWindows(ImmutableList.of(), ImmutableList.of(window2, window3)); assertEquals(expectedElementSplit.getKey(), result.getKey().getPrimarySplitRoot()); assertEquals(expectedElementSplit.getValue(), result.getKey().getResidualSplitRoot()); assertEquals( expectedWindowSplit.getKey(), result.getKey().getPrimaryInFullyProcessedWindowsRoot()); assertEquals( expectedWindowSplit.getValue(), result.getKey().getResidualInUnprocessedWindowsRoot()); } @Test @Test public void testTrySplitForProcessSplitOnLastWindow() throws Exception { List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows()); OffsetRangeTracker tracker = new OffsetRangeTracker(currentRestriction); tracker.tryClaim(30L); KV<WindowedSplitResult, Integer> result = FnApiDoFnRunner.trySplitForProcess( currentElement, currentRestriction, window3, windows, currentWatermarkEstimatorState, 0.2, tracker, watermarkAndState, 2, 3); assertEquals(3, (int) result.getValue()); KV<WindowedValue, WindowedValue> expectedElementSplit = createSplitInWindow(new OffsetRange(0, 44), new OffsetRange(44, 100), window3); KV<WindowedValue, WindowedValue> expectedWindowSplit = createSplitAcrossWindows(ImmutableList.of(window1, window2), ImmutableList.of()); assertEquals(expectedElementSplit.getKey(), result.getKey().getPrimarySplitRoot()); 
assertEquals(expectedElementSplit.getValue(), result.getKey().getResidualSplitRoot()); assertEquals( expectedWindowSplit.getKey(), result.getKey().getPrimaryInFullyProcessedWindowsRoot()); assertEquals( expectedWindowSplit.getValue(), result.getKey().getResidualInUnprocessedWindowsRoot()); } @Test public void testTrySplitForProcessSplitOnFirstWindowFallback() throws Exception { List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows()); OffsetRangeTracker tracker = new OffsetRangeTracker(currentRestriction); tracker.tryClaim(100L); assertNull(tracker.trySplit(0.0)); KV<WindowedSplitResult, Integer> result = FnApiDoFnRunner.trySplitForProcess( currentElement, currentRestriction, window3, windows, currentWatermarkEstimatorState, 0, tracker, watermarkAndState, 0, 3); assertEquals(1, (int) result.getValue()); KV<WindowedValue, WindowedValue> expectedWindowSplit = createSplitAcrossWindows(ImmutableList.of(window1), ImmutableList.of(window2, window3)); assertNull(result.getKey().getPrimarySplitRoot()); assertNull(result.getKey().getResidualSplitRoot()); assertEquals( expectedWindowSplit.getKey(), result.getKey().getPrimaryInFullyProcessedWindowsRoot()); assertEquals( expectedWindowSplit.getValue(), result.getKey().getResidualInUnprocessedWindowsRoot()); } @Test public void testTrySplitForProcessSplitOnLastWindowWhenNoElementSplit() throws Exception { List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows()); OffsetRangeTracker tracker = new OffsetRangeTracker(currentRestriction); tracker.tryClaim(100L); assertNull(tracker.trySplit(0.0)); KV<WindowedSplitResult, Integer> result = FnApiDoFnRunner.trySplitForProcess( currentElement, currentRestriction, window3, windows, currentWatermarkEstimatorState, 0, tracker, watermarkAndState, 2, 3); assertNull(result); } @Test public void testTrySplitForProcessOnWindowBoundaryRoundUp() throws Exception { List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows()); 
OffsetRangeTracker tracker = new OffsetRangeTracker(currentRestriction); tracker.tryClaim(30L); KV<WindowedSplitResult, Integer> result = FnApiDoFnRunner.trySplitForProcess( currentElement, currentRestriction, window2, windows, currentWatermarkEstimatorState, 0.6, tracker, watermarkAndState, 0, 3); assertEquals(2, (int) result.getValue()); KV<WindowedValue, WindowedValue> expectedWindowSplit = createSplitAcrossWindows(ImmutableList.of(window1, window2), ImmutableList.of(window3)); assertNull(result.getKey().getPrimarySplitRoot()); assertNull(result.getKey().getResidualSplitRoot()); assertEquals( expectedWindowSplit.getKey(), result.getKey().getPrimaryInFullyProcessedWindowsRoot()); assertEquals( expectedWindowSplit.getValue(), result.getKey().getResidualInUnprocessedWindowsRoot()); } @Test public void testTrySplitForProcessOnWindowBoundaryRoundDown() throws Exception { List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows()); OffsetRangeTracker tracker = new OffsetRangeTracker(currentRestriction); tracker.tryClaim(30L); KV<WindowedSplitResult, Integer> result = FnApiDoFnRunner.trySplitForProcess( currentElement, currentRestriction, window2, windows, currentWatermarkEstimatorState, 0.3, tracker, watermarkAndState, 0, 3); assertEquals(1, (int) result.getValue()); KV<WindowedValue, WindowedValue> expectedWindowSplit = createSplitAcrossWindows(ImmutableList.of(window1), ImmutableList.of(window2, window3)); assertNull(result.getKey().getPrimarySplitRoot()); assertNull(result.getKey().getResidualSplitRoot()); assertEquals( expectedWindowSplit.getKey(), result.getKey().getPrimaryInFullyProcessedWindowsRoot()); assertEquals( expectedWindowSplit.getValue(), result.getKey().getResidualInUnprocessedWindowsRoot()); } @Test public void testTrySplitForProcessOnWindowBoundaryRoundDownOnLastWindow() throws Exception { List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows()); OffsetRangeTracker tracker = new 
OffsetRangeTracker(currentRestriction); tracker.tryClaim(30L); KV<WindowedSplitResult, Integer> result = FnApiDoFnRunner.trySplitForProcess( currentElement, currentRestriction, window2, windows, currentWatermarkEstimatorState, 0.9, tracker, watermarkAndState, 0, 3); assertEquals(2, (int) result.getValue()); KV<WindowedValue, WindowedValue> expectedWindowSplit = createSplitAcrossWindows(ImmutableList.of(window1, window2), ImmutableList.of(window3)); assertNull(result.getKey().getPrimarySplitRoot()); assertNull(result.getKey().getResidualSplitRoot()); assertEquals( expectedWindowSplit.getKey(), result.getKey().getPrimaryInFullyProcessedWindowsRoot()); assertEquals( expectedWindowSplit.getValue(), result.getKey().getResidualInUnprocessedWindowsRoot()); } private HandlesSplits createSplitDelegate( double progress, double expectedFraction, HandlesSplits.SplitResult result) { return new HandlesSplits() { @Override public SplitResult trySplit(double fractionOfRemainder) { checkArgument(fractionOfRemainder == expectedFraction); return result; } @Override public double getProgress() { return progress; } }; } @Test public void testTrySplitForTruncateCheckpointOnFirstWindow() throws Exception { List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows()); SplitResult splitResult = SplitResult.of( ImmutableList.of(BundleApplication.getDefaultInstance()), ImmutableList.of(DelayedBundleApplication.getDefaultInstance())); HandlesSplits splitDelegate = createSplitDelegate(0.3, 0.0, splitResult); KV<KV<WindowedSplitResult, SplitResult>, Integer> result = FnApiDoFnRunner.trySplitForTruncate( currentElement, currentRestriction, window1, windows, currentWatermarkEstimatorState, 0.0, splitDelegate, 0, 3); assertEquals(1, (int) result.getValue()); KV<WindowedValue, WindowedValue> expectedWindowSplit = createSplitAcrossWindows(ImmutableList.of(), ImmutableList.of(window2, window3)); assertEquals(splitResult, result.getKey().getValue()); assertEquals( 
expectedWindowSplit.getKey(), result.getKey().getKey().getPrimaryInFullyProcessedWindowsRoot()); assertEquals( expectedWindowSplit.getValue(), result.getKey().getKey().getResidualInUnprocessedWindowsRoot()); } @Test public void testTrySplitForTruncateCheckpointOnFirstWindowAfterOneSplit() throws Exception { List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows()); SplitResult splitResult = SplitResult.of( ImmutableList.of(BundleApplication.getDefaultInstance()), ImmutableList.of(DelayedBundleApplication.getDefaultInstance())); HandlesSplits splitDelegate = createSplitDelegate(0.3, 0.0, splitResult); KV<KV<WindowedSplitResult, SplitResult>, Integer> result = FnApiDoFnRunner.trySplitForTruncate( currentElement, currentRestriction, window1, windows, currentWatermarkEstimatorState, 0.0, splitDelegate, 0, 2); assertEquals(1, (int) result.getValue()); KV<WindowedValue, WindowedValue> expectedWindowSplit = createSplitAcrossWindows(ImmutableList.of(), ImmutableList.of(window2)); assertEquals(splitResult, result.getKey().getValue()); assertEquals( expectedWindowSplit.getKey(), result.getKey().getKey().getPrimaryInFullyProcessedWindowsRoot()); assertEquals( expectedWindowSplit.getValue(), result.getKey().getKey().getResidualInUnprocessedWindowsRoot()); } @Test public void testTrySplitForTruncateSplitOnFirstWindow() throws Exception { List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows()); SplitResult splitResult = SplitResult.of( ImmutableList.of(BundleApplication.getDefaultInstance()), ImmutableList.of(DelayedBundleApplication.getDefaultInstance())); HandlesSplits splitDelegate = createSplitDelegate(0.3, 0.54, splitResult); KV<KV<WindowedSplitResult, SplitResult>, Integer> result = FnApiDoFnRunner.trySplitForTruncate( currentElement, currentRestriction, window1, windows, currentWatermarkEstimatorState, 0.2, splitDelegate, 0, 3); assertEquals(1, (int) result.getValue()); KV<WindowedValue, WindowedValue> expectedWindowSplit = 
createSplitAcrossWindows(ImmutableList.of(), ImmutableList.of(window2, window3)); assertEquals(splitResult, result.getKey().getValue()); assertEquals( expectedWindowSplit.getKey(), result.getKey().getKey().getPrimaryInFullyProcessedWindowsRoot()); assertEquals( expectedWindowSplit.getValue(), result.getKey().getKey().getResidualInUnprocessedWindowsRoot()); } @Test public void testTrySplitForTruncateSplitOnMiddleWindow() throws Exception { List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows()); SplitResult splitResult = SplitResult.of( ImmutableList.of(BundleApplication.getDefaultInstance()), ImmutableList.of(DelayedBundleApplication.getDefaultInstance())); HandlesSplits splitDelegate = createSplitDelegate(0.3, 0.34, splitResult); KV<KV<WindowedSplitResult, SplitResult>, Integer> result = FnApiDoFnRunner.trySplitForTruncate( currentElement, currentRestriction, window1, windows, currentWatermarkEstimatorState, 0.2, splitDelegate, 1, 3); assertEquals(2, (int) result.getValue()); KV<WindowedValue, WindowedValue> expectedWindowSplit = createSplitAcrossWindows(ImmutableList.of(window1), ImmutableList.of(window3)); assertEquals(splitResult, result.getKey().getValue()); assertEquals( expectedWindowSplit.getKey(), result.getKey().getKey().getPrimaryInFullyProcessedWindowsRoot()); assertEquals( expectedWindowSplit.getValue(), result.getKey().getKey().getResidualInUnprocessedWindowsRoot()); } @Test public void testTrySplitForTruncateSplitOnLastWindow() throws Exception { List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows()); SplitResult splitResult = SplitResult.of( ImmutableList.of(BundleApplication.getDefaultInstance()), ImmutableList.of(DelayedBundleApplication.getDefaultInstance())); HandlesSplits splitDelegate = createSplitDelegate(0.3, 0.2, splitResult); KV<KV<WindowedSplitResult, SplitResult>, Integer> result = FnApiDoFnRunner.trySplitForTruncate( currentElement, currentRestriction, window1, windows, 
currentWatermarkEstimatorState, 0.2, splitDelegate, 2, 3); assertEquals(3, (int) result.getValue()); KV<WindowedValue, WindowedValue> expectedWindowSplit = createSplitAcrossWindows(ImmutableList.of(window1, window2), ImmutableList.of()); assertEquals(splitResult, result.getKey().getValue()); assertEquals( expectedWindowSplit.getKey(), result.getKey().getKey().getPrimaryInFullyProcessedWindowsRoot()); assertEquals( expectedWindowSplit.getValue(), result.getKey().getKey().getResidualInUnprocessedWindowsRoot()); } @Test public void testTrySplitForTruncateSplitOnFirstWindowFallback() throws Exception { List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows()); SplitResult unusedSplitResult = SplitResult.of( ImmutableList.of(BundleApplication.getDefaultInstance()), ImmutableList.of(DelayedBundleApplication.getDefaultInstance())); HandlesSplits splitDelegate = createSplitDelegate(1.0, 0.0, unusedSplitResult); KV<KV<WindowedSplitResult, SplitResult>, Integer> result = FnApiDoFnRunner.trySplitForTruncate( currentElement, currentRestriction, window1, windows, currentWatermarkEstimatorState, 0.0, splitDelegate, 0, 3); assertEquals(1, (int) result.getValue()); KV<WindowedValue, WindowedValue> expectedWindowSplit = createSplitAcrossWindows(ImmutableList.of(window1), ImmutableList.of(window2, window3)); assertNull(result.getKey().getValue()); assertEquals( expectedWindowSplit.getKey(), result.getKey().getKey().getPrimaryInFullyProcessedWindowsRoot()); assertEquals( expectedWindowSplit.getValue(), result.getKey().getKey().getResidualInUnprocessedWindowsRoot()); } @Test public void testTrySplitForTruncateSplitOnLastWindowWhenNoElementSplit() throws Exception { List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows()); HandlesSplits splitDelegate = createSplitDelegate(1.0, 0.0, null); KV<KV<WindowedSplitResult, SplitResult>, Integer> result = FnApiDoFnRunner.trySplitForTruncate( currentElement, currentRestriction, window1, windows, 
currentWatermarkEstimatorState, 0.0, splitDelegate, 2, 3); assertNull(result); } @Test public void testTrySplitForTruncateOnWindowBoundaryRoundUp() throws Exception { List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows()); SplitResult unusedSplitResult = SplitResult.of( ImmutableList.of(BundleApplication.getDefaultInstance()), ImmutableList.of(DelayedBundleApplication.getDefaultInstance())); HandlesSplits splitDelegate = createSplitDelegate(0.3, 0.0, unusedSplitResult); KV<KV<WindowedSplitResult, SplitResult>, Integer> result = FnApiDoFnRunner.trySplitForTruncate( currentElement, currentRestriction, window1, windows, currentWatermarkEstimatorState, 0.6, splitDelegate, 0, 3); assertEquals(2, (int) result.getValue()); KV<WindowedValue, WindowedValue> expectedWindowSplit = createSplitAcrossWindows(ImmutableList.of(window1, window2), ImmutableList.of(window3)); assertNull(result.getKey().getValue()); assertEquals( expectedWindowSplit.getKey(), result.getKey().getKey().getPrimaryInFullyProcessedWindowsRoot()); assertEquals( expectedWindowSplit.getValue(), result.getKey().getKey().getResidualInUnprocessedWindowsRoot()); } @Test public void testTrySplitForTruncateOnWindowBoundaryRoundDown() throws Exception { List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows()); SplitResult unusedSplitResult = SplitResult.of( ImmutableList.of(BundleApplication.getDefaultInstance()), ImmutableList.of(DelayedBundleApplication.getDefaultInstance())); HandlesSplits splitDelegate = createSplitDelegate(0.3, 0.0, unusedSplitResult); KV<KV<WindowedSplitResult, SplitResult>, Integer> result = FnApiDoFnRunner.trySplitForTruncate( currentElement, currentRestriction, window1, windows, currentWatermarkEstimatorState, 0.3, splitDelegate, 0, 3); assertEquals(1, (int) result.getValue()); KV<WindowedValue, WindowedValue> expectedWindowSplit = createSplitAcrossWindows(ImmutableList.of(window1), ImmutableList.of(window2, window3)); 
assertNull(result.getKey().getValue()); assertEquals( expectedWindowSplit.getKey(), result.getKey().getKey().getPrimaryInFullyProcessedWindowsRoot()); assertEquals( expectedWindowSplit.getValue(), result.getKey().getKey().getResidualInUnprocessedWindowsRoot()); } @Test public void testTrySplitForTruncateOnWindowBoundaryRoundDownOnLastWindow() throws Exception { List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows()); SplitResult unusedSplitResult = SplitResult.of( ImmutableList.of(BundleApplication.getDefaultInstance()), ImmutableList.of(DelayedBundleApplication.getDefaultInstance())); HandlesSplits splitDelegate = createSplitDelegate(0.3, 0.0, unusedSplitResult); KV<KV<WindowedSplitResult, SplitResult>, Integer> result = FnApiDoFnRunner.trySplitForTruncate( currentElement, currentRestriction, window1, windows, currentWatermarkEstimatorState, 0.6, splitDelegate, 0, 3); assertEquals(2, (int) result.getValue()); KV<WindowedValue, WindowedValue> expectedWindowSplit = createSplitAcrossWindows(ImmutableList.of(window1, window2), ImmutableList.of(window3)); assertNull(result.getKey().getValue()); assertEquals( expectedWindowSplit.getKey(), result.getKey().getKey().getPrimaryInFullyProcessedWindowsRoot()); assertEquals( expectedWindowSplit.getValue(), result.getKey().getKey().getResidualInUnprocessedWindowsRoot()); } }
I'm not 100% why the requirements are being documented in this log.
public void onNext(T value) { numberOfMessagesBeforeReadyCheck += 1; if (numberOfMessagesBeforeReadyCheck >= maxMessagesBeforeCheck) { numberOfMessagesBeforeReadyCheck = 0; int waitTime = 1; int totalTimeWaited = 0; int phase = phaser.getPhase(); while (!outboundObserver.isReady()) { try { phaser.awaitAdvanceInterruptibly(phase, waitTime, TimeUnit.SECONDS); } catch (TimeoutException e) { totalTimeWaited += waitTime; waitTime = waitTime * 2; } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw new RuntimeException(e); } } if (totalTimeWaited > 0) { if (phase == phaser.getPhase()) { LOGGER.info( "Output channel stalled for {}s, outbound thread {}. gRPC requires that outbound " + "channel be on a non gRPC inbound channel thread for the channel readiness " + "callback to function.", totalTimeWaited, Thread.currentThread().getName()); } else { LOGGER.debug( "Output channel stalled for {}s, outbound thread {}.", totalTimeWaited, Thread.currentThread().getName()); } } } synchronized (outboundObserver) { outboundObserver.onNext(value); } }
"Output channel stalled for {}s, outbound thread {}. gRPC requires that outbound "
public void onNext(T value) { numberOfMessagesBeforeReadyCheck += 1; if (numberOfMessagesBeforeReadyCheck >= maxMessagesBeforeCheck) { numberOfMessagesBeforeReadyCheck = 0; int waitTime = 1; int totalTimeWaited = 0; int phase = phaser.getPhase(); while (!outboundObserver.isReady()) { try { phaser.awaitAdvanceInterruptibly(phase, waitTime, TimeUnit.SECONDS); } catch (TimeoutException e) { totalTimeWaited += waitTime; waitTime = waitTime * 2; } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw new RuntimeException(e); } } if (totalTimeWaited > 0) { if (phase == phaser.getPhase()) { LOGGER.info( "Output channel stalled for {}s, outbound thread {}. See: " + "https: + "this issue.", totalTimeWaited, Thread.currentThread().getName()); } else { LOGGER.debug( "Output channel stalled for {}s, outbound thread {}.", totalTimeWaited, Thread.currentThread().getName()); } } } synchronized (outboundObserver) { outboundObserver.onNext(value); } }
class DirectStreamObserver<T> implements StreamObserver<T> { private static final Logger LOGGER = LoggerFactory.getLogger(DirectStreamObserver.class); private static final int DEFAULT_MAX_MESSAGES_BEFORE_CHECK = 100; private final Phaser phaser; private final CallStreamObserver<T> outboundObserver; private final int maxMessagesBeforeCheck; private int numberOfMessagesBeforeReadyCheck; public DirectStreamObserver( Phaser phaser, CallStreamObserver<T> outboundObserver) { this(phaser, outboundObserver, DEFAULT_MAX_MESSAGES_BEFORE_CHECK); } DirectStreamObserver( Phaser phaser, CallStreamObserver<T> outboundObserver, int maxMessagesBeforeCheck) { this.phaser = phaser; this.outboundObserver = outboundObserver; this.maxMessagesBeforeCheck = maxMessagesBeforeCheck; } @Override @Override public void onError(Throwable t) { synchronized (outboundObserver) { outboundObserver.onError(t); } } @Override public void onCompleted() { synchronized (outboundObserver) { outboundObserver.onCompleted(); } } }
class DirectStreamObserver<T> implements StreamObserver<T> { private static final Logger LOGGER = LoggerFactory.getLogger(DirectStreamObserver.class); private static final int DEFAULT_MAX_MESSAGES_BEFORE_CHECK = 100; private final Phaser phaser; private final CallStreamObserver<T> outboundObserver; private final int maxMessagesBeforeCheck; private int numberOfMessagesBeforeReadyCheck; public DirectStreamObserver( Phaser phaser, CallStreamObserver<T> outboundObserver) { this(phaser, outboundObserver, DEFAULT_MAX_MESSAGES_BEFORE_CHECK); } DirectStreamObserver( Phaser phaser, CallStreamObserver<T> outboundObserver, int maxMessagesBeforeCheck) { this.phaser = phaser; this.outboundObserver = outboundObserver; this.maxMessagesBeforeCheck = maxMessagesBeforeCheck; } @Override @Override public void onError(Throwable t) { synchronized (outboundObserver) { outboundObserver.onError(t); } } @Override public void onCompleted() { synchronized (outboundObserver) { outboundObserver.onCompleted(); } } }
Consider rewriting this using `Files` instead of `/bin/ln`
private void createSymlinkToCurrentFile() { if (symlinkName == null) return; File f = new File(fileName); File f2 = new File(f.getParent(), symlinkName); String[] cmd = new String[]{"/bin/ln", "-sf", f.getName(), f2.getPath()}; try { int retval = new ProcessExecuter().exec(cmd).getFirst(); if (retval != 0) { logger.warning("Command '" + Arrays.toString(cmd) + "' + failed with exitcode=" + retval); } } catch (IOException e) { logger.warning("Got '" + e + "' while doing'" + Arrays.toString(cmd) + "'."); } }
int retval = new ProcessExecuter().exec(cmd).getFirst();
private void createSymlinkToCurrentFile() { if (symlinkName == null) return; File f = new File(fileName); File f2 = new File(f.getParent(), symlinkName); String[] cmd = new String[]{"/bin/ln", "-sf", f.getName(), f2.getPath()}; try { int retval = new ProcessExecuter().exec(cmd).getFirst(); if (retval != 0) { logger.warning("Command '" + Arrays.toString(cmd) + "' + failed with exitcode=" + retval); } } catch (IOException e) { logger.warning("Got '" + e + "' while doing'" + Arrays.toString(cmd) + "'."); } }
class LogThread<LOGTYPE> extends Thread { long lastFlush = 0; private FileOutputStream currentOutputStream = null; private long nextRotationTime = 0; private final String filePattern; private String fileName; private long lastDropPosition = 0; private final LogWriter<LOGTYPE> logWriter; private final ArrayBlockingQueue<Operation<LOGTYPE>> logQueue = new ArrayBlockingQueue<>(100000); private final Compression compression; private final long[] rotationTimes; private final String symlinkName; private final ExecutorService executor = Executors.newCachedThreadPool(ThreadFactoryFactory.getDaemonThreadFactory("logfilehandler.compression")); private final NativeIO nativeIO = new NativeIO(); LogThread(LogWriter<LOGTYPE> logWriter, String filePattern, Compression compression, long[] rotationTimes, String symlinkName) { super("Logger"); setDaemon(true); this.logWriter = logWriter; this.filePattern = filePattern; this.compression = compression; this.rotationTimes = rotationTimes; this.symlinkName = (symlinkName != null && !symlinkName.isBlank()) ? 
symlinkName : null; } @Override public void run() { try { storeLogRecords(); } catch (InterruptedException e) { } catch (Exception e) { com.yahoo.protect.Process.logAndDie("Failed storing log records", e); } internalFlush(); } private void storeLogRecords() throws InterruptedException { while (!isInterrupted()) { Operation<LOGTYPE> r = logQueue.poll(100, TimeUnit.MILLISECONDS); if (r != null) { if (r.type == Operation.Type.flush) { internalFlush(); } else if (r.type == Operation.Type.close) { internalClose(); } else if (r.type == Operation.Type.rotate) { internalRotateNow(); lastFlush = System.nanoTime(); } else if (r.type == Operation.Type.log) { internalPublish(r.log.get()); flushIfOld(3, TimeUnit.SECONDS); } r.countDownLatch.countDown(); } else { flushIfOld(100, TimeUnit.MILLISECONDS); } } } private void flushIfOld(long age, TimeUnit unit) { long now = System.nanoTime(); if (TimeUnit.NANOSECONDS.toMillis(now - lastFlush) > unit.toMillis(age)) { internalFlush(); lastFlush = now; } } private synchronized void internalFlush() { try { FileOutputStream currentOut = this.currentOutputStream; if (currentOut != null) { if (compression == Compression.GZIP) { long newPos = currentOut.getChannel().position(); if (newPos > lastDropPosition + 102400) { nativeIO.dropPartialFileFromCache(currentOut.getFD(), lastDropPosition, newPos, true); lastDropPosition = newPos; } } else { currentOut.flush(); } } } catch (IOException e) { logger.warning("Failed dropping from cache : " + Exceptions.toMessageString(e)); } } private void internalClose() { try { internalFlush(); FileOutputStream currentOut = this.currentOutputStream; if (currentOut != null) currentOut.close(); } catch (Exception e) { logger.log(Level.WARNING, "Got error while closing log file", e); } } private void internalPublish(LOGTYPE r) { long now = System.currentTimeMillis(); if (nextRotationTime <= 0) { nextRotationTime = getNextRotationTime(now); } if (now > nextRotationTime || currentOutputStream == null) { 
internalRotateNow(); } try { FileOutputStream out = this.currentOutputStream; logWriter.write(r, out); out.write('\n'); } catch (IOException e) { logger.warning("Failed writing log record: " + Exceptions.toMessageString(e)); } } /** * Find next rotation after specified time. * * @param now the specified time; if zero, current time is used. * @return the next rotation time */ long getNextRotationTime(long now) { if (now <= 0) { now = System.currentTimeMillis(); } long nowTod = timeOfDayMillis(now); long next = 0; for (long rotationTime : rotationTimes) { if (nowTod < rotationTime) { next = rotationTime - nowTod + now; break; } } if (next == 0) { next = rotationTimes[0] + lengthOfDayMillis - nowTod + now; } return next; } private void checkAndCreateDir(String pathname) { int lastSlash = pathname.lastIndexOf("/"); if (lastSlash > -1) { String pathExcludingFilename = pathname.substring(0, lastSlash); File filepath = new File(pathExcludingFilename); if (!filepath.exists()) { filepath.mkdirs(); } } } private void internalRotateNow() { String oldFileName = fileName; long now = System.currentTimeMillis(); fileName = LogFormatter.insertDate(filePattern, now); internalFlush(); try { checkAndCreateDir(fileName); FileOutputStream os = new FileOutputStream(fileName, true); currentOutputStream = os; lastDropPosition = 0; LogFileDb.nowLoggingTo(fileName); } catch (IOException e) { throw new RuntimeException("Couldn't open log file '" + fileName + "'", e); } createSymlinkToCurrentFile(); nextRotationTime = 0; if ((oldFileName != null)) { File oldFile = new File(oldFileName); if (oldFile.exists()) { if (compression != Compression.NONE) { executor.execute(() -> runCompression(oldFile, compression)); } else { nativeIO.dropFileFromCache(oldFile); } } } } private static void runCompression(File oldFile, Compression compression) { switch (compression) { case ZSTD: runCompressionZstd(oldFile.toPath()); break; case GZIP: runCompressionGzip(oldFile); break; default: throw new 
IllegalArgumentException("Unknown compression " + compression); } } private static void runCompressionZstd(Path oldFile) { try { Path compressedFile = Paths.get(oldFile.toString() + ".zst"); Files.createFile(compressedFile); int bufferSize = 0x400000; byte[] buffer = new byte[bufferSize]; try (ZstdOuputStream out = new ZstdOuputStream(Files.newOutputStream(compressedFile), bufferSize); InputStream in = Files.newInputStream(oldFile)) { int read; while ((read = in.read(buffer)) >= 0) { out.write(buffer, 0, read); } out.flush(); } Files.delete(oldFile); } catch (IOException e) { logger.log(Level.WARNING, "Failed to compress log file with zstd: " + oldFile, e); } } private static void runCompressionGzip(File oldFile) { File gzippedFile = new File(oldFile.getPath() + ".gz"); try (GZIPOutputStream compressor = new GZIPOutputStream(new FileOutputStream(gzippedFile), 0x100000); FileInputStream inputStream = new FileInputStream(oldFile)) { byte[] buffer = new byte[0x400000]; long totalBytesRead = 0; NativeIO nativeIO = new NativeIO(); for (int read = inputStream.read(buffer); read > 0; read = inputStream.read(buffer)) { compressor.write(buffer, 0, read); nativeIO.dropPartialFileFromCache(inputStream.getFD(), totalBytesRead, read, false); totalBytesRead += read; } compressor.finish(); compressor.flush(); oldFile.delete(); nativeIO.dropFileFromCache(gzippedFile); } catch (IOException e) { logger.warning("Got '" + e + "' while compressing '" + oldFile.getPath() + "'."); } } /** * Name files by date - create a symlink with a constant name to the newest file */ private static final long lengthOfDayMillis = 24 * 60 * 60 * 1000; private static long timeOfDayMillis(long time) { return time % lengthOfDayMillis; } }
// LogThread: background daemon thread that serializes all log-file operations
// (publish, flush, rotate, close) arriving on a Pollable queue onto one writer thread.
// - run() drains operations until interrupted, then performs a final flush on exit.
// - internalPublish() lazily opens/rotates the output file based on rotationTimes
//   (offsets within a day, in milliseconds — see getNextRotationTime/timeOfDayMillis)
//   before writing one record followed by '\n'.
// - internalRotateNow() derives the new file name from filePattern via
//   LogFormatter.insertDate() and hands the previous file to a cached-thread-pool
//   executor for ZSTD/GZIP compression, or just drops it from the OS page cache.
// - internalFlush() is synchronized; for GZIP output it drops already-written byte
//   ranges from the page cache instead of flushing.
// NOTE(review): createSymlinkToCurrentFile() is called below but its definition is
// not present in this snippet — presumably elided from this flattened context;
// confirm against the original LogFileHandler source. Note also the upstream typo
// "ZstdOuputStream", preserved as-is.
class LogThread<LOGTYPE> extends Thread { private final Pollable<LOGTYPE> operationProvider; long lastFlush = 0; private FileOutputStream currentOutputStream = null; private long nextRotationTime = 0; private final String filePattern; private volatile String fileName; private long lastDropPosition = 0; private final LogWriter<LOGTYPE> logWriter; private final Compression compression; private final long[] rotationTimes; private final String symlinkName; private final ExecutorService executor = Executors.newCachedThreadPool(ThreadFactoryFactory.getDaemonThreadFactory("logfilehandler.compression")); private final NativeIO nativeIO = new NativeIO(); LogThread(LogWriter<LOGTYPE> logWriter, String filePattern, Compression compression, long[] rotationTimes, String symlinkName, Pollable<LOGTYPE> operationProvider) { super("Logger"); setDaemon(true); this.logWriter = logWriter; this.filePattern = filePattern; this.compression = compression; this.rotationTimes = rotationTimes; this.symlinkName = (symlinkName != null && !symlinkName.isBlank()) ? 
symlinkName : null; this.operationProvider = operationProvider; } @Override public void run() { try { handleLogOperations(); } catch (InterruptedException e) { } catch (Exception e) { Process.logAndDie("Failed storing log records", e); } internalFlush(); } private void handleLogOperations() throws InterruptedException { while (!isInterrupted()) { Operation<LOGTYPE> r = operationProvider.poll(); if (r != null) { if (r.type == Operation.Type.flush) { internalFlush(); } else if (r.type == Operation.Type.close) { internalClose(); } else if (r.type == Operation.Type.rotate) { internalRotateNow(); lastFlush = System.nanoTime(); } else if (r.type == Operation.Type.log) { internalPublish(r.log.get()); flushIfOld(3, TimeUnit.SECONDS); } r.countDownLatch.countDown(); } else { flushIfOld(100, TimeUnit.MILLISECONDS); } } } private void flushIfOld(long age, TimeUnit unit) { long now = System.nanoTime(); if (TimeUnit.NANOSECONDS.toMillis(now - lastFlush) > unit.toMillis(age)) { internalFlush(); lastFlush = now; } } private synchronized void internalFlush() { try { FileOutputStream currentOut = this.currentOutputStream; if (currentOut != null) { if (compression == Compression.GZIP) { long newPos = currentOut.getChannel().position(); if (newPos > lastDropPosition + 102400) { nativeIO.dropPartialFileFromCache(currentOut.getFD(), lastDropPosition, newPos, true); lastDropPosition = newPos; } } else { currentOut.flush(); } } } catch (IOException e) { logger.warning("Failed dropping from cache : " + Exceptions.toMessageString(e)); } } private void internalClose() { try { internalFlush(); FileOutputStream currentOut = this.currentOutputStream; if (currentOut != null) currentOut.close(); } catch (Exception e) { logger.log(Level.WARNING, "Got error while closing log file", e); } } private void internalPublish(LOGTYPE r) { long now = System.currentTimeMillis(); if (nextRotationTime <= 0) { nextRotationTime = getNextRotationTime(now); } if (now > nextRotationTime || currentOutputStream == 
null) { internalRotateNow(); } try { FileOutputStream out = this.currentOutputStream; logWriter.write(r, out); out.write('\n'); } catch (IOException e) { logger.warning("Failed writing log record: " + Exceptions.toMessageString(e)); } } /** * Find next rotation after specified time. * * @param now the specified time; if zero, current time is used. * @return the next rotation time */ long getNextRotationTime(long now) { if (now <= 0) { now = System.currentTimeMillis(); } long nowTod = timeOfDayMillis(now); long next = 0; for (long rotationTime : rotationTimes) { if (nowTod < rotationTime) { next = rotationTime - nowTod + now; break; } } if (next == 0) { next = rotationTimes[0] + lengthOfDayMillis - nowTod + now; } return next; } private void checkAndCreateDir(String pathname) { int lastSlash = pathname.lastIndexOf("/"); if (lastSlash > -1) { String pathExcludingFilename = pathname.substring(0, lastSlash); File filepath = new File(pathExcludingFilename); if (!filepath.exists()) { filepath.mkdirs(); } } } private void internalRotateNow() { String oldFileName = fileName; long now = System.currentTimeMillis(); fileName = LogFormatter.insertDate(filePattern, now); internalFlush(); try { checkAndCreateDir(fileName); FileOutputStream os = new FileOutputStream(fileName, true); currentOutputStream = os; lastDropPosition = 0; LogFileDb.nowLoggingTo(fileName); } catch (IOException e) { throw new RuntimeException("Couldn't open log file '" + fileName + "'", e); } createSymlinkToCurrentFile(); nextRotationTime = 0; if ((oldFileName != null)) { File oldFile = new File(oldFileName); if (oldFile.exists()) { if (compression != Compression.NONE) { executor.execute(() -> runCompression(oldFile, compression)); } else { nativeIO.dropFileFromCache(oldFile); } } } } private static void runCompression(File oldFile, Compression compression) { switch (compression) { case ZSTD: runCompressionZstd(oldFile.toPath()); break; case GZIP: runCompressionGzip(oldFile); break; default: throw new 
IllegalArgumentException("Unknown compression " + compression); } } private static void runCompressionZstd(Path oldFile) { try { Path compressedFile = Paths.get(oldFile.toString() + ".zst"); Files.createFile(compressedFile); int bufferSize = 0x400000; byte[] buffer = new byte[bufferSize]; try (ZstdOuputStream out = new ZstdOuputStream(Files.newOutputStream(compressedFile), bufferSize); InputStream in = Files.newInputStream(oldFile)) { int read; while ((read = in.read(buffer)) >= 0) { out.write(buffer, 0, read); } out.flush(); } Files.delete(oldFile); } catch (IOException e) { logger.log(Level.WARNING, "Failed to compress log file with zstd: " + oldFile, e); } } private static void runCompressionGzip(File oldFile) { File gzippedFile = new File(oldFile.getPath() + ".gz"); NativeIO nativeIO = new NativeIO(); try (GZIPOutputStream compressor = new GZIPOutputStream(new FileOutputStream(gzippedFile), 0x100000); FileInputStream inputStream = new FileInputStream(oldFile)) { byte[] buffer = new byte[0x400000]; long totalBytesRead = 0; for (int read = inputStream.read(buffer); read > 0; read = inputStream.read(buffer)) { compressor.write(buffer, 0, read); nativeIO.dropPartialFileFromCache(inputStream.getFD(), totalBytesRead, read, false); totalBytesRead += read; } compressor.finish(); compressor.flush(); } catch (IOException e) { logger.warning("Got '" + e + "' while compressing '" + oldFile.getPath() + "'."); } oldFile.delete(); nativeIO.dropFileFromCache(gzippedFile); } /** * Name files by date - create a symlink with a constant name to the newest file */ private static final long lengthOfDayMillis = 24 * 60 * 60 * 1000; private static long timeOfDayMillis(long time) { return time % lengthOfDayMillis; } }
Done, if the user limits the count. Otherwise I don't have access to the file path or size and can't infer it. I will file an issue in vert.x.
/**
 * Streams the given Vert.x {@code AsyncFile} to the HTTP response with backpressure.
 *
 * Fixes two issues in the previous version: it always forced chunked transfer
 * encoding even when the read length was known, and it never closed the file
 * descriptor when the stream ended.
 *
 * @param file        the async file to stream; closed once fully written
 * @param genericType unused; part of the body-writer contract
 * @param context     the request context, suspended until streaming completes or fails
 */
public void writeResponse(AsyncFile file, Type genericType, ServerRequestContext context) throws WebApplicationException {
    ResteasyReactiveRequestContext ctx = ((ResteasyReactiveRequestContext) context);
    // Suspend request processing; we resume from the async handlers below.
    ctx.suspend();
    ServerHttpResponse response = context.serverResponse();
    // If the caller limited the read length we know the body size up front and can
    // send Content-Length; otherwise fall back to chunked encoding (the underlying
    // file size is not accessible here — see the accompanying review note).
    if (file.getReadLength() != Long.MAX_VALUE) {
        response.setResponseHeader("Content-Length", String.valueOf(file.getReadLength()));
    } else {
        response.setChunked(true);
    }
    file.handler(buffer -> {
        try {
            response.write(buffer.getBytes());
        } catch (Exception x) {
            // Propagate write failures to the request context.
            ctx.resume(x);
            return;
        }
        if (response.isWriteQueueFull()) {
            // Backpressure: stop reading until the response queue drains.
            file.pause();
            response.addDrainHandler(file::resume);
        }
    });
    file.endHandler(event -> {
        // Release the file descriptor before completing the response.
        file.close();
        response.end();
        ctx.resume();
    });
}
response.setChunked(true);
/**
 * Writes an {@code AsyncFile} out as the HTTP response body, honouring write-queue
 * backpressure and resuming the suspended request once the stream completes or fails.
 * Sends Content-Length when the bounded read length is known, otherwise streams
 * with chunked transfer encoding.
 */
public void writeResponse(AsyncFile file, Type genericType, ServerRequestContext context) throws WebApplicationException {
    ResteasyReactiveRequestContext requestContext = (ResteasyReactiveRequestContext) context;
    requestContext.suspend();
    ServerHttpResponse response = context.serverResponse();
    long readLength = file.getReadLength();
    if (readLength == Long.MAX_VALUE) {
        // Size unknown: stream with chunked transfer encoding.
        response.setChunked(true);
    } else {
        // A bounded read means the exact body size is known up front.
        response.setResponseHeader(HttpHeaders.CONTENT_LENGTH, String.valueOf(readLength));
    }
    file.handler(buffer -> {
        try {
            response.write(buffer.getBytes());
        } catch (Exception failure) {
            requestContext.resume(failure);
            return;
        }
        if (response.isWriteQueueFull()) {
            // Backpressure: pause reads until the response queue drains.
            file.pause();
            response.addDrainHandler(file::resume);
        }
    });
    file.endHandler(event -> {
        file.close();
        response.end();
        requestContext.resume();
    });
}
// Server-side message-body writer that handles Vert.x AsyncFile response entities.
// NOTE(review): the trailing "@Override }" marks where a writeResponse override was
// elided from this flattened context snippet — as stored, this is not valid Java;
// confirm against the original source file.
class ServerVertxAsyncFileMessageBodyWriter extends VertxAsyncFileMessageBodyWriter implements ServerMessageBodyWriter<AsyncFile> { @Override public boolean isWriteable(Class<?> type, Type genericType, ResteasyReactiveResourceInfo target, MediaType mediaType) { return AsyncFile.class.isAssignableFrom(type); } @Override }
// Server-side message-body writer that handles Vert.x AsyncFile response entities.
// NOTE(review): the trailing "@Override }" marks where a writeResponse override was
// elided from this flattened context snippet — as stored, this is not valid Java;
// confirm against the original source file.
class ServerVertxAsyncFileMessageBodyWriter extends VertxAsyncFileMessageBodyWriter implements ServerMessageBodyWriter<AsyncFile> { @Override public boolean isWriteable(Class<?> type, Type genericType, ResteasyReactiveResourceInfo target, MediaType mediaType) { return AsyncFile.class.isAssignableFrom(type); } @Override }
I think you are right (about the last thing). Fixed.
/**
 * Returns whether the next job should be triggered after the previous one: i.e.
 * whether the change currently rolling out (version upgrade or application revision)
 * has been produced by previous but not yet consumed by next.
 *
 * Fixes the previous version, which required a successful stagingTest run at the
 * target version even when next is itself a test job — blocking the very test runs
 * needed to produce that success — and which buried the no-next-job case inside the
 * version branch.
 *
 * @param application the application the jobs belong to
 * @param previous    status of the job that just completed (must not be null)
 * @param next        status of the candidate follow-up job, or null if there is none
 * @return true if next has work to pick up from previous
 */
private boolean changesAvailable(Application application, JobStatus previous, JobStatus next) {
    if ( ! application.deploying().isPresent()) return false; // nothing is rolling out
    if (next == null) return true; // no downstream job to gate on

    Change change = application.deploying().get();
    if (change instanceof Change.VersionChange) {
        Version targetVersion = ((Change.VersionChange) change).version();
        if (next.type().isTest()) {
            // Trigger a test job once its predecessor has passed on the target
            // version, and only until the test itself has passed on that version.
            if ( ! lastSuccessfulIs(targetVersion, previous.type(), application)) return false;
            if (lastSuccessfulIs(targetVersion, next.type(), application)) return false;
        } else if (next.type().isProduction()) {
            // Production additionally requires staging to have passed at the target
            // version, and must never redeploy to a zone already on (or past) it.
            if ( ! lastSuccessfulIs(targetVersion, JobType.stagingTest, application)) return false;
            if (previous.type().isProduction() && ! isOnAtLeastProductionVersion(targetVersion, application, previous.type())) return false;
            if (isOnAtLeastProductionVersion(targetVersion, application, next.type())) return false;
        } else {
            throw new IllegalStateException("Unclassified type of next job: " + next);
        }
        return true;
    } else {
        // Application (revision) change: next runs when previous succeeded with a
        // revision that next has not yet succeeded with.
        if ( ! previous.lastSuccess().isPresent()) return false;
        if ( ! next.lastSuccess().isPresent()) return true;
        return previous.lastSuccess().get().revision().isPresent()
               && ! previous.lastSuccess().get().revision().equals(next.lastSuccess().get().revision());
    }
}
return false;
/**
 * Decides whether next has work to pick up from previous for the change currently
 * rolling out: a version upgrade gates test jobs on the predecessor's success and
 * production jobs on staging plus not-already-deployed; a revision change gates on
 * previous having succeeded with a revision next has not yet succeeded with.
 */
private boolean changesAvailable(Application application, JobStatus previous, JobStatus next) {
    if ( ! application.deploying().isPresent()) return false;
    if (next == null) return true;

    Change change = application.deploying().get();
    if (change instanceof Change.VersionChange) {
        Version targetVersion = ((Change.VersionChange) change).version();
        if (next.type().isTest()) {
            // A test runs once its predecessor passed on the target version,
            // and only until it has passed on that version itself.
            return lastSuccessfulIs(targetVersion, previous.type(), application)
                   && ! lastSuccessfulIs(targetVersion, next.type(), application);
        }
        if (next.type().isProduction()) {
            if ( ! lastSuccessfulIs(targetVersion, JobType.stagingTest, application)) return false;
            if (previous.type().isProduction() && ! alreadyDeployed(targetVersion, application, previous.type())) return false;
            return ! alreadyDeployed(targetVersion, application, next.type());
        }
        throw new IllegalStateException("Unclassified type of next job: " + next);
    }

    // Application (revision) change.
    if ( ! previous.lastSuccess().isPresent()) return false;
    if ( ! next.lastSuccess().isPresent()) return true;
    return previous.lastSuccess().get().revision().isPresent()
           && ! previous.lastSuccess().get().revision().equals(next.lastSuccess().get().revision());
}
// DeploymentTrigger: decides which deployment jobs to trigger next for each
// application, driven by job-completion reports (triggerFromCompletion), periodic
// scans (triggerReadyJobs), and explicit change requests (triggerChange/cancelChange).
// A change is either a platform version upgrade (Change.VersionChange) or an
// application revision (Change.ApplicationChange). Jobs running longer than
// jobTimeout (12h in SystemName.main, 1h otherwise) are treated as hanging.
// NOTE(review): changesAvailable(...) is called below but its definition is elided
// from this flattened context snippet (only its javadoc remains) — confirm against
// the original source.
class DeploymentTrigger { /** The max duration a job may run before we consider it dead/hanging */ private final Duration jobTimeout; private final static Logger log = Logger.getLogger(DeploymentTrigger.class.getName()); private final Controller controller; private final Clock clock; private final BuildSystem buildSystem; private final DeploymentOrder order; public DeploymentTrigger(Controller controller, CuratorDb curator, Clock clock) { Objects.requireNonNull(controller,"controller cannot be null"); Objects.requireNonNull(curator,"curator cannot be null"); Objects.requireNonNull(clock,"clock cannot be null"); this.controller = controller; this.clock = clock; this.buildSystem = new PolledBuildSystem(controller, curator); this.order = new DeploymentOrder(controller); this.jobTimeout = controller.system().equals(SystemName.main) ? Duration.ofHours(12) : Duration.ofHours(1); } /** Returns the time in the past before which jobs are at this moment considered unresponsive */ public Instant jobTimeoutLimit() { return clock.instant().minus(jobTimeout); } public BuildSystem buildSystem() { return buildSystem; } public DeploymentOrder deploymentOrder() { return order; } /** * Called each time a job completes (successfully or not) to cause triggering of one or more follow-up jobs * (which may possibly the same job once over). * * @param report information about the job that just completed */ public void triggerFromCompletion(JobReport report) { applications().lockOrThrow(report.applicationId(), application -> { application = application.withJobCompletion(report, clock.instant(), controller); if (report.success()) { if (report.jobType() == JobType.component) { if (acceptNewRevisionNow(application)) { if ( ! 
( application.deploying().isPresent() && (application.deploying().get() instanceof Change.VersionChange))) application = application.withDeploying(Optional.of(Change.ApplicationChange.unknown())); } else { applications().store(application.withOutstandingChange(true)); return; } } else if (deploymentComplete(application)) { application = application.withDeploying(Optional.empty()); } } if (report.success()) application = trigger(order.nextAfter(report.jobType(), application), application, report.jobType().jobName() + " completed"); else if (retryBecauseOutOfCapacity(application, report.jobType())) application = trigger(report.jobType(), application, true, "Retrying on out of capacity"); else if (retryBecauseNewFailure(application, report.jobType())) application = trigger(report.jobType(), application, false, "Immediate retry on failure"); applications().store(application); }); } /** Returns whether all production zones listed in deployment spec has this change (or a newer version, if upgrade) */ private boolean deploymentComplete(LockedApplication application) { if ( ! application.deploying().isPresent()) return true; Change change = application.deploying().get(); for (JobType job : order.jobsFrom(application.deploymentSpec())) { if ( ! job.isProduction()) continue; Optional<ZoneId> zone = job.zone(this.controller.system()); if ( ! zone.isPresent()) continue; Deployment deployment = application.deployments().get(zone.get()); if (deployment == null) return false; if (change instanceof VersionChange) { if (((VersionChange)change).version().isAfter(deployment.version())) return false; } else if (((Change.ApplicationChange)change).revision().isPresent()) { if ( ! ((Change.ApplicationChange)change).revision().get().equals(deployment.revision())) return false; } else { return false; } } return true; } /** * Find jobs that can and should run but are currently not. 
*/ public void triggerReadyJobs() { ApplicationList applications = ApplicationList.from(applications().asList()); applications = applications.notPullRequest(); for (Application application : applications.asList()) applications().lockIfPresent(application.id(), this::triggerReadyJobs); } /** Find the next step to trigger if any, and triggers it */ public void triggerReadyJobs(LockedApplication application) { if ( ! application.deploying().isPresent()) return; List<JobType> jobs = order.jobsFrom(application.deploymentSpec()); if ( ! jobs.isEmpty() && jobs.get(0).equals(JobType.systemTest) ) { JobStatus systemTestStatus = application.deploymentJobs().jobStatus().get(JobType.systemTest); if (application.deploying().get() instanceof Change.VersionChange) { Version target = ((Change.VersionChange) application.deploying().get()).version(); if (systemTestStatus == null || ! systemTestStatus.lastTriggered().isPresent() || ! systemTestStatus.isSuccess() || ! systemTestStatus.lastTriggered().get().version().equals(target) || systemTestStatus.isHanging(jobTimeoutLimit())) { application = trigger(JobType.systemTest, application, false, "Upgrade to " + target); controller.applications().store(application); } } else { JobStatus componentStatus = application.deploymentJobs().jobStatus().get(JobType.component); if (changesAvailable(application, componentStatus, systemTestStatus)) { application = trigger(JobType.systemTest, application, false, "Available change in component"); controller.applications().store(application); } } } for (JobType jobType : jobs) { JobStatus jobStatus = application.deploymentJobs().jobStatus().get(jobType); if (jobStatus == null) continue; if (jobStatus.isRunning(jobTimeoutLimit())) continue; List<JobType> nextToTrigger = new ArrayList<>(); for (JobType nextJobType : order.nextAfter(jobType, application)) { JobStatus nextStatus = application.deploymentJobs().jobStatus().get(nextJobType); if (changesAvailable(application, jobStatus, nextStatus) || 
nextStatus.isHanging(jobTimeoutLimit())) nextToTrigger.add(nextJobType); } application = trigger(nextToTrigger, application, "Available change in " + jobType.jobName()); controller.applications().store(application); } } /** * Returns true if the previous job has completed successfully with a revision and/or version which is * newer (different) than the one last completed successfully in next */ /** * Triggers a change of this application * * @param applicationId the application to trigger * @throws IllegalArgumentException if this application already have an ongoing change */ public void triggerChange(ApplicationId applicationId, Change change) { applications().lockOrThrow(applicationId, application -> { if (application.deploying().isPresent() && ! application.deploymentJobs().hasFailures()) throw new IllegalArgumentException("Could not start " + change + " on " + application + ": " + application.deploying().get() + " is already in progress"); application = application.withDeploying(Optional.of(change)); if (change instanceof Change.ApplicationChange) application = application.withOutstandingChange(false); application = trigger(JobType.systemTest, application, false, (change instanceof Change.VersionChange ? "Upgrading to " + ((Change.VersionChange)change).version() : "Deploying " + change)); applications().store(application); }); } /** * Cancels any ongoing upgrade of the given application * * @param applicationId the application to trigger */ public void cancelChange(ApplicationId applicationId) { applications().lockOrThrow(applicationId, application -> { buildSystem.removeJobs(application.id()); applications().store(application.withDeploying(Optional.empty())); }); } private ApplicationController applications() { return controller.applications(); } /** Retry immediately only if this job just started failing. 
Otherwise retry periodically */ private boolean retryBecauseNewFailure(Application application, JobType jobType) { JobStatus jobStatus = application.deploymentJobs().jobStatus().get(jobType); return (jobStatus != null && jobStatus.firstFailing().get().at().isAfter(clock.instant().minus(Duration.ofSeconds(10)))); } /** Decide whether to retry due to capacity restrictions */ private boolean retryBecauseOutOfCapacity(Application application, JobType jobType) { JobStatus jobStatus = application.deploymentJobs().jobStatus().get(jobType); if (jobStatus == null || ! jobStatus.jobError().equals(Optional.of(JobError.outOfCapacity))) return false; return jobStatus.firstFailing().get().at().isAfter(clock.instant().minus(Duration.ofMinutes(15))); } /** Returns whether the given job type should be triggered according to deployment spec */ private boolean hasJob(JobType jobType, Application application) { if ( ! jobType.isProduction()) return true; return application.deploymentSpec().includes(jobType.environment(), jobType.region(controller.system())); } /** * Trigger a job for an application * * @param jobType the type of the job to trigger, or null to trigger nothing * @param application the application to trigger the job for * @param first whether to put the job at the front of the build system queue (or the back) * @param reason describes why the job is triggered * @return the application in the triggered state, which *must* be stored by the caller */ private LockedApplication trigger(JobType jobType, LockedApplication application, boolean first, String reason) { if (jobType.isProduction() && isRunningProductionJob(application)) return application; return triggerAllowParallel(jobType, application, first, false, reason); } private LockedApplication trigger(List<JobType> jobs, LockedApplication application, String reason) { if (jobs.stream().anyMatch(JobType::isProduction) && isRunningProductionJob(application)) return application; for (JobType job : jobs) application = 
triggerAllowParallel(job, application, false, false, reason); return application; } /** * Trigger a job for an application, if allowed * * @param jobType the type of the job to trigger, or null to trigger nothing * @param application the application to trigger the job for * @param first whether to trigger the job before other jobs * @param force true to disable checks which should normally prevent this triggering from happening * @param reason describes why the job is triggered * @return the application in the triggered state, if actually triggered. This *must* be stored by the caller */ public LockedApplication triggerAllowParallel(JobType jobType, LockedApplication application, boolean first, boolean force, String reason) { if (jobType == null) return application; if ( ! application.deploymentJobs().isDeployableTo(jobType.environment(), application.deploying())) { log.warning(String.format("Want to trigger %s for %s with reason %s, but change is untested", jobType, application, reason)); return application; } if ( ! force && ! 
allowedTriggering(jobType, application)) return application; log.info(String.format("Triggering %s for %s, %s: %s", jobType, application, application.deploying().map(d -> "deploying " + d).orElse("restarted deployment"), reason)); buildSystem.addJob(application.id(), jobType, first); return application.withJobTriggering(jobType, application.deploying(), clock.instant(), application.deployVersionFor(jobType, controller), application.deployRevisionFor(jobType, controller), reason); } /** Returns true if the given proposed job triggering should be effected */ private boolean allowedTriggering(JobType jobType, LockedApplication application) { if (jobType.isProduction() && application.deploying().isPresent() && application.deploying().get().blockedBy(application.deploymentSpec(), clock.instant())) return false; if (application.deploying().isPresent() && application.deploying().get() instanceof VersionChange && jobType.isProduction() && isOnAtLeastProductionVersion(((VersionChange) application.deploying().get()).version(), application, jobType)) return false; if (application.deploymentJobs().isRunning(jobType, jobTimeoutLimit())) return false; if ( ! hasJob(jobType, application)) return false; if ( ! application.deploymentJobs().projectId().isPresent()) return false; return true; } private boolean isRunningProductionJob(Application application) { return JobList.from(application) .production() .running(jobTimeoutLimit()) .anyMatch(); } /** * Returns whether the current deployed version in the zone given by the job * is newer or equal to the given version. This may be the case even if the production job * in question failed, if the failure happens after deployment. * In that case we should never deploy an earlier version as that may potentially * downgrade production nodes which we are not guaranteed to support, and upgradibng to the current * version is just unnecessary work. 
*/ private boolean isOnAtLeastProductionVersion(Version version, Application application, JobType job) { if ( ! job.isProduction()) return false; Optional<ZoneId> zone = job.zone(controller.system()); if ( ! zone.isPresent()) return false; Deployment existingDeployment = application.deployments().get(zone.get()); if (existingDeployment == null) return false; return existingDeployment.version().isAfter(version) || existingDeployment.version().equals(version); } private boolean acceptNewRevisionNow(LockedApplication application) { if ( ! application.deploying().isPresent()) return true; if (application.deploying().get() instanceof Change.ApplicationChange) return true; if (application.deploymentJobs().hasFailures()) return true; if (application.isBlocked(clock.instant())) return true; return false; } private boolean lastSuccessfulIs(Version version, JobType jobType, Application application) { JobStatus status = application.deploymentJobs().jobStatus().get(jobType); if (status == null) return false; Optional<JobStatus.JobRun> lastSuccessfulStagingRun = status.lastSuccess(); if ( ! lastSuccessfulStagingRun.isPresent()) return false; return lastSuccessfulStagingRun.get().version().equals(version); } }
class DeploymentTrigger { /** The max duration a job may run before we consider it dead/hanging */ private final Duration jobTimeout; private final static Logger log = Logger.getLogger(DeploymentTrigger.class.getName()); private final Controller controller; private final Clock clock; private final BuildSystem buildSystem; private final DeploymentOrder order; public DeploymentTrigger(Controller controller, CuratorDb curator, Clock clock) { Objects.requireNonNull(controller,"controller cannot be null"); Objects.requireNonNull(curator,"curator cannot be null"); Objects.requireNonNull(clock,"clock cannot be null"); this.controller = controller; this.clock = clock; this.buildSystem = new PolledBuildSystem(controller, curator); this.order = new DeploymentOrder(controller); this.jobTimeout = controller.system().equals(SystemName.main) ? Duration.ofHours(12) : Duration.ofHours(1); } /** Returns the time in the past before which jobs are at this moment considered unresponsive */ public Instant jobTimeoutLimit() { return clock.instant().minus(jobTimeout); } public BuildSystem buildSystem() { return buildSystem; } public DeploymentOrder deploymentOrder() { return order; } /** * Called each time a job completes (successfully or not) to cause triggering of one or more follow-up jobs * (which may possibly the same job once over). * * @param report information about the job that just completed */ public void triggerFromCompletion(JobReport report) { applications().lockOrThrow(report.applicationId(), application -> { application = application.withJobCompletion(report, clock.instant(), controller); if (report.success()) { if (report.jobType() == JobType.component) { if (acceptNewRevisionNow(application)) { if ( ! 
( application.deploying().isPresent() && (application.deploying().get() instanceof Change.VersionChange))) application = application.withDeploying(Optional.of(Change.ApplicationChange.unknown())); } else { applications().store(application.withOutstandingChange(true)); return; } } else if (deploymentComplete(application)) { application = application.withDeploying(Optional.empty()); } } if (report.success()) application = trigger(order.nextAfter(report.jobType(), application), application, report.jobType().jobName() + " completed"); else if (retryBecauseOutOfCapacity(application, report.jobType())) application = trigger(report.jobType(), application, true, "Retrying on out of capacity"); else if (retryBecauseNewFailure(application, report.jobType())) application = trigger(report.jobType(), application, false, "Immediate retry on failure"); applications().store(application); }); } /** Returns whether all production zones listed in deployment spec has this change (or a newer version, if upgrade) */ private boolean deploymentComplete(LockedApplication application) { if ( ! application.deploying().isPresent()) return true; Change change = application.deploying().get(); for (JobType job : order.jobsFrom(application.deploymentSpec())) { if ( ! job.isProduction()) continue; Optional<ZoneId> zone = job.zone(this.controller.system()); if ( ! zone.isPresent()) continue; Deployment deployment = application.deployments().get(zone.get()); if (deployment == null) return false; if (change instanceof VersionChange) { if (((VersionChange)change).version().isAfter(deployment.version())) return false; } else if (((Change.ApplicationChange)change).revision().isPresent()) { if ( ! ((Change.ApplicationChange)change).revision().get().equals(deployment.revision())) return false; } else { return false; } } return true; } /** * Find jobs that can and should run but are currently not. 
*/ public void triggerReadyJobs() { ApplicationList applications = ApplicationList.from(applications().asList()); applications = applications.notPullRequest(); for (Application application : applications.asList()) applications().lockIfPresent(application.id(), this::triggerReadyJobs); } /** Find the next step to trigger if any, and triggers it */ public void triggerReadyJobs(LockedApplication application) { if ( ! application.deploying().isPresent()) return; List<JobType> jobs = order.jobsFrom(application.deploymentSpec()); if ( ! jobs.isEmpty() && jobs.get(0).equals(JobType.systemTest) ) { JobStatus systemTestStatus = application.deploymentJobs().jobStatus().get(JobType.systemTest); if (application.deploying().get() instanceof Change.VersionChange) { Version target = ((Change.VersionChange) application.deploying().get()).version(); if (systemTestStatus == null || ! systemTestStatus.lastTriggered().isPresent() || ! systemTestStatus.isSuccess() || ! systemTestStatus.lastTriggered().get().version().equals(target) || systemTestStatus.isHanging(jobTimeoutLimit())) { application = trigger(JobType.systemTest, application, false, "Upgrade to " + target); controller.applications().store(application); } } else { JobStatus componentStatus = application.deploymentJobs().jobStatus().get(JobType.component); if (componentStatus != null && changesAvailable(application, componentStatus, systemTestStatus)) { application = trigger(JobType.systemTest, application, false, "Available change in component"); controller.applications().store(application); } } } for (JobType jobType : jobs) { JobStatus jobStatus = application.deploymentJobs().jobStatus().get(jobType); if (jobStatus == null) continue; if (jobStatus.isRunning(jobTimeoutLimit())) continue; List<JobType> nextToTrigger = new ArrayList<>(); for (JobType nextJobType : order.nextAfter(jobType, application)) { JobStatus nextStatus = application.deploymentJobs().jobStatus().get(nextJobType); if (changesAvailable(application, 
jobStatus, nextStatus) || nextStatus.isHanging(jobTimeoutLimit())) nextToTrigger.add(nextJobType); } application = trigger(nextToTrigger, application, "Available change in " + jobType.jobName()); controller.applications().store(application); } } /** * Returns true if the previous job has completed successfully with a revision and/or version which is * newer (different) than the one last completed successfully in next */ /** * Triggers a change of this application * * @param applicationId the application to trigger * @throws IllegalArgumentException if this application already have an ongoing change */ public void triggerChange(ApplicationId applicationId, Change change) { applications().lockOrThrow(applicationId, application -> { if (application.deploying().isPresent() && ! application.deploymentJobs().hasFailures()) throw new IllegalArgumentException("Could not start " + change + " on " + application + ": " + application.deploying().get() + " is already in progress"); application = application.withDeploying(Optional.of(change)); if (change instanceof Change.ApplicationChange) application = application.withOutstandingChange(false); application = trigger(JobType.systemTest, application, false, (change instanceof Change.VersionChange ? "Upgrading to " + ((Change.VersionChange)change).version() : "Deploying " + change)); applications().store(application); }); } /** * Cancels any ongoing upgrade of the given application * * @param applicationId the application to trigger */ public void cancelChange(ApplicationId applicationId) { applications().lockOrThrow(applicationId, application -> { buildSystem.removeJobs(application.id()); applications().store(application.withDeploying(Optional.empty())); }); } private ApplicationController applications() { return controller.applications(); } /** Retry immediately only if this job just started failing. 
Otherwise retry periodically */ private boolean retryBecauseNewFailure(Application application, JobType jobType) { JobStatus jobStatus = application.deploymentJobs().jobStatus().get(jobType); return (jobStatus != null && jobStatus.firstFailing().get().at().isAfter(clock.instant().minus(Duration.ofSeconds(10)))); } /** Decide whether to retry due to capacity restrictions */ private boolean retryBecauseOutOfCapacity(Application application, JobType jobType) { JobStatus jobStatus = application.deploymentJobs().jobStatus().get(jobType); if (jobStatus == null || ! jobStatus.jobError().equals(Optional.of(JobError.outOfCapacity))) return false; return jobStatus.firstFailing().get().at().isAfter(clock.instant().minus(Duration.ofMinutes(15))); } /** Returns whether the given job type should be triggered according to deployment spec */ private boolean hasJob(JobType jobType, Application application) { if ( ! jobType.isProduction()) return true; return application.deploymentSpec().includes(jobType.environment(), jobType.region(controller.system())); } /** * Trigger a job for an application * * @param jobType the type of the job to trigger, or null to trigger nothing * @param application the application to trigger the job for * @param first whether to put the job at the front of the build system queue (or the back) * @param reason describes why the job is triggered * @return the application in the triggered state, which *must* be stored by the caller */ private LockedApplication trigger(JobType jobType, LockedApplication application, boolean first, String reason) { if (jobType.isProduction() && isRunningProductionJob(application)) return application; return triggerAllowParallel(jobType, application, first, false, reason); } private LockedApplication trigger(List<JobType> jobs, LockedApplication application, String reason) { if (jobs.stream().anyMatch(JobType::isProduction) && isRunningProductionJob(application)) return application; for (JobType job : jobs) application = 
triggerAllowParallel(job, application, false, false, reason); return application; } /** * Trigger a job for an application, if allowed * * @param jobType the type of the job to trigger, or null to trigger nothing * @param application the application to trigger the job for * @param first whether to trigger the job before other jobs * @param force true to disable checks which should normally prevent this triggering from happening * @param reason describes why the job is triggered * @return the application in the triggered state, if actually triggered. This *must* be stored by the caller */ public LockedApplication triggerAllowParallel(JobType jobType, LockedApplication application, boolean first, boolean force, String reason) { if (jobType == null) return application; if ( ! application.deploymentJobs().isDeployableTo(jobType.environment(), application.deploying())) { log.warning(String.format("Want to trigger %s for %s with reason %s, but change is untested", jobType, application, reason)); return application; } if ( ! force && ! 
allowedTriggering(jobType, application)) return application; log.info(String.format("Triggering %s for %s, %s: %s", jobType, application, application.deploying().map(d -> "deploying " + d).orElse("restarted deployment"), reason)); buildSystem.addJob(application.id(), jobType, first); return application.withJobTriggering(jobType, application.deploying(), clock.instant(), application.deployVersionFor(jobType, controller), application.deployRevisionFor(jobType, controller), reason); } /** Returns true if the given proposed job triggering should be effected */ private boolean allowedTriggering(JobType jobType, LockedApplication application) { if (jobType.isProduction() && application.deploying().isPresent() && application.deploying().get().blockedBy(application.deploymentSpec(), clock.instant())) return false; if (application.deploying().isPresent() && application.deploying().get() instanceof VersionChange && jobType.isProduction() && alreadyDeployed(((VersionChange) application.deploying().get()).version(), application, jobType)) return false; if (application.deploymentJobs().isRunning(jobType, jobTimeoutLimit())) return false; if ( ! hasJob(jobType, application)) return false; if ( ! application.deploymentJobs().projectId().isPresent()) return false; return true; } private boolean isRunningProductionJob(Application application) { return JobList.from(application) .production() .running(jobTimeoutLimit()) .anyMatch(); } /** * Returns whether the currently deployed version in the zone for the given production job is newer * than the given version, in which case we should avoid an unsupported downgrade, or if it is the * same version, and was successfully deployed, in which case it is unnecessary to redeploy it. */ private boolean alreadyDeployed(Version version, Application application, JobType job) { if ( ! 
job.isProduction()) throw new IllegalArgumentException(job + " is not a production job!"); return lastSuccessfulIs(version, job, application) || job.zone(controller.system()) .map(zone -> application.deployments().get(zone)) .map(deployment -> deployment.version().isAfter(version)) .orElse(false); } private boolean acceptNewRevisionNow(LockedApplication application) { if ( ! application.deploying().isPresent()) return true; if (application.deploying().get() instanceof Change.ApplicationChange) return true; if (application.deploymentJobs().hasFailures()) return true; if (application.isBlocked(clock.instant())) return true; return false; } private boolean lastSuccessfulIs(Version version, JobType jobType, Application application) { JobStatus status = application.deploymentJobs().jobStatus().get(jobType); if (status == null) return false; Optional<JobStatus.JobRun> lastSuccessfulRun = status.lastSuccess(); if ( ! lastSuccessfulRun.isPresent()) return false; return lastSuccessfulRun.get().version().equals(version); } }
I see, there are two methods: `aggregateFieldBaseValue` and `aggregateField`
public void testAggregateLogicalValuesGlobally() { Collection<BasicEnum> elements = Lists.newArrayList( BasicEnum.of("a", BasicEnum.Test.ONE), BasicEnum.of("a", BasicEnum.Test.TWO)); SampleAnyCombineFn<EnumerationType.Value> sampleAnyCombineFn = new SampleAnyCombineFn<>(100); Field aggField = Field.of("sampleList", FieldType.array(FieldType.logicalType(BASIC_ENUM_ENUMERATION))); pipeline .apply(Create.of(elements)) .apply( Group.<BasicEnum>globally().aggregateField("enumeration", sampleAnyCombineFn, aggField)) .apply( ParDo.of( new DoFn<Row, List<Integer>>() { @ProcessElement public void process(@Element Row value) { assertThat( value.getArray(0), containsInAnyOrder( BASIC_ENUM_ENUMERATION.valueOf(1), BASIC_ENUM_ENUMERATION.valueOf(2))); } })); pipeline.run(); } private static <T> Void containsKIterableVs( List<Row> expectedKvs, Iterable<Row> actualKvs, T[] emptyArray) { List<Row> list = Lists.newArrayList(actualKvs); List<Matcher<? super Row>> matchers = new ArrayList<>(); for (Row expected : expectedKvs) { List<Matcher> fieldMatchers = Lists.newArrayList(); fieldMatchers.add( new RowFieldMatcherIterableFieldAnyOrder(expected.getSchema(), 0, expected.getRow(0))); assertEquals(TypeName.ITERABLE, expected.getSchema().getField(1).getType().getTypeName()); fieldMatchers.add( new RowFieldMatcherIterableFieldAnyOrder( expected.getSchema(), 1, expected.getIterable(1))); matchers.add(allOf(fieldMatchers.toArray(new Matcher[0]))); } assertThat(actualKvs, containsInAnyOrder(matchers.toArray(new Matcher[0]))); return null; } private static <T> Void containsKvRows(List<Row> expectedKvs, Iterable<Row> actualKvs) { List<Matcher<? super Row>> matchers = new ArrayList<>(); for (Row expected : expectedKvs) { matchers.add(new KvRowMatcher(equalTo(expected.getRow(0)), equalTo(expected.getRow(1)))); } assertThat(actualKvs, containsInAnyOrder(matchers.toArray(new Matcher[0]))); return null; } public static class KvRowMatcher extends TypeSafeMatcher<Row> { final Matcher<? 
super Row> keyMatcher; final Matcher<? super Row> valueMatcher; public KvRowMatcher(Matcher<? super Row> keyMatcher, Matcher<? super Row> valueMatcher) { this.keyMatcher = keyMatcher; this.valueMatcher = valueMatcher; } @Override public boolean matchesSafely(Row kvRow) { return keyMatcher.matches(kvRow.getRow(0)) && valueMatcher.matches(kvRow.getRow(1)); } @Override public void describeTo(Description description) { description .appendText("a KVRow(") .appendValue(keyMatcher) .appendText(", ") .appendValue(valueMatcher) .appendText(")"); } } private static Void containsSingleIterable( Collection<Basic> expected, Iterable<Iterable<Basic>> actual) { Basic[] values = expected.toArray(new Basic[0]); assertThat(actual, containsInAnyOrder(containsInAnyOrder(values))); return null; } }
SampleAnyCombineFn<EnumerationType.Value> sampleAnyCombineFn = new SampleAnyCombineFn<>(100);
public void testAggregateLogicalValuesGlobally() { Collection<BasicEnum> elements = Lists.newArrayList( BasicEnum.of("a", BasicEnum.Test.ONE), BasicEnum.of("a", BasicEnum.Test.TWO)); CombineFn<EnumerationType.Value, ?, Iterable<EnumerationType.Value>> sampleAnyCombineFn = Sample.anyCombineFn(100); Field aggField = Field.of("sampleList", FieldType.array(FieldType.logicalType(BASIC_ENUM_ENUMERATION))); pipeline .apply(Create.of(elements)) .apply( Group.<BasicEnum>globally().aggregateField("enumeration", sampleAnyCombineFn, aggField)) .apply( ParDo.of( new DoFn<Row, List<Integer>>() { @ProcessElement public void process(@Element Row value) { assertThat( value.getArray(0), containsInAnyOrder( BASIC_ENUM_ENUMERATION.valueOf(1), BASIC_ENUM_ENUMERATION.valueOf(2))); } })); pipeline.run(); } private static <T> Void containsKIterableVs( List<Row> expectedKvs, Iterable<Row> actualKvs, T[] emptyArray) { List<Row> list = Lists.newArrayList(actualKvs); List<Matcher<? super Row>> matchers = new ArrayList<>(); for (Row expected : expectedKvs) { List<Matcher> fieldMatchers = Lists.newArrayList(); fieldMatchers.add( new RowFieldMatcherIterableFieldAnyOrder(expected.getSchema(), 0, expected.getRow(0))); assertEquals(TypeName.ITERABLE, expected.getSchema().getField(1).getType().getTypeName()); fieldMatchers.add( new RowFieldMatcherIterableFieldAnyOrder( expected.getSchema(), 1, expected.getIterable(1))); matchers.add(allOf(fieldMatchers.toArray(new Matcher[0]))); } assertThat(actualKvs, containsInAnyOrder(matchers.toArray(new Matcher[0]))); return null; } private static <T> Void containsKvRows(List<Row> expectedKvs, Iterable<Row> actualKvs) { List<Matcher<? 
super Row>> matchers = new ArrayList<>(); for (Row expected : expectedKvs) { matchers.add(new KvRowMatcher(equalTo(expected.getRow(0)), equalTo(expected.getRow(1)))); } assertThat(actualKvs, containsInAnyOrder(matchers.toArray(new Matcher[0]))); return null; } public static class KvRowMatcher extends TypeSafeMatcher<Row> { final Matcher<? super Row> keyMatcher; final Matcher<? super Row> valueMatcher; public KvRowMatcher(Matcher<? super Row> keyMatcher, Matcher<? super Row> valueMatcher) { this.keyMatcher = keyMatcher; this.valueMatcher = valueMatcher; } @Override public boolean matchesSafely(Row kvRow) { return keyMatcher.matches(kvRow.getRow(0)) && valueMatcher.matches(kvRow.getRow(1)); } @Override public void describeTo(Description description) { description .appendText("a KVRow(") .appendValue(keyMatcher) .appendText(", ") .appendValue(valueMatcher) .appendText(")"); } } private static Void containsSingleIterable( Collection<Basic> expected, Iterable<Iterable<Basic>> actual) { Basic[] values = expected.toArray(new Basic[0]); assertThat(actual, containsInAnyOrder(containsInAnyOrder(values))); return null; } }
class BasicEnum { enum Test { ZERO, ONE, TWO }; abstract String getKey(); abstract Test getEnumeration(); static BasicEnum of(String key, Test value) { return new AutoValue_GroupTest_BasicEnum(key, value); } }
class BasicEnum { enum Test { ZERO, ONE, TWO }; abstract String getKey(); abstract Test getEnumeration(); static BasicEnum of(String key, Test value) { return new AutoValue_GroupTest_BasicEnum(key, value); } }
I'm not sure why that was done for methods, I think it could be a different env but still a class env, we can check maybe. But, since we've been defining fields in the proper (or most relevant scope) why would be go back and define it in a less accurate (wider) scope? The issue you are trying to solve only applies to the default value expression. I would rather we use a correct env (where the other fields are not visible, maybe somewhat similar to what was done in https://github.com/ballerina-platform/ballerina-lang/pull/35326) than change how the field itself is defined.
private void analyzeModuleConfigurableAmbiguity(BLangPackage pkgNode) { if (pkgNode.moduleContextDataHolder == null) { return; } ModuleDescriptor rootModule = pkgNode.moduleContextDataHolder.descriptor(); Set<BVarSymbol> configVars = symResolver.getConfigVarSymbolsIncludingImportedModules(pkgNode.symbol); String rootOrgName = rootModule.org().value(); String rootModuleName = rootModule.packageName().value(); Map<String, PackageID> configKeys = getModuleKeys(configVars, rootOrgName); for (BVarSymbol variable : configVars) { String moduleName = variable.pkgID.name.value; String orgName = variable.pkgID.orgName.value; String varName = variable.name.value; validateMapConfigVariable(orgName + "." + moduleName + "." + varName, variable, configKeys); if (orgName.equals(rootOrgName)) { validateMapConfigVariable(moduleName + "." + varName, variable, configKeys); if (moduleName.equals(rootModuleName) && !(varName.equals(moduleName))) { validateMapConfigVariable(varName, variable, configKeys); } } } }
ModuleDescriptor rootModule = pkgNode.moduleContextDataHolder.descriptor();
private void analyzeModuleConfigurableAmbiguity(BLangPackage pkgNode) { if (pkgNode.moduleContextDataHolder == null) { return; } ModuleDescriptor rootModule = pkgNode.moduleContextDataHolder.descriptor(); Set<BVarSymbol> configVars = symResolver.getConfigVarSymbolsIncludingImportedModules(pkgNode.symbol); String rootOrgName = rootModule.org().value(); String rootModuleName = rootModule.packageName().value(); Map<String, PackageID> configKeys = getModuleKeys(configVars, rootOrgName); for (BVarSymbol variable : configVars) { String moduleName = variable.pkgID.name.value; String orgName = variable.pkgID.orgName.value; String varName = variable.name.value; validateMapConfigVariable(orgName + "." + moduleName + "." + varName, variable, configKeys); if (orgName.equals(rootOrgName)) { validateMapConfigVariable(moduleName + "." + varName, variable, configKeys); if (moduleName.equals(rootModuleName) && !(varName.equals(moduleName))) { validateMapConfigVariable(varName, variable, configKeys); } } } }
class representing a service-decl or object-ctor with service prefix AttachPoint.Point attachedPoint; Set<Flag> flagSet = classDefinition.flagSet; if (flagSet.contains(Flag.OBJECT_CTOR) && flagSet.contains(Flag.SERVICE)) { attachedPoint = AttachPoint.Point.SERVICE; }
class representing a service-decl or object-ctor with service prefix AttachPoint.Point attachedPoint; Set<Flag> flagSet = classDefinition.flagSet; if (flagSet.contains(Flag.OBJECT_CTOR) && flagSet.contains(Flag.SERVICE)) { attachedPoint = AttachPoint.Point.SERVICE; }
this change should be a separate hotfix
public void testSerializationOfUnknownShuffleDescriptor() throws Exception { ShuffleDescriptor shuffleDescriptor = new UnknownShuffleDescriptor(resultPartitionID); ShuffleDescriptor shuffleDescriptorCopy = CommonTestUtils.createCopySerializable(shuffleDescriptor); assertThat(shuffleDescriptorCopy, instanceOf(UnknownShuffleDescriptor.class)); assertThat(shuffleDescriptorCopy.getResultPartitionID(), is(resultPartitionID)); assertThat(shuffleDescriptorCopy.isUnknown(), is(true)); }
assertThat(shuffleDescriptorCopy.getResultPartitionID(), is(resultPartitionID));
public void testSerializationOfUnknownShuffleDescriptor() throws IOException { ShuffleDescriptor shuffleDescriptor = new UnknownShuffleDescriptor(resultPartitionID); ShuffleDescriptor shuffleDescriptorCopy = CommonTestUtils.createCopySerializable(shuffleDescriptor); assertThat(shuffleDescriptorCopy, instanceOf(UnknownShuffleDescriptor.class)); assertThat(shuffleDescriptorCopy.getResultPartitionID(), is(resultPartitionID)); assertThat(shuffleDescriptorCopy.isUnknown(), is(true)); }
class ResultPartitionDeploymentDescriptorTest extends TestLogger { private static final IntermediateDataSetID resultId = new IntermediateDataSetID(); private static final IntermediateResultPartitionID partitionId = new IntermediateResultPartitionID(); private static final ExecutionAttemptID producerExecutionId = new ExecutionAttemptID(); private static final ResultPartitionType partitionType = ResultPartitionType.PIPELINED; private static final int numberOfSubpartitions = 24; private static final int connectionIndex = 10; private static final PartitionDescriptor partitionDescriptor = new PartitionDescriptor( resultId, partitionId, partitionType, numberOfSubpartitions, connectionIndex); private static final ResultPartitionID resultPartitionID = new ResultPartitionID(partitionId, producerExecutionId); private static final ResourceID producerLocation = new ResourceID("producerLocation"); private static final InetSocketAddress address = new InetSocketAddress("localhost", 10000); private static final ConnectionID connectionID = new ConnectionID(address, connectionIndex); /** * Tests simple de/serialization with {@link UnknownShuffleDescriptor}. */ @Test /** * Tests simple de/serialization with {@link NettyShuffleDescriptor}. 
*/ @Test public void testSerializationWithNettyShuffleDescriptor() throws Exception { ShuffleDescriptor shuffleDescriptor = new NettyShuffleDescriptor( producerLocation, new NettyShuffleDescriptor.NetworkPartitionConnectionInfo(connectionID), resultPartitionID, false); ResultPartitionDeploymentDescriptor copy = createCopyAndVerifyResultPartitionDeploymentDescriptor(shuffleDescriptor); assertThat(copy.getShuffleDescriptor(), instanceOf(NettyShuffleDescriptor.class)); NettyShuffleDescriptor shuffleDescriptorCopy = (NettyShuffleDescriptor) copy.getShuffleDescriptor(); assertThat(shuffleDescriptorCopy.getResultPartitionID(), is(resultPartitionID)); assertThat(shuffleDescriptorCopy.isUnknown(), is(false)); assertThat(shuffleDescriptorCopy.isLocalTo(producerLocation), is(true)); assertThat(shuffleDescriptorCopy.getConnectionId(), is(connectionID)); } @Test public void testReleasedOnConsumptionFlag() { for (ResultPartitionType partitionType : ResultPartitionType.values()) { ResultPartitionDeploymentDescriptor partitionDescriptor = new ResultPartitionDeploymentDescriptor( new PartitionDescriptor(resultId, partitionId, partitionType, numberOfSubpartitions, connectionIndex), NettyShuffleDescriptorBuilder.newBuilder().setBlocking(partitionType.isBlocking()).buildLocal(), 1, true ); if (partitionType == ResultPartitionType.BLOCKING) { assertThat(partitionDescriptor.isReleasedOnConsumption(), is(false)); } else { assertThat(partitionDescriptor.isReleasedOnConsumption(), is(true)); } } } @Test(expected = IllegalArgumentException.class) public void testIncompatibleReleasedOnConsumptionFlag() { new ResultPartitionDeploymentDescriptor( partitionDescriptor, NettyShuffleDescriptorBuilder.newBuilder().setBlocking(false).buildLocal(), 1, true, false); } private static ResultPartitionDeploymentDescriptor createCopyAndVerifyResultPartitionDeploymentDescriptor( ShuffleDescriptor shuffleDescriptor) throws IOException { ResultPartitionDeploymentDescriptor orig = new 
ResultPartitionDeploymentDescriptor( partitionDescriptor, shuffleDescriptor, numberOfSubpartitions, true); ResultPartitionDeploymentDescriptor copy = CommonTestUtils.createCopySerializable(orig); verifyResultPartitionDeploymentDescriptorCopy(copy); return copy; } private static void verifyResultPartitionDeploymentDescriptorCopy(ResultPartitionDeploymentDescriptor copy) { assertThat(copy.getResultId(), is(resultId)); assertThat(copy.getPartitionId(), is(partitionId)); assertThat(copy.getPartitionType(), is(partitionType)); assertThat(copy.getNumberOfSubpartitions(), is(numberOfSubpartitions)); assertThat(copy.sendScheduleOrUpdateConsumersMessage(), is(true)); } }
class ResultPartitionDeploymentDescriptorTest extends TestLogger { private static final IntermediateDataSetID resultId = new IntermediateDataSetID(); private static final IntermediateResultPartitionID partitionId = new IntermediateResultPartitionID(); private static final ExecutionAttemptID producerExecutionId = new ExecutionAttemptID(); private static final ResultPartitionType partitionType = ResultPartitionType.PIPELINED; private static final int numberOfSubpartitions = 24; private static final int connectionIndex = 10; private static final PartitionDescriptor partitionDescriptor = new PartitionDescriptor( resultId, partitionId, partitionType, numberOfSubpartitions, connectionIndex); private static final ResultPartitionID resultPartitionID = new ResultPartitionID(partitionId, producerExecutionId); private static final ResourceID producerLocation = new ResourceID("producerLocation"); private static final InetSocketAddress address = new InetSocketAddress("localhost", 10000); private static final ConnectionID connectionID = new ConnectionID(address, connectionIndex); /** * Tests simple de/serialization with {@link UnknownShuffleDescriptor}. */ @Test /** * Tests simple de/serialization with {@link NettyShuffleDescriptor}. 
*/ @Test public void testSerializationWithNettyShuffleDescriptor() throws IOException { ShuffleDescriptor shuffleDescriptor = new NettyShuffleDescriptor( producerLocation, new NetworkPartitionConnectionInfo(connectionID), resultPartitionID, false); ResultPartitionDeploymentDescriptor copy = createCopyAndVerifyResultPartitionDeploymentDescriptor(shuffleDescriptor); assertThat(copy.getShuffleDescriptor(), instanceOf(NettyShuffleDescriptor.class)); NettyShuffleDescriptor shuffleDescriptorCopy = (NettyShuffleDescriptor) copy.getShuffleDescriptor(); assertThat(shuffleDescriptorCopy.getResultPartitionID(), is(resultPartitionID)); assertThat(shuffleDescriptorCopy.isUnknown(), is(false)); assertThat(shuffleDescriptorCopy.isLocalTo(producerLocation), is(true)); assertThat(shuffleDescriptorCopy.getConnectionId(), is(connectionID)); } @Test public void testReleasedOnConsumptionFlag() { for (ResultPartitionType partitionType : ResultPartitionType.values()) { ResultPartitionDeploymentDescriptor partitionDescriptor = new ResultPartitionDeploymentDescriptor( new PartitionDescriptor(resultId, partitionId, partitionType, numberOfSubpartitions, connectionIndex), NettyShuffleDescriptorBuilder.newBuilder().setBlocking(partitionType.isBlocking()).buildLocal(), 1, true ); if (partitionType == ResultPartitionType.BLOCKING) { assertThat(partitionDescriptor.isReleasedOnConsumption(), is(false)); } else { assertThat(partitionDescriptor.isReleasedOnConsumption(), is(true)); } } } @Test(expected = IllegalArgumentException.class) public void testIncompatibleReleaseTypeManual() { new ResultPartitionDeploymentDescriptor( partitionDescriptor, NettyShuffleDescriptorBuilder.newBuilder().setBlocking(false).buildLocal(), 1, true, ReleaseType.MANUAL); } private static ResultPartitionDeploymentDescriptor createCopyAndVerifyResultPartitionDeploymentDescriptor( ShuffleDescriptor shuffleDescriptor) throws IOException { ResultPartitionDeploymentDescriptor orig = new ResultPartitionDeploymentDescriptor( 
partitionDescriptor, shuffleDescriptor, numberOfSubpartitions, true); ResultPartitionDeploymentDescriptor copy = CommonTestUtils.createCopySerializable(orig); verifyResultPartitionDeploymentDescriptorCopy(copy); return copy; } private static void verifyResultPartitionDeploymentDescriptorCopy(ResultPartitionDeploymentDescriptor copy) { assertThat(copy.getResultId(), is(resultId)); assertThat(copy.getPartitionId(), is(partitionId)); assertThat(copy.getPartitionType(), is(partitionType)); assertThat(copy.getNumberOfSubpartitions(), is(numberOfSubpartitions)); assertThat(copy.sendScheduleOrUpdateConsumersMessage(), is(true)); } }
I think the comments I left in the other location setting this could apply everywhere
/**
 * Reads the entire file, or a range of the file, into a file specified by the options.
 *
 * @param options {@link ReadToFileOptions}; {@code null} is treated as all-default options.
 * @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
 * @param context Additional context that is passed through the Http pipeline during the service call.
 * @return A response containing the file properties.
 */
public Response<PathProperties> readToFileWithResponse(ReadToFileOptions options, Duration timeout, Context context) {
    Context newContext;
    // Default a null options bag so the accessor calls below use "all defaults" instead of throwing.
    options = options == null ? new ReadToFileOptions() : options;
    if (options.isUpn() != null) {
        // Surface the caller's UPN preference to the service as the x-ms-upn request header.
        HttpHeaders headers = new HttpHeaders();
        headers.set("x-ms-upn", options.isUpn() ? "true" : "false");
        if (context == null) {
            newContext = new Context(AddHeadersFromContextPolicy.AZURE_REQUEST_HTTP_HEADERS_KEY, headers);
        } else {
            newContext = context.addData(AddHeadersFromContextPolicy.AZURE_REQUEST_HTTP_HEADERS_KEY, headers);
        }
    } else {
        // Bug fix: the previous code replaced the caller-supplied context with null here, silently
        // discarding any per-call context data when no UPN preference was set. Pass it through unchanged.
        newContext = context;
    }
    ReadToFileOptions finalOptions = options;
    return DataLakeImplUtils.returnOrConvertException(() -> {
        Response<BlobProperties> response = blockBlobClient.downloadToFileWithResponse(
            new BlobDownloadToFileOptions(finalOptions.getFilePath())
                .setRange(Transforms.toBlobRange(finalOptions.getRange()))
                .setParallelTransferOptions(finalOptions.getParallelTransferOptions())
                .setDownloadRetryOptions(Transforms.toBlobDownloadRetryOptions(finalOptions.getDownloadRetryOptions()))
                .setRequestConditions(Transforms.toBlobRequestConditions(finalOptions.getDataLakeRequestConditions()))
                .setRetrieveContentRangeMd5(finalOptions.isRangeGetContentMd5())
                .setOpenOptions(finalOptions.getOpenOptions()),
            timeout, newContext);
        return new SimpleResponse<>(response, Transforms.toPathProperties(response.getValue(), response));
    }, LOGGER);
}
options = options == null ? new ReadToFileOptions() : options;
/**
 * Reads the entire file, or a range of the file, into a file specified by the options.
 *
 * @param options {@link ReadToFileOptions}; {@code null} is treated as all-default options.
 * @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
 * @param context Additional context that is passed through the Http pipeline during the service call.
 * @return A response containing the file properties.
 */
public Response<PathProperties> readToFileWithResponse(ReadToFileOptions options, Duration timeout, Context context) {
    // Bug fix: the null guard previously covered only the UPN lookup lambda, while
    // 'options.getFilePath()' and friends below still dereferenced a possibly-null argument.
    // Default the whole options bag so a null argument keeps the historical "all defaults" behavior.
    ReadToFileOptions finalOptions = (options == null) ? new ReadToFileOptions() : options;
    // Attach the x-ms-upn header (when a UPN preference is set) through the shared helper.
    Context finalContext = BuilderHelper.addUpnHeader(finalOptions::isUpn, context);
    return DataLakeImplUtils.returnOrConvertException(() -> {
        Response<BlobProperties> response = blockBlobClient.downloadToFileWithResponse(
            new BlobDownloadToFileOptions(finalOptions.getFilePath())
                .setRange(Transforms.toBlobRange(finalOptions.getRange()))
                .setParallelTransferOptions(finalOptions.getParallelTransferOptions())
                .setDownloadRetryOptions(Transforms.toBlobDownloadRetryOptions(finalOptions.getDownloadRetryOptions()))
                .setRequestConditions(Transforms.toBlobRequestConditions(finalOptions.getDataLakeRequestConditions()))
                .setRetrieveContentRangeMd5(finalOptions.isRangeGetContentMd5())
                .setOpenOptions(finalOptions.getOpenOptions()),
            timeout, finalContext);
        return new SimpleResponse<>(response, Transforms.toPathProperties(response.getValue(), response));
    }, LOGGER);
}
class DataLakeFileClient extends DataLakePathClient {

    /**
     * Indicates the maximum number of bytes that can be sent in a call to upload.
     */
    private static final long MAX_APPEND_FILE_BYTES = DataLakeFileAsyncClient.MAX_APPEND_FILE_BYTES;

    private static final ClientLogger LOGGER = new ClientLogger(DataLakeFileClient.class);

    // Async counterpart; the synchronous operations in this class delegate to it and block.
    private final DataLakeFileAsyncClient dataLakeFileAsyncClient;

    // Package-private: instances are produced by the builders/factory methods, not constructed directly.
    DataLakeFileClient(DataLakeFileAsyncClient pathAsyncClient, BlockBlobClient blockBlobClient) {
        super(pathAsyncClient, blockBlobClient);
        this.dataLakeFileAsyncClient = pathAsyncClient;
    }

    // Re-wraps a generic path client as a file client, deriving a fresh async file client from it.
    private DataLakeFileClient(DataLakePathClient dataLakePathClient) {
        super(dataLakePathClient.dataLakePathAsyncClient, dataLakePathClient.blockBlobClient);
        this.dataLakeFileAsyncClient = new DataLakeFileAsyncClient(dataLakePathClient.dataLakePathAsyncClient);
    }

    /**
     * Gets the URL of the file represented by this client on the Data Lake service.
     *
     * @return the URL.
     */
    public String getFileUrl() {
        return getPathUrl();
    }

    /**
     * Gets the path of this file, not including the name of the resource itself.
     *
     * @return The path of the file.
     */
    public String getFilePath() {
        return getObjectPath();
    }

    /**
     * Gets the name of this file, not including its full path.
     *
     * @return The name of the file.
     */
    public String getFileName() {
        return getObjectName();
    }

    /**
     * Creates a new {@link DataLakeFileClient} with the specified {@code customerProvidedKey}.
     *
     * @param customerProvidedKey the {@link CustomerProvidedKey} for the blob,
     * pass {@code null} to use no customer provided key.
     * @return a {@link DataLakeFileClient} with the specified {@code customerProvidedKey}.
     */
    public DataLakeFileClient getCustomerProvidedKeyClient(CustomerProvidedKey customerProvidedKey) {
        return new DataLakeFileClient(dataLakeFileAsyncClient.getCustomerProvidedKeyAsyncClient(customerProvidedKey),
            blockBlobClient.getCustomerProvidedKeyClient(Transforms.toBlobCustomerProvidedKey(customerProvidedKey)));
    }

    /**
     * Deletes a file.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public void delete() {
        deleteWithResponse(null, null, Context.NONE).getValue();
    }

    /**
     * Deletes a file.
     *
     * @param requestConditions {@link DataLakeRequestConditions}
     * @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
     * @param context Additional context that is passed through the Http pipeline during the service call.
     *
     * @return A response containing status code and HTTP headers.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Response<Void> deleteWithResponse(DataLakeRequestConditions requestConditions, Duration timeout,
        Context context) {
        // Delegate to the async path client and block, honoring the optional timeout.
        Mono<Response<Void>> response = dataLakePathAsyncClient.deleteWithResponse(null, requestConditions, context);
        return StorageImplUtils.blockWithOptionalTimeout(response, timeout);
    }

    /**
     * Deletes a file if it exists.
     *
     * @return {@code true} if file is successfully deleted, {@code false} if the file does not exist.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public boolean deleteIfExists() {
        return deleteIfExistsWithResponse(new DataLakePathDeleteOptions(), null, Context.NONE).getValue();
    }

    /**
     * Deletes a file if it exists.
     *
     * @param options {@link DataLakePathDeleteOptions}
     * @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
     * @param context Additional context that is passed through the Http pipeline during the service call.
     *
     * @return A response containing status code and HTTP headers. If {@link Response}'s status code is 200, the file
     * was successfully deleted. If status code is 404, the file does not exist.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Response<Boolean> deleteIfExistsWithResponse(DataLakePathDeleteOptions options, Duration timeout,
        Context context) {
        return StorageImplUtils.blockWithOptionalTimeout(dataLakeFileAsyncClient
            .deleteIfExistsWithResponse(options, context), timeout);
    }

    /**
     * Creates a new file. By default, this method will not overwrite an existing file.
     *
     * @param data The data to write to the blob. The data must be markable. This is in order to support retries. If
     * the data is not markable, consider wrapping your data source in a {@link java.io.BufferedInputStream} to add
     * mark support.
     * @param length The exact length of the data. It is important that this value match precisely the length of the
     * data provided in the {@link InputStream}.
     * @return Information about the uploaded path.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public PathInfo upload(InputStream data, long length) {
        // Overwrite defaults to false so an existing file is never clobbered implicitly.
        return upload(data, length, false);
    }

    /**
     * Creates a new file. By default, this method will not overwrite an existing file.
     *
     * @param data The data to write to the blob.
     * @return Information about the uploaded path.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public PathInfo upload(BinaryData data) {
        // Overwrite defaults to false so an existing file is never clobbered implicitly.
        return upload(data, false);
    }

    /**
     * Creates a new file, or updates the content of an existing file.
     *
     * @param data The data to write to the blob. The data must be markable. This is in order to support retries. If
     * the data is not markable, consider wrapping your data source in a {@link java.io.BufferedInputStream} to add
     * mark support.
     * @param length The exact length of the data. It is important that this value match precisely the length of the
     * data provided in the {@link InputStream}.
     * @param overwrite Whether to overwrite, should data exist on the file.
     * @return Information about the uploaded path.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public PathInfo upload(InputStream data, long length, boolean overwrite) {
        DataLakeRequestConditions requestConditions = new DataLakeRequestConditions();
        if (!overwrite) {
            // If-None-Match: * makes the service fail the request when the file already exists.
            requestConditions.setIfNoneMatch(Constants.HeaderConstants.ETAG_WILDCARD);
        }
        return uploadWithResponse(new FileParallelUploadOptions(data, length).setRequestConditions(requestConditions),
            null, Context.NONE).getValue();
    }

    /**
     * Creates a new file, or updates the content of an existing file.
     *
     * @param data The data to write to the blob. The data must be markable. This is in order to support retries.
     * If the data is not markable, consider wrapping your data source in a
     * {@link java.io.BufferedInputStream} to add mark support.
     * @param overwrite Whether to overwrite, should data exist on the file.
     * @return Information about the uploaded path.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public PathInfo upload(BinaryData data, boolean overwrite) {
        DataLakeRequestConditions requestConditions = new DataLakeRequestConditions();
        if (!overwrite) {
            // If-None-Match: * makes the service fail the request when the file already exists.
            requestConditions.setIfNoneMatch(Constants.HeaderConstants.ETAG_WILDCARD);
        }
        return uploadWithResponse(new FileParallelUploadOptions(data).setRequestConditions(requestConditions),
            null, Context.NONE).getValue();
    }

    /**
     * Creates a new file.
     * To avoid overwriting, pass "*" to {@link DataLakeRequestConditions#setIfNoneMatch(String)}.
     *
     * @param options {@link FileParallelUploadOptions}
     * @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
     * @param context Additional context that is passed through the Http pipeline during the service call.
     * @return Information about the uploaded path.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Response<PathInfo> uploadWithResponse(FileParallelUploadOptions options, Duration timeout,
        Context context) {
        Objects.requireNonNull(options);
        Mono<Response<PathInfo>> upload = this.dataLakeFileAsyncClient.uploadWithResponse(options)
            .contextWrite(FluxUtil.toReactorContext(context));
        try {
            return StorageImplUtils.blockWithOptionalTimeout(upload, timeout);
        } catch (UncheckedIOException e) {
            // Re-log through the client logger so the failure is attributed to this client.
            throw LOGGER.logExceptionAsError(e);
        }
    }

    /**
     * Creates a file, with the content of the specified file. By default, this method will not overwrite an
     * existing file.
     *
     * @param filePath Path of the file to upload
     * @throws UncheckedIOException If an I/O error occurs
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public void uploadFromFile(String filePath) {
        uploadFromFile(filePath, false);
    }

    /**
     * Creates a file, with the content of the specified file.
     *
     * @param filePath Path of the file to upload
     * @param overwrite Whether to overwrite, should the file already exist
     * @throws UncheckedIOException If an I/O error occurs
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public void uploadFromFile(String filePath, boolean overwrite) {
        DataLakeRequestConditions requestConditions = null;
        if (!overwrite) {
            // NOTE(review): the existence probe is only performed for files large enough to be uploaded in
            // chunks; small uploads rely solely on the If-None-Match condition below. This is a
            // check-then-act sequence, so a concurrent create can still slip in between — TODO confirm
            // this matches the intended semantics.
            if (UploadUtils.shouldUploadInChunks(filePath, ModelHelper.FILE_DEFAULT_MAX_SINGLE_UPLOAD_SIZE, LOGGER)
                && exists()) {
                throw LOGGER.logExceptionAsError(new IllegalArgumentException(Constants.BLOB_ALREADY_EXISTS));
            }
            requestConditions = new DataLakeRequestConditions()
                .setIfNoneMatch(Constants.HeaderConstants.ETAG_WILDCARD);
        }
        uploadFromFile(filePath, null, null, null, requestConditions, null);
    }

    /**
     * Creates a file, with the content of the specified file.
     * <p>
     * To avoid overwriting, pass "*" to {@link DataLakeRequestConditions#setIfNoneMatch(String)}.
     *
     * @param filePath Path of the file to upload
     * @param parallelTransferOptions {@link ParallelTransferOptions} used to configure buffered uploading.
     * @param headers {@link PathHttpHeaders}
     * @param metadata Metadata to associate with the resource. If there is leading or trailing whitespace in any
     * metadata key or value, it must be removed or encoded.
     * @param requestConditions {@link DataLakeRequestConditions}
     * @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
     * @throws UncheckedIOException If an I/O error occurs
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public void uploadFromFile(String filePath, ParallelTransferOptions parallelTransferOptions,
        PathHttpHeaders headers, Map<String, String> metadata, DataLakeRequestConditions requestConditions,
        Duration timeout) {
        Mono<Void> upload = this.dataLakeFileAsyncClient.uploadFromFile(
            filePath, parallelTransferOptions, headers, metadata, requestConditions);
        try {
            StorageImplUtils.blockWithOptionalTimeout(upload, timeout);
        } catch (UncheckedIOException e) {
            // Re-log through the client logger so the failure is attributed to this client.
            throw LOGGER.logExceptionAsError(e);
        }
    }

    /**
     * Creates a file, with the content of the specified file.
* <p> * To avoid overwriting, pass "*" to {@link DataLakeRequestConditions * * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.uploadFromFileWithResponse * <pre> * PathHttpHeaders headers = new PathHttpHeaders& * .setContentMd5& * .setContentLanguage& * .setContentType& * * Map&lt;String, String&gt; metadata = Collections.singletonMap& * DataLakeRequestConditions requestConditions = new DataLakeRequestConditions& * .setLeaseId& * .setIfUnmodifiedSince& * Long blockSize = 100L * 1024L * 1024L; & * ParallelTransferOptions parallelTransferOptions = new ParallelTransferOptions& * * try & * Response&lt;PathInfo&gt; response = client.uploadFromFileWithResponse& * metadata, requestConditions, timeout, new Context& * System.out.printf& * & * System.err.printf& * & * </pre> * <!-- end com.azure.storage.file.datalake.DataLakeFileClient.uploadFromFileWithResponse * * @param filePath Path of the file to upload * @param parallelTransferOptions {@link ParallelTransferOptions} used to configure buffered uploading. * @param headers {@link PathHttpHeaders} * @param metadata Metadata to associate with the resource. If there is leading or trailing whitespace in any * metadata key or value, it must be removed or encoded. * @param requestConditions {@link DataLakeRequestConditions} * @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised. * @param context Additional context that is passed through the Http pipeline during the service call. * @return Response containing information about the uploaded path. 
     * @throws UncheckedIOException If an I/O error occurs
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Response<PathInfo> uploadFromFileWithResponse(String filePath,
        ParallelTransferOptions parallelTransferOptions, PathHttpHeaders headers, Map<String, String> metadata,
        DataLakeRequestConditions requestConditions, Duration timeout, Context context) {
        Mono<Response<PathInfo>> upload = this.dataLakeFileAsyncClient.uploadFromFileWithResponse(
            filePath, parallelTransferOptions, headers, metadata, requestConditions)
            .contextWrite(FluxUtil.toReactorContext(context));
        try {
            return StorageImplUtils.blockWithOptionalTimeout(upload, timeout);
        } catch (UncheckedIOException e) {
            // Re-log through the client logger so the failure is attributed to this client.
            throw LOGGER.logExceptionAsError(e);
        }
    }

    /**
     * Appends data to the specified resource to later be flushed (written) by a call to flush.
     *
     * @param data The data to write to the file.
     * @param fileOffset The position where the data is to be appended.
     * @param length The exact length of the data.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public void append(InputStream data, long fileOffset, long length) {
        appendWithResponse(data, fileOffset, length, null, null, Context.NONE);
    }

    /**
     * Appends data to the specified resource to later be flushed (written) by a call to flush.
     *
     * @param data The data to write to the file.
     * @param fileOffset The position where the data is to be appended.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public void append(BinaryData data, long fileOffset) {
        // No explicit length parameter: BinaryData carries its own length.
        appendWithResponse(data, fileOffset, null, null, null, Context.NONE);
    }

    /**
     * Appends data to the specified resource to later be flushed (written) by a call to flush.
     *
     * @param data The data to write to the file.
     * @param fileOffset The position where the data is to be appended.
     * @param length The exact length of the data.
     * @param contentMd5 An MD5 hash of the content of the data. If specified, the service will calculate the MD5 of
     * the received data and fail the request if it does not match the provided MD5.
     * @param leaseId By setting lease id, requests will fail if the provided lease does not match the active lease on
     * the file.
     * @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
     * @param context Additional context that is passed through the Http pipeline during the service call.
     *
     * @return A response signalling completion.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Response<Void> appendWithResponse(InputStream data, long fileOffset, long length, byte[] contentMd5,
        String leaseId, Duration timeout, Context context) {
        // Adapt the legacy parameter list onto the options-bag overload below.
        DataLakeFileAppendOptions appendOptions = new DataLakeFileAppendOptions()
            .setLeaseId(leaseId)
            .setContentHash(contentMd5)
            .setFlush(null);
        return appendWithResponse(data, fileOffset, length, appendOptions, timeout, context);
    }

    /**
     * Appends data to the specified resource to later be flushed (written) by a call to flush.
     *
     * @param data The data to write to the file.
     * @param fileOffset The position where the data is to be appended.
     * @param length The exact length of the data.
     * @param appendOptions {@link DataLakeFileAppendOptions}
     * @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
     * @param context Additional context that is passed through the Http pipeline during the service call.
     *
     * @return A response signalling completion.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Response<Void> appendWithResponse(InputStream data, long fileOffset, long length,
        DataLakeFileAppendOptions appendOptions, Duration timeout, Context context) {
        Objects.requireNonNull(data);
        // Convert the InputStream into a Flux<ByteBuffer> chunked by the default upload block size.
        Flux<ByteBuffer> fbb = Utility.convertStreamToByteBuffer(data, length,
            BlobAsyncClient.BLOB_DEFAULT_UPLOAD_BLOCK_SIZE, true);
        Mono<Response<Void>> response = dataLakeFileAsyncClient.appendWithResponse(fbb, fileOffset, length,
            appendOptions, context);
        try {
            return StorageImplUtils.blockWithOptionalTimeout(response, timeout);
        } catch (UncheckedIOException e) {
            // Re-log through the client logger so the failure is attributed to this client.
            throw LOGGER.logExceptionAsError(e);
        }
    }

    /**
     * Appends data to the specified resource to later be flushed (written) by a call to flush.
     *
     * @param data The data to write to the file.
     * @param fileOffset The position where the data is to be appended.
     * @param contentMd5 An MD5 hash of the content of the data. If specified, the service will calculate the MD5 of
     * the received data and fail the request if it does not match the provided MD5.
     * @param leaseId By setting lease id, requests will fail if the provided lease does not match the active lease on
     * the file.
     * @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
     * @param context Additional context that is passed through the Http pipeline during the service call.
     *
     * @return A response signalling completion.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Response<Void> appendWithResponse(BinaryData data, long fileOffset, byte[] contentMd5, String leaseId,
        Duration timeout, Context context) {
        Objects.requireNonNull(data);
        Flux<ByteBuffer> fluxData = data.toFluxByteBuffer();
        // Adapt the legacy parameter list onto the options bag used by the async client.
        DataLakeFileAppendOptions appendOptions = new DataLakeFileAppendOptions()
            .setLeaseId(leaseId)
            .setContentHash(contentMd5)
            .setFlush(null);
        Mono<Response<Void>> response = dataLakeFileAsyncClient.appendWithResponse(fluxData, fileOffset,
            data.getLength(), appendOptions, context);
        try {
            return StorageImplUtils.blockWithOptionalTimeout(response, timeout);
        } catch (UncheckedIOException e) {
            // Re-log through the client logger so the failure is attributed to this client.
            throw LOGGER.logExceptionAsError(e);
        }
    }

    /**
     * Appends data to the specified resource to later be flushed (written) by a call to flush.
     *
     * @param data The data to write to the file.
     * @param fileOffset The position where the data is to be appended.
     * @param appendOptions {@link DataLakeFileAppendOptions}
     * @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
     * @param context Additional context that is passed through the Http pipeline during the service call.
     *
     * @return A response signalling completion.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Response<Void> appendWithResponse(BinaryData data, long fileOffset,
        DataLakeFileAppendOptions appendOptions, Duration timeout, Context context) {
        Objects.requireNonNull(data);
        Flux<ByteBuffer> fluxData = data.toFluxByteBuffer();
        Mono<Response<Void>> response = dataLakeFileAsyncClient.appendWithResponse(fluxData, fileOffset,
            data.getLength(), appendOptions, context);
        try {
            return StorageImplUtils.blockWithOptionalTimeout(response, timeout);
        } catch (UncheckedIOException e) {
            // Re-log through the client logger so the failure is attributed to this client.
            throw LOGGER.logExceptionAsError(e);
        }
    }

    /**
     * Flushes (writes) data previously appended to the file through a call to append.
     * The previously uploaded data must be contiguous.
     * <p>By default this method will not overwrite existing data.</p>
     *
     * @param position The length of the file after all data has been written.
     * @return Information about the created resource.
     * @deprecated Use {@link #flush(long, boolean)} instead.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    @Deprecated
    public PathInfo flush(long position) {
        // Preserves the historical default of not overwriting existing data.
        return flush(position, false);
    }

    /**
     * Flushes (writes) data previously appended to the file through a call to append.
     * The previously uploaded data must be contiguous.
     *
     * @param position The length of the file after all data has been written.
     * @param overwrite Whether to overwrite, should data exist on the file.
* * @return Information about the created resource. */ @ServiceMethod(returns = ReturnType.SINGLE) public PathInfo flush(long position, boolean overwrite) { DataLakeRequestConditions requestConditions = new DataLakeRequestConditions(); if (!overwrite) { requestConditions = new DataLakeRequestConditions().setIfNoneMatch(Constants.HeaderConstants.ETAG_WILDCARD); } return flushWithResponse(position, false, false, null, requestConditions, null, Context.NONE).getValue(); } /** * Flushes (writes) data previously appended to the file through a call to append. * The previously uploaded data must be contiguous. * * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.flushWithResponse * <pre> * FileRange range = new FileRange& * DownloadRetryOptions options = new DownloadRetryOptions& * byte[] contentMd5 = new byte[0]; & * boolean retainUncommittedData = false; * boolean close = false; * PathHttpHeaders httpHeaders = new PathHttpHeaders& * .setContentLanguage& * .setContentType& * DataLakeRequestConditions requestConditions = new DataLakeRequestConditions& * .setLeaseId& * * Response&lt;PathInfo&gt; response = client.flushWithResponse& * requestConditions, timeout, new Context& * System.out.printf& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakeFileClient.flushWithResponse * * <p>For more information, see the * <a href="https: * Docs</a></p> * * @param position The length of the file after all data has been written. * @param retainUncommittedData Whether uncommitted data is to be retained after the operation. * @param close Whether a file changed event raised indicates completion (true) or modification (false). * @param httpHeaders {@link PathHttpHeaders httpHeaders} * @param requestConditions {@link DataLakeRequestConditions requestConditions} * @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised. 
* @param context Additional context that is passed through the Http pipeline during the service call. * * @return A response containing the information of the created resource. */ @ServiceMethod(returns = ReturnType.SINGLE) public Response<PathInfo> flushWithResponse(long position, boolean retainUncommittedData, boolean close, PathHttpHeaders httpHeaders, DataLakeRequestConditions requestConditions, Duration timeout, Context context) { DataLakeFileFlushOptions flushOptions = new DataLakeFileFlushOptions() .setUncommittedDataRetained(retainUncommittedData) .setClose(close) .setPathHttpHeaders(httpHeaders) .setRequestConditions(requestConditions); return flushWithResponse(position, flushOptions, timeout, context); } /** * Flushes (writes) data previously appended to the file through a call to append. * The previously uploaded data must be contiguous. * * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.flushWithResponse * <pre> * FileRange range = new FileRange& * DownloadRetryOptions options = new DownloadRetryOptions& * byte[] contentMd5 = new byte[0]; & * boolean retainUncommittedData = false; * boolean close = false; * PathHttpHeaders httpHeaders = new PathHttpHeaders& * .setContentLanguage& * .setContentType& * DataLakeRequestConditions requestConditions = new DataLakeRequestConditions& * .setLeaseId& * * Integer leaseDuration = 15; * * DataLakeFileFlushOptions flushOptions = new DataLakeFileFlushOptions& * .setUncommittedDataRetained& * .setClose& * .setPathHttpHeaders& * .setRequestConditions& * .setLeaseAction& * .setLeaseDuration& * .setProposedLeaseId& * * Response&lt;PathInfo&gt; response = client.flushWithResponse& * new Context& * System.out.printf& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakeFileClient.flushWithResponse * * <p>For more information, see the * <a href="https: * Docs</a></p> * * @param position The length of the file after all data has been written. 
* @param flushOptions {@link DataLakeFileFlushOptions} * @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised. * @param context Additional context that is passed through the Http pipeline during the service call. * * @return A response containing the information of the created resource. */ @ServiceMethod(returns = ReturnType.SINGLE) public Response<PathInfo> flushWithResponse(long position, DataLakeFileFlushOptions flushOptions, Duration timeout, Context context) { Mono<Response<PathInfo>> response = dataLakeFileAsyncClient.flushWithResponse(position, flushOptions, context); return StorageImplUtils.blockWithOptionalTimeout(response, timeout); } /** * Reads the entire file into an output stream. * * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.read * <pre> * client.read& * System.out.println& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakeFileClient.read * * <p>For more information, see the * <a href="https: * * @param stream A non-null {@link OutputStream} instance where the downloaded data will be written. * @throws UncheckedIOException If an I/O error occurs. * @throws NullPointerException if {@code stream} is null */ public void read(OutputStream stream) { readWithResponse(stream, null, null, null, false, null, Context.NONE); } /** * Reads a range of bytes from a file into an output stream. * * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.readWithResponse * <pre> * FileRange range = new FileRange& * DownloadRetryOptions options = new DownloadRetryOptions& * * System.out.printf& * client.readWithResponse& * timeout, new Context& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakeFileClient.readWithResponse * * <p>For more information, see the * <a href="https: * * @param stream A non-null {@link OutputStream} instance where the downloaded data will be written. 
     * @param range {@link FileRange}
     * @param options {@link DownloadRetryOptions}
     * @param requestConditions {@link DataLakeRequestConditions}
     * @param getRangeContentMd5 Whether the contentMD5 for the specified file range should be returned.
     * @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
     * @param context Additional context that is passed through the Http pipeline during the service call.
     *
     * @return A response containing status code and HTTP headers.
     * @throws UncheckedIOException If an I/O error occurs.
     * @throws NullPointerException if {@code stream} is null
     */
    public FileReadResponse readWithResponse(OutputStream stream, FileRange range, DownloadRetryOptions options,
        DataLakeRequestConditions requestConditions, boolean getRangeContentMd5, Duration timeout, Context context) {
        // Data Lake reads are backed by the blob endpoint; translate the options, perform the blob download,
        // and convert any blob-flavored exception/response back to the Data Lake equivalents.
        return DataLakeImplUtils.returnOrConvertException(() -> {
            BlobDownloadResponse response = blockBlobClient.downloadWithResponse(stream, Transforms.toBlobRange(range),
                Transforms.toBlobDownloadRetryOptions(options), Transforms.toBlobRequestConditions(requestConditions),
                getRangeContentMd5, timeout, context);
            return Transforms.toFileReadResponse(response);
        }, LOGGER);
    }

    /**
     * Opens a file input stream to download the file. Locks on ETags.
     *
     * @return An {@link InputStream} object that represents the stream to use for reading from the file.
     * @throws DataLakeStorageException If a storage service error occurred.
     */
    public DataLakeFileOpenInputStreamResult openInputStream() {
        return openInputStream(null);
    }

    /**
     * Opens a file input stream to download the specified range of the file. Defaults to ETag locking if the option
     * is not specified.
* * <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.openInputStream * <pre> * DataLakeFileInputStreamOptions options = new DataLakeFileInputStreamOptions& * .setRequestConditions& * DataLakeFileOpenInputStreamResult streamResult = client.openInputStream& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakeFileClient.openInputStream * * @param options {@link DataLakeFileInputStreamOptions} * @return A {@link DataLakeFileOpenInputStreamResult} object that contains the stream to use for reading from the file. * @throws DataLakeStorageException If a storage service error occurred. */ public DataLakeFileOpenInputStreamResult openInputStream(DataLakeFileInputStreamOptions options) { return openInputStream(options, Context.NONE); } /** * Opens a file input stream to download the specified range of the file. Defaults to ETag locking if the option * is not specified. * * <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.openInputStream * <pre> * options = new DataLakeFileInputStreamOptions& * .setRequestConditions& * DataLakeFileOpenInputStreamResult stream = client.openInputStream& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakeFileClient.openInputStream * * @param options {@link DataLakeFileInputStreamOptions} * @param context Additional context that is passed through the Http pipeline during the service call. * @return A {@link DataLakeFileOpenInputStreamResult} object that contains the stream to use for reading from the file. * @throws DataLakeStorageException If a storage service error occurred. */ public DataLakeFileOpenInputStreamResult openInputStream(DataLakeFileInputStreamOptions options, Context context) { Context newContext; options = options == null ? new DataLakeFileInputStreamOptions() : options; if (options.isUpn() != null) { HttpHeaders headers = new HttpHeaders(); headers.set("x-ms-upn", options.isUpn() ? 
"true" : "false"); if (context == null) { newContext = new Context(AddHeadersFromContextPolicy.AZURE_REQUEST_HTTP_HEADERS_KEY, headers); } else { newContext = context.addData(AddHeadersFromContextPolicy.AZURE_REQUEST_HTTP_HEADERS_KEY, headers); } } else { newContext = null; } BlobInputStreamOptions convertedOptions = Transforms.toBlobInputStreamOptions(options); BlobInputStream inputStream = blockBlobClient.openInputStream(convertedOptions, newContext); return new InternalDataLakeFileOpenInputStreamResult(inputStream, Transforms.toPathProperties(inputStream.getProperties())); } /** * Creates and opens an output stream to write data to the file. If the file already exists on the service, it * will be overwritten. * * @return The {@link OutputStream} that can be used to write to the file. * @throws DataLakeStorageException If a storage service error occurred. */ public OutputStream getOutputStream() { return getOutputStream(null); } /** * Creates and opens an output stream to write data to the file. If the file already exists on the service, it * will be overwritten. * <p> * To avoid overwriting, pass "*" to {@link DataLakeRequestConditions * </p> * * @param options {@link DataLakeFileOutputStreamOptions} * @return The {@link OutputStream} that can be used to write to the file. * @throws DataLakeStorageException If a storage service error occurred. */ public OutputStream getOutputStream(DataLakeFileOutputStreamOptions options) { return getOutputStream(options, null); } /** * Creates and opens an output stream to write data to the file. If the file already exists on the service, it * will be overwritten. * <p> * To avoid overwriting, pass "*" to {@link DataLakeRequestConditions * </p> * * @param options {@link DataLakeFileOutputStreamOptions} * @param context Additional context that is passed through the Http pipeline during the service call. * @return The {@link OutputStream} that can be used to write to the file. 
     * @throws DataLakeStorageException If a storage service error occurred.
     */
    public OutputStream getOutputStream(DataLakeFileOutputStreamOptions options, Context context) {
        BlockBlobOutputStreamOptions convertedOptions = Transforms.toBlockBlobOutputStreamOptions(options);
        return blockBlobClient.getBlobOutputStream(convertedOptions, context);
    }

    /**
     * Reads the entire file into a file specified by the path.
     *
     * <p>The file will be created and must not exist, if the file already exists a {@link FileAlreadyExistsException}
     * will be thrown.</p>
     *
     * <p>For more information, see the Azure Docs (Get Blob REST API).</p>
     *
     * @param filePath A {@link String} representing the filePath where the downloaded data will be written.
     * @return The file properties and metadata.
     * @throws UncheckedIOException If an I/O error occurs
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public PathProperties readToFile(String filePath) {
        return readToFile(filePath, false);
    }

    /**
     * Reads the entire file into a file specified by the path.
     *
     * <p>The file will be created and must not exist, if the file already exists a {@link FileAlreadyExistsException}
     * will be thrown.</p>
     *
     * <p>For more information, see the Azure Docs (Get Blob REST API).</p>
     *
     * @param options {@link ReadToFileOptions}
     * @return The file properties and metadata.
     * @throws UncheckedIOException If an I/O error occurs
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public PathProperties readToFile(ReadToFileOptions options) {
        return readToFile(options, false);
    }

    /**
     * Reads the entire file into a file specified by the path.
     *
     * <p>If overwrite is set to false, the file will be created and must not exist, if the file already exists a
     * {@link FileAlreadyExistsException} will be thrown.</p>
     *
     * <p>For more information, see the Azure Docs (Get Blob REST API).</p>
     *
     * @param filePath A {@link String} representing the filePath where the downloaded data will be written.
     * @param overwrite Whether to overwrite the file, should the file exist.
     * @return The file properties and metadata.
     * @throws UncheckedIOException If an I/O error occurs
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public PathProperties readToFile(String filePath, boolean overwrite) {
        Set<OpenOption> openOptions = null;
        if (overwrite) {
            // Overwrite is expressed as a set of OpenOptions: create-or-truncate plus read/write access.
            openOptions = new HashSet<>();
            openOptions.add(StandardOpenOption.CREATE);
            openOptions.add(StandardOpenOption.TRUNCATE_EXISTING);
            openOptions.add(StandardOpenOption.READ);
            openOptions.add(StandardOpenOption.WRITE);
        }
        return readToFileWithResponse(filePath, null, null, null, null, false, openOptions, null, Context.NONE)
            .getValue();
    }

    /**
     * Reads the entire file into a file specified by the path.
     *
     * <p>If overwrite is set to false, the file will be created and must not exist, if the file already exists a
     * {@link FileAlreadyExistsException} will be thrown.</p>
     *
     * <p>For more information, see the Azure Docs (Get Blob REST API).</p>
     *
     * NOTE(review): when {@code overwrite} is true this method mutates the caller-supplied {@code options} object
     * (via {@code setOpenOptions}); also {@code options} must be non-null — confirm both are intended.
     *
     * @param options {@link ReadToFileOptions}
     * @param overwrite Whether to overwrite the file, should the file exist.
     * @return The file properties and metadata.
     * @throws UncheckedIOException If an I/O error occurs
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public PathProperties readToFile(ReadToFileOptions options, boolean overwrite) {
        Set<OpenOption> openOptions = null;
        if (overwrite) {
            // Overwrite is expressed as a set of OpenOptions: create-or-truncate plus read/write access.
            openOptions = new HashSet<>();
            openOptions.add(StandardOpenOption.CREATE);
            openOptions.add(StandardOpenOption.TRUNCATE_EXISTING);
            openOptions.add(StandardOpenOption.READ);
            openOptions.add(StandardOpenOption.WRITE);
            options.setOpenOptions(openOptions);
        }
        return readToFileWithResponse(options, null, Context.NONE)
            .getValue();
    }

    /**
     * Reads the entire file into a file specified by the path.
     *
     * <p>By default the file will be created and must not exist, if the file already exists a
     * {@link FileAlreadyExistsException} will be thrown.
To override this behavior, provide appropriate
     * {@link OpenOption OpenOptions} </p>
     *
     * <p>For more information, see the Azure Docs (Get Blob REST API).</p>
     *
     * @param filePath A {@link String} representing the filePath where the downloaded data will be written.
     * @param range {@link FileRange}
     * @param parallelTransferOptions {@link ParallelTransferOptions} to use to download to file. Number of parallel
     * transfers parameter is ignored.
     * @param downloadRetryOptions {@link DownloadRetryOptions}
     * @param requestConditions {@link DataLakeRequestConditions}
     * @param rangeGetContentMd5 Whether the contentMD5 for the specified file range should be returned.
     * @param openOptions {@link OpenOption OpenOptions} to use to configure how to open or create the file.
     * @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
     * @param context Additional context that is passed through the Http pipeline during the service call.
     * @return A response containing the file properties and metadata.
     * @throws UncheckedIOException If an I/O error occurs.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Response<PathProperties> readToFileWithResponse(String filePath, FileRange range,
        ParallelTransferOptions parallelTransferOptions, DownloadRetryOptions downloadRetryOptions,
        DataLakeRequestConditions requestConditions, boolean rangeGetContentMd5, Set<OpenOption> openOptions,
        Duration timeout, Context context) {
        // Translate all Data Lake options to their blob equivalents, download via the blob client, and convert the
        // blob properties/exceptions back to Data Lake types.
        return DataLakeImplUtils.returnOrConvertException(() -> {
            Response<BlobProperties> response = blockBlobClient.downloadToFileWithResponse(
                new BlobDownloadToFileOptions(filePath)
                    .setRange(Transforms.toBlobRange(range)).setParallelTransferOptions(parallelTransferOptions)
                    .setDownloadRetryOptions(Transforms.toBlobDownloadRetryOptions(downloadRetryOptions))
                    .setRequestConditions(Transforms.toBlobRequestConditions(requestConditions))
                    .setRetrieveContentRangeMd5(rangeGetContentMd5).setOpenOptions(openOptions), timeout, context);
            return new SimpleResponse<>(response, Transforms.toPathProperties(response.getValue(), response));
        }, LOGGER);
    }

    /**
     * Reads the entire file into a file specified by the path.
     *
     * <p>By default the file will be created and must not exist, if the file already exists a
     * {@link FileAlreadyExistsException} will be thrown.
To override this behavior, provide appropriate * {@link OpenOption OpenOptions} </p> * * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.readToFileWithResponse * <pre> * ReadToFileOptions options = new ReadToFileOptions& * options.setRange& * options.setDownloadRetryOptions& * options.setOpenOptions& * StandardOpenOption.WRITE, StandardOpenOption.READ& * options.setParallelTransferOptions& * options.setDataLakeRequestConditions& * options.setRangeGetContentMd5& * * client.readToFileWithResponse& * System.out.println& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakeFileClient.readToFileWithResponse * * @param options {@link ReadToFileOptions} * @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised. * @param context Additional context that is passed through the Http pipeline during the service call. * @return A response containing the file properties and metadata. * @throws UncheckedIOException If an I/O error occurs. */ @ServiceMethod(returns = ReturnType.SINGLE) /** * Moves the file to another location within the file system. * For more information see the * <a href="https: * Docs</a>. * * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakeDirectoryAsyncClient.rename * <pre> * DataLakeDirectoryAsyncClient renamedClient = client.rename& * System.out.println& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakeDirectoryAsyncClient.rename * * @param destinationFileSystem The file system of the destination within the account. * {@code null} for the current file system. * @param destinationPath Relative path from the file system to rename the file to, excludes the file system name. 
     * For example if you want to move a file with fileSystem = "myfilesystem", path = "mydir/hello.txt" to another path
     * in myfilesystem (ex: newdir/hi.txt) then set the destinationPath = "newdir/hi.txt"
     * @return A {@link DataLakeFileClient} used to interact with the new file created.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public DataLakeFileClient rename(String destinationFileSystem, String destinationPath) {
        return renameWithResponse(destinationFileSystem, destinationPath, null, null, null, null).getValue();
    }

    /**
     * Moves the file to another location within the file system.
     * For more information, see the Azure Docs (Data Lake Storage Gen2 REST API, Path - Create with rename source).
     *
     * @param destinationFileSystem The file system of the destination within the account.
     * {@code null} for the current file system.
     * @param destinationPath Relative path from the file system to rename the file to, excludes the file system name.
     * For example if you want to move a file with fileSystem = "myfilesystem", path = "mydir/hello.txt" to another path
     * in myfilesystem (ex: newdir/hi.txt) then set the destinationPath = "newdir/hi.txt"
     * @param sourceRequestConditions {@link DataLakeRequestConditions} against the source.
     * @param destinationRequestConditions {@link DataLakeRequestConditions} against the destination.
     * @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
     * @param context Additional context that is passed through the Http pipeline during the service call.
     *
     * @return A {@link Response} whose {@link Response#getValue() value} contains a {@link DataLakeFileClient}
     * used to interact with the file created.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Response<DataLakeFileClient> renameWithResponse(String destinationFileSystem, String destinationPath,
        DataLakeRequestConditions sourceRequestConditions, DataLakeRequestConditions destinationRequestConditions,
        Duration timeout, Context context) {
        // The async rename returns an async client; wrap it in a sync DataLakeFileClient backed by a freshly built
        // BlockBlobClient pointing at the renamed path.
        Mono<Response<DataLakeFileClient>> response = dataLakeFileAsyncClient.renameWithResponse(destinationFileSystem,
            destinationPath, sourceRequestConditions, destinationRequestConditions, context)
            .map(asyncResponse -> new SimpleResponse<>(asyncResponse.getRequest(), asyncResponse.getStatusCode(),
                asyncResponse.getHeaders(), new DataLakeFileClient(new DataLakeFileAsyncClient(asyncResponse.getValue()),
                    new SpecializedBlobClientBuilder()
                        .blobAsyncClient(asyncResponse.getValue().blockBlobAsyncClient)
                        .buildBlockBlobClient())));
        Response<DataLakeFileClient> resp = StorageImplUtils.blockWithOptionalTimeout(response, timeout);
        // NOTE(review): the value is re-wrapped in another DataLakeFileClient here even though the mapped response
        // already contains one — presumably to rebuild internal state from the path client; confirm this is intended.
        return new SimpleResponse<>(resp, new DataLakeFileClient(resp.getValue()));
    }

    /**
     * Opens an input stream to query the file.
     *
     * <p>For more information, see the Azure Docs (Query Blob Contents REST API).</p>
     *
     * @param expression The query expression.
     * @return An <code>InputStream</code> object that represents the stream to use for reading the query response.
     */
    public InputStream openQueryInputStream(String expression) {
        return openQueryInputStreamWithResponse(new FileQueryOptions(expression)).getValue();
    }

    /**
     * Opens an input stream to query the file.
     *
     * <p>For more information, see the Azure Docs (Query Blob Contents REST API).</p>
     *
     * @param queryOptions {@link FileQueryOptions The query options}.
     * @return A response containing status code and HTTP headers including an <code>InputStream</code> object
     * that represents the stream to use for reading the query response.
     */
    public Response<InputStream> openQueryInputStreamWithResponse(FileQueryOptions queryOptions) {
        // NOTE(review): this block() has no timeout bound, unlike the other sync methods which use
        // blockWithOptionalTimeout — confirm an unbounded wait is acceptable here.
        FileQueryAsyncResponse response = dataLakeFileAsyncClient.queryWithResponse(queryOptions)
            .block();

        // block() on an empty Mono yields null; surface that as an explicit error rather than an NPE downstream.
        if (response == null) {
            throw LOGGER.logExceptionAsError(new IllegalStateException("Query response cannot be null"));
        }

        // Adapt the reactive body into a blocking InputStream for the sync surface.
        return new ResponseBase<>(response.getRequest(), response.getStatusCode(), response.getHeaders(),
            new FluxInputStream(response.getValue()), response.getDeserializedHeaders());
    }

    /**
     * Queries an entire file into an output stream.
     *
     * <p>For more information, see the Azure Docs (Query Blob Contents REST API).</p>
     *
     * @param stream A non-null {@link OutputStream} instance where the downloaded data will be written.
     * @param expression The query expression.
     * @throws UncheckedIOException If an I/O error occurs.
     * @throws NullPointerException if {@code stream} is null.
     */
    public void query(OutputStream stream, String expression) {
        queryWithResponse(new FileQueryOptions(expression, stream), null, Context.NONE);
    }

    /**
     * Queries an entire file into an output stream.
     *
     * <p>For more information, see the Azure Docs (Query Blob Contents REST API).</p>
     *
     * @param queryOptions {@link FileQueryOptions The query options}.
     * @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
     * @param context Additional context that is passed through the Http pipeline during the service call.
     * @return A response containing status code and HTTP headers.
     * @throws UncheckedIOException If an I/O error occurs.
     * @throws NullPointerException if {@code stream} is null.
     */
    public FileQueryResponse queryWithResponse(FileQueryOptions queryOptions, Duration timeout, Context context) {
        // Delegate to the blob query implementation, converting options/response/exceptions between the
        // Data Lake and blob models.
        return DataLakeImplUtils.returnOrConvertException(() -> {
            BlobQueryResponse response = blockBlobClient.queryWithResponse(
                Transforms.toBlobQueryOptions(queryOptions), timeout, context);
            return Transforms.toFileQueryResponse(response);
        }, LOGGER);
    }

    /**
     * Schedules the file for deletion.
     *
     * @param options Schedule deletion parameters.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public void scheduleDeletion(FileScheduleDeletionOptions options) {
        this.scheduleDeletionWithResponse(options, null, Context.NONE);
    }

    /**
     * Schedules the file for deletion.
     *
     * @param options Schedule deletion parameters.
     * @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
     * @param context Additional context that is passed through the Http pipeline during the service call.
     * @return A response containing status code and HTTP headers.
*/ @ServiceMethod(returns = ReturnType.SINGLE) public Response<Void> scheduleDeletionWithResponse(FileScheduleDeletionOptions options, Duration timeout, Context context) { Mono<Response<Void>> response = this.dataLakeFileAsyncClient.scheduleDeletionWithResponse(options, context); return StorageImplUtils.blockWithOptionalTimeout(response, timeout); } }
/**
 * Synchronous client for a file resource in Azure Data Lake Storage. Extends {@link DataLakePathClient} and
 * implements its operations by blocking on the wrapped {@link DataLakeFileAsyncClient} (or by delegating to the
 * underlying {@link BlockBlobClient} where the blob API is used directly).
 */
class DataLakeFileClient extends DataLakePathClient {
    /**
     * Indicates the maximum number of bytes that can be sent in a call to upload.
     */
    private static final long MAX_APPEND_FILE_BYTES = DataLakeFileAsyncClient.MAX_APPEND_FILE_BYTES;

    private static final ClientLogger LOGGER = new ClientLogger(DataLakeFileClient.class);

    // Async counterpart that performs the actual service calls; sync methods block on its Monos.
    private final DataLakeFileAsyncClient dataLakeFileAsyncClient;

    DataLakeFileClient(DataLakeFileAsyncClient pathAsyncClient, BlockBlobClient blockBlobClient) {
        super(pathAsyncClient, blockBlobClient);
        this.dataLakeFileAsyncClient = pathAsyncClient;
    }

    // Converting constructor: re-wraps an existing path client's internals as a file client.
    private DataLakeFileClient(DataLakePathClient dataLakePathClient) {
        super(dataLakePathClient.dataLakePathAsyncClient, dataLakePathClient.blockBlobClient);
        this.dataLakeFileAsyncClient = new DataLakeFileAsyncClient(dataLakePathClient.dataLakePathAsyncClient);
    }

    /**
     * Gets the URL of the file represented by this client on the Data Lake service.
     *
     * @return the URL.
     */
    public String getFileUrl() {
        return getPathUrl();
    }

    /**
     * Gets the path of this file, not including the name of the resource itself.
     *
     * @return The path of the file.
     */
    public String getFilePath() {
        return getObjectPath();
    }

    /**
     * Gets the name of this file, not including its full path.
     *
     * @return The name of the file.
     */
    public String getFileName() {
        return getObjectName();
    }

    /**
     * Creates a new {@link DataLakeFileClient} with the specified {@code customerProvidedKey}.
     *
     * @param customerProvidedKey the {@link CustomerProvidedKey} for the file,
     * pass {@code null} to use no customer provided key.
     * @return a {@link DataLakeFileClient} with the specified {@code customerProvidedKey}.
     */
    public DataLakeFileClient getCustomerProvidedKeyClient(CustomerProvidedKey customerProvidedKey) {
        return new DataLakeFileClient(dataLakeFileAsyncClient.getCustomerProvidedKeyAsyncClient(customerProvidedKey),
            blockBlobClient.getCustomerProvidedKeyClient(Transforms.toBlobCustomerProvidedKey(customerProvidedKey)));
    }

    /**
     * Deletes a file.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public void delete() {
        deleteWithResponse(null, null, Context.NONE).getValue();
    }

    /**
     * Deletes a file.
     *
     * @param requestConditions {@link DataLakeRequestConditions}
     * @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
     * @param context Additional context that is passed through the HTTP pipeline during the service call.
     * @return A response containing status code and HTTP headers.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Response<Void> deleteWithResponse(DataLakeRequestConditions requestConditions, Duration timeout,
        Context context) {
        // First argument (recursive) is null: a file delete is never recursive.
        Mono<Response<Void>> response = dataLakePathAsyncClient.deleteWithResponse(null, requestConditions, context);
        return StorageImplUtils.blockWithOptionalTimeout(response, timeout);
    }

    /**
     * Deletes a file if it exists.
     *
     * @return {@code true} if the file is successfully deleted, {@code false} if the file does not exist.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public boolean deleteIfExists() {
        return deleteIfExistsWithResponse(new DataLakePathDeleteOptions(), null, Context.NONE).getValue();
    }

    /**
     * Deletes a file if it exists.
     *
     * @param options {@link DataLakePathDeleteOptions}
     * @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
     * @param context Additional context that is passed through the HTTP pipeline during the service call.
     * @return A response containing status code and HTTP headers. If {@link Response}'s status code is 200, the file
     * was successfully deleted. If status code is 404, the file does not exist.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Response<Boolean> deleteIfExistsWithResponse(DataLakePathDeleteOptions options, Duration timeout,
        Context context) {
        return StorageImplUtils.blockWithOptionalTimeout(dataLakeFileAsyncClient
            .deleteIfExistsWithResponse(options, context), timeout);
    }

    /**
     * Creates a new file. By default, this method will not overwrite an existing file.
     *
     * @param data The data to write to the file. The data must be markable. This is in order to support retries. If
     * the data is not markable, consider wrapping your data source in a {@link java.io.BufferedInputStream} to add
     * mark support.
     * @param length The exact length of the data. It is important that this value match precisely the length of the
     * data provided in the {@link InputStream}.
     * @return Information about the uploaded path.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public PathInfo upload(InputStream data, long length) {
        return upload(data, length, false);
    }

    /**
     * Creates a new file from the given {@link BinaryData}. By default, this method will not overwrite an existing
     * file.
     *
     * @param data The data to write to the file.
     * @return Information about the uploaded path.
*/
    @ServiceMethod(returns = ReturnType.SINGLE)
    public PathInfo upload(BinaryData data) {
        return upload(data, false);
    }

    /**
     * Creates a new file, or updates the content of an existing file.
     *
     * @param data The data to write to the file. The data must be markable. This is in order to support retries. If
     * the data is not markable, consider wrapping your data source in a {@link java.io.BufferedInputStream} to add
     * mark support.
     * @param length The exact length of the data. It is important that this value match precisely the length of the
     * data provided in the {@link InputStream}.
     * @param overwrite Whether to overwrite, should data exist on the file.
     * @return Information about the uploaded path.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public PathInfo upload(InputStream data, long length, boolean overwrite) {
        DataLakeRequestConditions requestConditions = new DataLakeRequestConditions();
        if (!overwrite) {
            // ETag wildcard "if-none-match" makes the service reject the write when the file already exists.
            requestConditions.setIfNoneMatch(Constants.HeaderConstants.ETAG_WILDCARD);
        }
        return uploadWithResponse(new FileParallelUploadOptions(data, length).setRequestConditions(requestConditions),
            null, Context.NONE).getValue();
    }

    /**
     * Creates a new file, or updates the content of an existing file.
     *
     * @param data The data to write to the file.
     * @param overwrite Whether to overwrite, should data exist on the file.
     * @return Information about the uploaded path.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public PathInfo upload(BinaryData data, boolean overwrite) {
        DataLakeRequestConditions requestConditions = new DataLakeRequestConditions();
        if (!overwrite) {
            // ETag wildcard "if-none-match" makes the service reject the write when the file already exists.
            requestConditions.setIfNoneMatch(Constants.HeaderConstants.ETAG_WILDCARD);
        }
        return uploadWithResponse(new FileParallelUploadOptions(data).setRequestConditions(requestConditions),
            null, Context.NONE).getValue();
    }

    /**
     * Creates a new file. To avoid overwriting, pass "*" to
     * {@link DataLakeRequestConditions#setIfNoneMatch(String)}.
     *
     * @param options {@link FileParallelUploadOptions}
     * @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
     * @param context Additional context that is passed through the HTTP pipeline during the service call.
     * @return Information about the uploaded path.
*/
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Response<PathInfo> uploadWithResponse(FileParallelUploadOptions options, Duration timeout, Context context) {
        Objects.requireNonNull(options);
        Mono<Response<PathInfo>> upload = this.dataLakeFileAsyncClient.uploadWithResponse(options)
            .contextWrite(FluxUtil.toReactorContext(context));

        try {
            return StorageImplUtils.blockWithOptionalTimeout(upload, timeout);
        } catch (UncheckedIOException e) {
            // Re-log through the client logger so the failure is attributed to this client.
            throw LOGGER.logExceptionAsError(e);
        }
    }

    /**
     * Creates a file, with the content of the specified file. By default, this method will not overwrite an
     * existing file.
     *
     * @param filePath Path of the file to upload
     * @throws UncheckedIOException If an I/O error occurs
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public void uploadFromFile(String filePath) {
        uploadFromFile(filePath, false);
    }

    /**
     * Creates a file, with the content of the specified file.
     *
     * @param filePath Path of the file to upload
     * @param overwrite Whether to overwrite, should the file already exist
     * @throws UncheckedIOException If an I/O error occurs
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public void uploadFromFile(String filePath, boolean overwrite) {
        DataLakeRequestConditions requestConditions = null;

        if (!overwrite) {
            // For sources large enough to be uploaded in chunks, check existence up front and fail fast;
            // otherwise rely on the ETag wildcard precondition set below to reject the write server-side.
            if (UploadUtils.shouldUploadInChunks(filePath, ModelHelper.FILE_DEFAULT_MAX_SINGLE_UPLOAD_SIZE, LOGGER)
                && exists()) {
                throw LOGGER.logExceptionAsError(new IllegalArgumentException(Constants.BLOB_ALREADY_EXISTS));
            }
            requestConditions = new DataLakeRequestConditions().setIfNoneMatch(Constants.HeaderConstants.ETAG_WILDCARD);
        }

        uploadFromFile(filePath, null, null, null, requestConditions, null);
    }

    /**
     * Creates a file, with the content of the specified file.
     * <p>
     * To avoid overwriting, pass "*" to {@link DataLakeRequestConditions#setIfNoneMatch(String)}.
     *
     * @param filePath Path of the file to upload
     * @param parallelTransferOptions {@link ParallelTransferOptions} used to configure buffered uploading.
     * @param headers {@link PathHttpHeaders}
     * @param metadata Metadata to associate with the resource. If there is leading or trailing whitespace in any
     * metadata key or value, it must be removed or encoded.
     * @param requestConditions {@link DataLakeRequestConditions}
     * @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
     * @throws UncheckedIOException If an I/O error occurs
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public void uploadFromFile(String filePath, ParallelTransferOptions parallelTransferOptions,
        PathHttpHeaders headers, Map<String, String> metadata, DataLakeRequestConditions requestConditions,
        Duration timeout) {
        Mono<Void> upload = this.dataLakeFileAsyncClient.uploadFromFile(
            filePath, parallelTransferOptions, headers, metadata, requestConditions);

        try {
            StorageImplUtils.blockWithOptionalTimeout(upload, timeout);
        } catch (UncheckedIOException e) {
            throw LOGGER.logExceptionAsError(e);
        }
    }

    /**
     * Creates a file, with the content of the specified file.
     * <p>
     * To avoid overwriting, pass "*" to {@link DataLakeRequestConditions#setIfNoneMatch(String)}.
     *
     * @param filePath Path of the file to upload
     * @param parallelTransferOptions {@link ParallelTransferOptions} used to configure buffered uploading.
     * @param headers {@link PathHttpHeaders}
     * @param metadata Metadata to associate with the resource. If there is leading or trailing whitespace in any
     * metadata key or value, it must be removed or encoded.
     * @param requestConditions {@link DataLakeRequestConditions}
     * @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
     * @param context Additional context that is passed through the HTTP pipeline during the service call.
     * @return Response containing information about the uploaded path.
     * @throws UncheckedIOException If an I/O error occurs
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Response<PathInfo> uploadFromFileWithResponse(String filePath,
        ParallelTransferOptions parallelTransferOptions, PathHttpHeaders headers, Map<String, String> metadata,
        DataLakeRequestConditions requestConditions, Duration timeout, Context context) {
        Mono<Response<PathInfo>> upload = this.dataLakeFileAsyncClient.uploadFromFileWithResponse(
            filePath, parallelTransferOptions, headers, metadata, requestConditions)
            .contextWrite(FluxUtil.toReactorContext(context));

        try {
            return StorageImplUtils.blockWithOptionalTimeout(upload, timeout);
        } catch (UncheckedIOException e) {
            throw LOGGER.logExceptionAsError(e);
        }
    }

    /**
     * Appends data to the specified resource to later be flushed (written) by a call to flush.
     *
     * @param data The data to write to the file.
     * @param fileOffset The position where the data is to be appended.
     * @param length The exact length of the data.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public void append(InputStream data, long fileOffset, long length) {
        appendWithResponse(data, fileOffset, length, null, null, Context.NONE);
    }

    /**
     * Appends data to the specified resource to later be flushed (written) by a call to flush.
     *
     * @param data The data to write to the file.
     * @param fileOffset The position where the data is to be appended.
*/
    @ServiceMethod(returns = ReturnType.SINGLE)
    public void append(BinaryData data, long fileOffset) {
        appendWithResponse(data, fileOffset, null, null, null, Context.NONE);
    }

    /**
     * Appends data to the specified resource to later be flushed (written) by a call to flush.
     *
     * @param data The data to write to the file.
     * @param fileOffset The position where the data is to be appended.
     * @param length The exact length of the data.
     * @param contentMd5 An MD5 hash of the content of the data. If specified, the service will calculate the MD5 of
     * the received data and fail the request if it does not match the provided MD5.
     * @param leaseId By setting lease id, requests will fail if the provided lease does not match the active lease on
     * the file.
     * @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
     * @param context Additional context that is passed through the HTTP pipeline during the service call.
     * @return A response signalling completion.
*/
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Response<Void> appendWithResponse(InputStream data, long fileOffset, long length, byte[] contentMd5,
        String leaseId, Duration timeout, Context context) {
        // Bundle the legacy individual parameters into the options bag and delegate to the options-based overload.
        DataLakeFileAppendOptions appendOptions = new DataLakeFileAppendOptions()
            .setLeaseId(leaseId)
            .setContentHash(contentMd5)
            .setFlush(null);
        return appendWithResponse(data, fileOffset, length, appendOptions, timeout, context);
    }

    /**
     * Appends data to the specified resource to later be flushed (written) by a call to flush.
     *
     * @param data The data to write to the file.
     * @param fileOffset The position where the data is to be appended.
     * @param length The exact length of the data.
     * @param appendOptions {@link DataLakeFileAppendOptions}
     * @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
     * @param context Additional context that is passed through the HTTP pipeline during the service call.
     * @return A response signalling completion.
*/
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Response<Void> appendWithResponse(InputStream data, long fileOffset, long length,
        DataLakeFileAppendOptions appendOptions, Duration timeout, Context context) {
        Objects.requireNonNull(data);
        // Adapt the blocking stream to a Flux in fixed-size buffers so the async client can consume it;
        // the final 'true' enables stream markability handling for retries.
        Flux<ByteBuffer> fbb = Utility.convertStreamToByteBuffer(data, length,
            BlobAsyncClient.BLOB_DEFAULT_UPLOAD_BLOCK_SIZE, true);

        Mono<Response<Void>> response = dataLakeFileAsyncClient.appendWithResponse(fbb, fileOffset, length,
            appendOptions, context);
        try {
            return StorageImplUtils.blockWithOptionalTimeout(response, timeout);
        } catch (UncheckedIOException e) {
            throw LOGGER.logExceptionAsError(e);
        }
    }

    /**
     * Appends data to the specified resource to later be flushed (written) by a call to flush.
     *
     * @param data The data to write to the file.
     * @param fileOffset The position where the data is to be appended.
     * @param contentMd5 An MD5 hash of the content of the data. If specified, the service will calculate the MD5 of
     * the received data and fail the request if it does not match the provided MD5.
     * @param leaseId By setting lease id, requests will fail if the provided lease does not match the active lease on
     * the file.
     * @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
     * @param context Additional context that is passed through the HTTP pipeline during the service call.
     * @return A response signalling completion.
*/
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Response<Void> appendWithResponse(BinaryData data, long fileOffset, byte[] contentMd5, String leaseId,
        Duration timeout, Context context) {
        Objects.requireNonNull(data);
        Flux<ByteBuffer> fluxData = data.toFluxByteBuffer();
        // Bundle the legacy individual parameters into the options bag used by the async client.
        DataLakeFileAppendOptions appendOptions = new DataLakeFileAppendOptions()
            .setLeaseId(leaseId)
            .setContentHash(contentMd5)
            .setFlush(null);
        Mono<Response<Void>> response = dataLakeFileAsyncClient.appendWithResponse(fluxData, fileOffset,
            data.getLength(), appendOptions, context);
        try {
            return StorageImplUtils.blockWithOptionalTimeout(response, timeout);
        } catch (UncheckedIOException e) {
            throw LOGGER.logExceptionAsError(e);
        }
    }

    /**
     * Appends data to the specified resource to later be flushed (written) by a call to flush.
     *
     * @param data The data to write to the file.
     * @param fileOffset The position where the data is to be appended.
     * @param appendOptions {@link DataLakeFileAppendOptions}
     * @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
     * @param context Additional context that is passed through the HTTP pipeline during the service call.
     * @return A response signalling completion.
*/
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Response<Void> appendWithResponse(BinaryData data, long fileOffset,
        DataLakeFileAppendOptions appendOptions, Duration timeout, Context context) {
        Objects.requireNonNull(data);
        Flux<ByteBuffer> fluxData = data.toFluxByteBuffer();
        Mono<Response<Void>> response = dataLakeFileAsyncClient.appendWithResponse(fluxData, fileOffset,
            data.getLength(), appendOptions, context);
        try {
            return StorageImplUtils.blockWithOptionalTimeout(response, timeout);
        } catch (UncheckedIOException e) {
            throw LOGGER.logExceptionAsError(e);
        }
    }

    /**
     * Flushes (writes) data previously appended to the file through a call to append.
     * The previously uploaded data must be contiguous.
     * <p>By default this method will not overwrite existing data.</p>
     *
     * @param position The length of the file after all data has been written.
     * @return Information about the created resource.
     * @deprecated Use {@link #flush(long, boolean)} instead.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    @Deprecated
    public PathInfo flush(long position) {
        return flush(position, false);
    }

    /**
     * Flushes (writes) data previously appended to the file through a call to append.
     * The previously uploaded data must be contiguous.
     *
     * @param position The length of the file after all data has been written.
     * @param overwrite Whether to overwrite, should data exist on the file.
     * @return Information about the created resource.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public PathInfo flush(long position, boolean overwrite) {
        DataLakeRequestConditions requestConditions = new DataLakeRequestConditions();
        if (!overwrite) {
            // ETag wildcard "if-none-match" makes the service reject the flush when data already exists.
            // Mutate the single instance instead of allocating a second one (matches the upload(...) overloads).
            requestConditions.setIfNoneMatch(Constants.HeaderConstants.ETAG_WILDCARD);
        }
        return flushWithResponse(position, false, false, null, requestConditions, null, Context.NONE).getValue();
    }

    /**
     * Flushes (writes) data previously appended to the file through a call to append.
     * The previously uploaded data must be contiguous.
     *
     * @param position The length of the file after all data has been written.
     * @param retainUncommittedData Whether uncommitted data is to be retained after the operation.
     * @param close Whether a file changed event raised indicates completion (true) or modification (false).
     * @param httpHeaders {@link PathHttpHeaders httpHeaders}
     * @param requestConditions {@link DataLakeRequestConditions requestConditions}
     * @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
* @param context Additional context that is passed through the Http pipeline during the service call. * * @return A response containing the information of the created resource. */ @ServiceMethod(returns = ReturnType.SINGLE) public Response<PathInfo> flushWithResponse(long position, boolean retainUncommittedData, boolean close, PathHttpHeaders httpHeaders, DataLakeRequestConditions requestConditions, Duration timeout, Context context) { DataLakeFileFlushOptions flushOptions = new DataLakeFileFlushOptions() .setUncommittedDataRetained(retainUncommittedData) .setClose(close) .setPathHttpHeaders(httpHeaders) .setRequestConditions(requestConditions); return flushWithResponse(position, flushOptions, timeout, context); } /** * Flushes (writes) data previously appended to the file through a call to append. * The previously uploaded data must be contiguous. * * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.flushWithResponse * <pre> * FileRange range = new FileRange& * DownloadRetryOptions options = new DownloadRetryOptions& * byte[] contentMd5 = new byte[0]; & * boolean retainUncommittedData = false; * boolean close = false; * PathHttpHeaders httpHeaders = new PathHttpHeaders& * .setContentLanguage& * .setContentType& * DataLakeRequestConditions requestConditions = new DataLakeRequestConditions& * .setLeaseId& * * Integer leaseDuration = 15; * * DataLakeFileFlushOptions flushOptions = new DataLakeFileFlushOptions& * .setUncommittedDataRetained& * .setClose& * .setPathHttpHeaders& * .setRequestConditions& * .setLeaseAction& * .setLeaseDuration& * .setProposedLeaseId& * * Response&lt;PathInfo&gt; response = client.flushWithResponse& * new Context& * System.out.printf& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakeFileClient.flushWithResponse * * <p>For more information, see the * <a href="https: * Docs</a></p> * * @param position The length of the file after all data has been written. 
* @param flushOptions {@link DataLakeFileFlushOptions} * @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised. * @param context Additional context that is passed through the Http pipeline during the service call. * * @return A response containing the information of the created resource. */ @ServiceMethod(returns = ReturnType.SINGLE) public Response<PathInfo> flushWithResponse(long position, DataLakeFileFlushOptions flushOptions, Duration timeout, Context context) { Mono<Response<PathInfo>> response = dataLakeFileAsyncClient.flushWithResponse(position, flushOptions, context); return StorageImplUtils.blockWithOptionalTimeout(response, timeout); } /** * Reads the entire file into an output stream. * * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.read * <pre> * client.read& * System.out.println& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakeFileClient.read * * <p>For more information, see the * <a href="https: * * @param stream A non-null {@link OutputStream} instance where the downloaded data will be written. * @throws UncheckedIOException If an I/O error occurs. * @throws NullPointerException if {@code stream} is null */ public void read(OutputStream stream) { readWithResponse(stream, null, null, null, false, null, Context.NONE); } /** * Reads a range of bytes from a file into an output stream. * * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.readWithResponse * <pre> * FileRange range = new FileRange& * DownloadRetryOptions options = new DownloadRetryOptions& * * System.out.printf& * client.readWithResponse& * timeout, new Context& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakeFileClient.readWithResponse * * <p>For more information, see the * <a href="https: * * @param stream A non-null {@link OutputStream} instance where the downloaded data will be written. 
* @param range {@link FileRange} * @param options {@link DownloadRetryOptions} * @param requestConditions {@link DataLakeRequestConditions} * @param getRangeContentMd5 Whether the contentMD5 for the specified file range should be returned. * @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised. * @param context Additional context that is passed through the Http pipeline during the service call. * * @return A response containing status code and HTTP headers. * @throws UncheckedIOException If an I/O error occurs. * @throws NullPointerException if {@code stream} is null */ public FileReadResponse readWithResponse(OutputStream stream, FileRange range, DownloadRetryOptions options, DataLakeRequestConditions requestConditions, boolean getRangeContentMd5, Duration timeout, Context context) { return DataLakeImplUtils.returnOrConvertException(() -> { BlobDownloadResponse response = blockBlobClient.downloadWithResponse(stream, Transforms.toBlobRange(range), Transforms.toBlobDownloadRetryOptions(options), Transforms.toBlobRequestConditions(requestConditions), getRangeContentMd5, timeout, context); return Transforms.toFileReadResponse(response); }, LOGGER); } /** * Opens a file input stream to download the file. Locks on ETags. * * <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.openInputStream --> * <pre> * DataLakeFileOpenInputStreamResult inputStream = client.openInputStream& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakeFileClient.openInputStream --> * * @return An {@link InputStream} object that represents the stream to use for reading from the file. * @throws DataLakeStorageException If a storage service error occurred. */ public DataLakeFileOpenInputStreamResult openInputStream() { return openInputStream(null); } /** * Opens a file input stream to download the specified range of the file. Defaults to ETag locking if the option * is not specified. 
* * <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.openInputStream * <pre> * DataLakeFileInputStreamOptions options = new DataLakeFileInputStreamOptions& * .setRequestConditions& * DataLakeFileOpenInputStreamResult streamResult = client.openInputStream& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakeFileClient.openInputStream * * @param options {@link DataLakeFileInputStreamOptions} * @return A {@link DataLakeFileOpenInputStreamResult} object that contains the stream to use for reading from the file. * @throws DataLakeStorageException If a storage service error occurred. */ public DataLakeFileOpenInputStreamResult openInputStream(DataLakeFileInputStreamOptions options) { return openInputStream(options, Context.NONE); } /** * Opens a file input stream to download the specified range of the file. Defaults to ETag locking if the option * is not specified. * * <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.openInputStream * <pre> * options = new DataLakeFileInputStreamOptions& * .setRequestConditions& * DataLakeFileOpenInputStreamResult stream = client.openInputStream& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakeFileClient.openInputStream * * @param options {@link DataLakeFileInputStreamOptions} * @param context Additional context that is passed through the Http pipeline during the service call. * @return A {@link DataLakeFileOpenInputStreamResult} object that contains the stream to use for reading from the file. * @throws DataLakeStorageException If a storage service error occurred. */ public DataLakeFileOpenInputStreamResult openInputStream(DataLakeFileInputStreamOptions options, Context context) { context = BuilderHelper.addUpnHeader(() -> (options == null) ? 
null : options.isUpn(), context); BlobInputStreamOptions convertedOptions = Transforms.toBlobInputStreamOptions(options); BlobInputStream inputStream = blockBlobClient.openInputStream(convertedOptions, context); return new InternalDataLakeFileOpenInputStreamResult(inputStream, Transforms.toPathProperties(inputStream.getProperties())); } /** * Creates and opens an output stream to write data to the file. If the file already exists on the service, it * will be overwritten. * * @return The {@link OutputStream} that can be used to write to the file. * @throws DataLakeStorageException If a storage service error occurred. */ public OutputStream getOutputStream() { return getOutputStream(null); } /** * Creates and opens an output stream to write data to the file. If the file already exists on the service, it * will be overwritten. * <p> * To avoid overwriting, pass "*" to {@link DataLakeRequestConditions * </p> * * @param options {@link DataLakeFileOutputStreamOptions} * @return The {@link OutputStream} that can be used to write to the file. * @throws DataLakeStorageException If a storage service error occurred. */ public OutputStream getOutputStream(DataLakeFileOutputStreamOptions options) { return getOutputStream(options, null); } /** * Creates and opens an output stream to write data to the file. If the file already exists on the service, it * will be overwritten. * <p> * To avoid overwriting, pass "*" to {@link DataLakeRequestConditions * </p> * * @param options {@link DataLakeFileOutputStreamOptions} * @param context Additional context that is passed through the Http pipeline during the service call. * @return The {@link OutputStream} that can be used to write to the file. * @throws DataLakeStorageException If a storage service error occurred. 
*/ public OutputStream getOutputStream(DataLakeFileOutputStreamOptions options, Context context) { BlockBlobOutputStreamOptions convertedOptions = Transforms.toBlockBlobOutputStreamOptions(options); return blockBlobClient.getBlobOutputStream(convertedOptions, context); } /** * Reads the entire file into a file specified by the path. * * <p>The file will be created and must not exist, if the file already exists a {@link FileAlreadyExistsException} * will be thrown.</p> * * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.readToFile * <pre> * client.readToFile& * System.out.println& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakeFileClient.readToFile * * <p>For more information, see the * <a href="https: * * @param filePath A {@link String} representing the filePath where the downloaded data will be written. * @return The file properties and metadata. * @throws UncheckedIOException If an I/O error occurs */ @ServiceMethod(returns = ReturnType.SINGLE) public PathProperties readToFile(String filePath) { return readToFile(filePath, false); } /** * Reads the entire file into a file specified by the path. * * <p>The file will be created and must not exist, if the file already exists a {@link FileAlreadyExistsException} * will be thrown.</p> * * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.readToFile * <pre> * client.readToFile& * System.out.println& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakeFileClient.readToFile * * <p>For more information, see the * <a href="https: * * @param options {@link ReadToFileOptions} * @return The file properties and metadata. * @throws UncheckedIOException If an I/O error occurs */ @ServiceMethod(returns = ReturnType.SINGLE) public PathProperties readToFile(ReadToFileOptions options) { return readToFile(options, false); } /** * Reads the entire file into a file specified by the path. 
* * <p>If overwrite is set to false, the file will be created and must not exist, if the file already exists a * {@link FileAlreadyExistsException} will be thrown.</p> * * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.readToFile * <pre> * boolean overwrite = false; & * client.readToFile& * System.out.println& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakeFileClient.readToFile * * <p>For more information, see the * <a href="https: * * @param filePath A {@link String} representing the filePath where the downloaded data will be written. * @param overwrite Whether to overwrite the file, should the file exist. * @return The file properties and metadata. * @throws UncheckedIOException If an I/O error occurs */ @ServiceMethod(returns = ReturnType.SINGLE) public PathProperties readToFile(String filePath, boolean overwrite) { Set<OpenOption> openOptions = null; if (overwrite) { openOptions = new HashSet<>(); openOptions.add(StandardOpenOption.CREATE); openOptions.add(StandardOpenOption.TRUNCATE_EXISTING); openOptions.add(StandardOpenOption.READ); openOptions.add(StandardOpenOption.WRITE); } return readToFileWithResponse(filePath, null, null, null, null, false, openOptions, null, Context.NONE) .getValue(); } /** * Reads the entire file into a file specified by the path. * * <p>If overwrite is set to false, the file will be created and must not exist, if the file already exists a * {@link FileAlreadyExistsException} will be thrown.</p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.readToFile * <pre> * boolean overwrite1 = false; & * client.readToFile& * System.out.println& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakeFileClient.readToFile * * <p>For more information, see the * <a href="https: * * @param options {@link ReadToFileOptions} * @param overwrite Whether to overwrite the file, should the file exist. * @return The file properties and metadata. 
* @throws UncheckedIOException If an I/O error occurs */ @ServiceMethod(returns = ReturnType.SINGLE) public PathProperties readToFile(ReadToFileOptions options, boolean overwrite) { Set<OpenOption> openOptions = null; if (overwrite) { openOptions = new HashSet<>(); openOptions.add(StandardOpenOption.CREATE); openOptions.add(StandardOpenOption.TRUNCATE_EXISTING); openOptions.add(StandardOpenOption.READ); openOptions.add(StandardOpenOption.WRITE); options.setOpenOptions(openOptions); } return readToFileWithResponse(options, null, Context.NONE) .getValue(); } /** * Reads the entire file into a file specified by the path. * * <p>By default the file will be created and must not exist, if the file already exists a * {@link FileAlreadyExistsException} will be thrown. To override this behavior, provide appropriate * {@link OpenOption OpenOptions} </p> * * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.readToFileWithResponse * <pre> * FileRange fileRange = new FileRange& * DownloadRetryOptions downloadRetryOptions = new DownloadRetryOptions& * Set&lt;OpenOption&gt; openOptions = new HashSet&lt;&gt;& * StandardOpenOption.WRITE, StandardOpenOption.READ& * * client.readToFileWithResponse& * downloadRetryOptions, null, false, openOptions, timeout, new Context& * System.out.println& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakeFileClient.readToFileWithResponse * * <p>For more information, see the * <a href="https: * * @param filePath A {@link String} representing the filePath where the downloaded data will be written. * @param range {@link FileRange} * @param parallelTransferOptions {@link ParallelTransferOptions} to use to download to file. Number of parallel * transfers parameter is ignored. 
* @param downloadRetryOptions {@link DownloadRetryOptions} * @param requestConditions {@link DataLakeRequestConditions} * @param rangeGetContentMd5 Whether the contentMD5 for the specified file range should be returned. * @param openOptions {@link OpenOption OpenOptions} to use to configure how to open or create the file. * @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised. * @param context Additional context that is passed through the Http pipeline during the service call. * @return A response containing the file properties and metadata. * @throws UncheckedIOException If an I/O error occurs. */ @ServiceMethod(returns = ReturnType.SINGLE) public Response<PathProperties> readToFileWithResponse(String filePath, FileRange range, ParallelTransferOptions parallelTransferOptions, DownloadRetryOptions downloadRetryOptions, DataLakeRequestConditions requestConditions, boolean rangeGetContentMd5, Set<OpenOption> openOptions, Duration timeout, Context context) { return DataLakeImplUtils.returnOrConvertException(() -> { Response<BlobProperties> response = blockBlobClient.downloadToFileWithResponse( new BlobDownloadToFileOptions(filePath) .setRange(Transforms.toBlobRange(range)).setParallelTransferOptions(parallelTransferOptions) .setDownloadRetryOptions(Transforms.toBlobDownloadRetryOptions(downloadRetryOptions)) .setRequestConditions(Transforms.toBlobRequestConditions(requestConditions)) .setRetrieveContentRangeMd5(rangeGetContentMd5).setOpenOptions(openOptions), timeout, context); return new SimpleResponse<>(response, Transforms.toPathProperties(response.getValue(), response)); }, LOGGER); } /** * Reads the entire file into a file specified by the path. * * <p>By default the file will be created and must not exist, if the file already exists a * {@link FileAlreadyExistsException} will be thrown. 
To override this behavior, provide appropriate * {@link OpenOption OpenOptions} </p> * * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.readToFileWithResponse * <pre> * ReadToFileOptions options = new ReadToFileOptions& * options.setRange& * options.setDownloadRetryOptions& * options.setOpenOptions& * StandardOpenOption.WRITE, StandardOpenOption.READ& * options.setParallelTransferOptions& * options.setDataLakeRequestConditions& * options.setRangeGetContentMd5& * * client.readToFileWithResponse& * System.out.println& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakeFileClient.readToFileWithResponse * * @param options {@link ReadToFileOptions} * @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised. * @param context Additional context that is passed through the Http pipeline during the service call. * @return A response containing the file properties and metadata. * @throws UncheckedIOException If an I/O error occurs. */ @ServiceMethod(returns = ReturnType.SINGLE) /** * Moves the file to another location within the file system. * For more information see the * <a href="https: * Docs</a>. * * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakeDirectoryAsyncClient.rename * <pre> * DataLakeDirectoryAsyncClient renamedClient = client.rename& * System.out.println& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakeDirectoryAsyncClient.rename * * @param destinationFileSystem The file system of the destination within the account. * {@code null} for the current file system. * @param destinationPath Relative path from the file system to rename the file to, excludes the file system name. 
* For example if you want to move a file with fileSystem = "myfilesystem", path = "mydir/hello.txt" to another path * in myfilesystem (ex: newdir/hi.txt) then set the destinationPath = "newdir/hi.txt" * @return A {@link DataLakeFileClient} used to interact with the new file created. */ @ServiceMethod(returns = ReturnType.SINGLE) public DataLakeFileClient rename(String destinationFileSystem, String destinationPath) { return renameWithResponse(destinationFileSystem, destinationPath, null, null, null, null).getValue(); } /** * Moves the file to another location within the file system. * For more information, see the * <a href="https: * * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.renameWithResponse * <pre> * DataLakeRequestConditions sourceRequestConditions = new DataLakeRequestConditions& * .setLeaseId& * DataLakeRequestConditions destinationRequestConditions = new DataLakeRequestConditions& * * DataLakeFileClient newRenamedClient = client.renameWithResponse& * sourceRequestConditions, destinationRequestConditions, timeout, new Context& * System.out.println& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakeFileClient.renameWithResponse * * @param destinationFileSystem The file system of the destination within the account. * {@code null} for the current file system. * @param destinationPath Relative path from the file system to rename the file to, excludes the file system name. * For example if you want to move a file with fileSystem = "myfilesystem", path = "mydir/hello.txt" to another path * in myfilesystem (ex: newdir/hi.txt) then set the destinationPath = "newdir/hi.txt" * @param sourceRequestConditions {@link DataLakeRequestConditions} against the source. * @param destinationRequestConditions {@link DataLakeRequestConditions} against the destination. * @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised. 
* @param context Additional context that is passed through the Http pipeline during the service call. * * @return A {@link Response} whose {@link Response * used to interact with the file created. */ @ServiceMethod(returns = ReturnType.SINGLE) public Response<DataLakeFileClient> renameWithResponse(String destinationFileSystem, String destinationPath, DataLakeRequestConditions sourceRequestConditions, DataLakeRequestConditions destinationRequestConditions, Duration timeout, Context context) { Mono<Response<DataLakeFileClient>> response = dataLakeFileAsyncClient.renameWithResponse(destinationFileSystem, destinationPath, sourceRequestConditions, destinationRequestConditions, context) .map(asyncResponse -> new SimpleResponse<>(asyncResponse.getRequest(), asyncResponse.getStatusCode(), asyncResponse.getHeaders(), new DataLakeFileClient(new DataLakeFileAsyncClient(asyncResponse.getValue()), new SpecializedBlobClientBuilder() .blobAsyncClient(asyncResponse.getValue().blockBlobAsyncClient) .buildBlockBlobClient()))); Response<DataLakeFileClient> resp = StorageImplUtils.blockWithOptionalTimeout(response, timeout); return new SimpleResponse<>(resp, new DataLakeFileClient(resp.getValue())); } /** * Opens an input stream to query the file. * * <p>For more information, see the * <a href="https: * * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.openQueryInputStream * <pre> * String expression = &quot;SELECT * from BlobStorage&quot;; * InputStream inputStream = client.openQueryInputStream& * & * </pre> * <!-- end com.azure.storage.file.datalake.DataLakeFileClient.openQueryInputStream * * @param expression The query expression. * @return An <code>InputStream</code> object that represents the stream to use for reading the query response. 
*/ public InputStream openQueryInputStream(String expression) { return openQueryInputStreamWithResponse(new FileQueryOptions(expression)).getValue(); } /** * Opens an input stream to query the file. * * <p>For more information, see the * <a href="https: * * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.openQueryInputStream * <pre> * String expression = &quot;SELECT * from BlobStorage&quot;; * FileQuerySerialization input = new FileQueryDelimitedSerialization& * .setColumnSeparator& * .setEscapeChar& * .setRecordSeparator& * .setHeadersPresent& * .setFieldQuote& * FileQuerySerialization output = new FileQueryJsonSerialization& * .setRecordSeparator& * DataLakeRequestConditions requestConditions = new DataLakeRequestConditions& * .setLeaseId& * Consumer&lt;FileQueryError&gt; errorConsumer = System.out::println; * Consumer&lt;FileQueryProgress&gt; progressConsumer = progress -&gt; System.out.println& * + progress.getBytesScanned& * FileQueryOptions queryOptions = new FileQueryOptions& * .setInputSerialization& * .setOutputSerialization& * .setRequestConditions& * .setErrorConsumer& * .setProgressConsumer& * * InputStream inputStream = client.openQueryInputStreamWithResponse& * & * </pre> * <!-- end com.azure.storage.file.datalake.DataLakeFileClient.openQueryInputStream * * @param queryOptions {@link FileQueryOptions The query options}. * @return A response containing status code and HTTP headers including an <code>InputStream</code> object * that represents the stream to use for reading the query response. 
*/ public Response<InputStream> openQueryInputStreamWithResponse(FileQueryOptions queryOptions) { FileQueryAsyncResponse response = dataLakeFileAsyncClient.queryWithResponse(queryOptions) .block(); if (response == null) { throw LOGGER.logExceptionAsError(new IllegalStateException("Query response cannot be null")); } return new ResponseBase<>(response.getRequest(), response.getStatusCode(), response.getHeaders(), new FluxInputStream(response.getValue()), response.getDeserializedHeaders()); } /** * Queries an entire file into an output stream. * * <p>For more information, see the * <a href="https: * * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.query * <pre> * ByteArrayOutputStream queryData = new ByteArrayOutputStream& * String expression = &quot;SELECT * from BlobStorage&quot;; * client.query& * System.out.println& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakeFileClient.query * * @param stream A non-null {@link OutputStream} instance where the downloaded data will be written. * @param expression The query expression. * @throws UncheckedIOException If an I/O error occurs. * @throws NullPointerException if {@code stream} is null. */ public void query(OutputStream stream, String expression) { queryWithResponse(new FileQueryOptions(expression, stream), null, Context.NONE); } /** * Queries an entire file into an output stream. 
* * <p>For more information, see the * <a href="https: * * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.queryWithResponse * <pre> * ByteArrayOutputStream queryData = new ByteArrayOutputStream& * String expression = &quot;SELECT * from BlobStorage&quot;; * FileQueryJsonSerialization input = new FileQueryJsonSerialization& * .setRecordSeparator& * FileQueryDelimitedSerialization output = new FileQueryDelimitedSerialization& * .setEscapeChar& * .setColumnSeparator& * .setRecordSeparator& * .setFieldQuote& * .setHeadersPresent& * DataLakeRequestConditions requestConditions = new DataLakeRequestConditions& * Consumer&lt;FileQueryError&gt; errorConsumer = System.out::println; * Consumer&lt;FileQueryProgress&gt; progressConsumer = progress -&gt; System.out.println& * + progress.getBytesScanned& * FileQueryOptions queryOptions = new FileQueryOptions& * .setInputSerialization& * .setOutputSerialization& * .setRequestConditions& * .setErrorConsumer& * .setProgressConsumer& * System.out.printf& * client.queryWithResponse& * .getStatusCode& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakeFileClient.queryWithResponse * * @param queryOptions {@link FileQueryOptions The query options}. * @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised. * @param context Additional context that is passed through the Http pipeline during the service call. * @return A response containing status code and HTTP headers. * @throws UncheckedIOException If an I/O error occurs. * @throws NullPointerException if {@code stream} is null. 
*/ public FileQueryResponse queryWithResponse(FileQueryOptions queryOptions, Duration timeout, Context context) { return DataLakeImplUtils.returnOrConvertException(() -> { BlobQueryResponse response = blockBlobClient.queryWithResponse( Transforms.toBlobQueryOptions(queryOptions), timeout, context); return Transforms.toFileQueryResponse(response); }, LOGGER); } /** * Schedules the file for deletion. * * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.scheduleDeletion * <pre> * FileScheduleDeletionOptions options = new FileScheduleDeletionOptions& * client.scheduleDeletion& * System.out.println& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakeFileClient.scheduleDeletion * * @param options Schedule deletion parameters. */ @ServiceMethod(returns = ReturnType.SINGLE) public void scheduleDeletion(FileScheduleDeletionOptions options) { this.scheduleDeletionWithResponse(options, null, Context.NONE); } /** * Schedules the file for deletion. * * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.scheduleDeletionWithResponse * <pre> * FileScheduleDeletionOptions options = new FileScheduleDeletionOptions& * Context context = new Context& * * client.scheduleDeletionWithResponse& * System.out.println& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakeFileClient.scheduleDeletionWithResponse * * @param options Schedule deletion parameters. * @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised. * @param context Additional context that is passed through the Http pipeline during the service call. * @return A response containing status code and HTTP headers. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) public Response<Void> scheduleDeletionWithResponse(FileScheduleDeletionOptions options, Duration timeout, Context context) { Mono<Response<Void>> response = this.dataLakeFileAsyncClient.scheduleDeletionWithResponse(options, context); return StorageImplUtils.blockWithOptionalTimeout(response, timeout); } }
I don't thinnk we _always_ want to mount this. Can you make doing so a job server command line arg and plumb it through?
public RemoteEnvironment createEnvironment(Environment environment) throws Exception { String workerId = idGenerator.getId(); String containerImage = environment.getUrl(); String loggingEndpoint = loggingServiceServer.getApiServiceDescriptor().getUrl(); String artifactEndpoint = retrievalServiceServer.getApiServiceDescriptor().getUrl(); String provisionEndpoint = provisioningServiceServer.getApiServiceDescriptor().getUrl(); String controlEndpoint = controlServiceServer.getApiServiceDescriptor().getUrl(); List<String> volArg = new ArrayList<>(); volArg.addAll(gcsCredentialArgs()); volArg.add("--network=host"); List<String> args = ImmutableList.of( String.format("--id=%s", workerId), String.format("--logging_endpoint=%s", loggingEndpoint), String.format("--artifact_endpoint=%s", artifactEndpoint), String.format("--provision_endpoint=%s", provisionEndpoint), String.format("--control_endpoint=%s", controlEndpoint)); LOG.debug("Creating Docker Container with ID {}", workerId); String containerId = null; InstructionRequestHandler instructionHandler = null; try { containerId = docker.runImage(containerImage, volArg, args); LOG.debug("Created Docker Container with Container ID {}", containerId); while (instructionHandler == null) { try { instructionHandler = clientSource.take(workerId, Duration.ofMinutes(2)); } catch (TimeoutException timeoutEx) { LOG.info( "Still waiting for startup of environment {} for worker id {}", environment.getUrl(), workerId); } catch (InterruptedException interruptEx) { Thread.currentThread().interrupt(); throw new RuntimeException(interruptEx); } } } catch (Exception e) { if (containerId != null) { try { docker.killContainer(containerId); } catch (Exception dockerException) { e.addSuppressed(dockerException); } } throw e; } return DockerContainerEnvironment.create(docker, environment, containerId, instructionHandler); }
volArg.addAll(gcsCredentialArgs());
public RemoteEnvironment createEnvironment(Environment environment) throws Exception { String workerId = idGenerator.getId(); String containerImage = environment.getUrl(); String loggingEndpoint = loggingServiceServer.getApiServiceDescriptor().getUrl(); String artifactEndpoint = retrievalServiceServer.getApiServiceDescriptor().getUrl(); String provisionEndpoint = provisioningServiceServer.getApiServiceDescriptor().getUrl(); String controlEndpoint = controlServiceServer.getApiServiceDescriptor().getUrl(); List<String> volArg = ImmutableList.<String>builder() .addAll(gcsCredentialArgs()) .add("--network=host") .build(); List<String> args = ImmutableList.of( String.format("--id=%s", workerId), String.format("--logging_endpoint=%s", loggingEndpoint), String.format("--artifact_endpoint=%s", artifactEndpoint), String.format("--provision_endpoint=%s", provisionEndpoint), String.format("--control_endpoint=%s", controlEndpoint)); LOG.debug("Creating Docker Container with ID {}", workerId); String containerId = null; InstructionRequestHandler instructionHandler = null; try { containerId = docker.runImage(containerImage, volArg, args); LOG.debug("Created Docker Container with Container ID {}", containerId); while (instructionHandler == null) { try { instructionHandler = clientSource.take(workerId, Duration.ofMinutes(2)); } catch (TimeoutException timeoutEx) { LOG.info( "Still waiting for startup of environment {} for worker id {}", environment.getUrl(), workerId); } catch (InterruptedException interruptEx) { Thread.currentThread().interrupt(); throw new RuntimeException(interruptEx); } } } catch (Exception e) { if (containerId != null) { try { docker.killContainer(containerId); } catch (Exception dockerException) { e.addSuppressed(dockerException); } } throw e; } return DockerContainerEnvironment.create(docker, environment, containerId, instructionHandler); }
class DockerEnvironmentFactory implements EnvironmentFactory { private static final Logger LOG = LoggerFactory.getLogger(DockerEnvironmentFactory.class); /** * Returns a {@link DockerEnvironmentFactory} for the provided {@link GrpcFnServer servers} using * the default {@link DockerCommand}. */ public static DockerEnvironmentFactory forServices( GrpcFnServer<FnApiControlClientPoolService> controlServiceServer, GrpcFnServer<GrpcLoggingService> loggingServiceServer, GrpcFnServer<ArtifactRetrievalService> retrievalServiceServer, GrpcFnServer<StaticGrpcProvisionService> provisioningServiceServer, ControlClientPool.Source clientSource, IdGenerator idGenerator) { return forServicesWithDocker( DockerCommand.getDefault(), controlServiceServer, loggingServiceServer, retrievalServiceServer, provisioningServiceServer, clientSource, idGenerator); } static DockerEnvironmentFactory forServicesWithDocker( DockerCommand docker, GrpcFnServer<FnApiControlClientPoolService> controlServiceServer, GrpcFnServer<GrpcLoggingService> loggingServiceServer, GrpcFnServer<ArtifactRetrievalService> retrievalServiceServer, GrpcFnServer<StaticGrpcProvisionService> provisioningServiceServer, ControlClientPool.Source clientSource, IdGenerator idGenerator) { return new DockerEnvironmentFactory( docker, controlServiceServer, loggingServiceServer, retrievalServiceServer, provisioningServiceServer, idGenerator, clientSource); } private final DockerCommand docker; private final GrpcFnServer<FnApiControlClientPoolService> controlServiceServer; private final GrpcFnServer<GrpcLoggingService> loggingServiceServer; private final GrpcFnServer<ArtifactRetrievalService> retrievalServiceServer; private final GrpcFnServer<StaticGrpcProvisionService> provisioningServiceServer; private final IdGenerator idGenerator; private final ControlClientPool.Source clientSource; private DockerEnvironmentFactory( DockerCommand docker, GrpcFnServer<FnApiControlClientPoolService> controlServiceServer, 
GrpcFnServer<GrpcLoggingService> loggingServiceServer, GrpcFnServer<ArtifactRetrievalService> retrievalServiceServer, GrpcFnServer<StaticGrpcProvisionService> provisioningServiceServer, IdGenerator idGenerator, ControlClientPool.Source clientSource) { this.docker = docker; this.controlServiceServer = controlServiceServer; this.loggingServiceServer = loggingServiceServer; this.retrievalServiceServer = retrievalServiceServer; this.provisioningServiceServer = provisioningServiceServer; this.idGenerator = idGenerator; this.clientSource = clientSource; } /** Creates a new, active {@link RemoteEnvironment} backed by a local Docker container. */ @Override private List<String> gcsCredentialArgs() { String dockerGcloudConfig = "/root/lconfig/gcloud"; String localGcloudConfig = firstNonNull( System.getenv("CLOUDSDK_CONFIG"), Paths.get(System.getProperty("user.home"), ".config", "gcloud").toString()); if (Files.exists(Paths.get(localGcloudConfig))) { return ImmutableList.of( "--mount", String.format("type=bind,src=%s,dst=%s", localGcloudConfig, dockerGcloudConfig), "--env", String.format("CLOUDSDK_CONFIG=%s", dockerGcloudConfig)); } else { return ImmutableList.of(); } } }
class DockerEnvironmentFactory implements EnvironmentFactory { private static final Logger LOG = LoggerFactory.getLogger(DockerEnvironmentFactory.class); /** * Returns a {@link DockerEnvironmentFactory} for the provided {@link GrpcFnServer servers} using * the default {@link DockerCommand}. */ public static DockerEnvironmentFactory forServices( GrpcFnServer<FnApiControlClientPoolService> controlServiceServer, GrpcFnServer<GrpcLoggingService> loggingServiceServer, GrpcFnServer<ArtifactRetrievalService> retrievalServiceServer, GrpcFnServer<StaticGrpcProvisionService> provisioningServiceServer, ControlClientPool.Source clientSource, IdGenerator idGenerator) { return forServicesWithDocker( DockerCommand.getDefault(), controlServiceServer, loggingServiceServer, retrievalServiceServer, provisioningServiceServer, clientSource, idGenerator); } static DockerEnvironmentFactory forServicesWithDocker( DockerCommand docker, GrpcFnServer<FnApiControlClientPoolService> controlServiceServer, GrpcFnServer<GrpcLoggingService> loggingServiceServer, GrpcFnServer<ArtifactRetrievalService> retrievalServiceServer, GrpcFnServer<StaticGrpcProvisionService> provisioningServiceServer, ControlClientPool.Source clientSource, IdGenerator idGenerator) { return new DockerEnvironmentFactory( docker, controlServiceServer, loggingServiceServer, retrievalServiceServer, provisioningServiceServer, idGenerator, clientSource); } private final DockerCommand docker; private final GrpcFnServer<FnApiControlClientPoolService> controlServiceServer; private final GrpcFnServer<GrpcLoggingService> loggingServiceServer; private final GrpcFnServer<ArtifactRetrievalService> retrievalServiceServer; private final GrpcFnServer<StaticGrpcProvisionService> provisioningServiceServer; private final IdGenerator idGenerator; private final ControlClientPool.Source clientSource; private DockerEnvironmentFactory( DockerCommand docker, GrpcFnServer<FnApiControlClientPoolService> controlServiceServer, 
GrpcFnServer<GrpcLoggingService> loggingServiceServer, GrpcFnServer<ArtifactRetrievalService> retrievalServiceServer, GrpcFnServer<StaticGrpcProvisionService> provisioningServiceServer, IdGenerator idGenerator, ControlClientPool.Source clientSource) { this.docker = docker; this.controlServiceServer = controlServiceServer; this.loggingServiceServer = loggingServiceServer; this.retrievalServiceServer = retrievalServiceServer; this.provisioningServiceServer = provisioningServiceServer; this.idGenerator = idGenerator; this.clientSource = clientSource; } /** Creates a new, active {@link RemoteEnvironment} backed by a local Docker container. */ @Override private List<String> gcsCredentialArgs() { String dockerGcloudConfig = "/root/.config/gcloud"; String localGcloudConfig = firstNonNull( System.getenv("CLOUDSDK_CONFIG"), Paths.get(System.getProperty("user.home"), ".config", "gcloud").toString()); if (Files.exists(Paths.get(localGcloudConfig))) { return ImmutableList.of( "--mount", String.format("type=bind,src=%s,dst=%s", localGcloudConfig, dockerGcloudConfig)); } else { return ImmutableList.of(); } } }
Why are we using containerLink and not containerName?
public String toString() { if (this.cosmosContainerIdentities == null || cosmosContainerIdentities.isEmpty()) { return ""; } return String.format( "(containers:%s)(pcrc:%d)(awd:%s)", cosmosContainerIdentities .stream() .map(ci -> String.join( ".", containerIdAccessor.getContainerLink(ci))) .collect(Collectors.joining(";")), numProactiveConnectionRegions, aggressiveWarmupDuration); }
containerIdAccessor.getContainerLink(ci)))
public String toString() { if (this.cosmosContainerIdentities == null || cosmosContainerIdentities.isEmpty()) { return ""; } return String.format( "(containers:%s)(pcrc:%d)(awd:%s)", cosmosContainerIdentities .stream() .map(ci -> String.join( ".", containerIdAccessor.getContainerLink(ci))) .collect(Collectors.joining(";")), numProactiveConnectionRegions, aggressiveWarmupDuration); }
class CosmosContainerProactiveInitConfig { private final static ImplementationBridgeHelpers.CosmosContainerIdentityHelper.CosmosContainerIdentityAccessor containerIdAccessor = ImplementationBridgeHelpers .CosmosContainerIdentityHelper .getCosmosContainerIdentityAccessor(); private final List<CosmosContainerIdentity> cosmosContainerIdentities; private final Map<CosmosContainerIdentity, ContainerDirectConnectionMetadata> containerDirectConnectionMetadataMap; private final int numProactiveConnectionRegions; private final Duration aggressiveWarmupDuration; CosmosContainerProactiveInitConfig( int numProactiveConnectionRegions, Map<CosmosContainerIdentity, ContainerDirectConnectionMetadata> containerDirectConnectionMetadataMap, Duration aggressiveWarmupDuration) { this.cosmosContainerIdentities = new ArrayList<>(containerDirectConnectionMetadataMap.keySet()); this.numProactiveConnectionRegions = numProactiveConnectionRegions; this.containerDirectConnectionMetadataMap = containerDirectConnectionMetadataMap; this.aggressiveWarmupDuration = aggressiveWarmupDuration; } /** * Gets the list of container identities. The returned list is protected against modifications. * * @return list of {@link CosmosContainerIdentity} * */ public List<CosmosContainerIdentity> getCosmosContainerIdentities() { return Collections.unmodifiableList(this.cosmosContainerIdentities); } /** * Gets the no. of proactive connection regions * * <p> * Proactive connection regions constitute those regions where replicas of container partitions have connections opened to prior * to performing any workload on the container. This way the latency associated with opening connections * does not impact the latency associated with performing workloads on the container. These connections are * opened synchronously when the {@link CosmosClient}/{@link CosmosAsyncClient} is built. * </p> * <p> * These proactive connection regions are a subset of the preferred regions configured through the {@link CosmosClientBuilder}. 
The first * {@link CosmosContainerProactiveInitConfig * </p> * <p> * Consider a multi-master account with client configured with preferred regions - "US West" (write-region) and "US East" (write-region) * <br> * 1. If the no. of proactive regions is set to two, connections to "US West" and "US East" are opened proactively. * <br> * 2. If the no. of proactive regions is set to one, connections to "US West" are opened proactively. * <br><br> * Consider a single-master account with client configured with preferred regions - "US West" (read-region), "US East" (read-region) and * "West Europe" (write-region) * <br> * 1. If the no. of proactive regions is set to two, connections to "US West" and "US East" are opened proactively. If your application * has workloads which are write-heavy it is important to prioritize write regions in the list of preferred regions. * </p> * * @return no. of proactive connection regions */ public int getProactiveConnectionRegionsCount() { return numProactiveConnectionRegions; } /** * Gets the duration within which connections will be opened aggressively and in a blocking manner and outside * which connections will be opened defensively and in a non-blocking manner * * @return the aggressive proactive connection establishment duration * */ Duration getAggressiveWarmupDuration() { return this.aggressiveWarmupDuration; } @Override static void initialize() { ImplementationBridgeHelpers.CosmosContainerProactiveInitConfigHelper.setCosmosContainerProactiveInitConfigAccessor(new ImplementationBridgeHelpers.CosmosContainerProactiveInitConfigHelper.CosmosContainerProactiveInitConfigAccessor() { @Override public Map<CosmosContainerIdentity, ContainerDirectConnectionMetadata> getContainerPropertiesMap(CosmosContainerProactiveInitConfig cosmosContainerProactiveInitConfig) { return cosmosContainerProactiveInitConfig.containerDirectConnectionMetadataMap; } }); } static { initialize(); } }
class CosmosContainerProactiveInitConfig { private final static ImplementationBridgeHelpers.CosmosContainerIdentityHelper.CosmosContainerIdentityAccessor containerIdAccessor = ImplementationBridgeHelpers .CosmosContainerIdentityHelper .getCosmosContainerIdentityAccessor(); private final List<CosmosContainerIdentity> cosmosContainerIdentities; private final Map<CosmosContainerIdentity, ContainerDirectConnectionMetadata> containerDirectConnectionMetadataMap; private final int numProactiveConnectionRegions; private final Duration aggressiveWarmupDuration; CosmosContainerProactiveInitConfig( int numProactiveConnectionRegions, Map<CosmosContainerIdentity, ContainerDirectConnectionMetadata> containerDirectConnectionMetadataMap, Duration aggressiveWarmupDuration) { this.cosmosContainerIdentities = new ArrayList<>(containerDirectConnectionMetadataMap.keySet()); this.numProactiveConnectionRegions = numProactiveConnectionRegions; this.containerDirectConnectionMetadataMap = containerDirectConnectionMetadataMap; this.aggressiveWarmupDuration = aggressiveWarmupDuration; } /** * Gets the list of container identities. The returned list is protected against modifications. * * @return list of {@link CosmosContainerIdentity} * */ public List<CosmosContainerIdentity> getCosmosContainerIdentities() { return Collections.unmodifiableList(this.cosmosContainerIdentities); } /** * Gets the no. of proactive connection regions * * <p> * Proactive connection regions constitute those regions where replicas of container partitions have connections opened to prior * to performing any workload on the container. This way the latency associated with opening connections * does not impact the latency associated with performing workloads on the container. These connections are * opened synchronously when the {@link CosmosClient}/{@link CosmosAsyncClient} is built. * </p> * <p> * These proactive connection regions are a subset of the preferred regions configured through the {@link CosmosClientBuilder}. 
The first * {@link CosmosContainerProactiveInitConfig * </p> * <p> * Consider a multi-master account with client configured with preferred regions - "US West" (write-region) and "US East" (write-region) * <br> * 1. If the no. of proactive regions is set to two, connections to "US West" and "US East" are opened proactively. * <br> * 2. If the no. of proactive regions is set to one, connections to "US West" are opened proactively. * <br><br> * Consider a single-master account with client configured with preferred regions - "US West" (read-region), "US East" (read-region) and * "West Europe" (write-region) * <br> * 1. If the no. of proactive regions is set to two, connections to "US West" and "US East" are opened proactively. If your application * has workloads which are write-heavy it is important to prioritize write regions in the list of preferred regions. * </p> * * @return no. of proactive connection regions */ public int getProactiveConnectionRegionsCount() { return numProactiveConnectionRegions; } /** * Gets the duration within which connections will be opened aggressively and in a blocking manner and outside * which connections will be opened defensively and in a non-blocking manner * * @return the aggressive proactive connection establishment duration * */ Duration getAggressiveWarmupDuration() { return this.aggressiveWarmupDuration; } @Override static void initialize() { ImplementationBridgeHelpers.CosmosContainerProactiveInitConfigHelper.setCosmosContainerProactiveInitConfigAccessor(new ImplementationBridgeHelpers.CosmosContainerProactiveInitConfigHelper.CosmosContainerProactiveInitConfigAccessor() { @Override public Map<CosmosContainerIdentity, ContainerDirectConnectionMetadata> getContainerPropertiesMap(CosmosContainerProactiveInitConfig cosmosContainerProactiveInitConfig) { return cosmosContainerProactiveInitConfig.containerDirectConnectionMetadataMap; } }); } static { initialize(); } }
For the code itself, yes, this PR changed the logic. However, for the internals of Flink, we do not change the behavior, and I think the current change is the correct thing. After each job finishes, Flink will clean up job-related data (such as the checkpoint id counter) in [Dispatcher#removeJob](https://github.com/apache/flink/blob/278504a2787a154faf6f6401028d4bbadafbba0a/flink-runtime/src/main/java/org/apache/flink/runtime/dispatcher/Dispatcher.java#L1258), and once the cluster shuts down, it will then clean the cluster-related configmap (such as the HA-related leader) in [ClusterEntrypoint#stopClusterServices](https://github.com/apache/flink/blob/278504a2787a154faf6f6401028d4bbadafbba0a/flink-runtime/src/main/java/org/apache/flink/runtime/entrypoint/ClusterEntrypoint.java#L502). For `KubernetesLeaderElectionHaServices`, it should only clean cluster-related configs in `#internalCleanup` instead of cleaning the configmaps via labels.
public void internalCleanupJobData(JobID jobID) throws Exception { kubeClient.deleteConfigMap(getJobSpecificConfigMap(jobID)).get(); }
kubeClient.deleteConfigMap(getJobSpecificConfigMap(jobID)).get();
public void internalCleanupJobData(JobID jobID) throws Exception { kubeClient.deleteConfigMap(getJobSpecificConfigMap(jobID)).get(); }
class KubernetesLeaderElectionHaServices extends AbstractHaServices { private static final Logger LOG = LoggerFactory.getLogger(KubernetesLeaderElectionHaServices.class); private final String clusterId; private final FlinkKubeClient kubeClient; private final KubernetesConfigMapSharedWatcher configMapSharedWatcher; private final ExecutorService watchExecutorService; private final String lockIdentity; KubernetesLeaderElectionHaServices( FlinkKubeClient kubeClient, Executor ioExecutor, Configuration configuration, BlobStoreService blobStoreService) throws Exception { this( kubeClient, kubeClient.createConfigMapSharedWatcher( getClusterConfigMap(configuration.get(KubernetesConfigOptions.CLUSTER_ID))), Executors.newCachedThreadPool( new ExecutorThreadFactory("config-map-watch-handler")), ioExecutor, configuration.get(KubernetesConfigOptions.CLUSTER_ID), UUID.randomUUID().toString(), configuration, blobStoreService); } private KubernetesLeaderElectionHaServices( FlinkKubeClient kubeClient, KubernetesConfigMapSharedWatcher configMapSharedWatcher, ExecutorService watchExecutorService, Executor ioExecutor, String clusterId, String lockIdentity, Configuration configuration, BlobStoreService blobStoreService) throws Exception { super( configuration, createDriverFactory( kubeClient, configMapSharedWatcher, watchExecutorService, clusterId, lockIdentity, configuration), ioExecutor, blobStoreService, FileSystemJobResultStore.fromConfiguration(configuration, ioExecutor)); this.kubeClient = checkNotNull(kubeClient); this.clusterId = checkNotNull(clusterId); this.configMapSharedWatcher = checkNotNull(configMapSharedWatcher); this.watchExecutorService = checkNotNull(watchExecutorService); this.lockIdentity = checkNotNull(lockIdentity); } private static LeaderElectionDriverFactory createDriverFactory( FlinkKubeClient kubeClient, KubernetesConfigMapSharedWatcher configMapSharedWatcher, Executor watchExecutorService, String clusterId, String lockIdentity, Configuration configuration) { 
final KubernetesLeaderElectionConfiguration leaderElectionConfiguration = new KubernetesLeaderElectionConfiguration( getClusterConfigMap(clusterId), lockIdentity, configuration); return new KubernetesLeaderElectionDriverFactory( kubeClient, leaderElectionConfiguration, configMapSharedWatcher, watchExecutorService); } @Override protected LeaderRetrievalService createLeaderRetrievalService(String componentId) { return new DefaultLeaderRetrievalService( new KubernetesLeaderRetrievalDriverFactory( configMapSharedWatcher, watchExecutorService, getClusterConfigMap(), componentId)); } @Override protected CheckpointRecoveryFactory createCheckpointRecoveryFactory() { return KubernetesCheckpointRecoveryFactory.withoutLeadershipValidation( kubeClient, configuration, ioExecutor, clusterId, this::getJobSpecificConfigMap); } private String getJobSpecificConfigMap(JobID jobID) { return clusterId + NAME_SEPARATOR + jobID.toString() + NAME_SEPARATOR + "config-map"; } @Override protected JobGraphStore createJobGraphStore() throws Exception { return KubernetesUtils.createJobGraphStore( configuration, kubeClient, getClusterConfigMap(), lockIdentity); } private String getClusterConfigMap() { return getClusterConfigMap(clusterId); } private static String getClusterConfigMap(String clusterId) { return clusterId + NAME_SEPARATOR + "cluster-config-map"; } @Override public void internalClose() throws Exception { Exception exception = null; try { closeK8sServices(); } catch (Exception e) { exception = e; } kubeClient.close(); ExecutorUtils.gracefulShutdown(5, TimeUnit.SECONDS, this.watchExecutorService); ExceptionUtils.tryRethrowException(exception); } private void closeK8sServices() { configMapSharedWatcher.close(); final int outstandingTaskCount = watchExecutorService.shutdownNow().size(); if (outstandingTaskCount != 0) { LOG.debug( "The k8s HA services were closed with {} event(s) still not being processed. 
No further action necessary.", outstandingTaskCount); } } @Override public void internalCleanup() throws Exception { Exception exception = null; try { closeK8sServices(); } catch (Exception e) { exception = e; } kubeClient.deleteConfigMap(getClusterConfigMap()).get(); ExceptionUtils.tryRethrowException(exception); } @Override @Override protected String getLeaderPathForResourceManager() { return "resourcemanager"; } @Override protected String getLeaderPathForDispatcher() { return "dispatcher"; } @Override protected String getLeaderPathForJobManager(JobID jobID) { return "job-" + jobID.toString(); } @Override protected String getLeaderPathForRestServer() { return "restserver"; } }
class KubernetesLeaderElectionHaServices extends AbstractHaServices { private static final Logger LOG = LoggerFactory.getLogger(KubernetesLeaderElectionHaServices.class); private final String clusterId; private final FlinkKubeClient kubeClient; private final KubernetesConfigMapSharedWatcher configMapSharedWatcher; private final ExecutorService watchExecutorService; private final String lockIdentity; KubernetesLeaderElectionHaServices( FlinkKubeClient kubeClient, Executor ioExecutor, Configuration configuration, BlobStoreService blobStoreService) throws Exception { this( kubeClient, kubeClient.createConfigMapSharedWatcher( getClusterConfigMap(configuration.get(KubernetesConfigOptions.CLUSTER_ID))), Executors.newCachedThreadPool( new ExecutorThreadFactory("config-map-watch-handler")), ioExecutor, configuration.get(KubernetesConfigOptions.CLUSTER_ID), UUID.randomUUID().toString(), configuration, blobStoreService); } private KubernetesLeaderElectionHaServices( FlinkKubeClient kubeClient, KubernetesConfigMapSharedWatcher configMapSharedWatcher, ExecutorService watchExecutorService, Executor ioExecutor, String clusterId, String lockIdentity, Configuration configuration, BlobStoreService blobStoreService) throws Exception { super( configuration, createDriverFactory( kubeClient, configMapSharedWatcher, watchExecutorService, clusterId, lockIdentity, configuration), ioExecutor, blobStoreService, FileSystemJobResultStore.fromConfiguration(configuration, ioExecutor)); this.kubeClient = checkNotNull(kubeClient); this.clusterId = checkNotNull(clusterId); this.configMapSharedWatcher = checkNotNull(configMapSharedWatcher); this.watchExecutorService = checkNotNull(watchExecutorService); this.lockIdentity = checkNotNull(lockIdentity); } private static LeaderElectionDriverFactory createDriverFactory( FlinkKubeClient kubeClient, KubernetesConfigMapSharedWatcher configMapSharedWatcher, Executor watchExecutorService, String clusterId, String lockIdentity, Configuration configuration) { 
final KubernetesLeaderElectionConfiguration leaderElectionConfiguration = new KubernetesLeaderElectionConfiguration( getClusterConfigMap(clusterId), lockIdentity, configuration); return new KubernetesLeaderElectionDriverFactory( kubeClient, leaderElectionConfiguration, configMapSharedWatcher, watchExecutorService); } @Override protected LeaderRetrievalService createLeaderRetrievalService(String componentId) { return new DefaultLeaderRetrievalService( new KubernetesLeaderRetrievalDriverFactory( configMapSharedWatcher, watchExecutorService, getClusterConfigMap(), componentId)); } @Override protected CheckpointRecoveryFactory createCheckpointRecoveryFactory() { return KubernetesCheckpointRecoveryFactory.withoutLeadershipValidation( kubeClient, configuration, ioExecutor, clusterId, this::getJobSpecificConfigMap); } private String getJobSpecificConfigMap(JobID jobID) { return clusterId + NAME_SEPARATOR + jobID.toString() + NAME_SEPARATOR + "config-map"; } @Override protected JobGraphStore createJobGraphStore() throws Exception { return KubernetesUtils.createJobGraphStore( configuration, kubeClient, getClusterConfigMap(), lockIdentity); } private String getClusterConfigMap() { return getClusterConfigMap(clusterId); } private static String getClusterConfigMap(String clusterId) { return clusterId + NAME_SEPARATOR + "cluster-config-map"; } @Override public void internalClose() throws Exception { Exception exception = null; try { closeK8sServices(); } catch (Exception e) { exception = e; } kubeClient.close(); ExecutorUtils.gracefulShutdown(5, TimeUnit.SECONDS, this.watchExecutorService); ExceptionUtils.tryRethrowException(exception); } private void closeK8sServices() { configMapSharedWatcher.close(); final int outstandingTaskCount = watchExecutorService.shutdownNow().size(); if (outstandingTaskCount != 0) { LOG.debug( "The k8s HA services were closed with {} event(s) still not being processed. 
No further action necessary.", outstandingTaskCount); } } @Override public void internalCleanup() throws Exception { Exception exception = null; try { closeK8sServices(); } catch (Exception e) { exception = e; } kubeClient.deleteConfigMap(getClusterConfigMap()).get(); ExceptionUtils.tryRethrowException(exception); } @Override @Override protected String getLeaderPathForResourceManager() { return "resourcemanager"; } @Override protected String getLeaderPathForDispatcher() { return "dispatcher"; } @Override protected String getLeaderPathForJobManager(JobID jobID) { return "job-" + jobID.toString(); } @Override protected String getLeaderPathForRestServer() { return "restserver"; } }
Shall we order these alphabetically and have the same order as the spec (https://github.com/ballerina-platform/ballerina-spec/commit/158691e90bcc05c2f32137688d7669072fa911c0)? O:)
public void loadPredeclaredModules() { Map<Name, BPackageSymbol> modules = new HashMap<>(); modules.put(Names.ERROR, this.langErrorModuleSymbol); modules.put(Names.OBJECT, this.langObjectModuleSymbol); modules.put(Names.XML, this.langXmlModuleSymbol); modules.put(Names.INT, this.langIntModuleSymbol); modules.put(Names.FLOAT, this.langFloatModuleSymbol); modules.put(Names.DECIMAL, this.langDecimalModuleSymbol); modules.put(Names.BOOLEAN, this.langBooleanModuleSymbol); modules.put(Names.FUTURE, this.langFutureModuleSymbol); modules.put(Names.MAP, this.langMapModuleSymbol); modules.put(Names.STREAM, this.langStreamModuleSymbol); modules.put(Names.STRING, this.langStringModuleSymbol); modules.put(Names.TABLE, this.langTableModuleSymbol); modules.put(Names.TYPEDESC, this.langTypedescModuleSymbol); this.predeclaredModules = Collections.unmodifiableMap(modules); }
modules.put(Names.TYPEDESC, this.langTypedescModuleSymbol);
public void loadPredeclaredModules() { Map<Name, BPackageSymbol> modules = new HashMap<>(); modules.put(Names.BOOLEAN, this.langBooleanModuleSymbol); modules.put(Names.DECIMAL, this.langDecimalModuleSymbol); modules.put(Names.ERROR, this.langErrorModuleSymbol); modules.put(Names.FLOAT, this.langFloatModuleSymbol); modules.put(Names.FUTURE, this.langFutureModuleSymbol); modules.put(Names.INT, this.langIntModuleSymbol); modules.put(Names.MAP, this.langMapModuleSymbol); modules.put(Names.OBJECT, this.langObjectModuleSymbol); modules.put(Names.STREAM, this.langStreamModuleSymbol); modules.put(Names.STRING, this.langStringModuleSymbol); modules.put(Names.TABLE, this.langTableModuleSymbol); modules.put(Names.TYPEDESC, this.langTypedescModuleSymbol); modules.put(Names.XML, this.langXmlModuleSymbol); this.predeclaredModules = Collections.unmodifiableMap(modules); }
/**
 * Central registry of the compiler's built-in types, their type symbols, and the
 * built-in unary/binary operator symbols. One instance exists per {@link CompilerContext}
 * (see {@link #getInstance}); the constructor registers the instance in the context and
 * seeds the root scope with all predefined type symbols.
 */
class SymbolTable {

    // Key under which this table is cached in the CompilerContext.
    private static final CompilerContext.Key<SymbolTable> SYM_TABLE_KEY = new CompilerContext.Key<>();

    public static final PackageID TRANSACTION = new PackageID(Names.BUILTIN_ORG, Names.TRANSACTION_PACKAGE, Names.EMPTY);

    // Inclusive value ranges for byte and the fixed-width integer subtypes.
    public static final Integer BBYTE_MIN_VALUE = 0;
    public static final Integer BBYTE_MAX_VALUE = 255;
    public static final Integer SIGNED32_MAX_VALUE = 2147483647;
    public static final Integer SIGNED32_MIN_VALUE = -2147483648;
    public static final Integer SIGNED16_MAX_VALUE = 32767;
    public static final Integer SIGNED16_MIN_VALUE = -32768;
    public static final Integer SIGNED8_MAX_VALUE = 127;
    public static final Integer SIGNED8_MIN_VALUE = -128;
    // unsigned32 max exceeds Integer.MAX_VALUE, hence Long.
    public static final Long UNSIGNED32_MAX_VALUE = 4294967295L;
    public static final Integer UNSIGNED16_MAX_VALUE = 65535;
    public static final Integer UNSIGNED8_MAX_VALUE = 255;

    // Root package/scope plumbing, all wired up in the constructor.
    public final DiagnosticPos builtinPos;
    public final BLangPackage rootPkgNode;
    public final BPackageSymbol rootPkgSymbol;
    public final BSymbol notFoundSymbol;    // sentinel returned by failed symbol lookups
    public final Scope rootScope;

    // Singleton instances of the basic types. Initialization order matters:
    // later initializers reference earlier fields (e.g. mapType uses anyType).
    public final BType noType = new BNoType(TypeTags.NONE);
    public final BType nilType = new BNilType();
    public final BType neverType = new BNeverType();
    public final BType intType = new BType(TypeTags.INT, null, Flags.READONLY);
    public final BType byteType = new BType(TypeTags.BYTE, null, Flags.READONLY);
    public final BType floatType = new BType(TypeTags.FLOAT, null, Flags.READONLY);
    public final BType decimalType = new BType(TypeTags.DECIMAL, null, Flags.READONLY);
    public final BType stringType = new BType(TypeTags.STRING, null, Flags.READONLY);
    public final BType booleanType = new BType(TypeTags.BOOLEAN, null, Flags.READONLY);
    public final BType jsonType = new BJSONType(TypeTags.JSON, null);
    public final BType anyType = new BAnyType(TypeTags.ANY, null);
    public final BType anydataType = new BAnydataType(TypeTags.ANYDATA, null);
    public final BMapType mapType = new BMapType(TypeTags.MAP, anyType, null);
    public final BMapType mapStringType = new BMapType(TypeTags.MAP, stringType, null);
    public final BMapType mapAnydataType = new BMapType(TypeTags.MAP, anydataType, null);
    public final BMapType mapJsonType = new BMapType(TypeTags.MAP, jsonType, null);
    public final BFutureType futureType = new BFutureType(TypeTags.FUTURE, nilType, null);
    public final BArrayType arrayType = new BArrayType(anyType);
    public final BArrayType arrayStringType = new BArrayType(stringType);
    public final BArrayType arrayAnydataType = new BArrayType(anydataType);
    public final BArrayType arrayJsonType = new BArrayType(jsonType);
    public final BType tupleType = new BTupleType(Lists.of(noType));
    public final BType recordType = new BRecordType(null);
    public final BType stringArrayType = new BArrayType(stringType);
    public final BType jsonArrayType = new BArrayType(jsonType);
    public final BType anydataArrayType = new BArrayType(anydataType);
    public final BType anyServiceType = new BServiceType(null);
    public final BType handleType = new BHandleType(TypeTags.HANDLE, null);
    public final BTypedescType typeDesc = new BTypedescType(this.anyType, null);
    public final BType readonlyType = new BReadonlyType(TypeTags.READONLY, null);
    public final BType anydataOrReadonly = BUnionType.create(null, anydataType, readonlyType);
    public final BType semanticError = new BType(TypeTags.SEMANTIC_ERROR, null);
    public final BType nullSet = new BType(TypeTags.NULL_SET, null);
    // NOTE(review): duplicates anydataOrReadonly above (same union); consider removing one.
    public final BUnionType anydataOrReadOnlyType = BUnionType.create(null, anydataType, readonlyType);

    public BType streamType = new BStreamType(TypeTags.STREAM, anydataType, null, null);
    public BType tableType = new BTableType(TypeTags.TABLE, anydataType, null);
    public BMapType detailType = new BMapType(TypeTags.MAP, anydataOrReadonly, null);
    public BErrorType errorType = new BErrorType(null, detailType);

    // Filled in later by other compiler phases; not assigned in this class.
    // NOTE(review): presumably populated once lang modules are loaded — confirm at call sites.
    public BConstructorSymbol errorConstructor;
    public BUnionType anyOrErrorType;
    public BUnionType pureType;
    public BUnionType errorOrNilType;
    public BFiniteType trueType;            // assigned at the end of the constructor
    public BObjectType intRangeType;
    public BMapType mapAllType;
    public BArrayType arrayAllType;
    public BObjectType rawTemplateType;

    // Built-in int, string and xml subtypes.
    public final BIntSubType signed32IntType = new BIntSubType(TypeTags.SIGNED32_INT, Names.SIGNED32);
    public final BIntSubType signed16IntType = new BIntSubType(TypeTags.SIGNED16_INT, Names.SIGNED16);
    public final BIntSubType signed8IntType = new BIntSubType(TypeTags.SIGNED8_INT, Names.SIGNED8);
    public final BIntSubType unsigned32IntType = new BIntSubType(TypeTags.UNSIGNED32_INT, Names.UNSIGNED32);
    public final BIntSubType unsigned16IntType = new BIntSubType(TypeTags.UNSIGNED16_INT, Names.UNSIGNED16);
    public final BIntSubType unsigned8IntType = new BIntSubType(TypeTags.UNSIGNED8_INT, Names.UNSIGNED8);
    public final BStringSubType charStringType = new BStringSubType(TypeTags.CHAR_STRING, Names.CHAR);
    public final BXMLSubType xmlElementType = new BXMLSubType(TypeTags.XML_ELEMENT, Names.XML_ELEMENT);
    public final BXMLSubType xmlPIType = new BXMLSubType(TypeTags.XML_PI, Names.XML_PI);
    public final BXMLSubType xmlCommentType = new BXMLSubType(TypeTags.XML_COMMENT, Names.XML_COMMENT);
    public final BXMLSubType xmlTextType = new BXMLSubType(TypeTags.XML_TEXT, Names.XML_TEXT, Flags.READONLY);

    // xml is the union of its four subtypes.
    public final BType xmlType = new BXMLType(BUnionType.create(null, xmlElementType, xmlCommentType, xmlPIType, xmlTextType), null);

    // Symbols of the lang library modules; assigned externally after module loading.
    public BPackageSymbol langInternalModuleSymbol;
    public BPackageSymbol langAnnotationModuleSymbol;
    public BPackageSymbol langArrayModuleSymbol;
    public BPackageSymbol langDecimalModuleSymbol;
    public BPackageSymbol langErrorModuleSymbol;
    public BPackageSymbol langFloatModuleSymbol;
    public BPackageSymbol langFutureModuleSymbol;
    public BPackageSymbol langIntModuleSymbol;
    public BPackageSymbol langMapModuleSymbol;
    public BPackageSymbol langObjectModuleSymbol;
    public BPackageSymbol langStreamModuleSymbol;
    public BPackageSymbol langStringModuleSymbol;
    public BPackageSymbol langTableModuleSymbol;
    public BPackageSymbol langTypedescModuleSymbol;
    public BPackageSymbol langValueModuleSymbol;
    public BPackageSymbol langXmlModuleSymbol;
    public BPackageSymbol langBooleanModuleSymbol;
    public BPackageSymbol langQueryModuleSymbol;
    public BPackageSymbol langTransactionModuleSymbol;

    private Names names;
    public Map<BPackageSymbol, SymbolEnv> pkgEnvMap = new HashMap<>();
    public Map<Name, BPackageSymbol> predeclaredModules = new HashMap<>();

    /**
     * Returns the per-context singleton, creating it on first use.
     * NOTE(review): no synchronization here — assumes the CompilerContext is
     * accessed from a single thread; confirm with callers.
     */
    public static SymbolTable getInstance(CompilerContext context) {
        SymbolTable symTable = context.get(SYM_TABLE_KEY);
        if (symTable == null) {
            symTable = new SymbolTable(context);
        }
        return symTable;
    }

    /**
     * Wires up the root package/scope, then registers every predefined type symbol
     * in the root scope. Note the deliberate side effect on the first line: the
     * instance is published into the context before construction completes.
     */
    private SymbolTable(CompilerContext context) {
        context.put(SYM_TABLE_KEY, this);

        this.names = Names.getInstance(context);
        this.rootPkgNode = (BLangPackage) TreeBuilder.createPackageNode();
        this.rootPkgSymbol = new BPackageSymbol(PackageID.ANNOTATIONS, null, null);
        // A synthetic 0:0 position used for all built-in symbols.
        this.builtinPos = new DiagnosticPos(new BDiagnosticSource(rootPkgSymbol.pkgID, Names.EMPTY.value), 0, 0, 0, 0);
        this.rootPkgNode.pos = this.builtinPos;
        this.rootPkgNode.symbol = this.rootPkgSymbol;
        this.rootScope = new Scope(rootPkgSymbol);
        this.rootPkgSymbol.scope = this.rootScope;
        this.rootPkgSymbol.pos = this.builtinPos;
        this.notFoundSymbol = new BSymbol(SymTag.NIL, Flags.PUBLIC, Names.INVALID, rootPkgSymbol.pkgID, noType, rootPkgSymbol, builtinPos);

        // Define symbols for the basic types in the root scope.
        initializeType(intType, TypeKind.INT.typeName());
        initializeType(byteType, TypeKind.BYTE.typeName());
        initializeType(floatType, TypeKind.FLOAT.typeName());
        initializeType(decimalType, TypeKind.DECIMAL.typeName());
        initializeType(stringType, TypeKind.STRING.typeName());
        initializeType(booleanType, TypeKind.BOOLEAN.typeName());
        initializeType(jsonType, TypeKind.JSON.typeName());
        initializeType(xmlType, TypeKind.XML.typeName());
        initializeType(streamType, TypeKind.STREAM.typeName());
        initializeType(tableType, TypeKind.TABLE.typeName());
        initializeType(mapType, TypeKind.MAP.typeName());
        initializeType(mapStringType, TypeKind.MAP.typeName());
        initializeType(mapAnydataType, TypeKind.MAP.typeName());
        initializeType(futureType, TypeKind.FUTURE.typeName());
        initializeType(anyType, TypeKind.ANY.typeName());
        initializeType(anydataType, TypeKind.ANYDATA.typeName());
        initializeType(nilType, TypeKind.NIL.typeName());
        initializeType(neverType, TypeKind.NEVER.typeName());
        initializeType(anyServiceType, TypeKind.SERVICE.typeName());
        initializeType(handleType, TypeKind.HANDLE.typeName());
        initializeType(typeDesc, TypeKind.TYPEDESC.typeName());
        initializeType(readonlyType, TypeKind.READONLY.typeName());

        // Attach symbols to the int/string/xml subtypes (these are NOT defined in
        // the root scope — they belong to their lang-library package IDs).
        initializeTSymbol(signed32IntType, Names.SIGNED32, PackageID.INT);
        initializeTSymbol(signed16IntType, Names.SIGNED16, PackageID.INT);
        initializeTSymbol(signed8IntType, Names.SIGNED8, PackageID.INT);
        initializeTSymbol(unsigned32IntType, Names.UNSIGNED32, PackageID.INT);
        initializeTSymbol(unsigned16IntType, Names.UNSIGNED16, PackageID.INT);
        initializeTSymbol(unsigned8IntType, Names.UNSIGNED8, PackageID.INT);
        initializeTSymbol(charStringType, Names.CHAR, PackageID.STRING);
        initializeTSymbol(xmlElementType, Names.XML_ELEMENT, PackageID.XML);
        initializeTSymbol(xmlPIType, Names.XML_PI, PackageID.XML);
        initializeTSymbol(xmlCommentType, Names.XML_COMMENT, PackageID.XML);
        initializeTSymbol(xmlTextType, Names.XML_TEXT, PackageID.XML);

        // Build the singleton finite type for the literal `true`.
        BLangLiteral trueLiteral = new BLangLiteral();
        trueLiteral.type = this.booleanType;
        trueLiteral.value = Boolean.TRUE;
        BTypeSymbol finiteTypeSymbol = Symbols.createTypeSymbol(SymTag.FINITE_TYPE, Flags.PUBLIC, names.fromString("$anonType$TRUE"), rootPkgNode.packageID, null, rootPkgNode.symbol.owner, this.builtinPos);
        // NOTE(review): double-brace initialization creates an anonymous HashSet
        // subclass that captures the enclosing instance; a plain HashSet + add
        // would avoid that.
        this.trueType = new BFiniteType(finiteTypeSymbol, new HashSet<BLangExpression>() {{
            add(trueLiteral);
        }});
    }

    /**
     * Maps a {@link TypeTags} constant to its singleton built-in type.
     * Returns {@code semanticError} for tags with no entry here.
     */
    public BType getTypeFromTag(int tag) {
        switch (tag) {
            case TypeTags.INT:
                return intType;
            case TypeTags.BYTE:
                return byteType;
            case TypeTags.FLOAT:
                return floatType;
            case TypeTags.DECIMAL:
                return decimalType;
            case TypeTags.STRING:
                return stringType;
            case TypeTags.BOOLEAN:
                return booleanType;
            case TypeTags.JSON:
                return jsonType;
            case TypeTags.XML:
                return xmlType;
            case TypeTags.XML_COMMENT:
                return xmlCommentType;
            case TypeTags.XML_PI:
                return xmlPIType;
            case TypeTags.XML_ELEMENT:
                return xmlElementType;
            case TypeTags.XML_TEXT:
                return xmlTextType;
            case TypeTags.STREAM:
                return streamType;
            case TypeTags.TABLE:
                return tableType;
            case TypeTags.NIL:
                return nilType;
            case TypeTags.NEVER:
                return neverType;
            case TypeTags.ERROR:
                return errorType;
            case TypeTags.SIGNED32_INT:
                return signed32IntType;
            case TypeTags.SIGNED16_INT:
                return signed16IntType;
            case TypeTags.SIGNED8_INT:
                return signed8IntType;
            case TypeTags.UNSIGNED32_INT:
                return unsigned32IntType;
            case TypeTags.UNSIGNED16_INT:
                return unsigned16IntType;
            case TypeTags.UNSIGNED8_INT:
                return unsigned8IntType;
            case TypeTags.CHAR_STRING:
                return charStringType;
            default:
                return semanticError;
        }
    }

    /**
     * Resolves a lang-library subtype by its source-level name (e.g. "Signed32").
     *
     * @throws IllegalStateException if the name is not a known subtype — unlike
     *         {@link #getTypeFromTag}, this method fails loudly on unknown input.
     */
    public BType getLangLibSubType(String name) {
        // Assuming this switch is exhaustive for the current subtypes.
        switch (name) {
            case Names.STRING_SIGNED32:
                return this.signed32IntType;
            case Names.STRING_SIGNED16:
                return this.signed16IntType;
            case Names.STRING_SIGNED8:
                return this.signed8IntType;
            case Names.STRING_UNSIGNED32:
                return this.unsigned32IntType;
            case Names.STRING_UNSIGNED16:
                return this.unsigned16IntType;
            case Names.STRING_UNSIGNED8:
                return this.unsigned8IntType;
            case Names.STRING_CHAR:
                return this.charStringType;
            case Names.STRING_XML_ELEMENT:
                return this.xmlElementType;
            case Names.STRING_XML_PI:
                return this.xmlPIType;
            case Names.STRING_XML_COMMENT:
                return this.xmlCommentType;
            case Names.STRING_XML_TEXT:
                return this.xmlTextType;
        }
        throw new IllegalStateException("LangLib Subtype not found: " + name);
    }

    // Convenience overload: wraps the raw name into a Name first.
    private void initializeType(BType type, String name) {
        initializeType(type, names.fromString(name));
    }

    // Creates a public type symbol owned by the root package and defines it in rootScope.
    private void initializeType(BType type, Name name) {
        defineType(type, new BTypeSymbol(SymTag.TYPE, Flags.PUBLIC, name, rootPkgSymbol.pkgID, type, rootPkgSymbol, builtinPos));
    }

    // Attaches a symbol (under the given lang-library packageID) without defining
    // it in the root scope — contrast with initializeType.
    private void initializeTSymbol(BType type, Name name, PackageID packageID) {
        type.tsymbol = new BTypeSymbol(SymTag.TYPE, Flags.PUBLIC, name, packageID, type, rootPkgSymbol, builtinPos);
    }

    private void defineType(BType type, BTypeSymbol tSymbol) {
        type.tsymbol = tSymbol;
        rootScope.define(tSymbol.name, tSymbol);
    }

    /**
     * Registers every built-in unary and binary operator in the root scope.
     * The long explicit lists below spell out each (operator, lhs, rhs) -> result
     * combination; the helper methods handle the combinational integer cases.
     */
    public void defineOperators() {
        // All integer-subtype arithmetic combinations produce int.
        defineIntegerArithmeticOperations();
        defineXmlStringConcatanationOperations();

        // Binary addition operators (string/float/decimal, plus int mixes).
        defineBinaryOperator(OperatorKind.ADD, stringType, stringType, stringType);
        defineBinaryOperator(OperatorKind.ADD, stringType, charStringType, stringType);
        defineBinaryOperator(OperatorKind.ADD, charStringType, stringType, stringType);
        defineBinaryOperator(OperatorKind.ADD, charStringType, charStringType, stringType);
        defineBinaryOperator(OperatorKind.ADD, floatType, floatType, floatType);
        defineBinaryOperator(OperatorKind.ADD, decimalType, decimalType, decimalType);
        defineBinaryOperator(OperatorKind.ADD, intType, floatType, floatType);
        defineBinaryOperator(OperatorKind.ADD, floatType, intType, floatType);
        defineBinaryOperator(OperatorKind.ADD, intType, decimalType, decimalType);
        defineBinaryOperator(OperatorKind.ADD, decimalType, intType, decimalType);
        defineBinaryOperator(OperatorKind.ADD, floatType, decimalType, decimalType);
        defineBinaryOperator(OperatorKind.ADD, decimalType, floatType, decimalType);

        // Binary subtraction operators.
        defineBinaryOperator(OperatorKind.SUB, floatType, floatType, floatType);
        defineBinaryOperator(OperatorKind.SUB, decimalType, decimalType, decimalType);
        defineBinaryOperator(OperatorKind.SUB, floatType, intType, floatType);
        defineBinaryOperator(OperatorKind.SUB, intType, floatType, floatType);
        defineBinaryOperator(OperatorKind.SUB, decimalType, intType, decimalType);
        defineBinaryOperator(OperatorKind.SUB, intType, decimalType, decimalType);
        defineBinaryOperator(OperatorKind.SUB, decimalType, floatType, decimalType);
        defineBinaryOperator(OperatorKind.SUB, floatType, decimalType, decimalType);

        // Binary division operators.
        defineBinaryOperator(OperatorKind.DIV, floatType, floatType, floatType);
        defineBinaryOperator(OperatorKind.DIV, decimalType, decimalType, decimalType);
        defineBinaryOperator(OperatorKind.DIV, intType, floatType, floatType);
        defineBinaryOperator(OperatorKind.DIV, floatType, intType, floatType);
        defineBinaryOperator(OperatorKind.DIV, intType, decimalType, decimalType);
        defineBinaryOperator(OperatorKind.DIV, decimalType, intType, decimalType);
        defineBinaryOperator(OperatorKind.DIV, floatType, decimalType, decimalType);
        defineBinaryOperator(OperatorKind.DIV, decimalType, floatType, decimalType);

        // Binary multiplication operators.
        defineBinaryOperator(OperatorKind.MUL, floatType, floatType, floatType);
        defineBinaryOperator(OperatorKind.MUL, decimalType, decimalType, decimalType);
        defineBinaryOperator(OperatorKind.MUL, floatType, intType, floatType);
        defineBinaryOperator(OperatorKind.MUL, intType, floatType, floatType);
        defineBinaryOperator(OperatorKind.MUL, decimalType, intType, decimalType);
        defineBinaryOperator(OperatorKind.MUL, intType, decimalType, decimalType);
        defineBinaryOperator(OperatorKind.MUL, decimalType, floatType, decimalType);
        defineBinaryOperator(OperatorKind.MUL, floatType, decimalType, decimalType);

        // Binary remainder operators.
        // NOTE(review): MOD lacks the decimal/float mixed pair that SUB/DIV/MUL
        // have — confirm whether that is intentional.
        defineBinaryOperator(OperatorKind.MOD, floatType, floatType, floatType);
        defineBinaryOperator(OperatorKind.MOD, decimalType, decimalType, decimalType);
        defineBinaryOperator(OperatorKind.MOD, floatType, intType, floatType);
        defineBinaryOperator(OperatorKind.MOD, intType, floatType, floatType);
        defineBinaryOperator(OperatorKind.MOD, decimalType, intType, decimalType);
        defineBinaryOperator(OperatorKind.MOD, intType, decimalType, decimalType);

        // Bitwise and shift operators over the integer subtypes.
        defineIntegerBitwiseAndOperations();
        defineIntegerBitwiseOrOperations(OperatorKind.BITWISE_OR);
        defineIntegerBitwiseOrOperations(OperatorKind.BITWISE_XOR);
        defineIntegerLeftShiftOperations();
        defineIntegerRightShiftOperations(OperatorKind.BITWISE_RIGHT_SHIFT);
        defineIntegerRightShiftOperations(OperatorKind.BITWISE_UNSIGNED_RIGHT_SHIFT);

        // Binary equality operators (==).
        defineBinaryOperator(OperatorKind.EQUAL, intType, intType, booleanType);
        defineBinaryOperator(OperatorKind.EQUAL, byteType, byteType, booleanType);
        defineBinaryOperator(OperatorKind.EQUAL, floatType, floatType, booleanType);
        defineBinaryOperator(OperatorKind.EQUAL, decimalType, decimalType, booleanType);
        defineBinaryOperator(OperatorKind.EQUAL, booleanType, booleanType, booleanType);
        defineBinaryOperator(OperatorKind.EQUAL, stringType, stringType, booleanType);
        defineBinaryOperator(OperatorKind.EQUAL, intType, byteType, booleanType);
        defineBinaryOperator(OperatorKind.EQUAL, byteType, intType, booleanType);
        defineBinaryOperator(OperatorKind.EQUAL, jsonType, nilType, booleanType);
        defineBinaryOperator(OperatorKind.EQUAL, nilType, jsonType, booleanType);
        defineBinaryOperator(OperatorKind.EQUAL, anyType, nilType, booleanType);
        defineBinaryOperator(OperatorKind.EQUAL, nilType, anyType, booleanType);
        defineBinaryOperator(OperatorKind.EQUAL, anydataType, nilType, booleanType);
        defineBinaryOperator(OperatorKind.EQUAL, nilType, anydataType, booleanType);
        defineBinaryOperator(OperatorKind.EQUAL, nilType, nilType, booleanType);

        // Binary inequality operators (!=).
        defineBinaryOperator(OperatorKind.NOT_EQUAL, intType, intType, booleanType);
        defineBinaryOperator(OperatorKind.NOT_EQUAL, byteType, byteType, booleanType);
        defineBinaryOperator(OperatorKind.NOT_EQUAL, floatType, floatType, booleanType);
        defineBinaryOperator(OperatorKind.NOT_EQUAL, decimalType, decimalType, booleanType);
        defineBinaryOperator(OperatorKind.NOT_EQUAL, booleanType, booleanType, booleanType);
        defineBinaryOperator(OperatorKind.NOT_EQUAL, stringType, stringType, booleanType);
        defineBinaryOperator(OperatorKind.NOT_EQUAL, intType, byteType, booleanType);
        defineBinaryOperator(OperatorKind.NOT_EQUAL, byteType, intType, booleanType);
        defineBinaryOperator(OperatorKind.NOT_EQUAL, jsonType, nilType, booleanType);
        defineBinaryOperator(OperatorKind.NOT_EQUAL, nilType, jsonType, booleanType);
        defineBinaryOperator(OperatorKind.NOT_EQUAL, anyType, nilType, booleanType);
        defineBinaryOperator(OperatorKind.NOT_EQUAL, nilType, anyType, booleanType);
        defineBinaryOperator(OperatorKind.NOT_EQUAL, anydataType, nilType, booleanType);
        defineBinaryOperator(OperatorKind.NOT_EQUAL, nilType, anydataType, booleanType);
        defineBinaryOperator(OperatorKind.NOT_EQUAL, nilType, nilType, booleanType);

        // EQUALS operators — same type pairs as EQUAL above.
        defineBinaryOperator(OperatorKind.EQUALS, intType, intType, booleanType);
        defineBinaryOperator(OperatorKind.EQUALS, byteType, byteType, booleanType);
        defineBinaryOperator(OperatorKind.EQUALS, floatType, floatType, booleanType);
        defineBinaryOperator(OperatorKind.EQUALS, decimalType, decimalType, booleanType);
        defineBinaryOperator(OperatorKind.EQUALS, booleanType, booleanType, booleanType);
        defineBinaryOperator(OperatorKind.EQUALS, stringType, stringType, booleanType);
        defineBinaryOperator(OperatorKind.EQUALS, intType, byteType, booleanType);
        defineBinaryOperator(OperatorKind.EQUALS, byteType, intType, booleanType);
        defineBinaryOperator(OperatorKind.EQUALS, jsonType, nilType, booleanType);
        defineBinaryOperator(OperatorKind.EQUALS, nilType, jsonType, booleanType);
        defineBinaryOperator(OperatorKind.EQUALS, anyType, nilType, booleanType);
        defineBinaryOperator(OperatorKind.EQUALS, nilType, anyType, booleanType);
        defineBinaryOperator(OperatorKind.EQUALS, anydataType, nilType, booleanType);
        defineBinaryOperator(OperatorKind.EQUALS, nilType, anydataType, booleanType);
        defineBinaryOperator(OperatorKind.EQUALS, nilType, nilType, booleanType);

        // Reference equality operators (===).
        defineBinaryOperator(OperatorKind.REF_EQUAL, intType, intType, booleanType);
        defineBinaryOperator(OperatorKind.REF_EQUAL, byteType, byteType, booleanType);
        defineBinaryOperator(OperatorKind.REF_EQUAL, floatType, floatType, booleanType);
        defineBinaryOperator(OperatorKind.REF_EQUAL, decimalType, decimalType, booleanType);
        defineBinaryOperator(OperatorKind.REF_EQUAL, booleanType, booleanType, booleanType);
        defineBinaryOperator(OperatorKind.REF_EQUAL, stringType, stringType, booleanType);
        defineBinaryOperator(OperatorKind.REF_EQUAL, intType, byteType, booleanType);
        defineBinaryOperator(OperatorKind.REF_EQUAL, byteType, intType, booleanType);

        // Reference inequality operators (!==).
        defineBinaryOperator(OperatorKind.REF_NOT_EQUAL, intType, intType, booleanType);
        defineBinaryOperator(OperatorKind.REF_NOT_EQUAL, byteType, byteType, booleanType);
        defineBinaryOperator(OperatorKind.REF_NOT_EQUAL, floatType, floatType, booleanType);
        defineBinaryOperator(OperatorKind.REF_NOT_EQUAL, decimalType, decimalType, booleanType);
        defineBinaryOperator(OperatorKind.REF_NOT_EQUAL, booleanType, booleanType, booleanType);
        defineBinaryOperator(OperatorKind.REF_NOT_EQUAL, stringType, stringType, booleanType);
        defineBinaryOperator(OperatorKind.REF_NOT_EQUAL, intType, byteType, booleanType);
        defineBinaryOperator(OperatorKind.REF_NOT_EQUAL, byteType, intType, booleanType);

        // Ordered comparison: <
        defineBinaryOperator(OperatorKind.LESS_THAN, intType, intType, booleanType);
        defineBinaryOperator(OperatorKind.LESS_THAN, intType, floatType, booleanType);
        defineBinaryOperator(OperatorKind.LESS_THAN, floatType, intType, booleanType);
        defineBinaryOperator(OperatorKind.LESS_THAN, floatType, floatType, booleanType);
        defineBinaryOperator(OperatorKind.LESS_THAN, decimalType, decimalType, booleanType);
        defineBinaryOperator(OperatorKind.LESS_THAN, intType, decimalType, booleanType);
        defineBinaryOperator(OperatorKind.LESS_THAN, decimalType, intType, booleanType);
        defineBinaryOperator(OperatorKind.LESS_THAN, floatType, decimalType, booleanType);
        defineBinaryOperator(OperatorKind.LESS_THAN, decimalType, floatType, booleanType);

        // Ordered comparison: <=
        defineBinaryOperator(OperatorKind.LESS_EQUAL, intType, intType, booleanType);
        defineBinaryOperator(OperatorKind.LESS_EQUAL, floatType, intType, booleanType);
        defineBinaryOperator(OperatorKind.LESS_EQUAL, intType, floatType, booleanType);
        defineBinaryOperator(OperatorKind.LESS_EQUAL, floatType, floatType, booleanType);
        defineBinaryOperator(OperatorKind.LESS_EQUAL, decimalType, decimalType, booleanType);
        defineBinaryOperator(OperatorKind.LESS_EQUAL, intType, decimalType, booleanType);
        defineBinaryOperator(OperatorKind.LESS_EQUAL, decimalType, intType, booleanType);
        defineBinaryOperator(OperatorKind.LESS_EQUAL, floatType, decimalType, booleanType);
        defineBinaryOperator(OperatorKind.LESS_EQUAL, decimalType, floatType, booleanType);

        // Ordered comparison: >
        defineBinaryOperator(OperatorKind.GREATER_THAN, intType, intType, booleanType);
        defineBinaryOperator(OperatorKind.GREATER_THAN, floatType, intType, booleanType);
        defineBinaryOperator(OperatorKind.GREATER_THAN, intType, floatType, booleanType);
        defineBinaryOperator(OperatorKind.GREATER_THAN, floatType, floatType, booleanType);
        defineBinaryOperator(OperatorKind.GREATER_THAN, decimalType, decimalType, booleanType);
        defineBinaryOperator(OperatorKind.GREATER_THAN, intType, decimalType, booleanType);
        defineBinaryOperator(OperatorKind.GREATER_THAN, decimalType, intType, booleanType);
        defineBinaryOperator(OperatorKind.GREATER_THAN, floatType, decimalType, booleanType);
        defineBinaryOperator(OperatorKind.GREATER_THAN, decimalType, floatType, booleanType);

        // Ordered comparison: >=
        defineBinaryOperator(OperatorKind.GREATER_EQUAL, intType, intType, booleanType);
        defineBinaryOperator(OperatorKind.GREATER_EQUAL, floatType, intType, booleanType);
        defineBinaryOperator(OperatorKind.GREATER_EQUAL, intType, floatType, booleanType);
        defineBinaryOperator(OperatorKind.GREATER_EQUAL, floatType, floatType, booleanType);
        defineBinaryOperator(OperatorKind.GREATER_EQUAL, decimalType, decimalType, booleanType);
        defineBinaryOperator(OperatorKind.GREATER_EQUAL, intType, decimalType, booleanType);
        defineBinaryOperator(OperatorKind.GREATER_EQUAL, decimalType, intType, booleanType);
        defineBinaryOperator(OperatorKind.GREATER_EQUAL, floatType, decimalType, booleanType);
        defineBinaryOperator(OperatorKind.GREATER_EQUAL, decimalType, floatType, booleanType);

        // Logical operators.
        defineBinaryOperator(OperatorKind.AND, booleanType, booleanType, booleanType);
        defineBinaryOperator(OperatorKind.OR, booleanType, booleanType, booleanType);

        // Unary operators.
        defineUnaryOperator(OperatorKind.ADD, floatType, floatType);
        defineUnaryOperator(OperatorKind.ADD, decimalType, decimalType);
        defineUnaryOperator(OperatorKind.ADD, intType, intType);
        defineUnaryOperator(OperatorKind.SUB, floatType, floatType);
        defineUnaryOperator(OperatorKind.SUB, decimalType, decimalType);
        defineUnaryOperator(OperatorKind.SUB, intType, intType);
        defineUnaryOperator(OperatorKind.NOT, booleanType, booleanType);
        defineUnaryOperator(OperatorKind.BITWISE_COMPLEMENT, byteType, byteType);
        defineUnaryOperator(OperatorKind.BITWISE_COMPLEMENT, intType, intType);
    }

    // xml + string concatenation: mixing with plain xml yields xml; mixing with
    // xml:Text yields xml:Text.
    // NOTE(review): "Concatanation" in the method name is a typo ("Concatenation").
    private void defineXmlStringConcatanationOperations() {
        defineBinaryOperator(OperatorKind.ADD, xmlType, stringType, xmlType);
        defineBinaryOperator(OperatorKind.ADD, xmlType, charStringType, xmlType);
        defineBinaryOperator(OperatorKind.ADD, stringType, xmlType, xmlType);
        defineBinaryOperator(OperatorKind.ADD, charStringType, xmlType, xmlType);
        defineBinaryOperator(OperatorKind.ADD, stringType, xmlTextType, xmlTextType);
        defineBinaryOperator(OperatorKind.ADD, charStringType, xmlTextType, xmlTextType);
        defineBinaryOperator(OperatorKind.ADD, xmlTextType, stringType, xmlTextType);
        defineBinaryOperator(OperatorKind.ADD, xmlTextType, charStringType, xmlTextType);
    }

    // Every (int-subtype x int-subtype) arithmetic combination produces int.
    private void defineIntegerArithmeticOperations() {
        BType[] intTypes = {intType, byteType, signed32IntType, signed16IntType, signed8IntType,
                unsigned32IntType, unsigned16IntType, unsigned8IntType};
        for (BType lhs : intTypes) {
            for (BType rhs : intTypes) {
                defineBinaryOperator(OperatorKind.ADD, lhs, rhs, intType);
                defineBinaryOperator(OperatorKind.SUB, lhs, rhs, intType);
                defineBinaryOperator(OperatorKind.DIV, lhs, rhs, intType);
                defineBinaryOperator(OperatorKind.MUL, lhs, rhs, intType);
                defineBinaryOperator(OperatorKind.MOD, lhs, rhs, intType);
            }
        }
    }

    // Bitwise AND: result is unsigned if either operand is unsigned; for two
    // unsigned operands the narrower one (earlier in the array) wins; two signed
    // operands yield int.
    private void defineIntegerBitwiseAndOperations() {
        BType[] unsignedIntTypes = {byteType, unsigned8IntType, unsigned16IntType, unsigned32IntType};
        BType[] signedIntTypes = {intType, signed8IntType, signed16IntType, signed32IntType};
        for (BType unsigned : unsignedIntTypes) {
            for (BType signed : signedIntTypes) {
                defineBinaryOperator(OperatorKind.BITWISE_AND, unsigned, signed, unsigned);
            }
        }
        for (int i = 0; i < unsignedIntTypes.length; i++) {
            for (int j = 0; j < unsignedIntTypes.length; j++) {
                BType unsignedIntTypeLhs = unsignedIntTypes[i];
                BType unsignedIntTypeRhs = unsignedIntTypes[j];
                // The array is ordered narrow-to-wide, so i <= j picks the lhs
                // when it is the narrower (or equal) type.
                defineBinaryOperator(OperatorKind.BITWISE_AND, unsignedIntTypeLhs, unsignedIntTypeRhs,
                        i <= j ? unsignedIntTypeLhs : unsignedIntTypeRhs);
            }
        }
        for (BType signed : signedIntTypes) {
            for (BType unsigned : unsignedIntTypes) {
                defineBinaryOperator(OperatorKind.BITWISE_AND, signed, unsigned, unsigned);
            }
        }
        for (BType signedLhs : signedIntTypes) {
            for (BType signedRhs : signedIntTypes) {
                defineBinaryOperator(OperatorKind.BITWISE_AND, signedLhs, signedRhs, intType);
            }
        }
    }

    // Bitwise OR/XOR (the kind is passed in): result is int unless both operands
    // are unsigned, in which case the narrower unsigned type wins.
    private void defineIntegerBitwiseOrOperations(OperatorKind orOpKind) {
        BType[] unsignedIntTypes = {byteType, unsigned8IntType, unsigned16IntType, unsigned32IntType};
        BType[] signedIntTypes = {intType, signed8IntType, signed16IntType, signed32IntType};
        for (BType unsigned : unsignedIntTypes) {
            for (BType signed : signedIntTypes) {
                defineBinaryOperator(orOpKind, unsigned, signed, intType);
            }
        }
        for (int i = 0; i < unsignedIntTypes.length; i++) {
            for (int j = 0; j < unsignedIntTypes.length; j++) {
                BType unsignedIntTypeLhs = unsignedIntTypes[i];
                BType unsignedIntTypeRhs = unsignedIntTypes[j];
                defineBinaryOperator(orOpKind, unsignedIntTypeLhs, unsignedIntTypeRhs,
                        i <= j ? unsignedIntTypeLhs : unsignedIntTypeRhs);
            }
        }
        for (BType signed : signedIntTypes) {
            for (BType unsigned : unsignedIntTypes) {
                defineBinaryOperator(orOpKind, signed, unsigned, intType);
            }
        }
        for (BType signedLhs : signedIntTypes) {
            for (BType signedRhs : signedIntTypes) {
                defineBinaryOperator(orOpKind, signedLhs, signedRhs, intType);
            }
        }
    }

    // Left shift always widens to int, regardless of operand subtypes.
    private void defineIntegerLeftShiftOperations() {
        BType[] allIntTypes = {intType, byteType, signed32IntType, signed16IntType, signed8IntType,
                unsigned32IntType, unsigned16IntType, unsigned8IntType};
        for (BType lhs : allIntTypes) {
            for (BType rhs : allIntTypes) {
                defineBinaryOperator(OperatorKind.BITWISE_LEFT_SHIFT, lhs, rhs, intType);
            }
        }
    }

    // Right shift (signed or unsigned variant, passed in): an unsigned lhs keeps
    // its own type; a signed lhs yields int.
    private void defineIntegerRightShiftOperations(OperatorKind rightShiftOpKind) {
        BType[] unsignedIntTypes = {byteType, unsigned8IntType, unsigned16IntType, unsigned32IntType};
        BType[] signedIntTypes = {intType, signed8IntType, signed16IntType, signed32IntType};
        BType[] allIntTypes = {intType, byteType, signed32IntType, signed16IntType, signed8IntType,
                unsigned32IntType, unsigned16IntType, unsigned8IntType};
        for (BType unsignedLhs : unsignedIntTypes) {
            for (BType intRhs : allIntTypes) {
                defineBinaryOperator(rightShiftOpKind, unsignedLhs, intRhs, unsignedLhs);
            }
        }
        for (BType signedLhs : signedIntTypes) {
            for (BType intRhs : allIntTypes) {
                defineBinaryOperator(rightShiftOpKind, signedLhs, intRhs, intType);
            }
        }
    }

    /**
     * Defines a binary operator symbol {@code (lhsType, rhsType) -> retType}
     * under the operator's source-level name.
     */
    public void defineBinaryOperator(OperatorKind kind, BType lhsType, BType rhsType, BType retType) {
        List<BType> paramTypes = Lists.of(lhsType, rhsType);
        defineOperator(names.fromString(kind.value()), paramTypes, retType);
    }

    // Defines a unary operator symbol (type) -> retType.
    private void defineUnaryOperator(OperatorKind kind, BType type, BType retType) {
        List<BType> paramTypes = Lists.of(type);
        defineOperator(names.fromString(kind.value()), paramTypes, retType);
    }

    // Wraps the signature in a BInvokableType and defines the operator symbol in
    // the root scope. Overloads coexist because Scope.define is keyed by name
    // only — resolution presumably disambiguates by parameter types (confirm).
    private void defineOperator(Name name, List<BType> paramTypes, BType retType) {
        BInvokableType opType = new BInvokableType(paramTypes, retType, null);
        BOperatorSymbol symbol = new BOperatorSymbol(name, rootPkgSymbol.pkgID, opType, rootPkgSymbol, this.builtinPos);
        rootScope.define(name, symbol);
    }
}
class SymbolTable { private static final CompilerContext.Key<SymbolTable> SYM_TABLE_KEY = new CompilerContext.Key<>(); public static final PackageID TRANSACTION = new PackageID(Names.BUILTIN_ORG, Names.TRANSACTION_PACKAGE, Names.EMPTY); public static final Integer BBYTE_MIN_VALUE = 0; public static final Integer BBYTE_MAX_VALUE = 255; public static final Integer SIGNED32_MAX_VALUE = 2147483647; public static final Integer SIGNED32_MIN_VALUE = -2147483648; public static final Integer SIGNED16_MAX_VALUE = 32767; public static final Integer SIGNED16_MIN_VALUE = -32768; public static final Integer SIGNED8_MAX_VALUE = 127; public static final Integer SIGNED8_MIN_VALUE = -128; public static final Long UNSIGNED32_MAX_VALUE = 4294967295L; public static final Integer UNSIGNED16_MAX_VALUE = 65535; public static final Integer UNSIGNED8_MAX_VALUE = 255; public final DiagnosticPos builtinPos; public final BLangPackage rootPkgNode; public final BPackageSymbol rootPkgSymbol; public final BSymbol notFoundSymbol; public final Scope rootScope; public final BType noType = new BNoType(TypeTags.NONE); public final BType nilType = new BNilType(); public final BType neverType = new BNeverType(); public final BType intType = new BType(TypeTags.INT, null, Flags.READONLY); public final BType byteType = new BType(TypeTags.BYTE, null, Flags.READONLY); public final BType floatType = new BType(TypeTags.FLOAT, null, Flags.READONLY); public final BType decimalType = new BType(TypeTags.DECIMAL, null, Flags.READONLY); public final BType stringType = new BType(TypeTags.STRING, null, Flags.READONLY); public final BType booleanType = new BType(TypeTags.BOOLEAN, null, Flags.READONLY); public final BType jsonType = new BJSONType(TypeTags.JSON, null); public final BType anyType = new BAnyType(TypeTags.ANY, null); public final BType anydataType = new BAnydataType(TypeTags.ANYDATA, null); public final BMapType mapType = new BMapType(TypeTags.MAP, anyType, null); public final BMapType mapStringType = new 
BMapType(TypeTags.MAP, stringType, null); public final BMapType mapAnydataType = new BMapType(TypeTags.MAP, anydataType, null); public final BMapType mapJsonType = new BMapType(TypeTags.MAP, jsonType, null); public final BFutureType futureType = new BFutureType(TypeTags.FUTURE, nilType, null); public final BArrayType arrayType = new BArrayType(anyType); public final BArrayType arrayStringType = new BArrayType(stringType); public final BArrayType arrayAnydataType = new BArrayType(anydataType); public final BArrayType arrayJsonType = new BArrayType(jsonType); public final BType tupleType = new BTupleType(Lists.of(noType)); public final BType recordType = new BRecordType(null); public final BType stringArrayType = new BArrayType(stringType); public final BType jsonArrayType = new BArrayType(jsonType); public final BType anydataArrayType = new BArrayType(anydataType); public final BType anyServiceType = new BServiceType(null); public final BType handleType = new BHandleType(TypeTags.HANDLE, null); public final BTypedescType typeDesc = new BTypedescType(this.anyType, null); public final BType readonlyType = new BReadonlyType(TypeTags.READONLY, null); public final BType anydataOrReadonly = BUnionType.create(null, anydataType, readonlyType); public final BType semanticError = new BType(TypeTags.SEMANTIC_ERROR, null); public final BType nullSet = new BType(TypeTags.NULL_SET, null); public final BUnionType anydataOrReadOnlyType = BUnionType.create(null, anydataType, readonlyType); public BType streamType = new BStreamType(TypeTags.STREAM, anydataType, null, null); public BType tableType = new BTableType(TypeTags.TABLE, anydataType, null); public BMapType detailType = new BMapType(TypeTags.MAP, anydataOrReadonly, null); public BErrorType errorType = new BErrorType(null, detailType); public BConstructorSymbol errorConstructor; public BUnionType anyOrErrorType; public BUnionType pureType; public BUnionType errorOrNilType; public BFiniteType trueType; public BObjectType 
intRangeType; public BMapType mapAllType; public BArrayType arrayAllType; public BObjectType rawTemplateType; public final BIntSubType signed32IntType = new BIntSubType(TypeTags.SIGNED32_INT, Names.SIGNED32); public final BIntSubType signed16IntType = new BIntSubType(TypeTags.SIGNED16_INT, Names.SIGNED16); public final BIntSubType signed8IntType = new BIntSubType(TypeTags.SIGNED8_INT, Names.SIGNED8); public final BIntSubType unsigned32IntType = new BIntSubType(TypeTags.UNSIGNED32_INT, Names.UNSIGNED32); public final BIntSubType unsigned16IntType = new BIntSubType(TypeTags.UNSIGNED16_INT, Names.UNSIGNED16); public final BIntSubType unsigned8IntType = new BIntSubType(TypeTags.UNSIGNED8_INT, Names.UNSIGNED8); public final BStringSubType charStringType = new BStringSubType(TypeTags.CHAR_STRING, Names.CHAR); public final BXMLSubType xmlElementType = new BXMLSubType(TypeTags.XML_ELEMENT, Names.XML_ELEMENT); public final BXMLSubType xmlPIType = new BXMLSubType(TypeTags.XML_PI, Names.XML_PI); public final BXMLSubType xmlCommentType = new BXMLSubType(TypeTags.XML_COMMENT, Names.XML_COMMENT); public final BXMLSubType xmlTextType = new BXMLSubType(TypeTags.XML_TEXT, Names.XML_TEXT, Flags.READONLY); public final BType xmlType = new BXMLType(BUnionType.create(null, xmlElementType, xmlCommentType, xmlPIType, xmlTextType), null); public BPackageSymbol langInternalModuleSymbol; public BPackageSymbol langAnnotationModuleSymbol; public BPackageSymbol langArrayModuleSymbol; public BPackageSymbol langDecimalModuleSymbol; public BPackageSymbol langErrorModuleSymbol; public BPackageSymbol langFloatModuleSymbol; public BPackageSymbol langFutureModuleSymbol; public BPackageSymbol langIntModuleSymbol; public BPackageSymbol langMapModuleSymbol; public BPackageSymbol langObjectModuleSymbol; public BPackageSymbol langStreamModuleSymbol; public BPackageSymbol langStringModuleSymbol; public BPackageSymbol langTableModuleSymbol; public BPackageSymbol langTypedescModuleSymbol; public 
BPackageSymbol langValueModuleSymbol; public BPackageSymbol langXmlModuleSymbol; public BPackageSymbol langBooleanModuleSymbol; public BPackageSymbol langQueryModuleSymbol; public BPackageSymbol langTransactionModuleSymbol; private Names names; public Map<BPackageSymbol, SymbolEnv> pkgEnvMap = new HashMap<>(); public Map<Name, BPackageSymbol> predeclaredModules = new HashMap<>(); public static SymbolTable getInstance(CompilerContext context) { SymbolTable symTable = context.get(SYM_TABLE_KEY); if (symTable == null) { symTable = new SymbolTable(context); } return symTable; } private SymbolTable(CompilerContext context) { context.put(SYM_TABLE_KEY, this); this.names = Names.getInstance(context); this.rootPkgNode = (BLangPackage) TreeBuilder.createPackageNode(); this.rootPkgSymbol = new BPackageSymbol(PackageID.ANNOTATIONS, null, null, BUILTIN); this.builtinPos = new DiagnosticPos(new BDiagnosticSource(rootPkgSymbol.pkgID, Names.EMPTY.value), 0, 0, 0, 0); this.rootPkgNode.pos = this.builtinPos; this.rootPkgNode.symbol = this.rootPkgSymbol; this.rootScope = new Scope(rootPkgSymbol); this.rootPkgSymbol.scope = this.rootScope; this.rootPkgSymbol.pos = this.builtinPos; this.notFoundSymbol = new BSymbol(SymTag.NIL, Flags.PUBLIC, Names.INVALID, rootPkgSymbol.pkgID, noType, rootPkgSymbol, builtinPos, SymbolOrigin.VIRTUAL); initializeType(intType, TypeKind.INT.typeName(), BUILTIN); initializeType(byteType, TypeKind.BYTE.typeName(), BUILTIN); initializeType(floatType, TypeKind.FLOAT.typeName(), BUILTIN); initializeType(decimalType, TypeKind.DECIMAL.typeName(), BUILTIN); initializeType(stringType, TypeKind.STRING.typeName(), BUILTIN); initializeType(booleanType, TypeKind.BOOLEAN.typeName(), BUILTIN); initializeType(jsonType, TypeKind.JSON.typeName(), BUILTIN); initializeType(xmlType, TypeKind.XML.typeName(), BUILTIN); initializeType(streamType, TypeKind.STREAM.typeName(), BUILTIN); initializeType(tableType, TypeKind.TABLE.typeName(), BUILTIN); initializeType(mapType, 
TypeKind.MAP.typeName(), VIRTUAL); initializeType(mapStringType, TypeKind.MAP.typeName(), VIRTUAL); initializeType(mapAnydataType, TypeKind.MAP.typeName(), VIRTUAL); initializeType(futureType, TypeKind.FUTURE.typeName(), BUILTIN); initializeType(anyType, TypeKind.ANY.typeName(), BUILTIN); initializeType(anydataType, TypeKind.ANYDATA.typeName(), BUILTIN); initializeType(nilType, TypeKind.NIL.typeName(), BUILTIN); initializeType(neverType, TypeKind.NEVER.typeName(), BUILTIN); initializeType(anyServiceType, TypeKind.SERVICE.typeName(), BUILTIN); initializeType(handleType, TypeKind.HANDLE.typeName(), BUILTIN); initializeType(typeDesc, TypeKind.TYPEDESC.typeName(), BUILTIN); initializeType(readonlyType, TypeKind.READONLY.typeName(), BUILTIN); initializeTSymbol(signed32IntType, Names.SIGNED32, PackageID.INT); initializeTSymbol(signed16IntType, Names.SIGNED16, PackageID.INT); initializeTSymbol(signed8IntType, Names.SIGNED8, PackageID.INT); initializeTSymbol(unsigned32IntType, Names.UNSIGNED32, PackageID.INT); initializeTSymbol(unsigned16IntType, Names.UNSIGNED16, PackageID.INT); initializeTSymbol(unsigned8IntType, Names.UNSIGNED8, PackageID.INT); initializeTSymbol(charStringType, Names.CHAR, PackageID.STRING); initializeTSymbol(xmlElementType, Names.XML_ELEMENT, PackageID.XML); initializeTSymbol(xmlPIType, Names.XML_PI, PackageID.XML); initializeTSymbol(xmlCommentType, Names.XML_COMMENT, PackageID.XML); initializeTSymbol(xmlTextType, Names.XML_TEXT, PackageID.XML); BLangLiteral trueLiteral = new BLangLiteral(); trueLiteral.type = this.booleanType; trueLiteral.value = Boolean.TRUE; BTypeSymbol finiteTypeSymbol = Symbols.createTypeSymbol(SymTag.FINITE_TYPE, Flags.PUBLIC, names.fromString("$anonType$TRUE"), rootPkgNode.packageID, null, rootPkgNode.symbol.owner, this.builtinPos, VIRTUAL); this.trueType = new BFiniteType(finiteTypeSymbol, new HashSet<BLangExpression>() {{ add(trueLiteral); }}); } public BType getTypeFromTag(int tag) { switch (tag) { case TypeTags.INT: return 
intType; case TypeTags.BYTE: return byteType; case TypeTags.FLOAT: return floatType; case TypeTags.DECIMAL: return decimalType; case TypeTags.STRING: return stringType; case TypeTags.BOOLEAN: return booleanType; case TypeTags.JSON: return jsonType; case TypeTags.XML: return xmlType; case TypeTags.XML_COMMENT: return xmlCommentType; case TypeTags.XML_PI: return xmlPIType; case TypeTags.XML_ELEMENT: return xmlElementType; case TypeTags.XML_TEXT: return xmlTextType; case TypeTags.STREAM: return streamType; case TypeTags.TABLE: return tableType; case TypeTags.NIL: return nilType; case TypeTags.NEVER: return neverType; case TypeTags.ERROR: return errorType; case TypeTags.SIGNED32_INT: return signed32IntType; case TypeTags.SIGNED16_INT: return signed16IntType; case TypeTags.SIGNED8_INT: return signed8IntType; case TypeTags.UNSIGNED32_INT: return unsigned32IntType; case TypeTags.UNSIGNED16_INT: return unsigned16IntType; case TypeTags.UNSIGNED8_INT: return unsigned8IntType; case TypeTags.CHAR_STRING: return charStringType; default: return semanticError; } } public BType getLangLibSubType(String name) { switch (name) { case Names.STRING_SIGNED32: return this.signed32IntType; case Names.STRING_SIGNED16: return this.signed16IntType; case Names.STRING_SIGNED8: return this.signed8IntType; case Names.STRING_UNSIGNED32: return this.unsigned32IntType; case Names.STRING_UNSIGNED16: return this.unsigned16IntType; case Names.STRING_UNSIGNED8: return this.unsigned8IntType; case Names.STRING_CHAR: return this.charStringType; case Names.STRING_XML_ELEMENT: return this.xmlElementType; case Names.STRING_XML_PI: return this.xmlPIType; case Names.STRING_XML_COMMENT: return this.xmlCommentType; case Names.STRING_XML_TEXT: return this.xmlTextType; } throw new IllegalStateException("LangLib Subtype not found: " + name); } private void initializeType(BType type, String name, SymbolOrigin origin) { initializeType(type, names.fromString(name), origin); } private void initializeType(BType type, 
Name name, SymbolOrigin origin) { defineType(type, new BTypeSymbol(SymTag.TYPE, Flags.PUBLIC, name, rootPkgSymbol.pkgID, type, rootPkgSymbol, builtinPos, origin)); } private void initializeTSymbol(BType type, Name name, PackageID packageID) { type.tsymbol = new BTypeSymbol(SymTag.TYPE, Flags.PUBLIC, name, packageID, type, rootPkgSymbol, builtinPos, BUILTIN); } private void defineType(BType type, BTypeSymbol tSymbol) { type.tsymbol = tSymbol; rootScope.define(tSymbol.name, tSymbol); } public void defineOperators() { defineIntegerArithmeticOperations(); defineXmlStringConcatanationOperations(); defineBinaryOperator(OperatorKind.ADD, stringType, stringType, stringType); defineBinaryOperator(OperatorKind.ADD, stringType, charStringType, stringType); defineBinaryOperator(OperatorKind.ADD, charStringType, stringType, stringType); defineBinaryOperator(OperatorKind.ADD, charStringType, charStringType, stringType); defineBinaryOperator(OperatorKind.ADD, floatType, floatType, floatType); defineBinaryOperator(OperatorKind.ADD, decimalType, decimalType, decimalType); defineBinaryOperator(OperatorKind.ADD, intType, floatType, floatType); defineBinaryOperator(OperatorKind.ADD, floatType, intType, floatType); defineBinaryOperator(OperatorKind.ADD, intType, decimalType, decimalType); defineBinaryOperator(OperatorKind.ADD, decimalType, intType, decimalType); defineBinaryOperator(OperatorKind.ADD, floatType, decimalType, decimalType); defineBinaryOperator(OperatorKind.ADD, decimalType, floatType, decimalType); defineBinaryOperator(OperatorKind.SUB, floatType, floatType, floatType); defineBinaryOperator(OperatorKind.SUB, decimalType, decimalType, decimalType); defineBinaryOperator(OperatorKind.SUB, floatType, intType, floatType); defineBinaryOperator(OperatorKind.SUB, intType, floatType, floatType); defineBinaryOperator(OperatorKind.SUB, decimalType, intType, decimalType); defineBinaryOperator(OperatorKind.SUB, intType, decimalType, decimalType); 
defineBinaryOperator(OperatorKind.SUB, decimalType, floatType, decimalType); defineBinaryOperator(OperatorKind.SUB, floatType, decimalType, decimalType); defineBinaryOperator(OperatorKind.DIV, floatType, floatType, floatType); defineBinaryOperator(OperatorKind.DIV, decimalType, decimalType, decimalType); defineBinaryOperator(OperatorKind.DIV, intType, floatType, floatType); defineBinaryOperator(OperatorKind.DIV, floatType, intType, floatType); defineBinaryOperator(OperatorKind.DIV, intType, decimalType, decimalType); defineBinaryOperator(OperatorKind.DIV, decimalType, intType, decimalType); defineBinaryOperator(OperatorKind.DIV, floatType, decimalType, decimalType); defineBinaryOperator(OperatorKind.DIV, decimalType, floatType, decimalType); defineBinaryOperator(OperatorKind.MUL, floatType, floatType, floatType); defineBinaryOperator(OperatorKind.MUL, decimalType, decimalType, decimalType); defineBinaryOperator(OperatorKind.MUL, floatType, intType, floatType); defineBinaryOperator(OperatorKind.MUL, intType, floatType, floatType); defineBinaryOperator(OperatorKind.MUL, decimalType, intType, decimalType); defineBinaryOperator(OperatorKind.MUL, intType, decimalType, decimalType); defineBinaryOperator(OperatorKind.MUL, decimalType, floatType, decimalType); defineBinaryOperator(OperatorKind.MUL, floatType, decimalType, decimalType); defineBinaryOperator(OperatorKind.MOD, floatType, floatType, floatType); defineBinaryOperator(OperatorKind.MOD, decimalType, decimalType, decimalType); defineBinaryOperator(OperatorKind.MOD, floatType, intType, floatType); defineBinaryOperator(OperatorKind.MOD, intType, floatType, floatType); defineBinaryOperator(OperatorKind.MOD, decimalType, intType, decimalType); defineBinaryOperator(OperatorKind.MOD, intType, decimalType, decimalType); defineIntegerBitwiseAndOperations(); defineIntegerBitwiseOrOperations(OperatorKind.BITWISE_OR); defineIntegerBitwiseOrOperations(OperatorKind.BITWISE_XOR); defineIntegerLeftShiftOperations(); 
defineIntegerRightShiftOperations(OperatorKind.BITWISE_RIGHT_SHIFT); defineIntegerRightShiftOperations(OperatorKind.BITWISE_UNSIGNED_RIGHT_SHIFT); defineBinaryOperator(OperatorKind.EQUAL, intType, intType, booleanType); defineBinaryOperator(OperatorKind.EQUAL, byteType, byteType, booleanType); defineBinaryOperator(OperatorKind.EQUAL, floatType, floatType, booleanType); defineBinaryOperator(OperatorKind.EQUAL, decimalType, decimalType, booleanType); defineBinaryOperator(OperatorKind.EQUAL, booleanType, booleanType, booleanType); defineBinaryOperator(OperatorKind.EQUAL, stringType, stringType, booleanType); defineBinaryOperator(OperatorKind.EQUAL, intType, byteType, booleanType); defineBinaryOperator(OperatorKind.EQUAL, byteType, intType, booleanType); defineBinaryOperator(OperatorKind.EQUAL, jsonType, nilType, booleanType); defineBinaryOperator(OperatorKind.EQUAL, nilType, jsonType, booleanType); defineBinaryOperator(OperatorKind.EQUAL, anyType, nilType, booleanType); defineBinaryOperator(OperatorKind.EQUAL, nilType, anyType, booleanType); defineBinaryOperator(OperatorKind.EQUAL, anydataType, nilType, booleanType); defineBinaryOperator(OperatorKind.EQUAL, nilType, anydataType, booleanType); defineBinaryOperator(OperatorKind.EQUAL, nilType, nilType, booleanType); defineBinaryOperator(OperatorKind.NOT_EQUAL, intType, intType, booleanType); defineBinaryOperator(OperatorKind.NOT_EQUAL, byteType, byteType, booleanType); defineBinaryOperator(OperatorKind.NOT_EQUAL, floatType, floatType, booleanType); defineBinaryOperator(OperatorKind.NOT_EQUAL, decimalType, decimalType, booleanType); defineBinaryOperator(OperatorKind.NOT_EQUAL, booleanType, booleanType, booleanType); defineBinaryOperator(OperatorKind.NOT_EQUAL, stringType, stringType, booleanType); defineBinaryOperator(OperatorKind.NOT_EQUAL, intType, byteType, booleanType); defineBinaryOperator(OperatorKind.NOT_EQUAL, byteType, intType, booleanType); defineBinaryOperator(OperatorKind.NOT_EQUAL, jsonType, nilType, 
booleanType); defineBinaryOperator(OperatorKind.NOT_EQUAL, nilType, jsonType, booleanType); defineBinaryOperator(OperatorKind.NOT_EQUAL, anyType, nilType, booleanType); defineBinaryOperator(OperatorKind.NOT_EQUAL, nilType, anyType, booleanType); defineBinaryOperator(OperatorKind.NOT_EQUAL, anydataType, nilType, booleanType); defineBinaryOperator(OperatorKind.NOT_EQUAL, nilType, anydataType, booleanType); defineBinaryOperator(OperatorKind.NOT_EQUAL, nilType, nilType, booleanType); defineBinaryOperator(OperatorKind.EQUALS, intType, intType, booleanType); defineBinaryOperator(OperatorKind.EQUALS, byteType, byteType, booleanType); defineBinaryOperator(OperatorKind.EQUALS, floatType, floatType, booleanType); defineBinaryOperator(OperatorKind.EQUALS, decimalType, decimalType, booleanType); defineBinaryOperator(OperatorKind.EQUALS, booleanType, booleanType, booleanType); defineBinaryOperator(OperatorKind.EQUALS, stringType, stringType, booleanType); defineBinaryOperator(OperatorKind.EQUALS, intType, byteType, booleanType); defineBinaryOperator(OperatorKind.EQUALS, byteType, intType, booleanType); defineBinaryOperator(OperatorKind.EQUALS, jsonType, nilType, booleanType); defineBinaryOperator(OperatorKind.EQUALS, nilType, jsonType, booleanType); defineBinaryOperator(OperatorKind.EQUALS, anyType, nilType, booleanType); defineBinaryOperator(OperatorKind.EQUALS, nilType, anyType, booleanType); defineBinaryOperator(OperatorKind.EQUALS, anydataType, nilType, booleanType); defineBinaryOperator(OperatorKind.EQUALS, nilType, anydataType, booleanType); defineBinaryOperator(OperatorKind.EQUALS, nilType, nilType, booleanType); defineBinaryOperator(OperatorKind.REF_EQUAL, intType, intType, booleanType); defineBinaryOperator(OperatorKind.REF_EQUAL, byteType, byteType, booleanType); defineBinaryOperator(OperatorKind.REF_EQUAL, floatType, floatType, booleanType); defineBinaryOperator(OperatorKind.REF_EQUAL, decimalType, decimalType, booleanType); 
defineBinaryOperator(OperatorKind.REF_EQUAL, booleanType, booleanType, booleanType); defineBinaryOperator(OperatorKind.REF_EQUAL, stringType, stringType, booleanType); defineBinaryOperator(OperatorKind.REF_EQUAL, intType, byteType, booleanType); defineBinaryOperator(OperatorKind.REF_EQUAL, byteType, intType, booleanType); defineBinaryOperator(OperatorKind.REF_NOT_EQUAL, intType, intType, booleanType); defineBinaryOperator(OperatorKind.REF_NOT_EQUAL, byteType, byteType, booleanType); defineBinaryOperator(OperatorKind.REF_NOT_EQUAL, floatType, floatType, booleanType); defineBinaryOperator(OperatorKind.REF_NOT_EQUAL, decimalType, decimalType, booleanType); defineBinaryOperator(OperatorKind.REF_NOT_EQUAL, booleanType, booleanType, booleanType); defineBinaryOperator(OperatorKind.REF_NOT_EQUAL, stringType, stringType, booleanType); defineBinaryOperator(OperatorKind.REF_NOT_EQUAL, intType, byteType, booleanType); defineBinaryOperator(OperatorKind.REF_NOT_EQUAL, byteType, intType, booleanType); defineBinaryOperator(OperatorKind.LESS_THAN, intType, intType, booleanType); defineBinaryOperator(OperatorKind.LESS_THAN, intType, floatType, booleanType); defineBinaryOperator(OperatorKind.LESS_THAN, floatType, intType, booleanType); defineBinaryOperator(OperatorKind.LESS_THAN, floatType, floatType, booleanType); defineBinaryOperator(OperatorKind.LESS_THAN, decimalType, decimalType, booleanType); defineBinaryOperator(OperatorKind.LESS_THAN, intType, decimalType, booleanType); defineBinaryOperator(OperatorKind.LESS_THAN, decimalType, intType, booleanType); defineBinaryOperator(OperatorKind.LESS_THAN, floatType, decimalType, booleanType); defineBinaryOperator(OperatorKind.LESS_THAN, decimalType, floatType, booleanType); defineBinaryOperator(OperatorKind.LESS_EQUAL, intType, intType, booleanType); defineBinaryOperator(OperatorKind.LESS_EQUAL, floatType, intType, booleanType); defineBinaryOperator(OperatorKind.LESS_EQUAL, intType, floatType, booleanType); 
defineBinaryOperator(OperatorKind.LESS_EQUAL, floatType, floatType, booleanType); defineBinaryOperator(OperatorKind.LESS_EQUAL, decimalType, decimalType, booleanType); defineBinaryOperator(OperatorKind.LESS_EQUAL, intType, decimalType, booleanType); defineBinaryOperator(OperatorKind.LESS_EQUAL, decimalType, intType, booleanType); defineBinaryOperator(OperatorKind.LESS_EQUAL, floatType, decimalType, booleanType); defineBinaryOperator(OperatorKind.LESS_EQUAL, decimalType, floatType, booleanType); defineBinaryOperator(OperatorKind.GREATER_THAN, intType, intType, booleanType); defineBinaryOperator(OperatorKind.GREATER_THAN, floatType, intType, booleanType); defineBinaryOperator(OperatorKind.GREATER_THAN, intType, floatType, booleanType); defineBinaryOperator(OperatorKind.GREATER_THAN, floatType, floatType, booleanType); defineBinaryOperator(OperatorKind.GREATER_THAN, decimalType, decimalType, booleanType); defineBinaryOperator(OperatorKind.GREATER_THAN, intType, decimalType, booleanType); defineBinaryOperator(OperatorKind.GREATER_THAN, decimalType, intType, booleanType); defineBinaryOperator(OperatorKind.GREATER_THAN, floatType, decimalType, booleanType); defineBinaryOperator(OperatorKind.GREATER_THAN, decimalType, floatType, booleanType); defineBinaryOperator(OperatorKind.GREATER_EQUAL, intType, intType, booleanType); defineBinaryOperator(OperatorKind.GREATER_EQUAL, floatType, intType, booleanType); defineBinaryOperator(OperatorKind.GREATER_EQUAL, intType, floatType, booleanType); defineBinaryOperator(OperatorKind.GREATER_EQUAL, floatType, floatType, booleanType); defineBinaryOperator(OperatorKind.GREATER_EQUAL, decimalType, decimalType, booleanType); defineBinaryOperator(OperatorKind.GREATER_EQUAL, intType, decimalType, booleanType); defineBinaryOperator(OperatorKind.GREATER_EQUAL, decimalType, intType, booleanType); defineBinaryOperator(OperatorKind.GREATER_EQUAL, floatType, decimalType, booleanType); defineBinaryOperator(OperatorKind.GREATER_EQUAL, decimalType, 
floatType, booleanType); defineBinaryOperator(OperatorKind.AND, booleanType, booleanType, booleanType); defineBinaryOperator(OperatorKind.OR, booleanType, booleanType, booleanType); defineUnaryOperator(OperatorKind.ADD, floatType, floatType); defineUnaryOperator(OperatorKind.ADD, decimalType, decimalType); defineUnaryOperator(OperatorKind.ADD, intType, intType); defineUnaryOperator(OperatorKind.SUB, floatType, floatType); defineUnaryOperator(OperatorKind.SUB, decimalType, decimalType); defineUnaryOperator(OperatorKind.SUB, intType, intType); defineUnaryOperator(OperatorKind.NOT, booleanType, booleanType); defineUnaryOperator(OperatorKind.BITWISE_COMPLEMENT, byteType, byteType); defineUnaryOperator(OperatorKind.BITWISE_COMPLEMENT, intType, intType); } private void defineXmlStringConcatanationOperations() { defineBinaryOperator(OperatorKind.ADD, xmlType, stringType, xmlType); defineBinaryOperator(OperatorKind.ADD, xmlType, charStringType, xmlType); defineBinaryOperator(OperatorKind.ADD, stringType, xmlType, xmlType); defineBinaryOperator(OperatorKind.ADD, charStringType, xmlType, xmlType); defineBinaryOperator(OperatorKind.ADD, stringType, xmlTextType, xmlTextType); defineBinaryOperator(OperatorKind.ADD, charStringType, xmlTextType, xmlTextType); defineBinaryOperator(OperatorKind.ADD, xmlTextType, stringType, xmlTextType); defineBinaryOperator(OperatorKind.ADD, xmlTextType, charStringType, xmlTextType); } private void defineIntegerArithmeticOperations() { BType[] intTypes = {intType, byteType, signed32IntType, signed16IntType, signed8IntType, unsigned32IntType, unsigned16IntType, unsigned8IntType}; for (BType lhs : intTypes) { for (BType rhs : intTypes) { defineBinaryOperator(OperatorKind.ADD, lhs, rhs, intType); defineBinaryOperator(OperatorKind.SUB, lhs, rhs, intType); defineBinaryOperator(OperatorKind.DIV, lhs, rhs, intType); defineBinaryOperator(OperatorKind.MUL, lhs, rhs, intType); defineBinaryOperator(OperatorKind.MOD, lhs, rhs, intType); } } } private void 
defineIntegerBitwiseAndOperations() { BType[] unsignedIntTypes = {byteType, unsigned8IntType, unsigned16IntType, unsigned32IntType}; BType[] signedIntTypes = {intType, signed8IntType, signed16IntType, signed32IntType}; for (BType unsigned : unsignedIntTypes) { for (BType signed : signedIntTypes) { defineBinaryOperator(OperatorKind.BITWISE_AND, unsigned, signed, unsigned); } } for (int i = 0; i < unsignedIntTypes.length; i++) { for (int j = 0; j < unsignedIntTypes.length; j++) { BType unsignedIntTypeLhs = unsignedIntTypes[i]; BType unsignedIntTypeRhs = unsignedIntTypes[j]; defineBinaryOperator(OperatorKind.BITWISE_AND, unsignedIntTypeLhs, unsignedIntTypeRhs, i <= j ? unsignedIntTypeLhs : unsignedIntTypeRhs); } } for (BType signed : signedIntTypes) { for (BType unsigned : unsignedIntTypes) { defineBinaryOperator(OperatorKind.BITWISE_AND, signed, unsigned, unsigned); } } for (BType signedLhs : signedIntTypes) { for (BType signedRhs : signedIntTypes) { defineBinaryOperator(OperatorKind.BITWISE_AND, signedLhs, signedRhs, intType); } } } private void defineIntegerBitwiseOrOperations(OperatorKind orOpKind) { BType[] unsignedIntTypes = {byteType, unsigned8IntType, unsigned16IntType, unsigned32IntType}; BType[] signedIntTypes = {intType, signed8IntType, signed16IntType, signed32IntType}; for (BType unsigned : unsignedIntTypes) { for (BType signed : signedIntTypes) { defineBinaryOperator(orOpKind, unsigned, signed, intType); } } for (int i = 0; i < unsignedIntTypes.length; i++) { for (int j = 0; j < unsignedIntTypes.length; j++) { BType unsignedIntTypeLhs = unsignedIntTypes[i]; BType unsignedIntTypeRhs = unsignedIntTypes[j]; defineBinaryOperator(orOpKind, unsignedIntTypeLhs, unsignedIntTypeRhs, i <= j ? 
unsignedIntTypeLhs : unsignedIntTypeRhs); } } for (BType signed : signedIntTypes) { for (BType unsigned : unsignedIntTypes) { defineBinaryOperator(orOpKind, signed, unsigned, intType); } } for (BType signedLhs : signedIntTypes) { for (BType signedRhs : signedIntTypes) { defineBinaryOperator(orOpKind, signedLhs, signedRhs, intType); } } } private void defineIntegerLeftShiftOperations() { BType[] allIntTypes = {intType, byteType, signed32IntType, signed16IntType, signed8IntType, unsigned32IntType, unsigned16IntType, unsigned8IntType}; for (BType lhs : allIntTypes) { for (BType rhs : allIntTypes) { defineBinaryOperator(OperatorKind.BITWISE_LEFT_SHIFT, lhs, rhs, intType); } } } private void defineIntegerRightShiftOperations(OperatorKind rightShiftOpKind) { BType[] unsignedIntTypes = {byteType, unsigned8IntType, unsigned16IntType, unsigned32IntType}; BType[] signedIntTypes = {intType, signed8IntType, signed16IntType, signed32IntType}; BType[] allIntTypes = {intType, byteType, signed32IntType, signed16IntType, signed8IntType, unsigned32IntType, unsigned16IntType, unsigned8IntType}; for (BType unsignedLhs : unsignedIntTypes) { for (BType intRhs : allIntTypes) { defineBinaryOperator(rightShiftOpKind, unsignedLhs, intRhs, unsignedLhs); } } for (BType signedLhs : signedIntTypes) { for (BType intRhs : allIntTypes) { defineBinaryOperator(rightShiftOpKind, signedLhs, intRhs, intType); } } } public void defineBinaryOperator(OperatorKind kind, BType lhsType, BType rhsType, BType retType) { List<BType> paramTypes = Lists.of(lhsType, rhsType); defineOperator(names.fromString(kind.value()), paramTypes, retType); } private void defineUnaryOperator(OperatorKind kind, BType type, BType retType) { List<BType> paramTypes = Lists.of(type); defineOperator(names.fromString(kind.value()), paramTypes, retType); } private void defineOperator(Name name, List<BType> paramTypes, BType retType) { BInvokableType opType = new BInvokableType(paramTypes, retType, null); BOperatorSymbol symbol = new 
BOperatorSymbol(name, rootPkgSymbol.pkgID, opType, rootPkgSymbol, this.builtinPos, BUILTIN); rootScope.define(name, symbol); } }
the rule only takes effect only when group by keys contain all unique key. If group by key is (colA, colB) but unique key is (colA, colB, colC), we cannot remove the agg.
public boolean check(OptExpression input, OptimizerContext context) { LogicalAggregationOperator aggOp = input.getOp().cast(); if (aggOp.getGroupingKeys().isEmpty()) { return false; } for (Map.Entry<ColumnRefOperator, CallOperator> entry : aggOp.getAggregations().entrySet()) { String fnName = entry.getValue().getFnName(); if (!(fnName.equals(FunctionSet.SUM) || fnName.equals(FunctionSet.COUNT) || fnName.equals(FunctionSet.AVG) || fnName.equals(FunctionSet.FIRST_VALUE) || fnName.equals(FunctionSet.MAX) || fnName.equals(FunctionSet.MIN) || fnName.equals(FunctionSet.GROUP_CONCAT))) { return false; } } UKFKConstraintsCollector collector = new UKFKConstraintsCollector(); input.getOp().accept(collector, input, null); OptExpression childOptExpression = input.inputAt(0); for (ColumnRefOperator columnRefOperator : aggOp.getGroupingKeys()) { if (childOptExpression.getConstraints().getUniqueConstraint(columnRefOperator.getId()) == null) { return false; } } return true; }
if (childOptExpression.getConstraints().getUniqueConstraint(columnRefOperator.getId()) == null) {
public boolean check(OptExpression input, OptimizerContext context) { LogicalAggregationOperator aggOp = input.getOp().cast(); List<ColumnRefOperator> groupKeys = aggOp.getGroupingKeys(); for (Map.Entry<ColumnRefOperator, CallOperator> entry : aggOp.getAggregations().entrySet()) { if (entry.getValue().isDistinct()) { return false; } String fnName = entry.getValue().getFnName(); if (!(fnName.equals(FunctionSet.SUM) || fnName.equals(FunctionSet.COUNT) || fnName.equals(FunctionSet.AVG) || fnName.equals(FunctionSet.FIRST_VALUE) || fnName.equals(FunctionSet.MAX) || fnName.equals(FunctionSet.MIN) || fnName.equals(FunctionSet.GROUP_CONCAT))) { return false; } } UKFKConstraintsCollector collector = new UKFKConstraintsCollector(); input.getOp().accept(collector, input, null); OptExpression childOptExpression = input.inputAt(0); Map<Integer, UKFKConstraints.UniqueConstraintWrapper> uniqueKeys = childOptExpression.getConstraints().getTableUniqueKeys(); if (uniqueKeys.isEmpty()) { return false; } if (uniqueKeys.size() != groupKeys.size()) { return false; } Set<Integer> groupColumnRefIds = groupKeys.stream() .map(ColumnRefOperator::getId) .collect(Collectors.toSet()); Set<Integer> uniqueColumnRefIds = uniqueKeys.keySet().stream().collect(Collectors.toSet()); if (!groupColumnRefIds.equals(uniqueColumnRefIds)) { return false; } return true; }
// ---------------------------------------------------------------------------
// EliminateAggRule: transformation rule matching LOGICAL_AGGR over any child.
// transform() replaces the aggregation with a projection: COUNT(x) becomes
// if(x IS NULL, 0, 1) and SUM/AVG/FIRST_VALUE/MAX/MIN/GROUP_CONCAT pass their
// first argument through unchanged; calls to any other function are dropped
// from the projection map (handleAggregationFunction returns null for them).
// NOTE(review): the doubled "@Override @Override" before transform() is not
// valid Java (@Override is not repeatable); a member (likely a check() method)
// appears to have been elided between the two annotations -- confirm against
// the full source.
// NOTE(review): transform() builds the projection over
// input.inputAt(0).getInputs(), i.e. over the grandchildren, which looks like
// it drops the aggregation's direct child operator; a later variant of this
// rule in this file uses input.inputAt(0) instead -- verify which is intended.
// ---------------------------------------------------------------------------
class EliminateAggRule extends TransformationRule { private EliminateAggRule() { super(RuleType.TF_ELIMINATE_AGG, Pattern.create(OperatorType.LOGICAL_AGGR) .addChildren(Pattern.create(OperatorType.PATTERN_LEAF))); } public static EliminateAggRule getInstance() { return INSTANCE; } private static final EliminateAggRule INSTANCE = new EliminateAggRule(); @Override @Override public List<OptExpression> transform(OptExpression input, OptimizerContext context) { LogicalAggregationOperator aggOp = input.getOp().cast(); Map<ColumnRefOperator, ScalarOperator> newProjectMap = new HashMap<>(); for (Map.Entry<ColumnRefOperator, CallOperator> entry : aggOp.getAggregations().entrySet()) { String fnName = entry.getValue().getFnName(); ScalarOperator newOperator = handleAggregationFunction(fnName, entry.getValue()); if (newOperator != null) { newProjectMap.put(entry.getKey(), newOperator); } } LogicalProjectOperator newProjectOp = LogicalProjectOperator.builder().setColumnRefMap(newProjectMap).build(); return List.of(OptExpression.create(newProjectOp, input.inputAt(0).getInputs())); } private ScalarOperator handleAggregationFunction(String fnName, CallOperator callOperator) { if (fnName.equals(FunctionSet.COUNT)) { return rewriteCountFunction(callOperator); } else if (fnName.equals(FunctionSet.SUM) || fnName.equals(FunctionSet.AVG) || fnName.equals(FunctionSet.FIRST_VALUE) || fnName.equals(FunctionSet.MAX) || fnName.equals(FunctionSet.MIN) || fnName.equals(FunctionSet.GROUP_CONCAT)) { return callOperator.getArguments().get(0); } return null; } private ScalarOperator rewriteCountFunction(CallOperator callOperator) { IsNullPredicateOperator isNullPredicateOperator = new IsNullPredicateOperator(callOperator.getArguments().get(0)); ArrayList<ScalarOperator> ifArgs = Lists.newArrayList(); ScalarOperator thenExpr = ConstantOperator.createInt(0); ScalarOperator elseExpr = ConstantOperator.createInt(1); ifArgs.add(isNullPredicateOperator); ifArgs.add(thenExpr); ifArgs.add(elseExpr);
Type[] argumentTypes = ifArgs.stream().map(ScalarOperator::getType).toArray(Type[]::new); Function fn = Expr.getBuiltinFunction(FunctionSet.IF, argumentTypes, Function.CompareMode.IS_NONSTRICT_SUPERTYPE_OF); return new CallOperator(FunctionSet.IF, ScalarType.createType(PrimitiveType.TINYINT), ifArgs, fn); } }
// ---------------------------------------------------------------------------
// EliminateAggRule (revised variant): rewrites a LOGICAL_AGGR node into a
// projection over its direct child. COUNT() with no arguments becomes the
// literal 1; COUNT(x) becomes if(x IS NULL, 0, 1);
// SUM/AVG/FIRST_VALUE/MAX/MIN/GROUP_CONCAT pass their argument through,
// wrapped in a CastOperator when the call's result type differs from the
// argument's type; any other call is kept as-is. Grouping columns are
// forwarded to the projection as identity mappings.
// NOTE(review): the doubled "@Override @Override" before transform() is not
// valid Java (@Override is not repeatable); a member appears to have been
// elided between the two annotations -- confirm against the full source.
// ---------------------------------------------------------------------------
class EliminateAggRule extends TransformationRule { private EliminateAggRule() { super(RuleType.TF_ELIMINATE_AGG, Pattern.create(OperatorType.LOGICAL_AGGR) .addChildren(Pattern.create(OperatorType.PATTERN_LEAF))); } public static EliminateAggRule getInstance() { return INSTANCE; } private static final EliminateAggRule INSTANCE = new EliminateAggRule(); @Override @Override public List<OptExpression> transform(OptExpression input, OptimizerContext context) { LogicalAggregationOperator aggOp = input.getOp().cast(); Map<ColumnRefOperator, ScalarOperator> newProjectMap = new HashMap<>(); for (Map.Entry<ColumnRefOperator, CallOperator> entry : aggOp.getAggregations().entrySet()) { ColumnRefOperator aggColumnRef = entry.getKey(); CallOperator callOperator = entry.getValue(); ScalarOperator newOperator = handleAggregationFunction(callOperator.getFnName(), callOperator); newProjectMap.put(aggColumnRef, newOperator); } aggOp.getGroupingKeys() .forEach(columnRefOperator -> newProjectMap.put(columnRefOperator, columnRefOperator)); LogicalProjectOperator newProjectOp = LogicalProjectOperator.builder().setColumnRefMap(newProjectMap).build(); return List.of(OptExpression.create(newProjectOp, input.inputAt(0))); } private ScalarOperator handleAggregationFunction(String fnName, CallOperator callOperator) { if (fnName.equals(FunctionSet.COUNT)) { return rewriteCountFunction(callOperator); } else if (fnName.equals(FunctionSet.SUM) || fnName.equals(FunctionSet.AVG) || fnName.equals(FunctionSet.FIRST_VALUE) || fnName.equals(FunctionSet.MAX) || fnName.equals(FunctionSet.MIN) || fnName.equals(FunctionSet.GROUP_CONCAT)) { return rewriteCastFunction(callOperator); } return callOperator; } private ScalarOperator rewriteCountFunction(CallOperator callOperator) { if (callOperator.getArguments().isEmpty()) { return ConstantOperator.createInt(1); } IsNullPredicateOperator isNullPredicateOperator = new IsNullPredicateOperator(callOperator.getArguments().get(0)); ArrayList<ScalarOperator> ifArgs
= Lists.newArrayList(); ScalarOperator thenExpr = ConstantOperator.createInt(0); ScalarOperator elseExpr = ConstantOperator.createInt(1); ifArgs.add(isNullPredicateOperator); ifArgs.add(thenExpr); ifArgs.add(elseExpr); Type[] argumentTypes = ifArgs.stream().map(ScalarOperator::getType).toArray(Type[]::new); Function fn = Expr.getBuiltinFunction(FunctionSet.IF, argumentTypes, Function.CompareMode.IS_NONSTRICT_SUPERTYPE_OF); return new CallOperator(FunctionSet.IF, ScalarType.createType(PrimitiveType.TINYINT), ifArgs, fn); } private ScalarOperator rewriteCastFunction(CallOperator callOperator) { ScalarOperator argument = callOperator.getArguments().get(0); if (callOperator.getType().equals(argument.getType())) { return argument; } ScalarOperator scalarOperator = new CastOperator(callOperator.getType(), argument); return scalarOperator; } }
Out of time for today, but I just noticed this can happen in other tests as well. For example, in [this run](https://github.com/apache/beam/pull/13128/checks?check_run_id=1398707923) we see the same flake in `testWhenServerHangsUpEarlyThatClientIsAbleCleanup`. Leaving a note here so I remember to fix it later.
// ---------------------------------------------------------------------------
// testLogging: end-to-end check of BeamFnLoggingClient against an in-process
// gRPC logging service. The client is configured with
// --defaultSdkHarnessLogLevel=OFF and a DEBUG override for "ConfiguredLogger".
// The test verifies the JUL levels are applied (root OFF, ConfiguredLogger
// FINE), logs one filtered record plus two forwarded records, closes the
// client, and then asserts that: the JUL configuration is restored (root INFO,
// override cleared), the client completed the outbound stream, the channel is
// shut down, and exactly the two expected LogEntry protos arrived at the
// server. The server is always torn down in the finally block.
// ---------------------------------------------------------------------------
public void testLogging() throws Exception { AtomicBoolean clientClosedStream = new AtomicBoolean(); Collection<BeamFnApi.LogEntry> values = new ConcurrentLinkedQueue<>(); AtomicReference<StreamObserver<BeamFnApi.LogControl>> outboundServerObserver = new AtomicReference<>(); CallStreamObserver<BeamFnApi.LogEntry.List> inboundServerObserver = TestStreams.withOnNext( (BeamFnApi.LogEntry.List logEntries) -> values.addAll(logEntries.getLogEntriesList())) .withOnCompleted( () -> { clientClosedStream.set(true); outboundServerObserver.get().onCompleted(); }) .build(); Endpoints.ApiServiceDescriptor apiServiceDescriptor = Endpoints.ApiServiceDescriptor.newBuilder() .setUrl(this.getClass().getName() + "-" + UUID.randomUUID().toString()) .build(); Server server = InProcessServerBuilder.forName(apiServiceDescriptor.getUrl()) .addService( new BeamFnLoggingGrpc.BeamFnLoggingImplBase() { @Override public StreamObserver<BeamFnApi.LogEntry.List> logging( StreamObserver<BeamFnApi.LogControl> outboundObserver) { outboundServerObserver.set(outboundObserver); return inboundServerObserver; } }) .build(); server.start(); ManagedChannel channel = InProcessChannelBuilder.forName(apiServiceDescriptor.getUrl()).build(); try { BeamFnLoggingClient client = new BeamFnLoggingClient( PipelineOptionsFactory.fromArgs( new String[] { "--defaultSdkHarnessLogLevel=OFF", "--sdkHarnessLogLevelOverrides={\"ConfiguredLogger\": \"DEBUG\"}" }) .create(), apiServiceDescriptor, (Endpoints.ApiServiceDescriptor descriptor) -> channel); Logger rootLogger = LogManager.getLogManager().getLogger(""); Logger configuredLogger = LogManager.getLogManager().getLogger("ConfiguredLogger"); assertEquals(Level.OFF, rootLogger.getLevel()); assertEquals(Level.FINE, configuredLogger.getLevel()); rootLogger.log(FILTERED_RECORD); configuredLogger.log(TEST_RECORD); configuredLogger.log(TEST_RECORD_WITH_EXCEPTION); client.close(); assertEquals(Level.INFO, rootLogger.getLevel()); assertNull(configuredLogger.getLevel());
assertTrue(clientClosedStream.get()); assertTrue(channel.isShutdown()); assertThat(values, contains(TEST_ENTRY, TEST_ENTRY_WITH_EXCEPTION)); } finally { server.shutdownNow(); } }
assertTrue(channel.isShutdown());
// ---------------------------------------------------------------------------
// testLogging (duplicate variant): exercises BeamFnLoggingClient end to end
// against an in-process gRPC logging service. The client is configured with
// --defaultSdkHarnessLogLevel=OFF and a DEBUG override for "ConfiguredLogger".
// It asserts the JUL levels are applied (root OFF, ConfiguredLogger FINE),
// logs one filtered and two forwarded records, closes the client, and then
// checks that the JUL configuration is restored (root INFO, override cleared),
// the outbound stream was completed, the channel is shut down, and exactly
// the two expected LogEntry protos were received. Server teardown happens in
// the finally block.
// ---------------------------------------------------------------------------
public void testLogging() throws Exception { AtomicBoolean clientClosedStream = new AtomicBoolean(); Collection<BeamFnApi.LogEntry> values = new ConcurrentLinkedQueue<>(); AtomicReference<StreamObserver<BeamFnApi.LogControl>> outboundServerObserver = new AtomicReference<>(); CallStreamObserver<BeamFnApi.LogEntry.List> inboundServerObserver = TestStreams.withOnNext( (BeamFnApi.LogEntry.List logEntries) -> values.addAll(logEntries.getLogEntriesList())) .withOnCompleted( () -> { clientClosedStream.set(true); outboundServerObserver.get().onCompleted(); }) .build(); Endpoints.ApiServiceDescriptor apiServiceDescriptor = Endpoints.ApiServiceDescriptor.newBuilder() .setUrl(this.getClass().getName() + "-" + UUID.randomUUID().toString()) .build(); Server server = InProcessServerBuilder.forName(apiServiceDescriptor.getUrl()) .addService( new BeamFnLoggingGrpc.BeamFnLoggingImplBase() { @Override public StreamObserver<BeamFnApi.LogEntry.List> logging( StreamObserver<BeamFnApi.LogControl> outboundObserver) { outboundServerObserver.set(outboundObserver); return inboundServerObserver; } }) .build(); server.start(); ManagedChannel channel = InProcessChannelBuilder.forName(apiServiceDescriptor.getUrl()).build(); try { BeamFnLoggingClient client = new BeamFnLoggingClient( PipelineOptionsFactory.fromArgs( new String[] { "--defaultSdkHarnessLogLevel=OFF", "--sdkHarnessLogLevelOverrides={\"ConfiguredLogger\": \"DEBUG\"}" }) .create(), apiServiceDescriptor, (Endpoints.ApiServiceDescriptor descriptor) -> channel); Logger rootLogger = LogManager.getLogManager().getLogger(""); Logger configuredLogger = LogManager.getLogManager().getLogger("ConfiguredLogger"); assertEquals(Level.OFF, rootLogger.getLevel()); assertEquals(Level.FINE, configuredLogger.getLevel()); rootLogger.log(FILTERED_RECORD); configuredLogger.log(TEST_RECORD); configuredLogger.log(TEST_RECORD_WITH_EXCEPTION); client.close(); assertEquals(Level.INFO, rootLogger.getLevel()); assertNull(configuredLogger.getLevel());
assertTrue(clientClosedStream.get()); assertTrue(channel.isShutdown()); assertThat(values, contains(TEST_ENTRY, TEST_ENTRY_WITH_EXCEPTION)); } finally { server.shutdownNow(); } }
// ---------------------------------------------------------------------------
// BeamFnLoggingClientTest: fixtures and tests for BeamFnLoggingClient.
// The static block builds the LogRecords (and their expected LogEntry protos)
// used to verify that records logged through java.util.logging are forwarded
// over the Beam Fn logging gRPC service. The two visible tests start an
// in-process gRPC server whose logging() handler either fails immediately
// (onError with INTERNAL "TEST ERROR") or hangs up early (onCompleted), then
// verify in the finally block that the client restored the JUL logger
// configuration (root INFO, "ConfiguredLogger" override cleared) and shut the
// channel down.
// NOTE(review): the doubled "@Test @Test" below is not valid Java (@Test is
// not repeatable in JUnit 4); a test method appears to have been elided
// between the two annotations -- confirm against the full source.
// ---------------------------------------------------------------------------
class BeamFnLoggingClientTest { private static final LogRecord FILTERED_RECORD; private static final LogRecord TEST_RECORD; private static final LogRecord TEST_RECORD_WITH_EXCEPTION; static { FILTERED_RECORD = new LogRecord(Level.SEVERE, "FilteredMessage"); TEST_RECORD = new LogRecord(Level.FINE, "Message"); TEST_RECORD.setLoggerName("LoggerName"); TEST_RECORD.setMillis(1234567890L); TEST_RECORD.setThreadID(12345); TEST_RECORD_WITH_EXCEPTION = new LogRecord(Level.WARNING, "MessageWithException"); TEST_RECORD_WITH_EXCEPTION.setLoggerName("LoggerName"); TEST_RECORD_WITH_EXCEPTION.setMillis(1234567890L); TEST_RECORD_WITH_EXCEPTION.setThreadID(12345); TEST_RECORD_WITH_EXCEPTION.setThrown(new RuntimeException("ExceptionMessage")); } private static final BeamFnApi.LogEntry TEST_ENTRY = BeamFnApi.LogEntry.newBuilder() .setSeverity(BeamFnApi.LogEntry.Severity.Enum.DEBUG) .setMessage("Message") .setThread("12345") .setTimestamp(Timestamp.newBuilder().setSeconds(1234567).setNanos(890000000).build()) .setLogLocation("LoggerName") .build(); private static final BeamFnApi.LogEntry TEST_ENTRY_WITH_EXCEPTION = BeamFnApi.LogEntry.newBuilder() .setSeverity(BeamFnApi.LogEntry.Severity.Enum.WARN) .setMessage("MessageWithException") .setTrace(getStackTraceAsString(TEST_RECORD_WITH_EXCEPTION.getThrown())) .setThread("12345") .setTimestamp(Timestamp.newBuilder().setSeconds(1234567).setNanos(890000000).build()) .setLogLocation("LoggerName") .build(); @Rule public ExpectedException thrown = ExpectedException.none(); @Test @Test public void testWhenServerFailsThatClientIsAbleToCleanup() throws Exception { Collection<BeamFnApi.LogEntry> values = new ConcurrentLinkedQueue<>(); AtomicReference<StreamObserver<BeamFnApi.LogControl>> outboundServerObserver = new AtomicReference<>(); CallStreamObserver<BeamFnApi.LogEntry.List> inboundServerObserver = TestStreams.withOnNext( (BeamFnApi.LogEntry.List logEntries) -> values.addAll(logEntries.getLogEntriesList())) .build();
Endpoints.ApiServiceDescriptor apiServiceDescriptor = Endpoints.ApiServiceDescriptor.newBuilder() .setUrl(this.getClass().getName() + "-" + UUID.randomUUID().toString()) .build(); Server server = InProcessServerBuilder.forName(apiServiceDescriptor.getUrl()) .addService( new BeamFnLoggingGrpc.BeamFnLoggingImplBase() { @Override public StreamObserver<BeamFnApi.LogEntry.List> logging( StreamObserver<BeamFnApi.LogControl> outboundObserver) { outboundServerObserver.set(outboundObserver); outboundObserver.onError( Status.INTERNAL.withDescription("TEST ERROR").asException()); return inboundServerObserver; } }) .build(); server.start(); ManagedChannel channel = InProcessChannelBuilder.forName(apiServiceDescriptor.getUrl()).build(); try { BeamFnLoggingClient client = new BeamFnLoggingClient( PipelineOptionsFactory.fromArgs( new String[] { "--defaultSdkHarnessLogLevel=OFF", "--sdkHarnessLogLevelOverrides={\"ConfiguredLogger\": \"DEBUG\"}" }) .create(), apiServiceDescriptor, (Endpoints.ApiServiceDescriptor descriptor) -> channel); thrown.expectMessage("TEST ERROR"); client.close(); } finally { assertEquals(Level.INFO, LogManager.getLogManager().getLogger("").getLevel()); assertNull(LogManager.getLogManager().getLogger("ConfiguredLogger").getLevel()); assertTrue(channel.isShutdown()); server.shutdownNow(); } } @Test public void testWhenServerHangsUpEarlyThatClientIsAbleCleanup() throws Exception { Collection<BeamFnApi.LogEntry> values = new ConcurrentLinkedQueue<>(); AtomicReference<StreamObserver<BeamFnApi.LogControl>> outboundServerObserver = new AtomicReference<>(); CallStreamObserver<BeamFnApi.LogEntry.List> inboundServerObserver = TestStreams.withOnNext( (BeamFnApi.LogEntry.List logEntries) -> values.addAll(logEntries.getLogEntriesList())) .build(); Endpoints.ApiServiceDescriptor apiServiceDescriptor = Endpoints.ApiServiceDescriptor.newBuilder() .setUrl(this.getClass().getName() + "-" + UUID.randomUUID().toString()) .build(); Server server =
InProcessServerBuilder.forName(apiServiceDescriptor.getUrl()) .addService( new BeamFnLoggingGrpc.BeamFnLoggingImplBase() { @Override public StreamObserver<BeamFnApi.LogEntry.List> logging( StreamObserver<BeamFnApi.LogControl> outboundObserver) { outboundServerObserver.set(outboundObserver); outboundObserver.onCompleted(); return inboundServerObserver; } }) .build(); server.start(); ManagedChannel channel = InProcessChannelBuilder.forName(apiServiceDescriptor.getUrl()).build(); try { BeamFnLoggingClient client = new BeamFnLoggingClient( PipelineOptionsFactory.fromArgs( new String[] { "--defaultSdkHarnessLogLevel=OFF", "--sdkHarnessLogLevelOverrides={\"ConfiguredLogger\": \"DEBUG\"}" }) .create(), apiServiceDescriptor, (Endpoints.ApiServiceDescriptor descriptor) -> channel); client.close(); } finally { assertEquals(Level.INFO, LogManager.getLogManager().getLogger("").getLevel()); assertNull(LogManager.getLogManager().getLogger("ConfiguredLogger").getLevel()); assertTrue(channel.isShutdown()); server.shutdownNow(); } } }
// ---------------------------------------------------------------------------
// BeamFnLoggingClientTest (revised variant): fixtures and tests for
// BeamFnLoggingClient. The static block builds the LogRecords (and expected
// LogEntry protos) used to verify that JUL records are forwarded over the
// Beam Fn logging gRPC service. The two visible tests start an in-process
// gRPC server whose logging() handler either fails immediately (onError with
// INTERNAL "TEST ERROR") or hangs up early (onCompleted), then verify the
// client restored the JUL logger configuration and shut the channel down.
// FIX: in testWhenServerFailsThatClientIsAbleToCleanup, the second
// assertNotNull asserted rootLogger twice (copy-paste); it now asserts
// configuredLogger, matching its own message and the dereference that follows.
// NOTE(review): the doubled "@Test @Test" below is not valid Java (@Test is
// not repeatable in JUnit 4); a test method appears to have been elided
// between the two annotations -- confirm against the full source.
// ---------------------------------------------------------------------------
class BeamFnLoggingClientTest { private static final LogRecord FILTERED_RECORD; private static final LogRecord TEST_RECORD; private static final LogRecord TEST_RECORD_WITH_EXCEPTION; static { FILTERED_RECORD = new LogRecord(Level.SEVERE, "FilteredMessage"); TEST_RECORD = new LogRecord(Level.FINE, "Message"); TEST_RECORD.setLoggerName("LoggerName"); TEST_RECORD.setMillis(1234567890L); TEST_RECORD.setThreadID(12345); TEST_RECORD_WITH_EXCEPTION = new LogRecord(Level.WARNING, "MessageWithException"); TEST_RECORD_WITH_EXCEPTION.setLoggerName("LoggerName"); TEST_RECORD_WITH_EXCEPTION.setMillis(1234567890L); TEST_RECORD_WITH_EXCEPTION.setThreadID(12345); TEST_RECORD_WITH_EXCEPTION.setThrown(new RuntimeException("ExceptionMessage")); } private static final BeamFnApi.LogEntry TEST_ENTRY = BeamFnApi.LogEntry.newBuilder() .setSeverity(BeamFnApi.LogEntry.Severity.Enum.DEBUG) .setMessage("Message") .setThread("12345") .setTimestamp(Timestamp.newBuilder().setSeconds(1234567).setNanos(890000000).build()) .setLogLocation("LoggerName") .build(); private static final BeamFnApi.LogEntry TEST_ENTRY_WITH_EXCEPTION = BeamFnApi.LogEntry.newBuilder() .setSeverity(BeamFnApi.LogEntry.Severity.Enum.WARN) .setMessage("MessageWithException") .setTrace(getStackTraceAsString(TEST_RECORD_WITH_EXCEPTION.getThrown())) .setThread("12345") .setTimestamp(Timestamp.newBuilder().setSeconds(1234567).setNanos(890000000).build()) .setLogLocation("LoggerName") .build(); @Rule public ExpectedException thrown = ExpectedException.none(); @Test @Test public void testWhenServerFailsThatClientIsAbleToCleanup() throws Exception { Collection<BeamFnApi.LogEntry> values = new ConcurrentLinkedQueue<>(); AtomicReference<StreamObserver<BeamFnApi.LogControl>> outboundServerObserver = new AtomicReference<>(); CallStreamObserver<BeamFnApi.LogEntry.List> inboundServerObserver = TestStreams.withOnNext( (BeamFnApi.LogEntry.List logEntries) -> values.addAll(logEntries.getLogEntriesList())) .build();
Endpoints.ApiServiceDescriptor apiServiceDescriptor = Endpoints.ApiServiceDescriptor.newBuilder() .setUrl(this.getClass().getName() + "-" + UUID.randomUUID().toString()) .build(); Server server = InProcessServerBuilder.forName(apiServiceDescriptor.getUrl()) .addService( new BeamFnLoggingGrpc.BeamFnLoggingImplBase() { @Override public StreamObserver<BeamFnApi.LogEntry.List> logging( StreamObserver<BeamFnApi.LogControl> outboundObserver) { outboundServerObserver.set(outboundObserver); outboundObserver.onError( Status.INTERNAL.withDescription("TEST ERROR").asException()); return inboundServerObserver; } }) .build(); server.start(); ManagedChannel channel = InProcessChannelBuilder.forName(apiServiceDescriptor.getUrl()).build(); Logger rootLogger = null; Logger configuredLogger = null; try { BeamFnLoggingClient client = new BeamFnLoggingClient( PipelineOptionsFactory.fromArgs( new String[] { "--defaultSdkHarnessLogLevel=OFF", "--sdkHarnessLogLevelOverrides={\"ConfiguredLogger\": \"DEBUG\"}" }) .create(), apiServiceDescriptor, (Endpoints.ApiServiceDescriptor descriptor) -> channel); rootLogger = LogManager.getLogManager().getLogger(""); configuredLogger = LogManager.getLogManager().getLogger("ConfiguredLogger"); thrown.expectMessage("TEST ERROR"); client.close(); } finally { assertNotNull("rootLogger should be initialized before exception", rootLogger); assertNotNull("configuredLogger should be initialized before exception", configuredLogger); /* FIXED: was rootLogger */ assertEquals(Level.INFO, rootLogger.getLevel()); assertNull(configuredLogger.getLevel()); assertTrue(channel.isShutdown()); server.shutdownNow(); } } @Test public void testWhenServerHangsUpEarlyThatClientIsAbleCleanup() throws Exception { Collection<BeamFnApi.LogEntry> values = new ConcurrentLinkedQueue<>(); AtomicReference<StreamObserver<BeamFnApi.LogControl>> outboundServerObserver = new AtomicReference<>(); CallStreamObserver<BeamFnApi.LogEntry.List> inboundServerObserver = TestStreams.withOnNext( (BeamFnApi.LogEntry.List logEntries)
-> values.addAll(logEntries.getLogEntriesList())) .build(); Endpoints.ApiServiceDescriptor apiServiceDescriptor = Endpoints.ApiServiceDescriptor.newBuilder() .setUrl(this.getClass().getName() + "-" + UUID.randomUUID().toString()) .build(); Server server = InProcessServerBuilder.forName(apiServiceDescriptor.getUrl()) .addService( new BeamFnLoggingGrpc.BeamFnLoggingImplBase() { @Override public StreamObserver<BeamFnApi.LogEntry.List> logging( StreamObserver<BeamFnApi.LogControl> outboundObserver) { outboundServerObserver.set(outboundObserver); outboundObserver.onCompleted(); return inboundServerObserver; } }) .build(); server.start(); ManagedChannel channel = InProcessChannelBuilder.forName(apiServiceDescriptor.getUrl()).build(); try { BeamFnLoggingClient client = new BeamFnLoggingClient( PipelineOptionsFactory.fromArgs( new String[] { "--defaultSdkHarnessLogLevel=OFF", "--sdkHarnessLogLevelOverrides={\"ConfiguredLogger\": \"DEBUG\"}" }) .create(), apiServiceDescriptor, (Endpoints.ApiServiceDescriptor descriptor) -> channel); Logger rootLogger = LogManager.getLogManager().getLogger(""); Logger configuredLogger = LogManager.getLogManager().getLogger("ConfiguredLogger"); client.close(); assertEquals(Level.INFO, rootLogger.getLevel()); assertNull(configuredLogger.getLevel()); } finally { assertTrue(channel.isShutdown()); server.shutdownNow(); } } }
It turned out that a map initializer literal can contain string keys. Since map and record literals share the same representation, we have to check for both key forms and act accordingly — especially in cases like `map m = { foo: "value", "foo": "value" }`. Will update the code to handle this edge case.
// Analyzes a record literal: first analyzes every value expression, then
// reports DUPLICATE_RECORD_LITERAL for repeated keys. Only identifier keys
// (NodeKind.SIMPLE_VARIABLE_REF) are considered when detecting duplicates.
// NOTE(review): per the discussion note preceding this method, map literals
// may also use string-literal keys (e.g. { foo: "v", "foo": "v" });
// NodeKind.LITERAL keys are not checked here, so such duplicates go
// undetected. TODO: handle literal keys as well.
public void visit(BLangRecordLiteral recordLiteral) { List<BLangRecordLiteral.BLangRecordKeyValue> keyValuePairs = recordLiteral.keyValuePairs; keyValuePairs.forEach(kv -> { analyzeExpr(kv.valueExpr); }); Set<String> names = new HashSet<>(); for (BLangRecordLiteral.BLangRecordKeyValue recFieldDecl : keyValuePairs) { BLangExpression keyExpr = recFieldDecl.getKey(); if (keyExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { BLangSimpleVarRef keyRef = (BLangSimpleVarRef) keyExpr; if (names.contains(keyRef.variableName.value)) { TypeKind assigneeType = recordLiteral.parent.type.getKind(); this.dlog.error(keyExpr.pos, DiagnosticCode.DUPLICATE_RECORD_LITERAL, assigneeType.typeName(), keyRef); } names.add(keyRef.variableName.value); } } }
if (keyExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
/**
 * Analyzes a record/map literal: analyzes every value expression, then
 * reports DUPLICATE_KEY_IN_RECORD_LITERAL for each repeated key. Both
 * identifier keys ({ foo: ... }) and string-literal keys ({ "foo": ... })
 * are checked, since a map literal may mix the two forms for the same key
 * (e.g. { foo: "v", "foo": "v" }).
 */
public void visit(BLangRecordLiteral recordLiteral) {
    List<BLangRecordLiteral.BLangRecordKeyValue> keyValuePairs = recordLiteral.keyValuePairs;
    keyValuePairs.forEach(kv -> {
        analyzeExpr(kv.valueExpr);
    });
    // FIX: the previous TreeSet comparator ((l, r) -> l.equals(r) ? 0 : 1) is
    // not a valid total order (asymmetric and non-transitive), so
    // TreeSet.contains() only probed one branch and could miss duplicates.
    // A HashSet relies on equals()/hashCode() of the key values and is correct.
    Set<Object> names = new HashSet<>();
    for (BLangRecordLiteral.BLangRecordKeyValue recFieldDecl : keyValuePairs) {
        BLangExpression key = recFieldDecl.getKey();
        // Extract the comparable key value for both supported key forms.
        Object keyName = null;
        if (key.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
            keyName = ((BLangSimpleVarRef) key).variableName.value;
        } else if (key.getKind() == NodeKind.LITERAL) {
            keyName = ((BLangLiteral) key).value;
        }
        if (keyName == null) {
            // Unsupported key form (or a literal with no value): nothing to check.
            continue;
        }
        if (names.contains(keyName)) {
            String assigneeType = recordLiteral.parent.type.getKind().typeName();
            this.dlog.error(key.pos, DiagnosticCode.DUPLICATE_KEY_IN_RECORD_LITERAL,
                    assigneeType, key);
        }
        names.add(keyName);
    }
}
class CodeAnalyzer extends BLangNodeVisitor { private static final CompilerContext.Key<CodeAnalyzer> CODE_ANALYZER_KEY = new CompilerContext.Key<>(); private int loopCount; private int transactionCount; private boolean statementReturns; private boolean lastStatement; private boolean withinRetryBlock; private int forkJoinCount; private int workerCount; private SymbolTable symTable; private Types types; private BLangDiagnosticLog dlog; private TypeChecker typeChecker; private Stack<WorkerActionSystem> workerActionSystemStack = new Stack<>(); private Stack<Boolean> loopWithintransactionCheckStack = new Stack<>(); private Stack<Boolean> returnWithintransactionCheckStack = new Stack<>(); private Stack<Boolean> doneWithintransactionCheckStack = new Stack<>(); private Stack<Boolean> transactionWithinHandlerCheckStack = new Stack<>(); private BLangNode parent; private Names names; private SymbolEnv env; public static CodeAnalyzer getInstance(CompilerContext context) { CodeAnalyzer codeGenerator = context.get(CODE_ANALYZER_KEY); if (codeGenerator == null) { codeGenerator = new CodeAnalyzer(context); } return codeGenerator; } public CodeAnalyzer(CompilerContext context) { context.put(CODE_ANALYZER_KEY, this); this.symTable = SymbolTable.getInstance(context); this.types = Types.getInstance(context); this.dlog = BLangDiagnosticLog.getInstance(context); this.typeChecker = TypeChecker.getInstance(context); this.names = Names.getInstance(context); } private void resetFunction() { this.resetStatementReturns(); } private void resetStatementReturns() { this.statementReturns = false; } private void resetLastStatement() { this.lastStatement = false; } public BLangPackage analyze(BLangPackage pkgNode) { pkgNode.accept(this); return pkgNode; } @Override public void visit(BLangPackage pkgNode) { if (pkgNode.completedPhases.contains(CompilerPhase.CODE_ANALYZE)) { return; } parent = pkgNode; SymbolEnv pkgEnv = symTable.pkgEnvMap.get(pkgNode.symbol); 
pkgNode.topLevelNodes.forEach(topLevelNode -> analyzeNode((BLangNode) topLevelNode, pkgEnv)); pkgNode.completedPhases.add(CompilerPhase.CODE_ANALYZE); parent = null; } private void analyzeNode(BLangNode node, SymbolEnv env) { SymbolEnv prevEnv = this.env; this.env = env; BLangNode myParent = parent; node.parent = parent; parent = node; node.accept(this); parent = myParent; this.env = prevEnv; } @Override public void visit(BLangCompilationUnit compUnitNode) { compUnitNode.topLevelNodes.forEach(e -> analyzeNode((BLangNode) e, env)); } public void visit(BLangTypeDefinition typeDefinition) { if (typeDefinition.typeNode.getKind() == NodeKind.OBJECT_TYPE || typeDefinition.typeNode.getKind() == NodeKind.RECORD_TYPE) { analyzeNode(typeDefinition.typeNode, env); } if (!Symbols.isPublic(typeDefinition.symbol) || typeDefinition.symbol.type != null && TypeKind.FINITE.equals(typeDefinition.symbol.type.getKind())) { return; } analyseType(typeDefinition.symbol.type, typeDefinition.pos); } @Override public void visit(BLangFunction funcNode) { if (funcNode.symbol.isTransactionHandler) { transactionWithinHandlerCheckStack.push(true); } this.returnWithintransactionCheckStack.push(true); this.doneWithintransactionCheckStack.push(true); this.validateMainFunction(funcNode); SymbolEnv funcEnv = SymbolEnv.createFunctionEnv(funcNode, funcNode.symbol.scope, env); this.visitInvocable(funcNode, funcEnv); this.returnWithintransactionCheckStack.pop(); this.doneWithintransactionCheckStack.pop(); if (funcNode.symbol.isTransactionHandler) { transactionWithinHandlerCheckStack.pop(); } } private void visitInvocable(BLangInvokableNode invNode, SymbolEnv invokableEnv) { this.resetFunction(); try { this.initNewWorkerActionSystem(); if (Symbols.isNative(invNode.symbol)) { return; } boolean invokableReturns = invNode.returnTypeNode.type != symTable.nilType; if (invNode.workers.isEmpty()) { if (isPublicInvokableNode(invNode)) { analyzeNode(invNode.returnTypeNode, invokableEnv); } /* the body can be null 
in the case of Object type function declarations */ if (invNode.body != null) { analyzeNode(invNode.body, invokableEnv); /* the function returns, but none of the statements surely returns */ if (invokableReturns && !this.statementReturns) { this.dlog.error(invNode.pos, DiagnosticCode.INVOKABLE_MUST_RETURN, invNode.getKind().toString().toLowerCase()); } } } else { boolean workerReturns = false; for (BLangWorker worker : invNode.workers) { analyzeNode(worker, invokableEnv); workerReturns = workerReturns || this.statementReturns; this.resetStatementReturns(); } if (invokableReturns && !workerReturns) { this.dlog.error(invNode.pos, DiagnosticCode.ATLEAST_ONE_WORKER_MUST_RETURN, invNode.getKind().toString().toLowerCase()); } } } finally { this.finalizeCurrentWorkerActionSystem(); } } private boolean isPublicInvokableNode(BLangInvokableNode invNode) { return Symbols.isPublic(invNode.symbol) && (SymbolKind.PACKAGE.equals(invNode.symbol.owner.getKind()) || Symbols.isPublic(invNode.symbol.owner)); } @Override public void visit(BLangForkJoin forkJoin) { this.forkJoinCount++; this.initNewWorkerActionSystem(); this.checkStatementExecutionValidity(forkJoin); forkJoin.workers.forEach(e -> analyzeNode(e, env)); analyzeNode(forkJoin.joinedBody, env); if (forkJoin.timeoutBody != null) { boolean joinReturns = this.statementReturns; this.resetStatementReturns(); analyzeNode(forkJoin.timeoutBody, env); this.statementReturns = joinReturns && this.statementReturns; } this.checkForkJoinWorkerCount(forkJoin); this.finalizeCurrentWorkerActionSystem(); this.forkJoinCount--; analyzeExpr(forkJoin.timeoutExpression); } private boolean inForkJoin() { return this.forkJoinCount > 0; } private void checkForkJoinWorkerCount(BLangForkJoin forkJoin) { if (forkJoin.joinType == ForkJoinNode.JoinType.SOME) { int wc = forkJoin.joinedWorkers.size(); if (wc == 0) { wc = forkJoin.workers.size(); } if (forkJoin.joinedWorkerCount > wc) { this.dlog.error(forkJoin.pos, 
DiagnosticCode.FORK_JOIN_INVALID_WORKER_COUNT); } } } private boolean inWorker() { return this.workerCount > 0; } @Override public void visit(BLangWorker worker) { this.workerCount++; this.workerActionSystemStack.peek().startWorkerActionStateMachine(worker.name.value, worker.pos); analyzeNode(worker.body, env); this.workerActionSystemStack.peek().endWorkerActionStateMachine(); this.workerCount--; } @Override public void visit(BLangEndpoint endpointNode) { } @Override public void visit(BLangTransaction transactionNode) { this.checkStatementExecutionValidity(transactionNode); if (!isValidTransactionBlock()) { this.dlog.error(transactionNode.pos, DiagnosticCode.TRANSACTION_CANNOT_BE_USED_WITHIN_HANDLER); return; } this.loopWithintransactionCheckStack.push(false); this.returnWithintransactionCheckStack.push(false); this.doneWithintransactionCheckStack.push(false); this.transactionCount++; analyzeNode(transactionNode.transactionBody, env); this.transactionCount--; this.resetLastStatement(); if (transactionNode.onRetryBody != null) { this.withinRetryBlock = true; analyzeNode(transactionNode.onRetryBody, env); this.resetStatementReturns(); this.resetLastStatement(); this.withinRetryBlock = false; } this.returnWithintransactionCheckStack.pop(); this.loopWithintransactionCheckStack.pop(); this.doneWithintransactionCheckStack.pop(); analyzeExpr(transactionNode.retryCount); analyzeExpr(transactionNode.onCommitFunction); analyzeExpr(transactionNode.onAbortFunction); } @Override public void visit(BLangAbort abortNode) { if (this.transactionCount == 0) { this.dlog.error(abortNode.pos, DiagnosticCode.ABORT_CANNOT_BE_OUTSIDE_TRANSACTION_BLOCK); return; } this.lastStatement = true; } @Override public void visit(BLangDone doneNode) { if (checkReturnValidityInTransaction()) { this.dlog.error(doneNode.pos, DiagnosticCode.DONE_CANNOT_BE_USED_TO_EXIT_TRANSACTION); return; } this.lastStatement = true; } @Override public void visit(BLangRetry retryNode) { if (this.transactionCount == 0) { 
this.dlog.error(retryNode.pos, DiagnosticCode.FAIL_CANNOT_BE_OUTSIDE_TRANSACTION_BLOCK); return; } this.lastStatement = true; } private void checkUnreachableCode(BLangStatement stmt) { if (this.statementReturns) { this.dlog.error(stmt.pos, DiagnosticCode.UNREACHABLE_CODE); this.resetStatementReturns(); } if (lastStatement) { this.dlog.error(stmt.pos, DiagnosticCode.UNREACHABLE_CODE); this.resetLastStatement(); } } private void checkStatementExecutionValidity(BLangStatement stmt) { this.checkUnreachableCode(stmt); } @Override public void visit(BLangBlockStmt blockNode) { final SymbolEnv blockEnv = SymbolEnv.createBlockEnv(blockNode, env); blockNode.stmts.forEach(e -> analyzeNode(e, blockEnv)); this.resetLastStatement(); } @Override public void visit(BLangReturn returnStmt) { this.checkStatementExecutionValidity(returnStmt); if (this.env.enclInvokable.getKind() == NodeKind.RESOURCE) { this.dlog.error(returnStmt.pos, DiagnosticCode.RETURN_STMT_NOT_VALID_IN_RESOURCE); return; } if (this.inForkJoin() && this.inWorker()) { this.dlog.error(returnStmt.pos, DiagnosticCode.FORK_JOIN_WORKER_CANNOT_RETURN); return; } if (checkReturnValidityInTransaction()) { this.dlog.error(returnStmt.pos, DiagnosticCode.RETURN_CANNOT_BE_USED_TO_EXIT_TRANSACTION); return; } this.statementReturns = true; analyzeExpr(returnStmt.expr); } @Override public void visit(BLangIf ifStmt) { this.checkStatementExecutionValidity(ifStmt); analyzeNode(ifStmt.body, env); boolean ifStmtReturns = this.statementReturns; this.resetStatementReturns(); if (ifStmt.elseStmt != null) { analyzeNode(ifStmt.elseStmt, env); this.statementReturns = ifStmtReturns && this.statementReturns; } analyzeExpr(ifStmt.expr); } @Override public void visit(BLangMatch matchStmt) { this.returnWithintransactionCheckStack.push(true); boolean unmatchedExprTypesAvailable = false; analyzeExpr(matchStmt.expr); List<BType> unmatchedExprTypes = new ArrayList<>(); for (BType exprType : matchStmt.exprTypes) { boolean assignable = false; for 
(BLangMatchStmtPatternClause pattern : matchStmt.patternClauses) { BType patternType = pattern.variable.type; if (exprType.tag == TypeTags.ERROR || patternType.tag == TypeTags.ERROR) { return; } assignable = this.types.isAssignable(exprType, patternType); if (assignable) { pattern.matchedTypesDirect.add(exprType); break; } else if (exprType.tag == TypeTags.ANY) { pattern.matchedTypesIndirect.add(exprType); } else if (exprType.tag == TypeTags.JSON && this.types.isAssignable(patternType, exprType)) { pattern.matchedTypesIndirect.add(exprType); } else if ((exprType.tag == TypeTags.OBJECT || exprType.tag == TypeTags.RECORD) && this.types.isAssignable(patternType, exprType)) { pattern.matchedTypesIndirect.add(exprType); } else if (exprType.tag == TypeTags.BYTE && patternType.tag == TypeTags.INT) { pattern.matchedTypesDirect.add(exprType); break; } else { } } if (!assignable) { unmatchedExprTypes.add(exprType); } } if (!unmatchedExprTypes.isEmpty()) { unmatchedExprTypesAvailable = true; dlog.error(matchStmt.pos, DiagnosticCode.MATCH_STMT_CANNOT_GUARANTEE_A_MATCHING_PATTERN, unmatchedExprTypes); } boolean matchedPatternsAvailable = false; for (int i = matchStmt.patternClauses.size() - 1; i >= 0; i--) { BLangMatchStmtPatternClause pattern = matchStmt.patternClauses.get(i); if (pattern.matchedTypesDirect.isEmpty() && pattern.matchedTypesIndirect.isEmpty()) { if (matchedPatternsAvailable) { dlog.error(pattern.pos, DiagnosticCode.MATCH_STMT_UNMATCHED_PATTERN); } else { dlog.error(pattern.pos, DiagnosticCode.MATCH_STMT_UNREACHABLE_PATTERN); } } else { matchedPatternsAvailable = true; } } if (!unmatchedExprTypesAvailable) { this.checkStatementExecutionValidity(matchStmt); boolean matchStmtReturns = true; for (BLangMatchStmtPatternClause patternClause : matchStmt.patternClauses) { analyzeNode(patternClause.body, env); matchStmtReturns = matchStmtReturns && this.statementReturns; this.resetStatementReturns(); } this.statementReturns = matchStmtReturns; } 
this.returnWithintransactionCheckStack.pop(); } @Override public void visit(BLangForeach foreach) { this.loopWithintransactionCheckStack.push(true); this.checkStatementExecutionValidity(foreach); this.loopCount++; foreach.body.stmts.forEach(e -> analyzeNode(e, env)); this.loopCount--; this.resetLastStatement(); this.loopWithintransactionCheckStack.pop(); analyzeExpr(foreach.collection); analyzeExprs(foreach.varRefs); } @Override public void visit(BLangWhile whileNode) { this.loopWithintransactionCheckStack.push(true); this.checkStatementExecutionValidity(whileNode); this.loopCount++; whileNode.body.stmts.forEach(e -> analyzeNode(e, env)); this.loopCount--; this.resetLastStatement(); this.loopWithintransactionCheckStack.pop(); analyzeExpr(whileNode.expr); } @Override public void visit(BLangLock lockNode) { this.checkStatementExecutionValidity(lockNode); lockNode.body.stmts.forEach(e -> analyzeNode(e, env)); } @Override public void visit(BLangContinue continueNode) { this.checkStatementExecutionValidity(continueNode); if (this.loopCount == 0) { this.dlog.error(continueNode.pos, DiagnosticCode.CONTINUE_CANNOT_BE_OUTSIDE_LOOP); return; } if (checkNextBreakValidityInTransaction()) { this.dlog.error(continueNode.pos, DiagnosticCode.CONTINUE_CANNOT_BE_USED_TO_EXIT_TRANSACTION); return; } this.lastStatement = true; } public void visit(BLangImportPackage importPkgNode) { BPackageSymbol pkgSymbol = importPkgNode.symbol; SymbolEnv pkgEnv = this.symTable.pkgEnvMap.get(pkgSymbol); if (pkgEnv == null) { return; } analyzeNode(pkgEnv.node, env); } public void visit(BLangXMLNS xmlnsNode) { /* ignore */ } public void visit(BLangService serviceNode) { SymbolEnv serviceEnv = SymbolEnv.createServiceEnv(serviceNode, serviceNode.symbol.scope, env); serviceNode.resources.forEach(res -> analyzeNode(res, serviceEnv)); } public void visit(BLangResource resourceNode) { SymbolEnv resourceEnv = SymbolEnv.createResourceActionSymbolEnv(resourceNode, resourceNode.symbol.scope, env); 
this.visitInvocable(resourceNode, resourceEnv); } public void visit(BLangForever foreverStatement) { this.lastStatement = true; } public void visit(BLangAction actionNode) { /* not used, covered with functions */ } public void visit(BLangObjectTypeNode objectTypeNode) { if (objectTypeNode.isFieldAnalyseRequired && Symbols.isPublic(objectTypeNode.symbol)) { objectTypeNode.fields.stream() .filter(field -> (Symbols.isPublic(field.symbol))) .forEach(field -> analyzeNode(field, this.env)); } objectTypeNode.functions.forEach(e -> this.analyzeNode(e, this.env)); } private void analyseType(BType type, DiagnosticPos pos) { if (type == null || type.tsymbol == null) { return; } BSymbol symbol = type.tsymbol; if (Symbols.isPrivate(symbol)) { dlog.error(pos, DiagnosticCode.ATTEMPT_EXPOSE_NON_PUBLIC_SYMBOL, symbol.name); } } public void visit(BLangRecordTypeNode recordTypeNode) { if (recordTypeNode.isFieldAnalyseRequired && Symbols.isPublic(recordTypeNode.symbol)) { recordTypeNode.fields.stream() .filter(field -> (Symbols.isPublic(field.symbol))) .forEach(field -> analyzeNode(field, this.env)); } } public void visit(BLangVariable varNode) { analyzeExpr(varNode.expr); if (Objects.isNull(varNode.symbol) || !Symbols.isPublic(varNode.symbol)) { return; } int ownerSymTag = this.env.scope.owner.tag; if (((ownerSymTag & SymTag.INVOKABLE) != SymTag.INVOKABLE) || (varNode.type != null && varNode.parent != null && NodeKind.FUNCTION.equals(varNode.parent.getKind()))) { analyseType(varNode.type, varNode.pos); } } public void visit(BLangIdentifier identifierNode) { /* ignore */ } public void visit(BLangAnnotation annotationNode) { /* ignore */ } public void visit(BLangAnnotationAttachment annAttachmentNode) { /* ignore */ } public void visit(BLangVariableDef varDefNode) { this.checkStatementExecutionValidity(varDefNode); analyzeNode(varDefNode.var, env); } public void visit(BLangCompoundAssignment compoundAssignment) { this.checkStatementExecutionValidity(compoundAssignment); 
analyzeExpr(compoundAssignment.varRef); analyzeExpr(compoundAssignment.expr); } public void visit(BLangPostIncrement postIncrement) { this.checkStatementExecutionValidity(postIncrement); analyzeExpr(postIncrement.varRef); analyzeExpr(postIncrement.increment); } public void visit(BLangAssignment assignNode) { this.checkStatementExecutionValidity(assignNode); analyzeExpr(assignNode.varRef); analyzeExpr(assignNode.expr); } @Override public void visit(BLangTupleDestructure stmt) { this.checkStatementExecutionValidity(stmt); analyzeExprs(stmt.varRefs); analyzeExpr(stmt.expr); } public void visit(BLangBreak breakNode) { this.checkStatementExecutionValidity(breakNode); if (this.loopCount == 0) { this.dlog.error(breakNode.pos, DiagnosticCode.BREAK_CANNOT_BE_OUTSIDE_LOOP); return; } if (checkNextBreakValidityInTransaction()) { this.dlog.error(breakNode.pos, DiagnosticCode.BREAK_CANNOT_BE_USED_TO_EXIT_TRANSACTION); return; } this.lastStatement = true; } public void visit(BLangThrow throwNode) { this.checkStatementExecutionValidity(throwNode); this.statementReturns = true; analyzeExpr(throwNode.expr); } public void visit(BLangXMLNSStatement xmlnsStmtNode) { this.checkStatementExecutionValidity(xmlnsStmtNode); } public void visit(BLangExpressionStmt exprStmtNode) { this.checkStatementExecutionValidity(exprStmtNode); analyzeExpr(exprStmtNode.expr); validateExprStatementExpression(exprStmtNode); } private void validateExprStatementExpression(BLangExpressionStmt exprStmtNode) { BLangExpression expr = exprStmtNode.expr; while (expr.getKind() == NodeKind.MATCH_EXPRESSION || expr.getKind() == NodeKind.CHECK_EXPR) { if (expr.getKind() == NodeKind.MATCH_EXPRESSION) { expr = ((BLangMatchExpression) expr).expr; } else if (expr.getKind() == NodeKind.CHECK_EXPR) { expr = ((BLangCheckedExpr) expr).expr; } } if (expr.getKind() == NodeKind.INVOCATION || expr.getKind() == NodeKind.AWAIT_EXPR) { return; } if (expr.type == symTable.nilType) { dlog.error(exprStmtNode.pos, 
DiagnosticCode.INVALID_EXPR_STATEMENT); } } public void visit(BLangTryCatchFinally tryNode) { this.checkStatementExecutionValidity(tryNode); analyzeNode(tryNode.tryBody, env); boolean tryCatchReturns = this.statementReturns; this.resetStatementReturns(); List<BType> caughtTypes = new ArrayList<>(); for (BLangCatch bLangCatch : tryNode.getCatchBlocks()) { if (caughtTypes.contains(bLangCatch.getParameter().type)) { dlog.error(bLangCatch.getParameter().pos, DiagnosticCode.DUPLICATED_ERROR_CATCH, bLangCatch.getParameter().type); } caughtTypes.add(bLangCatch.getParameter().type); analyzeNode(bLangCatch.body, env); tryCatchReturns = tryCatchReturns && this.statementReturns; this.resetStatementReturns(); } if (tryNode.finallyBody != null) { analyzeNode(tryNode.finallyBody, env); this.statementReturns = tryCatchReturns || this.statementReturns; } else { this.statementReturns = tryCatchReturns; } } public void visit(BLangCatch catchNode) { /* ignore */ } public void visit(BLangWorkerSend workerSendNode) { this.checkStatementExecutionValidity(workerSendNode); if (workerSendNode.isChannel) { analyzeExpr(workerSendNode.expr); if (workerSendNode.keyExpr != null) { analyzeExpr(workerSendNode.keyExpr); } return; } if (!this.inWorker()) { return; } this.workerActionSystemStack.peek().addWorkerAction(workerSendNode); analyzeExpr(workerSendNode.expr); } @Override public void visit(BLangWorkerReceive workerReceiveNode) { this.checkStatementExecutionValidity(workerReceiveNode); if (workerReceiveNode.isChannel) { analyzeExpr(workerReceiveNode.expr); if (workerReceiveNode.keyExpr != null) { analyzeExpr(workerReceiveNode.keyExpr); } return; } if (!this.inWorker()) { return; } this.workerActionSystemStack.peek().addWorkerAction(workerReceiveNode); analyzeExpr(workerReceiveNode.expr); } public void visit(BLangLiteral literalExpr) { /* ignore */ } public void visit(BLangArrayLiteral arrayLiteral) { analyzeExprs(arrayLiteral.exprs); } public void visit(BLangTableLiteral tableLiteral) { /* 
ignore */ } public void visit(BLangSimpleVarRef varRefExpr) { /* ignore */ } public void visit(BLangFieldBasedAccess fieldAccessExpr) { /* ignore */ } public void visit(BLangIndexBasedAccess indexAccessExpr) { analyzeExpr(indexAccessExpr.indexExpr); analyzeExpr(indexAccessExpr.expr); if (indexAccessExpr.indexExpr.type == null || indexAccessExpr.indexExpr.type.tag == TypeTags.ERROR) { return; } if (indexAccessExpr.expr.type.tag == TypeTags.ARRAY && indexAccessExpr.indexExpr.getKind() == NodeKind.LITERAL) { BArrayType bArrayType = (BArrayType) indexAccessExpr.expr.type; BLangLiteral indexExpr = (BLangLiteral) indexAccessExpr.indexExpr; Long indexVal = (Long) indexExpr.getValue(); if (bArrayType.state == BArrayState.CLOSED_SEALED && (bArrayType.size <= indexVal)) { dlog.error(indexExpr.pos, DiagnosticCode.ARRAY_INDEX_OUT_OF_RANGE, indexVal, bArrayType.size); } } } public void visit(BLangInvocation invocationExpr) { analyzeExpr(invocationExpr.expr); analyzeExprs(invocationExpr.requiredArgs); analyzeExprs(invocationExpr.namedArgs); analyzeExprs(invocationExpr.restArgs); checkDuplicateNamedArgs(invocationExpr.namedArgs); if ((invocationExpr.symbol != null) && invocationExpr.symbol.kind == SymbolKind.FUNCTION) { BSymbol funcSymbol = invocationExpr.symbol; if (Symbols.isFlagOn(funcSymbol.flags, Flags.DEPRECATED)) { dlog.warning(invocationExpr.pos, DiagnosticCode.USAGE_OF_DEPRECATED_FUNCTION, names.fromIdNode(invocationExpr.name)); } } if (invocationExpr.actionInvocation) { validateActionInvocation(invocationExpr.pos, invocationExpr); } } private void validateActionInvocation(DiagnosticPos pos, BLangNode bLangNode) { BLangNode parent = bLangNode.parent; while (parent != null) { final NodeKind kind = parent.getKind(); if (kind == NodeKind.ASSIGNMENT || kind == NodeKind.EXPRESSION_STATEMENT || kind == NodeKind.TUPLE_DESTRUCTURE || kind == NodeKind.VARIABLE) { return; } else if (kind == NodeKind.CHECK_EXPR || kind == NodeKind.MATCH_EXPRESSION) { parent = parent.parent; 
continue; } else if (kind == NodeKind.ELVIS_EXPR && ((BLangElvisExpr) parent).lhsExpr.getKind() == NodeKind.INVOCATION && ((BLangInvocation) ((BLangElvisExpr) parent).lhsExpr).actionInvocation) { parent = parent.parent; continue; } break; } dlog.error(pos, DiagnosticCode.INVALID_ACTION_INVOCATION_AS_EXPR); } public void visit(BLangTypeInit cIExpr) { analyzeExprs(cIExpr.argsExpr); analyzeExpr(cIExpr.objectInitInvocation); } public void visit(BLangTernaryExpr ternaryExpr) { analyzeExpr(ternaryExpr.expr); analyzeExpr(ternaryExpr.thenExpr); analyzeExpr(ternaryExpr.elseExpr); } public void visit(BLangAwaitExpr awaitExpr) { analyzeExpr(awaitExpr.expr); } public void visit(BLangBinaryExpr binaryExpr) { analyzeExpr(binaryExpr.lhsExpr); analyzeExpr(binaryExpr.rhsExpr); } public void visit(BLangElvisExpr elvisExpr) { analyzeExpr(elvisExpr.lhsExpr); analyzeExpr(elvisExpr.rhsExpr); } @Override public void visit(BLangBracedOrTupleExpr bracedOrTupleExpr) { analyzeExprs(bracedOrTupleExpr.expressions); } public void visit(BLangUnaryExpr unaryExpr) { analyzeExpr(unaryExpr.expr); } public void visit(BLangTypedescExpr accessExpr) { /* ignore */ } public void visit(BLangTypeConversionExpr conversionExpr) { analyzeExpr(conversionExpr.expr); } public void visit(BLangXMLQName xmlQName) { /* ignore */ } public void visit(BLangXMLAttribute xmlAttribute) { analyzeExpr(xmlAttribute.name); analyzeExpr(xmlAttribute.value); } public void visit(BLangXMLElementLiteral xmlElementLiteral) { analyzeExpr(xmlElementLiteral.startTagName); analyzeExpr(xmlElementLiteral.endTagName); analyzeExprs(xmlElementLiteral.attributes); analyzeExprs(xmlElementLiteral.children); } public void visit(BLangXMLTextLiteral xmlTextLiteral) { analyzeExprs(xmlTextLiteral.textFragments); } public void visit(BLangXMLCommentLiteral xmlCommentLiteral) { analyzeExprs(xmlCommentLiteral.textFragments); } public void visit(BLangXMLProcInsLiteral xmlProcInsLiteral) { analyzeExprs(xmlProcInsLiteral.dataFragments); 
analyzeExpr(xmlProcInsLiteral.target); } public void visit(BLangXMLQuotedString xmlQuotedString) { analyzeExprs(xmlQuotedString.textFragments); } public void visit(BLangStringTemplateLiteral stringTemplateLiteral) { analyzeExprs(stringTemplateLiteral.exprs); } public void visit(BLangLambdaFunction bLangLambdaFunction) { /* ignore */ } public void visit(BLangArrowFunction bLangArrowFunction) { /* ignore */ } public void visit(BLangXMLAttributeAccess xmlAttributeAccessExpr) { analyzeExpr(xmlAttributeAccessExpr.expr); analyzeExpr(xmlAttributeAccessExpr.indexExpr); } public void visit(BLangIntRangeExpression intRangeExpression) { analyzeExpr(intRangeExpression.startExpr); analyzeExpr(intRangeExpression.endExpr); } public void visit(BLangValueType valueType) { /* ignore */ } public void visit(BLangArrayType arrayType) { /* ignore */ } public void visit(BLangBuiltInRefTypeNode builtInRefType) { /* ignore */ } public void visit(BLangConstrainedType constrainedType) { /* ignore */ } public void visit(BLangUserDefinedType userDefinedType) { analyseType(userDefinedType.type, userDefinedType.pos); } public void visit(BLangTupleTypeNode tupleTypeNode) { tupleTypeNode.memberTypeNodes.forEach(memberType -> analyzeNode(memberType, env)); } public void visit(BLangUnionTypeNode unionTypeNode) { unionTypeNode.memberTypeNodes.forEach(memberType -> analyzeNode(memberType, env)); } public void visit(BLangFunctionTypeNode functionTypeNode) { analyseType(functionTypeNode.type, functionTypeNode.pos); } @Override public void visit(BLangTableQueryExpression tableQueryExpression) { /* ignore */ } @Override public void visit(BLangRestArgsExpression bLangVarArgsExpression) { /* ignore */ } @Override public void visit(BLangNamedArgsExpression bLangNamedArgsExpression) { /* ignore */ } @Override public void visit(BLangMatchExpression bLangMatchExpression) { analyzeExpr(bLangMatchExpression.expr); List<BType> exprTypes; if (bLangMatchExpression.expr.type.tag == TypeTags.UNION) { BUnionType 
unionType = (BUnionType) bLangMatchExpression.expr.type; exprTypes = new ArrayList<>(unionType.memberTypes); } else { exprTypes = Lists.of(bLangMatchExpression.expr.type); } List<BType> unmatchedExprTypes = new ArrayList<>(); for (BType exprType : exprTypes) { boolean assignable = false; for (BLangMatchExprPatternClause pattern : bLangMatchExpression.patternClauses) { BType patternType = pattern.variable.type; if (exprType.tag == TypeTags.ERROR || patternType.tag == TypeTags.ERROR) { return; } assignable = this.types.isAssignable(exprType, patternType); if (assignable) { pattern.matchedTypesDirect.add(exprType); break; } else if (exprType.tag == TypeTags.ANY) { pattern.matchedTypesIndirect.add(exprType); } else if (exprType.tag == TypeTags.JSON && this.types.isAssignable(patternType, exprType)) { pattern.matchedTypesIndirect.add(exprType); } else if ((exprType.tag == TypeTags.OBJECT || exprType.tag == TypeTags.RECORD) && this.types.isAssignable(patternType, exprType)) { pattern.matchedTypesIndirect.add(exprType); } else if (exprType.tag == TypeTags.BYTE && patternType.tag == TypeTags.INT) { pattern.matchedTypesDirect.add(exprType); break; } else { } } if (!assignable && !this.types.isAssignable(exprType, bLangMatchExpression.type)) { unmatchedExprTypes.add(exprType); } } if (!unmatchedExprTypes.isEmpty()) { dlog.error(bLangMatchExpression.pos, DiagnosticCode.MATCH_STMT_CANNOT_GUARANTEE_A_MATCHING_PATTERN, unmatchedExprTypes); } boolean matchedPatternsAvailable = false; for (int i = bLangMatchExpression.patternClauses.size() - 1; i >= 0; i--) { BLangMatchExprPatternClause pattern = bLangMatchExpression.patternClauses.get(i); if (pattern.matchedTypesDirect.isEmpty() && pattern.matchedTypesIndirect.isEmpty()) { if (matchedPatternsAvailable) { dlog.error(pattern.pos, DiagnosticCode.MATCH_STMT_UNMATCHED_PATTERN); } else { dlog.error(pattern.pos, DiagnosticCode.MATCH_STMT_UNREACHABLE_PATTERN); } } else { matchedPatternsAvailable = true; } } } @Override public void 
visit(BLangCheckedExpr checkedExpr) { } private <E extends BLangExpression> void analyzeExpr(E node) { if (node == null) { return; } BLangNode myParent = parent; node.parent = parent; parent = node; node.accept(this); parent = myParent; checkAccess(node); } @Override public void visit(BLangScope scopeNode) { this.checkStatementExecutionValidity(scopeNode); scopeNode.getScopeBody().accept(this); this.resetLastStatement(); visit(scopeNode.compensationFunction); } @Override public void visit(BLangCompensate compensateNode) { this.checkStatementExecutionValidity(compensateNode); } /** * This method checks for private symbols being accessed or used outside of package and|or private symbols being * used in public fields of objects/records and will fail those occurrences. * * @param node expression node to analyse */ private <E extends BLangExpression> void checkAccess(E node) { if (node.type != null) { checkAccessSymbol(node.type.tsymbol, node.pos); } if (node instanceof BLangInvocation) { BLangInvocation bLangInvocation = (BLangInvocation) node; checkAccessSymbol(bLangInvocation.symbol, bLangInvocation.pos); } } private void checkAccessSymbol(BSymbol symbol, DiagnosticPos position) { if (symbol == null) { return; } if (env.enclPkg.symbol.pkgID != symbol.pkgID && Symbols.isPrivate(symbol)) { dlog.error(position, DiagnosticCode.ATTEMPT_REFER_NON_ACCESSIBLE_SYMBOL, symbol.name); } } private <E extends BLangExpression> void analyzeExprs(List<E> nodeList) { for (int i = 0; i < nodeList.size(); i++) { nodeList.get(i).accept(this); } } private void initNewWorkerActionSystem() { this.workerActionSystemStack.push(new WorkerActionSystem()); } private void finalizeCurrentWorkerActionSystem() { WorkerActionSystem was = this.workerActionSystemStack.pop(); this.validateWorkerInteractions(was); } private static boolean isWorkerSend(BLangStatement action) { return action.getKind() == NodeKind.WORKER_SEND; } private static boolean isWorkerForkSend(BLangStatement action) { return 
((BLangWorkerSend) action).isForkJoinSend; } private String extractWorkerId(BLangStatement action) { if (isWorkerSend(action)) { return ((BLangWorkerSend) action).workerIdentifier.value; } else { return ((BLangWorkerReceive) action).workerIdentifier.value; } } private void validateWorkerInteractions(WorkerActionSystem workerActionSystem) { this.validateForkJoinSendsToFork(workerActionSystem); BLangStatement currentAction; WorkerActionStateMachine currentSM; String currentWorkerId; boolean systemRunning; do { systemRunning = false; for (Map.Entry<String, WorkerActionStateMachine> entry : workerActionSystem.entrySet()) { currentWorkerId = entry.getKey(); currentSM = entry.getValue(); if (currentSM.done()) { continue; } currentAction = currentSM.currentAction(); if (isWorkerSend(currentAction)) { if (isWorkerForkSend(currentAction)) { currentSM.next(); systemRunning = true; } else { WorkerActionStateMachine otherSM = workerActionSystem.get(this.extractWorkerId(currentAction)); if (otherSM.currentIsReceive(currentWorkerId)) { this.validateWorkerActionParameters((BLangWorkerSend) currentAction, (BLangWorkerReceive) otherSM.currentAction()); otherSM.next(); currentSM.next(); systemRunning = true; } } } } } while (systemRunning); if (!workerActionSystem.everyoneDone()) { this.reportInvalidWorkerInteractionDiagnostics(workerActionSystem); } } private void validateForkJoinSendsToFork(WorkerActionSystem workerActionSystem) { for (Map.Entry<String, WorkerActionStateMachine> entry : workerActionSystem.entrySet()) { this.validateForkJoinSendsToFork(entry.getValue()); } } private void validateForkJoinSendsToFork(WorkerActionStateMachine sm) { boolean sentToFork = false; for (BLangStatement action : sm.actions) { if (isWorkerSend(action) && isWorkerForkSend(action)) { if (sentToFork) { this.dlog.error(action.pos, DiagnosticCode.INVALID_MULTIPLE_FORK_JOIN_SEND); } else { sentToFork = true; } } } } private void reportInvalidWorkerInteractionDiagnostics(WorkerActionSystem 
workerActionSystem) { this.dlog.error(workerActionSystem.getRootPosition(), DiagnosticCode.INVALID_WORKER_INTERACTION, workerActionSystem.toString()); } private void validateWorkerActionParameters(BLangWorkerSend send, BLangWorkerReceive receive) { this.typeChecker.checkExpr(send.expr, send.env, receive.expr.type); } private boolean checkNextBreakValidityInTransaction() { return !this.loopWithintransactionCheckStack.peek() && transactionCount > 0; } private boolean checkReturnValidityInTransaction() { return (this.returnWithintransactionCheckStack.empty() || !this.returnWithintransactionCheckStack.peek()) && transactionCount > 0; } private boolean isValidTransactionBlock() { return (this.transactionWithinHandlerCheckStack.empty() || !this.transactionWithinHandlerCheckStack.peek()) && !this.withinRetryBlock; } private void validateMainFunction(BLangFunction funcNode) { if (!MAIN_FUNCTION_NAME.equals(funcNode.name.value)) { return; } if (!Symbols.isPublic(funcNode.symbol)) { this.dlog.error(funcNode.pos, DiagnosticCode.MAIN_SHOULD_BE_PUBLIC); } if (!(funcNode.symbol.retType.tag == TypeTags.NIL || funcNode.symbol.retType.tag == TypeTags.INT)) { this.dlog.error(funcNode.returnTypeNode.pos, DiagnosticCode.INVALID_RETURN_WITH_MAIN, funcNode.symbol.retType); } } private void checkDuplicateNamedArgs(List<BLangExpression> args) { List<BLangIdentifier> existingArgs = new ArrayList<>(); args.forEach(arg -> { BLangNamedArgsExpression namedArg = (BLangNamedArgsExpression) arg; if (existingArgs.contains(namedArg.name)) { dlog.error(namedArg.pos, DiagnosticCode.DUPLICATE_NAMED_ARGS, namedArg.name); } existingArgs.add(namedArg.name); }); } /** * This class contains the state machines for a set of workers. 
*/ private static class WorkerActionSystem { public Map<String, WorkerActionStateMachine> workerActionStateMachines = new LinkedHashMap<>(); private WorkerActionStateMachine currentSM; private String currentWorkerId; public void startWorkerActionStateMachine(String workerId, DiagnosticPos pos) { this.currentWorkerId = workerId; this.currentSM = new WorkerActionStateMachine(pos); } public void endWorkerActionStateMachine() { this.workerActionStateMachines.put(this.currentWorkerId, this.currentSM); } public void addWorkerAction(BLangStatement action) { this.currentSM.actions.add(action); } public WorkerActionStateMachine get(String workerId) { return this.workerActionStateMachines.get(workerId); } public Set<Map.Entry<String, WorkerActionStateMachine>> entrySet() { return this.workerActionStateMachines.entrySet(); } public boolean everyoneDone() { return this.workerActionStateMachines.values().stream().allMatch(WorkerActionStateMachine::done); } public DiagnosticPos getRootPosition() { return this.workerActionStateMachines.values().iterator().next().pos; } @Override public String toString() { return this.workerActionStateMachines.toString(); } } /** * This class represents a state machine to maintain the state of the send/receive * actions of a worker. */ private static class WorkerActionStateMachine { private static final String WORKER_SM_FINISHED = "FINISHED"; public int currentState; public List<BLangStatement> actions = new ArrayList<>(); public DiagnosticPos pos; public WorkerActionStateMachine(DiagnosticPos pos) { this.pos = pos; } public boolean done() { return this.actions.size() == this.currentState; } public BLangStatement currentAction() { return this.actions.get(this.currentState); } public boolean currentIsReceive(String sourceWorkerId) { if (this.done()) { return false; } BLangStatement action = this.currentAction(); return !isWorkerSend(action) && ((BLangWorkerReceive) action). 
workerIdentifier.value.equals(sourceWorkerId); } public void next() { this.currentState++; } @Override public String toString() { if (this.done()) { return WORKER_SM_FINISHED; } else { BLangStatement action = this.currentAction(); if (isWorkerSend(action)) { return ((BLangWorkerSend) action).toActionString(); } else { return ((BLangWorkerReceive) action).toActionString(); } } } } }
class CodeAnalyzer extends BLangNodeVisitor { private static final CompilerContext.Key<CodeAnalyzer> CODE_ANALYZER_KEY = new CompilerContext.Key<>(); private int loopCount; private int transactionCount; private boolean statementReturns; private boolean lastStatement; private boolean withinRetryBlock; private int forkJoinCount; private int workerCount; private SymbolTable symTable; private Types types; private BLangDiagnosticLog dlog; private TypeChecker typeChecker; private Stack<WorkerActionSystem> workerActionSystemStack = new Stack<>(); private Stack<Boolean> loopWithintransactionCheckStack = new Stack<>(); private Stack<Boolean> returnWithintransactionCheckStack = new Stack<>(); private Stack<Boolean> doneWithintransactionCheckStack = new Stack<>(); private Stack<Boolean> transactionWithinHandlerCheckStack = new Stack<>(); private BLangNode parent; private Names names; private SymbolEnv env; public static CodeAnalyzer getInstance(CompilerContext context) { CodeAnalyzer codeGenerator = context.get(CODE_ANALYZER_KEY); if (codeGenerator == null) { codeGenerator = new CodeAnalyzer(context); } return codeGenerator; } public CodeAnalyzer(CompilerContext context) { context.put(CODE_ANALYZER_KEY, this); this.symTable = SymbolTable.getInstance(context); this.types = Types.getInstance(context); this.dlog = BLangDiagnosticLog.getInstance(context); this.typeChecker = TypeChecker.getInstance(context); this.names = Names.getInstance(context); } private void resetFunction() { this.resetStatementReturns(); } private void resetStatementReturns() { this.statementReturns = false; } private void resetLastStatement() { this.lastStatement = false; } public BLangPackage analyze(BLangPackage pkgNode) { pkgNode.accept(this); return pkgNode; } @Override public void visit(BLangPackage pkgNode) { if (pkgNode.completedPhases.contains(CompilerPhase.CODE_ANALYZE)) { return; } parent = pkgNode; SymbolEnv pkgEnv = symTable.pkgEnvMap.get(pkgNode.symbol); 
pkgNode.topLevelNodes.forEach(topLevelNode -> analyzeNode((BLangNode) topLevelNode, pkgEnv)); pkgNode.completedPhases.add(CompilerPhase.CODE_ANALYZE); parent = null; } private void analyzeNode(BLangNode node, SymbolEnv env) { SymbolEnv prevEnv = this.env; this.env = env; BLangNode myParent = parent; node.parent = parent; parent = node; node.accept(this); parent = myParent; this.env = prevEnv; } @Override public void visit(BLangCompilationUnit compUnitNode) { compUnitNode.topLevelNodes.forEach(e -> analyzeNode((BLangNode) e, env)); } public void visit(BLangTypeDefinition typeDefinition) { if (typeDefinition.typeNode.getKind() == NodeKind.OBJECT_TYPE || typeDefinition.typeNode.getKind() == NodeKind.RECORD_TYPE) { analyzeNode(typeDefinition.typeNode, env); } if (!Symbols.isPublic(typeDefinition.symbol) || typeDefinition.symbol.type != null && TypeKind.FINITE.equals(typeDefinition.symbol.type.getKind())) { return; } analyseType(typeDefinition.symbol.type, typeDefinition.pos); } @Override public void visit(BLangFunction funcNode) { if (funcNode.symbol.isTransactionHandler) { transactionWithinHandlerCheckStack.push(true); } this.returnWithintransactionCheckStack.push(true); this.doneWithintransactionCheckStack.push(true); this.validateMainFunction(funcNode); SymbolEnv funcEnv = SymbolEnv.createFunctionEnv(funcNode, funcNode.symbol.scope, env); this.visitInvocable(funcNode, funcEnv); this.returnWithintransactionCheckStack.pop(); this.doneWithintransactionCheckStack.pop(); if (funcNode.symbol.isTransactionHandler) { transactionWithinHandlerCheckStack.pop(); } } private void visitInvocable(BLangInvokableNode invNode, SymbolEnv invokableEnv) { this.resetFunction(); try { this.initNewWorkerActionSystem(); if (Symbols.isNative(invNode.symbol)) { return; } boolean invokableReturns = invNode.returnTypeNode.type != symTable.nilType; if (invNode.workers.isEmpty()) { if (isPublicInvokableNode(invNode)) { analyzeNode(invNode.returnTypeNode, invokableEnv); } /* the body can be null 
in the case of Object type function declarations */ if (invNode.body != null) { analyzeNode(invNode.body, invokableEnv); /* the function returns, but none of the statements surely returns */ if (invokableReturns && !this.statementReturns) { this.dlog.error(invNode.pos, DiagnosticCode.INVOKABLE_MUST_RETURN, invNode.getKind().toString().toLowerCase()); } } } else { boolean workerReturns = false; for (BLangWorker worker : invNode.workers) { analyzeNode(worker, invokableEnv); workerReturns = workerReturns || this.statementReturns; this.resetStatementReturns(); } if (invokableReturns && !workerReturns) { this.dlog.error(invNode.pos, DiagnosticCode.ATLEAST_ONE_WORKER_MUST_RETURN, invNode.getKind().toString().toLowerCase()); } } } finally { this.finalizeCurrentWorkerActionSystem(); } } private boolean isPublicInvokableNode(BLangInvokableNode invNode) { return Symbols.isPublic(invNode.symbol) && (SymbolKind.PACKAGE.equals(invNode.symbol.owner.getKind()) || Symbols.isPublic(invNode.symbol.owner)); } @Override public void visit(BLangForkJoin forkJoin) { this.forkJoinCount++; this.initNewWorkerActionSystem(); this.checkStatementExecutionValidity(forkJoin); forkJoin.workers.forEach(e -> analyzeNode(e, env)); analyzeNode(forkJoin.joinedBody, env); if (forkJoin.timeoutBody != null) { boolean joinReturns = this.statementReturns; this.resetStatementReturns(); analyzeNode(forkJoin.timeoutBody, env); this.statementReturns = joinReturns && this.statementReturns; } this.checkForkJoinWorkerCount(forkJoin); this.finalizeCurrentWorkerActionSystem(); this.forkJoinCount--; analyzeExpr(forkJoin.timeoutExpression); } private boolean inForkJoin() { return this.forkJoinCount > 0; } private void checkForkJoinWorkerCount(BLangForkJoin forkJoin) { if (forkJoin.joinType == ForkJoinNode.JoinType.SOME) { int wc = forkJoin.joinedWorkers.size(); if (wc == 0) { wc = forkJoin.workers.size(); } if (forkJoin.joinedWorkerCount > wc) { this.dlog.error(forkJoin.pos, 
DiagnosticCode.FORK_JOIN_INVALID_WORKER_COUNT); } } } private boolean inWorker() { return this.workerCount > 0; } @Override public void visit(BLangWorker worker) { this.workerCount++; this.workerActionSystemStack.peek().startWorkerActionStateMachine(worker.name.value, worker.pos); analyzeNode(worker.body, env); this.workerActionSystemStack.peek().endWorkerActionStateMachine(); this.workerCount--; } @Override public void visit(BLangEndpoint endpointNode) { } @Override public void visit(BLangTransaction transactionNode) { this.checkStatementExecutionValidity(transactionNode); if (!isValidTransactionBlock()) { this.dlog.error(transactionNode.pos, DiagnosticCode.TRANSACTION_CANNOT_BE_USED_WITHIN_HANDLER); return; } this.loopWithintransactionCheckStack.push(false); this.returnWithintransactionCheckStack.push(false); this.doneWithintransactionCheckStack.push(false); this.transactionCount++; analyzeNode(transactionNode.transactionBody, env); this.transactionCount--; this.resetLastStatement(); if (transactionNode.onRetryBody != null) { this.withinRetryBlock = true; analyzeNode(transactionNode.onRetryBody, env); this.resetStatementReturns(); this.resetLastStatement(); this.withinRetryBlock = false; } this.returnWithintransactionCheckStack.pop(); this.loopWithintransactionCheckStack.pop(); this.doneWithintransactionCheckStack.pop(); analyzeExpr(transactionNode.retryCount); analyzeExpr(transactionNode.onCommitFunction); analyzeExpr(transactionNode.onAbortFunction); } @Override public void visit(BLangAbort abortNode) { if (this.transactionCount == 0) { this.dlog.error(abortNode.pos, DiagnosticCode.ABORT_CANNOT_BE_OUTSIDE_TRANSACTION_BLOCK); return; } this.lastStatement = true; } @Override public void visit(BLangDone doneNode) { if (checkReturnValidityInTransaction()) { this.dlog.error(doneNode.pos, DiagnosticCode.DONE_CANNOT_BE_USED_TO_EXIT_TRANSACTION); return; } this.lastStatement = true; } @Override public void visit(BLangRetry retryNode) { if (this.transactionCount == 0) { 
this.dlog.error(retryNode.pos, DiagnosticCode.FAIL_CANNOT_BE_OUTSIDE_TRANSACTION_BLOCK); return; } this.lastStatement = true; } private void checkUnreachableCode(BLangStatement stmt) { if (this.statementReturns) { this.dlog.error(stmt.pos, DiagnosticCode.UNREACHABLE_CODE); this.resetStatementReturns(); } if (lastStatement) { this.dlog.error(stmt.pos, DiagnosticCode.UNREACHABLE_CODE); this.resetLastStatement(); } } private void checkStatementExecutionValidity(BLangStatement stmt) { this.checkUnreachableCode(stmt); } @Override public void visit(BLangBlockStmt blockNode) { final SymbolEnv blockEnv = SymbolEnv.createBlockEnv(blockNode, env); blockNode.stmts.forEach(e -> analyzeNode(e, blockEnv)); this.resetLastStatement(); } @Override public void visit(BLangReturn returnStmt) { this.checkStatementExecutionValidity(returnStmt); if (this.env.enclInvokable.getKind() == NodeKind.RESOURCE) { this.dlog.error(returnStmt.pos, DiagnosticCode.RETURN_STMT_NOT_VALID_IN_RESOURCE); return; } if (this.inForkJoin() && this.inWorker()) { this.dlog.error(returnStmt.pos, DiagnosticCode.FORK_JOIN_WORKER_CANNOT_RETURN); return; } if (checkReturnValidityInTransaction()) { this.dlog.error(returnStmt.pos, DiagnosticCode.RETURN_CANNOT_BE_USED_TO_EXIT_TRANSACTION); return; } this.statementReturns = true; analyzeExpr(returnStmt.expr); } @Override public void visit(BLangIf ifStmt) { this.checkStatementExecutionValidity(ifStmt); analyzeNode(ifStmt.body, env); boolean ifStmtReturns = this.statementReturns; this.resetStatementReturns(); if (ifStmt.elseStmt != null) { analyzeNode(ifStmt.elseStmt, env); this.statementReturns = ifStmtReturns && this.statementReturns; } analyzeExpr(ifStmt.expr); } @Override public void visit(BLangMatch matchStmt) { this.returnWithintransactionCheckStack.push(true); boolean unmatchedExprTypesAvailable = false; analyzeExpr(matchStmt.expr); List<BType> unmatchedExprTypes = new ArrayList<>(); for (BType exprType : matchStmt.exprTypes) { boolean assignable = false; for 
(BLangMatchStmtPatternClause pattern : matchStmt.patternClauses) { BType patternType = pattern.variable.type; if (exprType.tag == TypeTags.ERROR || patternType.tag == TypeTags.ERROR) { return; } assignable = this.types.isAssignable(exprType, patternType); if (assignable) { pattern.matchedTypesDirect.add(exprType); break; } else if (exprType.tag == TypeTags.ANY) { pattern.matchedTypesIndirect.add(exprType); } else if (exprType.tag == TypeTags.JSON && this.types.isAssignable(patternType, exprType)) { pattern.matchedTypesIndirect.add(exprType); } else if ((exprType.tag == TypeTags.OBJECT || exprType.tag == TypeTags.RECORD) && this.types.isAssignable(patternType, exprType)) { pattern.matchedTypesIndirect.add(exprType); } else if (exprType.tag == TypeTags.BYTE && patternType.tag == TypeTags.INT) { pattern.matchedTypesDirect.add(exprType); break; } else { } } if (!assignable) { unmatchedExprTypes.add(exprType); } } if (!unmatchedExprTypes.isEmpty()) { unmatchedExprTypesAvailable = true; dlog.error(matchStmt.pos, DiagnosticCode.MATCH_STMT_CANNOT_GUARANTEE_A_MATCHING_PATTERN, unmatchedExprTypes); } boolean matchedPatternsAvailable = false; for (int i = matchStmt.patternClauses.size() - 1; i >= 0; i--) { BLangMatchStmtPatternClause pattern = matchStmt.patternClauses.get(i); if (pattern.matchedTypesDirect.isEmpty() && pattern.matchedTypesIndirect.isEmpty()) { if (matchedPatternsAvailable) { dlog.error(pattern.pos, DiagnosticCode.MATCH_STMT_UNMATCHED_PATTERN); } else { dlog.error(pattern.pos, DiagnosticCode.MATCH_STMT_UNREACHABLE_PATTERN); } } else { matchedPatternsAvailable = true; } } if (!unmatchedExprTypesAvailable) { this.checkStatementExecutionValidity(matchStmt); boolean matchStmtReturns = true; for (BLangMatchStmtPatternClause patternClause : matchStmt.patternClauses) { analyzeNode(patternClause.body, env); matchStmtReturns = matchStmtReturns && this.statementReturns; this.resetStatementReturns(); } this.statementReturns = matchStmtReturns; } 
this.returnWithintransactionCheckStack.pop(); } @Override public void visit(BLangForeach foreach) { this.loopWithintransactionCheckStack.push(true); this.checkStatementExecutionValidity(foreach); this.loopCount++; foreach.body.stmts.forEach(e -> analyzeNode(e, env)); this.loopCount--; this.resetLastStatement(); this.loopWithintransactionCheckStack.pop(); analyzeExpr(foreach.collection); analyzeExprs(foreach.varRefs); } @Override public void visit(BLangWhile whileNode) { this.loopWithintransactionCheckStack.push(true); this.checkStatementExecutionValidity(whileNode); this.loopCount++; whileNode.body.stmts.forEach(e -> analyzeNode(e, env)); this.loopCount--; this.resetLastStatement(); this.loopWithintransactionCheckStack.pop(); analyzeExpr(whileNode.expr); } @Override public void visit(BLangLock lockNode) { this.checkStatementExecutionValidity(lockNode); lockNode.body.stmts.forEach(e -> analyzeNode(e, env)); } @Override public void visit(BLangContinue continueNode) { this.checkStatementExecutionValidity(continueNode); if (this.loopCount == 0) { this.dlog.error(continueNode.pos, DiagnosticCode.CONTINUE_CANNOT_BE_OUTSIDE_LOOP); return; } if (checkNextBreakValidityInTransaction()) { this.dlog.error(continueNode.pos, DiagnosticCode.CONTINUE_CANNOT_BE_USED_TO_EXIT_TRANSACTION); return; } this.lastStatement = true; } public void visit(BLangImportPackage importPkgNode) { BPackageSymbol pkgSymbol = importPkgNode.symbol; SymbolEnv pkgEnv = this.symTable.pkgEnvMap.get(pkgSymbol); if (pkgEnv == null) { return; } analyzeNode(pkgEnv.node, env); } public void visit(BLangXMLNS xmlnsNode) { /* ignore */ } public void visit(BLangService serviceNode) { SymbolEnv serviceEnv = SymbolEnv.createServiceEnv(serviceNode, serviceNode.symbol.scope, env); serviceNode.resources.forEach(res -> analyzeNode(res, serviceEnv)); } public void visit(BLangResource resourceNode) { SymbolEnv resourceEnv = SymbolEnv.createResourceActionSymbolEnv(resourceNode, resourceNode.symbol.scope, env); 
this.visitInvocable(resourceNode, resourceEnv); } public void visit(BLangForever foreverStatement) { this.lastStatement = true; } public void visit(BLangAction actionNode) { /* not used, covered with functions */ } public void visit(BLangObjectTypeNode objectTypeNode) { if (objectTypeNode.isFieldAnalyseRequired && Symbols.isPublic(objectTypeNode.symbol)) { objectTypeNode.fields.stream() .filter(field -> (Symbols.isPublic(field.symbol))) .forEach(field -> analyzeNode(field, this.env)); } objectTypeNode.functions.forEach(e -> this.analyzeNode(e, this.env)); } private void analyseType(BType type, DiagnosticPos pos) { if (type == null || type.tsymbol == null) { return; } BSymbol symbol = type.tsymbol; if (Symbols.isPrivate(symbol)) { dlog.error(pos, DiagnosticCode.ATTEMPT_EXPOSE_NON_PUBLIC_SYMBOL, symbol.name); } } public void visit(BLangRecordTypeNode recordTypeNode) { if (recordTypeNode.isFieldAnalyseRequired && Symbols.isPublic(recordTypeNode.symbol)) { recordTypeNode.fields.stream() .filter(field -> (Symbols.isPublic(field.symbol))) .forEach(field -> analyzeNode(field, this.env)); } } public void visit(BLangVariable varNode) { analyzeExpr(varNode.expr); if (Objects.isNull(varNode.symbol) || !Symbols.isPublic(varNode.symbol)) { return; } int ownerSymTag = this.env.scope.owner.tag; if (((ownerSymTag & SymTag.INVOKABLE) != SymTag.INVOKABLE) || (varNode.type != null && varNode.parent != null && NodeKind.FUNCTION.equals(varNode.parent.getKind()))) { analyseType(varNode.type, varNode.pos); } } public void visit(BLangIdentifier identifierNode) { /* ignore */ } public void visit(BLangAnnotation annotationNode) { /* ignore */ } public void visit(BLangAnnotationAttachment annAttachmentNode) { /* ignore */ } public void visit(BLangVariableDef varDefNode) { this.checkStatementExecutionValidity(varDefNode); analyzeNode(varDefNode.var, env); } public void visit(BLangCompoundAssignment compoundAssignment) { this.checkStatementExecutionValidity(compoundAssignment); 
analyzeExpr(compoundAssignment.varRef); analyzeExpr(compoundAssignment.expr); } public void visit(BLangPostIncrement postIncrement) { this.checkStatementExecutionValidity(postIncrement); analyzeExpr(postIncrement.varRef); analyzeExpr(postIncrement.increment); } public void visit(BLangAssignment assignNode) { this.checkStatementExecutionValidity(assignNode); analyzeExpr(assignNode.varRef); analyzeExpr(assignNode.expr); } @Override public void visit(BLangTupleDestructure stmt) { this.checkStatementExecutionValidity(stmt); analyzeExprs(stmt.varRefs); analyzeExpr(stmt.expr); } public void visit(BLangBreak breakNode) { this.checkStatementExecutionValidity(breakNode); if (this.loopCount == 0) { this.dlog.error(breakNode.pos, DiagnosticCode.BREAK_CANNOT_BE_OUTSIDE_LOOP); return; } if (checkNextBreakValidityInTransaction()) { this.dlog.error(breakNode.pos, DiagnosticCode.BREAK_CANNOT_BE_USED_TO_EXIT_TRANSACTION); return; } this.lastStatement = true; } public void visit(BLangThrow throwNode) { this.checkStatementExecutionValidity(throwNode); this.statementReturns = true; analyzeExpr(throwNode.expr); } public void visit(BLangXMLNSStatement xmlnsStmtNode) { this.checkStatementExecutionValidity(xmlnsStmtNode); } public void visit(BLangExpressionStmt exprStmtNode) { this.checkStatementExecutionValidity(exprStmtNode); analyzeExpr(exprStmtNode.expr); validateExprStatementExpression(exprStmtNode); } private void validateExprStatementExpression(BLangExpressionStmt exprStmtNode) { BLangExpression expr = exprStmtNode.expr; while (expr.getKind() == NodeKind.MATCH_EXPRESSION || expr.getKind() == NodeKind.CHECK_EXPR) { if (expr.getKind() == NodeKind.MATCH_EXPRESSION) { expr = ((BLangMatchExpression) expr).expr; } else if (expr.getKind() == NodeKind.CHECK_EXPR) { expr = ((BLangCheckedExpr) expr).expr; } } if (expr.getKind() == NodeKind.INVOCATION || expr.getKind() == NodeKind.AWAIT_EXPR) { return; } if (expr.type == symTable.nilType) { dlog.error(exprStmtNode.pos, 
DiagnosticCode.INVALID_EXPR_STATEMENT); } } public void visit(BLangTryCatchFinally tryNode) { this.checkStatementExecutionValidity(tryNode); analyzeNode(tryNode.tryBody, env); boolean tryCatchReturns = this.statementReturns; this.resetStatementReturns(); List<BType> caughtTypes = new ArrayList<>(); for (BLangCatch bLangCatch : tryNode.getCatchBlocks()) { if (caughtTypes.contains(bLangCatch.getParameter().type)) { dlog.error(bLangCatch.getParameter().pos, DiagnosticCode.DUPLICATED_ERROR_CATCH, bLangCatch.getParameter().type); } caughtTypes.add(bLangCatch.getParameter().type); analyzeNode(bLangCatch.body, env); tryCatchReturns = tryCatchReturns && this.statementReturns; this.resetStatementReturns(); } if (tryNode.finallyBody != null) { analyzeNode(tryNode.finallyBody, env); this.statementReturns = tryCatchReturns || this.statementReturns; } else { this.statementReturns = tryCatchReturns; } } public void visit(BLangCatch catchNode) { /* ignore */ } public void visit(BLangWorkerSend workerSendNode) { this.checkStatementExecutionValidity(workerSendNode); if (workerSendNode.isChannel) { analyzeExpr(workerSendNode.expr); if (workerSendNode.keyExpr != null) { analyzeExpr(workerSendNode.keyExpr); } return; } if (!this.inWorker()) { return; } this.workerActionSystemStack.peek().addWorkerAction(workerSendNode); analyzeExpr(workerSendNode.expr); } @Override public void visit(BLangWorkerReceive workerReceiveNode) { this.checkStatementExecutionValidity(workerReceiveNode); if (workerReceiveNode.isChannel) { analyzeExpr(workerReceiveNode.expr); if (workerReceiveNode.keyExpr != null) { analyzeExpr(workerReceiveNode.keyExpr); } return; } if (!this.inWorker()) { return; } this.workerActionSystemStack.peek().addWorkerAction(workerReceiveNode); analyzeExpr(workerReceiveNode.expr); } public void visit(BLangLiteral literalExpr) { /* ignore */ } public void visit(BLangArrayLiteral arrayLiteral) { analyzeExprs(arrayLiteral.exprs); } public void visit(BLangTableLiteral tableLiteral) { /* 
ignore */ } public void visit(BLangSimpleVarRef varRefExpr) { /* ignore */ } public void visit(BLangFieldBasedAccess fieldAccessExpr) { /* ignore */ } public void visit(BLangIndexBasedAccess indexAccessExpr) { analyzeExpr(indexAccessExpr.indexExpr); analyzeExpr(indexAccessExpr.expr); if (indexAccessExpr.indexExpr.type == null || indexAccessExpr.indexExpr.type.tag == TypeTags.ERROR) { return; } if (indexAccessExpr.expr.type.tag == TypeTags.ARRAY && indexAccessExpr.indexExpr.getKind() == NodeKind.LITERAL) { BArrayType bArrayType = (BArrayType) indexAccessExpr.expr.type; BLangLiteral indexExpr = (BLangLiteral) indexAccessExpr.indexExpr; Long indexVal = (Long) indexExpr.getValue(); if (bArrayType.state == BArrayState.CLOSED_SEALED && (bArrayType.size <= indexVal)) { dlog.error(indexExpr.pos, DiagnosticCode.ARRAY_INDEX_OUT_OF_RANGE, indexVal, bArrayType.size); } } } public void visit(BLangInvocation invocationExpr) { analyzeExpr(invocationExpr.expr); analyzeExprs(invocationExpr.requiredArgs); analyzeExprs(invocationExpr.namedArgs); analyzeExprs(invocationExpr.restArgs); checkDuplicateNamedArgs(invocationExpr.namedArgs); if ((invocationExpr.symbol != null) && invocationExpr.symbol.kind == SymbolKind.FUNCTION) { BSymbol funcSymbol = invocationExpr.symbol; if (Symbols.isFlagOn(funcSymbol.flags, Flags.DEPRECATED)) { dlog.warning(invocationExpr.pos, DiagnosticCode.USAGE_OF_DEPRECATED_FUNCTION, names.fromIdNode(invocationExpr.name)); } } if (invocationExpr.actionInvocation) { validateActionInvocation(invocationExpr.pos, invocationExpr); } } private void validateActionInvocation(DiagnosticPos pos, BLangNode bLangNode) { BLangNode parent = bLangNode.parent; while (parent != null) { final NodeKind kind = parent.getKind(); if (kind == NodeKind.ASSIGNMENT || kind == NodeKind.EXPRESSION_STATEMENT || kind == NodeKind.TUPLE_DESTRUCTURE || kind == NodeKind.VARIABLE) { return; } else if (kind == NodeKind.CHECK_EXPR || kind == NodeKind.MATCH_EXPRESSION) { parent = parent.parent; 
continue; } else if (kind == NodeKind.ELVIS_EXPR && ((BLangElvisExpr) parent).lhsExpr.getKind() == NodeKind.INVOCATION && ((BLangInvocation) ((BLangElvisExpr) parent).lhsExpr).actionInvocation) { parent = parent.parent; continue; } break; } dlog.error(pos, DiagnosticCode.INVALID_ACTION_INVOCATION_AS_EXPR); } public void visit(BLangTypeInit cIExpr) { analyzeExprs(cIExpr.argsExpr); analyzeExpr(cIExpr.objectInitInvocation); } public void visit(BLangTernaryExpr ternaryExpr) { analyzeExpr(ternaryExpr.expr); analyzeExpr(ternaryExpr.thenExpr); analyzeExpr(ternaryExpr.elseExpr); } public void visit(BLangAwaitExpr awaitExpr) { analyzeExpr(awaitExpr.expr); } public void visit(BLangBinaryExpr binaryExpr) { analyzeExpr(binaryExpr.lhsExpr); analyzeExpr(binaryExpr.rhsExpr); } public void visit(BLangElvisExpr elvisExpr) { analyzeExpr(elvisExpr.lhsExpr); analyzeExpr(elvisExpr.rhsExpr); } @Override public void visit(BLangBracedOrTupleExpr bracedOrTupleExpr) { analyzeExprs(bracedOrTupleExpr.expressions); } public void visit(BLangUnaryExpr unaryExpr) { analyzeExpr(unaryExpr.expr); } public void visit(BLangTypedescExpr accessExpr) { /* ignore */ } public void visit(BLangTypeConversionExpr conversionExpr) { analyzeExpr(conversionExpr.expr); } public void visit(BLangXMLQName xmlQName) { /* ignore */ } public void visit(BLangXMLAttribute xmlAttribute) { analyzeExpr(xmlAttribute.name); analyzeExpr(xmlAttribute.value); } public void visit(BLangXMLElementLiteral xmlElementLiteral) { analyzeExpr(xmlElementLiteral.startTagName); analyzeExpr(xmlElementLiteral.endTagName); analyzeExprs(xmlElementLiteral.attributes); analyzeExprs(xmlElementLiteral.children); } public void visit(BLangXMLTextLiteral xmlTextLiteral) { analyzeExprs(xmlTextLiteral.textFragments); } public void visit(BLangXMLCommentLiteral xmlCommentLiteral) { analyzeExprs(xmlCommentLiteral.textFragments); } public void visit(BLangXMLProcInsLiteral xmlProcInsLiteral) { analyzeExprs(xmlProcInsLiteral.dataFragments); 
analyzeExpr(xmlProcInsLiteral.target); } public void visit(BLangXMLQuotedString xmlQuotedString) { analyzeExprs(xmlQuotedString.textFragments); } public void visit(BLangStringTemplateLiteral stringTemplateLiteral) { analyzeExprs(stringTemplateLiteral.exprs); } public void visit(BLangLambdaFunction bLangLambdaFunction) { /* ignore */ } public void visit(BLangArrowFunction bLangArrowFunction) { /* ignore */ } public void visit(BLangXMLAttributeAccess xmlAttributeAccessExpr) { analyzeExpr(xmlAttributeAccessExpr.expr); analyzeExpr(xmlAttributeAccessExpr.indexExpr); } public void visit(BLangIntRangeExpression intRangeExpression) { analyzeExpr(intRangeExpression.startExpr); analyzeExpr(intRangeExpression.endExpr); } public void visit(BLangValueType valueType) { /* ignore */ } public void visit(BLangArrayType arrayType) { /* ignore */ } public void visit(BLangBuiltInRefTypeNode builtInRefType) { /* ignore */ } public void visit(BLangConstrainedType constrainedType) { /* ignore */ } public void visit(BLangUserDefinedType userDefinedType) { analyseType(userDefinedType.type, userDefinedType.pos); } public void visit(BLangTupleTypeNode tupleTypeNode) { tupleTypeNode.memberTypeNodes.forEach(memberType -> analyzeNode(memberType, env)); } public void visit(BLangUnionTypeNode unionTypeNode) { unionTypeNode.memberTypeNodes.forEach(memberType -> analyzeNode(memberType, env)); } public void visit(BLangFunctionTypeNode functionTypeNode) { analyseType(functionTypeNode.type, functionTypeNode.pos); } @Override public void visit(BLangTableQueryExpression tableQueryExpression) { /* ignore */ } @Override public void visit(BLangRestArgsExpression bLangVarArgsExpression) { /* ignore */ } @Override public void visit(BLangNamedArgsExpression bLangNamedArgsExpression) { /* ignore */ } @Override public void visit(BLangMatchExpression bLangMatchExpression) { analyzeExpr(bLangMatchExpression.expr); List<BType> exprTypes; if (bLangMatchExpression.expr.type.tag == TypeTags.UNION) { BUnionType 
unionType = (BUnionType) bLangMatchExpression.expr.type; exprTypes = new ArrayList<>(unionType.memberTypes); } else { exprTypes = Lists.of(bLangMatchExpression.expr.type); } List<BType> unmatchedExprTypes = new ArrayList<>(); for (BType exprType : exprTypes) { boolean assignable = false; for (BLangMatchExprPatternClause pattern : bLangMatchExpression.patternClauses) { BType patternType = pattern.variable.type; if (exprType.tag == TypeTags.ERROR || patternType.tag == TypeTags.ERROR) { return; } assignable = this.types.isAssignable(exprType, patternType); if (assignable) { pattern.matchedTypesDirect.add(exprType); break; } else if (exprType.tag == TypeTags.ANY) { pattern.matchedTypesIndirect.add(exprType); } else if (exprType.tag == TypeTags.JSON && this.types.isAssignable(patternType, exprType)) { pattern.matchedTypesIndirect.add(exprType); } else if ((exprType.tag == TypeTags.OBJECT || exprType.tag == TypeTags.RECORD) && this.types.isAssignable(patternType, exprType)) { pattern.matchedTypesIndirect.add(exprType); } else if (exprType.tag == TypeTags.BYTE && patternType.tag == TypeTags.INT) { pattern.matchedTypesDirect.add(exprType); break; } else { } } if (!assignable && !this.types.isAssignable(exprType, bLangMatchExpression.type)) { unmatchedExprTypes.add(exprType); } } if (!unmatchedExprTypes.isEmpty()) { dlog.error(bLangMatchExpression.pos, DiagnosticCode.MATCH_STMT_CANNOT_GUARANTEE_A_MATCHING_PATTERN, unmatchedExprTypes); } boolean matchedPatternsAvailable = false; for (int i = bLangMatchExpression.patternClauses.size() - 1; i >= 0; i--) { BLangMatchExprPatternClause pattern = bLangMatchExpression.patternClauses.get(i); if (pattern.matchedTypesDirect.isEmpty() && pattern.matchedTypesIndirect.isEmpty()) { if (matchedPatternsAvailable) { dlog.error(pattern.pos, DiagnosticCode.MATCH_STMT_UNMATCHED_PATTERN); } else { dlog.error(pattern.pos, DiagnosticCode.MATCH_STMT_UNREACHABLE_PATTERN); } } else { matchedPatternsAvailable = true; } } } @Override public void 
visit(BLangCheckedExpr checkedExpr) { } private <E extends BLangExpression> void analyzeExpr(E node) { if (node == null) { return; } BLangNode myParent = parent; node.parent = parent; parent = node; node.accept(this); parent = myParent; checkAccess(node); } @Override public void visit(BLangScope scopeNode) { this.checkStatementExecutionValidity(scopeNode); scopeNode.getScopeBody().accept(this); this.resetLastStatement(); visit(scopeNode.compensationFunction); } @Override public void visit(BLangCompensate compensateNode) { this.checkStatementExecutionValidity(compensateNode); } /** * This method checks for private symbols being accessed or used outside of package and|or private symbols being * used in public fields of objects/records and will fail those occurrences. * * @param node expression node to analyse */ private <E extends BLangExpression> void checkAccess(E node) { if (node.type != null) { checkAccessSymbol(node.type.tsymbol, node.pos); } if (node instanceof BLangInvocation) { BLangInvocation bLangInvocation = (BLangInvocation) node; checkAccessSymbol(bLangInvocation.symbol, bLangInvocation.pos); } } private void checkAccessSymbol(BSymbol symbol, DiagnosticPos position) { if (symbol == null) { return; } if (env.enclPkg.symbol.pkgID != symbol.pkgID && Symbols.isPrivate(symbol)) { dlog.error(position, DiagnosticCode.ATTEMPT_REFER_NON_ACCESSIBLE_SYMBOL, symbol.name); } } private <E extends BLangExpression> void analyzeExprs(List<E> nodeList) { for (int i = 0; i < nodeList.size(); i++) { nodeList.get(i).accept(this); } } private void initNewWorkerActionSystem() { this.workerActionSystemStack.push(new WorkerActionSystem()); } private void finalizeCurrentWorkerActionSystem() { WorkerActionSystem was = this.workerActionSystemStack.pop(); this.validateWorkerInteractions(was); } private static boolean isWorkerSend(BLangStatement action) { return action.getKind() == NodeKind.WORKER_SEND; } private static boolean isWorkerForkSend(BLangStatement action) { return 
((BLangWorkerSend) action).isForkJoinSend; } private String extractWorkerId(BLangStatement action) { if (isWorkerSend(action)) { return ((BLangWorkerSend) action).workerIdentifier.value; } else { return ((BLangWorkerReceive) action).workerIdentifier.value; } } private void validateWorkerInteractions(WorkerActionSystem workerActionSystem) { this.validateForkJoinSendsToFork(workerActionSystem); BLangStatement currentAction; WorkerActionStateMachine currentSM; String currentWorkerId; boolean systemRunning; do { systemRunning = false; for (Map.Entry<String, WorkerActionStateMachine> entry : workerActionSystem.entrySet()) { currentWorkerId = entry.getKey(); currentSM = entry.getValue(); if (currentSM.done()) { continue; } currentAction = currentSM.currentAction(); if (isWorkerSend(currentAction)) { if (isWorkerForkSend(currentAction)) { currentSM.next(); systemRunning = true; } else { WorkerActionStateMachine otherSM = workerActionSystem.get(this.extractWorkerId(currentAction)); if (otherSM.currentIsReceive(currentWorkerId)) { this.validateWorkerActionParameters((BLangWorkerSend) currentAction, (BLangWorkerReceive) otherSM.currentAction()); otherSM.next(); currentSM.next(); systemRunning = true; } } } } } while (systemRunning); if (!workerActionSystem.everyoneDone()) { this.reportInvalidWorkerInteractionDiagnostics(workerActionSystem); } } private void validateForkJoinSendsToFork(WorkerActionSystem workerActionSystem) { for (Map.Entry<String, WorkerActionStateMachine> entry : workerActionSystem.entrySet()) { this.validateForkJoinSendsToFork(entry.getValue()); } } private void validateForkJoinSendsToFork(WorkerActionStateMachine sm) { boolean sentToFork = false; for (BLangStatement action : sm.actions) { if (isWorkerSend(action) && isWorkerForkSend(action)) { if (sentToFork) { this.dlog.error(action.pos, DiagnosticCode.INVALID_MULTIPLE_FORK_JOIN_SEND); } else { sentToFork = true; } } } } private void reportInvalidWorkerInteractionDiagnostics(WorkerActionSystem 
workerActionSystem) { this.dlog.error(workerActionSystem.getRootPosition(), DiagnosticCode.INVALID_WORKER_INTERACTION, workerActionSystem.toString()); } private void validateWorkerActionParameters(BLangWorkerSend send, BLangWorkerReceive receive) { this.typeChecker.checkExpr(send.expr, send.env, receive.expr.type); } private boolean checkNextBreakValidityInTransaction() { return !this.loopWithintransactionCheckStack.peek() && transactionCount > 0; } private boolean checkReturnValidityInTransaction() { return (this.returnWithintransactionCheckStack.empty() || !this.returnWithintransactionCheckStack.peek()) && transactionCount > 0; } private boolean isValidTransactionBlock() { return (this.transactionWithinHandlerCheckStack.empty() || !this.transactionWithinHandlerCheckStack.peek()) && !this.withinRetryBlock; } private void validateMainFunction(BLangFunction funcNode) { if (!MAIN_FUNCTION_NAME.equals(funcNode.name.value)) { return; } if (!Symbols.isPublic(funcNode.symbol)) { this.dlog.error(funcNode.pos, DiagnosticCode.MAIN_SHOULD_BE_PUBLIC); } if (!(funcNode.symbol.retType.tag == TypeTags.NIL || funcNode.symbol.retType.tag == TypeTags.INT)) { this.dlog.error(funcNode.returnTypeNode.pos, DiagnosticCode.INVALID_RETURN_WITH_MAIN, funcNode.symbol.retType); } } private void checkDuplicateNamedArgs(List<BLangExpression> args) { List<BLangIdentifier> existingArgs = new ArrayList<>(); args.forEach(arg -> { BLangNamedArgsExpression namedArg = (BLangNamedArgsExpression) arg; if (existingArgs.contains(namedArg.name)) { dlog.error(namedArg.pos, DiagnosticCode.DUPLICATE_NAMED_ARGS, namedArg.name); } existingArgs.add(namedArg.name); }); } /** * This class contains the state machines for a set of workers. 
*/ private static class WorkerActionSystem { public Map<String, WorkerActionStateMachine> workerActionStateMachines = new LinkedHashMap<>(); private WorkerActionStateMachine currentSM; private String currentWorkerId; public void startWorkerActionStateMachine(String workerId, DiagnosticPos pos) { this.currentWorkerId = workerId; this.currentSM = new WorkerActionStateMachine(pos); } public void endWorkerActionStateMachine() { this.workerActionStateMachines.put(this.currentWorkerId, this.currentSM); } public void addWorkerAction(BLangStatement action) { this.currentSM.actions.add(action); } public WorkerActionStateMachine get(String workerId) { return this.workerActionStateMachines.get(workerId); } public Set<Map.Entry<String, WorkerActionStateMachine>> entrySet() { return this.workerActionStateMachines.entrySet(); } public boolean everyoneDone() { return this.workerActionStateMachines.values().stream().allMatch(WorkerActionStateMachine::done); } public DiagnosticPos getRootPosition() { return this.workerActionStateMachines.values().iterator().next().pos; } @Override public String toString() { return this.workerActionStateMachines.toString(); } } /** * This class represents a state machine to maintain the state of the send/receive * actions of a worker. */ private static class WorkerActionStateMachine { private static final String WORKER_SM_FINISHED = "FINISHED"; public int currentState; public List<BLangStatement> actions = new ArrayList<>(); public DiagnosticPos pos; public WorkerActionStateMachine(DiagnosticPos pos) { this.pos = pos; } public boolean done() { return this.actions.size() == this.currentState; } public BLangStatement currentAction() { return this.actions.get(this.currentState); } public boolean currentIsReceive(String sourceWorkerId) { if (this.done()) { return false; } BLangStatement action = this.currentAction(); return !isWorkerSend(action) && ((BLangWorkerReceive) action). 
workerIdentifier.value.equals(sourceWorkerId); } public void next() { this.currentState++; } @Override public String toString() { if (this.done()) { return WORKER_SM_FINISHED; } else { BLangStatement action = this.currentAction(); if (isWorkerSend(action)) { return ((BLangWorkerSend) action).toActionString(); } else { return ((BLangWorkerReceive) action).toActionString(); } } } } }
An underscore is already included in "label".
public LoadResponse loadBatch(StringBuilder sb, boolean slowLog) { Calendar calendar = Calendar.getInstance(); String label = String.format("_log_%s%02d%02d_%02d%02d%02d_%s", calendar.get(Calendar.YEAR), calendar.get(Calendar.MONTH) + 1, calendar.get(Calendar.DAY_OF_MONTH), calendar.get(Calendar.HOUR_OF_DAY), calendar.get(Calendar.MINUTE), calendar.get(Calendar.SECOND), feIdentity); HttpURLConnection feConn = null; HttpURLConnection beConn = null; try { if (slowLog) { label = "slow" + label; feConn = getConnection(slowLogLoadUrlStr, label); } else { label = "audit" + label; feConn = getConnection(auditLogLoadUrlStr, label); } int status = feConn.getResponseCode(); if (status != 307) { throw new Exception("status is not TEMPORARY_REDIRECT 307, status: " + status + ", response: " + getContent(feConn) + ", request is: " + toCurl(feConn)); } String location = feConn.getHeaderField("Location"); if (location == null) { throw new Exception("redirect location is null"); } beConn = getConnection(location, label); BufferedOutputStream bos = new BufferedOutputStream(beConn.getOutputStream()); bos.write(sb.toString().getBytes()); bos.close(); status = beConn.getResponseCode(); String respMsg = beConn.getResponseMessage(); String response = getContent(beConn); LOG.info("AuditLoader plugin load with label: {}, response code: {}, msg: {}, content: {}", label, status, respMsg, response); return new LoadResponse(status, respMsg, response); } catch (Exception e) { e.printStackTrace(); String err = "failed to load audit via AuditLoader plugin with label: " + label; LOG.warn(err, e); return new LoadResponse(-1, e.getMessage(), err); } finally { if (feConn != null) { feConn.disconnect(); } if (beConn != null) { beConn.disconnect(); } } }
label = "slow" + label;
public LoadResponse loadBatch(StringBuilder sb, boolean slowLog) { Calendar calendar = Calendar.getInstance(); String label = String.format("_log_%s%02d%02d_%02d%02d%02d_%s", calendar.get(Calendar.YEAR), calendar.get(Calendar.MONTH) + 1, calendar.get(Calendar.DAY_OF_MONTH), calendar.get(Calendar.HOUR_OF_DAY), calendar.get(Calendar.MINUTE), calendar.get(Calendar.SECOND), feIdentity); HttpURLConnection feConn = null; HttpURLConnection beConn = null; try { if (slowLog) { label = "slow" + label; feConn = getConnection(slowLogLoadUrlStr, label); } else { label = "audit" + label; feConn = getConnection(auditLogLoadUrlStr, label); } int status = feConn.getResponseCode(); if (status != 307) { throw new Exception("status is not TEMPORARY_REDIRECT 307, status: " + status + ", response: " + getContent(feConn) + ", request is: " + toCurl(feConn)); } String location = feConn.getHeaderField("Location"); if (location == null) { throw new Exception("redirect location is null"); } beConn = getConnection(location, label); BufferedOutputStream bos = new BufferedOutputStream(beConn.getOutputStream()); bos.write(sb.toString().getBytes()); bos.close(); status = beConn.getResponseCode(); String respMsg = beConn.getResponseMessage(); String response = getContent(beConn); LOG.info("AuditLoader plugin load with label: {}, response code: {}, msg: {}, content: {}", label, status, respMsg, response); return new LoadResponse(status, respMsg, response); } catch (Exception e) { e.printStackTrace(); String err = "failed to load audit via AuditLoader plugin with label: " + label; LOG.warn(err, e); return new LoadResponse(-1, e.getMessage(), err); } finally { if (feConn != null) { feConn.disconnect(); } if (beConn != null) { beConn.disconnect(); } } }
class DorisStreamLoader { private final static Logger LOG = LogManager.getLogger(DorisStreamLoader.class); private static String loadUrlPattern = "http: private String hostPort; private String db; private String auditLogTbl; private String slowLogTbl; private String user; private String passwd; private String auditLogLoadUrlStr; private String slowLogLoadUrlStr; private String authEncoding; private String feIdentity; public DorisStreamLoader(AuditLoaderPlugin.AuditLoaderConf conf) { this.hostPort = conf.frontendHostPort; this.db = conf.database; this.auditLogTbl = conf.auditLogTable; this.slowLogTbl = conf.slowLogTable; this.user = conf.user; this.passwd = conf.password; this.auditLogLoadUrlStr = String.format(loadUrlPattern, hostPort, db, auditLogTbl); this.slowLogLoadUrlStr = String.format(loadUrlPattern, hostPort, db, slowLogTbl); this.authEncoding = Base64.getEncoder().encodeToString(String.format("%s:%s", user, passwd).getBytes(StandardCharsets.UTF_8)); this.feIdentity = conf.feIdentity.replaceAll("\\.", "_"); } private HttpURLConnection getConnection(String urlStr, String label) throws IOException { URL url = new URL(urlStr); HttpURLConnection conn = (HttpURLConnection) url.openConnection(); conn.setInstanceFollowRedirects(false); conn.setRequestMethod("PUT"); conn.setRequestProperty("Authorization", "Basic " + authEncoding); conn.addRequestProperty("Expect", "100-continue"); conn.addRequestProperty("Content-Type", "text/plain; charset=UTF-8"); conn.addRequestProperty("label", label); conn.addRequestProperty("max_filter_ratio", "1.0"); conn.addRequestProperty("columns", "query_id, `time`, client_ip, user, db, state, query_time, scan_bytes," + " scan_rows, return_rows, stmt_id, is_query, frontend_ip, cpu_time_ms, sql_hash, sql_digest, peak_memory_bytes, stmt"); conn.setDoOutput(true); conn.setDoInput(true); return conn; } private String toCurl(HttpURLConnection conn) { StringBuilder sb = new StringBuilder("curl -v "); sb.append("-X 
").append(conn.getRequestMethod()).append(" \\\n "); sb.append("-H \"").append("Authorization\":").append("\"Basic " + authEncoding).append("\" \\\n "); sb.append("-H \"").append("Expect\":").append("\"100-continue\" \\\n "); sb.append("-H \"").append("Content-Type\":").append("\"text/plain; charset=UTF-8\" \\\n "); sb.append("-H \"").append("max_filter_ratio\":").append("\"1.0\" \\\n "); sb.append("-H \"").append("columns\":").append("\"query_id, time, client_ip, user, db, state, query_time," + " scan_bytes, scan_rows, return_rows, stmt_id, is_query, frontend_ip, cpu_time_ms, sql_hash," + " sql_digest, peak_memory_bytes, stmt\" \\\n "); sb.append("\"").append(conn.getURL()).append("\""); return sb.toString(); } private String getContent(HttpURLConnection conn) { BufferedReader br = null; StringBuilder response = new StringBuilder(); String line; try { if (100 <= conn.getResponseCode() && conn.getResponseCode() <= 399) { br = new BufferedReader(new InputStreamReader(conn.getInputStream())); } else { br = new BufferedReader(new InputStreamReader(conn.getErrorStream())); } while ((line = br.readLine()) != null) { response.append(line); } } catch (IOException e) { LOG.warn("get content error,", e); } return response.toString(); } public static class LoadResponse { public int status; public String respMsg; public String respContent; public LoadResponse(int status, String respMsg, String respContent) { this.status = status; this.respMsg = respMsg; this.respContent = respContent; } @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("status: ").append(status); sb.append(", resp msg: ").append(respMsg); sb.append(", resp content: ").append(respContent); return sb.toString(); } } }
class DorisStreamLoader { private final static Logger LOG = LogManager.getLogger(DorisStreamLoader.class); private static String loadUrlPattern = "http: private String hostPort; private String db; private String auditLogTbl; private String slowLogTbl; private String user; private String passwd; private String auditLogLoadUrlStr; private String slowLogLoadUrlStr; private String authEncoding; private String feIdentity; public DorisStreamLoader(AuditLoaderPlugin.AuditLoaderConf conf) { this.hostPort = conf.frontendHostPort; this.db = conf.database; this.auditLogTbl = conf.auditLogTable; this.slowLogTbl = conf.slowLogTable; this.user = conf.user; this.passwd = conf.password; this.auditLogLoadUrlStr = String.format(loadUrlPattern, hostPort, db, auditLogTbl); this.slowLogLoadUrlStr = String.format(loadUrlPattern, hostPort, db, slowLogTbl); this.authEncoding = Base64.getEncoder().encodeToString(String.format("%s:%s", user, passwd).getBytes(StandardCharsets.UTF_8)); this.feIdentity = conf.feIdentity.replaceAll("\\.", "_"); } private HttpURLConnection getConnection(String urlStr, String label) throws IOException { URL url = new URL(urlStr); HttpURLConnection conn = (HttpURLConnection) url.openConnection(); conn.setInstanceFollowRedirects(false); conn.setRequestMethod("PUT"); conn.setRequestProperty("Authorization", "Basic " + authEncoding); conn.addRequestProperty("Expect", "100-continue"); conn.addRequestProperty("Content-Type", "text/plain; charset=UTF-8"); conn.addRequestProperty("label", label); conn.addRequestProperty("max_filter_ratio", "1.0"); conn.addRequestProperty("columns", "query_id, `time`, client_ip, user, db, state, query_time, scan_bytes," + " scan_rows, return_rows, stmt_id, is_query, frontend_ip, cpu_time_ms, sql_hash, sql_digest, peak_memory_bytes, stmt"); conn.setDoOutput(true); conn.setDoInput(true); return conn; } private String toCurl(HttpURLConnection conn) { StringBuilder sb = new StringBuilder("curl -v "); sb.append("-X 
").append(conn.getRequestMethod()).append(" \\\n "); sb.append("-H \"").append("Authorization\":").append("\"Basic " + authEncoding).append("\" \\\n "); sb.append("-H \"").append("Expect\":").append("\"100-continue\" \\\n "); sb.append("-H \"").append("Content-Type\":").append("\"text/plain; charset=UTF-8\" \\\n "); sb.append("-H \"").append("max_filter_ratio\":").append("\"1.0\" \\\n "); sb.append("-H \"").append("columns\":").append("\"query_id, time, client_ip, user, db, state, query_time," + " scan_bytes, scan_rows, return_rows, stmt_id, is_query, frontend_ip, cpu_time_ms, sql_hash," + " sql_digest, peak_memory_bytes, stmt\" \\\n "); sb.append("\"").append(conn.getURL()).append("\""); return sb.toString(); } private String getContent(HttpURLConnection conn) { BufferedReader br = null; StringBuilder response = new StringBuilder(); String line; try { if (100 <= conn.getResponseCode() && conn.getResponseCode() <= 399) { br = new BufferedReader(new InputStreamReader(conn.getInputStream())); } else { br = new BufferedReader(new InputStreamReader(conn.getErrorStream())); } while ((line = br.readLine()) != null) { response.append(line); } } catch (IOException e) { LOG.warn("get content error,", e); } return response.toString(); } public static class LoadResponse { public int status; public String respMsg; public String respContent; public LoadResponse(int status, String respMsg, String respContent) { this.status = status; this.respMsg = respMsg; this.respContent = respContent; } @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("status: ").append(status); sb.append(", resp msg: ").append(respMsg); sb.append(", resp content: ").append(respContent); return sb.toString(); } } }
``` This is unlikely to work correctly. ``` Are you sure about this? I think it should be possible to run this without a context. Not sure why we had that issue only for MySQL and only on CI.
public HealthCheckResponse call() { HealthCheckResponseBuilder builder = HealthCheckResponse.named(healthCheckResponseName); builder.up(); for (Map.Entry<String, Pool> pgPoolEntry : pools.entrySet()) { final String dataSourceName = pgPoolEntry.getKey(); final Pool pgPool = pgPoolEntry.getValue(); try { CompletableFuture<Void> databaseConnectionAttempt = new CompletableFuture<>(); Context context = Vertx.currentContext(); if (context != null) { log.debug("Run health check on the current Vert.x context"); context.runOnContext(v -> { pgPool.query(healthCheckSQL) .execute(ar -> { checkFailure(ar, builder, dataSourceName); databaseConnectionAttempt.complete(null); }); }); } else { log.warn("Vert.x context unavailable to perform healthcheck of reactive datasource `" + dataSourceName + "`. This is unlikely to work correctly."); pgPool.query(healthCheckSQL) .execute(ar -> { checkFailure(ar, builder, dataSourceName); databaseConnectionAttempt.complete(null); }); } databaseConnectionAttempt.get(20, TimeUnit.SECONDS); builder.withData(dataSourceName, "up"); } catch (RuntimeException | ExecutionException exception) { operationsError(dataSourceName, exception); builder.down(); builder.withData(dataSourceName, "down - connection failed: " + exception.getMessage()); } catch (InterruptedException e) { log.warn("Interrupted while obtaining database connection for healthcheck of datasource " + dataSourceName); Thread.currentThread().interrupt(); return builder.build(); } catch (TimeoutException e) { log.warn("Timed out while waiting for an available connection to perform healthcheck of datasource " + dataSourceName); builder.down(); builder.withData(dataSourceName, "timed out, unable to obtain connection to perform healthcheck of datasource"); } } return builder.build(); }
log.warn("Vert.x context unavailable to perform healthcheck of reactive datasource `" + dataSourceName + "`. This is unlikely to work correctly.");
public HealthCheckResponse call() { HealthCheckResponseBuilder builder = HealthCheckResponse.named(healthCheckResponseName); builder.up(); for (Map.Entry<String, Pool> pgPoolEntry : pools.entrySet()) { final String dataSourceName = pgPoolEntry.getKey(); final Pool pgPool = pgPoolEntry.getValue(); try { CompletableFuture<Void> databaseConnectionAttempt = new CompletableFuture<>(); Context context = Vertx.currentContext(); if (context != null) { log.debug("Run health check on the current Vert.x context"); context.runOnContext(v -> { pgPool.query(healthCheckSQL) .execute(ar -> { checkFailure(ar, builder, dataSourceName); databaseConnectionAttempt.complete(null); }); }); } else { log.warn("Vert.x context unavailable to perform healthcheck of reactive datasource `" + dataSourceName + "`. This is unlikely to work correctly."); pgPool.query(healthCheckSQL) .execute(ar -> { checkFailure(ar, builder, dataSourceName); databaseConnectionAttempt.complete(null); }); } databaseConnectionAttempt.get(20, TimeUnit.SECONDS); builder.withData(dataSourceName, "up"); } catch (RuntimeException | ExecutionException exception) { operationsError(dataSourceName, exception); builder.down(); builder.withData(dataSourceName, "down - connection failed: " + exception.getMessage()); } catch (InterruptedException e) { log.warn("Interrupted while obtaining database connection for healthcheck of datasource " + dataSourceName); Thread.currentThread().interrupt(); return builder.build(); } catch (TimeoutException e) { log.warn("Timed out while waiting for an available connection to perform healthcheck of datasource " + dataSourceName); builder.down(); builder.withData(dataSourceName, "timed out, unable to obtain connection to perform healthcheck of datasource"); } } return builder.build(); }
class ReactiveDatasourceHealthCheck implements HealthCheck { private static final Logger log = Logger.getLogger(ReactiveDatasourceHealthCheck.class); private final Map<String, Pool> pools = new ConcurrentHashMap<>(); private final String healthCheckResponseName; private final String healthCheckSQL; protected ReactiveDatasourceHealthCheck(String healthCheckResponseName, String healthCheckSQL) { this.healthCheckResponseName = healthCheckResponseName; this.healthCheckSQL = healthCheckSQL; } protected void addPool(String name, Pool p) { final Pool previous = pools.put(name, p); if (previous != null) { throw new IllegalStateException("Duplicate pool name: " + name); } } @Override private void operationsError(final String datasourceName, final Throwable cause) { log.warn("Error obtaining database connection for healthcheck of datasource '" + datasourceName + '\'', cause); } private void checkFailure(AsyncResult<RowSet<Row>> ar, HealthCheckResponseBuilder builder, String dataSourceName) { if (ar.failed()) { operationsError(dataSourceName, ar.cause()); builder.down(); builder.withData(dataSourceName, "down - connection failed: " + ar.cause().getMessage()); } } protected String getPoolName(Bean<?> bean) { for (Object qualifier : bean.getQualifiers()) { if (qualifier instanceof ReactiveDataSource) { return ((ReactiveDataSource) qualifier).value(); } } return DataSourceUtil.DEFAULT_DATASOURCE_NAME; } }
class ReactiveDatasourceHealthCheck implements HealthCheck { private static final Logger log = Logger.getLogger(ReactiveDatasourceHealthCheck.class); private final Map<String, Pool> pools = new ConcurrentHashMap<>(); private final String healthCheckResponseName; private final String healthCheckSQL; protected ReactiveDatasourceHealthCheck(String healthCheckResponseName, String healthCheckSQL) { this.healthCheckResponseName = healthCheckResponseName; this.healthCheckSQL = healthCheckSQL; } protected void addPool(String name, Pool p) { final Pool previous = pools.put(name, p); if (previous != null) { throw new IllegalStateException("Duplicate pool name: " + name); } } @Override private void operationsError(final String datasourceName, final Throwable cause) { log.warn("Error obtaining database connection for healthcheck of datasource '" + datasourceName + '\'', cause); } private void checkFailure(AsyncResult<RowSet<Row>> ar, HealthCheckResponseBuilder builder, String dataSourceName) { if (ar.failed()) { operationsError(dataSourceName, ar.cause()); builder.down(); builder.withData(dataSourceName, "down - connection failed: " + ar.cause().getMessage()); } } protected String getPoolName(Bean<?> bean) { for (Object qualifier : bean.getQualifiers()) { if (qualifier instanceof ReactiveDataSource) { return ((ReactiveDataSource) qualifier).value(); } } return DataSourceUtil.DEFAULT_DATASOURCE_NAME; } }
Wondering if this should be somehow connected to `TaskManagerServices#LOCAL_STATE_SUB_DIRECTORY_ROOT`.
private WorkingDirectory(File root) throws IOException { this.root = root; createDirectory(root); this.tmp = new File(root, "tmp"); createDirectory(tmp); FileUtils.cleanDirectory(tmp); localState = new File(root, "localState"); createDirectory(localState); }
localState = new File(root, "localState");
private WorkingDirectory(File root) throws IOException { this.root = root; createDirectory(root); this.tmp = new File(root, "tmp"); createDirectory(tmp); FileUtils.cleanDirectory(tmp); localState = new File(root, "localState"); createDirectory(localState); }
class WorkingDirectory { private final File root; private final File tmp; private final File localState; private static void createDirectory(File directory) throws IOException { if (!directory.mkdirs() && !directory.exists()) { throw new IOException( String.format("Could not create the working directory %s.", directory)); } } public void delete() throws IOException { FileUtils.deleteDirectory(root); } public File getTmpDirectory() { return tmp; } public File getLocalStateDirectory() { return localState; } public WorkingDirectory createSubWorkingDirectory(String directoryName) throws IOException { return createIn(root, directoryName); } @Override public String toString() { return String.format("WorkingDirectory(%s)", root.toString()); } public static WorkingDirectory create(File workingDirectory) throws IOException { return new WorkingDirectory(workingDirectory); } public static WorkingDirectory createIn(File parentDirectory, String directoryName) throws IOException { return create(new File(parentDirectory, directoryName)); } }
class WorkingDirectory { private final File root; private final File tmp; private final File localState; private static void createDirectory(File directory) throws IOException { if (!directory.mkdirs() && !directory.exists()) { throw new IOException( String.format("Could not create the working directory %s.", directory)); } } public void delete() throws IOException { FileUtils.deleteDirectory(root); } public File getTmpDirectory() { return tmp; } public File getLocalStateDirectory() { return localState; } public WorkingDirectory createSubWorkingDirectory(String directoryName) throws IOException { return createIn(root, directoryName); } @Override public String toString() { return String.format("WorkingDirectory(%s)", root.toString()); } public static WorkingDirectory create(File workingDirectory) throws IOException { return new WorkingDirectory(workingDirectory); } public static WorkingDirectory createIn(File parentDirectory, String directoryName) throws IOException { return create(new File(parentDirectory, directoryName)); } }
we really don't want pretty-printed bodies. Structured logs are exported somewhere where pretty printing is not necessary or, when sent to file or stdout, multi-line prettiness becomes ungreppable and unparseable.
public HttpLoggingPolicy(HttpLogOptions httpLogOptions) { if (httpLogOptions == null) { this.httpLogDetailLevel = HttpLogDetailLevel.ENVIRONMENT_HTTP_LOG_DETAIL_LEVEL; this.allowedHeaderNames = HttpLogOptions.DEFAULT_HEADERS_ALLOWLIST .stream() .map(headerName -> headerName.toLowerCase(Locale.ROOT)) .collect(Collectors.toSet()); this.allowedQueryParameterNames = HttpLogOptions.DEFAULT_QUERY_PARAMS_ALLOWLIST .stream() .map(queryParamName -> queryParamName.toLowerCase(Locale.ROOT)) .collect(Collectors.toSet()); this.prettyPrintBody = false; this.requestLogger = new DefaultHttpRequestLogger(); this.responseLogger = new DefaultHttpResponseLogger(); } else { this.httpLogDetailLevel = httpLogOptions.getLogLevel(); this.allowedHeaderNames = httpLogOptions.getAllowedHeaderNames() .stream() .map(headerName -> headerName.toLowerCase(Locale.ROOT)) .collect(Collectors.toSet()); this.allowedQueryParameterNames = httpLogOptions.getAllowedQueryParamNames() .stream() .map(queryParamName -> queryParamName.toLowerCase(Locale.ROOT)) .collect(Collectors.toSet()); this.prettyPrintBody = httpLogOptions.isPrettyPrintBody(); this.requestLogger = (httpLogOptions.getRequestLogger() == null) ? new DefaultHttpRequestLogger() : httpLogOptions.getRequestLogger(); this.responseLogger = (httpLogOptions.getResponseLogger() == null) ? new DefaultHttpResponseLogger() : httpLogOptions.getResponseLogger(); } }
this.prettyPrintBody = false;
public HttpLoggingPolicy(HttpLogOptions httpLogOptions) { if (httpLogOptions == null) { this.httpLogDetailLevel = HttpLogDetailLevel.ENVIRONMENT_HTTP_LOG_DETAIL_LEVEL; this.allowedHeaderNames = HttpLogOptions.DEFAULT_HEADERS_ALLOWLIST .stream() .map(headerName -> headerName.toLowerCase(Locale.ROOT)) .collect(Collectors.toSet()); this.allowedQueryParameterNames = HttpLogOptions.DEFAULT_QUERY_PARAMS_ALLOWLIST .stream() .map(queryParamName -> queryParamName.toLowerCase(Locale.ROOT)) .collect(Collectors.toSet()); this.prettyPrintBody = false; this.requestLogger = new DefaultHttpRequestLogger(); this.responseLogger = new DefaultHttpResponseLogger(); } else { this.httpLogDetailLevel = httpLogOptions.getLogLevel(); this.allowedHeaderNames = httpLogOptions.getAllowedHeaderNames() .stream() .map(headerName -> headerName.toLowerCase(Locale.ROOT)) .collect(Collectors.toSet()); this.allowedQueryParameterNames = httpLogOptions.getAllowedQueryParamNames() .stream() .map(queryParamName -> queryParamName.toLowerCase(Locale.ROOT)) .collect(Collectors.toSet()); this.prettyPrintBody = httpLogOptions.isPrettyPrintBody(); this.requestLogger = (httpLogOptions.getRequestLogger() == null) ? new DefaultHttpRequestLogger() : httpLogOptions.getRequestLogger(); this.responseLogger = (httpLogOptions.getResponseLogger() == null) ? new DefaultHttpResponseLogger() : httpLogOptions.getResponseLogger(); } }
class HttpLoggingPolicy implements HttpPipelinePolicy { private static final ObjectMapperShim PRETTY_PRINTER = ObjectMapperShim.createPrettyPrintMapper(); private static final int MAX_BODY_LOG_SIZE = 1024 * 16; private static final String REDACTED_PLACEHOLDER = "REDACTED"; private static final int LOGGER_CACHE_MAX_SIZE = 1000; private static final Map<String, ClientLogger> CALLER_METHOD_LOGGER_CACHE = new ConcurrentHashMap<>(); private static final ClientLogger LOGGER = new ClientLogger(HttpLoggingPolicy.class); private final HttpLogDetailLevel httpLogDetailLevel; private final Set<String> allowedHeaderNames; private final Set<String> allowedQueryParameterNames; private final boolean prettyPrintBody; private final HttpRequestLogger requestLogger; private final HttpResponseLogger responseLogger; /** * Key for {@link Context} to pass request retry count metadata for logging. */ public static final String RETRY_COUNT_CONTEXT = "requestRetryCount"; private static final String REQUEST_LOG_MESSAGE = "HTTP request"; private static final String RESPONSE_LOG_MESSAGE = "HTTP response"; /** * Creates an HttpLoggingPolicy with the given log configurations. * * @param httpLogOptions The HTTP logging configuration options. 
*/ @Override public Mono<HttpResponse> process(HttpPipelineCallContext context, HttpPipelineNextPolicy next) { if (httpLogDetailLevel == HttpLogDetailLevel.NONE) { return next.process(); } final ClientLogger logger = getOrCreateMethodLogger((String) context.getData("caller-method").orElse("")); final long startNs = System.nanoTime(); return requestLogger.logRequest(logger, getRequestLoggingOptions(context)) .then(next.process()) .flatMap(response -> responseLogger.logResponse(logger, getResponseLoggingOptions(response, startNs, context))) .doOnError(throwable -> logger.warning("<-- HTTP FAILED: ", throwable)); } @Override public HttpResponse processSync(HttpPipelineCallContext context, HttpPipelineNextSyncPolicy next) { if (httpLogDetailLevel == HttpLogDetailLevel.NONE) { return next.processSync(); } final ClientLogger logger = getOrCreateMethodLogger((String) context.getData("caller-method").orElse("")); final long startNs = System.nanoTime(); requestLogger.logRequestSync(logger, getRequestLoggingOptions(context)); try { HttpResponse response = next.processSync(); if (response != null) { response = responseLogger.logResponseSync( logger, getResponseLoggingOptions(response, startNs, context)); } return response; } catch (RuntimeException e) { logger.warning("<-- HTTP FAILED: ", e); throw logger.logExceptionAsWarning(e); } } private HttpRequestLoggingContext getRequestLoggingOptions(HttpPipelineCallContext callContext) { return new HttpRequestLoggingContext(callContext.getHttpRequest(), callContext.getContext(), getRequestRetryCount(callContext.getContext())); } private HttpResponseLoggingContext getResponseLoggingOptions(HttpResponse httpResponse, long startNs, HttpPipelineCallContext callContext) { return new HttpResponseLoggingContext(httpResponse, Duration.ofNanos(System.nanoTime() - startNs), callContext.getContext(), getRequestRetryCount(callContext.getContext())); } private final class DefaultHttpRequestLogger implements HttpRequestLogger { @Override public 
Mono<Void> logRequest(ClientLogger logger, HttpRequestLoggingContext loggingOptions) { final LogLevel logLevel = getLogLevel(loggingOptions); if (logger.canLogAtLevel(logLevel)) { log(logLevel, logger, loggingOptions); } return Mono.empty(); } @Override public void logRequestSync(ClientLogger logger, HttpRequestLoggingContext loggingOptions) { final LogLevel logLevel = getLogLevel(loggingOptions); if (logger.canLogAtLevel(logLevel)) { log(logLevel, logger, loggingOptions); } } private void log(LogLevel logLevel, ClientLogger logger, HttpRequestLoggingContext loggingOptions) { final HttpRequest request = loggingOptions.getHttpRequest(); LoggingEventBuilder logBuilder = getLogBuilder(logLevel, logger); if (httpLogDetailLevel.shouldLogUrl()) { logBuilder .addKeyValue(LoggingKeys.HTTP_METHOD_KEY, request.getHttpMethod()) .addKeyValue(LoggingKeys.URL_KEY, getRedactedUrl(request.getUrl(), allowedQueryParameterNames)); Integer retryCount = loggingOptions.getTryCount(); if (retryCount != null) { logBuilder.addKeyValue(LoggingKeys.TRY_COUNT_KEY, retryCount); } } if (httpLogDetailLevel.shouldLogHeaders() && logger.canLogAtLevel(LogLevel.INFORMATIONAL)) { addHeadersToLogMessage(allowedHeaderNames, request.getHeaders(), logBuilder); } if (request.getBody() == null) { logBuilder.addKeyValue(LoggingKeys.CONTENT_LENGTH_KEY, 0) .log(REQUEST_LOG_MESSAGE); return; } String contentType = request.getHeaders().getValue(HttpHeaderName.CONTENT_TYPE); long contentLength = getContentLength(logger, request.getHeaders()); logBuilder.addKeyValue(LoggingKeys.CONTENT_LENGTH_KEY, contentLength); if (httpLogDetailLevel.shouldLogBody() && shouldBodyBeLogged(contentType, contentLength)) { logBody(request, (int) contentLength, logBuilder, logger, contentType); return; } logBuilder.log(REQUEST_LOG_MESSAGE); } } private void logBody(HttpRequest request, int contentLength, LoggingEventBuilder logBuilder, ClientLogger logger, String contentType) { BinaryData data = request.getBodyAsBinaryData(); 
BinaryDataContent content = BinaryDataHelper.getContent(data); if (content instanceof StringContent || content instanceof ByteBufferContent || content instanceof SerializableContent || content instanceof ByteArrayContent) { logBody(logBuilder, logger, contentType, content.toString()); } else if (content instanceof InputStreamContent) { byte[] contentBytes = content.toBytes(); request.setBody(contentBytes); logBody(logBuilder, logger, contentType, new String(contentBytes, StandardCharsets.UTF_8)); } else { AccessibleByteArrayOutputStream stream = new AccessibleByteArrayOutputStream(contentLength); request.setBody(Flux.using(() -> stream, s -> content.toFluxByteBuffer() .doOnNext(byteBuffer -> { try { ImplUtils.writeByteBufferToStream(byteBuffer.duplicate(), s); } catch (IOException ex) { throw LOGGER.logExceptionAsError(new UncheckedIOException(ex)); } }), s -> logBody(logBuilder, logger, contentType, s.toString(StandardCharsets.UTF_8)))); } } private void logBody(LoggingEventBuilder logBuilder, ClientLogger logger, String contentType, String data) { logBuilder.addKeyValue(LoggingKeys.BODY_KEY, prettyPrintIfNeeded(logger, prettyPrintBody, contentType, data)) .log(REQUEST_LOG_MESSAGE); } private final class DefaultHttpResponseLogger implements HttpResponseLogger { @Override public Mono<HttpResponse> logResponse(ClientLogger logger, HttpResponseLoggingContext loggingOptions) { final LogLevel logLevel = getLogLevel(loggingOptions); final HttpResponse response = loggingOptions.getHttpResponse(); if (!logger.canLogAtLevel(logLevel)) { return Mono.just(response); } LoggingEventBuilder logBuilder = getLogBuilder(logLevel, logger); logContentLength(response, logBuilder); logUrl(loggingOptions, response, logBuilder); logHeaders(logger, response, logBuilder); if (httpLogDetailLevel.shouldLogBody()) { String contentTypeHeader = response.getHeaderValue(HttpHeaderName.CONTENT_TYPE); long contentLength = getContentLength(logger, response.getHeaders()); if 
(shouldBodyBeLogged(contentTypeHeader, contentLength)) { return Mono.just(new LoggingHttpResponse(response, logBuilder, logger, (int) contentLength, contentTypeHeader, prettyPrintBody)); } } logBuilder.log(RESPONSE_LOG_MESSAGE); return Mono.just(response); } private void logHeaders(ClientLogger logger, HttpResponse response, LoggingEventBuilder logBuilder) { if (httpLogDetailLevel.shouldLogHeaders() && logger.canLogAtLevel(LogLevel.INFORMATIONAL)) { addHeadersToLogMessage(allowedHeaderNames, response.getHeaders(), logBuilder); } } private void logUrl(HttpResponseLoggingContext loggingOptions, HttpResponse response, LoggingEventBuilder logBuilder) { if (httpLogDetailLevel.shouldLogUrl()) { logBuilder .addKeyValue(LoggingKeys.STATUS_CODE_KEY, response.getStatusCode()) .addKeyValue(LoggingKeys.URL_KEY, getRedactedUrl(response.getRequest().getUrl(), allowedQueryParameterNames)) .addKeyValue(LoggingKeys.DURATION_MS_KEY, loggingOptions.getResponseDuration().toMillis()); } } private void logContentLength(HttpResponse response, LoggingEventBuilder logBuilder) { String contentLengthString = response.getHeaderValue(HttpHeaderName.CONTENT_LENGTH); if (!CoreUtils.isNullOrEmpty(contentLengthString)) { logBuilder.addKeyValue(LoggingKeys.CONTENT_LENGTH_KEY, contentLengthString); } } @Override public HttpResponse logResponseSync(ClientLogger logger, HttpResponseLoggingContext loggingOptions) { final LogLevel logLevel = getLogLevel(loggingOptions); final HttpResponse response = loggingOptions.getHttpResponse(); if (!logger.canLogAtLevel(logLevel)) { return response; } LoggingEventBuilder logBuilder = getLogBuilder(logLevel, logger); logContentLength(response, logBuilder); logUrl(loggingOptions, response, logBuilder); logHeaders(logger, response, logBuilder); if (httpLogDetailLevel.shouldLogBody()) { String contentTypeHeader = response.getHeaderValue(HttpHeaderName.CONTENT_TYPE); long contentLength = getContentLength(logger, response.getHeaders()); if 
(shouldBodyBeLogged(contentTypeHeader, contentLength)) { return new LoggingHttpResponse(response, logBuilder, logger, (int) contentLength, contentTypeHeader, prettyPrintBody); } } logBuilder.log(RESPONSE_LOG_MESSAGE); return response; } } /* * Generates the redacted URL for logging. * * @param url URL where the request is being sent. * @return A URL with query parameters redacted based on configurations in this policy. */ private static String getRedactedUrl(URL url, Set<String> allowedQueryParameterNames) { String query = url.getQuery(); if (CoreUtils.isNullOrEmpty(query)) { return url.toString(); } UrlBuilder urlBuilder = ImplUtils.parseUrl(url, false); CoreUtils.parseQueryParameters(query).forEachRemaining(queryParam -> { if (allowedQueryParameterNames.contains(queryParam.getKey().toLowerCase(Locale.ROOT))) { urlBuilder.addQueryParameter(queryParam.getKey(), queryParam.getValue()); } else { urlBuilder.addQueryParameter(queryParam.getKey(), REDACTED_PLACEHOLDER); } }); return urlBuilder.toString(); } /* * Adds HTTP headers into the StringBuilder that is generating the log message. * * @param headers HTTP headers on the request or response. * @param sb StringBuilder that is generating the log message. * @param logLevel Log level the environment is configured to use. */ private static void addHeadersToLogMessage(Set<String> allowedHeaderNames, HttpHeaders headers, LoggingEventBuilder logBuilder) { for (HttpHeader header : headers) { String headerName = header.getName(); logBuilder.addKeyValue(headerName, allowedHeaderNames.contains(headerName.toLowerCase(Locale.ROOT)) ? header.getValue() : REDACTED_PLACEHOLDER); } } /* * Determines and attempts to pretty print the body if it is JSON. * * <p>The body is pretty printed if the Content-Type is JSON and the policy is configured to pretty print JSON.</p> * * @param logger Logger used to log a warning if the body fails to pretty print as JSON. * @param contentType Content-Type header. 
* @param body Body of the request or response. * @return The body pretty printed if it is JSON, otherwise the unmodified body. */ private static String prettyPrintIfNeeded(ClientLogger logger, boolean prettyPrintBody, String contentType, String body) { String result = body; if (prettyPrintBody && contentType != null && (contentType.startsWith(ContentType.APPLICATION_JSON) || contentType.startsWith("text/json"))) { try { final Object deserialized = PRETTY_PRINTER.readTree(body); result = PRETTY_PRINTER.writeValueAsString(deserialized); } catch (Exception e) { logger.warning("Failed to pretty print JSON", e); } } return result; } /* * Attempts to retrieve and parse the Content-Length header into a numeric representation. * * @param logger Logger used to log a warning if the Content-Length header is an invalid number. * @param headers HTTP headers that are checked for containing Content-Length. * @return */ private static long getContentLength(ClientLogger logger, HttpHeaders headers) { long contentLength = 0; String contentLengthString = headers.getValue(HttpHeaderName.CONTENT_LENGTH); if (CoreUtils.isNullOrEmpty(contentLengthString)) { return contentLength; } try { contentLength = Long.parseLong(contentLengthString); } catch (NumberFormatException | NullPointerException e) { logger.warning("Could not parse the HTTP header content-length: '{}'.", contentLengthString, e); } return contentLength; } /* * Determines if the request or response body should be logged. * * <p>The request or response body is logged if the Content-Type is not "application/octet-stream" and the body * isn't empty and is less than 16KB in size.</p> * * @param contentTypeHeader Content-Type header value. * @param contentLength Content-Length header represented as a numeric. * @return A flag indicating if the request or response body should be logged. 
*/ private static boolean shouldBodyBeLogged(String contentTypeHeader, long contentLength) { return !ContentType.APPLICATION_OCTET_STREAM.equalsIgnoreCase(contentTypeHeader) && contentLength != 0 && contentLength < MAX_BODY_LOG_SIZE; } /* * Gets the request retry count to include in logging. * * If there is no value set, or it isn't a valid number null will be returned indicating that retry count won't be * logged. */ private static Integer getRequestRetryCount(Context context) { Object rawRetryCount = context.getData(RETRY_COUNT_CONTEXT).orElse(null); if (rawRetryCount == null) { return null; } try { return Integer.valueOf(rawRetryCount.toString()); } catch (NumberFormatException ex) { LOGGER.warning("Could not parse the request retry count: '{}'.", rawRetryCount); return null; } } /* * Get or create the ClientLogger for the method having its request and response logged. */ private static ClientLogger getOrCreateMethodLogger(String methodName) { if (CALLER_METHOD_LOGGER_CACHE.size() > LOGGER_CACHE_MAX_SIZE) { CALLER_METHOD_LOGGER_CACHE.clear(); } return CALLER_METHOD_LOGGER_CACHE.computeIfAbsent(methodName, ClientLogger::new); } private static LoggingEventBuilder getLogBuilder(LogLevel logLevel, ClientLogger logger) { switch (logLevel) { case ERROR: return logger.atError(); case WARNING: return logger.atWarning(); case INFORMATIONAL: return logger.atInfo(); case VERBOSE: default: return logger.atVerbose(); } } private static final class LoggingHttpResponse extends HttpResponse { private final HttpResponse actualResponse; private final LoggingEventBuilder logBuilder; private final int contentLength; private final ClientLogger logger; private final boolean prettyPrintBody; private final String contentTypeHeader; private LoggingHttpResponse(HttpResponse actualResponse, LoggingEventBuilder logBuilder, ClientLogger logger, int contentLength, String contentTypeHeader, boolean prettyPrintBody) { super(actualResponse.getRequest()); this.actualResponse = actualResponse; 
this.logBuilder = logBuilder; this.logger = logger; this.contentLength = contentLength; this.contentTypeHeader = contentTypeHeader; this.prettyPrintBody = prettyPrintBody; } @Override public int getStatusCode() { return actualResponse.getStatusCode(); } @Override @Deprecated public String getHeaderValue(String name) { return actualResponse.getHeaderValue(name); } @Override public String getHeaderValue(HttpHeaderName headerName) { return actualResponse.getHeaderValue(headerName); } @Override public HttpHeaders getHeaders() { return actualResponse.getHeaders(); } @Override public Flux<ByteBuffer> getBody() { AccessibleByteArrayOutputStream stream = new AccessibleByteArrayOutputStream(contentLength); return Flux.using(() -> stream, s -> actualResponse.getBody() .doOnNext(byteBuffer -> { try { ImplUtils.writeByteBufferToStream(byteBuffer.duplicate(), s); } catch (IOException ex) { throw LOGGER.logExceptionAsError(new UncheckedIOException(ex)); } }), s -> doLog(s.toString(StandardCharsets.UTF_8))); } @Override public Mono<byte[]> getBodyAsByteArray() { return FluxUtil.collectBytesFromNetworkResponse(getBody(), actualResponse.getHeaders()); } @Override public Mono<String> getBodyAsString() { return getBodyAsByteArray().map(String::new); } @Override public Mono<String> getBodyAsString(Charset charset) { return getBodyAsByteArray().map(bytes -> new String(bytes, charset)); } @Override public BinaryData getBodyAsBinaryData() { BinaryData content = actualResponse.getBodyAsBinaryData(); doLog(content.toString()); return content; } @Override public void close() { actualResponse.close(); } private void doLog(String body) { logBuilder.addKeyValue(LoggingKeys.BODY_KEY, prettyPrintIfNeeded(logger, prettyPrintBody, contentTypeHeader, body)) .log(RESPONSE_LOG_MESSAGE); } } }
class HttpLoggingPolicy implements HttpPipelinePolicy { private static final ObjectMapperShim PRETTY_PRINTER = ObjectMapperShim.createPrettyPrintMapper(); private static final int MAX_BODY_LOG_SIZE = 1024 * 16; private static final String REDACTED_PLACEHOLDER = "REDACTED"; private static final int LOGGER_CACHE_MAX_SIZE = 1000; private static final Map<String, ClientLogger> CALLER_METHOD_LOGGER_CACHE = new ConcurrentHashMap<>(); private static final ClientLogger LOGGER = new ClientLogger(HttpLoggingPolicy.class); private final HttpLogDetailLevel httpLogDetailLevel; private final Set<String> allowedHeaderNames; private final Set<String> allowedQueryParameterNames; private final boolean prettyPrintBody; private final HttpRequestLogger requestLogger; private final HttpResponseLogger responseLogger; /** * Key for {@link Context} to pass request retry count metadata for logging. */ public static final String RETRY_COUNT_CONTEXT = "requestRetryCount"; private static final String REQUEST_LOG_MESSAGE = "HTTP request"; private static final String RESPONSE_LOG_MESSAGE = "HTTP response"; /** * Creates an HttpLoggingPolicy with the given log configurations. * * @param httpLogOptions The HTTP logging configuration options. 
*/ @Override public Mono<HttpResponse> process(HttpPipelineCallContext context, HttpPipelineNextPolicy next) { if (httpLogDetailLevel == HttpLogDetailLevel.NONE) { return next.process(); } final ClientLogger logger = getOrCreateMethodLogger((String) context.getData("caller-method").orElse("")); final long startNs = System.nanoTime(); return requestLogger.logRequest(logger, getRequestLoggingOptions(context)) .then(next.process()) .flatMap(response -> responseLogger.logResponse(logger, getResponseLoggingOptions(response, startNs, context))) .doOnError(throwable -> logger.warning("<-- HTTP FAILED: ", throwable)); } @Override public HttpResponse processSync(HttpPipelineCallContext context, HttpPipelineNextSyncPolicy next) { if (httpLogDetailLevel == HttpLogDetailLevel.NONE) { return next.processSync(); } final ClientLogger logger = getOrCreateMethodLogger((String) context.getData("caller-method").orElse("")); final long startNs = System.nanoTime(); requestLogger.logRequestSync(logger, getRequestLoggingOptions(context)); try { HttpResponse response = next.processSync(); if (response != null) { response = responseLogger.logResponseSync( logger, getResponseLoggingOptions(response, startNs, context)); } return response; } catch (RuntimeException e) { logger.warning("<-- HTTP FAILED: ", e); throw logger.logExceptionAsWarning(e); } } private HttpRequestLoggingContext getRequestLoggingOptions(HttpPipelineCallContext callContext) { return new HttpRequestLoggingContext(callContext.getHttpRequest(), callContext.getContext(), getRequestRetryCount(callContext.getContext())); } private HttpResponseLoggingContext getResponseLoggingOptions(HttpResponse httpResponse, long startNs, HttpPipelineCallContext callContext) { return new HttpResponseLoggingContext(httpResponse, Duration.ofNanos(System.nanoTime() - startNs), callContext.getContext(), getRequestRetryCount(callContext.getContext())); } private final class DefaultHttpRequestLogger implements HttpRequestLogger { @Override public 
Mono<Void> logRequest(ClientLogger logger, HttpRequestLoggingContext loggingOptions) { final LogLevel logLevel = getLogLevel(loggingOptions); if (logger.canLogAtLevel(logLevel)) { log(logLevel, logger, loggingOptions); } return Mono.empty(); } @Override public void logRequestSync(ClientLogger logger, HttpRequestLoggingContext loggingOptions) { final LogLevel logLevel = getLogLevel(loggingOptions); if (logger.canLogAtLevel(logLevel)) { log(logLevel, logger, loggingOptions); } } private void log(LogLevel logLevel, ClientLogger logger, HttpRequestLoggingContext loggingOptions) { final HttpRequest request = loggingOptions.getHttpRequest(); LoggingEventBuilder logBuilder = getLogBuilder(logLevel, logger); if (httpLogDetailLevel.shouldLogUrl()) { logBuilder .addKeyValue(LoggingKeys.HTTP_METHOD_KEY, request.getHttpMethod()) .addKeyValue(LoggingKeys.URL_KEY, getRedactedUrl(request.getUrl(), allowedQueryParameterNames)); Integer retryCount = loggingOptions.getTryCount(); if (retryCount != null) { logBuilder.addKeyValue(LoggingKeys.TRY_COUNT_KEY, retryCount); } } if (httpLogDetailLevel.shouldLogHeaders() && logger.canLogAtLevel(LogLevel.INFORMATIONAL)) { addHeadersToLogMessage(allowedHeaderNames, request.getHeaders(), logBuilder); } if (request.getBody() == null) { logBuilder.addKeyValue(LoggingKeys.CONTENT_LENGTH_KEY, 0) .log(REQUEST_LOG_MESSAGE); return; } String contentType = request.getHeaders().getValue(HttpHeaderName.CONTENT_TYPE); long contentLength = getContentLength(logger, request.getHeaders()); logBuilder.addKeyValue(LoggingKeys.CONTENT_LENGTH_KEY, contentLength); if (httpLogDetailLevel.shouldLogBody() && shouldBodyBeLogged(contentType, contentLength)) { logBody(request, (int) contentLength, logBuilder, logger, contentType); return; } logBuilder.log(REQUEST_LOG_MESSAGE); } } private void logBody(HttpRequest request, int contentLength, LoggingEventBuilder logBuilder, ClientLogger logger, String contentType) { BinaryData data = request.getBodyAsBinaryData(); 
BinaryDataContent content = BinaryDataHelper.getContent(data); if (content instanceof StringContent || content instanceof ByteBufferContent || content instanceof SerializableContent || content instanceof ByteArrayContent) { logBody(logBuilder, logger, contentType, content.toString()); } else if (content instanceof InputStreamContent) { byte[] contentBytes = content.toBytes(); request.setBody(contentBytes); logBody(logBuilder, logger, contentType, new String(contentBytes, StandardCharsets.UTF_8)); } else { AccessibleByteArrayOutputStream stream = new AccessibleByteArrayOutputStream(contentLength); request.setBody(Flux.using(() -> stream, s -> content.toFluxByteBuffer() .doOnNext(byteBuffer -> { try { ImplUtils.writeByteBufferToStream(byteBuffer.duplicate(), s); } catch (IOException ex) { throw LOGGER.logExceptionAsError(new UncheckedIOException(ex)); } }), s -> logBody(logBuilder, logger, contentType, s.toString(StandardCharsets.UTF_8)))); } } private void logBody(LoggingEventBuilder logBuilder, ClientLogger logger, String contentType, String data) { logBuilder.addKeyValue(LoggingKeys.BODY_KEY, prettyPrintIfNeeded(logger, prettyPrintBody, contentType, data)) .log(REQUEST_LOG_MESSAGE); } private final class DefaultHttpResponseLogger implements HttpResponseLogger { @Override public Mono<HttpResponse> logResponse(ClientLogger logger, HttpResponseLoggingContext loggingOptions) { final LogLevel logLevel = getLogLevel(loggingOptions); final HttpResponse response = loggingOptions.getHttpResponse(); if (!logger.canLogAtLevel(logLevel)) { return Mono.just(response); } LoggingEventBuilder logBuilder = getLogBuilder(logLevel, logger); logContentLength(response, logBuilder); logUrl(loggingOptions, response, logBuilder); logHeaders(logger, response, logBuilder); if (httpLogDetailLevel.shouldLogBody()) { String contentTypeHeader = response.getHeaderValue(HttpHeaderName.CONTENT_TYPE); long contentLength = getContentLength(logger, response.getHeaders()); if 
(shouldBodyBeLogged(contentTypeHeader, contentLength)) { return Mono.just(new LoggingHttpResponse(response, logBuilder, logger, (int) contentLength, contentTypeHeader, prettyPrintBody)); } } logBuilder.log(RESPONSE_LOG_MESSAGE); return Mono.just(response); } private void logHeaders(ClientLogger logger, HttpResponse response, LoggingEventBuilder logBuilder) { if (httpLogDetailLevel.shouldLogHeaders() && logger.canLogAtLevel(LogLevel.INFORMATIONAL)) { addHeadersToLogMessage(allowedHeaderNames, response.getHeaders(), logBuilder); } } private void logUrl(HttpResponseLoggingContext loggingOptions, HttpResponse response, LoggingEventBuilder logBuilder) { if (httpLogDetailLevel.shouldLogUrl()) { logBuilder .addKeyValue(LoggingKeys.STATUS_CODE_KEY, response.getStatusCode()) .addKeyValue(LoggingKeys.URL_KEY, getRedactedUrl(response.getRequest().getUrl(), allowedQueryParameterNames)) .addKeyValue(LoggingKeys.DURATION_MS_KEY, loggingOptions.getResponseDuration().toMillis()); } } private void logContentLength(HttpResponse response, LoggingEventBuilder logBuilder) { String contentLengthString = response.getHeaderValue(HttpHeaderName.CONTENT_LENGTH); if (!CoreUtils.isNullOrEmpty(contentLengthString)) { logBuilder.addKeyValue(LoggingKeys.CONTENT_LENGTH_KEY, contentLengthString); } } @Override public HttpResponse logResponseSync(ClientLogger logger, HttpResponseLoggingContext loggingOptions) { final LogLevel logLevel = getLogLevel(loggingOptions); final HttpResponse response = loggingOptions.getHttpResponse(); if (!logger.canLogAtLevel(logLevel)) { return response; } LoggingEventBuilder logBuilder = getLogBuilder(logLevel, logger); logContentLength(response, logBuilder); logUrl(loggingOptions, response, logBuilder); logHeaders(logger, response, logBuilder); if (httpLogDetailLevel.shouldLogBody()) { String contentTypeHeader = response.getHeaderValue(HttpHeaderName.CONTENT_TYPE); long contentLength = getContentLength(logger, response.getHeaders()); if 
(shouldBodyBeLogged(contentTypeHeader, contentLength)) { return new LoggingHttpResponse(response, logBuilder, logger, (int) contentLength, contentTypeHeader, prettyPrintBody); } } logBuilder.log(RESPONSE_LOG_MESSAGE); return response; } } /* * Generates the redacted URL for logging. * * @param url URL where the request is being sent. * @return A URL with query parameters redacted based on configurations in this policy. */ private static String getRedactedUrl(URL url, Set<String> allowedQueryParameterNames) { String query = url.getQuery(); if (CoreUtils.isNullOrEmpty(query)) { return url.toString(); } UrlBuilder urlBuilder = ImplUtils.parseUrl(url, false); CoreUtils.parseQueryParameters(query).forEachRemaining(queryParam -> { if (allowedQueryParameterNames.contains(queryParam.getKey().toLowerCase(Locale.ROOT))) { urlBuilder.addQueryParameter(queryParam.getKey(), queryParam.getValue()); } else { urlBuilder.addQueryParameter(queryParam.getKey(), REDACTED_PLACEHOLDER); } }); return urlBuilder.toString(); } /* * Adds HTTP headers into the StringBuilder that is generating the log message. * * @param headers HTTP headers on the request or response. * @param sb StringBuilder that is generating the log message. * @param logLevel Log level the environment is configured to use. */ private static void addHeadersToLogMessage(Set<String> allowedHeaderNames, HttpHeaders headers, LoggingEventBuilder logBuilder) { for (HttpHeader header : headers) { String headerName = header.getName(); logBuilder.addKeyValue(headerName, allowedHeaderNames.contains(headerName.toLowerCase(Locale.ROOT)) ? header.getValue() : REDACTED_PLACEHOLDER); } } /* * Determines and attempts to pretty print the body if it is JSON. * * <p>The body is pretty printed if the Content-Type is JSON and the policy is configured to pretty print JSON.</p> * * @param logger Logger used to log a warning if the body fails to pretty print as JSON. * @param contentType Content-Type header. 
* @param body Body of the request or response. * @return The body pretty printed if it is JSON, otherwise the unmodified body. */ private static String prettyPrintIfNeeded(ClientLogger logger, boolean prettyPrintBody, String contentType, String body) { String result = body; if (prettyPrintBody && contentType != null && (contentType.startsWith(ContentType.APPLICATION_JSON) || contentType.startsWith("text/json"))) { try { final Object deserialized = PRETTY_PRINTER.readTree(body); result = PRETTY_PRINTER.writeValueAsString(deserialized); } catch (Exception e) { logger.warning("Failed to pretty print JSON", e); } } return result; } /* * Attempts to retrieve and parse the Content-Length header into a numeric representation. * * @param logger Logger used to log a warning if the Content-Length header is an invalid number. * @param headers HTTP headers that are checked for containing Content-Length. * @return */ private static long getContentLength(ClientLogger logger, HttpHeaders headers) { long contentLength = 0; String contentLengthString = headers.getValue(HttpHeaderName.CONTENT_LENGTH); if (CoreUtils.isNullOrEmpty(contentLengthString)) { return contentLength; } try { contentLength = Long.parseLong(contentLengthString); } catch (NumberFormatException | NullPointerException e) { logger.warning("Could not parse the HTTP header content-length: '{}'.", contentLengthString, e); } return contentLength; } /* * Determines if the request or response body should be logged. * * <p>The request or response body is logged if the Content-Type is not "application/octet-stream" and the body * isn't empty and is less than 16KB in size.</p> * * @param contentTypeHeader Content-Type header value. * @param contentLength Content-Length header represented as a numeric. * @return A flag indicating if the request or response body should be logged. 
*/ private static boolean shouldBodyBeLogged(String contentTypeHeader, long contentLength) { return !ContentType.APPLICATION_OCTET_STREAM.equalsIgnoreCase(contentTypeHeader) && contentLength != 0 && contentLength < MAX_BODY_LOG_SIZE; } /* * Gets the request retry count to include in logging. * * If there is no value set, or it isn't a valid number null will be returned indicating that retry count won't be * logged. */ private static Integer getRequestRetryCount(Context context) { Object rawRetryCount = context.getData(RETRY_COUNT_CONTEXT).orElse(null); if (rawRetryCount == null) { return null; } try { return Integer.valueOf(rawRetryCount.toString()); } catch (NumberFormatException ex) { LOGGER.warning("Could not parse the request retry count: '{}'.", rawRetryCount); return null; } } /* * Get or create the ClientLogger for the method having its request and response logged. */ private static ClientLogger getOrCreateMethodLogger(String methodName) { if (CALLER_METHOD_LOGGER_CACHE.size() > LOGGER_CACHE_MAX_SIZE) { CALLER_METHOD_LOGGER_CACHE.clear(); } return CALLER_METHOD_LOGGER_CACHE.computeIfAbsent(methodName, ClientLogger::new); } private static LoggingEventBuilder getLogBuilder(LogLevel logLevel, ClientLogger logger) { switch (logLevel) { case ERROR: return logger.atError(); case WARNING: return logger.atWarning(); case INFORMATIONAL: return logger.atInfo(); case VERBOSE: default: return logger.atVerbose(); } } private static final class LoggingHttpResponse extends HttpResponse { private final HttpResponse actualResponse; private final LoggingEventBuilder logBuilder; private final int contentLength; private final ClientLogger logger; private final boolean prettyPrintBody; private final String contentTypeHeader; private LoggingHttpResponse(HttpResponse actualResponse, LoggingEventBuilder logBuilder, ClientLogger logger, int contentLength, String contentTypeHeader, boolean prettyPrintBody) { super(actualResponse.getRequest()); this.actualResponse = actualResponse; 
this.logBuilder = logBuilder; this.logger = logger; this.contentLength = contentLength; this.contentTypeHeader = contentTypeHeader; this.prettyPrintBody = prettyPrintBody; } @Override public int getStatusCode() { return actualResponse.getStatusCode(); } @Override @Deprecated public String getHeaderValue(String name) { return actualResponse.getHeaderValue(name); } @Override public String getHeaderValue(HttpHeaderName headerName) { return actualResponse.getHeaderValue(headerName); } @Override public HttpHeaders getHeaders() { return actualResponse.getHeaders(); } @Override public Flux<ByteBuffer> getBody() { AccessibleByteArrayOutputStream stream = new AccessibleByteArrayOutputStream(contentLength); return Flux.using(() -> stream, s -> actualResponse.getBody() .doOnNext(byteBuffer -> { try { ImplUtils.writeByteBufferToStream(byteBuffer.duplicate(), s); } catch (IOException ex) { throw LOGGER.logExceptionAsError(new UncheckedIOException(ex)); } }), s -> doLog(s.toString(StandardCharsets.UTF_8))); } @Override public Mono<byte[]> getBodyAsByteArray() { return FluxUtil.collectBytesFromNetworkResponse(getBody(), actualResponse.getHeaders()); } @Override public Mono<String> getBodyAsString() { return getBodyAsByteArray().map(String::new); } @Override public Mono<String> getBodyAsString(Charset charset) { return getBodyAsByteArray().map(bytes -> new String(bytes, charset)); } @Override public BinaryData getBodyAsBinaryData() { BinaryData content = actualResponse.getBodyAsBinaryData(); doLog(content.toString()); return content; } @Override public void close() { actualResponse.close(); } private void doLog(String body) { logBuilder.addKeyValue(LoggingKeys.BODY_KEY, prettyPrintIfNeeded(logger, prettyPrintBody, contentTypeHeader, body)) .log(RESPONSE_LOG_MESSAGE); } } }
At some point we should make this a little nicer so that we can easily add more values. I am thinking of the following HTTP clients that I personally use: `httpie`, `IntelliJ HTTP Client`, and `Postman` (although I am not sure what values they use for the `User-Agent` header — if any).
/**
 * Picks the first content type that is both supported by the server and accepted
 * by the client, honoring the client's {@code Accept} header preference order.
 *
 * <p>Command-line clients (identified by a {@code User-Agent} starting with
 * {@code wget/} or {@code curl/}) are matched against {@code SUPPORTED_CURL},
 * which ranks plain text first; all other clients use {@code SUPPORTED}.</p>
 *
 * @param context the routing context of the current request
 * @return the chosen MIME type value, or {@code null} if nothing acceptable is supported
 */
static String pickFirstSupportedAndAcceptedContentType(RoutingContext context) {
    List<MIMEHeader> acceptableTypes = context.parsedHeaders().accept();

    // Normalize the User-Agent once instead of lower-casing it per startsWith check.
    String userAgent = context.request().getHeader("User-Agent");
    boolean isCliClient = false;
    if (userAgent != null) {
        String normalizedAgent = userAgent.toLowerCase(Locale.ROOT);
        isCliClient = normalizedAgent.startsWith("wget/") || normalizedAgent.startsWith("curl/");
    }

    // Both branches previously duplicated the lookup; only the candidate list differs.
    MIMEHeader result = context.parsedHeaders()
        .findBestUserAcceptedIn(acceptableTypes, isCliClient ? SUPPORTED_CURL : SUPPORTED);
    return result == null ? null : result.value();
}
|| userAgent.toLowerCase(Locale.ROOT).startsWith("curl/"))) {
/**
 * Selects the first MIME type that the server supports and the client accepts.
 * Requests whose {@code User-Agent} identifies a CLI tool ({@code wget} or
 * {@code curl}) are matched against the plain-text-first list.
 *
 * @param context routing context carrying the parsed request headers
 * @return the negotiated content type value, or {@code null} when none matches
 */
static String pickFirstSupportedAndAcceptedContentType(RoutingContext context) {
    List<MIMEHeader> clientAccepts = context.parsedHeaders().accept();
    String agent = context.request().getHeader("User-Agent");

    MIMEHeader match;
    if (agent != null
            && (agent.toLowerCase(Locale.ROOT).startsWith("wget/")
                || agent.toLowerCase(Locale.ROOT).startsWith("curl/"))) {
        match = context.parsedHeaders().findBestUserAcceptedIn(clientAccepts, SUPPORTED_CURL);
    } else {
        match = context.parsedHeaders().findBestUserAcceptedIn(clientAccepts, SUPPORTED);
    }
    return match != null ? match.value() : null;
}
/**
 * Holder for the MIME types this endpoint can serve, in server preference order.
 *
 * <p>{@code SUPPORTED} lists rich types first with {@code text/plain} appended
 * last; {@code SUPPORTED_CURL} puts {@code text/plain} first because CLI clients
 * render it best.</p>
 */
class ContentTypes {

    private ContentTypes() {
        // Static holder; never instantiated.
    }

    private static final String APPLICATION_JSON = "application/json";
    private static final String TEXT_JSON = "text/json";
    private static final String TEXT_HTML = "text/html";
    private static final String TEXT_PLAIN = "text/plain";
    private static final String APPLICATION_XHTML = "application/xhtml+xml";
    private static final String APPLICATION_XML = "application/xml";
    private static final String TEXT_XML = "text/xml";

    private static final MIMEHeader[] BASE_HEADERS = {
        new ParsableMIMEValue(APPLICATION_JSON).forceParse(),
        new ParsableMIMEValue(TEXT_JSON).forceParse(),
        new ParsableMIMEValue(TEXT_HTML).forceParse(),
        new ParsableMIMEValue(APPLICATION_XHTML).forceParse(),
        new ParsableMIMEValue(APPLICATION_XML).forceParse(),
        new ParsableMIMEValue(TEXT_XML).forceParse()
    };

    // Built via helpers instead of the previous unchecked ArrayList downcast,
    // which would break if the field's implementation type ever changed.
    private static final Collection<MIMEHeader> SUPPORTED = buildSupported();
    private static final Collection<MIMEHeader> SUPPORTED_CURL = buildSupportedCurl();

    /** Base headers followed by text/plain (same order as before). */
    private static Collection<MIMEHeader> buildSupported() {
        List<MIMEHeader> types = new ArrayList<>(Arrays.asList(BASE_HEADERS));
        types.add(new ParsableMIMEValue(TEXT_PLAIN).forceParse());
        return types;
    }

    /** text/plain first, then the base headers (same order as before). */
    private static Collection<MIMEHeader> buildSupportedCurl() {
        List<MIMEHeader> types = new ArrayList<>();
        types.add(new ParsableMIMEValue(TEXT_PLAIN).forceParse());
        types.addAll(Arrays.asList(BASE_HEADERS));
        return types;
    }
}
/**
 * Declares the MIME types the server can produce. Two preference orders are
 * exposed: {@code SUPPORTED} (rich types first, text/plain last) and
 * {@code SUPPORTED_CURL} (text/plain first for command-line clients).
 */
class ContentTypes {

    private ContentTypes() {
    }

    private static final String APPLICATION_JSON = "application/json";
    private static final String TEXT_JSON = "text/json";
    private static final String TEXT_HTML = "text/html";
    private static final String TEXT_PLAIN = "text/plain";
    private static final String APPLICATION_XHTML = "application/xhtml+xml";
    private static final String APPLICATION_XML = "application/xml";
    private static final String TEXT_XML = "text/xml";

    private static final MIMEHeader[] BASE_HEADERS = {
        new ParsableMIMEValue(APPLICATION_JSON).forceParse(),
        new ParsableMIMEValue(TEXT_JSON).forceParse(),
        new ParsableMIMEValue(TEXT_HTML).forceParse(),
        new ParsableMIMEValue(APPLICATION_XHTML).forceParse(),
        new ParsableMIMEValue(APPLICATION_XML).forceParse(),
        new ParsableMIMEValue(TEXT_XML).forceParse()
    };

    private static final Collection<MIMEHeader> SUPPORTED;
    private static final Collection<MIMEHeader> SUPPORTED_CURL;

    static {
        // CLI clients prefer plain text, so it leads their list.
        List<MIMEHeader> curlOrder = new ArrayList<>();
        curlOrder.add(new ParsableMIMEValue(TEXT_PLAIN).forceParse());
        curlOrder.addAll(Arrays.asList(BASE_HEADERS));
        SUPPORTED_CURL = curlOrder;

        // Everyone else gets plain text only as the final fallback.
        List<MIMEHeader> standardOrder = new ArrayList<>(Arrays.asList(BASE_HEADERS));
        standardOrder.add(new ParsableMIMEValue(TEXT_PLAIN).forceParse());
        SUPPORTED = standardOrder;
    }
}
Yes, this call can be omitted, since the binder is already registered under the name "kafka", which resolves to the Kafka binder type on its own.
/**
 * Ensures Spring Cloud Stream binding properties carry the Azure Kafka
 * configuration sources.
 *
 * <p>If no binders are configured, registers a default binder under the name
 * {@code kafka}. The binder name {@code kafka} already resolves to the Kafka
 * binder, so setting the type explicitly is redundant and has been removed.
 * Otherwise, every binder whose type or name is {@code kafka} gets the Azure
 * configuration class appended to its {@code spring.main.sources}.</p>
 *
 * @param bean the bean being initialized
 * @param beanName the name of the bean (unused)
 * @return the (possibly mutated) bean, as required by the BeanPostProcessor contract
 * @throws BeansException never thrown directly here; declared by the interface
 */
public Object postProcessBeforeInitialization(Object bean, String beanName) throws BeansException {
    if (bean instanceof BindingServiceProperties) {
        BindingServiceProperties bindingServiceProperties = (BindingServiceProperties) bean;
        if (bindingServiceProperties.getBinders().isEmpty()) {
            // No binders declared: register the default "kafka" binder with the
            // Azure configuration source. No setType(...) needed — the name alone
            // selects the Kafka binder.
            BinderProperties kafkaBinderSourceProperty = new BinderProperties();
            configureBinderSources(kafkaBinderSourceProperty,
                AzureKafkaSpringCloudStreamConfiguration.AZURE_KAFKA_SPRING_CLOUD_STREAM_CONFIGURATION_CLASS);
            Map<String, BinderProperties> kafkaBinderPropertyMap = new HashMap<>();
            kafkaBinderPropertyMap.put(KAKFA_BINDER_DEFAULT_NAME, kafkaBinderSourceProperty);
            bindingServiceProperties.setBinders(kafkaBinderPropertyMap);
        } else {
            // Augment each binder that is Kafka either by declared type or by name.
            for (Map.Entry<String, BinderProperties> entry : bindingServiceProperties.getBinders().entrySet()) {
                if (entry.getKey() != null && entry.getValue() != null
                    && (KAKFA_BINDER_TYPE.equalsIgnoreCase(entry.getValue().getType())
                        || KAKFA_BINDER_DEFAULT_NAME.equalsIgnoreCase(entry.getKey()))) {
                    configureBinderSources(entry.getValue(), buildKafkaBinderSources(entry.getValue()));
                }
            }
        }
    }
    return bean;
}
kafkaBinderSourceProperty.setType(KAKFA_BINDER_TYPE);
/**
 * Wires the Azure Kafka configuration sources into Spring Cloud Stream binding
 * properties: registers a default {@code kafka} binder when none exist, or
 * appends the Azure configuration class to every Kafka binder already present.
 *
 * @param bean the bean under initialization
 * @param beanName the bean's name (not used)
 * @return the same bean instance, possibly with its binders mutated
 * @throws BeansException declared by the BeanPostProcessor contract
 */
public Object postProcessBeforeInitialization(Object bean, String beanName) throws BeansException {
    if (!(bean instanceof BindingServiceProperties)) {
        return bean;
    }
    BindingServiceProperties bindingProps = (BindingServiceProperties) bean;
    Map<String, BinderProperties> binders = bindingProps.getBinders();

    if (binders.isEmpty()) {
        // Register a default binder under the "kafka" name; the name alone
        // selects the Kafka binder type.
        BinderProperties defaultKafkaBinder = new BinderProperties();
        configureBinderSources(defaultKafkaBinder,
            AzureKafkaSpringCloudStreamConfiguration.AZURE_KAFKA_SPRING_CLOUD_STREAM_CONFIGURATION_CLASS);
        Map<String, BinderProperties> registry = new HashMap<>();
        registry.put(KAKFA_BINDER_DEFAULT_NAME, defaultKafkaBinder);
        bindingProps.setBinders(registry);
        return bean;
    }

    for (Map.Entry<String, BinderProperties> binderEntry : binders.entrySet()) {
        String binderName = binderEntry.getKey();
        BinderProperties binderProps = binderEntry.getValue();
        if (binderName == null || binderProps == null) {
            continue;
        }
        boolean isKafkaBinder = KAKFA_BINDER_TYPE.equalsIgnoreCase(binderProps.getType())
            || KAKFA_BINDER_DEFAULT_NAME.equalsIgnoreCase(binderName);
        if (isKafkaBinder) {
            configureBinderSources(binderProps, buildKafkaBinderSources(binderProps));
        }
    }
    return bean;
}
// NOTE(review): context snippet of BindingServicePropertiesBeanPostProcessor. The dangling
// @Override directly before the private helper suggests the overriding method
// (postProcessBeforeInitialization) was elided from this excerpt; as written the annotation
// would not compile on a private non-overriding method — TODO confirm against the full file.
class BindingServicePropertiesBeanPostProcessor implements BeanPostProcessor { static final String SPRING_MAIN_SOURCES_PROPERTY = "spring.main.sources"; private static final String KAKFA_BINDER_DEFAULT_NAME = "kafka"; private static final String KAKFA_BINDER_TYPE = "kafka"; @Override private String buildKafkaBinderSources(BinderProperties binderProperties) { StringBuilder sources = new StringBuilder(AzureKafkaSpringCloudStreamConfiguration.AZURE_KAFKA_SPRING_CLOUD_STREAM_CONFIGURATION_CLASS); if (binderProperties.getEnvironment().get(SPRING_MAIN_SOURCES_PROPERTY) != null) { sources.append("," + binderProperties.getEnvironment().get(SPRING_MAIN_SOURCES_PROPERTY)); } return sources.toString(); } private void configureBinderSources(BinderProperties binderProperties, String sources) { binderProperties.getEnvironment().put(SPRING_MAIN_SOURCES_PROPERTY, sources); } }
// NOTE(review): the line below is a byte-identical duplicate of the snippet above
// (before/after context fields appear to be the same here).
class BindingServicePropertiesBeanPostProcessor implements BeanPostProcessor { static final String SPRING_MAIN_SOURCES_PROPERTY = "spring.main.sources"; private static final String KAKFA_BINDER_DEFAULT_NAME = "kafka"; private static final String KAKFA_BINDER_TYPE = "kafka"; @Override private String buildKafkaBinderSources(BinderProperties binderProperties) { StringBuilder sources = new StringBuilder(AzureKafkaSpringCloudStreamConfiguration.AZURE_KAFKA_SPRING_CLOUD_STREAM_CONFIGURATION_CLASS); if (binderProperties.getEnvironment().get(SPRING_MAIN_SOURCES_PROPERTY) != null) { sources.append("," + binderProperties.getEnvironment().get(SPRING_MAIN_SOURCES_PROPERTY)); } return sources.toString(); } private void configureBinderSources(BinderProperties binderProperties, String sources) { binderProperties.getEnvironment().put(SPRING_MAIN_SOURCES_PROPERTY, sources); } }
Why is the output row count compared before the sort key? Shouldn't the sort score take precedence?
/**
 * Orders candidate materialized views: fewer group-by columns first, then a better
 * (higher) sort score, then smaller estimated output row count, then fewer schema
 * columns, smaller compute size, and finally candidate index as a stable tie-breaker.
 *
 * Fix per review: the sort score is now compared BEFORE the estimated output row count —
 * a candidate whose sort key matches is preferred over one that merely estimates slightly
 * fewer rows.
 *
 * @param context1 first candidate
 * @param context2 second candidate
 * @return negative/zero/positive per the {@link java.util.Comparator} contract
 */
public int compare(CandidateContext context1, CandidateContext context2) {
    int ret = Integer.compare(context1.getGroupbyColumnNum(), context2.getGroupbyColumnNum());
    if (ret != 0) {
        return ret;
    }
    // Operands deliberately reversed: a HIGHER sortScore sorts first.
    ret = Integer.compare(context2.sortScore, context1.sortScore);
    if (ret != 0) {
        return ret;
    }
    ret = Double.compare(context1.getMvStatistics().getOutputRowCount(),
            context2.getMvStatistics().getOutputRowCount());
    if (ret != 0) {
        return ret;
    }
    ret = Integer.compare(context1.getSchemaColumnNum(), context2.getSchemaColumnNum());
    if (ret != 0) {
        return ret;
    }
    ret = Double.compare(context1.getMvStatistics().getComputeSize(), context2.getMvStatistics().getComputeSize());
    // Index comparison guarantees a deterministic total order among otherwise-equal candidates.
    return ret != 0 ? ret : Integer.compare(context1.getIndex(), context2.getIndex());
}
/**
 * Comparison chain for candidate materialized views: group-by column count, sort score
 * (descending — note the reversed operands), estimated output row count, schema column
 * count, compute size, and finally the candidate index as a deterministic tie-breaker.
 *
 * @param c1 first candidate
 * @param c2 second candidate
 * @return negative/zero/positive per the {@link java.util.Comparator} contract
 */
public int compare(CandidateContext c1, CandidateContext c2) {
    int result;
    if ((result = Integer.compare(c1.getGroupbyColumnNum(), c2.getGroupbyColumnNum())) != 0) {
        return result;
    }
    // Reversed operands: a HIGHER sortScore wins.
    if ((result = Integer.compare(c2.sortScore, c1.sortScore)) != 0) {
        return result;
    }
    if ((result = Double.compare(c1.getMvStatistics().getOutputRowCount(),
            c2.getMvStatistics().getOutputRowCount())) != 0) {
        return result;
    }
    if ((result = Integer.compare(c1.getSchemaColumnNum(), c2.getSchemaColumnNum())) != 0) {
        return result;
    }
    if ((result = Double.compare(c1.getMvStatistics().getComputeSize(),
            c2.getMvStatistics().getComputeSize())) != 0) {
        return result;
    }
    return Integer.compare(c1.getIndex(), c2.getIndex());
}
// NOTE(review): context stub of CandidateContextComparator — the compare(..) method body was
// elided from this excerpt, leaving a dangling @Override; not compilable as written.
class CandidateContextComparator implements Comparator<CandidateContext> { @Override }
// NOTE(review): byte-identical duplicate of the stub above (before/after context fields match).
class CandidateContextComparator implements Comparator<CandidateContext> { @Override }
Ah, wait — I see what you've done here. The second cast is unused.
// Builds a deferred loader that resolves the users/roles property files (filesystem path first,
// then classpath resource) and feeds both streams into the LegacyPropertiesSecurityRealm via
// propsRealm.load. Bails out silently when the realm is not a LegacyPropertiesSecurityRealm.
// NOTE(review): the guard only throws when BOTH URLs are null; if exactly one resource is
// missing, consumeStream is invoked with a null URL — TODO confirm that is intended.
// NOTE(review): the "Trying to loader" wording in the debug log is a typo, but log strings are
// runtime behavior and are left untouched here.
public Runnable loadRealm(RuntimeValue<SecurityRealm> realm, SecurityUsersConfig propertiesConfig) throws Exception { return new Runnable() { @Override public void run() { try { PropertiesRealmConfig config = propertiesConfig.file(); log.debugf("loadRealm, config=%s", config); SecurityRealm secRealm = realm.getValue(); if (!(secRealm instanceof LegacyPropertiesSecurityRealm propsRealm)) { return; } log.debugf("Trying to loader users: /%s", config.users()); URL users; Path p = Paths.get(config.users()); if (Files.exists(p)) { users = p.toUri().toURL(); } else { users = Thread.currentThread().getContextClassLoader().getResource(config.users()); } log.debugf("users: %s", users); log.debugf("Trying to loader roles: %s", config.roles()); URL roles; p = Paths.get(config.roles()); if (Files.exists(p)) { roles = p.toUri().toURL(); } else { roles = Thread.currentThread().getContextClassLoader().getResource(config.roles()); } log.debugf("roles: %s", roles); if (users == null && roles == null) { String msg = String.format( "No PropertiesRealmConfig users/roles settings found. Configure the quarkus.security.file.%s properties", PropertiesRealmConfig.help()); throw new IllegalStateException(msg); } ClassPathUtils.consumeStream(users, usersStream -> { try { ClassPathUtils.consumeStream(roles, rolesStream -> { try { propsRealm.load(usersStream, rolesStream); } catch (IOException e) { throw new UncheckedIOException(e); } }); } catch (IOException e) { throw new UncheckedIOException(e); } }); } catch (IOException e) { throw new UncheckedIOException(e); } } }; }
if (!(secRealm instanceof LegacyPropertiesSecurityRealm propsRealm)) {
// "After" version of loadRealm — appears byte-identical to the preceding "before" version in
// this excerpt (both already use the pattern-matching instanceof); TODO confirm the intended
// diff, since no textual change is visible here.
public Runnable loadRealm(RuntimeValue<SecurityRealm> realm, SecurityUsersConfig propertiesConfig) throws Exception { return new Runnable() { @Override public void run() { try { PropertiesRealmConfig config = propertiesConfig.file(); log.debugf("loadRealm, config=%s", config); SecurityRealm secRealm = realm.getValue(); if (!(secRealm instanceof LegacyPropertiesSecurityRealm propsRealm)) { return; } log.debugf("Trying to loader users: /%s", config.users()); URL users; Path p = Paths.get(config.users()); if (Files.exists(p)) { users = p.toUri().toURL(); } else { users = Thread.currentThread().getContextClassLoader().getResource(config.users()); } log.debugf("users: %s", users); log.debugf("Trying to loader roles: %s", config.roles()); URL roles; p = Paths.get(config.roles()); if (Files.exists(p)) { roles = p.toUri().toURL(); } else { roles = Thread.currentThread().getContextClassLoader().getResource(config.roles()); } log.debugf("roles: %s", roles); if (users == null && roles == null) { String msg = String.format( "No PropertiesRealmConfig users/roles settings found. Configure the quarkus.security.file.%s properties", PropertiesRealmConfig.help()); throw new IllegalStateException(msg); } ClassPathUtils.consumeStream(users, usersStream -> { try { ClassPathUtils.consumeStream(roles, rolesStream -> { try { propsRealm.load(usersStream, rolesStream); } catch (IOException e) { throw new UncheckedIOException(e); } }); } catch (IOException e) { throw new UncheckedIOException(e); } }); } catch (IOException e) { throw new UncheckedIOException(e); } } }; }
// Quarkus/Elytron recorder context: builds properties-file and embedded (in-memory) security
// realms. loadEmbeddedRealm wraps each configured user password (clear-text or hex-decoded
// digest via PasswordFactory) into a SimpleRealmEntry with its comma-separated "groups"
// attributes and installs the identity map into a SimpleMapBackedSecurityRealm;
// createRealm/createEmbeddedRealm construct the realm instances themselves.
// (Statements intentionally span the line breaks below — this excerpt is stored as three
// physical lines; do not reflow.)
class ElytronPropertiesFileRecorder { static final Logger log = Logger.getLogger(ElytronPropertiesFileRecorder.class); private static final Provider[] PROVIDERS = new Provider[] { new WildFlyElytronPasswordProvider() }; /** * Load the user.properties and roles.properties files into the {@linkplain SecurityRealm} * * @param realm - a {@linkplain LegacyPropertiesSecurityRealm} * @param propertiesConfig - properties config with a realm configuration info * @throws Exception */ /** * Load the embedded user and role information into the {@linkplain SecurityRealm} * * @param realm - a {@linkplain SimpleMapBackedSecurityRealm} * @param propertiesConfig - properties config with the realm config * @throws Exception */ public Runnable loadEmbeddedRealm(RuntimeValue<SecurityRealm> realm, SecurityUsersConfig propertiesConfig, MPRealmRuntimeConfig runtimeConfig) throws Exception { return new Runnable() { @Override public void run() { MPRealmConfig config = propertiesConfig.embedded(); log.debugf("loadRealm, config=%s", config); SecurityRealm secRealm = realm.getValue(); if (!(secRealm instanceof SimpleMapBackedSecurityRealm memRealm)) { return; } HashMap<String, SimpleRealmEntry> identityMap = new HashMap<>(); Map<String, String> userInfo = runtimeConfig.users(); log.debugf("UserInfoMap: %s%n", userInfo); Map<String, String> roleInfo = runtimeConfig.roles(); log.debugf("RoleInfoMap: %s%n", roleInfo); for (Map.Entry<String, String> userPasswordEntry : userInfo.entrySet()) { Password password; String user = userPasswordEntry.getKey(); if (runtimeConfig.plainText()) { password = ClearPassword.createRaw(ClearPassword.ALGORITHM_CLEAR, userPasswordEntry.getValue().toCharArray()); } else { try { byte[] hashed = ByteIterator.ofBytes(userPasswordEntry.getValue().getBytes(StandardCharsets.UTF_8)) .asUtf8String().hexDecode().drain(); password = PasswordFactory .getInstance(runtimeConfig.algorithm().getName(), new WildFlyElytronPasswordProvider()) .generatePassword(new 
DigestPasswordSpec(user, config.realmName(), hashed)); } catch (Exception e) { throw new RuntimeException("Unable to register password for user:" + user + " make sure it is a valid hex encoded " + runtimeConfig.algorithm().getName().toUpperCase() + " hash", e); } } PasswordCredential passwordCred = new PasswordCredential(password); List<Credential> credentials = new ArrayList<>(); credentials.add(passwordCred); String rawRoles = roleInfo.get(user); String[] roles = rawRoles != null ? rawRoles.split(",") : new String[0]; Attributes attributes = new MapAttributes(); for (String role : roles) { attributes.addLast("groups", role); } SimpleRealmEntry entry = new SimpleRealmEntry(credentials, attributes); identityMap.put(user, entry); log.debugf("Added user(%s), roles=%s%n", user, attributes.get("groups")); } memRealm.setIdentityMap(identityMap); } }; } /** * Create a runtime value for a {@linkplain LegacyPropertiesSecurityRealm} * * @param propertiesConfig - properties config * @return - runtime value wrapper for the SecurityRealm * @throws Exception */ public RuntimeValue<SecurityRealm> createRealm(SecurityUsersConfig propertiesConfig) throws Exception { PropertiesRealmConfig config = propertiesConfig.file(); log.debugf("createRealm, config=%s", config); SecurityRealm realm = LegacyPropertiesSecurityRealm.builder() .setDefaultRealm(config.realmName()) .setProviders(new Supplier<Provider[]>() { @Override public Provider[] get() { return PROVIDERS; } }) .setPlainText(config.plainText()) .build(); return new RuntimeValue<>(realm); } /** * Create a runtime value for a {@linkplain SimpleMapBackedSecurityRealm} * * @param propertiesConfig - properties config with the realm config * @return - runtime value wrapper for the SecurityRealm * @throws Exception */ public RuntimeValue<SecurityRealm> createEmbeddedRealm(SecurityUsersConfig propertiesConfig) { MPRealmConfig config = propertiesConfig.embedded(); log.debugf("createRealm, config=%s", config); Supplier<Provider[]> 
providers = new Supplier<Provider[]>() { @Override public Provider[] get() { return PROVIDERS; } }; SecurityRealm realm = new SimpleMapBackedSecurityRealm(NameRewriter.IDENTITY_REWRITER, providers); return new RuntimeValue<>(realm); } }
// NOTE(review): byte-identical duplicate of the ElytronPropertiesFileRecorder snippet above
// (the before/after context fields of this row appear to match). Kept verbatim; statements
// intentionally span the line breaks below — do not reflow.
class ElytronPropertiesFileRecorder { static final Logger log = Logger.getLogger(ElytronPropertiesFileRecorder.class); private static final Provider[] PROVIDERS = new Provider[] { new WildFlyElytronPasswordProvider() }; /** * Load the user.properties and roles.properties files into the {@linkplain SecurityRealm} * * @param realm - a {@linkplain LegacyPropertiesSecurityRealm} * @param propertiesConfig - properties config with a realm configuration info * @throws Exception */ /** * Load the embedded user and role information into the {@linkplain SecurityRealm} * * @param realm - a {@linkplain SimpleMapBackedSecurityRealm} * @param propertiesConfig - properties config with the realm config * @throws Exception */ public Runnable loadEmbeddedRealm(RuntimeValue<SecurityRealm> realm, SecurityUsersConfig propertiesConfig, MPRealmRuntimeConfig runtimeConfig) throws Exception { return new Runnable() { @Override public void run() { MPRealmConfig config = propertiesConfig.embedded(); log.debugf("loadRealm, config=%s", config); SecurityRealm secRealm = realm.getValue(); if (!(secRealm instanceof SimpleMapBackedSecurityRealm memRealm)) { return; } HashMap<String, SimpleRealmEntry> identityMap = new HashMap<>(); Map<String, String> userInfo = runtimeConfig.users(); log.debugf("UserInfoMap: %s%n", userInfo); Map<String, String> roleInfo = runtimeConfig.roles(); log.debugf("RoleInfoMap: %s%n", roleInfo); for (Map.Entry<String, String> userPasswordEntry : userInfo.entrySet()) { Password password; String user = userPasswordEntry.getKey(); if (runtimeConfig.plainText()) { password = ClearPassword.createRaw(ClearPassword.ALGORITHM_CLEAR, userPasswordEntry.getValue().toCharArray()); } else { try { byte[] hashed = ByteIterator.ofBytes(userPasswordEntry.getValue().getBytes(StandardCharsets.UTF_8)) .asUtf8String().hexDecode().drain(); password = PasswordFactory .getInstance(runtimeConfig.algorithm().getName(), new WildFlyElytronPasswordProvider()) .generatePassword(new 
DigestPasswordSpec(user, config.realmName(), hashed)); } catch (Exception e) { throw new RuntimeException("Unable to register password for user:" + user + " make sure it is a valid hex encoded " + runtimeConfig.algorithm().getName().toUpperCase() + " hash", e); } } PasswordCredential passwordCred = new PasswordCredential(password); List<Credential> credentials = new ArrayList<>(); credentials.add(passwordCred); String rawRoles = roleInfo.get(user); String[] roles = rawRoles != null ? rawRoles.split(",") : new String[0]; Attributes attributes = new MapAttributes(); for (String role : roles) { attributes.addLast("groups", role); } SimpleRealmEntry entry = new SimpleRealmEntry(credentials, attributes); identityMap.put(user, entry); log.debugf("Added user(%s), roles=%s%n", user, attributes.get("groups")); } memRealm.setIdentityMap(identityMap); } }; } /** * Create a runtime value for a {@linkplain LegacyPropertiesSecurityRealm} * * @param propertiesConfig - properties config * @return - runtime value wrapper for the SecurityRealm * @throws Exception */ public RuntimeValue<SecurityRealm> createRealm(SecurityUsersConfig propertiesConfig) throws Exception { PropertiesRealmConfig config = propertiesConfig.file(); log.debugf("createRealm, config=%s", config); SecurityRealm realm = LegacyPropertiesSecurityRealm.builder() .setDefaultRealm(config.realmName()) .setProviders(new Supplier<Provider[]>() { @Override public Provider[] get() { return PROVIDERS; } }) .setPlainText(config.plainText()) .build(); return new RuntimeValue<>(realm); } /** * Create a runtime value for a {@linkplain SimpleMapBackedSecurityRealm} * * @param propertiesConfig - properties config with the realm config * @return - runtime value wrapper for the SecurityRealm * @throws Exception */ public RuntimeValue<SecurityRealm> createEmbeddedRealm(SecurityUsersConfig propertiesConfig) { MPRealmConfig config = propertiesConfig.embedded(); log.debugf("createRealm, config=%s", config); Supplier<Provider[]> 
providers = new Supplier<Provider[]>() { @Override public Provider[] get() { return PROVIDERS; } }; SecurityRealm realm = new SimpleMapBackedSecurityRealm(NameRewriter.IDENTITY_REWRITER, providers); return new RuntimeValue<>(realm); } }
I think we don't need the `parseXMLStepExtendList` method at all :) The reason is that `.<`, `.` and `[` are already handled by the `parseExpressionRhs` method, so an xml-step-expression node always has only `lhsExpr` and `xmlStepStart`.
// Recursive-descent parser fragment (syntax-tree construction for a Ballerina-style grammar):
// service-declaration lookahead, listener/const declaration parsing, type-descriptor helpers
// (nil/optional/array/typeof/unary), annotation and metadata parsing, and statement-start
// disambiguation. Each parse* method consumes tokens via peek()/consume() and recovers from
// mismatches through recover(). This is the interior of an enclosing parser class whose
// declaration lies outside this excerpt; statements and javadoc intentionally span the
// physical line breaks below — do not reflow. The trailing javadoc at the end of this span
// is cut off mid-comment by the excerpt boundary.
private boolean isServiceDeclStart(ParserRuleContext currentContext, int lookahead) { switch (peek(lookahead + 1).kind) { case IDENTIFIER_TOKEN: SyntaxKind tokenAfterIdentifier = peek(lookahead + 2).kind; switch (tokenAfterIdentifier) { case ON_KEYWORD: case OPEN_BRACE_TOKEN: return true; case EQUAL_TOKEN: case SEMICOLON_TOKEN: case QUESTION_MARK_TOKEN: return false; default: return false; } case ON_KEYWORD: return true; default: return false; } } /** * Parse listener declaration, given the qualifier. * * @param metadata Metadata * @param qualifier Qualifier that precedes the listener declaration * @return Parsed node */ private STNode parseListenerDeclaration(STNode metadata, STNode qualifier) { startContext(ParserRuleContext.LISTENER_DECL); STNode listenerKeyword = parseListenerKeyword(); STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER); STNode variableName = parseVariableName(); STNode equalsToken = parseAssignOp(); STNode initializer = parseExpression(); STNode semicolonToken = parseSemicolon(); endContext(); return STNodeFactory.createListenerDeclarationNode(metadata, qualifier, listenerKeyword, typeDesc, variableName, equalsToken, initializer, semicolonToken); } /** * Parse listener keyword. * * @return Parsed node */ private STNode parseListenerKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.LISTENER_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.LISTENER_KEYWORD); return sol.recoveredNode; } } /** * Parse constant declaration, given the qualifier. 
 * <p> * <code>module-const-decl := metadata [public] const [type-descriptor] identifier = const-expr ;</code> * * @param metadata Metadata * @param qualifier Qualifier that precedes the listener declaration * @return Parsed node */ private STNode parseConstantDeclaration(STNode metadata, STNode qualifier) { startContext(ParserRuleContext.CONSTANT_DECL); STNode constKeyword = parseConstantKeyword(); STNode constDecl = parseConstDecl(metadata, qualifier, constKeyword); endContext(); return constDecl; } /** * Parse the components that follows after the const keyword of a constant declaration. * * @param metadata Metadata * @param qualifier Qualifier that precedes the constant decl * @param constKeyword Const keyword * @return Parsed node */ private STNode parseConstDecl(STNode metadata, STNode qualifier, STNode constKeyword) { STToken nextToken = peek(); return parseConstDeclFromType(nextToken.kind, metadata, qualifier, constKeyword); } private STNode parseConstDeclFromType(SyntaxKind nextTokenKind, STNode metadata, STNode qualifier, STNode constKeyword) { switch (nextTokenKind) { case ANNOTATION_KEYWORD: switchContext(ParserRuleContext.ANNOTATION_DECL); return parseAnnotationDeclaration(metadata, qualifier, constKeyword); case IDENTIFIER_TOKEN: return parseConstantDeclWithOptionalType(metadata, qualifier, constKeyword); default: if (isTypeStartingToken(nextTokenKind)) { break; } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.CONST_DECL_TYPE, metadata, qualifier, constKeyword); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseConstDeclFromType(solution.tokenKind, metadata, qualifier, constKeyword); } STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER); STNode variableName = parseVariableName(); STNode equalsToken = parseAssignOp(); STNode initializer = parseExpression(); STNode semicolonToken = parseSemicolon(); return 
STNodeFactory.createConstantDeclarationNode(metadata, qualifier, constKeyword, typeDesc, variableName, equalsToken, initializer, semicolonToken); } private STNode parseConstantDeclWithOptionalType(STNode metadata, STNode qualifier, STNode constKeyword) { STNode varNameOrTypeName = parseStatementStartIdentifier(); STNode constDecl = parseConstantDeclRhs(metadata, qualifier, constKeyword, varNameOrTypeName); return constDecl; } /** * Parse the component that follows the first identifier in a const decl. The identifier * can be either the type-name (a user defined type) or the var-name there the type-name * is not present. * * @param qualifier Qualifier that precedes the constant decl * @param constKeyword Const keyword * @param typeOrVarName Identifier that follows the const-keywoord * @return Parsed node */ private STNode parseConstantDeclRhs(STNode metadata, STNode qualifier, STNode constKeyword, STNode typeOrVarName) { STToken token = peek(); return parseConstantDeclRhs(token.kind, metadata, qualifier, constKeyword, typeOrVarName); } private STNode parseConstantDeclRhs(SyntaxKind nextTokenKind, STNode metadata, STNode qualifier, STNode constKeyword, STNode typeOrVarName) { STNode type; STNode variableName; switch (nextTokenKind) { case IDENTIFIER_TOKEN: type = typeOrVarName; variableName = parseVariableName(); break; case EQUAL_TOKEN: variableName = ((STSimpleNameReferenceNode) typeOrVarName).name; type = STNodeFactory.createEmptyNode(); break; default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.CONST_DECL_RHS, metadata, qualifier, constKeyword, typeOrVarName); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseConstantDeclRhs(solution.tokenKind, metadata, qualifier, constKeyword, typeOrVarName); } STNode equalsToken = parseAssignOp(); STNode initializer = parseExpression(); STNode semicolonToken = parseSemicolon(); return STNodeFactory.createConstantDeclarationNode(metadata, qualifier, 
constKeyword, type, variableName, equalsToken, initializer, semicolonToken); } /** * Parse const keyword. * * @return Parsed node */ private STNode parseConstantKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.CONST_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.CONST_KEYWORD); return sol.recoveredNode; } } /** * Parse nil type descriptor. * <p> * <code>nil-type-descriptor := ( ) </code> * </p> * * @return Parsed node */ private STNode parseNilTypeDescriptor() { startContext(ParserRuleContext.NIL_TYPE_DESCRIPTOR); STNode openParenthesisToken = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS); STNode closeParenthesisToken = parseCloseParenthesis(); endContext(); return STNodeFactory.createNilTypeDescriptorNode(openParenthesisToken, closeParenthesisToken); } /** * Parse typeof expression. * <p> * <code> * typeof-expr := typeof expression * </code> * * @param isRhsExpr * @return Typeof expression node */ private STNode parseTypeofExpression(boolean isRhsExpr) { STNode typeofKeyword = parseTypeofKeyword(); STNode expr = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false); return STNodeFactory.createTypeofExpressionNode(typeofKeyword, expr); } /** * Parse typeof-keyword. * * @return Typeof-keyword node */ private STNode parseTypeofKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.TYPEOF_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.TYPEOF_KEYWORD); return sol.recoveredNode; } } /** * Parse optional type descriptor. * <p> * <code>optional-type-descriptor := type-descriptor ? </code> * </p> * * @return Parsed node */ private STNode parseOptionalTypeDescriptor(STNode typeDescriptorNode) { startContext(ParserRuleContext.OPTIONAL_TYPE_DESCRIPTOR); STNode questionMarkToken = parseQuestionMark(); endContext(); return STNodeFactory.createOptionalTypeDescriptorNode(typeDescriptorNode, questionMarkToken); } /** * Parse unary expression. 
 * <p> * <code> * unary-expr := + expression | - expression | ~ expression | ! expression * </code> * * @param isRhsExpr * @return Unary expression node */ private STNode parseUnaryExpression(boolean isRhsExpr) { STNode unaryOperator = parseUnaryOperator(); STNode expr = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false); return STNodeFactory.createUnaryExpressionNode(unaryOperator, expr); } /** * Parse unary operator. * <code>UnaryOperator := + | - | ~ | !</code> * * @return Parsed node */ private STNode parseUnaryOperator() { STToken token = peek(); if (isUnaryOperator(token.kind)) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.UNARY_OPERATOR); return sol.recoveredNode; } } /** * Check whether the given token kind is a unary operator. * * @param kind STToken kind * @return <code>true</code> if the token kind refers to a unary operator. <code>false</code> otherwise */ private boolean isUnaryOperator(SyntaxKind kind) { switch (kind) { case PLUS_TOKEN: case MINUS_TOKEN: case NEGATION_TOKEN: case EXCLAMATION_MARK_TOKEN: return true; default: return false; } } /** * Parse array type descriptor. * <p> * <code> * array-type-descriptor := member-type-descriptor [ [ array-length ] ] * member-type-descriptor := type-descriptor * array-length := * int-literal * | constant-reference-expr * | inferred-array-length * inferred-array-length := * * </code> * </p> * * @param memberTypeDesc * * @return Parsed Node */ private STNode parseArrayTypeDescriptor(STNode memberTypeDesc) { startContext(ParserRuleContext.ARRAY_TYPE_DESCRIPTOR); STNode openBracketToken = parseOpenBracket(); STNode arrayLengthNode = parseArrayLength(); STNode closeBracketToken = parseCloseBracket(); endContext(); return STNodeFactory.createArrayTypeDescriptorNode(memberTypeDesc, openBracketToken, arrayLengthNode, closeBracketToken); } /** * Parse array length. 
 * <p> * <code> * array-length := * int-literal * | constant-reference-expr * | inferred-array-length * constant-reference-expr := variable-reference-expr * </code> * </p> * * @return Parsed array length */ private STNode parseArrayLength() { STToken token = peek(); switch (token.kind) { case DECIMAL_INTEGER_LITERAL: case HEX_INTEGER_LITERAL: case ASTERISK_TOKEN: return parseBasicLiteral(); case CLOSE_BRACKET_TOKEN: return STNodeFactory.createEmptyNode(); case IDENTIFIER_TOKEN: return parseQualifiedIdentifier(ParserRuleContext.ARRAY_LENGTH); default: Solution sol = recover(token, ParserRuleContext.ARRAY_LENGTH); return sol.recoveredNode; } } /** * Parse annotations. * <p> * <i>Note: In the ballerina spec ({@link https: * annotations-list is specified as one-or-more annotations. And the usage is marked as * optional annotations-list. However, for the consistency of the tree, here we make the * annotation-list as zero-or-more annotations, and the usage is not-optional.</i> * <p> * <code>annots := annotation*</code> * * @return Parsed node */ private STNode parseAnnotations() { STToken nextToken = peek(); return parseAnnotations(nextToken.kind); } private STNode parseAnnotations(SyntaxKind nextTokenKind) { startContext(ParserRuleContext.ANNOTATIONS); List<STNode> annotList = new ArrayList<>(); while (nextTokenKind == SyntaxKind.AT_TOKEN) { annotList.add(parseAnnotation()); nextTokenKind = peek().kind; } endContext(); return STNodeFactory.createNodeList(annotList); } /** * Parse annotation attachment. 
 * <p> * <code>annotation := @ annot-tag-reference annot-value</code> * * @return Parsed node */ private STNode parseAnnotation() { STNode atToken = parseAtToken(); STNode annotReference; if (peek().kind != SyntaxKind.IDENTIFIER_TOKEN) { annotReference = STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN); } else { annotReference = parseQualifiedIdentifier(ParserRuleContext.ANNOT_REFERENCE); } STNode annotValue; if (peek().kind == SyntaxKind.OPEN_BRACE_TOKEN) { annotValue = parseMappingConstructorExpr(); } else { annotValue = STNodeFactory.createEmptyNode(); } return STNodeFactory.createAnnotationNode(atToken, annotReference, annotValue); } /** * Parse '@' token. * * @return Parsed node */ private STNode parseAtToken() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.AT_TOKEN) { return consume(); } else { Solution sol = recover(nextToken, ParserRuleContext.AT); return sol.recoveredNode; } } /** * Parse metadata. Meta data consist of optional doc string and * an annotations list. * <p> * <code>metadata := [DocumentationString] annots</code> * * @return Parse node */ private STNode parseMetaData(SyntaxKind nextTokenKind) { STNode docString; STNode annotations; switch (nextTokenKind) { case DOCUMENTATION_LINE: docString = parseDocumentationString(); annotations = parseAnnotations(); break; case AT_TOKEN: docString = STNodeFactory.createEmptyNode(); annotations = parseAnnotations(nextTokenKind); break; default: return createEmptyMetadata(); } return STNodeFactory.createMetadataNode(docString, annotations); } /** * Create empty metadata node. * * @return A metadata node with no doc string and no annotations */ private STNode createEmptyMetadata() { return STNodeFactory.createMetadataNode(STNodeFactory.createEmptyNode(), STNodeFactory.createNodeList(new ArrayList<>())); } /** * Parse is expression. 
 * <code> * is-expr := expression is type-descriptor * </code> * * @param lhsExpr Preceding expression of the is expression * @return Is expression node */ private STNode parseTypeTestExpression(STNode lhsExpr) { STNode isKeyword = parseIsKeyword(); STNode typeDescriptor = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_EXPRESSION); return STNodeFactory.createTypeTestExpressionNode(lhsExpr, isKeyword, typeDescriptor); } /** * Parse is-keyword. * * @return Is-keyword node */ private STNode parseIsKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.IS_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.IS_KEYWORD); return sol.recoveredNode; } } /** * Parse local type definition statement statement. * <code>ocal-type-defn-stmt := [annots] type identifier type-descriptor ;</code> * * @return local type definition statement statement */ private STNode parseLocalTypeDefinitionStatement(STNode annots) { startContext(ParserRuleContext.LOCAL_TYPE_DEFINITION_STMT); STNode typeKeyword = parseTypeKeyword(); STNode typeName = parseTypeName(); STNode typeDescriptor = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_DEF); STNode semicolon = parseSemicolon(); endContext(); return STNodeFactory.createLocalTypeDefinitionStatementNode(annots, typeKeyword, typeName, typeDescriptor, semicolon); } /** * Pass statements that starts with an identifier. 
 * * @param tokenKind Next token kind * @return Parsed node */ private STNode parseStatementStartsWithIdentifier(STNode annots) { startContext(ParserRuleContext.AMBIGUOUS_STMT); STNode identifier = parseStatementStartIdentifier(); STNode stmt = parseStatementStartIdentifierRhs(annots, identifier); endContext(); return stmt; } private STNode parseStatementStartIdentifierRhs(STNode annots, STNode identifier) { return parseStatementStartIdentifierRhs(peek().kind, annots, identifier); } private STNode parseStatementStartIdentifierRhs(SyntaxKind nextTokenKind, STNode annots, STNode identifier) { switch (nextTokenKind) { case OPEN_BRACKET_TOKEN: STNode tbpOrMemberAccess = parseTypedBindingPatternOrMemberAccess(identifier, false, ParserRuleContext.AMBIGUOUS_STMT); if (tbpOrMemberAccess.kind == SyntaxKind.INDEXED_EXPRESSION) { STNode expr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, tbpOrMemberAccess, false, false); return parseStatementStartWithExpr(annots, expr); } STNode finalKeyword = STNodeFactory.createEmptyNode(); return parseVarDeclRhs(annots, finalKeyword, tbpOrMemberAccess, false); case IDENTIFIER_TOKEN: case QUESTION_MARK_TOKEN: return parseTypeDescStartsWithIdentifier(identifier, annots); case EQUAL_TOKEN: case SEMICOLON_TOKEN: return parseStatementStartWithExpr(nextTokenKind, annots, identifier); case PIPE_TOKEN: case BITWISE_AND_TOKEN: STToken nextNextToken = peek(2); if (nextNextToken.kind != SyntaxKind.EQUAL_TOKEN) { return parseTypeDescStartsWithIdentifier(identifier, annots); } default: if (isCompoundBinaryOperator(nextTokenKind)) { return parseCompoundAssignmentStmtRhs(identifier); } if (isValidExprRhsStart(nextTokenKind)) { STNode expression = parseExpressionRhs(nextTokenKind, DEFAULT_OP_PRECEDENCE, identifier, false, true); return parseStatementStartWithExpr(annots, expression); } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.STMT_START_IDENTIFIER_RHS, annots, identifier); if (solution.action == Action.REMOVE) { return 
solution.recoveredNode; } return parseStatementStartIdentifierRhs(solution.tokenKind, annots, identifier); } } private STNode parseTypeDescStartsWithIdentifier(STNode typeDesc, STNode annots) { switchContext(ParserRuleContext.VAR_DECL_STMT); startContext(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN); typeDesc = parseComplexTypeDescriptor(typeDesc, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, false); endContext(); STNode finalKeyword = STNodeFactory.createEmptyNode(); STNode typeBindingPattern = parseTypedBindingPatternTypeRhs(typeDesc, ParserRuleContext.VAR_DECL_STMT); return parseVarDeclRhs(annots, finalKeyword, typeBindingPattern, false); } /** * Parse statement which is only consists of an action or expression. * * @param annots Annotations * @param nextTokenKind Next token kind * @return Parsed node */ private STNode parseExpressionStament(SyntaxKind nextTokenKind, STNode annots) { startContext(ParserRuleContext.EXPRESSION_STATEMENT); STNode expression = parseActionOrExpressionInLhs(nextTokenKind, annots); STNode stmt = getExpressionAsStatement(expression); endContext(); return stmt; } private STNode parseStamentStartWithExpr(SyntaxKind nextTokenKind, STNode annots) { startContext(ParserRuleContext.EXPRESSION_STATEMENT); STNode expression = parseActionOrExpressionInLhs(nextTokenKind, annots); STNode stmt = parseStatementStartWithExpr(annots, expression); endContext(); return stmt; } /** * Parse statements that starts with an expression. * * @param annots Annotations * @return Parsed node */ private STNode parseStatementStartWithExpr(STNode annots, STNode expression) { STToken nextToken = peek(); return parseStatementStartWithExpr(nextToken.kind, annots, expression); } /** * Parse the component followed by the expression, at the beginning of a statement. 
 * * @param nextTokenKind Kind of the next token * @param annots Annotations * @return Parsed node */ private STNode parseStatementStartWithExpr(SyntaxKind nextTokenKind, STNode annots, STNode expression) { switch (nextTokenKind) { case EQUAL_TOKEN: switchContext(ParserRuleContext.ASSIGNMENT_STMT); return parseAssignmentStmtRhs(expression); case SEMICOLON_TOKEN: return getExpressionAsStatement(expression); case IDENTIFIER_TOKEN: default: if (isCompoundBinaryOperator(nextTokenKind)) { return parseCompoundAssignmentStmtRhs(expression); } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.STMT_START_WITH_EXPR_RHS, annots, expression); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseStatementStartWithExpr(solution.tokenKind, annots, expression); } } private STNode getArrayLength(STNodeList exprs) { if (exprs.isEmpty()) { return STNodeFactory.createEmptyNode(); } STNode lengthExpr = exprs.get(0); switch (lengthExpr.kind) { case DECIMAL_INTEGER_LITERAL: case HEX_INTEGER_LITERAL: case ASTERISK_TOKEN: case SIMPLE_NAME_REFERENCE: case QUALIFIED_NAME_REFERENCE: break; default: this.errorHandler.reportInvalidNode(null, "invalid array length"); break; } return lengthExpr; } private STNode getExpressionAsStatement(STNode expression) { switch (expression.kind) { case METHOD_CALL: case FUNCTION_CALL: case CHECK_EXPRESSION: return parseCallStatement(expression); case REMOTE_METHOD_CALL_ACTION: case CHECK_ACTION: case BRACED_ACTION: case START_ACTION: case TRAP_ACTION: case FLUSH_ACTION: case ASYNC_SEND_ACTION: case SYNC_SEND_ACTION: case RECEIVE_ACTION: case WAIT_ACTION: case QUERY_ACTION: case COMMIT_ACTION: return parseActionStatement(expression); default: this.errorHandler.reportInvalidNode(null, "left hand side of an assignment must be a variable reference"); STNode semicolon = parseSemicolon(); return STNodeFactory.createExpressionStatementNode(SyntaxKind.INVALID, expression, semicolon); } } /** * <p> * Parse call 
statement, given the call expression. * <p> * <code> * call-stmt := call-expr ; * <br/> * call-expr := function-call-expr | method-call-expr | checking-keyword call-expr * </code> * * @param expression Call expression associated with the call statement * @return Call statement node */ private STNode parseCallStatement(STNode expression) { validateExprInCallStmt(expression); STNode semicolon = parseSemicolon(); return STNodeFactory.createExpressionStatementNode(SyntaxKind.CALL_STATEMENT, expression, semicolon); } private void validateExprInCallStmt(STNode expression) { switch (expression.kind) { case FUNCTION_CALL: case METHOD_CALL: break; case CHECK_EXPRESSION: validateExprInCallStmt(((STCheckExpressionNode) expression).expression); break; case REMOTE_METHOD_CALL_ACTION: break; case BRACED_EXPRESSION: validateExprInCallStmt(((STBracedExpressionNode) expression).expression); break; default: if (isMissingNode(expression)) { break; } this.errorHandler.reportInvalidNode(null, "expression followed by the checking keyword must be a " + "func-call, a method-call or a check-expr"); break; } } /** * Check whether a node is a missing node. * * @param node Node to check * @return <code>true</code> if the node is a missing node. <code>false</code> otherwise */ private boolean isMissingNode(STNode node) { if (node.kind == SyntaxKind.SIMPLE_NAME_REFERENCE) { return isMissingNode(((STSimpleNameReferenceNode) node).name); } return node instanceof STMissingToken; } private STNode parseActionStatement(STNode action) { STNode semicolon = parseSemicolon(); return STNodeFactory.createExpressionStatementNode(SyntaxKind.ACTION_STATEMENT, action, semicolon); } /** * Parse remote method call action, given the starting expression. 
* <p> * <code> * remote-method-call-action := expression -> method-name ( arg-list ) * <br/> * async-send-action := expression -> peer-worker ; * </code> * * @param isRhsExpr Is this an RHS action * @param expression LHS expression * @return */ private STNode parseRemoteMethodCallOrAsyncSendAction(STNode expression, boolean isRhsExpr) { STNode rightArrow = parseRightArrow(); return parseRemoteCallOrAsyncSendActionRhs(peek().kind, expression, isRhsExpr, rightArrow); } private STNode parseRemoteCallOrAsyncSendActionRhs(SyntaxKind nextTokenKind, STNode expression, boolean isRhsExpr, STNode rightArrow) { STNode name; switch (nextTokenKind) { case DEFAULT_KEYWORD: name = parseDefaultKeyword(); return parseAsyncSendAction(expression, rightArrow, name); case IDENTIFIER_TOKEN: name = STNodeFactory.createSimpleNameReferenceNode(parseFunctionName()); break; default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.REMOTE_CALL_OR_ASYNC_SEND_RHS, expression, isRhsExpr, rightArrow); if (solution.action == Action.REMOVE) { name = solution.recoveredNode; break; } return parseRemoteCallOrAsyncSendActionRhs(solution.tokenKind, expression, isRhsExpr, rightArrow); } return parseRemoteCallOrAsyncSendEnd(peek().kind, expression, rightArrow, name); } private STNode parseRemoteCallOrAsyncSendEnd(SyntaxKind nextTokenKind, STNode expression, STNode rightArrow, STNode name) { switch (nextTokenKind) { case OPEN_PAREN_TOKEN: return parseRemoteMethodCallAction(expression, rightArrow, name); case SEMICOLON_TOKEN: return parseAsyncSendAction(expression, rightArrow, name); default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.REMOTE_CALL_OR_ASYNC_SEND_END, expression, rightArrow, name); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseRemoteCallOrAsyncSendEnd(solution.tokenKind, expression, rightArrow, name); } } /** * Parse default keyword. 
* * @return default keyword node */ private STNode parseDefaultKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.DEFAULT_KEYWORD) { return STNodeFactory.createSimpleNameReferenceNode(consume()); } else { Solution sol = recover(token, ParserRuleContext.DEFAULT_KEYWORD); return sol.recoveredNode; } } private STNode parseAsyncSendAction(STNode expression, STNode rightArrow, STNode peerWorker) { return STNodeFactory.createAsyncSendActionNode(expression, rightArrow, peerWorker); } private STNode parseRemoteMethodCallAction(STNode expression, STNode rightArrow, STNode name) { STNode openParenToken = parseOpenParenthesis(ParserRuleContext.ARG_LIST_START); STNode arguments = parseArgsList(); STNode closeParenToken = parseCloseParenthesis(); return STNodeFactory.createRemoteMethodCallActionNode(expression, rightArrow, name, openParenToken, arguments, closeParenToken); } /** * Parse right arrow (<code>-></code>) token. * * @return Parsed node */ private STNode parseRightArrow() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.RIGHT_ARROW_TOKEN) { return consume(); } else { Solution sol = recover(nextToken, ParserRuleContext.RIGHT_ARROW); return sol.recoveredNode; } } /** * Parse parameterized type descriptor. * parameterized-type-descriptor := map type-parameter | future type-parameter | typedesc type-parameter * * @return Parsed node */ private STNode parseParameterizedTypeDescriptor() { STNode parameterizedTypeKeyword = parseParameterizedTypeKeyword(); STNode ltToken = parseLTToken(); STNode typeNode = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS); STNode gtToken = parseGTToken(); return STNodeFactory.createParameterizedTypeDescriptorNode(parameterizedTypeKeyword, ltToken, typeNode, gtToken); } /** * Parse <code>map</code> or <code>future</code> or <code>typedesc</code> keyword token. 
* * @return Parsed node */ private STNode parseParameterizedTypeKeyword() { STToken nextToken = peek(); switch (nextToken.kind) { case MAP_KEYWORD: case FUTURE_KEYWORD: case TYPEDESC_KEYWORD: return consume(); default: Solution sol = recover(nextToken, ParserRuleContext.PARAMETERIZED_TYPE); return sol.recoveredNode; } } /** * Parse <code> < </code> token. * * @return Parsed node */ private STNode parseGTToken() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.GT_TOKEN) { return consume(); } else { Solution sol = recover(nextToken, ParserRuleContext.GT); return sol.recoveredNode; } } /** * Parse <code> > </code> token. * * @return Parsed node */ private STNode parseLTToken() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.LT_TOKEN) { return consume(); } else { Solution sol = recover(nextToken, ParserRuleContext.LT); return sol.recoveredNode; } } /** * Parse nil literal. Here nil literal is only referred to ( ). * * @return Parsed node */ private STNode parseNilLiteral() { startContext(ParserRuleContext.NIL_LITERAL); STNode openParenthesisToken = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS); STNode closeParenthesisToken = parseCloseParenthesis(); endContext(); return STNodeFactory.createNilLiteralNode(openParenthesisToken, closeParenthesisToken); } /** * Parse annotation declaration, given the qualifier. * * @param metadata Metadata * @param qualifier Qualifier that precedes the listener declaration * @param constKeyword Const keyword * @return Parsed node */ private STNode parseAnnotationDeclaration(STNode metadata, STNode qualifier, STNode constKeyword) { startContext(ParserRuleContext.ANNOTATION_DECL); STNode annotationKeyword = parseAnnotationKeyword(); STNode annotDecl = parseAnnotationDeclFromType(metadata, qualifier, constKeyword, annotationKeyword); endContext(); return annotDecl; } /** * Parse annotation keyword. 
* * @return Parsed node */ private STNode parseAnnotationKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.ANNOTATION_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.ANNOTATION_KEYWORD); return sol.recoveredNode; } } /** * Parse the components that follows after the annotation keyword of a annotation declaration. * * @param metadata Metadata * @param qualifier Qualifier that precedes the constant decl * @param constKeyword Const keyword * @param annotationKeyword * @return Parsed node */ private STNode parseAnnotationDeclFromType(STNode metadata, STNode qualifier, STNode constKeyword, STNode annotationKeyword) { STToken nextToken = peek(); return parseAnnotationDeclFromType(nextToken.kind, metadata, qualifier, constKeyword, annotationKeyword); } private STNode parseAnnotationDeclFromType(SyntaxKind nextTokenKind, STNode metadata, STNode qualifier, STNode constKeyword, STNode annotationKeyword) { switch (nextTokenKind) { case IDENTIFIER_TOKEN: return parseAnnotationDeclWithOptionalType(metadata, qualifier, constKeyword, annotationKeyword); default: if (isTypeStartingToken(nextTokenKind)) { break; } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.ANNOT_DECL_OPTIONAL_TYPE, metadata, qualifier, constKeyword, annotationKeyword); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseAnnotationDeclFromType(solution.tokenKind, metadata, qualifier, constKeyword, annotationKeyword); } STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANNOTATION_DECL); STNode annotTag = parseAnnotationTag(); return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc, annotTag); } /** * Parse annotation tag. 
* <p> * <code>annot-tag := identifier</code> * * @return */ private STNode parseAnnotationTag() { STToken token = peek(); if (token.kind == SyntaxKind.IDENTIFIER_TOKEN) { return consume(); } else { Solution sol = recover(peek(), ParserRuleContext.ANNOTATION_TAG); return sol.recoveredNode; } } private STNode parseAnnotationDeclWithOptionalType(STNode metadata, STNode qualifier, STNode constKeyword, STNode annotationKeyword) { STNode typeDescOrAnnotTag = parseAnnotationTag(); if (typeDescOrAnnotTag.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) { STNode annotTag = parseAnnotationTag(); return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDescOrAnnotTag, annotTag); } return parseAnnotationDeclRhs(metadata, qualifier, constKeyword, annotationKeyword, typeDescOrAnnotTag); } /** * Parse the component that follows the first identifier in an annotation decl. The identifier * can be either the type-name (a user defined type) or the annot-tag, where the type-name * is not present. 
* 
     * @param metadata Metadata
     * @param qualifier Qualifier that precedes the annotation decl
     * @param constKeyword Const keyword
     * @param annotationKeyword Annotation keyword
     * @param typeDescOrAnnotTag Identifier that follows the annotation-keyword
     * @return Parsed node
     */
    private STNode parseAnnotationDeclRhs(STNode metadata, STNode qualifier, STNode constKeyword,
                                          STNode annotationKeyword, STNode typeDescOrAnnotTag) {
        STToken token = peek();
        return parseAnnotationDeclRhs(token.kind, metadata, qualifier, constKeyword, annotationKeyword,
                typeDescOrAnnotTag);
    }

    private STNode parseAnnotationDeclRhs(SyntaxKind nextTokenKind, STNode metadata, STNode qualifier,
                                          STNode constKeyword, STNode annotationKeyword, STNode typeDescOrAnnotTag) {
        STNode typeDesc;
        STNode annotTag;
        switch (nextTokenKind) {
            case IDENTIFIER_TOKEN:
                // Second identifier present: the first one was the type, this one is the tag.
                typeDesc = typeDescOrAnnotTag;
                annotTag = parseAnnotationTag();
                break;
            case SEMICOLON_TOKEN:
            case ON_KEYWORD:
                // Declaration ends / attach points begin: the first identifier was the tag.
                typeDesc = STNodeFactory.createEmptyNode();
                annotTag = typeDescOrAnnotTag;
                break;
            default:
                STToken token = peek();
                Solution solution = recover(token, ParserRuleContext.ANNOT_DECL_RHS, metadata, qualifier,
                        constKeyword, annotationKeyword, typeDescOrAnnotTag);
                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }
                return parseAnnotationDeclRhs(solution.tokenKind, metadata, qualifier, constKeyword,
                        annotationKeyword, typeDescOrAnnotTag);
        }
        return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc,
                annotTag);
    }

    private STNode parseAnnotationDeclAttachPoints(STNode metadata, STNode qualifier, STNode constKeyword,
                                                   STNode annotationKeyword, STNode typeDesc, STNode annotTag) {
        STToken nextToken = peek();
        return parseAnnotationDeclAttachPoints(nextToken.kind, metadata, qualifier, constKeyword, annotationKeyword,
                typeDesc, annotTag);
    }

    // Parse the optional "on attach-point (, attach-point)*" clause and the closing semicolon,
    // then assemble the full annotation-declaration node.
    private STNode parseAnnotationDeclAttachPoints(SyntaxKind nextTokenKind, STNode metadata, STNode qualifier,
                                                   STNode constKeyword, STNode annotationKeyword, STNode typeDesc,
                                                   STNode annotTag) {
        STNode onKeyword;
        STNode attachPoints;
        switch (nextTokenKind) {
            case SEMICOLON_TOKEN:
                onKeyword = STNodeFactory.createEmptyNode();
                attachPoints = STNodeFactory.createEmptyNode();
                break;
            case ON_KEYWORD:
                onKeyword = parseOnKeyword();
                attachPoints = parseAnnotationAttachPoints();
                break;
            default:
                STToken token = peek();
                Solution solution = recover(token, ParserRuleContext.ANNOT_OPTIONAL_ATTACH_POINTS, metadata,
                        qualifier, constKeyword, annotationKeyword, typeDesc, annotTag);
                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }
                return parseAnnotationDeclAttachPoints(solution.tokenKind, metadata, qualifier, constKeyword,
                        annotationKeyword, typeDesc, annotTag);
        }
        STNode semicolonToken = parseSemicolon();
        return STNodeFactory.createAnnotationDeclarationNode(metadata, qualifier, constKeyword, annotationKeyword,
                typeDesc, annotTag, onKeyword, attachPoints, semicolonToken);
    }

    /**
     * Parse annotation attach points.
     * <p>
     * <code>
     * annot-attach-points := annot-attach-point (, annot-attach-point)*
     * <br/><br/>
     * annot-attach-point := dual-attach-point | source-only-attach-point
     * <br/><br/>
     * dual-attach-point := [source] dual-attach-point-ident
     * <br/><br/>
     * dual-attach-point-ident :=
     *     [object] type
     *     | [object|resource] function
     *     | parameter
     *     | return
     *     | service
     *     | [object|record] field
     * <br/><br/>
     * source-only-attach-point := source source-only-attach-point-ident
     * <br/><br/>
     * source-only-attach-point-ident :=
     *     annotation
     *     | external
     *     | var
     *     | const
     *     | listener
     *     | worker
     * </code>
     *
     * @return Parsed node
     */
    private STNode parseAnnotationAttachPoints() {
        startContext(ParserRuleContext.ANNOT_ATTACH_POINTS_LIST);
        List<STNode> attachPoints = new ArrayList<>();

        // At least one attach point is required after "on".
        STToken nextToken = peek();
        if (isEndAnnotAttachPointList(nextToken.kind)) {
            endContext();
            this.errorHandler.reportMissingTokenError("missing attach point");
            return STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
        }

        STNode attachPoint = parseAnnotationAttachPoint();
        attachPoints.add(attachPoint);

        // Remaining attach points: comma-separated; commas are kept in the node list.
        nextToken = peek();
        STNode leadingComma;
        while (!isEndAnnotAttachPointList(nextToken.kind)) {
            leadingComma = parseAttachPointEnd();
            if (leadingComma == null) {
                break;
            }
            attachPoints.add(leadingComma);

            attachPoint = parseAnnotationAttachPoint();
            if (attachPoint == null) {
                this.errorHandler.reportMissingTokenError("missing attach point");
                attachPoint = STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
                attachPoints.add(attachPoint);
                break;
            }
            attachPoints.add(attachPoint);
            nextToken = peek();
        }

        endContext();
        return STNodeFactory.createNodeList(attachPoints);
    }

    /**
     * Parse annotation attach point end.
     *
     * @return Parsed node, or <code>null</code> if the list has ended
     */
    private STNode parseAttachPointEnd() {
        STToken nextToken = peek();
        return parseAttachPointEnd(nextToken.kind);
    }

    private STNode parseAttachPointEnd(SyntaxKind nextTokenKind) {
        switch (nextTokenKind) {
            case SEMICOLON_TOKEN:
                // null marks the end of attach points
                return null;
            case COMMA_TOKEN:
                return consume();
            default:
                Solution sol = recover(peek(), ParserRuleContext.ATTACH_POINT_END);
                if (sol.action == Action.REMOVE) {
                    return sol.recoveredNode;
                }
                // If the recovered token is a comma, the list continues; anything else ends it.
                return sol.tokenKind == SyntaxKind.COMMA_TOKEN ? sol.recoveredNode : null;
        }
    }

    private boolean isEndAnnotAttachPointList(SyntaxKind tokenKind) {
        switch (tokenKind) {
            case EOF_TOKEN:
            case SEMICOLON_TOKEN:
                return true;
            default:
                return false;
        }
    }

    /**
     * Parse annotation attach point.
* * @return Parsed node */ private STNode parseAnnotationAttachPoint() { return parseAnnotationAttachPoint(peek().kind); } private STNode parseAnnotationAttachPoint(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case EOF_TOKEN: return null; case ANNOTATION_KEYWORD: case EXTERNAL_KEYWORD: case VAR_KEYWORD: case CONST_KEYWORD: case LISTENER_KEYWORD: case WORKER_KEYWORD: case SOURCE_KEYWORD: STNode sourceKeyword = parseSourceKeyword(); return parseAttachPointIdent(sourceKeyword); case OBJECT_KEYWORD: case TYPE_KEYWORD: case RESOURCE_KEYWORD: case FUNCTION_KEYWORD: case PARAMETER_KEYWORD: case RETURN_KEYWORD: case SERVICE_KEYWORD: case FIELD_KEYWORD: case RECORD_KEYWORD: sourceKeyword = STNodeFactory.createEmptyNode(); STNode firstIdent = consume(); return parseDualAttachPointIdent(sourceKeyword, firstIdent); default: Solution solution = recover(peek(), ParserRuleContext.ATTACH_POINT); return solution.recoveredNode; } } /** * Parse source keyword. * * @return Parsed node */ private STNode parseSourceKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.SOURCE_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.SOURCE_KEYWORD); return sol.recoveredNode; } } /** * Parse attach point ident gievn. 
* <p> * <code> * source-only-attach-point-ident := annotation | external | var | const | listener | worker * <br/><br/> * dual-attach-point-ident := [object] type | [object|resource] function | parameter * | return | service | [object|record] field * </code> * * @param sourceKeyword Source keyword * @return Parsed node */ private STNode parseAttachPointIdent(STNode sourceKeyword) { return parseAttachPointIdent(peek().kind, sourceKeyword); } private STNode parseAttachPointIdent(SyntaxKind nextTokenKind, STNode sourceKeyword) { switch (nextTokenKind) { case ANNOTATION_KEYWORD: case EXTERNAL_KEYWORD: case VAR_KEYWORD: case CONST_KEYWORD: case LISTENER_KEYWORD: case WORKER_KEYWORD: STNode firstIdent = consume(); STNode secondIdent = STNodeFactory.createEmptyNode(); return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, firstIdent, secondIdent); case OBJECT_KEYWORD: case RESOURCE_KEYWORD: case RECORD_KEYWORD: case TYPE_KEYWORD: case FUNCTION_KEYWORD: case PARAMETER_KEYWORD: case RETURN_KEYWORD: case SERVICE_KEYWORD: case FIELD_KEYWORD: firstIdent = consume(); return parseDualAttachPointIdent(sourceKeyword, firstIdent); default: Solution solution = recover(peek(), ParserRuleContext.ATTACH_POINT_IDENT, sourceKeyword); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } firstIdent = solution.recoveredNode; return parseDualAttachPointIdent(sourceKeyword, firstIdent); } } /** * Parse dual-attach-point ident. 
* * @param sourceKeyword Source keyword * @param firstIdent first part of the dual attach-point * @return Parsed node */ private STNode parseDualAttachPointIdent(STNode sourceKeyword, STNode firstIdent) { STNode secondIdent; switch (firstIdent.kind) { case OBJECT_KEYWORD: secondIdent = parseIdentAfterObjectIdent(); break; case RESOURCE_KEYWORD: secondIdent = parseFunctionIdent(); break; case RECORD_KEYWORD: secondIdent = parseFieldIdent(); break; case TYPE_KEYWORD: case FUNCTION_KEYWORD: case PARAMETER_KEYWORD: case RETURN_KEYWORD: case SERVICE_KEYWORD: case FIELD_KEYWORD: default: secondIdent = STNodeFactory.createEmptyNode(); break; } return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, firstIdent, secondIdent); } /** * Parse the idents that are supported after object-ident. * * @return Parsed node */ private STNode parseIdentAfterObjectIdent() { STToken token = peek(); switch (token.kind) { case TYPE_KEYWORD: case FUNCTION_KEYWORD: case FIELD_KEYWORD: return consume(); default: Solution sol = recover(token, ParserRuleContext.IDENT_AFTER_OBJECT_IDENT); return sol.recoveredNode; } } /** * Parse function ident. * * @return Parsed node */ private STNode parseFunctionIdent() { STToken token = peek(); if (token.kind == SyntaxKind.FUNCTION_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.FUNCTION_IDENT); return sol.recoveredNode; } } /** * Parse field ident. * * @return Parsed node */ private STNode parseFieldIdent() { STToken token = peek(); if (token.kind == SyntaxKind.FIELD_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.FIELD_IDENT); return sol.recoveredNode; } } /** * Parse XML namespace declaration. 
* <p> * <code>xmlns-decl := xmlns xml-namespace-uri [ as xml-namespace-prefix ] ; * <br/> * xml-namespace-uri := simple-const-expr * <br/> * xml-namespace-prefix := identifier * </code> * * @return */ private STNode parseXMLNamepsaceDeclaration() { startContext(ParserRuleContext.XML_NAMESPACE_DECLARATION); STNode xmlnsKeyword = parseXMLNSKeyword(); STNode namespaceUri = parseXMLNamespaceUri(); STNode xmlnsDecl = parseXMLDeclRhs(xmlnsKeyword, namespaceUri); endContext(); return xmlnsDecl; } /** * Parse xmlns keyword. * * @return Parsed node */ private STNode parseXMLNSKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.XMLNS_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.XMLNS_KEYWORD); return sol.recoveredNode; } } /** * Parse namespace uri. * * @return Parsed node */ private STNode parseXMLNamespaceUri() { STNode expr = parseConstExpr(); switch (expr.kind) { case STRING_LITERAL: case IDENTIFIER_TOKEN: case QUALIFIED_NAME_REFERENCE: break; default: this.errorHandler.reportInvalidNode(null, "namespace uri must be a subtype of string"); } return expr; } private STNode parseConstExpr() { startContext(ParserRuleContext.CONSTANT_EXPRESSION); STNode expr = parseConstExprInternal(); endContext(); return expr; } private STNode parseConstExprInternal() { STToken nextToken = peek(); return parseConstExprInternal(nextToken.kind); } /** * Parse constants expr. 
* * @return Parsed node */ private STNode parseConstExprInternal(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case STRING_LITERAL: case DECIMAL_INTEGER_LITERAL: case HEX_INTEGER_LITERAL: case DECIMAL_FLOATING_POINT_LITERAL: case HEX_FLOATING_POINT_LITERAL: case TRUE_KEYWORD: case FALSE_KEYWORD: case NULL_KEYWORD: return parseBasicLiteral(); case IDENTIFIER_TOKEN: return parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF); case PLUS_TOKEN: case MINUS_TOKEN: return parseSignedIntOrFloat(); case OPEN_BRACE_TOKEN: return parseNilLiteral(); default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.CONSTANT_EXPRESSION_START); return solution.recoveredNode; } } /** * Parse the portion after the namsepsace-uri of an XML declaration. * * @param xmlnsKeyword XMLNS keyword * @param namespaceUri Namespace URI * @return Parsed node */ private STNode parseXMLDeclRhs(STNode xmlnsKeyword, STNode namespaceUri) { return parseXMLDeclRhs(peek().kind, xmlnsKeyword, namespaceUri); } private STNode parseXMLDeclRhs(SyntaxKind nextTokenKind, STNode xmlnsKeyword, STNode namespaceUri) { STNode asKeyword = STNodeFactory.createEmptyNode(); STNode namespacePrefix = STNodeFactory.createEmptyNode(); switch (nextTokenKind) { case AS_KEYWORD: asKeyword = parseAsKeyword(); namespacePrefix = parseNamespacePrefix(); break; case SEMICOLON_TOKEN: break; default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.XML_NAMESPACE_PREFIX_DECL, xmlnsKeyword, namespaceUri); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseXMLDeclRhs(solution.tokenKind, xmlnsKeyword, namespaceUri); } STNode semicolon = parseSemicolon(); return STNodeFactory.createXMLNamespaceDeclarationNode(xmlnsKeyword, namespaceUri, asKeyword, namespacePrefix, semicolon); } /** * Parse import prefix. 
* * @return Parsed node */ private STNode parseNamespacePrefix() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) { return consume(); } else { Solution sol = recover(peek(), ParserRuleContext.NAMESPACE_PREFIX); return sol.recoveredNode; } } /** * Parse named worker declaration. * <p> * <code>named-worker-decl := [annots] worker worker-name return-type-descriptor { sequence-stmt }</code> * * @param annots Annotations attached to the worker decl * @return Parsed node */ private STNode parseNamedWorkerDeclaration(STNode annots) { startContext(ParserRuleContext.NAMED_WORKER_DECL); STNode workerKeyword = parseWorkerKeyword(); STNode workerName = parseWorkerName(); STNode returnTypeDesc = parseReturnTypeDescriptor(); STNode workerBody = parseBlockNode(); endContext(); return STNodeFactory.createNamedWorkerDeclarationNode(annots, workerKeyword, workerName, returnTypeDesc, workerBody); } private STNode parseReturnTypeDescriptor() { STToken token = peek(); if (token.kind != SyntaxKind.RETURNS_KEYWORD) { return STNodeFactory.createEmptyNode(); } STNode returnsKeyword = consume(); STNode annot = parseAnnotations(); STNode type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_RETURN_TYPE_DESC); return STNodeFactory.createReturnTypeDescriptorNode(returnsKeyword, annot, type); } /** * Parse worker keyword. * * @return Parsed node */ private STNode parseWorkerKeyword() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.WORKER_KEYWORD) { return consume(); } else { Solution sol = recover(peek(), ParserRuleContext.WORKER_KEYWORD); return sol.recoveredNode; } } /** * Parse worker name. * <p> * <code>worker-name := identifier</code> * * @return Parsed node */ private STNode parseWorkerName() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) { return consume(); } else { Solution sol = recover(peek(), ParserRuleContext.WORKER_NAME); return sol.recoveredNode; } } /** * Parse documentation string. 
* <p> * <code>DocumentationString := DocumentationLine +</code> * <p> * Refer {@link BallerinaLexer * * @return Parsed node */ private STNode parseDocumentationString() { List<STNode> docLines = new ArrayList<>(); STToken nextToken = peek(); while (nextToken.kind == SyntaxKind.DOCUMENTATION_LINE) { docLines.add(consume()); nextToken = peek(); } STNode documentationLines = STNodeFactory.createNodeList(docLines); return STNodeFactory.createDocumentationStringNode(documentationLines); } /** * Parse lock statement. * <code>lock-stmt := lock block-stmt ;</code> * * @return Lock statement */ private STNode parseLockStatement() { startContext(ParserRuleContext.LOCK_STMT); STNode lockKeyword = parseLockKeyword(); STNode blockStatement = parseBlockNode(); endContext(); return STNodeFactory.createLockStatementNode(lockKeyword, blockStatement); } /** * Parse lock-keyword. * * @return lock-keyword node */ private STNode parseLockKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.LOCK_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.LOCK_KEYWORD); return sol.recoveredNode; } } /** * Parse union type descriptor. * union-type-descriptor := type-descriptor | type-descriptor * * @param leftTypeDesc Type desc in the LHS os the union type desc. * @param context Current context. * @return parsed union type desc node */ private STNode parseUnionTypeDescriptor(STNode leftTypeDesc, ParserRuleContext context) { STNode pipeToken = parsePipeToken(); STNode rightTypeDesc = parseTypeDescriptor(context); return STNodeFactory.createUnionTypeDescriptorNode(leftTypeDesc, pipeToken, rightTypeDesc); } /** * Parse pipe token. 
* 
     * @return parsed pipe token node
     */
    private STNode parsePipeToken() {
        STToken token = peek();
        if (token.kind == SyntaxKind.PIPE_TOKEN) {
            return consume();
        } else {
            Solution sol = recover(token, ParserRuleContext.PIPE);
            return sol.recoveredNode;
        }
    }

    /**
     * Check whether the given token kind can start a type descriptor.
     *
     * @param nodeKind Token kind to check
     * @return <code>true</code> if the token can start a type desc
     */
    private boolean isTypeStartingToken(SyntaxKind nodeKind) {
        switch (nodeKind) {
            case IDENTIFIER_TOKEN:
            case SERVICE_KEYWORD:
            case RECORD_KEYWORD:
            case OBJECT_KEYWORD:
            case ABSTRACT_KEYWORD:
            case CLIENT_KEYWORD:
            case OPEN_PAREN_TOKEN: // nil type descriptor '()'
            case MAP_KEYWORD: // map type desc
            case FUTURE_KEYWORD: // future type desc
            case TYPEDESC_KEYWORD: // typedesc type desc
            case ERROR_KEYWORD: // error type desc
            case STREAM_KEYWORD: // stream type desc
            case TABLE_KEYWORD: // table type desc
            case FUNCTION_KEYWORD:
            case OPEN_BRACKET_TOKEN:
                return true;
            default:
                if (isSingletonTypeDescStart(nodeKind, true)) {
                    return true;
                }
                return isSimpleType(nodeKind);
        }
    }

    /**
     * Check whether the given token kind is a simple (built-in) type keyword.
     *
     * @param nodeKind Token kind to check
     * @return <code>true</code> if the token is a simple type
     */
    static boolean isSimpleType(SyntaxKind nodeKind) {
        switch (nodeKind) {
            case INT_KEYWORD:
            case FLOAT_KEYWORD:
            case DECIMAL_KEYWORD:
            case BOOLEAN_KEYWORD:
            case STRING_KEYWORD:
            case BYTE_KEYWORD:
            case XML_KEYWORD:
            case JSON_KEYWORD:
            case HANDLE_KEYWORD:
            case ANY_KEYWORD:
            case ANYDATA_KEYWORD:
            case NEVER_KEYWORD:
            case SERVICE_KEYWORD:
            case VAR_KEYWORD:
            case ERROR_KEYWORD: // This is for the recovery. <code>error a;</code> scenario recovered here.
            case STREAM_KEYWORD: // This is for recovery logic. <code>stream a;</code> scenario recovered here.
            case READONLY_KEYWORD:
            case DISTINCT_KEYWORD:
                return true;
            case TYPE_DESC:
                // This is a special case. TYPE_DESC is only returned from recovery.
                return true;
            default:
                return false;
        }
    }

    // Map a built-in type keyword to the corresponding type-descriptor syntax kind.
    private SyntaxKind getTypeSyntaxKind(SyntaxKind typeKeyword) {
        switch (typeKeyword) {
            case INT_KEYWORD:
                return SyntaxKind.INT_TYPE_DESC;
            case FLOAT_KEYWORD:
                return SyntaxKind.FLOAT_TYPE_DESC;
            case DECIMAL_KEYWORD:
                return SyntaxKind.DECIMAL_TYPE_DESC;
            case BOOLEAN_KEYWORD:
                return SyntaxKind.BOOLEAN_TYPE_DESC;
            case STRING_KEYWORD:
                return SyntaxKind.STRING_TYPE_DESC;
            case BYTE_KEYWORD:
                return SyntaxKind.BYTE_TYPE_DESC;
            case XML_KEYWORD:
                return SyntaxKind.XML_TYPE_DESC;
            case JSON_KEYWORD:
                return SyntaxKind.JSON_TYPE_DESC;
            case HANDLE_KEYWORD:
                return SyntaxKind.HANDLE_TYPE_DESC;
            case ANY_KEYWORD:
                return SyntaxKind.ANY_TYPE_DESC;
            case ANYDATA_KEYWORD:
                return SyntaxKind.ANYDATA_TYPE_DESC;
            case NEVER_KEYWORD:
                return SyntaxKind.NEVER_TYPE_DESC;
            case SERVICE_KEYWORD:
                return SyntaxKind.SERVICE_TYPE_DESC;
            case VAR_KEYWORD:
                return SyntaxKind.VAR_TYPE_DESC;
            default:
                return SyntaxKind.TYPE_DESC;
        }
    }

    /**
     * Parse fork-keyword.
     *
     * @return Fork-keyword node
     */
    private STNode parseForkKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.FORK_KEYWORD) {
            return consume();
        } else {
            Solution sol = recover(token, ParserRuleContext.FORK_KEYWORD);
            return sol.recoveredNode;
        }
    }

    /**
     * Parse multiple named worker declarations.
     *
     * @return named-worker-declarations node array
     */
    private STNode parseMultipleNamedWorkerDeclarations() {
        ArrayList<STNode> workers = new ArrayList<>();
        while (!isEndOfStatements()) {
            STNode stmt = parseStatement();
            if (stmt == null) {
                break;
            }
            switch (stmt.kind) {
                case NAMED_WORKER_DECLARATION:
                    workers.add(stmt);
                    break;
                default:
                    // Only worker declarations may appear inside a fork block.
                    this.errorHandler.reportInvalidNode(null, "Only named-workers are allowed here");
                    break;
            }
        }

        if (workers.isEmpty()) {
            this.errorHandler.reportInvalidNode(null, "Fork Statement must contain atleast one named-worker");
        }
        STNode namedWorkers = STNodeFactory.createNodeList(workers);
        return namedWorkers;
    }

    /**
     * Parse fork statement.
     * <code>fork-stmt := fork { named-worker-decl+ }</code>
     *
     * @return Fork statement
     */
    private STNode parseForkStatement() {
        startContext(ParserRuleContext.FORK_STMT);
        STNode forkKeyword = parseForkKeyword();
        STNode openBrace = parseOpenBrace();
        STNode namedWorkerDeclarations = parseMultipleNamedWorkerDeclarations();
        STNode closeBrace = parseCloseBrace();
        endContext();
        return STNodeFactory.createForkStatementNode(forkKeyword, openBrace, namedWorkerDeclarations, closeBrace);
    }

    /**
     * Parse decimal floating point literal.
* * @return Parsed node */ private STNode parseDecimalFloatingPointLiteral() { STToken token = peek(); if (token.kind == SyntaxKind.DECIMAL_FLOATING_POINT_LITERAL) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.DECIMAL_FLOATING_POINT_LITERAL); return sol.recoveredNode; } } /** * Parse hex floating point literal. * * @return Parsed node */ private STNode parseHexFloatingPointLiteral() { STToken token = peek(); if (token.kind == SyntaxKind.HEX_FLOATING_POINT_LITERAL) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.HEX_FLOATING_POINT_LITERAL); return sol.recoveredNode; } } /** * Parse trap expression. * <p> * <code> * trap-expr := trap expression * </code> * * @param allowActions Allow actions * @param isRhsExpr Whether this is a RHS expression or not * @return Trap expression node */ private STNode parseTrapExpression(boolean isRhsExpr, boolean allowActions) { STNode trapKeyword = parseTrapKeyword(); STNode expr = parseExpression(OperatorPrecedence.EXPRESSION_ACTION, isRhsExpr, allowActions); if (isAction(expr)) { return STNodeFactory.createTrapExpressionNode(SyntaxKind.TRAP_ACTION, trapKeyword, expr); } return STNodeFactory.createTrapExpressionNode(SyntaxKind.TRAP_EXPRESSION, trapKeyword, expr); } /** * Parse trap-keyword. * * @return Trap-keyword node */ private STNode parseTrapKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.TRAP_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.TRAP_KEYWORD); return sol.recoveredNode; } } /** * Parse list constructor expression. 
* <p> * <code> * list-constructor-expr := [ [ expr-list ] ] * <br/> * expr-list := expression (, expression)* * </code> * * @return Parsed node */ private STNode parseListConstructorExpr() { startContext(ParserRuleContext.LIST_CONSTRUCTOR); STNode openBracket = parseOpenBracket(); STNode expressions = parseOptionalExpressionsList(); STNode closeBracket = parseCloseBracket(); endContext(); return STNodeFactory.createListConstructorExpressionNode(openBracket, expressions, closeBracket); } /** * Parse optional expression list. * * @return Parsed node */ private STNode parseOptionalExpressionsList() { List<STNode> expressions = new ArrayList<>(); STToken nextToken = peek(); if (isEndOfListConstructor(nextToken.kind)) { return STNodeFactory.createNodeList(new ArrayList<>()); } STNode expr = parseExpression(); expressions.add(expr); nextToken = peek(); STNode listConstructorMemberEnd; while (!isEndOfListConstructor(nextToken.kind)) { listConstructorMemberEnd = parseListConstructorMemberEnd(nextToken.kind); if (listConstructorMemberEnd == null) { break; } expressions.add(listConstructorMemberEnd); expr = parseExpression(); expressions.add(expr); nextToken = peek(); } return STNodeFactory.createNodeList(expressions); } private boolean isEndOfListConstructor(SyntaxKind tokenKind) { switch (tokenKind) { case EOF_TOKEN: case CLOSE_BRACKET_TOKEN: return true; default: return false; } } private STNode parseListConstructorMemberEnd() { return parseListConstructorMemberEnd(peek().kind); } private STNode parseListConstructorMemberEnd(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case COMMA_TOKEN: return parseComma(); case CLOSE_BRACKET_TOKEN: return null; default: Solution solution = recover(peek(), ParserRuleContext.LIST_CONSTRUCTOR_MEMBER_END); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseListConstructorMemberEnd(solution.tokenKind); } } /** * Parse foreach statement. 
* <code>foreach-stmt := foreach typed-binding-pattern in action-or-expr block-stmt</code> * * @return foreach statement */ private STNode parseForEachStatement() { startContext(ParserRuleContext.FOREACH_STMT); STNode forEachKeyword = parseForEachKeyword(); STNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.FOREACH_STMT); STNode inKeyword = parseInKeyword(); STNode actionOrExpr = parseActionOrExpression(); STNode blockStatement = parseBlockNode(); endContext(); return STNodeFactory.createForEachStatementNode(forEachKeyword, typedBindingPattern, inKeyword, actionOrExpr, blockStatement); } /** * Parse foreach-keyword. * * @return ForEach-keyword node */ private STNode parseForEachKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.FOREACH_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.FOREACH_KEYWORD); return sol.recoveredNode; } } /** * Parse in-keyword. * * @return In-keyword node */ private STNode parseInKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.IN_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.IN_KEYWORD); return sol.recoveredNode; } } /** * Parse type cast expression. 
* <p> * <code> * type-cast-expr := < type-cast-param > expression * <br/> * type-cast-param := [annots] type-descriptor | annots * </code> * * @return Parsed node */ private STNode parseTypeCastExpr(boolean isRhsExpr) { startContext(ParserRuleContext.TYPE_CAST); STNode ltToken = parseLTToken(); STNode typeCastParam = parseTypeCastParam(); STNode gtToken = parseGTToken(); endContext(); STNode expression = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false); return STNodeFactory.createTypeCastExpressionNode(ltToken, typeCastParam, gtToken, expression); } private STNode parseTypeCastParam() { STNode annot; STNode type; STToken token = peek(); switch (token.kind) { case AT_TOKEN: annot = parseAnnotations(); token = peek(); if (isTypeStartingToken(token.kind)) { type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS); } else { type = STNodeFactory.createEmptyNode(); } break; default: annot = STNodeFactory.createEmptyNode(); type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS); break; } return STNodeFactory.createTypeCastParamNode(getAnnotations(annot), type); } /** * Parse table constructor expression. * <p> * <code> * table-constructor-expr-rhs := [ [row-list] ] * </code> * * @param tableKeyword tableKeyword that precedes this rhs * @param keySpecifier keySpecifier that precedes this rhs * @return Parsed node */ private STNode parseTableConstructorExprRhs(STNode tableKeyword, STNode keySpecifier) { switchContext(ParserRuleContext.TABLE_CONSTRUCTOR); STNode openBracket = parseOpenBracket(); STNode rowList = parseRowList(); STNode closeBracket = parseCloseBracket(); return STNodeFactory.createTableConstructorExpressionNode(tableKeyword, keySpecifier, openBracket, rowList, closeBracket); } /** * Parse table-keyword. 
* * @return Table-keyword node */ private STNode parseTableKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.TABLE_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.TABLE_KEYWORD); return sol.recoveredNode; } } /** * Parse table rows. * <p> * <code>row-list := [ mapping-constructor-expr (, mapping-constructor-expr)* ]</code> * * @return Parsed node */ private STNode parseRowList() { List<STNode> mappings = new ArrayList<>(); STToken nextToken = peek(); if (isEndOfTableRowList(nextToken.kind)) { return STNodeFactory.createNodeList(new ArrayList<>()); } STNode mapExpr = parseMappingConstructorExpr(); mappings.add(mapExpr); nextToken = peek(); STNode leadingComma; while (!isEndOfTableRowList(nextToken.kind)) { leadingComma = parseComma(); mappings.add(leadingComma); mapExpr = parseMappingConstructorExpr(); mappings.add(mapExpr); nextToken = peek(); } return STNodeFactory.createNodeList(mappings); } private boolean isEndOfTableRowList(SyntaxKind tokenKind) { switch (tokenKind) { case EOF_TOKEN: case CLOSE_BRACKET_TOKEN: return true; case COMMA_TOKEN: case OPEN_BRACE_TOKEN: return false; default: return isEndOfMappingConstructor(tokenKind); } } /** * Parse key specifier. * <p> * <code>key-specifier := key ( [ field-name (, field-name)* ] )</code> * * @return Parsed node */ private STNode parseKeySpecifier() { startContext(ParserRuleContext.KEY_SPECIFIER); STNode keyKeyword = parseKeyKeyword(); STNode openParen = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS); STNode fieldNames = parseFieldNames(); STNode closeParen = parseCloseParenthesis(); endContext(); return STNodeFactory.createKeySpecifierNode(keyKeyword, openParen, fieldNames, closeParen); } /** * Parse key-keyword. 
* * @return Key-keyword node */ private STNode parseKeyKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.KEY_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.KEY_KEYWORD); return sol.recoveredNode; } } /** * Parse field names. * <p> * <code>field-name-list := [ field-name (, field-name)* ]</code> * * @return Parsed node */ private STNode parseFieldNames() { List<STNode> fieldNames = new ArrayList<>(); STToken nextToken = peek(); if (isEndOfFieldNamesList(nextToken.kind)) { return STNodeFactory.createNodeList(new ArrayList<>()); } STNode fieldName = parseVariableName(); fieldNames.add(fieldName); nextToken = peek(); STNode leadingComma; while (!isEndOfFieldNamesList(nextToken.kind)) { leadingComma = parseComma(); fieldNames.add(leadingComma); fieldName = parseVariableName(); fieldNames.add(fieldName); nextToken = peek(); } return STNodeFactory.createNodeList(fieldNames); } private boolean isEndOfFieldNamesList(SyntaxKind tokenKind) { switch (tokenKind) { case COMMA_TOKEN: case IDENTIFIER_TOKEN: return false; default: return true; } } /** * Parse error type descriptor. * <p> * error-type-descriptor := error [error-type-param] * error-type-param := < (detail-type-descriptor | inferred-type-descriptor) > * detail-type-descriptor := type-descriptor * inferred-type-descriptor := * * </p> * * @return Parsed node */ private STNode parseErrorTypeDescriptor() { STNode errorKeywordToken = parseErrorKeyWord(); STNode errorTypeParamsNode; STToken nextToken = peek(); STToken nextNextToken = peek(2); if (nextToken.kind == SyntaxKind.LT_TOKEN || nextNextToken.kind == SyntaxKind.GT_TOKEN) { errorTypeParamsNode = parseErrorTypeParamsNode(); } else { errorTypeParamsNode = STNodeFactory.createEmptyNode(); } return STNodeFactory.createErrorTypeDescriptorNode(errorKeywordToken, errorTypeParamsNode); } /** * Parse error type param node. 
* <p> * error-type-param := < (detail-type-descriptor | inferred-type-descriptor) > * detail-type-descriptor := type-descriptor * inferred-type-descriptor := * * </p> * * @return Parsed node */ private STNode parseErrorTypeParamsNode() { STNode ltToken = parseLTToken(); STNode parameter; STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.ASTERISK_TOKEN) { parameter = consume(); } else { parameter = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS); } STNode gtToken = parseGTToken(); return STNodeFactory.createErrorTypeParamsNode(ltToken, parameter, gtToken); } /** * Parse error-keyword. * * @return Parsed error-keyword node */ private STNode parseErrorKeyWord() { STToken token = peek(); if (token.kind == SyntaxKind.ERROR_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.ERROR_KEYWORD); return sol.recoveredNode; } } /** * Parse stream type descriptor. * <p> * stream-type-descriptor := stream [stream-type-parameters] * stream-type-parameters := < type-descriptor [, type-descriptor]> * </p> * * @return Parsed stream type descriptor node */ private STNode parseStreamTypeDescriptor() { STNode streamKeywordToken = parseStreamKeyword(); STNode streamTypeParamsNode; STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.LT_TOKEN) { streamTypeParamsNode = parseStreamTypeParamsNode(); } else { streamTypeParamsNode = STNodeFactory.createEmptyNode(); } return STNodeFactory.createStreamTypeDescriptorNode(streamKeywordToken, streamTypeParamsNode); } /** * Parse stream type params node. 
* <p> * stream-type-parameters := < type-descriptor [, type-descriptor]> * </p> * * @return Parsed stream type params node */ private STNode parseStreamTypeParamsNode() { STNode ltToken = parseLTToken(); startContext(ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC); STNode leftTypeDescNode = parseTypeDescriptorInternal(ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC); STNode streamTypedesc = parseStreamTypeParamsNode(ltToken, leftTypeDescNode); endContext(); return streamTypedesc; } private STNode parseStreamTypeParamsNode(STNode ltToken, STNode leftTypeDescNode) { return parseStreamTypeParamsNode(peek().kind, ltToken, leftTypeDescNode); } private STNode parseStreamTypeParamsNode(SyntaxKind nextTokenKind, STNode ltToken, STNode leftTypeDescNode) { STNode commaToken, rightTypeDescNode, gtToken; switch (nextTokenKind) { case COMMA_TOKEN: commaToken = parseComma(); rightTypeDescNode = parseTypeDescriptorInternal(ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC); break; case GT_TOKEN: commaToken = STNodeFactory.createEmptyNode(); rightTypeDescNode = STNodeFactory.createEmptyNode(); break; default: Solution solution = recover(peek(), ParserRuleContext.STREAM_TYPE_FIRST_PARAM_RHS, ltToken, leftTypeDescNode); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseStreamTypeParamsNode(solution.tokenKind, ltToken, leftTypeDescNode); } gtToken = parseGTToken(); return STNodeFactory.createStreamTypeParamsNode(ltToken, leftTypeDescNode, commaToken, rightTypeDescNode, gtToken); } /** * Parse stream-keyword. * * @return Parsed stream-keyword node */ private STNode parseStreamKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.STREAM_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.STREAM_KEYWORD); return sol.recoveredNode; } } /** * Parse let expression. 
* <p> * <code> * let-expr := let let-var-decl [, let-var-decl]* in expression * </code> * * @return Parsed node */ private STNode parseLetExpression(boolean isRhsExpr) { STNode letKeyword = parseLetKeyword(); STNode letVarDeclarations = parseLetVarDeclarations(ParserRuleContext.LET_EXPR_LET_VAR_DECL, isRhsExpr); STNode inKeyword = parseInKeyword(); STNode expression = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false); return STNodeFactory.createLetExpressionNode(letKeyword, letVarDeclarations, inKeyword, expression); } /** * Parse let-keyword. * * @return Let-keyword node */ private STNode parseLetKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.LET_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.LET_KEYWORD); return sol.recoveredNode; } } /** * Parse let variable declarations. * <p> * <code>let-var-decl-list := let-var-decl [, let-var-decl]*</code> * * @return Parsed node */ private STNode parseLetVarDeclarations(ParserRuleContext context, boolean isRhsExpr) { startContext(context); List<STNode> varDecls = new ArrayList<>(); STToken nextToken = peek(); if (isEndOfLetVarDeclarations(nextToken.kind)) { endContext(); this.errorHandler.reportMissingTokenError("missing let variable declaration"); return STNodeFactory.createNodeList(varDecls); } STNode varDec = parseLetVarDec(isRhsExpr); varDecls.add(varDec); nextToken = peek(); STNode leadingComma; while (!isEndOfLetVarDeclarations(nextToken.kind)) { leadingComma = parseComma(); varDecls.add(leadingComma); varDec = parseLetVarDec(isRhsExpr); varDecls.add(varDec); nextToken = peek(); } endContext(); return STNodeFactory.createNodeList(varDecls); } private boolean isEndOfLetVarDeclarations(SyntaxKind tokenKind) { switch (tokenKind) { case COMMA_TOKEN: case AT_TOKEN: return false; case IN_KEYWORD: return true; default: return !isTypeStartingToken(tokenKind); } } /** * Parse let variable declaration. 
* <p> * <code>let-var-decl := [annots] typed-binding-pattern = expression</code> * * @return Parsed node */ private STNode parseLetVarDec(boolean isRhsExpr) { STNode annot = parseAnnotations(); STNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.LET_EXPR_LET_VAR_DECL); STNode assign = parseAssignOp(); STNode expression = parseExpression(OperatorPrecedence.ANON_FUNC_OR_LET, isRhsExpr, false); return STNodeFactory.createLetVariableDeclarationNode(annot, typedBindingPattern, assign, expression); } /** * Parse raw backtick string template expression. * <p> * <code>BacktickString := `expression`</code> * * @return Template expression node */ private STNode parseTemplateExpression() { STNode type = STNodeFactory.createEmptyNode(); STNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START); STNode content = parseTemplateContent(); STNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START); return STNodeFactory.createTemplateExpressionNode(SyntaxKind.RAW_TEMPLATE_EXPRESSION, type, startingBackTick, content, endingBackTick); } private STNode parseTemplateContent() { List<STNode> items = new ArrayList<>(); STToken nextToken = peek(); while (!isEndOfBacktickContent(nextToken.kind)) { STNode contentItem = parseTemplateItem(); items.add(contentItem); nextToken = peek(); } return STNodeFactory.createNodeList(items); } private boolean isEndOfBacktickContent(SyntaxKind kind) { switch (kind) { case EOF_TOKEN: case BACKTICK_TOKEN: return true; default: return false; } } private STNode parseTemplateItem() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.INTERPOLATION_START_TOKEN) { return parseInterpolation(); } return consume(); } /** * Parse string template expression. 
* <p> * <code>string-template-expr := string ` expression `</code> * * @return String template expression node */ private STNode parseStringTemplateExpression() { STNode type = parseStringKeyword(); STNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START); STNode content = parseTemplateContent(); STNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_END); return STNodeFactory.createTemplateExpressionNode(SyntaxKind.STRING_TEMPLATE_EXPRESSION, type, startingBackTick, content, endingBackTick); } /** * Parse <code>string</code> keyword. * * @return string keyword node */ private STNode parseStringKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.STRING_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.STRING_KEYWORD); return sol.recoveredNode; } } /** * Parse XML template expression. * <p> * <code>xml-template-expr := xml BacktickString</code> * * @return XML template expression */ private STNode parseXMLTemplateExpression() { STNode xmlKeyword = parseXMLKeyword(); STNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START); STNode content = parseTemplateContentAsXML(); STNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_END); return STNodeFactory.createTemplateExpressionNode(SyntaxKind.XML_TEMPLATE_EXPRESSION, xmlKeyword, startingBackTick, content, endingBackTick); } /** * Parse <code>xml</code> keyword. * * @return xml keyword node */ private STNode parseXMLKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.XML_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.XML_KEYWORD); return sol.recoveredNode; } } /** * Parse the content of the template string as XML. This method first read the * input in the same way as the raw-backtick-template (BacktickString). Then * it parses the content as XML. 
* * @return XML node */ private STNode parseTemplateContentAsXML() { ArrayDeque<STNode> expressions = new ArrayDeque<>(); StringBuilder xmlStringBuilder = new StringBuilder(); STToken nextToken = peek(); while (!isEndOfBacktickContent(nextToken.kind)) { STNode contentItem = parseTemplateItem(); if (contentItem.kind == SyntaxKind.TEMPLATE_STRING) { xmlStringBuilder.append(((STToken) contentItem).text()); } else { xmlStringBuilder.append("${}"); expressions.add(contentItem); } nextToken = peek(); } TextDocument textDocument = TextDocuments.from(xmlStringBuilder.toString()); AbstractTokenReader tokenReader = new TokenReader(new XMLLexer(textDocument.getCharacterReader())); XMLParser xmlParser = new XMLParser(tokenReader, expressions); return xmlParser.parse(); } /** * Parse interpolation of a back-tick string. * <p> * <code> * interpolation := ${ expression } * </code> * * @return Interpolation node */ private STNode parseInterpolation() { startContext(ParserRuleContext.INTERPOLATION); STNode interpolStart = parseInterpolationStart(); STNode expr = parseExpression(); removeAdditionalTokensInInterpolation(); STNode closeBrace = parseCloseBrace(); endContext(); return STNodeFactory.createInterpolationNode(interpolStart, expr, closeBrace); } /** * Parse interpolation start token. * <p> * <code>interpolation-start := ${</code> * * @return Interpolation start token */ private STNode parseInterpolationStart() { STToken token = peek(); if (token.kind == SyntaxKind.INTERPOLATION_START_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.INTERPOLATION_START_TOKEN); return sol.recoveredNode; } } /** * Remove if there any tokens left after the expression inside the interpolation. 
*/ private void removeAdditionalTokensInInterpolation() { while (true) { STToken nextToken = peek(); switch (nextToken.kind) { case EOF_TOKEN: return; case CLOSE_BRACE_TOKEN: return; default: consume(); this.errorHandler.reportInvalidNode(nextToken, "invalid token '" + nextToken.text() + "'"); } } } /** * Parse back-tick token. * * @return Back-tick token */ private STNode parseBacktickToken(ParserRuleContext ctx) { STToken token = peek(); if (token.kind == SyntaxKind.BACKTICK_TOKEN) { return consume(); } else { Solution sol = recover(token, ctx); return sol.recoveredNode; } } /** * Parse table type descriptor. * <p> * table-type-descriptor := table row-type-parameter [key-constraint] * row-type-parameter := type-parameter * key-constraint := key-specifier | key-type-constraint * key-specifier := key ( [ field-name (, field-name)* ] ) * key-type-constraint := key type-parameter * </p> * * @return Parsed table type desc node. */ private STNode parseTableTypeDescriptor() { STNode tableKeywordToken = parseTableKeyword(); STNode rowTypeParameterNode = parseRowTypeParameter(); STNode keyConstraintNode; STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.KEY_KEYWORD) { STNode keyKeywordToken = parseKeyKeyword(); keyConstraintNode = parseKeyConstraint(keyKeywordToken); } else { keyConstraintNode = STNodeFactory.createEmptyNode(); } return STNodeFactory.createTableTypeDescriptorNode(tableKeywordToken, rowTypeParameterNode, keyConstraintNode); } /** * Parse row type parameter node. * <p> * row-type-parameter := type-parameter * </p> * * @return Parsed node. */ private STNode parseRowTypeParameter() { startContext(ParserRuleContext.ROW_TYPE_PARAM); STNode rowTypeParameterNode = parseTypeParameter(); endContext(); return rowTypeParameterNode; } /** * Parse type parameter node. 
* <p> * type-parameter := < type-descriptor > * </p> * * @return Parsed node */ private STNode parseTypeParameter() { STNode ltToken = parseLTToken(); STNode typeNode = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS); STNode gtToken = parseGTToken(); return STNodeFactory.createTypeParameterNode(ltToken, typeNode, gtToken); } /** * Parse key constraint. * <p> * key-constraint := key-specifier | key-type-constraint * </p> * * @return Parsed node. */ private STNode parseKeyConstraint(STNode keyKeywordToken) { return parseKeyConstraint(peek().kind, keyKeywordToken); } private STNode parseKeyConstraint(SyntaxKind nextTokenKind, STNode keyKeywordToken) { switch (nextTokenKind) { case OPEN_PAREN_TOKEN: return parseKeySpecifier(keyKeywordToken); case LT_TOKEN: return parseKeyTypeConstraint(keyKeywordToken); default: Solution solution = recover(peek(), ParserRuleContext.KEY_CONSTRAINTS_RHS, keyKeywordToken); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseKeyConstraint(solution.tokenKind, keyKeywordToken); } } /** * Parse key specifier given parsed key keyword token. * <p> * <code>key-specifier := key ( [ field-name (, field-name)* ] )</code> * * @return Parsed node */ private STNode parseKeySpecifier(STNode keyKeywordToken) { startContext(ParserRuleContext.KEY_SPECIFIER); STNode openParenToken = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS); STNode fieldNamesNode = parseFieldNames(); STNode closeParenToken = parseCloseParenthesis(); endContext(); return STNodeFactory.createKeySpecifierNode(keyKeywordToken, openParenToken, fieldNamesNode, closeParenToken); } /** * Parse key type constraint. * <p> * key-type-constraint := key type-parameter * </p> * * @return Parsed node */ private STNode parseKeyTypeConstraint(STNode keyKeywordToken) { STNode typeParameterNode = parseTypeParameter(); return STNodeFactory.createKeyTypeConstraintNode(keyKeywordToken, typeParameterNode); } /** * Parse function type descriptor. 
* <p> * <code>function-type-descriptor := function function-signature</code> * * @return Function type descriptor node */ private STNode parseFunctionTypeDesc() { startContext(ParserRuleContext.FUNC_TYPE_DESC); STNode functionKeyword = parseFunctionKeyword(); STNode signature = parseFuncSignature(true); endContext(); return STNodeFactory.createFunctionTypeDescriptorNode(functionKeyword, signature); } /** * Parse explicit anonymous function expression. * <p> * <code>explicit-anonymous-function-expr := [annots] function function-signature anon-func-body</code> * * @param annots Annotations. * @return Anonymous function expression node */ private STNode parseExplicitFunctionExpression(STNode annots) { startContext(ParserRuleContext.ANON_FUNC_EXPRESSION); STNode funcKeyword = parseFunctionKeyword(); STNode funcSignature = parseFuncSignature(false); STNode funcBody = parseAnonFuncBody(); return STNodeFactory.createExplicitAnonymousFunctionExpressionNode(annots, funcKeyword, funcSignature, funcBody); } /** * Parse anonymous function body. * <p> * <code>anon-func-body := block-function-body | expr-function-body</code> * * @return */ private STNode parseAnonFuncBody() { return parseAnonFuncBody(peek().kind); } private STNode parseAnonFuncBody(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case OPEN_BRACE_TOKEN: case EOF_TOKEN: STNode body = parseFunctionBodyBlock(true); endContext(); return body; case RIGHT_DOUBLE_ARROW_TOKEN: endContext(); return parseExpressionFuncBody(true); default: Solution solution = recover(peek(), ParserRuleContext.ANON_FUNC_BODY); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseAnonFuncBody(solution.tokenKind); } } /** * Parse expression function body. 
* <p> * <code>expr-function-body := => expression</code> * * @return Expression function body node */ private STNode parseExpressionFuncBody(boolean isAnon) { STNode rightDoubleArrow = parseDoubleRightArrow(); STNode expression = parseExpression(); STNode semiColon; if (isAnon) { semiColon = STNodeFactory.createEmptyNode(); } else { semiColon = parseSemicolon(); } return STNodeFactory.createExpressionFunctionBodyNode(rightDoubleArrow, expression, semiColon); } /** * Parse '=>' token. * * @return Double right arrow token */ private STNode parseDoubleRightArrow() { STToken token = peek(); if (token.kind == SyntaxKind.RIGHT_DOUBLE_ARROW_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.EXPR_FUNC_BODY_START); return sol.recoveredNode; } } private STNode parseImplicitAnonFunc(STNode params) { switch (params.kind) { case SIMPLE_NAME_REFERENCE: case INFER_PARAM_LIST: break; case BRACED_EXPRESSION: params = getAnonFuncParam((STBracedExpressionNode) params); break; default: this.errorHandler.reportInvalidNode(null, "lhs must be an identifier or a param list"); } STNode rightDoubleArrow = parseDoubleRightArrow(); STNode expression = parseExpression(); return STNodeFactory.createImplicitAnonymousFunctionExpressionNode(params, rightDoubleArrow, expression); } /** * Create a new anon-func-param node from a braced expression. * * @param params Braced expression * @return Anon-func param node */ private STNode getAnonFuncParam(STBracedExpressionNode params) { List<STNode> paramList = new ArrayList<>(); paramList.add(params.expression); return STNodeFactory.createImplicitAnonymousFunctionParameters(params.openParen, STNodeFactory.createNodeList(paramList), params.closeParen); } /** * Parse implicit anon function expression. 
* * @param openParen Open parenthesis token * @param firstParam First parameter * @return Implicit anon function expression node */ private STNode parseImplicitAnonFunc(STNode openParen, STNode firstParam) { List<STNode> paramList = new ArrayList<>(); paramList.add(firstParam); STToken nextToken = peek(); STNode paramEnd; STNode param; while (!isEndOfAnonFuncParametersList(nextToken.kind)) { paramEnd = parseImplicitAnonFuncParamEnd(nextToken.kind); if (paramEnd == null) { break; } paramList.add(paramEnd); param = parseIdentifier(ParserRuleContext.IMPLICIT_ANON_FUNC_PARAM); paramList.add(param); nextToken = peek(); } STNode params = STNodeFactory.createNodeList(paramList); STNode closeParen = parseCloseParenthesis(); endContext(); STNode inferedParams = STNodeFactory.createImplicitAnonymousFunctionParameters(openParen, params, closeParen); return parseImplicitAnonFunc(inferedParams); } private STNode parseImplicitAnonFuncParamEnd() { return parseImplicitAnonFuncParamEnd(peek().kind); } private STNode parseImplicitAnonFuncParamEnd(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case COMMA_TOKEN: return parseComma(); case CLOSE_PAREN_TOKEN: return null; default: Solution solution = recover(peek(), ParserRuleContext.ANON_FUNC_PARAM_RHS); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseImplicitAnonFuncParamEnd(solution.tokenKind); } } private boolean isEndOfAnonFuncParametersList(SyntaxKind tokenKind) { switch (tokenKind) { case EOF_TOKEN: case CLOSE_BRACE_TOKEN: case CLOSE_PAREN_TOKEN: case CLOSE_BRACKET_TOKEN: case SEMICOLON_TOKEN: case RETURNS_KEYWORD: case TYPE_KEYWORD: case LISTENER_KEYWORD: case IF_KEYWORD: case WHILE_KEYWORD: case OPEN_BRACE_TOKEN: case RIGHT_DOUBLE_ARROW_TOKEN: return true; default: return false; } } /** * Parse tuple type descriptor. 
 * <p>
 * <code>tuple-type-descriptor := [ tuple-member-type-descriptors ]
 * <br/><br/>
 * tuple-member-type-descriptors := member-type-descriptor (, member-type-descriptor)* [, tuple-rest-descriptor]
 *                                  | [ tuple-rest-descriptor ]
 * <br/><br/>
 * tuple-rest-descriptor := type-descriptor ...
 * </code>
 *
 * @return Tuple type descriptor node
 */
private STNode parseTupleTypeDesc() {
    STNode openBracket = parseOpenBracket();
    startContext(ParserRuleContext.TYPE_DESC_IN_TUPLE);
    STNode memberTypeDesc = parseTupleMemberTypeDescList();
    STNode closeBracket = parseCloseBracket();
    endContext();
    return STNodeFactory.createTupleTypeDescriptorNode(openBracket, memberTypeDesc, closeBracket);
}

/**
 * Parse tuple member type descriptors.
 *
 * @return Parsed node
 */
private STNode parseTupleMemberTypeDescList() {
    List<STNode> typeDescList = new ArrayList<>();
    STToken nextToken = peek();
    // Empty tuple body: report and return an empty member list.
    if (isEndOfTypeList(nextToken.kind)) {
        this.errorHandler.reportMissingTokenError("missing type-desc");
        return STNodeFactory.createNodeList(new ArrayList<>());
    }
    STNode typeDesc = parseTypeDescriptorInternal(ParserRuleContext.TYPE_DESC_IN_TUPLE);
    return parseTupleTypeMembers(typeDesc, typeDescList);
}

/**
 * Parse the remaining tuple members, given the first member type descriptor.
 * Members and comma separators are interleaved in {@code typeDescList}; a
 * trailing {@code type-desc ...} is folded into a rest-descriptor node.
 */
private STNode parseTupleTypeMembers(STNode typeDesc, List<STNode> typeDescList) {
    STToken nextToken;
    nextToken = peek();
    STNode tupleMemberRhs;
    while (!isEndOfTypeList(nextToken.kind)) {
        tupleMemberRhs = parseTupleMemberRhs(nextToken.kind);
        if (tupleMemberRhs == null) {
            break;
        }
        if (tupleMemberRhs.kind == SyntaxKind.ELLIPSIS_TOKEN) {
            // "T ..." - the pending member becomes the rest descriptor; it must be last.
            typeDesc = STNodeFactory.createRestDescriptorNode(typeDesc, tupleMemberRhs);
            break;
        }
        typeDescList.add(typeDesc);
        typeDescList.add(tupleMemberRhs);
        typeDesc = parseTypeDescriptorInternal(ParserRuleContext.TYPE_DESC_IN_TUPLE);
        nextToken = peek();
    }
    // Append the last pending member (or the rest descriptor built above).
    typeDescList.add(typeDesc);
    return STNodeFactory.createNodeList(typeDescList);
}

private STNode parseTupleMemberRhs() {
    return parseTupleMemberRhs(peek().kind);
}

/**
 * Parse what follows a tuple member: a comma (more members), an ellipsis
 * (rest descriptor), or {@code null} when the close-bracket ends the list.
 */
private STNode parseTupleMemberRhs(SyntaxKind nextTokenKind) {
    switch (nextTokenKind) {
        case COMMA_TOKEN:
            return parseComma();
        case CLOSE_BRACKET_TOKEN:
            return null;
        case ELLIPSIS_TOKEN:
            return parseEllipsis();
        default:
            Solution solution = recover(peek(), ParserRuleContext.TYPE_DESC_IN_TUPLE_RHS);
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }
            return parseTupleMemberRhs(solution.tokenKind);
    }
}

/**
 * Check whether the given token terminates a type-descriptor list.
 */
private boolean isEndOfTypeList(SyntaxKind nextTokenKind) {
    switch (nextTokenKind) {
        case CLOSE_BRACKET_TOKEN:
        case CLOSE_BRACE_TOKEN:
        case CLOSE_PAREN_TOKEN:
        case EOF_TOKEN:
        case EQUAL_TOKEN:
        case OPEN_BRACE_TOKEN:
        case SEMICOLON_TOKEN:
            return true;
        default:
            return false;
    }
}

/**
 * Parse table constructor or query expression.
 * <p>
 * <code>
 * table-constructor-or-query-expr := table-constructor-expr | query-expr
 * <br/>
 * table-constructor-expr := table [key-specifier] [ [row-list] ]
 * <br/>
 * query-expr := [query-construct-type] query-pipeline select-clause
 * <br/>
 * query-construct-type := table key-specifier | stream
 * </code>
 *
 * @return Parsed node
 */
private STNode parseTableConstructorOrQuery(boolean isRhsExpr) {
    startContext(ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_EXPRESSION);
    STNode tableOrQueryExpr = parseTableConstructorOrQuery(peek().kind, isRhsExpr);
    endContext();
    return tableOrQueryExpr;
}

/**
 * Disambiguate by leading token: "from" starts a plain query, "stream"/"table"
 * provide a query construct type (or, for "table", a table constructor).
 */
private STNode parseTableConstructorOrQuery(SyntaxKind nextTokenKind, boolean isRhsExpr) {
    STNode queryConstructType;
    switch (nextTokenKind) {
        case FROM_KEYWORD:
            queryConstructType = STNodeFactory.createEmptyNode();
            return parseQueryExprRhs(queryConstructType, isRhsExpr);
        case STREAM_KEYWORD:
            queryConstructType = parseStreamKeyword();
            return parseQueryExprRhs(queryConstructType, isRhsExpr);
        case TABLE_KEYWORD:
            STNode tableKeyword = parseTableKeyword();
            return parseTableConstructorOrQuery(tableKeyword, isRhsExpr);
        default:
            Solution solution = recover(peek(), ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_START, isRhsExpr);
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }
            return parseTableConstructorOrQuery(solution.tokenKind, isRhsExpr);
    }
}

private STNode parseTableConstructorOrQuery(STNode tableKeyword, boolean isRhsExpr) {
    return parseTableConstructorOrQuery(peek().kind, tableKeyword, isRhsExpr);
}

/**
 * After the "table" keyword: "[" means a keyless table constructor,
 * "key" means a key specifier whose follow-token decides constructor vs query.
 */
private STNode parseTableConstructorOrQuery(SyntaxKind nextTokenKind, STNode tableKeyword, boolean isRhsExpr) {
    STNode keySpecifier;
    switch (nextTokenKind) {
        case OPEN_BRACKET_TOKEN:
            keySpecifier = STNodeFactory.createEmptyNode();
            return parseTableConstructorExprRhs(tableKeyword, keySpecifier);
        case KEY_KEYWORD:
            keySpecifier = parseKeySpecifier();
            return parseTableConstructorOrQueryRhs(tableKeyword, keySpecifier, isRhsExpr);
        default:
            Solution solution = recover(peek(), ParserRuleContext.TABLE_KEYWORD_RHS, tableKeyword, isRhsExpr);
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }
            return parseTableConstructorOrQuery(solution.tokenKind, tableKeyword, isRhsExpr);
    }
}

private STNode parseTableConstructorOrQueryRhs(STNode tableKeyword, STNode keySpecifier, boolean isRhsExpr) {
    return parseTableConstructorOrQueryRhs(peek().kind, tableKeyword, keySpecifier, isRhsExpr);
}

/**
 * After "table key(...)": "from" means a query expression, "[" a table constructor.
 */
private STNode parseTableConstructorOrQueryRhs(SyntaxKind nextTokenKind, STNode tableKeyword, STNode keySpecifier,
                                               boolean isRhsExpr) {
    switch (nextTokenKind) {
        case FROM_KEYWORD:
            return parseQueryExprRhs(parseQueryConstructType(tableKeyword, keySpecifier), isRhsExpr);
        case OPEN_BRACKET_TOKEN:
            return parseTableConstructorExprRhs(tableKeyword, keySpecifier);
        default:
            Solution solution = recover(peek(), ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_RHS, tableKeyword,
                    keySpecifier, isRhsExpr);
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }
            return parseTableConstructorOrQueryRhs(solution.tokenKind, tableKeyword, keySpecifier, isRhsExpr);
    }
}

/**
 * Parse query construct type.
 * <p>
 * <code>query-construct-type := table key-specifier</code>
 *
 * @return Parsed node
 */
private STNode parseQueryConstructType(STNode tableKeyword, STNode keySpecifier) {
    return STNodeFactory.createQueryConstructTypeNode(tableKeyword, keySpecifier);
}

/**
 * Parse query expression.
 * <p>
 * <code>
 * query-expr-rhs := query-pipeline select-clause
 * <br/>
 * query-pipeline := from-clause intermediate-clause*
 * </code>
 *
 * @param queryConstructType queryConstructType that precedes this rhs
 * @return Parsed node
 */
private STNode parseQueryExprRhs(STNode queryConstructType, boolean isRhsExpr) {
    switchContext(ParserRuleContext.QUERY_EXPRESSION);
    STNode fromClause = parseFromClause(isRhsExpr);
    List<STNode> clauses = new ArrayList<>();
    STNode intermediateClause;
    STNode selectClause = null;
    while (!isEndOfIntermediateClause(peek().kind)) {
        intermediateClause = parseIntermediateClause(isRhsExpr);
        if (intermediateClause == null) {
            break;
        }
        // NOTE(review): a clause parsed after the select clause is reported
        // but then discarded by this break - confirm that is intended.
        if (selectClause != null) {
            this.errorHandler.reportInvalidNode(null, "extra clauses after select clause");
            break;
        }
        if (intermediateClause.kind == SyntaxKind.SELECT_CLAUSE) {
            selectClause = intermediateClause;
        } else {
            clauses.add(intermediateClause);
        }
    }
    STNode intermediateClauses = STNodeFactory.createNodeList(clauses);
    STNode queryPipeline = STNodeFactory.createQueryPipelineNode(fromClause, intermediateClauses);
    // A trailing "do" turns the pipeline into a query action instead.
    if (peek().kind == SyntaxKind.DO_KEYWORD) {
        return parseQueryAction(queryPipeline, selectClause);
    }
    if (selectClause == null) {
        // A query expression must end with a select clause; synthesize one.
        this.errorHandler.reportMissingTokenError("missing select clause");
        STNode selectKeyword = STNodeFactory.createMissingToken(SyntaxKind.SELECT_KEYWORD);
        STNode expr = STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
        selectClause = STNodeFactory.createSelectClauseNode(selectKeyword, expr);
    }
    return STNodeFactory.createQueryExpressionNode(queryConstructType, queryPipeline, selectClause);
}

/**
 * Parse an intermediate clause.
 * <p>
 * <code>
 * intermediate-clause := from-clause | where-clause | let-clause
 * </code>
 *
 * @return Parsed node
 */
private STNode parseIntermediateClause(boolean isRhsExpr) {
    return parseIntermediateClause(peek().kind, isRhsExpr);
}

private STNode parseIntermediateClause(SyntaxKind nextTokenKind, boolean isRhsExpr) {
    switch (nextTokenKind) {
        case FROM_KEYWORD:
            return parseFromClause(isRhsExpr);
        case WHERE_KEYWORD:
            return parseWhereClause(isRhsExpr);
        case LET_KEYWORD:
            return parseLetClause(isRhsExpr);
        case SELECT_KEYWORD:
            return parseSelectClause(isRhsExpr);
        case DO_KEYWORD:
        case SEMICOLON_TOKEN:
            // "do" / ";" ends the pipeline; the caller handles what follows.
            return null;
        default:
            Solution solution = recover(peek(), ParserRuleContext.QUERY_PIPELINE_RHS, isRhsExpr);
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }
            return parseIntermediateClause(solution.tokenKind, isRhsExpr);
    }
}

/**
 * Check whether the given token terminates the intermediate-clause list of a
 * query pipeline. Any valid expression-RHS starter also ends the pipeline.
 */
private boolean isEndOfIntermediateClause(SyntaxKind tokenKind) {
    switch (tokenKind) {
        case CLOSE_BRACE_TOKEN:
        case CLOSE_PAREN_TOKEN:
        case CLOSE_BRACKET_TOKEN:
        case OPEN_BRACE_TOKEN:
        case SEMICOLON_TOKEN:
        case PUBLIC_KEYWORD:
        case FUNCTION_KEYWORD:
        case EOF_TOKEN:
        case RESOURCE_KEYWORD:
        case LISTENER_KEYWORD:
        case DOCUMENTATION_LINE:
        case PRIVATE_KEYWORD:
        case RETURNS_KEYWORD:
        case SERVICE_KEYWORD:
        case TYPE_KEYWORD:
        case CONST_KEYWORD:
        case FINAL_KEYWORD:
        case DO_KEYWORD:
            return true;
        default:
            return isValidExprRhsStart(tokenKind);
    }
}

/**
 * Parse from clause.
 * <p>
 * <code>from-clause := from typed-binding-pattern in expression</code>
 *
 * @return Parsed node
 */
private STNode parseFromClause(boolean isRhsExpr) {
    STNode fromKeyword = parseFromKeyword();
    STNode type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN);
    STNode varName = parseVariableName();
    STNode inKeyword = parseInKeyword();
    // QUERY precedence keeps trailing clause keywords out of this expression.
    STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
    return STNodeFactory.createFromClauseNode(fromKeyword, type, varName, inKeyword, expression);
}

/**
 * Parse from-keyword.
 *
 * @return From-keyword node
 */
private STNode parseFromKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.FROM_KEYWORD) {
        return consume();
    } else {
        Solution sol = recover(token, ParserRuleContext.FROM_KEYWORD);
        return sol.recoveredNode;
    }
}

/**
 * Parse where clause.
 * <p>
 * <code>where-clause := where expression</code>
 *
 * @return Parsed node
 */
private STNode parseWhereClause(boolean isRhsExpr) {
    STNode whereKeyword = parseWhereKeyword();
    STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
    return STNodeFactory.createWhereClauseNode(whereKeyword, expression);
}

/**
 * Parse where-keyword.
 *
 * @return Where-keyword node
 */
private STNode parseWhereKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.WHERE_KEYWORD) {
        return consume();
    } else {
        Solution sol = recover(token, ParserRuleContext.WHERE_KEYWORD);
        return sol.recoveredNode;
    }
}

/**
 * Parse let clause.
 * <p>
 * <code>let-clause := let let-var-decl [, let-var-decl]* </code>
 *
 * @return Parsed node
 */
private STNode parseLetClause(boolean isRhsExpr) {
    STNode letKeyword = parseLetKeyword();
    STNode letVarDeclarations = parseLetVarDeclarations(ParserRuleContext.LET_CLAUSE_LET_VAR_DECL, isRhsExpr);
    return STNodeFactory.createLetClauseNode(letKeyword, letVarDeclarations);
}

/**
 * Parse select clause.
 * <p>
 * <code>select-clause := select expression</code>
 *
 * @return Parsed node
 */
private STNode parseSelectClause(boolean isRhsExpr) {
    STNode selectKeyword = parseSelectKeyword();
    STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
    return STNodeFactory.createSelectClauseNode(selectKeyword, expression);
}

/**
 * Parse select-keyword.
 *
 * @return Select-keyword node
 */
private STNode parseSelectKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.SELECT_KEYWORD) {
        return consume();
    } else {
        Solution sol = recover(token, ParserRuleContext.SELECT_KEYWORD);
        return sol.recoveredNode;
    }
}

/**
 * Parse start action.
 * <p>
 * <code>start-action := [annots] start (function-call-expr|method-call-expr|remote-method-call-action)</code>
 *
 * @return Start action node
 */
private STNode parseStartAction(STNode annots) {
    STNode startKeyword = parseStartKeyword();
    STNode expr = parseActionOrExpression();
    validateExprInStartAction(expr);
    return STNodeFactory.createStartActionNode(annots, startKeyword, expr);
}

/**
 * Parse start keyword.
 *
 * @return Start keyword node
 */
private STNode parseStartKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.START_KEYWORD) {
        return consume();
    } else {
        Solution sol = recover(token, ParserRuleContext.START_KEYWORD);
        return sol.recoveredNode;
    }
}

/**
 * Report an error unless the expression after "start" is a function call,
 * method call, or remote method call. Missing (recovered) nodes are tolerated
 * to avoid cascading errors.
 */
private void validateExprInStartAction(STNode expression) {
    switch (expression.kind) {
        case FUNCTION_CALL:
        case METHOD_CALL:
        case REMOTE_METHOD_CALL_ACTION:
            break;
        default:
            if (isMissingNode(expression)) {
                break;
            }
            this.errorHandler.reportInvalidNode(null, "expression followed by the start keyword must be a " +
                    "func-call, a method-call or a remote-method-call");
            break;
    }
}

/**
 * Parse flush action.
 * <p>
 * <code>flush-action := flush [peer-worker]</code>
 *
 * @return flush action node
 */
private STNode parseFlushAction() {
    STNode flushKeyword = parseFlushKeyword();
    STNode peerWorker = parseOptionalPeerWorkerName();
    return STNodeFactory.createFlushActionNode(flushKeyword, peerWorker);
}

/**
 * Parse flush keyword.
 *
 * @return flush keyword node
 */
private STNode parseFlushKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.FLUSH_KEYWORD) {
        return consume();
    } else {
        Solution sol = recover(token, ParserRuleContext.FLUSH_KEYWORD);
        return sol.recoveredNode;
    }
}

/**
 * Parse peer worker.
 * <p>
 * <code>peer-worker := worker-name | default</code>
 *
 * @return peer worker name node, or an empty node if the next token is not a peer worker
 */
private STNode parseOptionalPeerWorkerName() {
    STToken token = peek();
    switch (token.kind) {
        case IDENTIFIER_TOKEN:
        case DEFAULT_KEYWORD:
            return STNodeFactory.createSimpleNameReferenceNode(consume());
        default:
            // Peer worker is optional here - no recovery, just absent.
            return STNodeFactory.createEmptyNode();
    }
}

/**
 * Parse intersection type descriptor.
 * <p>
 * intersection-type-descriptor := type-descriptor & type-descriptor
 * </p>
 *
 * @return Parsed node
 */
private STNode parseIntersectionTypeDescriptor(STNode leftTypeDesc, ParserRuleContext context) {
    // The caller has already verified the '&' token; consume it directly.
    STNode bitwiseAndToken = consume();
    STNode rightTypeDesc = parseTypeDescriptor(context);
    return STNodeFactory.createIntersectionTypeDescriptorNode(leftTypeDesc, bitwiseAndToken, rightTypeDesc);
}

/**
 * Parse singleton type descriptor.
 * <p>
 * singleton-type-descriptor := simple-const-expr
 * simple-const-expr :=
 *      nil-literal
 *      | boolean-literal
 *      | [Sign] int-literal
 *      | [Sign] floating-point-literal
 *      | string-literal
 *      | constant-reference-expr
 * </p>
 */
private STNode parseSingletonTypeDesc() {
    STNode simpleContExpr = parseConstExpr();
    return STNodeFactory.createSingletonTypeDescriptorNode(simpleContExpr);
}

/**
 * Parse a sign ('+'/'-') followed by an int or float literal, as a unary expression.
 */
private STNode parseSignedIntOrFloat() {
    STNode operator = parseUnaryOperator();
    STNode literal;
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case HEX_INTEGER_LITERAL:
        case DECIMAL_FLOATING_POINT_LITERAL:
        case HEX_FLOATING_POINT_LITERAL:
            literal = consume();
            break;
        default:
            // Treat anything else as (a possibly-recovered) decimal int literal.
            literal = parseDecimalIntLiteral(ParserRuleContext.DECIMAL_INTEGER_LITERAL);
    }
    return STNodeFactory.createUnaryExpressionNode(operator, literal);
}

/**
 * Check whether the next tokens can start a singleton type descriptor.
 * Outside a type-desc context a literal only counts if the token after it is
 * a valid type-desc RHS, to avoid mistaking plain expressions for types.
 */
private boolean isSingletonTypeDescStart(SyntaxKind tokenKind, boolean inTypeDescCtx) {
    STToken nextNextToken = getNextNextToken(tokenKind);
    switch (tokenKind) {
        case STRING_LITERAL:
        case DECIMAL_INTEGER_LITERAL:
        case HEX_INTEGER_LITERAL:
        case DECIMAL_FLOATING_POINT_LITERAL:
        case HEX_FLOATING_POINT_LITERAL:
        case TRUE_KEYWORD:
        case FALSE_KEYWORD:
        case NULL_KEYWORD:
            if (inTypeDescCtx || isValidTypeDescRHSOutSideTypeDescCtx(nextNextToken)) {
                return true;
            }
            return false;
        case PLUS_TOKEN:
        case MINUS_TOKEN:
            return isIntOrFloat(nextNextToken);
        default:
            return false;
    }
}

/**
 * Check whether the given token is an int or float literal.
 */
static boolean isIntOrFloat(STToken token) {
    switch (token.kind) {
        case DECIMAL_INTEGER_LITERAL:
        case HEX_INTEGER_LITERAL:
        case DECIMAL_FLOATING_POINT_LITERAL:
        case HEX_FLOATING_POINT_LITERAL:
            return true;
        default:
            return false;
    }
}

/**
 * Check whether the given token can follow a type descriptor when we are not
 * already inside a type-desc context (e.g. '?', '|', '&', '[' ...).
 */
private boolean isValidTypeDescRHSOutSideTypeDescCtx(STToken token) {
    switch (token.kind) {
        case IDENTIFIER_TOKEN:
        case QUESTION_MARK_TOKEN:
        case OPEN_PAREN_TOKEN:
        case OPEN_BRACKET_TOKEN:
        case PIPE_TOKEN:
        case BITWISE_AND_TOKEN:
        case OPEN_BRACE_TOKEN:
        case ERROR_KEYWORD:
            return true;
        default:
            return false;
    }
}

/**
 * Parse binding-patterns.
 *
 * binding-pattern :=
 *    capture-binding-pattern
 *    | wildcard-binding-pattern
 *    | list-binding-pattern
 *    | mapping-binding-pattern
 *    | functional-binding-pattern
 * capture-binding-pattern := variable-name
 * variable-name := identifier
 * wildcard-binding-pattern := _
 * list-binding-pattern := [ list-member-binding-patterns ]
 * list-member-binding-patterns :=
 *    binding-pattern (, binding-pattern)* [, rest-binding-pattern]
 *    | [ rest-binding-pattern ]
 * mapping-binding-pattern := { field-binding-patterns }
 * field-binding-patterns :=
 *    field-binding-pattern (, field-binding-pattern)* [, rest-binding-pattern]
 *    | [ rest-binding-pattern ]
 * field-binding-pattern :=
 *    field-name : binding-pattern
 *    | variable-name
 * rest-binding-pattern := ...
 variable-name
 * functional-binding-pattern := functionally-constructible-type-reference ( arg-list-binding-pattern )
 * arg-list-binding-pattern :=
 *    positional-arg-binding-patterns [, other-arg-binding-patterns]
 *    | other-arg-binding-patterns
 * positional-arg-binding-patterns := positional-arg-binding-pattern (, positional-arg-binding-pattern)*
 * positional-arg-binding-pattern := binding-pattern
 * other-arg-binding-patterns :=
 *    named-arg-binding-patterns [, rest-binding-pattern]
 *    | [rest-binding-pattern]
 * named-arg-binding-patterns := named-arg-binding-pattern (, named-arg-binding-pattern)*
 * named-arg-binding-pattern := arg-name = binding-pattern
 *
 * @return binding-pattern node
 */
private STNode parseBindingPattern() {
    STToken token = peek();
    return parseBindingPattern(token.kind);
}

private STNode parseBindingPattern(SyntaxKind nextTokenKind) {
    switch (nextTokenKind) {
        case OPEN_BRACKET_TOKEN:
            return parseListBindingPattern();
        case IDENTIFIER_TOKEN:
            return parseCaptureBindingPattern();
        default:
            Solution sol = recover(peek(), ParserRuleContext.BINDING_PATTERN);
            if (sol.action == Action.REMOVE) {
                return sol.recoveredNode;
            }
            return parseBindingPattern(sol.tokenKind);
    }
}

/**
 * Parse capture-binding-pattern.
 *
 * capture-binding-pattern := variable-name
 * variable-name := identifier
 *
 * @return capture-binding-pattern node
 */
private STNode parseCaptureBindingPattern() {
    STToken token = peek();
    if (token.kind == SyntaxKind.IDENTIFIER_TOKEN) {
        STNode varName = parseVariableName();
        return STNodeFactory.createCaptureBindingPatternNode(varName);
    } else {
        Solution sol = recover(token, ParserRuleContext.CAPTURE_BINDING_PATTERN);
        if (sol.action == Action.REMOVE) {
            return sol.recoveredNode;
        }
        return STNodeFactory.createCaptureBindingPatternNode(sol.recoveredNode);
    }
}

/**
 * Parse list-binding-patterns.
 *
 * list-binding-pattern := [ list-member-binding-patterns ]
 * list-member-binding-patterns :=
 *    binding-pattern (, binding-pattern)* [, rest-binding-pattern]
 *    | [ rest-binding-pattern ]
 *
 * @return list-binding-pattern node
 */
private STNode parseListBindingPattern() {
    startContext(ParserRuleContext.LIST_BINDING_PATTERN);
    STNode openBracket = parseOpenBracket();
    List<STNode> bindingPatternsList = new ArrayList<>();
    STNode listBindingPattern = parseListBindingPattern(openBracket, bindingPatternsList);
    endContext();
    return listBindingPattern;
}

private STNode parseListBindingPattern(STNode openBracket, List<STNode> bindingPatternsList) {
    STNode listBindingPatternMember = parseListBindingPatternMember();
    bindingPatternsList.add(listBindingPatternMember);
    STNode listBindingPattern = parseListBindingPattern(openBracket, listBindingPatternMember, bindingPatternsList);
    return listBindingPattern;
}

/**
 * Parse the remaining members of a list binding pattern, given the first member.
 * A rest-binding-pattern ends the member list and is pulled out into its own
 * slot of the resulting node.
 */
private STNode parseListBindingPattern(STNode openBracket, STNode firstMember, List<STNode> bindingPatterns) {
    STNode member = firstMember;
    STToken token = peek();
    STNode listBindingPatternRhs = null;
    while (!isEndOfListBindingPattern(token.kind) && member.kind != SyntaxKind.REST_BINDING_PATTERN) {
        listBindingPatternRhs = parseListBindingpatternRhs(token.kind);
        if (listBindingPatternRhs == null) {
            break;
        }
        bindingPatterns.add(listBindingPatternRhs);
        member = parseListBindingPatternMember();
        bindingPatterns.add(member);
        token = peek();
    }
    STNode restBindingPattern;
    if (member.kind == SyntaxKind.REST_BINDING_PATTERN) {
        // The rest pattern lives in a dedicated field, not in the member list.
        restBindingPattern = bindingPatterns.remove(bindingPatterns.size() - 1);
    } else {
        restBindingPattern = STNodeFactory.createEmptyNode();
    }
    STNode closeBracket = parseCloseBracket();
    STNode bindingPatternsNode = STNodeFactory.createNodeList(bindingPatterns);
    return STNodeFactory.createListBindingPatternNode(openBracket, bindingPatternsNode, restBindingPattern,
            closeBracket);
}

private STNode parseListBindingpatternRhs() {
    return parseListBindingpatternRhs(peek().kind);
}

/**
 * Parse the token following a list-binding-pattern member: a comma to continue,
 * or {@code null} when the close-bracket ends the list.
 */
private STNode parseListBindingpatternRhs(SyntaxKind nextTokenKind) {
    switch (nextTokenKind) {
        case COMMA_TOKEN:
            return parseComma();
        case CLOSE_BRACKET_TOKEN:
            return null;
        default:
            Solution solution = recover(peek(), ParserRuleContext.LIST_BINDING_PATTERN_END_OR_CONTINUE);
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }
            return parseListBindingpatternRhs(solution.tokenKind);
    }
}

private boolean isEndOfListBindingPattern(SyntaxKind nextTokenKind) {
    switch (nextTokenKind) {
        case CLOSE_BRACKET_TOKEN:
        case EOF_TOKEN:
            return true;
        default:
            return false;
    }
}

/**
 * Parse list-binding-pattern entry.
 *
 * list-binding-pattern := [ list-member-binding-patterns ]
 * list-member-binding-patterns :=
 *    binding-pattern (, binding-pattern)* [, rest-binding-pattern]
 *    | [ rest-binding-pattern ]
 *
 * @return rest-binding-pattern node
 */
private STNode parseListBindingPatternMember() {
    STToken token = peek();
    switch (token.kind) {
        case ELLIPSIS_TOKEN:
            // "... varName" - rest binding pattern.
            startContext(ParserRuleContext.REST_BINDING_PATTERN);
            STNode ellipsis = parseEllipsis();
            STNode varName = parseVariableName();
            endContext();
            return STNodeFactory.createRestBindingPatternNode(ellipsis, varName);
        default:
            return parseBindingPattern();
    }
}

/**
 * Parse Typed-binding-pattern.
 * <p>
 * <code>
 * typed-binding-pattern := inferable-type-descriptor binding-pattern
 * <br/>
 * inferable-type-descriptor := type-descriptor | var
 * </code>
 *
 * @return Typed binding pattern node
 */
private STNode parseTypedBindingPattern(ParserRuleContext context) {
    STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);
    STNode typeBindingPattern = parseTypedBindingPatternTypeRhs(typeDesc, context);
    return typeBindingPattern;
}

/**
 * Check whether the parser reached to a valid expression start.
 *
 * @param nextTokenKind Kind of the next immediate token.
 * @param nextTokenIndex Index to the next token.
 * @return <code>true</code> if this is a start of a valid expression. <code>false</code> otherwise
 */
private boolean isValidExpressionStart(SyntaxKind nextTokenKind, int nextTokenIndex) {
    switch (nextTokenKind) {
        case DECIMAL_INTEGER_LITERAL:
        case HEX_INTEGER_LITERAL:
        case STRING_LITERAL:
        case NULL_KEYWORD:
        case TRUE_KEYWORD:
        case FALSE_KEYWORD:
        case DECIMAL_FLOATING_POINT_LITERAL:
        case HEX_FLOATING_POINT_LITERAL:
        case IDENTIFIER_TOKEN:
            // A bare terminal only starts an expression if what follows it
            // can continue one.
            return isValidExprRhsStart(peek(nextTokenIndex + 1).kind);
        case OPEN_PAREN_TOKEN:
        case CHECK_KEYWORD:
        case CHECKPANIC_KEYWORD:
        case OPEN_BRACE_TOKEN:
        case TYPEOF_KEYWORD:
        case NEGATION_TOKEN:
        case EXCLAMATION_MARK_TOKEN:
        case TRAP_KEYWORD:
        case OPEN_BRACKET_TOKEN:
        case LT_TOKEN:
        case TABLE_KEYWORD:
        case STREAM_KEYWORD:
        case FROM_KEYWORD:
        case ERROR_KEYWORD:
        case LET_KEYWORD:
        case BACKTICK_TOKEN:
        case XML_KEYWORD:
        case STRING_KEYWORD:
        case FUNCTION_KEYWORD:
        case NEW_KEYWORD:
        case LEFT_ARROW_TOKEN:
        case SERVICE_KEYWORD:
            return true;
        case PLUS_TOKEN:
        case MINUS_TOKEN:
            // Signed literal: look past the sign.
            return isValidExpressionStart(peek(nextTokenIndex).kind, nextTokenIndex + 1);
        case START_KEYWORD:
        case FLUSH_KEYWORD:
        case WAIT_KEYWORD:
        default:
            // NOTE(review): start/flush/wait deliberately fall through to
            // false here - confirm that is intended.
            return false;
    }
}

/**
 * Parse sync send action.
 * <p>
 * <code>sync-send-action := expression ->> peer-worker</code>
 *
 * @param expression LHS expression of the sync send action
 * @return Sync send action node
 */
private STNode parseSyncSendAction(STNode expression) {
    STNode syncSendToken = parseSyncSendToken();
    STNode peerWorker = parsePeerWorkerName();
    return STNodeFactory.createSyncSendActionNode(expression, syncSendToken, peerWorker);
}

/**
 * Parse peer worker.
 * <p>
 * <code>peer-worker := worker-name | default</code>
 *
 * @return peer worker name node
 */
private STNode parsePeerWorkerName() {
    STToken token = peek();
    switch (token.kind) {
        case IDENTIFIER_TOKEN:
        case DEFAULT_KEYWORD:
            return STNodeFactory.createSimpleNameReferenceNode(consume());
        default:
            // Unlike parseOptionalPeerWorkerName, a peer worker is mandatory here.
            Solution sol = recover(token, ParserRuleContext.PEER_WORKER_NAME);
            return sol.recoveredNode;
    }
}

/**
 * Parse sync send token.
 * <p>
 * <code>sync-send-token := ->> </code>
 *
 * @return sync send token
 */
private STNode parseSyncSendToken() {
    STToken token = peek();
    if (token.kind == SyntaxKind.SYNC_SEND_TOKEN) {
        return consume();
    } else {
        Solution sol = recover(token, ParserRuleContext.SYNC_SEND_TOKEN);
        return sol.recoveredNode;
    }
}

/**
 * Parse receive action.
 * <p>
 * <code>receive-action := single-receive-action | multiple-receive-action</code>
 *
 * @return Receive action
 */
private STNode parseReceiveAction() {
    STNode leftArrow = parseLeftArrowToken();
    STNode receiveWorkers = parseReceiveWorkers();
    return STNodeFactory.createReceiveActionNode(leftArrow, receiveWorkers);
}

private STNode parseReceiveWorkers() {
    return parseReceiveWorkers(peek().kind);
}

private STNode parseReceiveWorkers(SyntaxKind nextTokenKind) {
    switch (nextTokenKind) {
        case DEFAULT_KEYWORD:
        case IDENTIFIER_TOKEN:
            return parsePeerWorkerName();
        case OPEN_BRACE_TOKEN:
            return parseMultipleReceiveWorkers();
        default:
            Solution solution = recover(peek(), ParserRuleContext.RECEIVE_WORKERS);
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }
            return parseReceiveWorkers(solution.tokenKind);
    }
}

/**
 * Parse multiple worker receivers.
 * <p>
 * <code>{ receive-field (, receive-field)* }</code>
 *
 * @return Multiple worker receiver node
 */
private STNode parseMultipleReceiveWorkers() {
    startContext(ParserRuleContext.MULTI_RECEIVE_WORKERS);
    STNode openBrace = parseOpenBrace();
    STNode receiveFields = parseReceiveFields();
    STNode closeBrace = parseCloseBrace();
    endContext();
    return STNodeFactory.createReceiveFieldsNode(openBrace, receiveFields, closeBrace);
}

/**
 * Parse the comma-separated receive-field list; fields and separators are
 * interleaved in the resulting node list.
 */
private STNode parseReceiveFields() {
    List<STNode> receiveFields = new ArrayList<>();
    STToken nextToken = peek();
    if (isEndOfReceiveFields(nextToken.kind)) {
        this.errorHandler.reportMissingTokenError("missing receive field");
        return STNodeFactory.createNodeList(new ArrayList<>());
    }
    STNode receiveField = parseReceiveField();
    receiveFields.add(receiveField);
    nextToken = peek();
    STNode recieveFieldEnd;
    while (!isEndOfReceiveFields(nextToken.kind)) {
        recieveFieldEnd = parseReceiveFieldEnd(nextToken.kind);
        if (recieveFieldEnd == null) {
            break;
        }
        receiveFields.add(recieveFieldEnd);
        receiveField = parseReceiveField();
        receiveFields.add(receiveField);
        nextToken = peek();
    }
    return STNodeFactory.createNodeList(receiveFields);
}

// NOTE(review): receive fields are enclosed in braces, yet this checks
// CLOSE_BRACKET_TOKEN; the close-brace case is instead handled by
// parseReceiveFieldEnd returning null. Confirm the bracket case is intended.
private boolean isEndOfReceiveFields(SyntaxKind nextTokenKind) {
    switch (nextTokenKind) {
        case EOF_TOKEN:
        case CLOSE_BRACKET_TOKEN:
            return true;
        default:
            return false;
    }
}

/**
 * Parse the token following a receive field: a comma to continue, or
 * {@code null} when the close-brace ends the list.
 */
private STNode parseReceiveFieldEnd(SyntaxKind nextTokenKind) {
    switch (nextTokenKind) {
        case COMMA_TOKEN:
            return parseComma();
        case CLOSE_BRACE_TOKEN:
            return null;
        default:
            Solution solution = recover(peek(), ParserRuleContext.RECEIVE_FIELD_END);
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }
            return parseReceiveFieldEnd(solution.tokenKind);
    }
}

private STNode parseReceiveField() {
    return parseReceiveField(peek().kind);
}

/**
 * Parse receive field.
 * <p>
 * <code>receive-field := peer-worker | field-name : peer-worker</code>
 *
 * @param nextTokenKind Kind of the next token
 * @return Receiver field node
 */
private STNode parseReceiveField(SyntaxKind nextTokenKind) {
    switch (nextTokenKind) {
        case DEFAULT_KEYWORD:
            return parseDefaultKeyword();
        case IDENTIFIER_TOKEN:
            STNode identifier = parseIdentifier(ParserRuleContext.RECEIVE_FIELD_NAME);
            return createQualifiedReceiveField(identifier);
        default:
            Solution solution = recover(peek(), ParserRuleContext.RECEIVE_FIELD);
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }
            if (solution.tokenKind == SyntaxKind.IDENTIFIER_TOKEN) {
                return createQualifiedReceiveField(solution.recoveredNode);
            }
            return solution.recoveredNode;
    }
}

/**
 * Turn an identifier into "field-name : peer-worker" if a colon follows;
 * otherwise return the identifier as-is (plain peer worker).
 */
private STNode createQualifiedReceiveField(STNode identifier) {
    if (peek().kind != SyntaxKind.COLON_TOKEN) {
        return identifier;
    }
    STNode colon = parseColon();
    STNode peerWorker = parsePeerWorkerName();
    return STNodeFactory.createQualifiedNameReferenceNode(identifier, colon, peerWorker);
}

/**
 *
 * Parse left arrow (<-) token.
 *
 * @return left arrow token
 */
private STNode parseLeftArrowToken() {
    STToken token = peek();
    if (token.kind == SyntaxKind.LEFT_ARROW_TOKEN) {
        return consume();
    } else {
        Solution sol = recover(token, ParserRuleContext.LEFT_ARROW_TOKEN);
        return sol.recoveredNode;
    }
}

/**
 * Parse signed right shift token (>>).
 *
 * @return Parsed node
 */
private STNode parseSignedRightShiftToken() {
    STNode openGTToken = parseGTToken();
    // The lexer produces individual '>' tokens; reject whitespace between them.
    validateRightShiftOperatorWS(openGTToken);
    STNode endLGToken = parseGTToken();
    return STNodeFactory.createDoubleGTTokenNode(openGTToken, endLGToken);
}

/**
 * Parse unsigned right shift token (>>>).
 *
 * @return Parsed node
 */
private STNode parseUnsignedRightShiftToken() {
    STNode openGTToken = parseGTToken();
    validateRightShiftOperatorWS(openGTToken);
    STNode middleGTToken = parseGTToken();
    validateRightShiftOperatorWS(middleGTToken);
    STNode endLGToken = parseGTToken();
    return STNodeFactory.createTrippleGTTokenNode(openGTToken, middleGTToken, endLGToken);
}

/**
 * Validate the whitespace between '>' tokens of right shift operators.
 * Any trailing minutiae on the preceding '>' means whitespace was present.
 *
 * @param node Preceding node
 */
private void validateRightShiftOperatorWS(STNode node) {
    int diff = node.widthWithTrailingMinutiae() - node.width();
    if (diff > 0) {
        this.errorHandler.reportMissingTokenError("no whitespaces allowed between >>");
    }
}

/**
 * Parse wait action.
 * <p>
 * <code>wait-action := single-wait-action | multiple-wait-action | alternate-wait-action </code>
 *
 * @return Wait action node
 */
private STNode parseWaitAction() {
    STNode waitKeyword = parseWaitKeyword();
    // "{" after "wait" means a multiple-wait action; anything else is
    // single or alternate.
    if (peek().kind == SyntaxKind.OPEN_BRACE_TOKEN) {
        return parseMultiWaitAction(waitKeyword);
    }
    return parseSingleOrAlternateWaitAction(waitKeyword);
}

/**
 * Parse wait keyword.
 *
 * @return wait keyword
 */
private STNode parseWaitKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.WAIT_KEYWORD) {
        return consume();
    } else {
        Solution sol = recover(token, ParserRuleContext.WAIT_KEYWORD);
        return sol.recoveredNode;
    }
}

/**
 * Parse single or alternate wait actions.
* <p> * <code> * alternate-or-single-wait-action := wait wait-future-expr (| wait-future-expr)+ * <br/> * wait-future-expr := expression but not mapping-constructor-expr * </code> * * @param waitKeyword wait keyword * @return Single or alternate wait action node */ private STNode parseSingleOrAlternateWaitAction(STNode waitKeyword) { startContext(ParserRuleContext.ALTERNATE_WAIT_EXPRS); List<STNode> waitFutureExprList = new ArrayList<>(); STToken nextToken = peek(); if (isEndOfWaitFutureExprList(nextToken.kind)) { this.errorHandler.reportMissingTokenError("missing wait field"); endContext(); STNode waitFutureExprs = STNodeFactory.createNodeList(waitFutureExprList); return STNodeFactory.createWaitActionNode(waitKeyword, waitFutureExprs); } STNode waitField = parseWaitFutureExpr(); waitFutureExprList.add(waitField); nextToken = peek(); STNode waitFutureExprEnd; while (!isEndOfWaitFutureExprList(nextToken.kind)) { waitFutureExprEnd = parseWaitFutureExprEnd(nextToken.kind, 1); if (waitFutureExprEnd == null) { break; } waitFutureExprList.add(waitFutureExprEnd); waitField = parseWaitFutureExpr(); waitFutureExprList.add(waitField); nextToken = peek(); } STNode waitFutureExprs = STNodeFactory.createNodeList(waitFutureExprList); endContext(); return STNodeFactory.createWaitActionNode(waitKeyword, waitFutureExprs); } private boolean isEndOfWaitFutureExprList(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case EOF_TOKEN: case CLOSE_BRACE_TOKEN: case SEMICOLON_TOKEN: return true; default: return false; } } private STNode parseWaitFutureExpr() { STNode waitFutureExpr = parseExpression(); if (waitFutureExpr.kind == SyntaxKind.MAPPING_CONSTRUCTOR) { this.errorHandler.reportInvalidNode(null, "mapping constructor expression cannot use as å wait expression"); } return waitFutureExpr; } private STNode parseWaitFutureExprEnd(int nextTokenIndex) { return parseWaitFutureExprEnd(peek().kind, 1); } private STNode parseWaitFutureExprEnd(SyntaxKind nextTokenKind, int nextTokenIndex) { 
switch (nextTokenKind) { case PIPE_TOKEN: return parsePipeToken(); default: if (isEndOfWaitFutureExprList(nextTokenKind) || !isValidExpressionStart(nextTokenKind, nextTokenIndex)) { return null; } Solution solution = recover(peek(), ParserRuleContext.WAIT_FUTURE_EXPR_END, nextTokenIndex); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseWaitFutureExprEnd(solution.tokenKind, 0); } } /** * Parse multiple wait action. * <p> * <code>multiple-wait-action := wait { wait-field (, wait-field)* }</code> * * @param waitKeyword Wait keyword * @return Multiple wait action node */ private STNode parseMultiWaitAction(STNode waitKeyword) { startContext(ParserRuleContext.MULTI_WAIT_FIELDS); STNode openBrace = parseOpenBrace(); STNode waitFields = parseWaitFields(); STNode closeBrace = parseCloseBrace(); endContext(); STNode waitFieldsNode = STNodeFactory.createWaitFieldsListNode(openBrace, waitFields, closeBrace); return STNodeFactory.createWaitActionNode(waitKeyword, waitFieldsNode); } private STNode parseWaitFields() { List<STNode> waitFields = new ArrayList<>(); STToken nextToken = peek(); if (isEndOfReceiveFields(nextToken.kind)) { this.errorHandler.reportMissingTokenError("missing wait field"); return STNodeFactory.createEmptyNodeList(); } STNode waitField = parseWaitField(); waitFields.add(waitField); nextToken = peek(); STNode waitFieldEnd; while (!isEndOfReceiveFields(nextToken.kind)) { waitFieldEnd = parseWaitFieldEnd(nextToken.kind); if (waitFieldEnd == null) { break; } waitFields.add(waitFieldEnd); waitField = parseWaitField(); waitFields.add(waitField); nextToken = peek(); } return STNodeFactory.createNodeList(waitFields); } private STNode parseWaitFieldEnd() { return parseWaitFieldEnd(peek().kind); } private STNode parseWaitFieldEnd(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case COMMA_TOKEN: return parseComma(); case CLOSE_BRACE_TOKEN: return null; default: Solution solution = recover(peek(), 
ParserRuleContext.WAIT_FIELD_END); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseWaitFieldEnd(solution.tokenKind); } } private STNode parseWaitField() { return parseWaitField(peek().kind); } /** * Parse wait field. * <p> * <code>wait-field := variable-name | field-name : wait-future-expr</code> * * @param nextTokenKind Kind of the next token * @return Receiver field node */ private STNode parseWaitField(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case IDENTIFIER_TOKEN: STNode identifier = parseIdentifier(ParserRuleContext.WAIT_FIELD_NAME); return createQualifiedWaitField(identifier); default: Solution solution = recover(peek(), ParserRuleContext.WAIT_FIELD_NAME); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseWaitField(solution.tokenKind); } } private STNode createQualifiedWaitField(STNode identifier) { if (peek().kind != SyntaxKind.COLON_TOKEN) { return identifier; } STNode colon = parseColon(); STNode waitFutureExpr = parseWaitFutureExpr(); return STNodeFactory.createWaitFieldNode(identifier, colon, waitFutureExpr); } /** * Parse annot access expression. * <p> * <code> * annot-access-expr := expression .@ annot-tag-reference * <br/> * annot-tag-reference := qualified-identifier | identifier * </code> * * @param lhsExpr Preceding expression of the annot access access * @return Parsed node */ private STNode parseAnnotAccessExpression(STNode lhsExpr) { STNode annotAccessToken = parseAnnotChainingToken(); STNode annotTagReference = parseFieldAccessIdentifier(); return STNodeFactory.createAnnotAccessExpressionNode(lhsExpr, annotAccessToken, annotTagReference); } /** * Parse annot-chaining-token. 
* * @return Parsed node */ private STNode parseAnnotChainingToken() { STToken token = peek(); if (token.kind == SyntaxKind.ANNOT_CHAINING_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.ANNOT_CHAINING_TOKEN); return sol.recoveredNode; } } /** * Parse field access identifier. * <p> * <code>field-access-identifier := qualified-identifier | identifier</code> * * @return Parsed node */ private STNode parseFieldAccessIdentifier() { return parseQualifiedIdentifier(ParserRuleContext.FIELD_ACCESS_IDENTIFIER); } /** * Parse query action. * <p> * <code>query-action := query-pipeline do-clause * <br/> * do-clause := do block-stmt * </code> * * @param queryPipeline Query pipeline * @param selectClause Select clause if any This is only for validation. * @return Query action node */ private STNode parseQueryAction(STNode queryPipeline, STNode selectClause) { if (selectClause != null) { this.errorHandler.reportInvalidNode(null, "cannot have a select clause in query action"); } startContext(ParserRuleContext.DO_CLAUSE); STNode doKeyword = parseDoKeyword(); STNode blockStmt = parseBlockNode(); endContext(); return STNodeFactory.createQueryActionNode(queryPipeline, doKeyword, blockStmt); } /** * Parse 'do' keyword. * * @return do keyword node */ private STNode parseDoKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.DO_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.DO_KEYWORD); return sol.recoveredNode; } } /** * Parse optional field access or xml optional attribute access expression. * <p> * <code> * optional-field-access-expr := expression ?. field-name * <br/> * xml-optional-attribute-access-expr := expression ?. 
xml-attribute-name * <br/> * xml-attribute-name := xml-qualified-name | qualified-identifier | identifier * <br/> * xml-qualified-name := xml-namespace-prefix : identifier * <br/> * xml-namespace-prefix := identifier * </code> * * @param lhsExpr Preceding expression of the optional access * @return Parsed node */ private STNode parseOptionalFieldAccessExpression(STNode lhsExpr) { STNode optionalFieldAccessToken = parseOptionalChainingToken(); STNode fieldName = parseFieldAccessIdentifier(); return STNodeFactory.createOptionalFieldAccessExpressionNode(lhsExpr, optionalFieldAccessToken, fieldName); } /** * Parse optional chaining token. * * @return parsed node */ private STNode parseOptionalChainingToken() { STToken token = peek(); if (token.kind == SyntaxKind.OPTIONAL_CHAINING_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.OPTIONAL_CHAINING_TOKEN); return sol.recoveredNode; } } /** * Parse conditional expression. * <p> * <code>conditional-expr := expression ? expression : expression</code> * * @param lhsExpr Preceding expression of the question mark * @return Parsed node */ private STNode parseConditionalExpression(STNode lhsExpr) { startContext(ParserRuleContext.CONDITIONAL_EXPRESSION); STNode questionMark = parseQuestionMark(); STNode middleExpr = parseExpression(OperatorPrecedence.ELVIS_CONDITIONAL, true, false); STNode colon = parseColon(); endContext(); STNode endExpr = parseExpression(OperatorPrecedence.ELVIS_CONDITIONAL, true, false); return STNodeFactory.createConditionalExpressionNode(lhsExpr, questionMark, middleExpr, colon, endExpr); } /** * Parse enum declaration. * <p> * module-enum-decl := * metadata * [public] enum identifier { enum-member (, enum-member)* } * enum-member := metadata identifier [= const-expr] * </p> * * @param metadata * @param qualifier * * @return Parsed enum node. 
*/ private STNode parseEnumDeclaration(STNode metadata, STNode qualifier) { startContext(ParserRuleContext.MODULE_ENUM_DECLARATION); STNode enumKeywordToken = parseEnumKeyword(); STNode identifier = parseIdentifier(ParserRuleContext.MODULE_ENUM_NAME); STNode openBraceToken = parseOpenBrace(); STNode enumMemberList = parseEnumMemberList(); STNode closeBraceToken = parseCloseBrace(); endContext(); return STNodeFactory.createEnumDeclarationNode(metadata, qualifier, enumKeywordToken, identifier, openBraceToken, enumMemberList, closeBraceToken); } /** * Parse 'enum' keyword. * * @return enum keyword node */ private STNode parseEnumKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.ENUM_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.ENUM_KEYWORD); return sol.recoveredNode; } } /** * Parse enum member list. * <p> * enum-member := metadata identifier [= const-expr] * </p> * * @return enum member list node. */ private STNode parseEnumMemberList() { startContext(ParserRuleContext.ENUM_MEMBER_LIST); List<STNode> enumMemberList = new ArrayList<>(); STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.CLOSE_BRACE_TOKEN) { this.errorHandler.reportMissingTokenError("enum member list cannot be empty"); return STNodeFactory.createNodeList(new ArrayList<>()); } STNode enumMember = parseEnumMember(); nextToken = peek(); STNode enumMemberRhs; while (nextToken.kind != SyntaxKind.CLOSE_BRACE_TOKEN) { enumMemberRhs = parseEnumMemberRhs(nextToken.kind); if (enumMemberRhs == null) { break; } enumMemberList.add(enumMember); enumMemberList.add(enumMemberRhs); enumMember = parseEnumMember(); nextToken = peek(); } enumMemberList.add(enumMember); endContext(); return STNodeFactory.createNodeList(enumMemberList); } /** * Parse enum member. * <p> * enum-member := metadata identifier [= const-expr] * </p> * * @return Parsed enum member node. 
*/ private STNode parseEnumMember() { STToken nextToken = peek(); STNode metadata; switch (nextToken.kind) { case DOCUMENTATION_LINE: case AT_TOKEN: metadata = parseMetaData(nextToken.kind); break; default: metadata = STNodeFactory.createEmptyNode(); } STNode identifierNode = parseIdentifier(ParserRuleContext.ENUM_MEMBER_NAME); return parseEnumMemberInternalRhs(metadata, identifierNode); } private STNode parseEnumMemberInternalRhs(STNode metadata, STNode identifierNode) { return parseEnumMemberInternalRhs(metadata, identifierNode, peek().kind); } private STNode parseEnumMemberInternalRhs(STNode metadata, STNode identifierNode, SyntaxKind nextToken) { STNode equalToken, constExprNode; switch (nextToken) { case EQUAL_TOKEN: equalToken = parseAssignOp(); constExprNode = parseExpression(); break; case COMMA_TOKEN: case CLOSE_BRACE_TOKEN: equalToken = STNodeFactory.createEmptyNode(); constExprNode = STNodeFactory.createEmptyNode(); break; default: Solution solution = recover(peek(), ParserRuleContext.ENUM_MEMBER_INTERNAL_RHS, metadata, identifierNode); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseEnumMemberInternalRhs(metadata, identifierNode, solution.tokenKind); } return STNodeFactory.createEnumMemberNode(metadata, identifierNode, equalToken, constExprNode); } /** * Parse transaction statement. * <p> * <code>transaction-stmt := "transaction" block-stmt ;</code> * * @return Transaction statement node */ private STNode parseTransactionStatement() { startContext(ParserRuleContext.TRANSACTION_STMT); STNode transactionKeyword = parseTransactionKeyword(); STNode blockStmt = parseBlockNode(); endContext(); return STNodeFactory.createTransactionStatementNode(transactionKeyword, blockStmt); } /** * Parse transaction keyword. 
* * @return parsed node */ private STNode parseTransactionKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.TRANSACTION_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.TRANSACTION_KEYWORD); return sol.recoveredNode; } } /** * Parse commit action. * <p> * <code>commit-action := "commit"</code> * * @return Commit action node */ private STNode parseCommitAction() { STNode commitKeyword = parseCommitKeyword(); return STNodeFactory.createCommitActionNode(commitKeyword); } /** * Parse commit keyword. * * @return parsed node */ private STNode parseCommitKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.COMMIT_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.COMMIT_KEYWORD); return sol.recoveredNode; } } /** * Parse retry statement. * <p> * <code> * retry-stmt := "retry" retry-spec block-stmt * <br/> * retry-spec := [type-parameter] [ "(" arg-list ")" ] * </code> * * @return Retry statement node */ private STNode parseRetryStatement() { startContext(ParserRuleContext.RETRY_STMT); STNode retryKeyword = parseRetryKeyword(); STNode retryStmt = parseRetryKeywordRhs(retryKeyword); endContext(); return retryStmt; } private STNode parseRetryKeywordRhs(STNode retryKeyword) { return parseRetryKeywordRhs(peek().kind, retryKeyword); } private STNode parseRetryKeywordRhs(SyntaxKind nextTokenKind, STNode retryKeyword) { switch (nextTokenKind) { case LT_TOKEN: STNode typeParam = parseTypeParameter(); return parseRetryTypeParamRhs(retryKeyword, typeParam); case OPEN_PAREN_TOKEN: case OPEN_BRACE_TOKEN: case TRANSACTION_KEYWORD: typeParam = STNodeFactory.createEmptyNode(); return parseRetryTypeParamRhs(nextTokenKind, retryKeyword, typeParam); default: Solution solution = recover(peek(), ParserRuleContext.RETRY_KEYWORD_RHS, retryKeyword); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseRetryKeywordRhs(solution.tokenKind, retryKeyword); } } private 
STNode parseRetryTypeParamRhs(STNode retryKeyword, STNode typeParam) { return parseRetryTypeParamRhs(peek().kind, retryKeyword, typeParam); } private STNode parseRetryTypeParamRhs(SyntaxKind nextTokenKind, STNode retryKeyword, STNode typeParam) { STNode args; switch (nextTokenKind) { case OPEN_PAREN_TOKEN: args = parseParenthesizedArgList(); break; case OPEN_BRACE_TOKEN: case TRANSACTION_KEYWORD: args = STNodeFactory.createEmptyNode(); break; default: Solution solution = recover(peek(), ParserRuleContext.RETRY_TYPE_PARAM_RHS, retryKeyword, typeParam); return parseRetryTypeParamRhs(solution.tokenKind, retryKeyword, typeParam); } STNode blockStmt = parseRetryBody(); return STNodeFactory.createRetryStatementNode(retryKeyword, typeParam, args, blockStmt); } private STNode parseRetryBody() { return parseRetryBody(peek().kind); } private STNode parseRetryBody(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case OPEN_BRACE_TOKEN: return parseBlockNode(); case TRANSACTION_KEYWORD: return parseTransactionStatement(); default: Solution solution = recover(peek(), ParserRuleContext.RETRY_BODY); return parseRetryBody(solution.tokenKind); } } private STNode parseEnumMemberRhs() { return parseEnumMemberRhs(peek().kind); } private STNode parseEnumMemberRhs(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case COMMA_TOKEN: return parseComma(); case CLOSE_BRACE_TOKEN: return null; default: Solution solution = recover(peek(), ParserRuleContext.ENUM_MEMBER_RHS); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseEnumMemberRhs(solution.tokenKind); } } /** * Parse retry keyword. * * @return parsed node */ private STNode parseRetryKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.RETRY_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.RETRY_KEYWORD); return sol.recoveredNode; } } /** * Parse transaction statement. 
* <p> * <code>rollback-stmt := "rollback" [expression] ";"</code> * * @return Rollback statement node */ private STNode parseRollbackStatement() { startContext(ParserRuleContext.ROLLBACK_STMT); STNode rollbackKeyword = parseRollbackKeyword(); STNode expression; if (peek().kind == SyntaxKind.SEMICOLON_TOKEN) { expression = STNodeFactory.createEmptyNode(); } else { expression = parseExpression(); } STNode semicolon = parseSemicolon(); endContext(); return STNodeFactory.createRollbackStatementNode(rollbackKeyword, expression, semicolon); } /** * Parse rollback keyword. * * @return parsed node */ private STNode parseRollbackKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.ROLLBACK_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.ROLLBACK_KEYWORD); return sol.recoveredNode; } } /** * Parse transactional expression. * <p> * <code>transactional-expr := "transactional"</code> * * @return Transactional expression node */ private STNode parseTransactionalExpression() { STNode transactionalKeyword = parseTransactionalKeyword(); return STNodeFactory.createTransactionalExpressionNode(transactionalKeyword); } /** * Parse transactional keyword. 
* * @return parsed node */ private STNode parseTransactionalKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.TRANSACTIONAL_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.ROLLBACK_KEYWORD); return sol.recoveredNode; } } private STNode parseDestructureAssignmentOrVarDecl(STNode annots) { startContext(ParserRuleContext.ASSIGNMENT_OR_VAR_DECL_STMT); STNode stmt; STNode listBindingPatternOrTupleType = parseListBindingPatternOrTupleType(true); if (listBindingPatternOrTupleType.kind == SyntaxKind.LIST_BINDING_PATTERN) { switchContext(ParserRuleContext.ASSIGNMENT_STMT); stmt = parseAssignmentStmtRhs(listBindingPatternOrTupleType); } else { switchContext(ParserRuleContext.VAR_DECL_STMT); STNode varName = parseBindingPattern(); STNode typedBindingPattern = STNodeFactory.createTypedBindingPatternNode(listBindingPatternOrTupleType, varName); stmt = parseVarDeclRhs(annots, STNodeFactory.createEmptyNode(), typedBindingPattern, false); } endContext(); return stmt; } private STNode parseListBindingPatternOrTupleType(boolean isRoot) { startContext(ParserRuleContext.LIST_BP_OR_TUPLE_TYPE_DESC); STNode openBracket = parseOpenBracket(); List<STNode> memberList = new ArrayList<>(); STToken nextToken = peek(); if (!isEndOfReceiveFields(nextToken.kind)) { STNode member; STNode memberEnd; nextToken = peek(); while (!isEndOfReceiveFields(nextToken.kind)) { member = parseListBindingPatternOrTupleTypeAmbiguousMember(); SyntaxKind currentNodeType = getParsingNodeType(member); switch (currentNodeType) { case TUPLE_TYPE_DESC: return parseAsTupleTypeDesc(openBracket, memberList, member); case LIST_BINDING_PATTERN: return parseAsListBindingPattern(openBracket, memberList, member); case NONE: default: memberList.add(member); break; } memberEnd = parseBracketedListMemberEnd(); if (memberEnd == null) { break; } memberList.add(memberEnd); nextToken = peek(); } } STNode closeBracket = parseCloseBracket(); STNode listBindingPatternOrTupleTypeDesc = 
parseListBindingPatternOrTupleType(openBracket, memberList, closeBracket, isRoot); endContext(); return listBindingPatternOrTupleTypeDesc; } private STNode parseListBindingPatternOrTupleTypeAmbiguousMember() { return parseListBindingPatternOrTupleTypeAmbiguousMember(peek().kind); } /** * Parse a member of a list-binding-pattern or tuple-type-desc, when the parent is ambiguous. * * @param nextTokenKind Kind of the next token. * @return Parsed node */ private STNode parseListBindingPatternOrTupleTypeAmbiguousMember(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case OPEN_BRACKET_TOKEN: return parseListBindingPatternOrTupleType(false); case IDENTIFIER_TOKEN: STNode identifier = parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF); nextTokenKind = peek().kind; if (isWildcardBP(identifier)) { return STNodeFactory.createCaptureBindingPatternNode(identifier); } if (isTypeFollowingToken(nextTokenKind)) { return parseComplexTypeDescriptor(identifier, ParserRuleContext.TYPE_DESC_IN_TUPLE, false); } if (nextTokenKind == SyntaxKind.OPEN_PAREN_TOKEN) { return parseListBindingPatternMember(); } if (nextTokenKind == SyntaxKind.ELLIPSIS_TOKEN) { STNode ellipsis = parseEllipsis(); return STNodeFactory.createRestDescriptorNode(identifier, ellipsis); } return identifier; case OPEN_BRACE_TOKEN: return parseListBindingPatternMember(); case ERROR_KEYWORD: if (getNextNextToken(nextTokenKind).kind == SyntaxKind.OPEN_PAREN_TOKEN) { return parseListBindingPatternMember(); } return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE); case ELLIPSIS_TOKEN: return parseListBindingPatternMember(); default: if (isTypeStartingToken(nextTokenKind)) { return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE); } Solution solution = recover(peek(), ParserRuleContext.LIST_BP_OR_TUPLE_TYPE_MEMBER); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseListBindingPatternOrTupleTypeAmbiguousMember(solution.tokenKind); } } private STNode 
parseAsTupleTypeDesc(STNode openBracket, List<STNode> memberList, STNode member) { memberList = getTypeDescList(memberList); switchContext(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN); startContext(ParserRuleContext.TYPE_DESC_IN_TUPLE); STNode tupleTypeMembers = parseTupleTypeMembers(member, memberList); STNode closeBracket = parseCloseBracket(); endContext(); endContext(); return STNodeFactory.createTupleTypeDescriptorNode(openBracket, tupleTypeMembers, closeBracket); } private STNode parseAsListBindingPattern(STNode openBracket, List<STNode> memberList, STNode member) { memberList = getBindingPattern(memberList); memberList.add(member); switchContext(ParserRuleContext.LIST_BINDING_PATTERN); STNode listBindingPattern = parseListBindingPattern(openBracket, member, memberList); endContext(); return listBindingPattern; } private STNode parseAsListBindingPattern(STNode openBracket, List<STNode> memberList) { memberList = getBindingPattern(memberList); switchContext(ParserRuleContext.LIST_BINDING_PATTERN); STNode listBindingPattern = parseListBindingPattern(openBracket, memberList); endContext(); return listBindingPattern; } private List<STNode> getTypeDescList(List<STNode> ambibuousList) { List<STNode> typeDescList = new ArrayList<STNode>(); for (STNode item : ambibuousList) { if (item.kind != SyntaxKind.LIST_BP_OR_TUPLE_TYPE_DESC) { typeDescList.add(item); continue; } STAmbiguousListNode innerList = (STAmbiguousListNode) item; STNode memberTypeDescList = STNodeFactory.createNodeList(innerList.members); STNode typeDesc = STNodeFactory.createTupleTypeDescriptorNode(innerList.openBracket, memberTypeDescList, innerList.closeBracket); typeDescList.add(typeDesc); } return typeDescList; } private List<STNode> getBindingPattern(List<STNode> ambibuousList) { List<STNode> typeDescList = new ArrayList<STNode>(); for (STNode item : ambibuousList) { switch (item.kind) { case SIMPLE_NAME_REFERENCE: typeDescList.add(STNodeFactory.createCaptureBindingPatternNode(item)); 
break; case LIST_BP_OR_TUPLE_TYPE_DESC: STAmbiguousListNode innerList = (STAmbiguousListNode) item; STNode memberBindingPatterns = STNodeFactory.createNodeList(innerList.members); STNode restBindingPattern = STNodeFactory.createEmptyNode(); STNode typeDesc = STNodeFactory.createListBindingPatternNode(innerList.openBracket, memberBindingPatterns, restBindingPattern, innerList.closeBracket); typeDescList.add(typeDesc); break; default: typeDescList.add(item); break; } } return typeDescList; } private boolean isTypeFollowingToken(SyntaxKind tokenKind) { switch (tokenKind) { case OPEN_BRACKET_TOKEN: case PIPE_TOKEN: case BITWISE_AND_TOKEN: case QUESTION_MARK_TOKEN: return true; default: return false; } } private SyntaxKind getParsingNodeType(STNode memberNode) { if (memberNode.kind.compareTo(SyntaxKind.TYPE_DESC) >= 0 && memberNode.kind.compareTo(SyntaxKind.SINGLETON_TYPE_DESC) <= 0) { return SyntaxKind.TUPLE_TYPE_DESC; } switch (memberNode.kind) { case DECIMAL_INTEGER_LITERAL: case HEX_INTEGER_LITERAL: case ASTERISK_TOKEN: return SyntaxKind.ARRAY_TYPE_DESC; case CAPTURE_BINDING_PATTERN: case LIST_BINDING_PATTERN: case REST_BINDING_PATTERN: return SyntaxKind.LIST_BINDING_PATTERN; case QUALIFIED_NAME_REFERENCE: case REST_TYPE: return SyntaxKind.TUPLE_TYPE_DESC; case SIMPLE_NAME_REFERENCE: case LIST_BP_OR_TUPLE_TYPE_DESC: default: return SyntaxKind.NONE; } } private STNode parseListBindingPatternOrTupleType(STNode openBracket, List<STNode> members, STNode closeBracket, boolean isRoot) { if (!isRoot) { return new STAmbiguousListNode(SyntaxKind.LIST_BP_OR_TUPLE_TYPE_DESC, openBracket, members, closeBracket); } switch (peek().kind) { case EQUAL_TOKEN: STNode memberBindingPatterns = STNodeFactory.createNodeList(getBindingPattern(members)); STNode restBindingPattern = STNodeFactory.createEmptyNode(); return STNodeFactory.createListBindingPatternNode(openBracket, memberBindingPatterns, restBindingPattern, closeBracket); default: if (members.isEmpty()) { 
this.errorHandler.reportMissingTokenError("missing member"); } STNode memberTypeDescs = STNodeFactory.createNodeList(getTypeDescList(members)); return STNodeFactory.createTupleTypeDescriptorNode(openBracket, memberTypeDescs, closeBracket); } } private boolean isWildcardBP(STNode node) { if (node.kind != SyntaxKind.SIMPLE_NAME_REFERENCE) { return false; } STToken nameToken = (STToken) ((STSimpleNameReferenceNode) node).name; return "_".equals(nameToken.text()); } /** * Parse service-constructor-expr. * * service-constructor-expr := [annots] service service-body-block * service-body-block := { service-method-defn* } * service-method-defn := * metadata * [resource] * function identifier function-signature method-defn-body * * @param annots Annots * @return Parsed node */ private STNode parseServiceConstructorExpression(STNode annots) { startContext(ParserRuleContext.SERVICE_CONSTRUCTOR_EXPRESSION); STNode serviceKeyword = parseServiceKeyword(); STNode serviceBody = parseServiceBody(); endContext(); return STNodeFactory.createServiceConstructorExpressionNode(annots, serviceKeyword, serviceBody); } /** * Parse base16 literal. * <p> * <code> * byte-array-literal := Base16Literal | Base64Literal * <br/> * Base16Literal := base16 WS ` HexGroup* WS ` * <br/> * Base64Literal := base64 WS ` Base64Group* [PaddedBase64Group] WS ` * </code> * * @param kind byte array literal kind * @return parsed node */ private STNode parseByteArrayLiteral(SyntaxKind kind) { STNode type; if (kind == SyntaxKind.BASE16_KEYWORD) { type = parseBase16Keyword(); } else { type = parseBase64Keyword(); } STNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START); STNode content = parseByteArrayContent(kind); STNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_END); return STNodeFactory.createByteArrayLiteralNode(type, startingBackTick, content, endingBackTick); } /** * Parse <code>base16</code> keyword. 
* * @return base16 keyword node */ private STNode parseBase16Keyword() { STToken token = peek(); if (token.kind == SyntaxKind.BASE16_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.BASE16_KEYWORD); return sol.recoveredNode; } } /** * Parse <code>base64</code> keyword. * * @return base64 keyword node */ private STNode parseBase64Keyword() { STToken token = peek(); if (token.kind == SyntaxKind.BASE64_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.BASE64_KEYWORD); return sol.recoveredNode; } } /** * Validate and parse byte array literal content. * An error is reported, if the content is invalid. * * @param kind byte array literal kind * @return parsed node */ private STNode parseByteArrayContent(SyntaxKind kind) { STNode content = STNodeFactory.createEmptyNode(); STToken nextToken = peek(); List<STNode> items = new ArrayList<>(); while (!isEndOfBacktickContent(nextToken.kind)) { content = parseTemplateItem(); items.add(content); nextToken = peek(); } if (items.size() > 1) { this.errorHandler.reportInvalidNode(null, "invalid content within backticks"); } else if (items.size() == 1 && content.kind != SyntaxKind.TEMPLATE_STRING) { this.errorHandler.reportInvalidNode(null, "invalid content within backticks"); } else if (items.size() == 1) { if (kind == SyntaxKind.BASE16_KEYWORD && !BallerinaLexer.isValidBase16LiteralContent(content.toString())) { this.errorHandler.reportInvalidNode(null, "invalid content within backticks"); } else if (kind == SyntaxKind.BASE64_KEYWORD && !BallerinaLexer.isValidBase64LiteralContent(content.toString())) { this.errorHandler.reportInvalidNode(null, "invalid content within backticks"); } } return content; } /** * Parse xml filter expression. 
* <p> * <code>xml-filter-expr := expression .< xml-name-pattern ></code> * * @param lhsExpr Preceding expression of .< token * @return Parsed node */ private STNode parseXMLFilterExpression(STNode lhsExpr) { STNode xmlNamePatternChain = parseXMLFilterExpressionRhs(); return STNodeFactory.createXMLFilterExpressionNode(lhsExpr, xmlNamePatternChain); } /** * Parse xml filter expression rhs. * <p> * <code>filer-expression-rhs := .< xml-name-pattern ></code> * * @return Parsed node */ private STNode parseXMLFilterExpressionRhs() { STNode dotLTToken = parseDotLTToken(); return parseXMLNamePatternChain(dotLTToken); } /** * Parse xml name pattern chain. * <p> * <code> * xml-name-pattern-chain := filer-expression-rhs | xml-element-children-step | xml-element-descendants-step * <br/> * filer-expression-rhs := .< xml-name-pattern > * <br/> * xml-element-children-step := /< xml-name-pattern > * <br/> * xml-element-descendants-step := /**\/<xml-name-pattern > * </code> * * @param startToken Preceding token of xml name pattern * @return Parsed node */ private STNode parseXMLNamePatternChain(STNode startToken) { STNode xmlNamePattern = parseXMLNamePattern(); STNode gtToken = parseGTToken(); return STNodeFactory.createXMLNamePatternChainingNode(startToken, xmlNamePattern, gtToken); } /** * Parse <code> .< </code> token. * * @return Parsed node */ private STNode parseDotLTToken() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.DOT_LT_TOKEN) { return consume(); } else { Solution sol = recover(nextToken, ParserRuleContext.DOT_LT_TOKEN); return sol.recoveredNode; } } /** * Parse xml name pattern. 
* <p> * <code>xml-name-pattern := xml-atomic-name-pattern [| xml-atomic-name-pattern]*</code> * * @return Parsed node */ private STNode parseXMLNamePattern() { List<STNode> xmlAtomicNamePatternList = new ArrayList<>(); STToken nextToken = peek(); if (isEndOfXMLNamePattern(nextToken.kind)) { this.errorHandler.reportMissingTokenError("missing xml atomic name pattern"); return STNodeFactory.createNodeList(xmlAtomicNamePatternList); } STNode xmlAtomicNamePattern = parseXMLAtomicNamePattern(); xmlAtomicNamePatternList.add(xmlAtomicNamePattern); nextToken = peek(); STNode leadingPipe; while (!isEndOfXMLNamePattern(nextToken.kind)) { leadingPipe = parsePipeToken(); xmlAtomicNamePatternList.add(leadingPipe); xmlAtomicNamePattern = parseXMLAtomicNamePattern(); xmlAtomicNamePatternList.add(xmlAtomicNamePattern); nextToken = peek(); } return STNodeFactory.createNodeList(xmlAtomicNamePatternList); } private boolean isEndOfXMLNamePattern(SyntaxKind tokenKind) { switch (tokenKind) { case IDENTIFIER_TOKEN: case ASTERISK_TOKEN: case COLON_TOKEN: return false; case GT_TOKEN: case EOF_TOKEN: case AT_TOKEN: case DOCUMENTATION_LINE: case CLOSE_BRACE_TOKEN: case SEMICOLON_TOKEN: case PUBLIC_KEYWORD: case PRIVATE_KEYWORD: case FUNCTION_KEYWORD: case RETURNS_KEYWORD: case SERVICE_KEYWORD: case TYPE_KEYWORD: case LISTENER_KEYWORD: case CONST_KEYWORD: case FINAL_KEYWORD: case RESOURCE_KEYWORD: return true; default: return isSimpleType(tokenKind); } } /** * Parse xml atomic name pattern. 
* <p> * <code> * xml-atomic-name-pattern := * * * | identifier * | xml-namespace-prefix : identifier * | xml-namespace-prefix : * * </code> * * @return Parsed node */ private STNode parseXMLAtomicNamePattern() { STToken token = peek(); if (token.kind == SyntaxKind.ASTERISK_TOKEN) { return consume(); } else if (token.kind == SyntaxKind.IDENTIFIER_TOKEN) { STNode identifier = consume(); return parseXMLAtomicNameIdentifier(identifier); } return STNodeFactory.createEmptyNode(); } private STNode parseXMLAtomicNameIdentifier(STNode identifier) { STToken token = peek(); if (token.kind == SyntaxKind.COLON_TOKEN) { STNode colon = consume(); STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN || nextToken.kind == SyntaxKind.ASTERISK_TOKEN) { STToken endToken = consume(); return STNodeFactory.createXMLAtomicNamePatternNode(identifier, colon, endToken); } } return STNodeFactory.createSimpleNameReferenceNode(identifier); } /** * Parse xml step expression. * <p> * <code>xml-step-expr := expression xml-step-start xml-step-extend*</code> * * @param lhsExpr Preceding expression of /*, /<, or /**\/< token * @return Parsed node */ private STNode parseXMLStepExpression(STNode lhsExpr) { STNode xmlStepStart = parseXMLStepStart(); STNode xmlStepExtendList = parseXMLStepExtendList(); return STNodeFactory.createXMLStepExpressionNode(lhsExpr, xmlStepStart, xmlStepExtendList); } /** * Parse xml filter expression rhs. 
* <p> * <code> * xml-step-start := * xml-all-children-step * | xml-element-children-step * | xml-element-descendants-step * <br/> * xml-all-children-step := /* * </code> * * @return Parsed node */ private STNode parseXMLStepStart() { STToken token = peek(); STNode startToken; switch (token.kind) { case SLASH_ASTERISK_TOKEN: return consume(); case DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN: startToken = parseDoubleSlashDoubleAsteriskLTToken(); break; case SLASH_LT_TOKEN: default: startToken = parseSlashLTToken(); break; } return parseXMLNamePatternChain(startToken); } /** * Parse <code> /< </code> token. * * @return Parsed node */ private STNode parseSlashLTToken() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.SLASH_LT_TOKEN) { return consume(); } else { Solution sol = recover(nextToken, ParserRuleContext.SLASH_LT_TOKEN); return sol.recoveredNode; } } /** * Parse <code> /< </code> token. * * @return Parsed node */ private STNode parseDoubleSlashDoubleAsteriskLTToken() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN) { return consume(); } else { Solution sol = recover(nextToken, ParserRuleContext.DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN); return sol.recoveredNode; } } /** * Parse xml step extend list. 
* <p> * <code>xml-step-extend-list := xml-step-extend*</code> * * @return Parsed node */ private STNode parseXMLStepExtendList() { List<STNode> xmlStepExtendList = new ArrayList<>(); STToken nextToken = peek(); if (isEndOfXMLStepExtend(nextToken.kind)) { return STNodeFactory.createNodeList(xmlStepExtendList); } nextToken = peek(); STNode xmlStepExtend; while (!isEndOfXMLNamePattern(nextToken.kind)) { xmlStepExtend = parseXMLStepExtend(); xmlStepExtendList.add(xmlStepExtend); nextToken = peek(); } return STNodeFactory.createNodeList(xmlStepExtendList); } private boolean isEndOfXMLStepExtend(SyntaxKind tokenKind) { switch (tokenKind) { case DOT_LT_TOKEN: case OPEN_BRACKET_TOKEN: case DOT_TOKEN: return false; default: return true; } } /** * Parse xml step extend list. * <p> * <code> * xml-step-extend := * .< xml-name-pattern > * | [ expression ] * | . method-name ( arg-list ) * </code> * * @return Parsed node */ private STNode parseXMLStepExtend() { STToken token = peek(); switch (token.kind) { case DOT_LT_TOKEN: return parseXMLFilterExpressionRhs(); case DOT_TOKEN: case OPEN_BRACKET_TOKEN: default: STNode lhsExpr = STNodeFactory.createEmptyNode(); return parseExpressionRhs(token.kind, OperatorPrecedence.ACTION, lhsExpr, true, false); } } }
STNode xmlStepExtendList = parseXMLStepExtendList();
private boolean isServiceDeclStart(ParserRuleContext currentContext, int lookahead) { switch (peek(lookahead + 1).kind) { case IDENTIFIER_TOKEN: SyntaxKind tokenAfterIdentifier = peek(lookahead + 2).kind; switch (tokenAfterIdentifier) { case ON_KEYWORD: case OPEN_BRACE_TOKEN: return true; case EQUAL_TOKEN: case SEMICOLON_TOKEN: case QUESTION_MARK_TOKEN: return false; default: return false; } case ON_KEYWORD: return true; default: return false; } } /** * Parse listener declaration, given the qualifier. * * @param metadata Metadata * @param qualifier Qualifier that precedes the listener declaration * @return Parsed node */ private STNode parseListenerDeclaration(STNode metadata, STNode qualifier) { startContext(ParserRuleContext.LISTENER_DECL); STNode listenerKeyword = parseListenerKeyword(); STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER); STNode variableName = parseVariableName(); STNode equalsToken = parseAssignOp(); STNode initializer = parseExpression(); STNode semicolonToken = parseSemicolon(); endContext(); return STNodeFactory.createListenerDeclarationNode(metadata, qualifier, listenerKeyword, typeDesc, variableName, equalsToken, initializer, semicolonToken); } /** * Parse listener keyword. * * @return Parsed node */ private STNode parseListenerKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.LISTENER_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.LISTENER_KEYWORD); return sol.recoveredNode; } } /** * Parse constant declaration, given the qualifier. 
* <p> * <code>module-const-decl := metadata [public] const [type-descriptor] identifier = const-expr ;</code> * * @param metadata Metadata * @param qualifier Qualifier that precedes the listener declaration * @return Parsed node */ private STNode parseConstantDeclaration(STNode metadata, STNode qualifier) { startContext(ParserRuleContext.CONSTANT_DECL); STNode constKeyword = parseConstantKeyword(); STNode constDecl = parseConstDecl(metadata, qualifier, constKeyword); endContext(); return constDecl; } /** * Parse the components that follows after the const keyword of a constant declaration. * * @param metadata Metadata * @param qualifier Qualifier that precedes the constant decl * @param constKeyword Const keyword * @return Parsed node */ private STNode parseConstDecl(STNode metadata, STNode qualifier, STNode constKeyword) { STToken nextToken = peek(); return parseConstDeclFromType(nextToken.kind, metadata, qualifier, constKeyword); } private STNode parseConstDeclFromType(SyntaxKind nextTokenKind, STNode metadata, STNode qualifier, STNode constKeyword) { switch (nextTokenKind) { case ANNOTATION_KEYWORD: switchContext(ParserRuleContext.ANNOTATION_DECL); return parseAnnotationDeclaration(metadata, qualifier, constKeyword); case IDENTIFIER_TOKEN: return parseConstantDeclWithOptionalType(metadata, qualifier, constKeyword); default: if (isTypeStartingToken(nextTokenKind)) { break; } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.CONST_DECL_TYPE, metadata, qualifier, constKeyword); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseConstDeclFromType(solution.tokenKind, metadata, qualifier, constKeyword); } STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER); STNode variableName = parseVariableName(); STNode equalsToken = parseAssignOp(); STNode initializer = parseExpression(); STNode semicolonToken = parseSemicolon(); return 
STNodeFactory.createConstantDeclarationNode(metadata, qualifier, constKeyword, typeDesc, variableName, equalsToken, initializer, semicolonToken); } private STNode parseConstantDeclWithOptionalType(STNode metadata, STNode qualifier, STNode constKeyword) { STNode varNameOrTypeName = parseStatementStartIdentifier(); STNode constDecl = parseConstantDeclRhs(metadata, qualifier, constKeyword, varNameOrTypeName); return constDecl; } /** * Parse the component that follows the first identifier in a const decl. The identifier * can be either the type-name (a user defined type) or the var-name there the type-name * is not present. * * @param qualifier Qualifier that precedes the constant decl * @param constKeyword Const keyword * @param typeOrVarName Identifier that follows the const-keywoord * @return Parsed node */ private STNode parseConstantDeclRhs(STNode metadata, STNode qualifier, STNode constKeyword, STNode typeOrVarName) { STToken token = peek(); return parseConstantDeclRhs(token.kind, metadata, qualifier, constKeyword, typeOrVarName); } private STNode parseConstantDeclRhs(SyntaxKind nextTokenKind, STNode metadata, STNode qualifier, STNode constKeyword, STNode typeOrVarName) { STNode type; STNode variableName; switch (nextTokenKind) { case IDENTIFIER_TOKEN: type = typeOrVarName; variableName = parseVariableName(); break; case EQUAL_TOKEN: variableName = ((STSimpleNameReferenceNode) typeOrVarName).name; type = STNodeFactory.createEmptyNode(); break; default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.CONST_DECL_RHS, metadata, qualifier, constKeyword, typeOrVarName); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseConstantDeclRhs(solution.tokenKind, metadata, qualifier, constKeyword, typeOrVarName); } STNode equalsToken = parseAssignOp(); STNode initializer = parseExpression(); STNode semicolonToken = parseSemicolon(); return STNodeFactory.createConstantDeclarationNode(metadata, qualifier, 
constKeyword, type, variableName, equalsToken, initializer, semicolonToken); } /** * Parse const keyword. * * @return Parsed node */ private STNode parseConstantKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.CONST_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.CONST_KEYWORD); return sol.recoveredNode; } } /** * Parse nil type descriptor. * <p> * <code>nil-type-descriptor := ( ) </code> * </p> * * @return Parsed node */ private STNode parseNilTypeDescriptor() { startContext(ParserRuleContext.NIL_TYPE_DESCRIPTOR); STNode openParenthesisToken = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS); STNode closeParenthesisToken = parseCloseParenthesis(); endContext(); return STNodeFactory.createNilTypeDescriptorNode(openParenthesisToken, closeParenthesisToken); } /** * Parse typeof expression. * <p> * <code> * typeof-expr := typeof expression * </code> * * @param isRhsExpr * @return Typeof expression node */ private STNode parseTypeofExpression(boolean isRhsExpr) { STNode typeofKeyword = parseTypeofKeyword(); STNode expr = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false); return STNodeFactory.createTypeofExpressionNode(typeofKeyword, expr); } /** * Parse typeof-keyword. * * @return Typeof-keyword node */ private STNode parseTypeofKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.TYPEOF_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.TYPEOF_KEYWORD); return sol.recoveredNode; } } /** * Parse optional type descriptor. * <p> * <code>optional-type-descriptor := type-descriptor ? </code> * </p> * * @return Parsed node */ private STNode parseOptionalTypeDescriptor(STNode typeDescriptorNode) { startContext(ParserRuleContext.OPTIONAL_TYPE_DESCRIPTOR); STNode questionMarkToken = parseQuestionMark(); endContext(); return STNodeFactory.createOptionalTypeDescriptorNode(typeDescriptorNode, questionMarkToken); } /** * Parse unary expression. 
* <p> * <code> * unary-expr := + expression | - expression | ~ expression | ! expression * </code> * * @param isRhsExpr * @return Unary expression node */ private STNode parseUnaryExpression(boolean isRhsExpr) { STNode unaryOperator = parseUnaryOperator(); STNode expr = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false); return STNodeFactory.createUnaryExpressionNode(unaryOperator, expr); } /** * Parse unary operator. * <code>UnaryOperator := + | - | ~ | !</code> * * @return Parsed node */ private STNode parseUnaryOperator() { STToken token = peek(); if (isUnaryOperator(token.kind)) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.UNARY_OPERATOR); return sol.recoveredNode; } } /** * Check whether the given token kind is a unary operator. * * @param kind STToken kind * @return <code>true</code> if the token kind refers to a unary operator. <code>false</code> otherwise */ private boolean isUnaryOperator(SyntaxKind kind) { switch (kind) { case PLUS_TOKEN: case MINUS_TOKEN: case NEGATION_TOKEN: case EXCLAMATION_MARK_TOKEN: return true; default: return false; } } /** * Parse array type descriptor. * <p> * <code> * array-type-descriptor := member-type-descriptor [ [ array-length ] ] * member-type-descriptor := type-descriptor * array-length := * int-literal * | constant-reference-expr * | inferred-array-length * inferred-array-length := * * </code> * </p> * * @param memberTypeDesc * * @return Parsed Node */ private STNode parseArrayTypeDescriptor(STNode memberTypeDesc) { startContext(ParserRuleContext.ARRAY_TYPE_DESCRIPTOR); STNode openBracketToken = parseOpenBracket(); STNode arrayLengthNode = parseArrayLength(); STNode closeBracketToken = parseCloseBracket(); endContext(); return STNodeFactory.createArrayTypeDescriptorNode(memberTypeDesc, openBracketToken, arrayLengthNode, closeBracketToken); } /** * Parse array length. 
* <p> * <code> * array-length := * int-literal * | constant-reference-expr * | inferred-array-length * constant-reference-expr := variable-reference-expr * </code> * </p> * * @return Parsed array length */ private STNode parseArrayLength() { STToken token = peek(); switch (token.kind) { case DECIMAL_INTEGER_LITERAL: case HEX_INTEGER_LITERAL: case ASTERISK_TOKEN: return parseBasicLiteral(); case CLOSE_BRACKET_TOKEN: return STNodeFactory.createEmptyNode(); case IDENTIFIER_TOKEN: return parseQualifiedIdentifier(ParserRuleContext.ARRAY_LENGTH); default: Solution sol = recover(token, ParserRuleContext.ARRAY_LENGTH); return sol.recoveredNode; } } /** * Parse annotations. * <p> * <i>Note: In the ballerina spec ({@link https: * annotations-list is specified as one-or-more annotations. And the usage is marked as * optional annotations-list. However, for the consistency of the tree, here we make the * annotation-list as zero-or-more annotations, and the usage is not-optional.</i> * <p> * <code>annots := annotation*</code> * * @return Parsed node */ private STNode parseAnnotations() { STToken nextToken = peek(); return parseAnnotations(nextToken.kind); } private STNode parseAnnotations(SyntaxKind nextTokenKind) { startContext(ParserRuleContext.ANNOTATIONS); List<STNode> annotList = new ArrayList<>(); while (nextTokenKind == SyntaxKind.AT_TOKEN) { annotList.add(parseAnnotation()); nextTokenKind = peek().kind; } endContext(); return STNodeFactory.createNodeList(annotList); } /** * Parse annotation attachment. 
* <p> * <code>annotation := @ annot-tag-reference annot-value</code> * * @return Parsed node */ private STNode parseAnnotation() { STNode atToken = parseAtToken(); STNode annotReference; if (peek().kind != SyntaxKind.IDENTIFIER_TOKEN) { annotReference = STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN); } else { annotReference = parseQualifiedIdentifier(ParserRuleContext.ANNOT_REFERENCE); } STNode annotValue; if (peek().kind == SyntaxKind.OPEN_BRACE_TOKEN) { annotValue = parseMappingConstructorExpr(); } else { annotValue = STNodeFactory.createEmptyNode(); } return STNodeFactory.createAnnotationNode(atToken, annotReference, annotValue); } /** * Parse '@' token. * * @return Parsed node */ private STNode parseAtToken() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.AT_TOKEN) { return consume(); } else { Solution sol = recover(nextToken, ParserRuleContext.AT); return sol.recoveredNode; } } /** * Parse metadata. Meta data consist of optional doc string and * an annotations list. * <p> * <code>metadata := [DocumentationString] annots</code> * * @return Parse node */ private STNode parseMetaData(SyntaxKind nextTokenKind) { STNode docString; STNode annotations; switch (nextTokenKind) { case DOCUMENTATION_LINE: docString = parseDocumentationString(); annotations = parseAnnotations(); break; case AT_TOKEN: docString = STNodeFactory.createEmptyNode(); annotations = parseAnnotations(nextTokenKind); break; default: return createEmptyMetadata(); } return STNodeFactory.createMetadataNode(docString, annotations); } /** * Create empty metadata node. * * @return A metadata node with no doc string and no annotations */ private STNode createEmptyMetadata() { return STNodeFactory.createMetadataNode(STNodeFactory.createEmptyNode(), STNodeFactory.createNodeList(new ArrayList<>())); } /** * Parse is expression. 
* <code> * is-expr := expression is type-descriptor * </code> * * @param lhsExpr Preceding expression of the is expression * @return Is expression node */ private STNode parseTypeTestExpression(STNode lhsExpr) { STNode isKeyword = parseIsKeyword(); STNode typeDescriptor = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_EXPRESSION); return STNodeFactory.createTypeTestExpressionNode(lhsExpr, isKeyword, typeDescriptor); } /** * Parse is-keyword. * * @return Is-keyword node */ private STNode parseIsKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.IS_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.IS_KEYWORD); return sol.recoveredNode; } } /** * Parse local type definition statement statement. * <code>ocal-type-defn-stmt := [annots] type identifier type-descriptor ;</code> * * @return local type definition statement statement */ private STNode parseLocalTypeDefinitionStatement(STNode annots) { startContext(ParserRuleContext.LOCAL_TYPE_DEFINITION_STMT); STNode typeKeyword = parseTypeKeyword(); STNode typeName = parseTypeName(); STNode typeDescriptor = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_DEF); STNode semicolon = parseSemicolon(); endContext(); return STNodeFactory.createLocalTypeDefinitionStatementNode(annots, typeKeyword, typeName, typeDescriptor, semicolon); } /** * Pass statements that starts with an identifier. 
* * @param tokenKind Next token kind * @return Parsed node */ private STNode parseStatementStartsWithIdentifier(STNode annots) { startContext(ParserRuleContext.AMBIGUOUS_STMT); STNode identifier = parseStatementStartIdentifier(); return parseStatementStartIdentifierRhs(annots, identifier); } private STNode parseStatementStartIdentifierRhs(STNode annots, STNode identifier) { return parseStatementStartIdentifierRhs(peek().kind, annots, identifier); } private STNode parseStatementStartIdentifierRhs(SyntaxKind nextTokenKind, STNode annots, STNode identifier) { switch (nextTokenKind) { case OPEN_BRACKET_TOKEN: STNode tbpOrMemberAccess = parseTypedBindingPatternOrMemberAccess(identifier, false, ParserRuleContext.AMBIGUOUS_STMT); if (tbpOrMemberAccess.kind == SyntaxKind.INDEXED_EXPRESSION) { STNode expr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, tbpOrMemberAccess, false, true); return parseStatementStartWithExpr(expr); } STNode finalKeyword = STNodeFactory.createEmptyNode(); return parseVarDeclRhs(annots, finalKeyword, tbpOrMemberAccess, false); case IDENTIFIER_TOKEN: case QUESTION_MARK_TOKEN: return parseTypeDescStartsWithIdentifier(identifier, annots); case EQUAL_TOKEN: case SEMICOLON_TOKEN: return parseStatementStartWithExpr(nextTokenKind, identifier); case PIPE_TOKEN: case BITWISE_AND_TOKEN: STToken nextNextToken = peek(2); if (nextNextToken.kind != SyntaxKind.EQUAL_TOKEN) { return parseTypeDescStartsWithIdentifier(identifier, annots); } default: if (isCompoundBinaryOperator(nextTokenKind)) { return parseCompoundAssignmentStmtRhs(identifier); } if (isValidExprRhsStart(nextTokenKind)) { STNode expression = parseExpressionRhs(nextTokenKind, DEFAULT_OP_PRECEDENCE, identifier, false, true); return parseStatementStartWithExpr(expression); } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.STMT_START_IDENTIFIER_RHS, annots, identifier); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return 
parseStatementStartIdentifierRhs(solution.tokenKind, annots, identifier); } } private STNode parseTypeDescStartsWithIdentifier(STNode typeDesc, STNode annots) { switchContext(ParserRuleContext.VAR_DECL_STMT); startContext(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN); typeDesc = parseComplexTypeDescriptor(typeDesc, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, false); endContext(); STNode finalKeyword = STNodeFactory.createEmptyNode(); STNode typeBindingPattern = parseTypedBindingPatternTypeRhs(typeDesc, ParserRuleContext.VAR_DECL_STMT); return parseVarDeclRhs(annots, finalKeyword, typeBindingPattern, false); } /** * Parse statement which is only consists of an action or expression. * * @param annots Annotations * @param nextTokenKind Next token kind * @return Parsed node */ private STNode parseExpressionStament(SyntaxKind nextTokenKind, STNode annots) { startContext(ParserRuleContext.EXPRESSION_STATEMENT); STNode expression = parseActionOrExpressionInLhs(nextTokenKind, annots); return getExpressionAsStatement(expression); } private STNode parseStamentStartWithExpr(SyntaxKind nextTokenKind, STNode annots) { startContext(ParserRuleContext.EXPRESSION_STATEMENT); STNode expression = parseActionOrExpressionInLhs(nextTokenKind, annots); return parseStatementStartWithExpr(expression); } /** * Parse statements that starts with an expression. * * @return Parsed node */ private STNode parseStatementStartWithExpr(STNode expression) { STToken nextToken = peek(); return parseStatementStartWithExpr(nextToken.kind, expression); } /** * Parse the component followed by the expression, at the beginning of a statement. 
* * @param nextTokenKind Kind of the next token * @return Parsed node */ private STNode parseStatementStartWithExpr(SyntaxKind nextTokenKind, STNode expression) { switch (nextTokenKind) { case EQUAL_TOKEN: switchContext(ParserRuleContext.ASSIGNMENT_STMT); return parseAssignmentStmtRhs(expression); case SEMICOLON_TOKEN: return getExpressionAsStatement(expression); case IDENTIFIER_TOKEN: default: if (isCompoundBinaryOperator(nextTokenKind)) { return parseCompoundAssignmentStmtRhs(expression); } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.STMT_START_WITH_EXPR_RHS, expression); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseStatementStartWithExpr(solution.tokenKind, expression); } } private STNode getArrayLength(STNodeList exprs) { if (exprs.isEmpty()) { return STNodeFactory.createEmptyNode(); } STNode lengthExpr = exprs.get(0); switch (lengthExpr.kind) { case DECIMAL_INTEGER_LITERAL: case HEX_INTEGER_LITERAL: case ASTERISK_TOKEN: case SIMPLE_NAME_REFERENCE: case QUALIFIED_NAME_REFERENCE: break; default: this.errorHandler.reportInvalidNode(null, "invalid array length"); break; } return lengthExpr; } private STNode getExpressionAsStatement(STNode expression) { switch (expression.kind) { case METHOD_CALL: case FUNCTION_CALL: case CHECK_EXPRESSION: return parseCallStatement(expression); case REMOTE_METHOD_CALL_ACTION: case CHECK_ACTION: case BRACED_ACTION: case START_ACTION: case TRAP_ACTION: case FLUSH_ACTION: case ASYNC_SEND_ACTION: case SYNC_SEND_ACTION: case RECEIVE_ACTION: case WAIT_ACTION: case QUERY_ACTION: case COMMIT_ACTION: return parseActionStatement(expression); default: this.errorHandler.reportInvalidNode(null, "left hand side of an assignment must be a variable reference"); STNode semicolon = parseSemicolon(); endContext(); return STNodeFactory.createExpressionStatementNode(SyntaxKind.INVALID, expression, semicolon); } } /** * <p> * Parse call statement, given the call expression. 
* <p> * <code> * call-stmt := call-expr ; * <br/> * call-expr := function-call-expr | method-call-expr | checking-keyword call-expr * </code> * * @param expression Call expression associated with the call statement * @return Call statement node */ private STNode parseCallStatement(STNode expression) { validateExprInCallStmt(expression); STNode semicolon = parseSemicolon(); endContext(); return STNodeFactory.createExpressionStatementNode(SyntaxKind.CALL_STATEMENT, expression, semicolon); } private void validateExprInCallStmt(STNode expression) { switch (expression.kind) { case FUNCTION_CALL: case METHOD_CALL: break; case CHECK_EXPRESSION: validateExprInCallStmt(((STCheckExpressionNode) expression).expression); break; case REMOTE_METHOD_CALL_ACTION: break; case BRACED_EXPRESSION: validateExprInCallStmt(((STBracedExpressionNode) expression).expression); break; default: if (isMissingNode(expression)) { break; } this.errorHandler.reportInvalidNode(null, "expression followed by the checking keyword must be a " + "func-call, a method-call or a check-expr"); break; } } /** * Check whether a node is a missing node. * * @param node Node to check * @return <code>true</code> if the node is a missing node. <code>false</code> otherwise */ private boolean isMissingNode(STNode node) { if (node.kind == SyntaxKind.SIMPLE_NAME_REFERENCE) { return isMissingNode(((STSimpleNameReferenceNode) node).name); } return node instanceof STMissingToken; } private STNode parseActionStatement(STNode action) { STNode semicolon = parseSemicolon(); endContext(); return STNodeFactory.createExpressionStatementNode(SyntaxKind.ACTION_STATEMENT, action, semicolon); } /** * Parse remote method call action, given the starting expression. 
* <p> * <code> * remote-method-call-action := expression -> method-name ( arg-list ) * <br/> * async-send-action := expression -> peer-worker ; * </code> * * @param isRhsExpr Is this an RHS action * @param expression LHS expression * @return */ private STNode parseRemoteMethodCallOrAsyncSendAction(STNode expression, boolean isRhsExpr) { STNode rightArrow = parseRightArrow(); return parseRemoteCallOrAsyncSendActionRhs(peek().kind, expression, isRhsExpr, rightArrow); } private STNode parseRemoteCallOrAsyncSendActionRhs(SyntaxKind nextTokenKind, STNode expression, boolean isRhsExpr, STNode rightArrow) { STNode name; switch (nextTokenKind) { case DEFAULT_KEYWORD: name = parseDefaultKeyword(); return parseAsyncSendAction(expression, rightArrow, name); case IDENTIFIER_TOKEN: name = STNodeFactory.createSimpleNameReferenceNode(parseFunctionName()); break; default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.REMOTE_CALL_OR_ASYNC_SEND_RHS, expression, isRhsExpr, rightArrow); if (solution.action == Action.REMOVE) { name = solution.recoveredNode; break; } return parseRemoteCallOrAsyncSendActionRhs(solution.tokenKind, expression, isRhsExpr, rightArrow); } return parseRemoteCallOrAsyncSendEnd(peek().kind, expression, rightArrow, name); } private STNode parseRemoteCallOrAsyncSendEnd(SyntaxKind nextTokenKind, STNode expression, STNode rightArrow, STNode name) { switch (nextTokenKind) { case OPEN_PAREN_TOKEN: return parseRemoteMethodCallAction(expression, rightArrow, name); case SEMICOLON_TOKEN: return parseAsyncSendAction(expression, rightArrow, name); default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.REMOTE_CALL_OR_ASYNC_SEND_END, expression, rightArrow, name); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseRemoteCallOrAsyncSendEnd(solution.tokenKind, expression, rightArrow, name); } } /** * Parse default keyword. 
* * @return default keyword node */ private STNode parseDefaultKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.DEFAULT_KEYWORD) { return STNodeFactory.createSimpleNameReferenceNode(consume()); } else { Solution sol = recover(token, ParserRuleContext.DEFAULT_KEYWORD); return sol.recoveredNode; } } private STNode parseAsyncSendAction(STNode expression, STNode rightArrow, STNode peerWorker) { return STNodeFactory.createAsyncSendActionNode(expression, rightArrow, peerWorker); } private STNode parseRemoteMethodCallAction(STNode expression, STNode rightArrow, STNode name) { STNode openParenToken = parseOpenParenthesis(ParserRuleContext.ARG_LIST_START); STNode arguments = parseArgsList(); STNode closeParenToken = parseCloseParenthesis(); return STNodeFactory.createRemoteMethodCallActionNode(expression, rightArrow, name, openParenToken, arguments, closeParenToken); } /** * Parse right arrow (<code>-></code>) token. * * @return Parsed node */ private STNode parseRightArrow() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.RIGHT_ARROW_TOKEN) { return consume(); } else { Solution sol = recover(nextToken, ParserRuleContext.RIGHT_ARROW); return sol.recoveredNode; } } /** * Parse parameterized type descriptor. * parameterized-type-descriptor := map type-parameter | future type-parameter | typedesc type-parameter * * @return Parsed node */ private STNode parseParameterizedTypeDescriptor() { STNode parameterizedTypeKeyword = parseParameterizedTypeKeyword(); STNode ltToken = parseLTToken(); STNode typeNode = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS); STNode gtToken = parseGTToken(); return STNodeFactory.createParameterizedTypeDescriptorNode(parameterizedTypeKeyword, ltToken, typeNode, gtToken); } /** * Parse <code>map</code> or <code>future</code> or <code>typedesc</code> keyword token. 
* * @return Parsed node */ private STNode parseParameterizedTypeKeyword() { STToken nextToken = peek(); switch (nextToken.kind) { case MAP_KEYWORD: case FUTURE_KEYWORD: case TYPEDESC_KEYWORD: return consume(); default: Solution sol = recover(nextToken, ParserRuleContext.PARAMETERIZED_TYPE); return sol.recoveredNode; } } /** * Parse <code> < </code> token. * * @return Parsed node */ private STNode parseGTToken() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.GT_TOKEN) { return consume(); } else { Solution sol = recover(nextToken, ParserRuleContext.GT); return sol.recoveredNode; } } /** * Parse <code> > </code> token. * * @return Parsed node */ private STNode parseLTToken() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.LT_TOKEN) { return consume(); } else { Solution sol = recover(nextToken, ParserRuleContext.LT); return sol.recoveredNode; } } /** * Parse nil literal. Here nil literal is only referred to ( ). * * @return Parsed node */ private STNode parseNilLiteral() { startContext(ParserRuleContext.NIL_LITERAL); STNode openParenthesisToken = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS); STNode closeParenthesisToken = parseCloseParenthesis(); endContext(); return STNodeFactory.createNilLiteralNode(openParenthesisToken, closeParenthesisToken); } /** * Parse annotation declaration, given the qualifier. * * @param metadata Metadata * @param qualifier Qualifier that precedes the listener declaration * @param constKeyword Const keyword * @return Parsed node */ private STNode parseAnnotationDeclaration(STNode metadata, STNode qualifier, STNode constKeyword) { startContext(ParserRuleContext.ANNOTATION_DECL); STNode annotationKeyword = parseAnnotationKeyword(); STNode annotDecl = parseAnnotationDeclFromType(metadata, qualifier, constKeyword, annotationKeyword); endContext(); return annotDecl; } /** * Parse annotation keyword. 
* * @return Parsed node */ private STNode parseAnnotationKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.ANNOTATION_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.ANNOTATION_KEYWORD); return sol.recoveredNode; } } /** * Parse the components that follows after the annotation keyword of a annotation declaration. * * @param metadata Metadata * @param qualifier Qualifier that precedes the constant decl * @param constKeyword Const keyword * @param annotationKeyword * @return Parsed node */ private STNode parseAnnotationDeclFromType(STNode metadata, STNode qualifier, STNode constKeyword, STNode annotationKeyword) { STToken nextToken = peek(); return parseAnnotationDeclFromType(nextToken.kind, metadata, qualifier, constKeyword, annotationKeyword); } private STNode parseAnnotationDeclFromType(SyntaxKind nextTokenKind, STNode metadata, STNode qualifier, STNode constKeyword, STNode annotationKeyword) { switch (nextTokenKind) { case IDENTIFIER_TOKEN: return parseAnnotationDeclWithOptionalType(metadata, qualifier, constKeyword, annotationKeyword); default: if (isTypeStartingToken(nextTokenKind)) { break; } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.ANNOT_DECL_OPTIONAL_TYPE, metadata, qualifier, constKeyword, annotationKeyword); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseAnnotationDeclFromType(solution.tokenKind, metadata, qualifier, constKeyword, annotationKeyword); } STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANNOTATION_DECL); STNode annotTag = parseAnnotationTag(); return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc, annotTag); } /** * Parse annotation tag. 
* <p> * <code>annot-tag := identifier</code> * * @return */ private STNode parseAnnotationTag() { STToken token = peek(); if (token.kind == SyntaxKind.IDENTIFIER_TOKEN) { return consume(); } else { Solution sol = recover(peek(), ParserRuleContext.ANNOTATION_TAG); return sol.recoveredNode; } } private STNode parseAnnotationDeclWithOptionalType(STNode metadata, STNode qualifier, STNode constKeyword, STNode annotationKeyword) { STNode typeDescOrAnnotTag = parseAnnotationTag(); if (typeDescOrAnnotTag.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) { STNode annotTag = parseAnnotationTag(); return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDescOrAnnotTag, annotTag); } return parseAnnotationDeclRhs(metadata, qualifier, constKeyword, annotationKeyword, typeDescOrAnnotTag); } /** * Parse the component that follows the first identifier in an annotation decl. The identifier * can be either the type-name (a user defined type) or the annot-tag, where the type-name * is not present. 
* * @param metadata Metadata * @param qualifier Qualifier that precedes the annotation decl * @param constKeyword Const keyword * @param annotationKeyword Annotation keyword * @param typeDescOrAnnotTag Identifier that follows the annotation-keyword * @return Parsed node */ private STNode parseAnnotationDeclRhs(STNode metadata, STNode qualifier, STNode constKeyword, STNode annotationKeyword, STNode typeDescOrAnnotTag) { STToken token = peek(); return parseAnnotationDeclRhs(token.kind, metadata, qualifier, constKeyword, annotationKeyword, typeDescOrAnnotTag); } private STNode parseAnnotationDeclRhs(SyntaxKind nextTokenKind, STNode metadata, STNode qualifier, STNode constKeyword, STNode annotationKeyword, STNode typeDescOrAnnotTag) { STNode typeDesc; STNode annotTag; switch (nextTokenKind) { case IDENTIFIER_TOKEN: typeDesc = typeDescOrAnnotTag; annotTag = parseAnnotationTag(); break; case SEMICOLON_TOKEN: case ON_KEYWORD: typeDesc = STNodeFactory.createEmptyNode(); annotTag = typeDescOrAnnotTag; break; default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.ANNOT_DECL_RHS, metadata, qualifier, constKeyword, annotationKeyword, typeDescOrAnnotTag); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseAnnotationDeclRhs(solution.tokenKind, metadata, qualifier, constKeyword, annotationKeyword, typeDescOrAnnotTag); } return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc, annotTag); } private STNode parseAnnotationDeclAttachPoints(STNode metadata, STNode qualifier, STNode constKeyword, STNode annotationKeyword, STNode typeDesc, STNode annotTag) { STToken nextToken = peek(); return parseAnnotationDeclAttachPoints(nextToken.kind, metadata, qualifier, constKeyword, annotationKeyword, typeDesc, annotTag); } private STNode parseAnnotationDeclAttachPoints(SyntaxKind nextTokenKind, STNode metadata, STNode qualifier, STNode constKeyword, STNode annotationKeyword, STNode 
typeDesc, STNode annotTag) { STNode onKeyword; STNode attachPoints; switch (nextTokenKind) { case SEMICOLON_TOKEN: onKeyword = STNodeFactory.createEmptyNode(); attachPoints = STNodeFactory.createEmptyNode(); break; case ON_KEYWORD: onKeyword = parseOnKeyword(); attachPoints = parseAnnotationAttachPoints(); onKeyword = addDiagnosticIfListEmpty(attachPoints, onKeyword, DiagnosticErrorCode.ERROR_MISSING_ANNOTATION_ATTACH_POINT); break; default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.ANNOT_OPTIONAL_ATTACH_POINTS, metadata, qualifier, constKeyword, annotationKeyword, typeDesc, annotTag); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseAnnotationDeclAttachPoints(solution.tokenKind, metadata, qualifier, constKeyword, annotationKeyword, typeDesc, annotTag); } STNode semicolonToken = parseSemicolon(); return STNodeFactory.createAnnotationDeclarationNode(metadata, qualifier, constKeyword, annotationKeyword, typeDesc, annotTag, onKeyword, attachPoints, semicolonToken); } /** * Parse annotation attach points. 
* <p>
 * <code>
 * annot-attach-points := annot-attach-point (, annot-attach-point)*
 * <br/><br/>
 * annot-attach-point := dual-attach-point | source-only-attach-point
 * <br/><br/>
 * dual-attach-point := [source] dual-attach-point-ident
 * <br/><br/>
 * dual-attach-point-ident :=
 *     [object] type
 *     | [object|resource] function
 *     | parameter
 *     | return
 *     | service
 *     | [object|record] field
 * <br/><br/>
 * source-only-attach-point := source source-only-attach-point-ident
 * <br/><br/>
 * source-only-attach-point-ident :=
 *     annotation
 *     | external
 *     | var
 *     | const
 *     | listener
 *     | worker
 * </code>
 *
 * @return Parsed node
 */
private STNode parseAnnotationAttachPoints() {
    startContext(ParserRuleContext.ANNOT_ATTACH_POINTS_LIST);
    List<STNode> attachPoints = new ArrayList<>();
    STToken nextToken = peek();
    if (isEndAnnotAttachPointList(nextToken.kind)) {
        endContext();
        return STNodeFactory.createEmptyNodeList();
    }

    // Parse the first attach-point; the rest are parsed as (separator, attach-point) pairs.
    STNode attachPoint = parseAnnotationAttachPoint();
    attachPoints.add(attachPoint);

    nextToken = peek();
    STNode leadingComma;
    while (!isEndAnnotAttachPointList(nextToken.kind)) {
        leadingComma = parseAttachPointEnd();
        if (leadingComma == null) {
            break;
        }
        attachPoints.add(leadingComma);

        attachPoint = parseAnnotationAttachPoint();
        if (attachPoint == null) {
            // Reached EOF (see parseAnnotationAttachPoint): add a missing attach-point token
            // so the trailing separator is not left dangling.
            attachPoint = errorHandler.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,
                    DiagnosticErrorCode.ERROR_MISSING_ANNOTATION_ATTACH_POINT);
            attachPoints.add(attachPoint);
            break;
        }

        attachPoints.add(attachPoint);
        nextToken = peek();
    }

    endContext();
    return STNodeFactory.createNodeList(attachPoints);
}

/**
 * Parse annotation attach point end.
*
 * @return Parsed node
 */
private STNode parseAttachPointEnd() {
    STToken nextToken = peek();
    return parseAttachPointEnd(nextToken.kind);
}

private STNode parseAttachPointEnd(SyntaxKind nextTokenKind) {
    switch (nextTokenKind) {
        case SEMICOLON_TOKEN:
            // null marks the end of the attach-point list.
            return null;
        case COMMA_TOKEN:
            return consume();
        default:
            Solution sol = recover(peek(), ParserRuleContext.ATTACH_POINT_END);
            if (sol.action == Action.REMOVE) {
                return sol.recoveredNode;
            }

            // Treat anything else as the end of the list, unless recovery produced a comma.
            return sol.tokenKind == SyntaxKind.COMMA_TOKEN ? sol.recoveredNode : null;
    }
}

private boolean isEndAnnotAttachPointList(SyntaxKind tokenKind) {
    switch (tokenKind) {
        case EOF_TOKEN:
        case SEMICOLON_TOKEN:
            return true;
        default:
            return false;
    }
}

/**
 * Parse annotation attach point.
 *
 * @return Parsed node
 */
private STNode parseAnnotationAttachPoint() {
    return parseAnnotationAttachPoint(peek().kind);
}

private STNode parseAnnotationAttachPoint(SyntaxKind nextTokenKind) {
    switch (nextTokenKind) {
        case EOF_TOKEN:
            return null;

        // Source-only attach points require the "source" keyword; when the ident appears
        // without it, parseSourceKeyword() recovers the missing keyword.
        case ANNOTATION_KEYWORD:
        case EXTERNAL_KEYWORD:
        case VAR_KEYWORD:
        case CONST_KEYWORD:
        case LISTENER_KEYWORD:
        case WORKER_KEYWORD:
            // fall through

        case SOURCE_KEYWORD:
            STNode sourceKeyword = parseSourceKeyword();
            return parseAttachPointIdent(sourceKeyword);

        // Dual attach points: the optional "source" keyword is absent here.
        case OBJECT_KEYWORD:
        case TYPE_KEYWORD:
        case RESOURCE_KEYWORD:
        case FUNCTION_KEYWORD:
        case PARAMETER_KEYWORD:
        case RETURN_KEYWORD:
        case SERVICE_KEYWORD:
        case FIELD_KEYWORD:
        case RECORD_KEYWORD:
            sourceKeyword = STNodeFactory.createEmptyNode();
            STNode firstIdent = consume();
            return parseDualAttachPointIdent(sourceKeyword, firstIdent);
        default:
            Solution solution = recover(peek(), ParserRuleContext.ATTACH_POINT);
            return solution.recoveredNode;
    }
}

/**
 * Parse source keyword.
 *
 * @return Parsed node
 */
private STNode parseSourceKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.SOURCE_KEYWORD) {
        return consume();
    } else {
        Solution sol = recover(token, ParserRuleContext.SOURCE_KEYWORD);
        return sol.recoveredNode;
    }
}

/**
 * Parse attach point ident given
* <p>
 * <code>
 * source-only-attach-point-ident := annotation | external | var | const | listener | worker
 * <br/><br/>
 * dual-attach-point-ident := [object] type | [object|resource] function | parameter
 *     | return | service | [object|record] field
 * </code>
 *
 * @param sourceKeyword Source keyword
 * @return Parsed node
 */
private STNode parseAttachPointIdent(STNode sourceKeyword) {
    return parseAttachPointIdent(peek().kind, sourceKeyword);
}

private STNode parseAttachPointIdent(SyntaxKind nextTokenKind, STNode sourceKeyword) {
    switch (nextTokenKind) {
        // Source-only attach points: a single ident; no second ident is possible.
        case ANNOTATION_KEYWORD:
        case EXTERNAL_KEYWORD:
        case VAR_KEYWORD:
        case CONST_KEYWORD:
        case LISTENER_KEYWORD:
        case WORKER_KEYWORD:
            STNode firstIdent = consume();
            STNode secondIdent = STNodeFactory.createEmptyNode();
            return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, firstIdent, secondIdent);

        // Dual attach points: may be followed by a second ident (e.g. "object function").
        case OBJECT_KEYWORD:
        case RESOURCE_KEYWORD:
        case RECORD_KEYWORD:
        case TYPE_KEYWORD:
        case FUNCTION_KEYWORD:
        case PARAMETER_KEYWORD:
        case RETURN_KEYWORD:
        case SERVICE_KEYWORD:
        case FIELD_KEYWORD:
            firstIdent = consume();
            return parseDualAttachPointIdent(sourceKeyword, firstIdent);
        default:
            Solution solution = recover(peek(), ParserRuleContext.ATTACH_POINT_IDENT, sourceKeyword);
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }

            firstIdent = solution.recoveredNode;
            return parseDualAttachPointIdent(sourceKeyword, firstIdent);
    }
}

/**
 * Parse dual-attach-point ident.
*
 * @param sourceKeyword Source keyword
 * @param firstIdent    first part of the dual attach-point
 * @return Parsed node
 */
private STNode parseDualAttachPointIdent(STNode sourceKeyword, STNode firstIdent) {
    STNode secondIdent;
    switch (firstIdent.kind) {
        case OBJECT_KEYWORD:
            // "object" may be followed by: type | function | field
            secondIdent = parseIdentAfterObjectIdent();
            break;
        case RESOURCE_KEYWORD:
            // "resource" must be followed by "function".
            secondIdent = parseFunctionIdent();
            break;
        case RECORD_KEYWORD:
            // "record" must be followed by "field".
            secondIdent = parseFieldIdent();
            break;
        case TYPE_KEYWORD:
        case FUNCTION_KEYWORD:
        case PARAMETER_KEYWORD:
        case RETURN_KEYWORD:
        case SERVICE_KEYWORD:
        case FIELD_KEYWORD:
        default:
            // Single-ident attach point.
            secondIdent = STNodeFactory.createEmptyNode();
            break;
    }

    return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, firstIdent, secondIdent);
}

/**
 * Parse the idents that are supported after object-ident.
 *
 * @return Parsed node
 */
private STNode parseIdentAfterObjectIdent() {
    STToken token = peek();
    switch (token.kind) {
        case TYPE_KEYWORD:
        case FUNCTION_KEYWORD:
        case FIELD_KEYWORD:
            return consume();
        default:
            Solution sol = recover(token, ParserRuleContext.IDENT_AFTER_OBJECT_IDENT);
            return sol.recoveredNode;
    }
}

/**
 * Parse function ident.
 *
 * @return Parsed node
 */
private STNode parseFunctionIdent() {
    STToken token = peek();
    if (token.kind == SyntaxKind.FUNCTION_KEYWORD) {
        return consume();
    } else {
        Solution sol = recover(token, ParserRuleContext.FUNCTION_IDENT);
        return sol.recoveredNode;
    }
}

/**
 * Parse field ident.
 *
 * @return Parsed node
 */
private STNode parseFieldIdent() {
    STToken token = peek();
    if (token.kind == SyntaxKind.FIELD_KEYWORD) {
        return consume();
    } else {
        Solution sol = recover(token, ParserRuleContext.FIELD_IDENT);
        return sol.recoveredNode;
    }
}

/**
 * Parse XML namespace declaration.
* <p>
 * <code>xmlns-decl := xmlns xml-namespace-uri [ as xml-namespace-prefix ] ;
 * <br/>
 * xml-namespace-uri := simple-const-expr
 * <br/>
 * xml-namespace-prefix := identifier
 * </code>
 *
 * @return Parsed XML namespace declaration node
 */
private STNode parseXMLNamepsaceDeclaration() {
    // NOTE(review): method name has a typo ("Namepsace"); kept as-is since callers
    // elsewhere in this file reference it by this name.
    startContext(ParserRuleContext.XML_NAMESPACE_DECLARATION);
    STNode xmlnsKeyword = parseXMLNSKeyword();
    STNode namespaceUri = parseXMLNamespaceUri();
    STNode xmlnsDecl = parseXMLDeclRhs(xmlnsKeyword, namespaceUri);
    endContext();
    return xmlnsDecl;
}

/**
 * Parse xmlns keyword.
 *
 * @return Parsed node
 */
private STNode parseXMLNSKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.XMLNS_KEYWORD) {
        return consume();
    } else {
        Solution sol = recover(token, ParserRuleContext.XMLNS_KEYWORD);
        return sol.recoveredNode;
    }
}

/**
 * Parse namespace uri.
 *
 * @return Parsed node
 */
private STNode parseXMLNamespaceUri() {
    STNode expr = parseConstExpr();
    switch (expr.kind) {
        case STRING_LITERAL:
        case IDENTIFIER_TOKEN:
        case QUALIFIED_NAME_REFERENCE:
            break;
        default:
            // The namespace URI must be a string-valued constant expression.
            this.errorHandler.reportInvalidNode(null, "namespace uri must be a subtype of string");
    }

    return expr;
}

// Parse a constant expression, within its own parser context.
private STNode parseConstExpr() {
    startContext(ParserRuleContext.CONSTANT_EXPRESSION);
    STNode expr = parseConstExprInternal();
    endContext();
    return expr;
}

private STNode parseConstExprInternal() {
    STToken nextToken = peek();
    return parseConstExprInternal(nextToken.kind);
}

/**
 * Parse constants expr.
*
 * @return Parsed node
 */
private STNode parseConstExprInternal(SyntaxKind nextTokenKind) {
    switch (nextTokenKind) {
        case STRING_LITERAL:
        case DECIMAL_INTEGER_LITERAL:
        case HEX_INTEGER_LITERAL:
        case DECIMAL_FLOATING_POINT_LITERAL:
        case HEX_FLOATING_POINT_LITERAL:
        case TRUE_KEYWORD:
        case FALSE_KEYWORD:
        case NULL_KEYWORD:
            return parseBasicLiteral();
        case IDENTIFIER_TOKEN:
            // A (possibly qualified) reference to another constant.
            return parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);
        case PLUS_TOKEN:
        case MINUS_TOKEN:
            return parseSignedIntOrFloat();
        case OPEN_BRACE_TOKEN:
            // NOTE(review): OPEN_BRACE_TOKEN mapping to a nil literal looks suspect
            // (nil is "()", which starts with an open paren) — confirm intended.
            return parseNilLiteral();
        default:
            STToken token = peek();
            Solution solution = recover(token, ParserRuleContext.CONSTANT_EXPRESSION_START);
            return solution.recoveredNode;
    }
}

/**
 * Parse the portion after the namespace-uri of an XML declaration.
 *
 * @param xmlnsKeyword XMLNS keyword
 * @param namespaceUri Namespace URI
 * @return Parsed node
 */
private STNode parseXMLDeclRhs(STNode xmlnsKeyword, STNode namespaceUri) {
    return parseXMLDeclRhs(peek().kind, xmlnsKeyword, namespaceUri);
}

private STNode parseXMLDeclRhs(SyntaxKind nextTokenKind, STNode xmlnsKeyword, STNode namespaceUri) {
    STNode asKeyword = STNodeFactory.createEmptyNode();
    STNode namespacePrefix = STNodeFactory.createEmptyNode();

    switch (nextTokenKind) {
        case AS_KEYWORD:
            // Optional "as prefix" portion.
            asKeyword = parseAsKeyword();
            namespacePrefix = parseNamespacePrefix();
            break;
        case SEMICOLON_TOKEN:
            break;
        default:
            STToken token = peek();
            Solution solution = recover(token, ParserRuleContext.XML_NAMESPACE_PREFIX_DECL, xmlnsKeyword,
                    namespaceUri);

            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }

            return parseXMLDeclRhs(solution.tokenKind, xmlnsKeyword, namespaceUri);
    }

    STNode semicolon = parseSemicolon();
    return STNodeFactory.createXMLNamespaceDeclarationNode(xmlnsKeyword, namespaceUri, asKeyword, namespacePrefix,
            semicolon);
}

/**
 * Parse import prefix.
*
 * @return Parsed node
 */
private STNode parseNamespacePrefix() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {
        return consume();
    } else {
        Solution sol = recover(peek(), ParserRuleContext.NAMESPACE_PREFIX);
        return sol.recoveredNode;
    }
}

/**
 * Parse named worker declaration.
 * <p>
 * <code>named-worker-decl := [annots] worker worker-name return-type-descriptor { sequence-stmt }</code>
 *
 * @param annots Annotations attached to the worker decl
 * @return Parsed node
 */
private STNode parseNamedWorkerDeclaration(STNode annots) {
    startContext(ParserRuleContext.NAMED_WORKER_DECL);
    STNode workerKeyword = parseWorkerKeyword();
    STNode workerName = parseWorkerName();
    STNode returnTypeDesc = parseReturnTypeDescriptor();
    STNode workerBody = parseBlockNode();
    endContext();
    return STNodeFactory.createNamedWorkerDeclarationNode(annots, workerKeyword, workerName, returnTypeDesc,
            workerBody);
}

// Parse an optional "returns [annots] type" descriptor; returns an empty node when absent.
private STNode parseReturnTypeDescriptor() {
    STToken token = peek();
    if (token.kind != SyntaxKind.RETURNS_KEYWORD) {
        return STNodeFactory.createEmptyNode();
    }

    STNode returnsKeyword = consume();
    STNode annot = parseAnnotations();
    STNode type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_RETURN_TYPE_DESC);
    return STNodeFactory.createReturnTypeDescriptorNode(returnsKeyword, annot, type);
}

/**
 * Parse worker keyword.
 *
 * @return Parsed node
 */
private STNode parseWorkerKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.WORKER_KEYWORD) {
        return consume();
    } else {
        Solution sol = recover(peek(), ParserRuleContext.WORKER_KEYWORD);
        return sol.recoveredNode;
    }
}

/**
 * Parse worker name.
 * <p>
 * <code>worker-name := identifier</code>
 *
 * @return Parsed node
 */
private STNode parseWorkerName() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {
        return consume();
    } else {
        Solution sol = recover(peek(), ParserRuleContext.WORKER_NAME);
        return sol.recoveredNode;
    }
}

/**
 * Parse documentation string.
* <p>
 * <code>DocumentationString := DocumentationLine +</code>
 * <p>
 * Refer {@link BallerinaLexer} for how documentation lines are tokenized.
 *
 * @return Parsed node
 */
private STNode parseDocumentationString() {
    List<STNode> docLines = new ArrayList<>();
    STToken nextToken = peek();
    while (nextToken.kind == SyntaxKind.DOCUMENTATION_LINE) {
        docLines.add(consume());
        nextToken = peek();
    }

    STNode documentationLines = STNodeFactory.createNodeList(docLines);
    return STNodeFactory.createDocumentationStringNode(documentationLines);
}

// Create a nil-literal node from the already-parsed parentheses.
private STNode createNilLiteral(STNode openParenthesisToken, STNode closeParenthesisToken) {
    return STNodeFactory.createNilLiteralNode(openParenthesisToken, closeParenthesisToken);
}

/**
 * Parse lock statement.
 * <code>lock-stmt := lock block-stmt ;</code>
 *
 * @return Lock statement
 */
private STNode parseLockStatement() {
    startContext(ParserRuleContext.LOCK_STMT);
    STNode lockKeyword = parseLockKeyword();
    STNode blockStatement = parseBlockNode();
    endContext();
    return STNodeFactory.createLockStatementNode(lockKeyword, blockStatement);
}

/**
 * Parse lock-keyword.
 *
 * @return lock-keyword node
 */
private STNode parseLockKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.LOCK_KEYWORD) {
        return consume();
    } else {
        Solution sol = recover(token, ParserRuleContext.LOCK_KEYWORD);
        return sol.recoveredNode;
    }
}

/**
 * Parse union type descriptor.
 * union-type-descriptor := type-descriptor | type-descriptor
 *
 * @param leftTypeDesc Type desc in the LHS of the union type desc.
 * @param context      Current context.
 * @return parsed union type desc node
 */
private STNode parseUnionTypeDescriptor(STNode leftTypeDesc, ParserRuleContext context) {
    STNode pipeToken = parsePipeToken();
    STNode rightTypeDesc = parseTypeDescriptor(context);
    return STNodeFactory.createUnionTypeDescriptorNode(leftTypeDesc, pipeToken, rightTypeDesc);
}

/**
 * Parse pipe token.
*
 * @return parsed pipe token node
 */
private STNode parsePipeToken() {
    STToken token = peek();
    if (token.kind == SyntaxKind.PIPE_TOKEN) {
        return consume();
    } else {
        Solution sol = recover(token, ParserRuleContext.PIPE);
        return sol.recoveredNode;
    }
}

// Check whether the given token kind can start a type descriptor.
private boolean isTypeStartingToken(SyntaxKind nodeKind) {
    switch (nodeKind) {
        case IDENTIFIER_TOKEN:
        case SERVICE_KEYWORD:
        case RECORD_KEYWORD:
        case OBJECT_KEYWORD:
        case ABSTRACT_KEYWORD:
        case CLIENT_KEYWORD:
        case OPEN_PAREN_TOKEN:
        case MAP_KEYWORD:
        case FUTURE_KEYWORD:
        case TYPEDESC_KEYWORD:
        case ERROR_KEYWORD:
        case STREAM_KEYWORD:
        case TABLE_KEYWORD:
        case FUNCTION_KEYWORD:
        case OPEN_BRACKET_TOKEN:
            return true;
        default:
            if (isSingletonTypeDescStart(nodeKind, true)) {
                return true;
            }
            return isSimpleType(nodeKind);
    }
}

// Check whether the given token kind is a built-in (simple) type keyword.
static boolean isSimpleType(SyntaxKind nodeKind) {
    switch (nodeKind) {
        case INT_KEYWORD:
        case FLOAT_KEYWORD:
        case DECIMAL_KEYWORD:
        case BOOLEAN_KEYWORD:
        case STRING_KEYWORD:
        case BYTE_KEYWORD:
        case XML_KEYWORD:
        case JSON_KEYWORD:
        case HANDLE_KEYWORD:
        case ANY_KEYWORD:
        case ANYDATA_KEYWORD:
        case NEVER_KEYWORD:
        case SERVICE_KEYWORD:
        case VAR_KEYWORD:
        case ERROR_KEYWORD:
        case STREAM_KEYWORD:
        case READONLY_KEYWORD:
        case DISTINCT_KEYWORD:
            return true;
        case TYPE_DESC:
            // An already-parsed type-desc node is also accepted here.
            return true;
        default:
            return false;
    }
}

// Map a built-in type keyword to the corresponding type-descriptor syntax kind.
private SyntaxKind getTypeSyntaxKind(SyntaxKind typeKeyword) {
    switch (typeKeyword) {
        case INT_KEYWORD:
            return SyntaxKind.INT_TYPE_DESC;
        case FLOAT_KEYWORD:
            return SyntaxKind.FLOAT_TYPE_DESC;
        case DECIMAL_KEYWORD:
            return SyntaxKind.DECIMAL_TYPE_DESC;
        case BOOLEAN_KEYWORD:
            return SyntaxKind.BOOLEAN_TYPE_DESC;
        case STRING_KEYWORD:
            return SyntaxKind.STRING_TYPE_DESC;
        case BYTE_KEYWORD:
            return SyntaxKind.BYTE_TYPE_DESC;
        case XML_KEYWORD:
            return SyntaxKind.XML_TYPE_DESC;
        case JSON_KEYWORD:
            return SyntaxKind.JSON_TYPE_DESC;
        case HANDLE_KEYWORD:
            return SyntaxKind.HANDLE_TYPE_DESC;
        case ANY_KEYWORD:
            return SyntaxKind.ANY_TYPE_DESC;
        case ANYDATA_KEYWORD:
            return SyntaxKind.ANYDATA_TYPE_DESC;
        case NEVER_KEYWORD:
            return
SyntaxKind.NEVER_TYPE_DESC;
        case SERVICE_KEYWORD:
            return SyntaxKind.SERVICE_TYPE_DESC;
        case VAR_KEYWORD:
            return SyntaxKind.VAR_TYPE_DESC;
        default:
            return SyntaxKind.TYPE_DESC;
    }
}

/**
 * Parse fork-keyword.
 *
 * @return Fork-keyword node
 */
private STNode parseForkKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.FORK_KEYWORD) {
        return consume();
    } else {
        Solution sol = recover(token, ParserRuleContext.FORK_KEYWORD);
        return sol.recoveredNode;
    }
}

/**
 * Parse multiple named worker declarations.
 *
 * @return named-worker-declarations node array
 */
private STNode parseMultipleNamedWorkerDeclarations() {
    ArrayList<STNode> workers = new ArrayList<>();
    while (!isEndOfStatements()) {
        STNode stmt = parseStatement();
        if (stmt == null) {
            break;
        }
        switch (stmt.kind) {
            case NAMED_WORKER_DECLARATION:
                workers.add(stmt);
                break;
            default:
                // Anything other than a named worker is invalid inside a fork block.
                this.errorHandler.reportInvalidNode(null, "Only named-workers are allowed here");
                break;
        }
    }

    if (workers.isEmpty()) {
        this.errorHandler.reportInvalidNode(null, "Fork Statement must contain atleast one named-worker");
    }
    STNode namedWorkers = STNodeFactory.createNodeList(workers);
    return namedWorkers;
}

/**
 * Parse fork statement.
 * <code>fork-stmt := fork { named-worker-decl+ }</code>
 *
 * @return Fork statement
 */
private STNode parseForkStatement() {
    startContext(ParserRuleContext.FORK_STMT);
    STNode forkKeyword = parseForkKeyword();
    STNode openBrace = parseOpenBrace();
    STNode namedWorkerDeclarations = parseMultipleNamedWorkerDeclarations();
    STNode closeBrace = parseCloseBrace();
    endContext();
    return STNodeFactory.createForkStatementNode(forkKeyword, openBrace, namedWorkerDeclarations, closeBrace);
}

/**
 * Parse decimal floating point literal.
*
 * @return Parsed node
 */
private STNode parseDecimalFloatingPointLiteral() {
    STToken token = peek();
    if (token.kind == SyntaxKind.DECIMAL_FLOATING_POINT_LITERAL) {
        return consume();
    } else {
        Solution sol = recover(token, ParserRuleContext.DECIMAL_FLOATING_POINT_LITERAL);
        return sol.recoveredNode;
    }
}

/**
 * Parse hex floating point literal.
 *
 * @return Parsed node
 */
private STNode parseHexFloatingPointLiteral() {
    STToken token = peek();
    if (token.kind == SyntaxKind.HEX_FLOATING_POINT_LITERAL) {
        return consume();
    } else {
        Solution sol = recover(token, ParserRuleContext.HEX_FLOATING_POINT_LITERAL);
        return sol.recoveredNode;
    }
}

/**
 * Parse trap expression.
 * <p>
 * <code>
 * trap-expr := trap expression
 * </code>
 *
 * @param allowActions Allow actions
 * @param isRhsExpr    Whether this is a RHS expression or not
 * @return Trap expression node
 */
private STNode parseTrapExpression(boolean isRhsExpr, boolean allowActions) {
    STNode trapKeyword = parseTrapKeyword();
    STNode expr = parseExpression(OperatorPrecedence.EXPRESSION_ACTION, isRhsExpr, allowActions);
    if (isAction(expr)) {
        // "trap" applied to an action node yields a trap-action rather than a trap-expression.
        return STNodeFactory.createTrapExpressionNode(SyntaxKind.TRAP_ACTION, trapKeyword, expr);
    }

    return STNodeFactory.createTrapExpressionNode(SyntaxKind.TRAP_EXPRESSION, trapKeyword, expr);
}

/**
 * Parse trap-keyword.
 *
 * @return Trap-keyword node
 */
private STNode parseTrapKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.TRAP_KEYWORD) {
        return consume();
    } else {
        Solution sol = recover(token, ParserRuleContext.TRAP_KEYWORD);
        return sol.recoveredNode;
    }
}

/**
 * Parse list constructor expression.
* <p>
 * <code>
 * list-constructor-expr := [ [ expr-list ] ]
 * <br/>
 * expr-list := expression (, expression)*
 * </code>
 *
 * @return Parsed node
 */
private STNode parseListConstructorExpr() {
    startContext(ParserRuleContext.LIST_CONSTRUCTOR);
    STNode openBracket = parseOpenBracket();
    STNode expressions = parseOptionalExpressionsList();
    STNode closeBracket = parseCloseBracket();
    endContext();
    return STNodeFactory.createListConstructorExpressionNode(openBracket, expressions, closeBracket);
}

/**
 * Parse optional expression list.
 *
 * @return Parsed node
 */
private STNode parseOptionalExpressionsList() {
    List<STNode> expressions = new ArrayList<>();
    if (isEndOfListConstructor(peek().kind)) {
        return STNodeFactory.createNodeList(expressions);
    }

    STNode expr = parseExpression();
    expressions.add(expr);
    return parseOptionalExpressionsList(expressions);
}

// Parse the remaining (separator, expression) pairs until the close bracket.
private STNode parseOptionalExpressionsList(List<STNode> expressions) {
    STNode listConstructorMemberEnd;
    while (!isEndOfListConstructor(peek().kind)) {
        listConstructorMemberEnd = parseListConstructorMemberEnd();
        if (listConstructorMemberEnd == null) {
            break;
        }
        expressions.add(listConstructorMemberEnd);
        STNode expr = parseExpression();
        expressions.add(expr);
    }

    return STNodeFactory.createNodeList(expressions);
}

private boolean isEndOfListConstructor(SyntaxKind tokenKind) {
    switch (tokenKind) {
        case EOF_TOKEN:
        case CLOSE_BRACKET_TOKEN:
            return true;
        default:
            return false;
    }
}

private STNode parseListConstructorMemberEnd() {
    return parseListConstructorMemberEnd(peek().kind);
}

private STNode parseListConstructorMemberEnd(SyntaxKind nextTokenKind) {
    switch (nextTokenKind) {
        case COMMA_TOKEN:
            return parseComma();
        case CLOSE_BRACKET_TOKEN:
            // null marks the end of the member list.
            return null;
        default:
            Solution solution = recover(peek(), ParserRuleContext.LIST_CONSTRUCTOR_MEMBER_END);
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }
            return parseListConstructorMemberEnd(solution.tokenKind);
    }
}

/**
 * Parse foreach statement.
* <code>foreach-stmt := foreach typed-binding-pattern in action-or-expr block-stmt</code>
 *
 * @return foreach statement
 */
private STNode parseForEachStatement() {
    startContext(ParserRuleContext.FOREACH_STMT);
    STNode forEachKeyword = parseForEachKeyword();
    STNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.FOREACH_STMT);
    STNode inKeyword = parseInKeyword();
    STNode actionOrExpr = parseActionOrExpression();
    STNode blockStatement = parseBlockNode();
    endContext();
    return STNodeFactory.createForEachStatementNode(forEachKeyword, typedBindingPattern, inKeyword, actionOrExpr,
            blockStatement);
}

/**
 * Parse foreach-keyword.
 *
 * @return ForEach-keyword node
 */
private STNode parseForEachKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.FOREACH_KEYWORD) {
        return consume();
    } else {
        Solution sol = recover(token, ParserRuleContext.FOREACH_KEYWORD);
        return sol.recoveredNode;
    }
}

/**
 * Parse in-keyword.
 *
 * @return In-keyword node
 */
private STNode parseInKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.IN_KEYWORD) {
        return consume();
    } else {
        Solution sol = recover(token, ParserRuleContext.IN_KEYWORD);
        return sol.recoveredNode;
    }
}

/**
 * Parse type cast expression.
* <p>
 * <code>
 * type-cast-expr := < type-cast-param > expression
 * <br/>
 * type-cast-param := [annots] type-descriptor | annots
 * </code>
 *
 * @return Parsed node
 */
private STNode parseTypeCastExpr(boolean isRhsExpr) {
    startContext(ParserRuleContext.TYPE_CAST);
    STNode ltToken = parseLTToken();
    STNode typeCastParam = parseTypeCastParam();
    STNode gtToken = parseGTToken();
    endContext();

    // Parse the target expression at unary precedence.
    STNode expression = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false);
    return STNodeFactory.createTypeCastExpressionNode(ltToken, typeCastParam, gtToken, expression);
}

private STNode parseTypeCastParam() {
    STNode annot;
    STNode type;
    STToken token = peek();

    switch (token.kind) {
        case AT_TOKEN:
            annot = parseAnnotations();
            token = peek();
            if (isTypeStartingToken(token.kind)) {
                type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);
            } else {
                // Annotations alone form a valid type-cast-param.
                type = STNodeFactory.createEmptyNode();
            }
            break;
        default:
            annot = STNodeFactory.createEmptyNode();
            type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);
            break;
    }

    return STNodeFactory.createTypeCastParamNode(getAnnotations(annot), type);
}

/**
 * Parse table constructor expression.
 * <p>
 * <code>
 * table-constructor-expr-rhs := [ [row-list] ]
 * </code>
 *
 * @param tableKeyword tableKeyword that precedes this rhs
 * @param keySpecifier keySpecifier that precedes this rhs
 * @return Parsed node
 */
private STNode parseTableConstructorExprRhs(STNode tableKeyword, STNode keySpecifier) {
    switchContext(ParserRuleContext.TABLE_CONSTRUCTOR);
    STNode openBracket = parseOpenBracket();
    STNode rowList = parseRowList();
    STNode closeBracket = parseCloseBracket();
    return STNodeFactory.createTableConstructorExpressionNode(tableKeyword, keySpecifier, openBracket, rowList,
            closeBracket);
}

/**
 * Parse table-keyword.
*
 * @return Table-keyword node
 */
private STNode parseTableKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.TABLE_KEYWORD) {
        return consume();
    } else {
        Solution sol = recover(token, ParserRuleContext.TABLE_KEYWORD);
        return sol.recoveredNode;
    }
}

/**
 * Parse table rows.
 * <p>
 * <code>row-list := [ mapping-constructor-expr (, mapping-constructor-expr)* ]</code>
 *
 * @return Parsed node
 */
private STNode parseRowList() {
    List<STNode> mappings = new ArrayList<>();
    STToken nextToken = peek();
    if (isEndOfTableRowList(nextToken.kind)) {
        return STNodeFactory.createNodeList(new ArrayList<>());
    }

    // Parse the first row; the rest are parsed as (comma, row) pairs.
    STNode mapExpr = parseMappingConstructorExpr();
    mappings.add(mapExpr);

    nextToken = peek();
    STNode leadingComma;
    while (!isEndOfTableRowList(nextToken.kind)) {
        leadingComma = parseComma();
        mappings.add(leadingComma);
        mapExpr = parseMappingConstructorExpr();
        mappings.add(mapExpr);
        nextToken = peek();
    }

    return STNodeFactory.createNodeList(mappings);
}

private boolean isEndOfTableRowList(SyntaxKind tokenKind) {
    switch (tokenKind) {
        case EOF_TOKEN:
        case CLOSE_BRACKET_TOKEN:
            return true;
        case COMMA_TOKEN:
        case OPEN_BRACE_TOKEN:
            return false;
        default:
            return isEndOfMappingConstructor(tokenKind);
    }
}

/**
 * Parse key specifier.
 * <p>
 * <code>key-specifier := key ( [ field-name (, field-name)* ] )</code>
 *
 * @return Parsed node
 */
private STNode parseKeySpecifier() {
    startContext(ParserRuleContext.KEY_SPECIFIER);
    STNode keyKeyword = parseKeyKeyword();
    STNode openParen = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS);
    STNode fieldNames = parseFieldNames();
    STNode closeParen = parseCloseParenthesis();
    endContext();
    return STNodeFactory.createKeySpecifierNode(keyKeyword, openParen, fieldNames, closeParen);
}

/**
 * Parse key-keyword.
*
 * @return Key-keyword node
 */
private STNode parseKeyKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.KEY_KEYWORD) {
        return consume();
    } else {
        Solution sol = recover(token, ParserRuleContext.KEY_KEYWORD);
        return sol.recoveredNode;
    }
}

/**
 * Parse field names.
 * <p>
 * <code>field-name-list := [ field-name (, field-name)* ]</code>
 *
 * @return Parsed node
 */
private STNode parseFieldNames() {
    List<STNode> fieldNames = new ArrayList<>();
    STToken nextToken = peek();
    if (isEndOfFieldNamesList(nextToken.kind)) {
        return STNodeFactory.createNodeList(new ArrayList<>());
    }

    STNode fieldName = parseVariableName();
    fieldNames.add(fieldName);

    nextToken = peek();
    STNode leadingComma;
    while (!isEndOfFieldNamesList(nextToken.kind)) {
        leadingComma = parseComma();
        fieldNames.add(leadingComma);
        fieldName = parseVariableName();
        fieldNames.add(fieldName);
        nextToken = peek();
    }

    return STNodeFactory.createNodeList(fieldNames);
}

private boolean isEndOfFieldNamesList(SyntaxKind tokenKind) {
    switch (tokenKind) {
        case COMMA_TOKEN:
        case IDENTIFIER_TOKEN:
            return false;
        default:
            return true;
    }
}

/**
 * Parse error type descriptor.
 * <p>
 * error-type-descriptor := error [error-type-param]
 * error-type-param := < (detail-type-descriptor | inferred-type-descriptor) >
 * detail-type-descriptor := type-descriptor
 * inferred-type-descriptor := *
 * </p>
 *
 * @return Parsed node
 */
private STNode parseErrorTypeDescriptor() {
    STNode errorKeywordToken = parseErrorKeyWord();
    STNode errorTypeParamsNode;
    STToken nextToken = peek();
    STToken nextNextToken = peek(2);
    // NOTE(review): the GT_TOKEN lookahead on the second token appears to handle a
    // type param with a missing '<' — confirm intended.
    if (nextToken.kind == SyntaxKind.LT_TOKEN || nextNextToken.kind == SyntaxKind.GT_TOKEN) {
        errorTypeParamsNode = parseErrorTypeParamsNode();
    } else {
        errorTypeParamsNode = STNodeFactory.createEmptyNode();
    }
    return STNodeFactory.createErrorTypeDescriptorNode(errorKeywordToken, errorTypeParamsNode);
}

/**
 * Parse error type param node.
* <p>
 * error-type-param := < (detail-type-descriptor | inferred-type-descriptor) >
 * detail-type-descriptor := type-descriptor
 * inferred-type-descriptor := *
 * </p>
 *
 * @return Parsed node
 */
private STNode parseErrorTypeParamsNode() {
    STNode ltToken = parseLTToken();
    STNode parameter;
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.ASTERISK_TOKEN) {
        // Inferred type descriptor: error<*>
        parameter = consume();
    } else {
        parameter = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);
    }
    STNode gtToken = parseGTToken();
    return STNodeFactory.createErrorTypeParamsNode(ltToken, parameter, gtToken);
}

/**
 * Parse error-keyword.
 *
 * @return Parsed error-keyword node
 */
private STNode parseErrorKeyWord() {
    STToken token = peek();
    if (token.kind == SyntaxKind.ERROR_KEYWORD) {
        return consume();
    } else {
        Solution sol = recover(token, ParserRuleContext.ERROR_KEYWORD);
        return sol.recoveredNode;
    }
}

/**
 * Parse stream type descriptor.
 * <p>
 * stream-type-descriptor := stream [stream-type-parameters]
 * stream-type-parameters := < type-descriptor [, type-descriptor]>
 * </p>
 *
 * @return Parsed stream type descriptor node
 */
private STNode parseStreamTypeDescriptor() {
    STNode streamKeywordToken = parseStreamKeyword();
    STNode streamTypeParamsNode;
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.LT_TOKEN) {
        streamTypeParamsNode = parseStreamTypeParamsNode();
    } else {
        // Type parameters are optional.
        streamTypeParamsNode = STNodeFactory.createEmptyNode();
    }
    return STNodeFactory.createStreamTypeDescriptorNode(streamKeywordToken, streamTypeParamsNode);
}

/**
 * Parse stream type params node.
* <p>
 * stream-type-parameters := < type-descriptor [, type-descriptor]>
 * </p>
 *
 * @return Parsed stream type params node
 */
private STNode parseStreamTypeParamsNode() {
    STNode ltToken = parseLTToken();
    startContext(ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC);
    STNode leftTypeDescNode = parseTypeDescriptorInternal(ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC);
    STNode streamTypedesc = parseStreamTypeParamsNode(ltToken, leftTypeDescNode);
    endContext();
    return streamTypedesc;
}

private STNode parseStreamTypeParamsNode(STNode ltToken, STNode leftTypeDescNode) {
    return parseStreamTypeParamsNode(peek().kind, ltToken, leftTypeDescNode);
}

private STNode parseStreamTypeParamsNode(SyntaxKind nextTokenKind, STNode ltToken, STNode leftTypeDescNode) {
    STNode commaToken, rightTypeDescNode, gtToken;

    switch (nextTokenKind) {
        case COMMA_TOKEN:
            commaToken = parseComma();
            rightTypeDescNode = parseTypeDescriptorInternal(ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC);
            break;
        case GT_TOKEN:
            // Single type parameter: the comma and second type desc are absent.
            commaToken = STNodeFactory.createEmptyNode();
            rightTypeDescNode = STNodeFactory.createEmptyNode();
            break;
        default:
            Solution solution = recover(peek(), ParserRuleContext.STREAM_TYPE_FIRST_PARAM_RHS, ltToken,
                    leftTypeDescNode);
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }
            return parseStreamTypeParamsNode(solution.tokenKind, ltToken, leftTypeDescNode);
    }

    gtToken = parseGTToken();
    return STNodeFactory.createStreamTypeParamsNode(ltToken, leftTypeDescNode, commaToken, rightTypeDescNode,
            gtToken);
}

/**
 * Parse stream-keyword.
 *
 * @return Parsed stream-keyword node
 */
private STNode parseStreamKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.STREAM_KEYWORD) {
        return consume();
    } else {
        Solution sol = recover(token, ParserRuleContext.STREAM_KEYWORD);
        return sol.recoveredNode;
    }
}

/**
 * Parse let expression.
* <p>
 * <code>
 * let-expr := let let-var-decl [, let-var-decl]* in expression
 * </code>
 *
 * @return Parsed node
 */
private STNode parseLetExpression(boolean isRhsExpr) {
    STNode letKeyword = parseLetKeyword();
    STNode letVarDeclarations = parseLetVarDeclarations(ParserRuleContext.LET_EXPR_LET_VAR_DECL, isRhsExpr);
    STNode inKeyword = parseInKeyword();

    // At least one let-var-decl is required between "let" and "in".
    letKeyword = addDiagnosticIfListEmpty(letVarDeclarations, letKeyword,
            DiagnosticErrorCode.ERROR_MISSING_LET_VARIABLE_DECLARATION);

    STNode expression = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false);
    return STNodeFactory.createLetExpressionNode(letKeyword, letVarDeclarations, inKeyword, expression);
}

/**
 * Parse let-keyword.
 *
 * @return Let-keyword node
 */
private STNode parseLetKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.LET_KEYWORD) {
        return consume();
    } else {
        Solution sol = recover(token, ParserRuleContext.LET_KEYWORD);
        return sol.recoveredNode;
    }
}

/**
 * Parse let variable declarations.
 * <p>
 * <code>let-var-decl-list := let-var-decl [, let-var-decl]*</code>
 *
 * @return Parsed node
 */
private STNode parseLetVarDeclarations(ParserRuleContext context, boolean isRhsExpr) {
    startContext(context);
    List<STNode> varDecls = new ArrayList<>();
    STToken nextToken = peek();

    if (isEndOfLetVarDeclarations(nextToken.kind)) {
        endContext();
        return STNodeFactory.createEmptyNodeList();
    }

    // Parse the first declaration; the rest are parsed as (comma, decl) pairs.
    STNode varDec = parseLetVarDecl(isRhsExpr);
    varDecls.add(varDec);

    nextToken = peek();
    STNode leadingComma;
    while (!isEndOfLetVarDeclarations(nextToken.kind)) {
        leadingComma = parseComma();
        varDecls.add(leadingComma);
        varDec = parseLetVarDecl(isRhsExpr);
        varDecls.add(varDec);
        nextToken = peek();
    }

    endContext();
    return STNodeFactory.createNodeList(varDecls);
}

private boolean isEndOfLetVarDeclarations(SyntaxKind tokenKind) {
    switch (tokenKind) {
        case COMMA_TOKEN:
        case AT_TOKEN:
            // A comma or an annotation start continues the list.
            return false;
        case IN_KEYWORD:
            return true;
        default:
            return !isTypeStartingToken(tokenKind);
    }
}

/**
 * Parse let variable declaration.
* <p> * <code>let-var-decl := [annots] typed-binding-pattern = expression</code> * * @return Parsed node */ private STNode parseLetVarDecl(boolean isRhsExpr) { STNode annot = parseAnnotations(); STNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.LET_EXPR_LET_VAR_DECL); STNode assign = parseAssignOp(); STNode expression = parseExpression(OperatorPrecedence.ANON_FUNC_OR_LET, isRhsExpr, false); return STNodeFactory.createLetVariableDeclarationNode(annot, typedBindingPattern, assign, expression); } /** * Parse raw backtick string template expression. * <p> * <code>BacktickString := `expression`</code> * * @return Template expression node */ private STNode parseTemplateExpression() { STNode type = STNodeFactory.createEmptyNode(); STNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START); STNode content = parseTemplateContent(); STNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START); return STNodeFactory.createTemplateExpressionNode(SyntaxKind.RAW_TEMPLATE_EXPRESSION, type, startingBackTick, content, endingBackTick); } private STNode parseTemplateContent() { List<STNode> items = new ArrayList<>(); STToken nextToken = peek(); while (!isEndOfBacktickContent(nextToken.kind)) { STNode contentItem = parseTemplateItem(); items.add(contentItem); nextToken = peek(); } return STNodeFactory.createNodeList(items); } private boolean isEndOfBacktickContent(SyntaxKind kind) { switch (kind) { case EOF_TOKEN: case BACKTICK_TOKEN: return true; default: return false; } } private STNode parseTemplateItem() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.INTERPOLATION_START_TOKEN) { return parseInterpolation(); } return consume(); } /** * Parse string template expression. 
     * <p>
     * <code>string-template-expr := string ` expression `</code>
     *
     * @return String template expression node
     */
    private STNode parseStringTemplateExpression() {
        STNode type = parseStringKeyword();
        STNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);
        STNode content = parseTemplateContent();
        STNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_END);
        return STNodeFactory.createTemplateExpressionNode(SyntaxKind.STRING_TEMPLATE_EXPRESSION, type,
                startingBackTick, content, endingBackTick);
    }

    /**
     * Parse <code>string</code> keyword.
     *
     * @return string keyword node
     */
    private STNode parseStringKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.STRING_KEYWORD) {
            return consume();
        } else {
            Solution sol = recover(token, ParserRuleContext.STRING_KEYWORD);
            return sol.recoveredNode;
        }
    }

    /**
     * Parse XML template expression.
     * <p>
     * <code>xml-template-expr := xml BacktickString</code>
     *
     * @return XML template expression
     */
    private STNode parseXMLTemplateExpression() {
        STNode xmlKeyword = parseXMLKeyword();
        STNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);
        // XML content is re-lexed/re-parsed as XML rather than kept as raw template items.
        STNode content = parseTemplateContentAsXML();
        STNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_END);
        return STNodeFactory.createTemplateExpressionNode(SyntaxKind.XML_TEMPLATE_EXPRESSION, xmlKeyword,
                startingBackTick, content, endingBackTick);
    }

    /**
     * Parse <code>xml</code> keyword.
     *
     * @return xml keyword node
     */
    private STNode parseXMLKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.XML_KEYWORD) {
            return consume();
        } else {
            Solution sol = recover(token, ParserRuleContext.XML_KEYWORD);
            return sol.recoveredNode;
        }
    }

    /**
     * Parse the content of the template string as XML. This method first read the
     * input in the same way as the raw-backtick-template (BacktickString). Then
     * it parses the content as XML.
     *
     * @return XML node
     */
    private STNode parseTemplateContentAsXML() {
        // Two-phase parse: flatten the template into a plain string (interpolations
        // replaced by the "${}" placeholder), queue the interpolation nodes in order,
        // then hand both to a dedicated XML parser which re-inserts them.
        ArrayDeque<STNode> expressions = new ArrayDeque<>();
        StringBuilder xmlStringBuilder = new StringBuilder();
        STToken nextToken = peek();
        while (!isEndOfBacktickContent(nextToken.kind)) {
            STNode contentItem = parseTemplateItem();
            if (contentItem.kind == SyntaxKind.TEMPLATE_STRING) {
                xmlStringBuilder.append(((STToken) contentItem).text());
            } else {
                // Placeholder keeps the XML text well-formed; the real node is queued.
                xmlStringBuilder.append("${}");
                expressions.add(contentItem);
            }
            nextToken = peek();
        }

        TextDocument textDocument = TextDocuments.from(xmlStringBuilder.toString());
        AbstractTokenReader tokenReader = new TokenReader(new XMLLexer(textDocument.getCharacterReader()));
        XMLParser xmlParser = new XMLParser(tokenReader, expressions);
        return xmlParser.parse();
    }

    /**
     * Parse interpolation of a back-tick string.
     * <p>
     * <code>
     * interpolation := ${ expression }
     * </code>
     *
     * @return Interpolation node
     */
    private STNode parseInterpolation() {
        startContext(ParserRuleContext.INTERPOLATION);
        STNode interpolStart = parseInterpolationStart();
        STNode expr = parseExpression();

        // Discard any stray tokens between the expression and the closing brace.
        removeAdditionalTokensInInterpolation();

        STNode closeBrace = parseCloseBrace();
        endContext();
        return STNodeFactory.createInterpolationNode(interpolStart, expr, closeBrace);
    }

    /**
     * Parse interpolation start token.
     * <p>
     * <code>interpolation-start := ${</code>
     *
     * @return Interpolation start token
     */
    private STNode parseInterpolationStart() {
        STToken token = peek();
        if (token.kind == SyntaxKind.INTERPOLATION_START_TOKEN) {
            return consume();
        } else {
            Solution sol = recover(token, ParserRuleContext.INTERPOLATION_START_TOKEN);
            return sol.recoveredNode;
        }
    }

    /**
     * Remove if there any tokens left after the expression inside the interpolation.
     */
    private void removeAdditionalTokensInInterpolation() {
        // Consume and report every token until the interpolation's closing brace
        // (or EOF). The loop exits only via the two return branches below.
        while (true) {
            STToken nextToken = peek();
            switch (nextToken.kind) {
                case EOF_TOKEN:
                    return;
                case CLOSE_BRACE_TOKEN:
                    return;
                default:
                    consume();
                    this.errorHandler.reportInvalidNode(nextToken, "invalid token '" + nextToken.text() + "'");
            }
        }
    }

    /**
     * Parse back-tick token.
     *
     * @return Back-tick token
     */
    private STNode parseBacktickToken(ParserRuleContext ctx) {
        STToken token = peek();
        if (token.kind == SyntaxKind.BACKTICK_TOKEN) {
            return consume();
        } else {
            Solution sol = recover(token, ctx);
            return sol.recoveredNode;
        }
    }

    /**
     * Parse table type descriptor.
     * <p>
     * table-type-descriptor := table row-type-parameter [key-constraint]
     * row-type-parameter := type-parameter
     * key-constraint := key-specifier | key-type-constraint
     * key-specifier := key ( [ field-name (, field-name)* ] )
     * key-type-constraint := key type-parameter
     * </p>
     *
     * @return Parsed table type desc node.
     */
    private STNode parseTableTypeDescriptor() {
        STNode tableKeywordToken = parseTableKeyword();
        STNode rowTypeParameterNode = parseRowTypeParameter();
        STNode keyConstraintNode;
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.KEY_KEYWORD) {
            STNode keyKeywordToken = parseKeyKeyword();
            keyConstraintNode = parseKeyConstraint(keyKeywordToken);
        } else {
            // key-constraint is optional.
            keyConstraintNode = STNodeFactory.createEmptyNode();
        }
        return STNodeFactory.createTableTypeDescriptorNode(tableKeywordToken, rowTypeParameterNode, keyConstraintNode);
    }

    /**
     * Parse row type parameter node.
     * <p>
     * row-type-parameter := type-parameter
     * </p>
     *
     * @return Parsed node.
     */
    private STNode parseRowTypeParameter() {
        startContext(ParserRuleContext.ROW_TYPE_PARAM);
        STNode rowTypeParameterNode = parseTypeParameter();
        endContext();
        return rowTypeParameterNode;
    }

    /**
     * Parse type parameter node.
     * <p>
     * type-parameter := < type-descriptor >
     * </p>
     *
     * @return Parsed node
     */
    private STNode parseTypeParameter() {
        STNode ltToken = parseLTToken();
        STNode typeNode = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);
        STNode gtToken = parseGTToken();
        return STNodeFactory.createTypeParameterNode(ltToken, typeNode, gtToken);
    }

    /**
     * Parse key constraint.
     * <p>
     * key-constraint := key-specifier | key-type-constraint
     * </p>
     *
     * @return Parsed node.
     */
    private STNode parseKeyConstraint(STNode keyKeywordToken) {
        return parseKeyConstraint(peek().kind, keyKeywordToken);
    }

    // Dispatch: "key (" is a key-specifier, "key <" is a key-type-constraint;
    // anything else goes through error recovery.
    private STNode parseKeyConstraint(SyntaxKind nextTokenKind, STNode keyKeywordToken) {
        switch (nextTokenKind) {
            case OPEN_PAREN_TOKEN:
                return parseKeySpecifier(keyKeywordToken);
            case LT_TOKEN:
                return parseKeyTypeConstraint(keyKeywordToken);
            default:
                Solution solution = recover(peek(), ParserRuleContext.KEY_CONSTRAINTS_RHS, keyKeywordToken);
                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }
                return parseKeyConstraint(solution.tokenKind, keyKeywordToken);
        }
    }

    /**
     * Parse key specifier given parsed key keyword token.
     * <p>
     * <code>key-specifier := key ( [ field-name (, field-name)* ] )</code>
     *
     * @return Parsed node
     */
    private STNode parseKeySpecifier(STNode keyKeywordToken) {
        startContext(ParserRuleContext.KEY_SPECIFIER);
        STNode openParenToken = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS);
        STNode fieldNamesNode = parseFieldNames();
        STNode closeParenToken = parseCloseParenthesis();
        endContext();
        return STNodeFactory.createKeySpecifierNode(keyKeywordToken, openParenToken, fieldNamesNode, closeParenToken);
    }

    /**
     * Parse key type constraint.
     * <p>
     * key-type-constraint := key type-parameter
     * </p>
     *
     * @return Parsed node
     */
    private STNode parseKeyTypeConstraint(STNode keyKeywordToken) {
        STNode typeParameterNode = parseTypeParameter();
        return STNodeFactory.createKeyTypeConstraintNode(keyKeywordToken, typeParameterNode);
    }

    /**
     * Parse function type descriptor.
     * <p>
     * <code>function-type-descriptor := function function-signature</code>
     *
     * @return Function type descriptor node
     */
    private STNode parseFunctionTypeDesc() {
        startContext(ParserRuleContext.FUNC_TYPE_DESC);
        STNode functionKeyword = parseFunctionKeyword();
        STNode signature = parseFuncSignature(true);
        endContext();
        return STNodeFactory.createFunctionTypeDescriptorNode(functionKeyword, signature);
    }

    /**
     * Parse explicit anonymous function expression.
     * <p>
     * <code>explicit-anonymous-function-expr := [annots] function function-signature anon-func-body</code>
     *
     * @param annots Annotations.
     * @return Anonymous function expression node
     */
    private STNode parseExplicitFunctionExpression(STNode annots) {
        startContext(ParserRuleContext.ANON_FUNC_EXPRESSION);
        STNode funcKeyword = parseFunctionKeyword();
        STNode funcSignature = parseFuncSignature(false);
        // NOTE: the ANON_FUNC_EXPRESSION context opened above is closed inside
        // parseAnonFuncBody(), not here.
        STNode funcBody = parseAnonFuncBody();
        return STNodeFactory.createExplicitAnonymousFunctionExpressionNode(annots, funcKeyword, funcSignature,
                funcBody);
    }

    /**
     * Parse anonymous function body.
     * <p>
     * <code>anon-func-body := block-function-body | expr-function-body</code>
     *
     * @return
     */
    private STNode parseAnonFuncBody() {
        return parseAnonFuncBody(peek().kind);
    }

    // "{" (or EOF, for recovery) starts a block body; "=>" starts an expression
    // body. Both branches close the context opened by the caller.
    private STNode parseAnonFuncBody(SyntaxKind nextTokenKind) {
        switch (nextTokenKind) {
            case OPEN_BRACE_TOKEN:
            case EOF_TOKEN:
                STNode body = parseFunctionBodyBlock(true);
                endContext();
                return body;
            case RIGHT_DOUBLE_ARROW_TOKEN:
                endContext();
                return parseExpressionFuncBody(true);
            default:
                Solution solution = recover(peek(), ParserRuleContext.ANON_FUNC_BODY);
                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }
                return parseAnonFuncBody(solution.tokenKind);
        }
    }

    /**
     * Parse expression function body.
     * <p>
     * <code>expr-function-body := => expression</code>
     *
     * @return Expression function body node
     */
    private STNode parseExpressionFuncBody(boolean isAnon) {
        STNode rightDoubleArrow = parseDoubleRightArrow();
        STNode expression = parseExpression();

        STNode semiColon;
        if (isAnon) {
            // Anonymous function bodies are expressions; no terminating semicolon.
            semiColon = STNodeFactory.createEmptyNode();
        } else {
            semiColon = parseSemicolon();
        }
        return STNodeFactory.createExpressionFunctionBodyNode(rightDoubleArrow, expression, semiColon);
    }

    /**
     * Parse '=>' token.
     *
     * @return Double right arrow token
     */
    private STNode parseDoubleRightArrow() {
        STToken token = peek();
        if (token.kind == SyntaxKind.RIGHT_DOUBLE_ARROW_TOKEN) {
            return consume();
        } else {
            Solution sol = recover(token, ParserRuleContext.EXPR_FUNC_BODY_START);
            return sol.recoveredNode;
        }
    }

    // Parses "params => expression" given already-parsed params. A braced
    // expression like "(x)" is normalized into a param list first; anything
    // else that isn't a name-ref or param list is reported as invalid.
    private STNode parseImplicitAnonFunc(STNode params) {
        switch (params.kind) {
            case SIMPLE_NAME_REFERENCE:
            case INFER_PARAM_LIST:
                break;
            case BRACED_EXPRESSION:
                params = getAnonFuncParam((STBracedExpressionNode) params);
                break;
            default:
                this.errorHandler.reportInvalidNode(null, "lhs must be an identifier or a param list");
        }
        STNode rightDoubleArrow = parseDoubleRightArrow();
        STNode expression = parseExpression();
        return STNodeFactory.createImplicitAnonymousFunctionExpressionNode(params, rightDoubleArrow, expression);
    }

    /**
     * Create a new anon-func-param node from a braced expression.
     *
     * @param params Braced expression
     * @return Anon-func param node
     */
    private STNode getAnonFuncParam(STBracedExpressionNode params) {
        List<STNode> paramList = new ArrayList<>();
        paramList.add(params.expression);
        return STNodeFactory.createImplicitAnonymousFunctionParameters(params.openParen,
                STNodeFactory.createNodeList(paramList), params.closeParen);
    }

    /**
     * Parse implicit anon function expression.
     *
     * @param openParen Open parenthesis token
     * @param firstParam First parameter
     * @return Implicit anon function expression node
     */
    private STNode parseImplicitAnonFunc(STNode openParen, STNode firstParam) {
        List<STNode> paramList = new ArrayList<>();
        paramList.add(firstParam);

        // Parse "(, identifier)*" — separators are kept in the node list.
        STToken nextToken = peek();
        STNode paramEnd;
        STNode param;
        while (!isEndOfAnonFuncParametersList(nextToken.kind)) {
            paramEnd = parseImplicitAnonFuncParamEnd(nextToken.kind);
            if (paramEnd == null) {
                break;
            }
            paramList.add(paramEnd);
            param = parseIdentifier(ParserRuleContext.IMPLICIT_ANON_FUNC_PARAM);
            paramList.add(param);
            nextToken = peek();
        }

        STNode params = STNodeFactory.createNodeList(paramList);
        STNode closeParen = parseCloseParenthesis();
        endContext();

        // Delegate to the single-arg overload to parse "=> expression".
        STNode inferedParams = STNodeFactory.createImplicitAnonymousFunctionParameters(openParen, params, closeParen);
        return parseImplicitAnonFunc(inferedParams);
    }

    private STNode parseImplicitAnonFuncParamEnd() {
        return parseImplicitAnonFuncParamEnd(peek().kind);
    }

    // A comma continues the param list; ")" (returned as null) terminates it.
    private STNode parseImplicitAnonFuncParamEnd(SyntaxKind nextTokenKind) {
        switch (nextTokenKind) {
            case COMMA_TOKEN:
                return parseComma();
            case CLOSE_PAREN_TOKEN:
                return null;
            default:
                Solution solution = recover(peek(), ParserRuleContext.ANON_FUNC_PARAM_RHS);
                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }
                return parseImplicitAnonFuncParamEnd(solution.tokenKind);
        }
    }

    private boolean isEndOfAnonFuncParametersList(SyntaxKind tokenKind) {
        switch (tokenKind) {
            case EOF_TOKEN:
            case CLOSE_BRACE_TOKEN:
            case CLOSE_PAREN_TOKEN:
            case CLOSE_BRACKET_TOKEN:
            case SEMICOLON_TOKEN:
            case RETURNS_KEYWORD:
            case TYPE_KEYWORD:
            case LISTENER_KEYWORD:
            case IF_KEYWORD:
            case WHILE_KEYWORD:
            case OPEN_BRACE_TOKEN:
            case RIGHT_DOUBLE_ARROW_TOKEN:
                return true;
            default:
                return false;
        }
    }

    /**
     * Parse tuple type descriptor.
     * <p>
     * <code>tuple-type-descriptor := [ tuple-member-type-descriptors ]
     * <br/><br/>
     * tuple-member-type-descriptors := member-type-descriptor (, member-type-descriptor)* [, tuple-rest-descriptor]
     *                                  | [ tuple-rest-descriptor ]
     * <br/><br/>
     * tuple-rest-descriptor := type-descriptor ...
     * </code>
     *
     * @return
     */
    private STNode parseTupleTypeDesc() {
        STNode openBracket = parseOpenBracket();
        startContext(ParserRuleContext.TYPE_DESC_IN_TUPLE);
        STNode memberTypeDesc = parseTupleMemberTypeDescList();
        STNode closeBracket = parseCloseBracket();
        endContext();

        // If no member types were parsed, attach the diagnostic to the open bracket.
        openBracket = addDiagnosticIfListEmpty(memberTypeDesc, openBracket,
                DiagnosticErrorCode.ERROR_MISSING_TYPE_DESC);

        return STNodeFactory.createTupleTypeDescriptorNode(openBracket, memberTypeDesc, closeBracket);
    }

    /**
     * Parse tuple member type descriptors.
     *
     * @return Parsed node
     */
    private STNode parseTupleMemberTypeDescList() {
        List<STNode> typeDescList = new ArrayList<>();
        STToken nextToken = peek();

        // Empty tuple: "[]".
        if (isEndOfTypeList(nextToken.kind)) {
            return STNodeFactory.createEmptyNodeList();
        }

        // Parse the first typedesc, and then others.
        STNode typeDesc = parseTypeDescriptorInternal(ParserRuleContext.TYPE_DESC_IN_TUPLE);
        return parseTupleTypeMembers(typeDesc, typeDescList);
    }

    // Parses "(, member)*" after the first member. A trailing "..." turns the
    // pending member into a rest-descriptor and ends the list.
    private STNode parseTupleTypeMembers(STNode typeDesc, List<STNode> typeDescList) {
        STToken nextToken;
        nextToken = peek();
        STNode tupleMemberRhs;
        while (!isEndOfTypeList(nextToken.kind)) {
            tupleMemberRhs = parseTupleMemberRhs(nextToken.kind);
            if (tupleMemberRhs == null) {
                break;
            }
            if (tupleMemberRhs.kind == SyntaxKind.ELLIPSIS_TOKEN) {
                typeDesc = STNodeFactory.createRestDescriptorNode(typeDesc, tupleMemberRhs);
                break;
            }
            typeDescList.add(typeDesc);
            typeDescList.add(tupleMemberRhs);
            typeDesc = parseTypeDescriptorInternal(ParserRuleContext.TYPE_DESC_IN_TUPLE);
            nextToken = peek();
        }

        // The last pending member (possibly the rest-descriptor) is added here.
        typeDescList.add(typeDesc);

        return STNodeFactory.createNodeList(typeDescList);
    }

    private STNode parseTupleMemberRhs() {
        return parseTupleMemberRhs(peek().kind);
    }

    // Comma continues the member list; "]" (returned as null) ends it;
    // "..." marks the preceding member as the tuple rest descriptor.
    private STNode parseTupleMemberRhs(SyntaxKind nextTokenKind) {
        switch (nextTokenKind) {
            case COMMA_TOKEN:
                return parseComma();
            case CLOSE_BRACKET_TOKEN:
                return null;
            case ELLIPSIS_TOKEN:
                return parseEllipsis();
            default:
                Solution solution = recover(peek(), ParserRuleContext.TYPE_DESC_IN_TUPLE_RHS);
                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }
                return parseTupleMemberRhs(solution.tokenKind);
        }
    }

    private boolean isEndOfTypeList(SyntaxKind nextTokenKind) {
        switch (nextTokenKind) {
            case CLOSE_BRACKET_TOKEN:
            case CLOSE_BRACE_TOKEN:
            case CLOSE_PAREN_TOKEN:
            case EOF_TOKEN:
            case EQUAL_TOKEN:
            case OPEN_BRACE_TOKEN:
            case SEMICOLON_TOKEN:
                return true;
            default:
                return false;
        }
    }

    /**
     * Parse table constructor or query expression.
     * <p>
     * <code>
     * table-constructor-or-query-expr := table-constructor-expr | query-expr
     * <br/>
     * table-constructor-expr := table [key-specifier] [ [row-list] ]
     * <br/>
     * query-expr := [query-construct-type] query-pipeline select-clause
     * <br/>
     * query-construct-type := table key-specifier | stream
     * </code>
     *
     * @return Parsed node
     */
    private STNode parseTableConstructorOrQuery(boolean isRhsExpr) {
        startContext(ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_EXPRESSION);
        STNode tableOrQueryExpr = parseTableConstructorOrQuery(peek().kind, isRhsExpr);
        endContext();
        return tableOrQueryExpr;
    }

    // First-level dispatch: "from" starts a bare query, "stream" a stream query,
    // "table" either a table constructor or a table query (decided later).
    private STNode parseTableConstructorOrQuery(SyntaxKind nextTokenKind, boolean isRhsExpr) {
        STNode queryConstructType;
        switch (nextTokenKind) {
            case FROM_KEYWORD:
                queryConstructType = STNodeFactory.createEmptyNode();
                return parseQueryExprRhs(queryConstructType, isRhsExpr);
            case STREAM_KEYWORD:
                queryConstructType = parseStreamKeyword();
                return parseQueryExprRhs(queryConstructType, isRhsExpr);
            case TABLE_KEYWORD:
                STNode tableKeyword = parseTableKeyword();
                return parseTableConstructorOrQuery(tableKeyword, isRhsExpr);
            default:
                Solution solution = recover(peek(), ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_START, isRhsExpr);
                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }
                return parseTableConstructorOrQuery(solution.tokenKind, isRhsExpr);
        }
    }

    private STNode parseTableConstructorOrQuery(STNode tableKeyword, boolean isRhsExpr) {
        return parseTableConstructorOrQuery(peek().kind, tableKeyword, isRhsExpr);
    }

    // After "table": "[" means a table constructor without key specifier;
    // "key" means a key specifier followed by either form.
    private STNode parseTableConstructorOrQuery(SyntaxKind nextTokenKind, STNode tableKeyword, boolean isRhsExpr) {
        STNode keySpecifier;
        switch (nextTokenKind) {
            case OPEN_BRACKET_TOKEN:
                keySpecifier = STNodeFactory.createEmptyNode();
                return parseTableConstructorExprRhs(tableKeyword, keySpecifier);
            case KEY_KEYWORD:
                keySpecifier = parseKeySpecifier();
                return parseTableConstructorOrQueryRhs(tableKeyword, keySpecifier, isRhsExpr);
            default:
                Solution solution = recover(peek(), ParserRuleContext.TABLE_KEYWORD_RHS, tableKeyword, isRhsExpr);
                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }
                return parseTableConstructorOrQuery(solution.tokenKind, tableKeyword, isRhsExpr);
        }
    }

    private STNode parseTableConstructorOrQueryRhs(STNode tableKeyword, STNode keySpecifier, boolean isRhsExpr) {
        return parseTableConstructorOrQueryRhs(peek().kind, tableKeyword, keySpecifier, isRhsExpr);
    }

    // After "table key(...)": "from" resolves to a query expression,
    // "[" resolves to a table constructor.
    private STNode parseTableConstructorOrQueryRhs(SyntaxKind nextTokenKind, STNode tableKeyword, STNode keySpecifier,
                                                   boolean isRhsExpr) {
        switch (nextTokenKind) {
            case FROM_KEYWORD:
                return parseQueryExprRhs(parseQueryConstructType(tableKeyword, keySpecifier), isRhsExpr);
            case OPEN_BRACKET_TOKEN:
                return parseTableConstructorExprRhs(tableKeyword, keySpecifier);
            default:
                Solution solution = recover(peek(), ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_RHS, tableKeyword,
                        keySpecifier, isRhsExpr);
                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }
                return parseTableConstructorOrQueryRhs(solution.tokenKind, tableKeyword, keySpecifier, isRhsExpr);
        }
    }

    /**
     * Parse query construct type.
     * <p>
     * <code>query-construct-type := table key-specifier</code>
     *
     * @return Parsed node
     */
    private STNode parseQueryConstructType(STNode tableKeyword, STNode keySpecifier) {
        return STNodeFactory.createQueryConstructTypeNode(tableKeyword, keySpecifier);
    }

    /**
     * Parse query expression.
     * <p>
     * <code>
     * query-expr-rhs := query-pipeline select-clause
     * <br/>
     * query-pipeline := from-clause intermediate-clause*
     * </code>
     *
     * @param queryConstructType queryConstructType that precedes this rhs
     * @return Parsed node
     */
    private STNode parseQueryExprRhs(STNode queryConstructType, boolean isRhsExpr) {
        switchContext(ParserRuleContext.QUERY_EXPRESSION);
        STNode fromClause = parseFromClause(isRhsExpr);

        // Accumulate intermediate clauses; the select clause is captured
        // separately and must be the last clause of the pipeline.
        List<STNode> clauses = new ArrayList<>();
        STNode intermediateClause;
        STNode selectClause = null;
        while (!isEndOfIntermediateClause(peek().kind)) {
            intermediateClause = parseIntermediateClause(isRhsExpr);
            if (intermediateClause == null) {
                break;
            }

            // Any clause parsed after select is an error; the extra clause is dropped.
            if (selectClause != null) {
                this.errorHandler.reportInvalidNode(null, "extra clauses after select clause");
                break;
            }

            if (intermediateClause.kind == SyntaxKind.SELECT_CLAUSE) {
                selectClause = intermediateClause;
            } else {
                clauses.add(intermediateClause);
            }
        }

        // "do" after the pipeline makes this a query-action instead of a query-expr.
        if (peek().kind == SyntaxKind.DO_KEYWORD) {
            STNode intermediateClauses = STNodeFactory.createNodeList(clauses);
            STNode queryPipeline = STNodeFactory.createQueryPipelineNode(fromClause, intermediateClauses);
            return parseQueryAction(queryPipeline, selectClause);
        }

        if (selectClause == null) {
            // Synthesize a missing select clause and attach the diagnostic to the
            // last clause parsed (or the from-clause when there are none).
            STNode selectKeyword = errorHandler.createMissingToken(SyntaxKind.SELECT_KEYWORD);
            STNode expr = errorHandler.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
            selectClause = STNodeFactory.createSelectClauseNode(selectKeyword, expr);

            if (clauses.isEmpty()) {
                fromClause = errorHandler.addDiagnostics(fromClause,
                        DiagnosticErrorCode.ERROR_MISSING_SELECT_CLAUSE);
            } else {
                int lastIndex = clauses.size() - 1;
                STNode intClauseWithDiagnostic = errorHandler.addDiagnostics(clauses.get(lastIndex),
                        DiagnosticErrorCode.ERROR_MISSING_SELECT_CLAUSE);
                clauses.set(lastIndex, intClauseWithDiagnostic);
            }
        }

        STNode intermediateClauses = STNodeFactory.createNodeList(clauses);
        STNode queryPipeline = STNodeFactory.createQueryPipelineNode(fromClause, intermediateClauses);
        return STNodeFactory.createQueryExpressionNode(queryConstructType, queryPipeline, selectClause);
    }

    /**
     * Parse an intermediate clause.
     * <p>
     * <code>
     * intermediate-clause := from-clause | where-clause | let-clause
     * </code>
     *
     * @return Parsed node
     */
    private STNode parseIntermediateClause(boolean isRhsExpr) {
        return parseIntermediateClause(peek().kind, isRhsExpr);
    }

    // Dispatches to the clause parser for the lookahead keyword. "do" and ";"
    // (returned as null) terminate the pipeline.
    private STNode parseIntermediateClause(SyntaxKind nextTokenKind, boolean isRhsExpr) {
        switch (nextTokenKind) {
            case FROM_KEYWORD:
                return parseFromClause(isRhsExpr);
            case WHERE_KEYWORD:
                return parseWhereClause(isRhsExpr);
            case LET_KEYWORD:
                return parseLetClause(isRhsExpr);
            case SELECT_KEYWORD:
                return parseSelectClause(isRhsExpr);
            case DO_KEYWORD:
            case SEMICOLON_TOKEN:
                return null;
            default:
                Solution solution = recover(peek(), ParserRuleContext.QUERY_PIPELINE_RHS, isRhsExpr);
                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }
                return parseIntermediateClause(solution.tokenKind, isRhsExpr);
        }
    }

    private boolean isEndOfIntermediateClause(SyntaxKind tokenKind) {
        switch (tokenKind) {
            case CLOSE_BRACE_TOKEN:
            case CLOSE_PAREN_TOKEN:
            case CLOSE_BRACKET_TOKEN:
            case OPEN_BRACE_TOKEN:
            case SEMICOLON_TOKEN:
            case PUBLIC_KEYWORD:
            case FUNCTION_KEYWORD:
            case EOF_TOKEN:
            case RESOURCE_KEYWORD:
            case LISTENER_KEYWORD:
            case DOCUMENTATION_LINE:
            case PRIVATE_KEYWORD:
            case RETURNS_KEYWORD:
            case SERVICE_KEYWORD:
            case TYPE_KEYWORD:
            case CONST_KEYWORD:
            case FINAL_KEYWORD:
            case DO_KEYWORD:
                return true;
            default:
                // A binary-operator start also ends the pipeline (the query is an operand).
                return isValidExprRhsStart(tokenKind);
        }
    }

    /**
     * Parse from clause.
     * <p>
     * <code>from-clause := from typed-binding-pattern in expression</code>
     *
     * @return Parsed node
     */
    private STNode parseFromClause(boolean isRhsExpr) {
        STNode fromKeyword = parseFromKeyword();
        STNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.FROM_CLAUSE);
        STNode inKeyword = parseInKeyword();
        // QUERY precedence keeps the iterable expression from consuming the next clause.
        STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
        return STNodeFactory.createFromClauseNode(fromKeyword, typedBindingPattern, inKeyword, expression);
    }

    /**
     * Parse from-keyword.
     *
     * @return From-keyword node
     */
    private STNode parseFromKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.FROM_KEYWORD) {
            return consume();
        } else {
            Solution sol = recover(token, ParserRuleContext.FROM_KEYWORD);
            return sol.recoveredNode;
        }
    }

    /**
     * Parse where clause.
     * <p>
     * <code>where-clause := where expression</code>
     *
     * @return Parsed node
     */
    private STNode parseWhereClause(boolean isRhsExpr) {
        STNode whereKeyword = parseWhereKeyword();
        STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
        return STNodeFactory.createWhereClauseNode(whereKeyword, expression);
    }

    /**
     * Parse where-keyword.
     *
     * @return Where-keyword node
     */
    private STNode parseWhereKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.WHERE_KEYWORD) {
            return consume();
        } else {
            Solution sol = recover(token, ParserRuleContext.WHERE_KEYWORD);
            return sol.recoveredNode;
        }
    }

    /**
     * Parse let clause.
     * <p>
     * <code>let-clause := let let-var-decl [, let-var-decl]* </code>
     *
     * @return Parsed node
     */
    private STNode parseLetClause(boolean isRhsExpr) {
        STNode letKeyword = parseLetKeyword();
        STNode letVarDeclarations = parseLetVarDeclarations(ParserRuleContext.LET_CLAUSE_LET_VAR_DECL, isRhsExpr);

        // If the decl-list is empty, attach the missing-decl diagnostic to the let keyword.
        letKeyword = addDiagnosticIfListEmpty(letVarDeclarations, letKeyword,
                DiagnosticErrorCode.ERROR_MISSING_LET_VARIABLE_DECLARATION);

        return STNodeFactory.createLetClauseNode(letKeyword, letVarDeclarations);
    }

    /**
     * Parse select clause.
     * <p>
     * <code>select-clause := select expression</code>
     *
     * @return Parsed node
     */
    private STNode parseSelectClause(boolean isRhsExpr) {
        STNode selectKeyword = parseSelectKeyword();
        STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
        return STNodeFactory.createSelectClauseNode(selectKeyword, expression);
    }

    /**
     * Parse select-keyword.
     *
     * @return Select-keyword node
     */
    private STNode parseSelectKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.SELECT_KEYWORD) {
            return consume();
        } else {
            Solution sol = recover(token, ParserRuleContext.SELECT_KEYWORD);
            return sol.recoveredNode;
        }
    }

    /**
     * Parse start action.
     * <p>
     * <code>start-action := [annots] start (function-call-expr|method-call-expr|remote-method-call-action)</code>
     *
     * @return Start action node
     */
    private STNode parseStartAction(STNode annots) {
        STNode startKeyword = parseStartKeyword();
        STNode expr = parseActionOrExpression();
        // Only call-shaped expressions may follow `start`; others are reported.
        validateExprInStartAction(expr);
        return STNodeFactory.createStartActionNode(getAnnotations(annots), startKeyword, expr);
    }

    /**
     * Parse start keyword.
     *
     * @return Start keyword node
     */
    private STNode parseStartKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.START_KEYWORD) {
            return consume();
        } else {
            Solution sol = recover(token, ParserRuleContext.START_KEYWORD);
            return sol.recoveredNode;
        }
    }

    // Reports an error unless the expression after `start` is a function call,
    // method call, or remote method call. Missing (recovered) nodes are skipped
    // to avoid double-reporting.
    private void validateExprInStartAction(STNode expression) {
        switch (expression.kind) {
            case FUNCTION_CALL:
            case METHOD_CALL:
            case REMOTE_METHOD_CALL_ACTION:
                break;
            default:
                if (isMissingNode(expression)) {
                    break;
                }

                this.errorHandler.reportInvalidNode(null, "expression followed by the start keyword must be a " +
                        "func-call, a method-call or a remote-method-call");
                break;
        }
    }

    /**
     * Parse flush action.
     * <p>
     * <code>flush-action := flush [peer-worker]</code>
     *
     * @return flush action node
     */
    private STNode parseFlushAction() {
        STNode flushKeyword = parseFlushKeyword();
        STNode peerWorker = parseOptionalPeerWorkerName();
        return STNodeFactory.createFlushActionNode(flushKeyword, peerWorker);
    }

    /**
     * Parse flush keyword.
     *
     * @return flush keyword node
     */
    private STNode parseFlushKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.FLUSH_KEYWORD) {
            return consume();
        } else {
            Solution sol = recover(token, ParserRuleContext.FLUSH_KEYWORD);
            return sol.recoveredNode;
        }
    }

    /**
     * Parse peer worker.
     * <p>
     * <code>peer-worker := worker-name | default</code>
     *
     * @return peer worker name node
     */
    private STNode parseOptionalPeerWorkerName() {
        STToken token = peek();
        switch (token.kind) {
            case IDENTIFIER_TOKEN:
            case DEFAULT_KEYWORD:
                return STNodeFactory.createSimpleNameReferenceNode(consume());
            default:
                // Peer worker is optional here; no recovery on absence.
                return STNodeFactory.createEmptyNode();
        }
    }

    /**
     * Parse intersection type descriptor.
     * <p>
     * intersection-type-descriptor := type-descriptor & type-descriptor
     * </p>
     *
     * @return Parsed node
     */
    private STNode parseIntersectionTypeDescriptor(STNode leftTypeDesc, ParserRuleContext context) {
        // The caller has already verified the lookahead is '&'.
        STNode bitwiseAndToken = consume();
        STNode rightTypeDesc = parseTypeDescriptor(context);
        return STNodeFactory.createIntersectionTypeDescriptorNode(leftTypeDesc, bitwiseAndToken, rightTypeDesc);
    }

    /**
     * Parse singleton type descriptor.
     * <p>
     * singleton-type-descriptor := simple-const-expr
     * simple-const-expr :=
     *   nil-literal
     *   | boolean-literal
     *   | [Sign] int-literal
     *   | [Sign] floating-point-literal
     *   | string-literal
     *   | constant-reference-expr
     * </p>
     */
    private STNode parseSingletonTypeDesc() {
        STNode simpleContExpr = parseConstExpr();
        return STNodeFactory.createSingletonTypeDescriptorNode(simpleContExpr);
    }

    // Parses "[+|-] numeric-literal" as a unary expression (used by singleton types).
    private STNode parseSignedIntOrFloat() {
        STNode operator = parseUnaryOperator();
        STNode literal;
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case HEX_INTEGER_LITERAL:
            case DECIMAL_FLOATING_POINT_LITERAL:
            case HEX_FLOATING_POINT_LITERAL:
                literal = consume();
                break;
            default: // decimal integer literal
                literal = parseDecimalIntLiteral(ParserRuleContext.DECIMAL_INTEGER_LITERAL);
        }
        return STNodeFactory.createUnaryExpressionNode(operator, literal);
    }

    // Determines whether the lookahead begins a singleton type descriptor.
    // Outside an explicit type-desc context, a literal only counts when the
    // token after it can validly follow a type descriptor.
    private boolean isSingletonTypeDescStart(SyntaxKind tokenKind, boolean inTypeDescCtx) {
        STToken nextNextToken = getNextNextToken(tokenKind);
        switch (tokenKind) {
            case STRING_LITERAL:
            case DECIMAL_INTEGER_LITERAL:
            case HEX_INTEGER_LITERAL:
            case DECIMAL_FLOATING_POINT_LITERAL:
            case HEX_FLOATING_POINT_LITERAL:
            case TRUE_KEYWORD:
            case FALSE_KEYWORD:
            case NULL_KEYWORD:
                if (inTypeDescCtx || isValidTypeDescRHSOutSideTypeDescCtx(nextNextToken)) {
                    return true;
                }
                return false;
            case PLUS_TOKEN:
            case MINUS_TOKEN:
                return isIntOrFloat(nextNextToken);
            default:
                return false;
        }
    }

    static boolean isIntOrFloat(STToken token) {
        switch (token.kind) {
            case DECIMAL_INTEGER_LITERAL:
            case HEX_INTEGER_LITERAL:
            case DECIMAL_FLOATING_POINT_LITERAL:
            case HEX_FLOATING_POINT_LITERAL:
                return true;
            default:
                return false;
        }
    }

    // Tokens that may legally follow a type descriptor when we are not already
    // inside a type-desc context (e.g. "?", "|", "&", "[", an identifier, ...).
    private boolean isValidTypeDescRHSOutSideTypeDescCtx(STToken token) {
        switch (token.kind) {
            case IDENTIFIER_TOKEN:
            case QUESTION_MARK_TOKEN:
            case OPEN_PAREN_TOKEN:
            case OPEN_BRACKET_TOKEN:
            case PIPE_TOKEN:
            case BITWISE_AND_TOKEN:
            case OPEN_BRACE_TOKEN:
            case ERROR_KEYWORD:
                return true;
            default:
                return false;
        }
    }

    /**
     * Check whether the parser reached to a valid expression start.
     *
     * @param nextTokenKind Kind of the next immediate token.
     * @param nextTokenIndex Index to the next token.
     * @return <code>true</code> if this is a start of a valid expression. <code>false</code> otherwise
     */
    private boolean isValidExpressionStart(SyntaxKind nextTokenKind, int nextTokenIndex) {
        switch (nextTokenKind) {
            case DECIMAL_INTEGER_LITERAL:
            case HEX_INTEGER_LITERAL:
            case STRING_LITERAL:
            case NULL_KEYWORD:
            case TRUE_KEYWORD:
            case FALSE_KEYWORD:
            case DECIMAL_FLOATING_POINT_LITERAL:
            case HEX_FLOATING_POINT_LITERAL:
            case IDENTIFIER_TOKEN:
                // A literal/identifier starts an expression only when what follows
                // it is a valid expression continuation.
                return isValidExprRhsStart(peek(nextTokenIndex + 1).kind);
            case OPEN_PAREN_TOKEN:
            case CHECK_KEYWORD:
            case CHECKPANIC_KEYWORD:
            case OPEN_BRACE_TOKEN:
            case TYPEOF_KEYWORD:
            case NEGATION_TOKEN:
            case EXCLAMATION_MARK_TOKEN:
            case TRAP_KEYWORD:
            case OPEN_BRACKET_TOKEN:
            case LT_TOKEN:
            case TABLE_KEYWORD:
            case STREAM_KEYWORD:
            case FROM_KEYWORD:
            case ERROR_KEYWORD:
            case LET_KEYWORD:
            case BACKTICK_TOKEN:
            case XML_KEYWORD:
            case STRING_KEYWORD:
            case FUNCTION_KEYWORD:
            case NEW_KEYWORD:
            case LEFT_ARROW_TOKEN:
            case SERVICE_KEYWORD:
                return true;
            case PLUS_TOKEN:
            case MINUS_TOKEN:
                // A sign starts an expression only if what follows it does.
                return isValidExpressionStart(peek(nextTokenIndex).kind, nextTokenIndex + 1);
            case START_KEYWORD:
            case FLUSH_KEYWORD:
            case WAIT_KEYWORD:
            default:
                // NOTE: start/flush/wait intentionally fall through to false here.
                return false;
        }
    }

    /**
     * Parse sync send action.
     * <p>
     * <code>sync-send-action := expression ->> peer-worker</code>
     *
     * @param expression LHS expression of the sync send action
     * @return Sync send action node
     */
    private STNode parseSyncSendAction(STNode expression) {
        STNode syncSendToken = parseSyncSendToken();
        STNode peerWorker = parsePeerWorkerName();
        return STNodeFactory.createSyncSendActionNode(expression, syncSendToken, peerWorker);
    }

    /**
     * Parse peer worker.
* <p> * <code>peer-worker := worker-name | default</code> * * @return peer worker name node */ private STNode parsePeerWorkerName() { STToken token = peek(); switch (token.kind) { case IDENTIFIER_TOKEN: case DEFAULT_KEYWORD: return STNodeFactory.createSimpleNameReferenceNode(consume()); default: Solution sol = recover(token, ParserRuleContext.PEER_WORKER_NAME); return sol.recoveredNode; } } /** * Parse sync send token. * <p> * <code>sync-send-token := ->> </code> * * @return sync send token */ private STNode parseSyncSendToken() { STToken token = peek(); if (token.kind == SyntaxKind.SYNC_SEND_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.SYNC_SEND_TOKEN); return sol.recoveredNode; } } /** * Parse receive action. * <p> * <code>receive-action := single-receive-action | multiple-receive-action</code> * * @return Receive action */ private STNode parseReceiveAction() { STNode leftArrow = parseLeftArrowToken(); STNode receiveWorkers = parseReceiveWorkers(); return STNodeFactory.createReceiveActionNode(leftArrow, receiveWorkers); } private STNode parseReceiveWorkers() { return parseReceiveWorkers(peek().kind); } private STNode parseReceiveWorkers(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case DEFAULT_KEYWORD: case IDENTIFIER_TOKEN: return parsePeerWorkerName(); case OPEN_BRACE_TOKEN: return parseMultipleReceiveWorkers(); default: Solution solution = recover(peek(), ParserRuleContext.RECEIVE_WORKERS); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseReceiveWorkers(solution.tokenKind); } } /** * Parse multiple worker receivers. 
* <p> * <code>{ receive-field (, receive-field)* }</code> * * @return Multiple worker receiver node */ private STNode parseMultipleReceiveWorkers() { startContext(ParserRuleContext.MULTI_RECEIVE_WORKERS); STNode openBrace = parseOpenBrace(); STNode receiveFields = parseReceiveFields(); STNode closeBrace = parseCloseBrace(); endContext(); return STNodeFactory.createReceiveFieldsNode(openBrace, receiveFields, closeBrace); } private STNode parseReceiveFields() { List<STNode> receiveFields = new ArrayList<>(); STToken nextToken = peek(); if (isEndOfReceiveFields(nextToken.kind)) { this.errorHandler.reportMissingTokenError("missing receive field"); return STNodeFactory.createNodeList(new ArrayList<>()); } STNode receiveField = parseReceiveField(); receiveFields.add(receiveField); nextToken = peek(); STNode recieveFieldEnd; while (!isEndOfReceiveFields(nextToken.kind)) { recieveFieldEnd = parseReceiveFieldEnd(nextToken.kind); if (recieveFieldEnd == null) { break; } receiveFields.add(recieveFieldEnd); receiveField = parseReceiveField(); receiveFields.add(receiveField); nextToken = peek(); } return STNodeFactory.createNodeList(receiveFields); } private boolean isEndOfReceiveFields(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case EOF_TOKEN: case CLOSE_BRACKET_TOKEN: return true; default: return false; } } private STNode parseReceiveFieldEnd(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case COMMA_TOKEN: return parseComma(); case CLOSE_BRACE_TOKEN: return null; default: Solution solution = recover(peek(), ParserRuleContext.RECEIVE_FIELD_END); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseReceiveFieldEnd(solution.tokenKind); } } private STNode parseReceiveField() { return parseReceiveField(peek().kind); } /** * Parse receive field. 
* <p> * <code>receive-field := peer-worker | field-name : peer-worker</code> * * @param nextTokenKind Kind of the next token * @return Receiver field node */ private STNode parseReceiveField(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case DEFAULT_KEYWORD: return parseDefaultKeyword(); case IDENTIFIER_TOKEN: STNode identifier = parseIdentifier(ParserRuleContext.RECEIVE_FIELD_NAME); return createQualifiedReceiveField(identifier); default: Solution solution = recover(peek(), ParserRuleContext.RECEIVE_FIELD); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } if (solution.tokenKind == SyntaxKind.IDENTIFIER_TOKEN) { return createQualifiedReceiveField(solution.recoveredNode); } return solution.recoveredNode; } } private STNode createQualifiedReceiveField(STNode identifier) { if (peek().kind != SyntaxKind.COLON_TOKEN) { return identifier; } STNode colon = parseColon(); STNode peerWorker = parsePeerWorkerName(); return STNodeFactory.createQualifiedNameReferenceNode(identifier, colon, peerWorker); } /** * * Parse left arrow (<-) token. * * @return left arrow token */ private STNode parseLeftArrowToken() { STToken token = peek(); if (token.kind == SyntaxKind.LEFT_ARROW_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.LEFT_ARROW_TOKEN); return sol.recoveredNode; } } /** * Parse signed right shift token (>>). * * @return Parsed node */ private STNode parseSignedRightShiftToken() { STNode openGTToken = parseGTToken(); validateRightShiftOperatorWS(openGTToken); STNode endLGToken = parseGTToken(); return STNodeFactory.createDoubleGTTokenNode(openGTToken, endLGToken); } /** * Parse unsigned right shift token (>>>). 
* * @return Parsed node */ private STNode parseUnsignedRightShiftToken() { STNode openGTToken = parseGTToken(); validateRightShiftOperatorWS(openGTToken); STNode middleGTToken = parseGTToken(); validateRightShiftOperatorWS(middleGTToken); STNode endLGToken = parseGTToken(); return STNodeFactory.createTrippleGTTokenNode(openGTToken, middleGTToken, endLGToken); } /** * Validate the whitespace between '>' tokens of right shift operators. * * @param node Preceding node */ private void validateRightShiftOperatorWS(STNode node) { int diff = node.widthWithTrailingMinutiae() - node.width(); if (diff > 0) { this.errorHandler.reportMissingTokenError("no whitespaces allowed between >>"); } } /** * Parse wait action. * <p> * <code>wait-action := single-wait-action | multiple-wait-action | alternate-wait-action </code> * * @return Wait action node */ private STNode parseWaitAction() { STNode waitKeyword = parseWaitKeyword(); if (peek().kind == SyntaxKind.OPEN_BRACE_TOKEN) { return parseMultiWaitAction(waitKeyword); } return parseSingleOrAlternateWaitAction(waitKeyword); } /** * Parse wait keyword. * * @return wait keyword */ private STNode parseWaitKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.WAIT_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.WAIT_KEYWORD); return sol.recoveredNode; } } /** * Parse single or alternate wait actions. 
* <p> * <code> * alternate-or-single-wait-action := wait wait-future-expr (| wait-future-expr)+ * <br/> * wait-future-expr := expression but not mapping-constructor-expr * </code> * * @param waitKeyword wait keyword * @return Single or alternate wait action node */ private STNode parseSingleOrAlternateWaitAction(STNode waitKeyword) { startContext(ParserRuleContext.ALTERNATE_WAIT_EXPRS); List<STNode> waitFutureExprList = new ArrayList<>(); STToken nextToken = peek(); if (isEndOfWaitFutureExprList(nextToken.kind)) { this.errorHandler.reportMissingTokenError("missing wait field"); endContext(); STNode waitFutureExprs = STNodeFactory.createNodeList(waitFutureExprList); return STNodeFactory.createWaitActionNode(waitKeyword, waitFutureExprs); } STNode waitField = parseWaitFutureExpr(); waitFutureExprList.add(waitField); nextToken = peek(); STNode waitFutureExprEnd; while (!isEndOfWaitFutureExprList(nextToken.kind)) { waitFutureExprEnd = parseWaitFutureExprEnd(nextToken.kind, 1); if (waitFutureExprEnd == null) { break; } waitFutureExprList.add(waitFutureExprEnd); waitField = parseWaitFutureExpr(); waitFutureExprList.add(waitField); nextToken = peek(); } endContext(); return STNodeFactory.createWaitActionNode(waitKeyword, waitFutureExprList.get(0)); } private boolean isEndOfWaitFutureExprList(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case EOF_TOKEN: case CLOSE_BRACE_TOKEN: case SEMICOLON_TOKEN: return true; default: return false; } } private STNode parseWaitFutureExpr() { STNode waitFutureExpr = parseExpression(); if (waitFutureExpr.kind == SyntaxKind.MAPPING_CONSTRUCTOR) { this.errorHandler.reportInvalidNode(null, "mapping constructor expression cannot use as å wait expression"); } return waitFutureExpr; } private STNode parseWaitFutureExprEnd(int nextTokenIndex) { return parseWaitFutureExprEnd(peek().kind, 1); } private STNode parseWaitFutureExprEnd(SyntaxKind nextTokenKind, int nextTokenIndex) { switch (nextTokenKind) { case PIPE_TOKEN: return 
parsePipeToken(); default: if (isEndOfWaitFutureExprList(nextTokenKind) || !isValidExpressionStart(nextTokenKind, nextTokenIndex)) { return null; } Solution solution = recover(peek(), ParserRuleContext.WAIT_FUTURE_EXPR_END, nextTokenIndex); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseWaitFutureExprEnd(solution.tokenKind, 0); } } /** * Parse multiple wait action. * <p> * <code>multiple-wait-action := wait { wait-field (, wait-field)* }</code> * * @param waitKeyword Wait keyword * @return Multiple wait action node */ private STNode parseMultiWaitAction(STNode waitKeyword) { startContext(ParserRuleContext.MULTI_WAIT_FIELDS); STNode openBrace = parseOpenBrace(); STNode waitFields = parseWaitFields(); STNode closeBrace = parseCloseBrace(); endContext(); STNode waitFieldsNode = STNodeFactory.createWaitFieldsListNode(openBrace, waitFields, closeBrace); return STNodeFactory.createWaitActionNode(waitKeyword, waitFieldsNode); } private STNode parseWaitFields() { List<STNode> waitFields = new ArrayList<>(); STToken nextToken = peek(); if (isEndOfReceiveFields(nextToken.kind)) { this.errorHandler.reportMissingTokenError("missing wait field"); return STNodeFactory.createEmptyNodeList(); } STNode waitField = parseWaitField(); waitFields.add(waitField); nextToken = peek(); STNode waitFieldEnd; while (!isEndOfReceiveFields(nextToken.kind)) { waitFieldEnd = parseWaitFieldEnd(nextToken.kind); if (waitFieldEnd == null) { break; } waitFields.add(waitFieldEnd); waitField = parseWaitField(); waitFields.add(waitField); nextToken = peek(); } return STNodeFactory.createNodeList(waitFields); } private STNode parseWaitFieldEnd() { return parseWaitFieldEnd(peek().kind); } private STNode parseWaitFieldEnd(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case COMMA_TOKEN: return parseComma(); case CLOSE_BRACE_TOKEN: return null; default: Solution solution = recover(peek(), ParserRuleContext.WAIT_FIELD_END); if (solution.action == Action.REMOVE) { 
return solution.recoveredNode; } return parseWaitFieldEnd(solution.tokenKind); } } private STNode parseWaitField() { return parseWaitField(peek().kind); } /** * Parse wait field. * <p> * <code>wait-field := variable-name | field-name : wait-future-expr</code> * * @param nextTokenKind Kind of the next token * @return Receiver field node */ private STNode parseWaitField(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case IDENTIFIER_TOKEN: STNode identifier = parseIdentifier(ParserRuleContext.WAIT_FIELD_NAME); return createQualifiedWaitField(identifier); default: Solution solution = recover(peek(), ParserRuleContext.WAIT_FIELD_NAME); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseWaitField(solution.tokenKind); } } private STNode createQualifiedWaitField(STNode identifier) { if (peek().kind != SyntaxKind.COLON_TOKEN) { return identifier; } STNode colon = parseColon(); STNode waitFutureExpr = parseWaitFutureExpr(); return STNodeFactory.createWaitFieldNode(identifier, colon, waitFutureExpr); } /** * Parse annot access expression. * <p> * <code> * annot-access-expr := expression .@ annot-tag-reference * <br/> * annot-tag-reference := qualified-identifier | identifier * </code> * * @param lhsExpr Preceding expression of the annot access access * @return Parsed node */ private STNode parseAnnotAccessExpression(STNode lhsExpr) { STNode annotAccessToken = parseAnnotChainingToken(); STNode annotTagReference = parseFieldAccessIdentifier(); return STNodeFactory.createAnnotAccessExpressionNode(lhsExpr, annotAccessToken, annotTagReference); } /** * Parse annot-chaining-token. * * @return Parsed node */ private STNode parseAnnotChainingToken() { STToken token = peek(); if (token.kind == SyntaxKind.ANNOT_CHAINING_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.ANNOT_CHAINING_TOKEN); return sol.recoveredNode; } } /** * Parse field access identifier. 
* <p> * <code>field-access-identifier := qualified-identifier | identifier</code> * * @return Parsed node */ private STNode parseFieldAccessIdentifier() { return parseQualifiedIdentifier(ParserRuleContext.FIELD_ACCESS_IDENTIFIER); } /** * Parse query action. * <p> * <code>query-action := query-pipeline do-clause * <br/> * do-clause := do block-stmt * </code> * * @param queryPipeline Query pipeline * @param selectClause Select clause if any This is only for validation. * @return Query action node */ private STNode parseQueryAction(STNode queryPipeline, STNode selectClause) { if (selectClause != null) { this.errorHandler.reportInvalidNode(null, "cannot have a select clause in query action"); } startContext(ParserRuleContext.DO_CLAUSE); STNode doKeyword = parseDoKeyword(); STNode blockStmt = parseBlockNode(); endContext(); return STNodeFactory.createQueryActionNode(queryPipeline, doKeyword, blockStmt); } /** * Parse 'do' keyword. * * @return do keyword node */ private STNode parseDoKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.DO_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.DO_KEYWORD); return sol.recoveredNode; } } /** * Parse optional field access or xml optional attribute access expression. * <p> * <code> * optional-field-access-expr := expression ?. field-name * <br/> * xml-optional-attribute-access-expr := expression ?. 
xml-attribute-name * <br/> * xml-attribute-name := xml-qualified-name | qualified-identifier | identifier * <br/> * xml-qualified-name := xml-namespace-prefix : identifier * <br/> * xml-namespace-prefix := identifier * </code> * * @param lhsExpr Preceding expression of the optional access * @return Parsed node */ private STNode parseOptionalFieldAccessExpression(STNode lhsExpr) { STNode optionalFieldAccessToken = parseOptionalChainingToken(); STNode fieldName = parseFieldAccessIdentifier(); return STNodeFactory.createOptionalFieldAccessExpressionNode(lhsExpr, optionalFieldAccessToken, fieldName); } /** * Parse optional chaining token. * * @return parsed node */ private STNode parseOptionalChainingToken() { STToken token = peek(); if (token.kind == SyntaxKind.OPTIONAL_CHAINING_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.OPTIONAL_CHAINING_TOKEN); return sol.recoveredNode; } } /** * Parse conditional expression. * <p> * <code>conditional-expr := expression ? expression : expression</code> * * @param lhsExpr Preceding expression of the question mark * @return Parsed node */ private STNode parseConditionalExpression(STNode lhsExpr) { startContext(ParserRuleContext.CONDITIONAL_EXPRESSION); STNode questionMark = parseQuestionMark(); STNode middleExpr = parseExpression(OperatorPrecedence.ELVIS_CONDITIONAL, true, false); STNode colon = parseColon(); endContext(); STNode endExpr = parseExpression(OperatorPrecedence.ELVIS_CONDITIONAL, true, false); return STNodeFactory.createConditionalExpressionNode(lhsExpr, questionMark, middleExpr, colon, endExpr); } /** * Parse enum declaration. * <p> * module-enum-decl := * metadata * [public] enum identifier { enum-member (, enum-member)* } * enum-member := metadata identifier [= const-expr] * </p> * * @param metadata * @param qualifier * * @return Parsed enum node. 
*/ private STNode parseEnumDeclaration(STNode metadata, STNode qualifier) { startContext(ParserRuleContext.MODULE_ENUM_DECLARATION); STNode enumKeywordToken = parseEnumKeyword(); STNode identifier = parseIdentifier(ParserRuleContext.MODULE_ENUM_NAME); STNode openBraceToken = parseOpenBrace(); STNode enumMemberList = parseEnumMemberList(); STNode closeBraceToken = parseCloseBrace(); endContext(); return STNodeFactory.createEnumDeclarationNode(metadata, qualifier, enumKeywordToken, identifier, openBraceToken, enumMemberList, closeBraceToken); } /** * Parse 'enum' keyword. * * @return enum keyword node */ private STNode parseEnumKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.ENUM_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.ENUM_KEYWORD); return sol.recoveredNode; } } /** * Parse enum member list. * <p> * enum-member := metadata identifier [= const-expr] * </p> * * @return enum member list node. */ private STNode parseEnumMemberList() { startContext(ParserRuleContext.ENUM_MEMBER_LIST); List<STNode> enumMemberList = new ArrayList<>(); STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.CLOSE_BRACE_TOKEN) { this.errorHandler.reportMissingTokenError("enum member list cannot be empty"); return STNodeFactory.createNodeList(new ArrayList<>()); } STNode enumMember = parseEnumMember(); nextToken = peek(); STNode enumMemberRhs; while (nextToken.kind != SyntaxKind.CLOSE_BRACE_TOKEN) { enumMemberRhs = parseEnumMemberEnd(nextToken.kind); if (enumMemberRhs == null) { break; } enumMemberList.add(enumMember); enumMemberList.add(enumMemberRhs); enumMember = parseEnumMember(); nextToken = peek(); } enumMemberList.add(enumMember); endContext(); return STNodeFactory.createNodeList(enumMemberList); } /** * Parse enum member. * <p> * enum-member := metadata identifier [= const-expr] * </p> * * @return Parsed enum member node. 
*/ private STNode parseEnumMember() { STToken nextToken = peek(); STNode metadata; switch (nextToken.kind) { case DOCUMENTATION_LINE: case AT_TOKEN: metadata = parseMetaData(nextToken.kind); break; default: metadata = STNodeFactory.createEmptyNode(); } STNode identifierNode = parseIdentifier(ParserRuleContext.ENUM_MEMBER_NAME); return parseEnumMemberRhs(metadata, identifierNode); } private STNode parseEnumMemberRhs(STNode metadata, STNode identifierNode) { return parseEnumMemberRhs(peek().kind, metadata, identifierNode); } private STNode parseEnumMemberRhs(SyntaxKind nextToken, STNode metadata, STNode identifierNode) { STNode equalToken, constExprNode; switch (nextToken) { case EQUAL_TOKEN: equalToken = parseAssignOp(); constExprNode = parseExpression(); break; case COMMA_TOKEN: case CLOSE_BRACE_TOKEN: equalToken = STNodeFactory.createEmptyNode(); constExprNode = STNodeFactory.createEmptyNode(); break; default: Solution solution = recover(peek(), ParserRuleContext.ENUM_MEMBER_RHS, metadata, identifierNode); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseEnumMemberRhs(solution.tokenKind, metadata, identifierNode); } return STNodeFactory.createEnumMemberNode(metadata, identifierNode, equalToken, constExprNode); } private STNode parseEnumMemberEnd() { return parseEnumMemberEnd(peek().kind); } private STNode parseEnumMemberEnd(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case COMMA_TOKEN: return parseComma(); case CLOSE_BRACE_TOKEN: return null; default: Solution solution = recover(peek(), ParserRuleContext.ENUM_MEMBER_END); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseEnumMemberEnd(solution.tokenKind); } } /** * Parse transaction statement. 
* <p> * <code>transaction-stmt := "transaction" block-stmt ;</code> * * @return Transaction statement node */ private STNode parseTransactionStatement() { startContext(ParserRuleContext.TRANSACTION_STMT); STNode transactionKeyword = parseTransactionKeyword(); STNode blockStmt = parseBlockNode(); endContext(); return STNodeFactory.createTransactionStatementNode(transactionKeyword, blockStmt); } /** * Parse transaction keyword. * * @return parsed node */ private STNode parseTransactionKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.TRANSACTION_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.TRANSACTION_KEYWORD); return sol.recoveredNode; } } /** * Parse commit action. * <p> * <code>commit-action := "commit"</code> * * @return Commit action node */ private STNode parseCommitAction() { STNode commitKeyword = parseCommitKeyword(); return STNodeFactory.createCommitActionNode(commitKeyword); } /** * Parse commit keyword. * * @return parsed node */ private STNode parseCommitKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.COMMIT_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.COMMIT_KEYWORD); return sol.recoveredNode; } } /** * Parse retry statement. 
* <p> * <code> * retry-stmt := "retry" retry-spec block-stmt * <br/> * retry-spec := [type-parameter] [ "(" arg-list ")" ] * </code> * * @return Retry statement node */ private STNode parseRetryStatement() { startContext(ParserRuleContext.RETRY_STMT); STNode retryKeyword = parseRetryKeyword(); STNode retryStmt = parseRetryKeywordRhs(retryKeyword); endContext(); return retryStmt; } private STNode parseRetryKeywordRhs(STNode retryKeyword) { return parseRetryKeywordRhs(peek().kind, retryKeyword); } private STNode parseRetryKeywordRhs(SyntaxKind nextTokenKind, STNode retryKeyword) { switch (nextTokenKind) { case LT_TOKEN: STNode typeParam = parseTypeParameter(); return parseRetryTypeParamRhs(retryKeyword, typeParam); case OPEN_PAREN_TOKEN: case OPEN_BRACE_TOKEN: case TRANSACTION_KEYWORD: typeParam = STNodeFactory.createEmptyNode(); return parseRetryTypeParamRhs(nextTokenKind, retryKeyword, typeParam); default: Solution solution = recover(peek(), ParserRuleContext.RETRY_KEYWORD_RHS, retryKeyword); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseRetryKeywordRhs(solution.tokenKind, retryKeyword); } } private STNode parseRetryTypeParamRhs(STNode retryKeyword, STNode typeParam) { return parseRetryTypeParamRhs(peek().kind, retryKeyword, typeParam); } private STNode parseRetryTypeParamRhs(SyntaxKind nextTokenKind, STNode retryKeyword, STNode typeParam) { STNode args; switch (nextTokenKind) { case OPEN_PAREN_TOKEN: args = parseParenthesizedArgList(); break; case OPEN_BRACE_TOKEN: case TRANSACTION_KEYWORD: args = STNodeFactory.createEmptyNode(); break; default: Solution solution = recover(peek(), ParserRuleContext.RETRY_TYPE_PARAM_RHS, retryKeyword, typeParam); return parseRetryTypeParamRhs(solution.tokenKind, retryKeyword, typeParam); } STNode blockStmt = parseRetryBody(); return STNodeFactory.createRetryStatementNode(retryKeyword, typeParam, args, blockStmt); } private STNode parseRetryBody() { return parseRetryBody(peek().kind); } private 
STNode parseRetryBody(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case OPEN_BRACE_TOKEN: return parseBlockNode(); case TRANSACTION_KEYWORD: return parseTransactionStatement(); default: Solution solution = recover(peek(), ParserRuleContext.RETRY_BODY); return parseRetryBody(solution.tokenKind); } } /** * Parse retry keyword. * * @return parsed node */ private STNode parseRetryKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.RETRY_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.RETRY_KEYWORD); return sol.recoveredNode; } } /** * Parse transaction statement. * <p> * <code>rollback-stmt := "rollback" [expression] ";"</code> * * @return Rollback statement node */ private STNode parseRollbackStatement() { startContext(ParserRuleContext.ROLLBACK_STMT); STNode rollbackKeyword = parseRollbackKeyword(); STNode expression; if (peek().kind == SyntaxKind.SEMICOLON_TOKEN) { expression = STNodeFactory.createEmptyNode(); } else { expression = parseExpression(); } STNode semicolon = parseSemicolon(); endContext(); return STNodeFactory.createRollbackStatementNode(rollbackKeyword, expression, semicolon); } /** * Parse rollback keyword. * * @return Rollback keyword node */ private STNode parseRollbackKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.ROLLBACK_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.ROLLBACK_KEYWORD); return sol.recoveredNode; } } /** * Parse transactional expression. * <p> * <code>transactional-expr := "transactional"</code> * * @return Transactional expression node */ private STNode parseTransactionalExpression() { STNode transactionalKeyword = parseTransactionalKeyword(); return STNodeFactory.createTransactionalExpressionNode(transactionalKeyword); } /** * Parse transactional keyword. 
* * @return Transactional keyword node */ private STNode parseTransactionalKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.TRANSACTIONAL_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.ROLLBACK_KEYWORD); return sol.recoveredNode; } } /** * Parse service-constructor-expr. * <p> * <code> * service-constructor-expr := [annots] service service-body-block * <br/> * service-body-block := { service-method-defn* } * <br/> * service-method-defn := metadata [resource] function identifier function-signature method-defn-body * </code> * * @param annots Annotations * @return Service constructor expression node */ private STNode parseServiceConstructorExpression(STNode annots) { startContext(ParserRuleContext.SERVICE_CONSTRUCTOR_EXPRESSION); STNode serviceKeyword = parseServiceKeyword(); STNode serviceBody = parseServiceBody(); endContext(); return STNodeFactory.createServiceConstructorExpressionNode(annots, serviceKeyword, serviceBody); } /** * Parse base16 literal. * <p> * <code> * byte-array-literal := Base16Literal | Base64Literal * <br/> * Base16Literal := base16 WS ` HexGroup* WS ` * <br/> * Base64Literal := base64 WS ` Base64Group* [PaddedBase64Group] WS ` * </code> * * @param kind byte array literal kind * @return parsed node */ private STNode parseByteArrayLiteral(SyntaxKind kind) { STNode type; if (kind == SyntaxKind.BASE16_KEYWORD) { type = parseBase16Keyword(); } else { type = parseBase64Keyword(); } STNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START); STNode content = parseByteArrayContent(kind); STNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_END); return STNodeFactory.createByteArrayLiteralNode(type, startingBackTick, content, endingBackTick); } /** * Parse <code>base16</code> keyword. 
* * @return base16 keyword node */ private STNode parseBase16Keyword() { STToken token = peek(); if (token.kind == SyntaxKind.BASE16_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.BASE16_KEYWORD); return sol.recoveredNode; } } /** * Parse <code>base64</code> keyword. * * @return base64 keyword node */ private STNode parseBase64Keyword() { STToken token = peek(); if (token.kind == SyntaxKind.BASE64_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.BASE64_KEYWORD); return sol.recoveredNode; } } /** * Validate and parse byte array literal content. * An error is reported, if the content is invalid. * * @param kind byte array literal kind * @return parsed node */ private STNode parseByteArrayContent(SyntaxKind kind) { STNode content = STNodeFactory.createEmptyNode(); STToken nextToken = peek(); List<STNode> items = new ArrayList<>(); while (!isEndOfBacktickContent(nextToken.kind)) { content = parseTemplateItem(); items.add(content); nextToken = peek(); } if (items.size() > 1) { this.errorHandler.reportInvalidNode(null, "invalid content within backticks"); } else if (items.size() == 1 && content.kind != SyntaxKind.TEMPLATE_STRING) { this.errorHandler.reportInvalidNode(null, "invalid content within backticks"); } else if (items.size() == 1) { if (kind == SyntaxKind.BASE16_KEYWORD && !BallerinaLexer.isValidBase16LiteralContent(content.toString())) { this.errorHandler.reportInvalidNode(null, "invalid content within backticks"); } else if (kind == SyntaxKind.BASE64_KEYWORD && !BallerinaLexer.isValidBase64LiteralContent(content.toString())) { this.errorHandler.reportInvalidNode(null, "invalid content within backticks"); } } return content; } /** * Parse xml filter expression. 
* <p> * <code>xml-filter-expr := expression .< xml-name-pattern ></code> * * @param lhsExpr Preceding expression of .< token * @return Parsed node */ private STNode parseXMLFilterExpression(STNode lhsExpr) { STNode xmlNamePatternChain = parseXMLFilterExpressionRhs(); return STNodeFactory.createXMLFilterExpressionNode(lhsExpr, xmlNamePatternChain); } /** * Parse xml filter expression rhs. * <p> * <code>filer-expression-rhs := .< xml-name-pattern ></code> * * @return Parsed node */ private STNode parseXMLFilterExpressionRhs() { STNode dotLTToken = parseDotLTToken(); return parseXMLNamePatternChain(dotLTToken); } /** * Parse xml name pattern chain. * <p> * <code> * xml-name-pattern-chain := filer-expression-rhs | xml-element-children-step | xml-element-descendants-step * <br/> * filer-expression-rhs := .< xml-name-pattern > * <br/> * xml-element-children-step := /< xml-name-pattern > * <br/> * xml-element-descendants-step := /**\/<xml-name-pattern > * </code> * * @param startToken Preceding token of xml name pattern * @return Parsed node */ private STNode parseXMLNamePatternChain(STNode startToken) { startContext(ParserRuleContext.XML_NAME_PATTERN); STNode xmlNamePattern = parseXMLNamePattern(); STNode gtToken = parseGTToken(); endContext(); return STNodeFactory.createXMLNamePatternChainingNode(startToken, xmlNamePattern, gtToken); } /** * Parse <code> .< </code> token. * * @return Parsed node */ private STNode parseDotLTToken() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.DOT_LT_TOKEN) { return consume(); } else { Solution sol = recover(nextToken, ParserRuleContext.DOT_LT_TOKEN); return sol.recoveredNode; } } /** * Parse xml name pattern. 
* <p> * <code>xml-name-pattern := xml-atomic-name-pattern [| xml-atomic-name-pattern]*</code> * * @return Parsed node */ private STNode parseXMLNamePattern() { List<STNode> xmlAtomicNamePatternList = new ArrayList<>(); STToken nextToken = peek(); if (isEndOfXMLNamePattern(nextToken.kind)) { this.errorHandler.reportMissingTokenError("missing xml atomic name pattern"); return STNodeFactory.createNodeList(xmlAtomicNamePatternList); } STNode xmlAtomicNamePattern = parseXMLAtomicNamePattern(); xmlAtomicNamePatternList.add(xmlAtomicNamePattern); STNode leadingPipe; while (!isEndOfXMLNamePattern(peek().kind)) { leadingPipe = parsePipeToken(); xmlAtomicNamePatternList.add(leadingPipe); xmlAtomicNamePattern = parseXMLAtomicNamePattern(); xmlAtomicNamePatternList.add(xmlAtomicNamePattern); } return STNodeFactory.createNodeList(xmlAtomicNamePatternList); } private boolean isEndOfXMLNamePattern(SyntaxKind tokenKind) { switch (tokenKind) { case IDENTIFIER_TOKEN: case ASTERISK_TOKEN: case COLON_TOKEN: return false; case GT_TOKEN: case EOF_TOKEN: case AT_TOKEN: case DOCUMENTATION_LINE: case CLOSE_BRACE_TOKEN: case SEMICOLON_TOKEN: case PUBLIC_KEYWORD: case PRIVATE_KEYWORD: case FUNCTION_KEYWORD: case RETURNS_KEYWORD: case SERVICE_KEYWORD: case TYPE_KEYWORD: case LISTENER_KEYWORD: case CONST_KEYWORD: case FINAL_KEYWORD: case RESOURCE_KEYWORD: return true; default: return isSimpleType(tokenKind); } } /** * Parse xml atomic name pattern. 
* <p> * <code> * xml-atomic-name-pattern := * * * | identifier * | xml-namespace-prefix : identifier * | xml-namespace-prefix : * * </code> * * @return Parsed node */ private STNode parseXMLAtomicNamePattern() { startContext(ParserRuleContext.XML_ATOMIC_NAME_PATTERN); STNode atomicNamePattern = parseXMLAtomicNamePatternBody(); endContext(); return atomicNamePattern; } private STNode parseXMLAtomicNamePatternBody() { STToken token = peek(); STNode identifier; switch (token.kind) { case ASTERISK_TOKEN: return consume(); case IDENTIFIER_TOKEN: identifier = consume(); break; default: Solution sol = recover(token, ParserRuleContext.XML_ATOMIC_NAME_PATTERN_START); if (sol.action == Action.REMOVE) { return sol.recoveredNode; } if (sol.recoveredNode.kind == SyntaxKind.ASTERISK_TOKEN) { return sol.recoveredNode; } identifier = sol.recoveredNode; break; } return parseXMLAtomicNameIdentifier(identifier); } private STNode parseXMLAtomicNameIdentifier(STNode identifier) { STToken token = peek(); if (token.kind == SyntaxKind.COLON_TOKEN) { STNode colon = consume(); STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN || nextToken.kind == SyntaxKind.ASTERISK_TOKEN) { STToken endToken = consume(); return STNodeFactory.createXMLAtomicNamePatternNode(identifier, colon, endToken); } } return STNodeFactory.createSimpleNameReferenceNode(identifier); } /** * Parse xml step expression. * <p> * <code>xml-step-expr := expression xml-step-start</code> * * @param lhsExpr Preceding expression of /*, /<, or /**\/< token * @return Parsed node */ private STNode parseXMLStepExpression(STNode lhsExpr) { STNode xmlStepStart = parseXMLStepStart(); return STNodeFactory.createXMLStepExpressionNode(lhsExpr, xmlStepStart); } /** * Parse xml filter expression rhs. 
* <p> * <code> * xml-step-start := * xml-all-children-step * | xml-element-children-step * | xml-element-descendants-step * <br/> * xml-all-children-step := /* * </code> * * @return Parsed node */ private STNode parseXMLStepStart() { STToken token = peek(); STNode startToken; switch (token.kind) { case SLASH_ASTERISK_TOKEN: return consume(); case DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN: startToken = parseDoubleSlashDoubleAsteriskLTToken(); break; case SLASH_LT_TOKEN: default: startToken = parseSlashLTToken(); break; } return parseXMLNamePatternChain(startToken); } /** * Parse <code> /< </code> token. * * @return Parsed node */ private STNode parseSlashLTToken() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.SLASH_LT_TOKEN) { return consume(); } else { Solution sol = recover(nextToken, ParserRuleContext.SLASH_LT_TOKEN); return sol.recoveredNode; } } /** * Parse <code> /< </code> token. * * @return Parsed node */ private STNode parseDoubleSlashDoubleAsteriskLTToken() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN) { return consume(); } else { Solution sol = recover(nextToken, ParserRuleContext.DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN); return sol.recoveredNode; } } /** * Returns 'true' if the list is empty. * <p> * First check whether this node is an instance of STNodeList. * * @param node the nodelist instance * @return returns 'true' if the list is empty */ private boolean isNodeListEmpty(STNode node) { if (!NodeListUtils.isSTNodeList(node)) { throw new IllegalArgumentException("The 'node' should be an instance of STNodeList"); } STNodeList nodeList = (STNodeList) node; return nodeList.isEmpty(); } /** * Returns a clone of the given STNode with the given diagnostic if the nodeList is empty, * otherwise returns the original STNode. 
* * @param nodeList the node list instance * @param target the STNode instance * @param diagnosticCode the DiagnosticCode to be added to the node * @return a clone of the given STNode */ private STNode addDiagnosticIfListEmpty(STNode nodeList, STNode target, DiagnosticCode diagnosticCode) { if (isNodeListEmpty(nodeList)) { return errorHandler.addDiagnostics(target, diagnosticCode); } return target; } /** * Parse binding-patterns. * <p> * <code> * binding-pattern := capture-binding-pattern * | wildcard-binding-pattern * | list-binding-pattern * | mapping-binding-pattern * | functional-binding-pattern * <br/><br/> * * capture-binding-pattern := variable-name * variable-name := identifier * <br/><br/> * * wildcard-binding-pattern := _ * list-binding-pattern := [ list-member-binding-patterns ] * <br/> * list-member-binding-patterns := binding-pattern (, binding-pattern)* [, rest-binding-pattern] * | [ rest-binding-pattern ] * <br/><br/> * * mapping-binding-pattern := { field-binding-patterns } * field-binding-patterns := field-binding-pattern (, field-binding-pattern)* [, rest-binding-pattern] * | [ rest-binding-pattern ] * <br/> * field-binding-pattern := field-name : binding-pattern | variable-name * <br/> * rest-binding-pattern := ... 
variable-name * * <br/><br/> * functional-binding-pattern := functionally-constructible-type-reference ( arg-list-binding-pattern ) * <br/> * arg-list-binding-pattern := positional-arg-binding-patterns [, other-arg-binding-patterns] * | other-arg-binding-patterns * <br/> * positional-arg-binding-patterns := positional-arg-binding-pattern (, positional-arg-binding-pattern)* * <br/> * positional-arg-binding-pattern := binding-pattern * <br/> * other-arg-binding-patterns := named-arg-binding-patterns [, rest-binding-pattern] * | [rest-binding-pattern] * <br/> * named-arg-binding-patterns := named-arg-binding-pattern (, named-arg-binding-pattern)* * <br/> * named-arg-binding-pattern := arg-name = binding-pattern *</code> * * @return binding-pattern node */ private STNode parseBindingPattern() { STToken token = peek(); return parseBindingPattern(token.kind); } private STNode parseBindingPattern(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case OPEN_BRACKET_TOKEN: return parseListBindingPattern(); case IDENTIFIER_TOKEN: return parseCaptureOrWildcardBindingPattern(); case OPEN_BRACE_TOKEN: return parseMappingBindingPattern(); default: Solution sol = recover(peek(), ParserRuleContext.BINDING_PATTERN); if (sol.action == Action.REMOVE) { return sol.recoveredNode; } return parseBindingPattern(sol.tokenKind); } } /** * Parse capture-binding-pattern. 
* <p> * <code> * capture-binding-pattern := variable-name * <br/> * variable-name := identifier * </code> * * @return capture-binding-pattern node */ private STNode parseCaptureOrWildcardBindingPattern() { STToken token = peek(); if (token.kind == SyntaxKind.IDENTIFIER_TOKEN) { STNode varName = parseVariableName(); return createCaptureOrWildcardBP(varName); } else { Solution sol = recover(token, ParserRuleContext.CAPTURE_BINDING_PATTERN); if (sol.action == Action.REMOVE) { return sol.recoveredNode; } return STNodeFactory.createCaptureBindingPatternNode(sol.recoveredNode); } } private STNode createCaptureOrWildcardBP(STNode varName) { STNode bindingPattern; if (isWildcardBP(varName)) { bindingPattern = getWildcardBindingPattern(varName); } else { bindingPattern = STNodeFactory.createCaptureBindingPatternNode(varName); } return bindingPattern; } /** * Parse list-binding-patterns. * <p> * <code> * list-binding-pattern := [ list-member-binding-patterns ] * <br/> * list-member-binding-patterns := binding-pattern (, binding-pattern)* [, rest-binding-pattern] * | [ rest-binding-pattern ] * </code> * * @return list-binding-pattern node */ private STNode parseListBindingPattern() { startContext(ParserRuleContext.LIST_BINDING_PATTERN); STNode openBracket = parseOpenBracket(); List<STNode> bindingPatternsList = new ArrayList<>(); STNode listBindingPattern = parseListBindingPattern(openBracket, bindingPatternsList); endContext(); return listBindingPattern; } private STNode parseListBindingPattern(STNode openBracket, List<STNode> bindingPatternsList) { STNode listBindingPatternMember = parseListBindingPatternMember(); bindingPatternsList.add(listBindingPatternMember); STNode listBindingPattern = parseListBindingPattern(openBracket, listBindingPatternMember, bindingPatternsList); return listBindingPattern; } private STNode parseListBindingPattern(STNode openBracket, STNode firstMember, List<STNode> bindingPatterns) { STNode member = firstMember; STToken token = peek(); STNode 
listBindingPatternRhs = null; while (!isEndOfListBindingPattern(token.kind) && member.kind != SyntaxKind.REST_BINDING_PATTERN) { listBindingPatternRhs = parseListBindingpatternRhs(token.kind); if (listBindingPatternRhs == null) { break; } bindingPatterns.add(listBindingPatternRhs); member = parseListBindingPatternMember(); bindingPatterns.add(member); token = peek(); } STNode restBindingPattern; if (member.kind == SyntaxKind.REST_BINDING_PATTERN) { restBindingPattern = bindingPatterns.remove(bindingPatterns.size() - 1); } else { restBindingPattern = STNodeFactory.createEmptyNode(); } STNode closeBracket = parseCloseBracket(); STNode bindingPatternsNode = STNodeFactory.createNodeList(bindingPatterns); return STNodeFactory.createListBindingPatternNode(openBracket, bindingPatternsNode, restBindingPattern, closeBracket); } private STNode parseListBindingpatternRhs() { return parseListBindingpatternRhs(peek().kind); } private STNode parseListBindingpatternRhs(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case COMMA_TOKEN: return parseComma(); case CLOSE_BRACKET_TOKEN: return null; default: Solution solution = recover(peek(), ParserRuleContext.LIST_BINDING_PATTERN_END_OR_CONTINUE); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseListBindingpatternRhs(solution.tokenKind); } } private boolean isEndOfListBindingPattern(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case CLOSE_BRACKET_TOKEN: case EOF_TOKEN: return true; default: return false; } } /** * Parse list-binding-pattern entry. 
* <p> * <code> * list-binding-pattern := [ list-member-binding-patterns ] * <br/> * list-member-binding-patterns := binding-pattern (, binding-pattern)* [, rest-binding-pattern] * | [ rest-binding-pattern ] * </code> * * @return rest-binding-pattern node */ private STNode parseListBindingPatternMember() { STToken token = peek(); switch (token.kind) { case ELLIPSIS_TOKEN: return parseRestBindingPattern(); default: return parseBindingPattern(); } } private STNode parseRestBindingPattern() { startContext(ParserRuleContext.REST_BINDING_PATTERN); STNode ellipsis = parseEllipsis(); STNode varName = parseVariableName(); endContext(); return STNodeFactory.createRestBindingPatternNode(ellipsis, varName); } /** * Parse Typed-binding-pattern. * <p> * <code> * typed-binding-pattern := inferable-type-descriptor binding-pattern * <br/><br/> * inferable-type-descriptor := type-descriptor | var * </code> * * @return Typed binding pattern node */ private STNode parseTypedBindingPattern(ParserRuleContext context) { STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true); STNode typeBindingPattern = parseTypedBindingPatternTypeRhs(typeDesc, context); return typeBindingPattern; } /** * Parse mapping-binding-patterns. 
* <p> * <code> * mapping-binding-pattern := { field-binding-patterns } * <br/><br/> * field-binding-patterns := field-binding-pattern (, field-binding-pattern)* [, rest-binding-pattern] * | [ rest-binding-pattern ] * <br/><br/> * field-binding-pattern := field-name : binding-pattern | variable-name * </code> * * @return mapping-binding-pattern node */ private STNode parseMappingBindingPattern() { startContext(ParserRuleContext.MAPPING_BINDING_PATTERN); List<STNode> bindingPatterns = new ArrayList<>(); STNode openBrace = parseOpenBrace(); STToken token = peek(); if (isEndOfMappingBindingPattern(token.kind)) { STNode closeBrace = parseCloseBrace(); STNode bindingPatternsNode = STNodeFactory.createNodeList(bindingPatterns); STNode restBindingPattern = STNodeFactory.createEmptyNode(); endContext(); return STNodeFactory.createMappingBindingPatternNode(openBrace, bindingPatternsNode, restBindingPattern, closeBrace); } STNode prevMember = parseMappingBindingPatternMember(); bindingPatterns.add(prevMember); return parseMappingBindingPattern(openBrace, bindingPatterns, prevMember); } private STNode parseMappingBindingPattern(STNode openBrace, List<STNode> bindingPatterns, STNode member) { STToken token = peek(); STNode mappingBindingPatternRhs = null; while (!isEndOfMappingBindingPattern(token.kind) && member.kind != SyntaxKind.REST_BINDING_PATTERN) { mappingBindingPatternRhs = parseMappingBindingpatternEnd(token.kind); if (mappingBindingPatternRhs == null) { break; } bindingPatterns.add(mappingBindingPatternRhs); member = parseMappingBindingPatternMember(); if (member.kind == SyntaxKind.REST_BINDING_PATTERN) { break; } bindingPatterns.add(member); token = peek(); } STNode restBindingPattern; if (member.kind == SyntaxKind.REST_BINDING_PATTERN) { restBindingPattern = member; } else { restBindingPattern = STNodeFactory.createEmptyNode(); } STNode closeBrace = parseCloseBrace(); STNode bindingPatternsNode = STNodeFactory.createNodeList(bindingPatterns); endContext(); return 
STNodeFactory.createMappingBindingPatternNode(openBrace, bindingPatternsNode, restBindingPattern, closeBrace); } /** * Parse mapping-binding-pattern entry. * <p> * <code> * mapping-binding-pattern := { field-binding-patterns } * <br/><br/> * field-binding-patterns := field-binding-pattern (, field-binding-pattern)* [, rest-binding-pattern] * | [ rest-binding-pattern ] * <br/><br/> * field-binding-pattern := field-name : binding-pattern * | variable-name * </code> * * @return mapping-binding-pattern node */ private STNode parseMappingBindingPatternMember() { STToken token = peek(); switch (token.kind) { case ELLIPSIS_TOKEN: return parseRestBindingPattern(); default: return parseFieldBindingPattern(); } } private STNode parseMappingBindingpatternEnd() { return parseMappingBindingpatternEnd(peek().kind); } private STNode parseMappingBindingpatternEnd(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case COMMA_TOKEN: return parseComma(); case CLOSE_BRACE_TOKEN: return null; default: Solution solution = recover(peek(), ParserRuleContext.MAPPING_BINDING_PATTERN_END); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseMappingBindingpatternEnd(solution.tokenKind); } } private STNode parseFieldBindingPattern() { return parseFieldBindingPattern(peek().kind); } /** * Parse field-binding-pattern. 
* <code>field-binding-pattern := field-name : binding-pattern | varname</code> * * @return field-binding-pattern node */ private STNode parseFieldBindingPattern(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case IDENTIFIER_TOKEN: STNode identifier = parseIdentifier(ParserRuleContext.FIELD_BINDING_PATTERN_NAME); STNode fieldBindingPattern = parseFieldBindingPattern(identifier); return fieldBindingPattern; default: Solution solution = recover(peek(), ParserRuleContext.FIELD_BINDING_PATTERN_NAME); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseFieldBindingPattern(solution.tokenKind); } } private STNode parseFieldBindingPattern(STNode identifier) { if (peek().kind != SyntaxKind.COLON_TOKEN) { return STNodeFactory.createFieldBindingPatternVarnameNode(identifier); } STNode colon = parseColon(); STNode bindingPattern = parseBindingPattern(); return STNodeFactory.createFieldBindingPatternFullNode(identifier, colon, bindingPattern); } private boolean isEndOfMappingBindingPattern(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case CLOSE_BRACE_TOKEN: return true; default: return false; } } /* * This parses Typed binding patterns and deals with ambiguity between types, * and binding patterns. An example is 'T[a]'. * The ambiguity lies in between: * 1) Array Type * 2) List binding pattern * 3) Member access expression. */ /** * Parse the component after the type-desc, of a typed-binding-pattern. 
* * @param typeDesc Starting type-desc of the typed-binding-pattern * @return Typed-binding pattern */ private STNode parseTypedBindingPatternTypeRhs(STNode typeDesc, ParserRuleContext context) { return parseTypedBindingPatternTypeRhs(peek().kind, typeDesc, context); } private STNode parseTypedBindingPatternTypeRhs(SyntaxKind nextTokenKind, STNode typeDesc, ParserRuleContext context) { switch (nextTokenKind) { case IDENTIFIER_TOKEN: case OPEN_BRACE_TOKEN: case ERROR_KEYWORD: STNode bindingPattern = parseBindingPattern(nextTokenKind); return STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern); case OPEN_BRACKET_TOKEN: STNode typedBindingPattern = parseTypedBindingPatternOrMemberAccess(typeDesc, true, context); assert typedBindingPattern.kind == SyntaxKind.TYPED_BINDING_PATTERN; return typedBindingPattern; default: Solution solution = recover(peek(), ParserRuleContext.TYPED_BINDING_PATTERN_TYPE_RHS, typeDesc, context); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseTypedBindingPatternTypeRhs(solution.tokenKind, typeDesc, context); } } /** * Parse typed-binding pattern with list, array-type-desc, or member-access-expr. * * @param typeDescOrExpr Type desc or the expression at the start * @param isTypedBindingPattern Is this is a typed-binding-pattern. 
* @return Parsed node */ private STNode parseTypedBindingPatternOrMemberAccess(STNode typeDescOrExpr, boolean isTypedBindingPattern, ParserRuleContext context) { startContext(ParserRuleContext.BRACKETED_LIST); STNode openBracket = parseOpenBracket(); if (isBracketedListEnd(peek().kind)) { return parseAsArrayTypeDesc(typeDescOrExpr, openBracket, STNodeFactory.createEmptyNode(), context); } STNode member = parseBracketedListMember(isTypedBindingPattern); SyntaxKind currentNodeType = getBracketedListNodeType(member); switch (currentNodeType) { case ARRAY_TYPE_DESC: STNode typedBindingPattern = parseAsArrayTypeDesc(typeDescOrExpr, openBracket, member, context); return typedBindingPattern; case LIST_BINDING_PATTERN: STNode bindingPattern = parseAsListBindingPattern(openBracket, new ArrayList<>(), member); STNode typeDesc = getTypeDescFromExpr(typeDescOrExpr); return STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern); case INDEXED_EXPRESSION: return parseAsMemberAccessExpr(typeDescOrExpr, openBracket, member); case NONE: default: break; } STNode memberEnd = parseBracketedListMemberEnd(); if (memberEnd != null) { List<STNode> memberList = new ArrayList<>(); memberList.add(member); memberList.add(memberEnd); STNode bindingPattern = parseAsListBindingPattern(openBracket, memberList); STNode typeDesc = getTypeDescFromExpr(typeDescOrExpr); return STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern); } STNode closeBracket = parseCloseBracket(); endContext(); return parseTypedBindingPatternOrMemberAccessRhs(typeDescOrExpr, openBracket, member, closeBracket, isTypedBindingPattern, context); } private STNode parseAsMemberAccessExpr(STNode typeNameOrExpr, STNode openBracket, STNode member) { STNode closeBracket = parseCloseBracket(); endContext(); STNode keyExpr = STNodeFactory.createNodeList(member); STNode memberAccessExpr = STNodeFactory.createIndexedExpressionNode(typeNameOrExpr, openBracket, keyExpr, closeBracket); return 
parseExpressionRhs(DEFAULT_OP_PRECEDENCE, memberAccessExpr, true, false); } private boolean isBracketedListEnd(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case EOF_TOKEN: case CLOSE_BRACKET_TOKEN: return true; default: return false; } } private STNode parseBracketedListMember(boolean isTypedBindingPattern) { return parseBracketedListMember(peek().kind, isTypedBindingPattern); } /** * Parse a member of an ambiguous bracketed list. This member could be: * 1) Array length * 2) Key expression of a member-access-expr * 3) A member-binding pattern of a list-binding-pattern. * * @param nextTokenKind Kind of the next token * @param isTypedBindingPattern Is this in a definite typed-binding pattern * @return Parsed member node */ private STNode parseBracketedListMember(SyntaxKind nextTokenKind, boolean isTypedBindingPattern) { switch (nextTokenKind) { case DECIMAL_INTEGER_LITERAL: case HEX_INTEGER_LITERAL: case ASTERISK_TOKEN: return parseBasicLiteral(); case CLOSE_BRACKET_TOKEN: return STNodeFactory.createEmptyNode(); case OPEN_BRACE_TOKEN: case ERROR_KEYWORD: case ELLIPSIS_TOKEN: return parseListBindingPatternMember(); case IDENTIFIER_TOKEN: if (isTypedBindingPattern) { STNode identifier = parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF); nextTokenKind = peek().kind; if (nextTokenKind == SyntaxKind.OPEN_PAREN_TOKEN) { return parseListBindingPatternMember(); } return identifier; } break; default: if (!isTypedBindingPattern && isValidExpressionStart(nextTokenKind, 1)) { break; } ParserRuleContext recoverContext = isTypedBindingPattern ? 
ParserRuleContext.LIST_BINDING_MEMBER_OR_ARRAY_LENGTH : ParserRuleContext.BRACKETED_LIST_MEMBER; Solution solution = recover(peek(), recoverContext, isTypedBindingPattern); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseBracketedListMember(solution.tokenKind, isTypedBindingPattern); } STNode expr = parseExpression(); if (isWildcardBP(expr)) { return getWildcardBindingPattern(expr); } if (expr.kind == SyntaxKind.SIMPLE_NAME_REFERENCE || expr.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) { nextTokenKind = peek().kind; if (nextTokenKind == SyntaxKind.OPEN_PAREN_TOKEN) { return parseListBindingPatternMember(); } } return expr; } /** * Treat the current node as an array, and parse the remainder of the binding pattern. * * @param typeDesc Type-desc * @param openBracket Open bracket * @param member Member * @return Parsed node */ private STNode parseAsArrayTypeDesc(STNode typeDesc, STNode openBracket, STNode member, ParserRuleContext context) { typeDesc = getTypeDescFromExpr(typeDesc); STNode closeBracket = parseCloseBracket(); endContext(); return parseTypedBindingPatternOrMemberAccessRhs(typeDesc, openBracket, member, closeBracket, true, context); } private STNode parseBracketedListMemberEnd() { return parseBracketedListMemberEnd(peek().kind); } private STNode parseBracketedListMemberEnd(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case COMMA_TOKEN: return parseComma(); case CLOSE_BRACKET_TOKEN: return null; default: Solution solution = recover(peek(), ParserRuleContext.BRACKETED_LIST_MEMBER_END); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseBracketedListMemberEnd(solution.tokenKind); } } /** * We reach here to break ambiguity of T[a]. This could be: * 1) Array Type Desc * 2) Member access on LHS * 3) Typed-binding-pattern * * @param typeDescOrExpr Type name or the expr that precede the open-bracket. 
* @param openBracket Open bracket * @param member Member * @param closeBracket Open bracket * @param isTypedBindingPattern Is this is a typed-binding-pattern. * @return Specific node that matches to T[a], after solving ambiguity. */ private STNode parseTypedBindingPatternOrMemberAccessRhs(STNode typeDescOrExpr, STNode openBracket, STNode member, STNode closeBracket, boolean isTypedBindingPattern, ParserRuleContext context) { STToken nextToken = peek(); return parseTypedBindingPatternOrMemberAccessRhs(nextToken.kind, typeDescOrExpr, openBracket, member, closeBracket, isTypedBindingPattern, context); } private STNode parseTypedBindingPatternOrMemberAccessRhs(SyntaxKind nextTokenKind, STNode typeDescOrExpr, STNode openBracket, STNode member, STNode closeBracket, boolean isTypedBindingPattern, ParserRuleContext context) { switch (nextTokenKind) { case IDENTIFIER_TOKEN: case OPEN_BRACE_TOKEN: case ERROR_KEYWORD: STNode typeDesc = getTypeDescFromExpr(typeDescOrExpr); STNode arrayTypeDesc = STNodeFactory.createArrayTypeDescriptorNode(typeDesc, openBracket, member, closeBracket); return parseTypedBindingPatternTypeRhs(arrayTypeDesc, context); case OPEN_BRACKET_TOKEN: if (isTypedBindingPattern) { typeDesc = getTypeDescFromExpr(typeDescOrExpr); arrayTypeDesc = STNodeFactory.createArrayTypeDescriptorNode(typeDesc, openBracket, member, closeBracket); return parseTypedBindingPatternTypeRhs(arrayTypeDesc, context); } STNode keyExpr = STNodeFactory.createNodeList(member); STNode expr = STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr, closeBracket); return parseTypedBindingPatternOrMemberAccess(expr, false, context); case QUESTION_MARK_TOKEN: typeDesc = getTypeDescFromExpr(typeDescOrExpr); arrayTypeDesc = STNodeFactory.createArrayTypeDescriptorNode(typeDesc, openBracket, member, closeBracket); typeDesc = parseComplexTypeDescriptor(arrayTypeDesc, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true); return 
parseTypedBindingPatternTypeRhs(typeDesc, context); case PIPE_TOKEN: case BITWISE_AND_TOKEN: return parseComplexTypeDescInTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket, context); case IN_KEYWORD: if (context != ParserRuleContext.FOREACH_STMT && context != ParserRuleContext.FROM_CLAUSE) { break; } return createTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket); case EQUAL_TOKEN: if (context == ParserRuleContext.FOREACH_STMT || context == ParserRuleContext.FROM_CLAUSE) { break; } if (isTypedBindingPattern) { return createTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket); } keyExpr = STNodeFactory.createNodeList(member); return STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr, closeBracket); case SEMICOLON_TOKEN: if (context == ParserRuleContext.FOREACH_STMT || context == ParserRuleContext.FROM_CLAUSE) { break; } return createTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket); case CLOSE_BRACE_TOKEN: case COMMA_TOKEN: if (context == ParserRuleContext.AMBIGUOUS_STMT) { keyExpr = STNodeFactory.createNodeList(member); return STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr, closeBracket); } default: if (isValidExprRhsStart(nextTokenKind)) { keyExpr = STNodeFactory.createNodeList(member); return STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr, closeBracket); } break; } Solution solution = recover(peek(), ParserRuleContext.BRACKETED_LIST_RHS, typeDescOrExpr, openBracket, member, closeBracket, true, context); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseTypedBindingPatternOrMemberAccessRhs(solution.tokenKind, typeDescOrExpr, openBracket, member, closeBracket, true, context); } private STNode createTypedBindingPattern(STNode typeDescOrExpr, STNode openBracket, STNode member, STNode closeBracket) { STNode bindingPatterns; if (isEmpty(member)) { bindingPatterns = 
STNodeFactory.createNodeList(); } else { STNode varName = ((STSimpleNameReferenceNode) member).name; STNode bindingPattern = createCaptureOrWildcardBP(varName); bindingPatterns = STNodeFactory.createNodeList(bindingPattern); } STNode restBindingPattern = STNodeFactory.createEmptyNode(); STNode bindingPattern = STNodeFactory.createListBindingPatternNode(openBracket, bindingPatterns, restBindingPattern, closeBracket); STNode typeDesc = getTypeDescFromExpr(typeDescOrExpr); return STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern); } /** * Parse a union or intersection type that involves array type-desc in lhs. * <p> * e.g: <code>(T[a] & R..)</code> or <code>(T[a] | R.. )</code> * <p> * Complexity occurs in scenarios such as <code>T[a] |/& R[b]</code>. If the token after this * is another binding-pattern, then <code>(T[a] |/& R[b])</code> becomes the type-desc. However, * if the token follows this is an equal or semicolon, then <code>(T[a] |/& R)</code> becomes * the type-desc, and <code>[b]</code> becomes the binding pattern. 
* * @param typeDescOrExpr Type desc or the expression * @param openBracket Open bracket * @param member Member * @param closeBracket Close bracket * @param context COntext in which the typed binding pattern occurs * @return Parsed node */ private STNode parseComplexTypeDescInTypedBindingPattern(STNode typeDescOrExpr, STNode openBracket, STNode member, STNode closeBracket, ParserRuleContext context) { STNode lhsTypeDesc = getTypeDescFromExpr(typeDescOrExpr); lhsTypeDesc = STNodeFactory.createArrayTypeDescriptorNode(lhsTypeDesc, openBracket, member, closeBracket); STNode pipeOrAndToken = parseUnionOrIntersectionToken(); STTypedBindingPatternNode rhsTypedBindingPattern = (STTypedBindingPatternNode) parseTypedBindingPattern(context); STNode newTypeDesc; if (pipeOrAndToken.kind == SyntaxKind.PIPE_TOKEN) { newTypeDesc = STNodeFactory.createUnionTypeDescriptorNode(lhsTypeDesc, pipeOrAndToken, rhsTypedBindingPattern.typeDescriptor); } else { newTypeDesc = STNodeFactory.createIntersectionTypeDescriptorNode(lhsTypeDesc, pipeOrAndToken, rhsTypedBindingPattern.typeDescriptor); } return STNodeFactory.createTypedBindingPatternNode(newTypeDesc, rhsTypedBindingPattern.bindingPattern); } /** * Parse union (|) or intersection (&) type operator. * * @return pipe or bitwise and token */ private STNode parseUnionOrIntersectionToken() { STToken token = peek(); if (token.kind == SyntaxKind.PIPE_TOKEN || token.kind == SyntaxKind.BITWISE_AND_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.UNION_OR_INTERSECTION_TOKEN); return sol.recoveredNode; } } /** * Infer the type of the ambiguous bracketed list, based on the type of the member. 
* * @param memberNode Member node * @return Inferred type of the bracketed list */ private SyntaxKind getBracketedListNodeType(STNode memberNode) { if (isEmpty(memberNode)) { return SyntaxKind.NONE; } if (memberNode.kind.compareTo(SyntaxKind.TYPE_DESC) >= 0 && memberNode.kind.compareTo(SyntaxKind.SINGLETON_TYPE_DESC) <= 0) { return SyntaxKind.TUPLE_TYPE_DESC; } switch (memberNode.kind) { case ASTERISK_TOKEN: return SyntaxKind.ARRAY_TYPE_DESC; case CAPTURE_BINDING_PATTERN: case LIST_BINDING_PATTERN: case REST_BINDING_PATTERN: case MAPPING_BINDING_PATTERN: case WILDCARD_BINDING_PATTERN: return SyntaxKind.LIST_BINDING_PATTERN; case QUALIFIED_NAME_REFERENCE: case REST_TYPE: return SyntaxKind.TUPLE_TYPE_DESC; case DECIMAL_INTEGER_LITERAL: case HEX_INTEGER_LITERAL: case SIMPLE_NAME_REFERENCE: case BRACKETED_LIST: return SyntaxKind.NONE; default: return SyntaxKind.INDEXED_EXPRESSION; } } /** * Create a type-desc out of an expression. * * @param expression Expression * @return Type descriptor */ private STNode getTypeDescFromExpr(STNode expression) { switch (expression.kind) { case INDEXED_EXPRESSION: break; case SIMPLE_NAME_REFERENCE: case QUALIFIED_NAME_REFERENCE: default: return expression; } STIndexedExpressionNode indexedExpr = (STIndexedExpressionNode) expression; STNode memberTypeDesc = getTypeDescFromExpr(indexedExpr.containerExpression); STNode arrayLength = getArrayLength((STNodeList) indexedExpr.keyExpression); return STNodeFactory.createArrayTypeDescriptorNode(memberTypeDesc, indexedExpr.openBracket, arrayLength, indexedExpr.closeBracket); } /* * This section tries to break the ambiguity in parsing a statement that starts with a open-bracket. * The ambiguity lies in between: * 1) Assignment that starts with list binding pattern * 2) Var-decl statement that starts with tuple type * 3) Statement that starts with list constructor, such as sync-send, etc. */ /** * Parse any statement that starts with an open-bracket. 
* * @param annots Annotations attached to the statement. * @return Parsed node */ private STNode parseStatementStartsWithOpenBracket(STNode annots, boolean possibleMappingField) { startContext(ParserRuleContext.ASSIGNMENT_OR_VAR_DECL_STMT); STNode bracketedList = parseStatementStartBracketedList(true); switch (bracketedList.kind) { case LIST_CONSTRUCTOR: switchContext(ParserRuleContext.EXPRESSION_STATEMENT); STListConstructorExpressionNode listConstructor = (STListConstructorExpressionNode) bracketedList; STNode members = listConstructor.expressions; if (possibleMappingField && peek().kind == SyntaxKind.COLON_TOKEN && members.bucketCount() == 1) { startContext(ParserRuleContext.MAPPING_CONSTRUCTOR); STNode colon = parseColon(); STNode fieldNameExpr = getExpression(members.childInBucket(0)); STNode valueExpr = parseExpression(); return STNodeFactory.createComputedNameFieldNode(listConstructor.openBracket, fieldNameExpr, listConstructor.closeBracket, colon, valueExpr); } STNode expr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, bracketedList, false, true); return parseStatementStartWithExpr(expr); case LIST_BINDING_PATTERN: switchContext(ParserRuleContext.ASSIGNMENT_STMT); return parseAssignmentStmtRhs(bracketedList); case TUPLE_TYPE_DESC: default: switchContext(ParserRuleContext.VAR_DECL_STMT); STNode varName = parseBindingPattern(); STNode typedBindingPattern = STNodeFactory.createTypedBindingPatternNode(bracketedList, varName); return parseVarDeclRhs(annots, STNodeFactory.createEmptyNode(), typedBindingPattern, false); } } /** * The bracketed list at the start of a statement can be one of the following. 
* 1) List binding pattern * 2) Tuple type * 3) List constructor * * @param isRoot Is this the root of the list * @return Parsed node */ private STNode parseStatementStartBracketedList(boolean isRoot) { startContext(ParserRuleContext.STMT_START_BRACKETED_LIST); STNode openBracket = parseOpenBracket(); List<STNode> memberList = new ArrayList<>(); while (!isBracketedListEnd(peek().kind)) { STNode member = parseStatementStartBracketedListMember(); SyntaxKind currentNodeType = getStmtStartBracketedListType(member); switch (currentNodeType) { case TUPLE_TYPE_DESC: return parseAsTupleTypeDesc(openBracket, memberList, member); case LIST_BINDING_PATTERN: return parseAsListBindingPattern(openBracket, memberList, member); case LIST_CONSTRUCTOR: return parseAsListConstructor(openBracket, memberList, member); case LIST_BP_OR_LIST_CONSTRUCTOR: return parseAsListBindingPatternOrListConstructor(openBracket, memberList, member); case NONE: default: memberList.add(member); break; } STNode memberEnd = parseBracketedListMemberEnd(); if (memberEnd == null) { break; } memberList.add(memberEnd); } STNode closeBracket = parseCloseBracket(); STNode bracketedList = parseStatementStartBracketedList(openBracket, memberList, closeBracket, isRoot); endContext(); return bracketedList; } private STNode parseStatementStartBracketedListMember() { return parseStatementStartBracketedListMember(peek().kind); } /** * Parse a member of a list-binding-pattern, tuple-type-desc, or * list-constructor-expr, when the parent is ambiguous. * * @param nextTokenKind Kind of the next token. 
* @return Parsed node */ private STNode parseStatementStartBracketedListMember(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case OPEN_BRACKET_TOKEN: return parseStatementStartBracketedList(false); case IDENTIFIER_TOKEN: STNode identifier = parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF); STNode varName = ((STSimpleNameReferenceNode) identifier).name; if (isWildcardBP(varName)) { return getWildcardBindingPattern(varName); } nextTokenKind = peek().kind; if (nextTokenKind == SyntaxKind.ELLIPSIS_TOKEN) { STNode ellipsis = parseEllipsis(); return STNodeFactory.createRestDescriptorNode(identifier, ellipsis); } return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, identifier, false, true); case OPEN_BRACE_TOKEN: return parseMappingBindingPatterOrMappingConstructor(); case ERROR_KEYWORD: if (getNextNextToken(nextTokenKind).kind == SyntaxKind.OPEN_PAREN_TOKEN) { return parseErrorConstructorExpr(); } return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE); case ELLIPSIS_TOKEN: return parseListBindingPatternMember(); case XML_KEYWORD: case STRING_KEYWORD: if (getNextNextToken(nextTokenKind).kind == SyntaxKind.BACKTICK_TOKEN) { return parseExpression(false); } return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE); case TABLE_KEYWORD: case STREAM_KEYWORD: if (getNextNextToken(nextTokenKind).kind == SyntaxKind.LT_TOKEN) { return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE); } return parseExpression(false); default: if (isValidExpressionStart(nextTokenKind, 1)) { return parseExpression(false); } if (isTypeStartingToken(nextTokenKind)) { return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE); } Solution solution = recover(peek(), ParserRuleContext.STMT_START_BRACKETED_LIST_MEMBER); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseStatementStartBracketedListMember(solution.tokenKind); } } private STNode parseAsTupleTypeDesc(STNode openBracket, List<STNode> memberList, STNode member) { 
memberList = getTypeDescList(memberList); switchContext(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN); startContext(ParserRuleContext.TYPE_DESC_IN_TUPLE); STNode tupleTypeMembers = parseTupleTypeMembers(member, memberList); STNode closeBracket = parseCloseBracket(); endContext(); endContext(); return STNodeFactory.createTupleTypeDescriptorNode(openBracket, tupleTypeMembers, closeBracket); } private STNode parseAsListBindingPattern(STNode openBracket, List<STNode> memberList, STNode member) { memberList = getBindingPatternsList(memberList); memberList.add(member); switchContext(ParserRuleContext.LIST_BINDING_PATTERN); STNode listBindingPattern = parseListBindingPattern(openBracket, member, memberList); endContext(); return listBindingPattern; } private STNode parseAsListBindingPattern(STNode openBracket, List<STNode> memberList) { memberList = getBindingPatternsList(memberList); switchContext(ParserRuleContext.LIST_BINDING_PATTERN); STNode listBindingPattern = parseListBindingPattern(openBracket, memberList); endContext(); return listBindingPattern; } private STNode parseAsListBindingPatternOrListConstructor(STNode openBracket, List<STNode> memberList, STNode member) { memberList.add(member); STNode memberEnd = parseBracketedListMemberEnd(); STNode listBindingPatternOrListCons; if (memberEnd == null) { STNode closeBracket = parseCloseBracket(); listBindingPatternOrListCons = parseListBindingPatternOrListConstructor(openBracket, memberList, closeBracket); } else { memberList.add(memberEnd); listBindingPatternOrListCons = parseListBindingPatternOrListConstructor(openBracket, memberList); } return listBindingPatternOrListCons; } private SyntaxKind getStmtStartBracketedListType(STNode memberNode) { if (memberNode.kind.compareTo(SyntaxKind.TYPE_DESC) >= 0 && memberNode.kind.compareTo(SyntaxKind.SINGLETON_TYPE_DESC) <= 0) { return SyntaxKind.TUPLE_TYPE_DESC; } switch (memberNode.kind) { case DECIMAL_INTEGER_LITERAL: case HEX_INTEGER_LITERAL: case ASTERISK_TOKEN: 
return SyntaxKind.ARRAY_TYPE_DESC; case CAPTURE_BINDING_PATTERN: case LIST_BINDING_PATTERN: case REST_BINDING_PATTERN: case WILDCARD_BINDING_PATTERN: return SyntaxKind.LIST_BINDING_PATTERN; case QUALIFIED_NAME_REFERENCE: case REST_TYPE: return SyntaxKind.TUPLE_TYPE_DESC; case LIST_CONSTRUCTOR: case MAPPING_CONSTRUCTOR: return SyntaxKind.LIST_CONSTRUCTOR; case MAPPING_BP_OR_MAPPING_CONSTRUCTOR: return SyntaxKind.LIST_BP_OR_LIST_CONSTRUCTOR; case SIMPLE_NAME_REFERENCE: case BRACKETED_LIST: default: return SyntaxKind.NONE; } } private STNode parseStatementStartBracketedList(STNode openBracket, List<STNode> members, STNode closeBracket, boolean isRoot) { if (!isRoot) { return new STAmbiguousCollectionNode(SyntaxKind.BRACKETED_LIST, openBracket, members, closeBracket); } switch (peek().kind) { case EQUAL_TOKEN: STNode memberBindingPatterns = STNodeFactory.createNodeList(getBindingPatternsList(members)); STNode restBindingPattern = STNodeFactory.createEmptyNode(); return STNodeFactory.createListBindingPatternNode(openBracket, memberBindingPatterns, restBindingPattern, closeBracket); case IDENTIFIER_TOKEN: case OPEN_BRACKET_TOKEN: case OPEN_BRACE_TOKEN: if (members.isEmpty()) { this.errorHandler.reportMissingTokenError("missing member"); } STNode memberTypeDescs = STNodeFactory.createNodeList(getTypeDescList(members)); return STNodeFactory.createTupleTypeDescriptorNode(openBracket, memberTypeDescs, closeBracket); case COLON_TOKEN: default: STNode expressions = STNodeFactory.createNodeList(getExpressionList(members)); return STNodeFactory.createListConstructorExpressionNode(openBracket, expressions, closeBracket); } } private boolean isWildcardBP(STNode node) { switch (node.kind) { case SIMPLE_NAME_REFERENCE: STToken nameToken = (STToken) ((STSimpleNameReferenceNode) node).name; return isUnderscoreToken(nameToken); case IDENTIFIER_TOKEN: return isUnderscoreToken((STToken) node); default: return false; } } private boolean isUnderscoreToken(STToken token) { return 
"_".equals(token.text()); } private STNode getWildcardBindingPattern(STNode identifier) { switch (identifier.kind) { case SIMPLE_NAME_REFERENCE: STNode varName = ((STSimpleNameReferenceNode) identifier).name; return STNodeFactory.createWildcardBindingPatternNode(varName); case IDENTIFIER_TOKEN: return STNodeFactory.createWildcardBindingPatternNode(identifier); default: throw new IllegalStateException(); } } /* * This section tries to break the ambiguity in parsing a statement that starts with a open-brace. */ /** * Parse statements that starts with open-brace. It could be a: * 1) Block statement * 2) Var-decl with mapping binding pattern. * 3) Statement that starts with mapping constructor expression. * * @return Parsed node */ private STNode parseStatementStartsWithOpenBrace() { startContext(ParserRuleContext.AMBIGUOUS_STMT); STNode openBrace = parseOpenBrace(); if (peek().kind == SyntaxKind.CLOSE_BRACE_TOKEN) { STNode closeBrace = parseCloseBrace(); switch (peek().kind) { case EQUAL_TOKEN: switchContext(ParserRuleContext.ASSIGNMENT_STMT); STNode fields = STNodeFactory.createEmptyNodeList(); STNode restBindingPattern = STNodeFactory.createEmptyNode(); STNode bindingPattern = STNodeFactory.createMappingBindingPatternNode(openBrace, fields, restBindingPattern, closeBrace); return parseAssignmentStmtRhs(bindingPattern); case RIGHT_ARROW_TOKEN: case SYNC_SEND_TOKEN: switchContext(ParserRuleContext.EXPRESSION_STATEMENT); fields = STNodeFactory.createEmptyNodeList(); STNode expr = STNodeFactory.createMappingConstructorExpressionNode(openBrace, fields, closeBrace); expr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, expr, false, true); return parseStatementStartWithExpr(expr); default: STNode statements = STNodeFactory.createEmptyNodeList(); endContext(); return STNodeFactory.createBlockStatementNode(openBrace, statements, closeBrace); } } STNode member = parseStatementStartingBracedListFirstMember(); SyntaxKind nodeType = getBracedListType(member); STNode stmt; switch 
(nodeType) { case MAPPING_BINDING_PATTERN: return parseStmtAsMappingBindingPatternStart(openBrace, member); case MAPPING_CONSTRUCTOR: return parseStmtAsMappingConstructorStart(openBrace, member); case MAPPING_BP_OR_MAPPING_CONSTRUCTOR: return parseStmtAsMappingBPOrMappingConsStart(openBrace, member); case BLOCK_STATEMENT: STNode closeBrace = parseCloseBrace(); stmt = STNodeFactory.createBlockStatementNode(openBrace, member, closeBrace); endContext(); return stmt; default: ArrayList<STNode> stmts = new ArrayList<>(); stmts.add(member); STNode statements = parseStatements(stmts); closeBrace = parseCloseBrace(); endContext(); return STNodeFactory.createBlockStatementNode(openBrace, statements, closeBrace); } } /** * Parse the rest of the statement, treating the start as a mapping binding pattern. * * @param openBrace Open brace * @param firstMappingField First member * @return Parsed node */ private STNode parseStmtAsMappingBindingPatternStart(STNode openBrace, STNode firstMappingField) { switchContext(ParserRuleContext.ASSIGNMENT_STMT); startContext(ParserRuleContext.MAPPING_BINDING_PATTERN); List<STNode> bindingPatterns = new ArrayList<>(); if (firstMappingField.kind != SyntaxKind.REST_BINDING_PATTERN) { bindingPatterns.add(getBindingPattern(firstMappingField)); } STNode mappingBP = parseMappingBindingPattern(openBrace, bindingPatterns, firstMappingField); return parseAssignmentStmtRhs(mappingBP); } /** * Parse the rest of the statement, treating the start as a mapping constructor expression. 
* * @param openBrace Open brace * @param firstMember First member * @return Parsed node */ private STNode parseStmtAsMappingConstructorStart(STNode openBrace, STNode firstMember) { switchContext(ParserRuleContext.EXPRESSION_STATEMENT); List<STNode> members = new ArrayList<>(); STNode mappingCons = parseAsMappingConstructor(openBrace, members, firstMember); STNode expr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, mappingCons, false, true); return parseStatementStartWithExpr(expr); } /** * Parse the braced-list as a mapping constructor expression. * * @param openBrace Open brace * @param members members list * @param member Most recently parsed member * @return Parsed node */ private STNode parseAsMappingConstructor(STNode openBrace, List<STNode> members, STNode member) { members.add(member); members = getExpressionList(members); switchContext(ParserRuleContext.MAPPING_CONSTRUCTOR); STNode fields = parseMappingConstructorFields(members); STNode closeBrace = parseCloseBrace(); endContext(); return STNodeFactory.createMappingConstructorExpressionNode(openBrace, fields, closeBrace); } /** * Parse the rest of the statement, treating the start as a mapping binding pattern * or a mapping constructor expression. 
* * @param openBrace Open brace * @param member First member * @return Parsed node */ private STNode parseStmtAsMappingBPOrMappingConsStart(STNode openBrace, STNode member) { startContext(ParserRuleContext.MAPPING_BP_OR_MAPPING_CONSTRUCTOR); List<STNode> members = new ArrayList<>(); members.add(member); STNode bpOrConstructor; STNode memberEnd = parseMappingFieldEnd(); if (memberEnd == null) { STNode closeBrace = parseCloseBrace(); bpOrConstructor = parseMappingBindingPatternOrMappingConstructor(openBrace, members, closeBrace); } else { members.add(memberEnd); bpOrConstructor = parseMappingBindingPatternOrMappingConstructor(openBrace, members);; } switch (bpOrConstructor.kind) { case MAPPING_CONSTRUCTOR: switchContext(ParserRuleContext.EXPRESSION_STATEMENT); STNode expr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, bpOrConstructor, false, true); return parseStatementStartWithExpr(expr); case MAPPING_BINDING_PATTERN: switchContext(ParserRuleContext.ASSIGNMENT_STMT); STNode bindingPattern = getBindingPattern(bpOrConstructor); return parseAssignmentStmtRhs(bindingPattern); case MAPPING_BP_OR_MAPPING_CONSTRUCTOR: default: if (peek().kind == SyntaxKind.EQUAL_TOKEN) { switchContext(ParserRuleContext.ASSIGNMENT_STMT); bindingPattern = getBindingPattern(bpOrConstructor); return parseAssignmentStmtRhs(bindingPattern); } switchContext(ParserRuleContext.EXPRESSION_STATEMENT); expr = getExpression(bpOrConstructor); expr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, expr, false, true); return parseStatementStartWithExpr(expr); } } /** * Parse a member of a braced-list that occurs at the start of a statement. 
* * @return Parsed node */ private STNode parseStatementStartingBracedListFirstMember() { STToken nextToken = peek(); switch (nextToken.kind) { case IDENTIFIER_TOKEN: return parseIdentifierRhsInStmtStartingBrace(); case STRING_LITERAL: STNode key = parseStringLiteral(); if (peek().kind == SyntaxKind.COLON_TOKEN) { STNode colon = parseColon(); STNode valueExpr = parseExpression(); return STNodeFactory.createSpecificFieldNode(key, colon, valueExpr); } STNode expr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, key, false, true); return parseStatementStartWithExpr(expr); case OPEN_BRACKET_TOKEN: return parseStatementStartsWithOpenBracket(null, true); case OPEN_BRACE_TOKEN: return parseStatementStartsWithOpenBrace(); case ELLIPSIS_TOKEN: return parseRestBindingPattern(); default: switchContext(ParserRuleContext.BLOCK_STMT); return parseStatements(); } } /** * Parse the rhs components of an identifier that follows an open brace, * at the start of a statement. i.e: "{foo". * * @return Parsed node */ private STNode parseIdentifierRhsInStmtStartingBrace() { STNode identifier = parseIdentifier(ParserRuleContext.VARIABLE_REF); switch (peek().kind) { case COMMA_TOKEN: STNode colon = STNodeFactory.createEmptyNode(); STNode value = STNodeFactory.createEmptyNode(); return STNodeFactory.createSpecificFieldNode(identifier, colon, value); case COLON_TOKEN: colon = parseColon(); SyntaxKind nextTokenKind = peek().kind; switch (nextTokenKind) { case OPEN_BRACKET_TOKEN: STNode bindingPatternOrExpr = parseListBindingPatternOrListConstructor(); return getMappingField(identifier, colon, bindingPatternOrExpr); case OPEN_BRACE_TOKEN: bindingPatternOrExpr = parseMappingBindingPatterOrMappingConstructor(); return getMappingField(identifier, colon, bindingPatternOrExpr); case IDENTIFIER_TOKEN: return parseQualifiedIdentifierRhsInStmtStartBrace(identifier, colon); default: STNode expr = parseExpression(); return getMappingField(identifier, colon, expr); } default: 
switchContext(ParserRuleContext.BLOCK_STMT); startContext(ParserRuleContext.AMBIGUOUS_STMT); STNode qualifiedIdentifier = parseQualifiedIdentifier(identifier); return parseStatementStartIdentifierRhs(null, qualifiedIdentifier); } } /** * Parse the rhs components of "<code>{ identifier : identifier</code>", * at the start of a statement. i.e: "{foo:bar". * * @return Parsed node */ private STNode parseQualifiedIdentifierRhsInStmtStartBrace(STNode identifier, STNode colon) { STNode secondIdentifier = parseIdentifier(ParserRuleContext.VARIABLE_REF); STNode secondNameRef = STNodeFactory.createSimpleNameReferenceNode(secondIdentifier); if (isWildcardBP(secondIdentifier)) { return getWildcardBindingPattern(secondIdentifier); } SyntaxKind nextTokenKind = peek().kind; STNode qualifiedNameRef = STNodeFactory.createQualifiedNameReferenceNode(identifier, colon, secondNameRef); switch (nextTokenKind) { case COMMA_TOKEN: return qualifiedNameRef; case OPEN_BRACE_TOKEN: case IDENTIFIER_TOKEN: STNode finalKeyword = STNodeFactory.createEmptyNode(); STNode typeBindingPattern = parseTypedBindingPatternTypeRhs(qualifiedNameRef, ParserRuleContext.VAR_DECL_STMT); return parseVarDeclRhs(null, finalKeyword, typeBindingPattern, false); case OPEN_BRACKET_TOKEN: return parseMemberRhsInStmtStartWithBrace(identifier, colon, secondNameRef); case QUESTION_MARK_TOKEN: STNode typeDesc = parseComplexTypeDescriptor(qualifiedNameRef, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true); finalKeyword = STNodeFactory.createEmptyNode(); typeBindingPattern = parseTypedBindingPatternTypeRhs(typeDesc, ParserRuleContext.VAR_DECL_STMT); return parseVarDeclRhs(null, finalKeyword, typeBindingPattern, false); case EQUAL_TOKEN: case SEMICOLON_TOKEN: return parseStatementStartWithExpr(qualifiedNameRef); case PIPE_TOKEN: case BITWISE_AND_TOKEN: default: return parseMemberWithExprInRhs(identifier, colon, secondNameRef, secondNameRef); } } private SyntaxKind getBracedListType(STNode member) { switch 
(member.kind) { case FIELD_BINDING_PATTERN: case CAPTURE_BINDING_PATTERN: case LIST_BINDING_PATTERN: case MAPPING_BINDING_PATTERN: case WILDCARD_BINDING_PATTERN: return SyntaxKind.MAPPING_BINDING_PATTERN; case SPECIFIC_FIELD: STNode expr = ((STSpecificFieldNode) member).valueExpr; if (expr == null || expr.kind == SyntaxKind.SIMPLE_NAME_REFERENCE || expr.kind == SyntaxKind.LIST_BP_OR_LIST_CONSTRUCTOR || expr.kind == SyntaxKind.MAPPING_BP_OR_MAPPING_CONSTRUCTOR) { return SyntaxKind.MAPPING_BP_OR_MAPPING_CONSTRUCTOR; } return SyntaxKind.MAPPING_CONSTRUCTOR; case SPREAD_FIELD: case COMPUTED_NAME_FIELD: return SyntaxKind.MAPPING_CONSTRUCTOR; case SIMPLE_NAME_REFERENCE: case QUALIFIED_NAME_REFERENCE: case LIST_BP_OR_LIST_CONSTRUCTOR: case MAPPING_BP_OR_MAPPING_CONSTRUCTOR: case REST_BINDING_PATTERN: return SyntaxKind.MAPPING_BP_OR_MAPPING_CONSTRUCTOR; case LIST: return SyntaxKind.BLOCK_STATEMENT; default: return SyntaxKind.NONE; } } /** * Parse mapping binding pattern or mapping constructor. * * @return Parsed node */ private STNode parseMappingBindingPatterOrMappingConstructor() { startContext(ParserRuleContext.MAPPING_BP_OR_MAPPING_CONSTRUCTOR); STNode openBrace = parseOpenBrace(); List<STNode> memberList = new ArrayList<>(); return parseMappingBindingPatternOrMappingConstructor(openBrace, memberList); } private boolean isBracedListEnd(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case EOF_TOKEN: case CLOSE_BRACE_TOKEN: return true; default: return false; } } private STNode parseMappingBindingPatternOrMappingConstructor(STNode openBrace, List<STNode> memberList) { STToken nextToken = peek(); while (!isBracedListEnd(nextToken.kind)) { STNode member = parseMappingBindingPatterOrMappingConstructorMember(nextToken.kind); SyntaxKind currentNodeType = getTypeOfMappingBPOrMappingCons(member); switch (currentNodeType) { case MAPPING_CONSTRUCTOR: return parseAsMappingConstructor(openBrace, memberList, member); case MAPPING_BINDING_PATTERN: return 
parseAsMappingBindingPattern(openBrace, memberList, member); case MAPPING_BP_OR_MAPPING_CONSTRUCTOR: default: memberList.add(member); break; } STNode memberEnd = parseMappingFieldEnd(); if (memberEnd == null) { break; } memberList.add(memberEnd); nextToken = peek(); } STNode closeBrace = parseCloseBrace(); return parseMappingBindingPatternOrMappingConstructor(openBrace, memberList, closeBrace); } private STNode parseMappingBindingPatterOrMappingConstructorMember(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case IDENTIFIER_TOKEN: STNode key = parseIdentifier(ParserRuleContext.MAPPING_FIELD_NAME); return parseMappingFieldRhs(key); case STRING_LITERAL: key = parseStringLiteral(); STNode colon = parseColon(); STNode valueExpr = parseExpression(); return STNodeFactory.createSpecificFieldNode(key, colon, valueExpr); case OPEN_BRACKET_TOKEN: return parseComputedField(); case ELLIPSIS_TOKEN: STNode ellipsis = parseEllipsis(); STNode expr = parseExpression(); if (expr.kind == SyntaxKind.SIMPLE_NAME_REFERENCE) { return STNodeFactory.createRestBindingPatternNode(ellipsis, expr); } return STNodeFactory.createSpreadFieldNode(ellipsis, expr); default: Solution solution = recover(peek(), ParserRuleContext.MAPPING_BP_OR_MAPPING_CONSTRUCTOR_MEMBER); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseListBindingPatternOrListConstructorMember(solution.tokenKind); } } private STNode parseMappingFieldRhs(STNode key) { STToken nextToken = peek(); return parseMappingFieldRhs(nextToken.kind, key); } private STNode parseMappingFieldRhs(SyntaxKind tokenKind, STNode key) { STNode colon; STNode valueExpr; switch (tokenKind) { case COLON_TOKEN: colon = parseColon(); return parseMappingFieldValue(key, colon); case COMMA_TOKEN: case CLOSE_BRACE_TOKEN: colon = STNodeFactory.createEmptyNode(); valueExpr = STNodeFactory.createEmptyNode(); return STNodeFactory.createSpecificFieldNode(key, colon, valueExpr); default: STToken token = peek(); Solution solution 
= recover(token, ParserRuleContext.SPECIFIC_FIELD_RHS, key); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseSpecificFieldRhs(solution.tokenKind, key); } } private STNode parseMappingFieldValue(STNode key, STNode colon) { STNode expr; switch (peek().kind) { case IDENTIFIER_TOKEN: expr = parseExpression(); break; case OPEN_BRACKET_TOKEN: expr = parseListBindingPatternOrListConstructor(); break; case OPEN_BRACE_TOKEN: expr = parseMappingBindingPatterOrMappingConstructor(); break; default: expr = parseExpression(); break; } return STNodeFactory.createSpecificFieldNode(key, colon, expr); } private SyntaxKind getTypeOfMappingBPOrMappingCons(STNode memberNode) { switch (memberNode.kind) { case FIELD_BINDING_PATTERN: case MAPPING_BINDING_PATTERN: case CAPTURE_BINDING_PATTERN: case LIST_BINDING_PATTERN: case WILDCARD_BINDING_PATTERN: return SyntaxKind.MAPPING_BINDING_PATTERN; case SPECIFIC_FIELD: STNode expr = ((STSpecificFieldNode) memberNode).valueExpr; if (expr == null || expr.kind == SyntaxKind.SIMPLE_NAME_REFERENCE || expr.kind == SyntaxKind.LIST_BP_OR_LIST_CONSTRUCTOR || expr.kind == SyntaxKind.MAPPING_BP_OR_MAPPING_CONSTRUCTOR) { return SyntaxKind.MAPPING_BP_OR_MAPPING_CONSTRUCTOR; } return SyntaxKind.MAPPING_CONSTRUCTOR; case SPREAD_FIELD: case COMPUTED_NAME_FIELD: return SyntaxKind.MAPPING_CONSTRUCTOR; case MAPPING_BP_OR_MAPPING_CONSTRUCTOR: case SIMPLE_NAME_REFERENCE: case QUALIFIED_NAME_REFERENCE: case LIST_BP_OR_LIST_CONSTRUCTOR: case REST_BINDING_PATTERN: default: return SyntaxKind.MAPPING_BP_OR_MAPPING_CONSTRUCTOR; } } private STNode parseMappingBindingPatternOrMappingConstructor(STNode openBrace, List<STNode> members, STNode closeBrace) { endContext(); return new STAmbiguousCollectionNode(SyntaxKind.MAPPING_BP_OR_MAPPING_CONSTRUCTOR, openBrace, members, closeBrace); } private STNode parseAsMappingBindingPattern(STNode openBrace, List<STNode> members, STNode member) { members.add(member); members = 
getBindingPatternsList(members); switchContext(ParserRuleContext.MAPPING_BINDING_PATTERN); return parseMappingBindingPattern(openBrace, members, member); } /** * Parse list binding pattern or list constructor. * * @return Parsed node */ private STNode parseListBindingPatternOrListConstructor() { startContext(ParserRuleContext.BRACKETED_LIST); STNode openBracket = parseOpenBracket(); List<STNode> memberList = new ArrayList<>(); return parseListBindingPatternOrListConstructor(openBracket, memberList); } private STNode parseListBindingPatternOrListConstructor(STNode openBracket, List<STNode> memberList) { STToken nextToken = peek(); while (!isBracketedListEnd(nextToken.kind)) { STNode member = parseListBindingPatternOrListConstructorMember(nextToken.kind); SyntaxKind currentNodeType = getParsingNodeTypeOfListBPOrListCons(member); switch (currentNodeType) { case LIST_CONSTRUCTOR: return parseAsListConstructor(openBracket, memberList, member); case LIST_BINDING_PATTERN: return parseAsListBindingPattern(openBracket, memberList, member); case LIST_BP_OR_LIST_CONSTRUCTOR: default: memberList.add(member); break; } STNode memberEnd = parseBracketedListMemberEnd(); if (memberEnd == null) { break; } memberList.add(memberEnd); nextToken = peek(); } STNode closeBracket = parseCloseBracket(); return parseListBindingPatternOrListConstructor(openBracket, memberList, closeBracket); } private STNode parseListBindingPatternOrListConstructorMember(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case OPEN_BRACKET_TOKEN: return parseListBindingPatternOrListConstructor(); case IDENTIFIER_TOKEN: STNode identifier = parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF); if (isWildcardBP(identifier)) { return getWildcardBindingPattern(identifier); } return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, identifier, false, false); case OPEN_BRACE_TOKEN: return parseMappingBindingPatterOrMappingConstructor(); case ELLIPSIS_TOKEN: return parseListBindingPatternMember(); default: if 
(isValidExpressionStart(nextTokenKind, 1)) { return parseExpression(); } Solution solution = recover(peek(), ParserRuleContext.LIST_BP_OR_LIST_CONSTRUCTOR_MEMBER); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseListBindingPatternOrListConstructorMember(solution.tokenKind); } } private SyntaxKind getParsingNodeTypeOfListBPOrListCons(STNode memberNode) { switch (memberNode.kind) { case CAPTURE_BINDING_PATTERN: case LIST_BINDING_PATTERN: case REST_BINDING_PATTERN: case MAPPING_BINDING_PATTERN: case WILDCARD_BINDING_PATTERN: return SyntaxKind.LIST_BINDING_PATTERN; case SIMPLE_NAME_REFERENCE: case LIST_BP_OR_LIST_CONSTRUCTOR: return SyntaxKind.LIST_BP_OR_LIST_CONSTRUCTOR; default: return SyntaxKind.LIST_CONSTRUCTOR; } } private STNode parseAsListConstructor(STNode openBracket, List<STNode> memberList, STNode member) { memberList.add(member); memberList = getExpressionList(memberList); switchContext(ParserRuleContext.LIST_CONSTRUCTOR); STNode expressions = parseOptionalExpressionsList(memberList); STNode closeBracket = parseCloseBracket(); endContext(); return STNodeFactory.createListConstructorExpressionNode(openBracket, expressions, closeBracket); } private STNode parseListBindingPatternOrListConstructor(STNode openBracket, List<STNode> members, STNode closeBracket) { STNode lbpOrListCons; switch (peek().kind) { case COMMA_TOKEN: case CLOSE_BRACE_TOKEN: case CLOSE_BRACKET_TOKEN: lbpOrListCons = new STAmbiguousCollectionNode(SyntaxKind.LIST_BP_OR_LIST_CONSTRUCTOR, openBracket, members, closeBracket); break; default: if (isValidExprRhsStart(peek().kind)) { members = getExpressionList(members); STNode memberExpressions = STNodeFactory.createNodeList(members); lbpOrListCons = STNodeFactory.createListConstructorExpressionNode(openBracket, memberExpressions, closeBracket); break; } members = getBindingPatternsList(members); STNode bindingPatternsNode = STNodeFactory.createNodeList(members); STNode restBindingPattern = 
STNodeFactory.createEmptyNode(); lbpOrListCons = STNodeFactory.createListBindingPatternNode(openBracket, bindingPatternsNode, restBindingPattern, closeBracket); break; } endContext(); return lbpOrListCons; } private STNode parseMemberRhsInStmtStartWithBrace(STNode identifier, STNode colon, STNode secondIdentifier) { STNode typedBPOrExpr = parseTypedBindingPatternOrMemberAccess(secondIdentifier, false, ParserRuleContext.AMBIGUOUS_STMT); if (typedBPOrExpr.kind == SyntaxKind.INDEXED_EXPRESSION) { return parseMemberWithExprInRhs(identifier, colon, secondIdentifier, typedBPOrExpr); } switchContext(ParserRuleContext.BLOCK_STMT); startContext(ParserRuleContext.VAR_DECL_STMT); STNode finalKeyword = STNodeFactory.createEmptyNode(); STNode annots = STNodeFactory.createEmptyNode(); STNode qualifiedNameRef = STNodeFactory.createQualifiedNameReferenceNode(identifier, colon, secondIdentifier); STNode typeDesc = mergeQualifiedNameWithTypeDesc(qualifiedNameRef, ((STTypedBindingPatternNode) typedBPOrExpr).typeDescriptor); return parseVarDeclRhs(annots, finalKeyword, typeDesc, false); } /** * Parse a member that starts with "foo:bar[", in a statement starting with a brace. 
* * @param identifier First identifier of the statement * @param colon Colon that follows the first identifier * @param secondIdentifier Identifier that follows the colon * @param memberAccessExpr Member access expression * @return Parsed node */ private STNode parseMemberWithExprInRhs(STNode identifier, STNode colon, STNode secondIdentifier, STNode memberAccessExpr) { STNode expr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, memberAccessExpr, false, true); switch (peek().kind) { case COMMA_TOKEN: case CLOSE_BRACE_TOKEN: switchContext(ParserRuleContext.EXPRESSION_STATEMENT); startContext(ParserRuleContext.MAPPING_CONSTRUCTOR); return STNodeFactory.createSpecificFieldNode(identifier, colon, expr); case EQUAL_TOKEN: case SEMICOLON_TOKEN: default: switchContext(ParserRuleContext.BLOCK_STMT); startContext(ParserRuleContext.EXPRESSION_STATEMENT); STNode qualifiedName = STNodeFactory.createQualifiedNameReferenceNode(identifier, colon, secondIdentifier); STNode updatedExpr = mergeQualifiedNameWithExpr(qualifiedName, expr); return parseStatementStartWithExpr(updatedExpr); } } /** * Replace the first identifier of an expression, with a given qualified-identifier. * Only expressions that can start with "bar[..]" can reach here. 
* * @param qualifiedName Qualified identifier to replace simple identifier * @param exprOrAction Expression or action * @return Updated expression */ private STNode mergeQualifiedNameWithExpr(STNode qualifiedName, STNode exprOrAction) { switch (exprOrAction.kind) { case SIMPLE_NAME_REFERENCE: return qualifiedName; case BINARY_EXPRESSION: STBinaryExpressionNode binaryExpr = (STBinaryExpressionNode) exprOrAction; STNode newLhsExpr = mergeQualifiedNameWithExpr(qualifiedName, binaryExpr.lhsExpr); return STNodeFactory.createBinaryExpressionNode(binaryExpr.kind, newLhsExpr, binaryExpr.operator, binaryExpr.rhsExpr); case FIELD_ACCESS: STFieldAccessExpressionNode fieldAccess = (STFieldAccessExpressionNode) exprOrAction; newLhsExpr = mergeQualifiedNameWithExpr(qualifiedName, fieldAccess.expression); return STNodeFactory.createFieldAccessExpressionNode(newLhsExpr, fieldAccess.dotToken, fieldAccess.fieldName); case INDEXED_EXPRESSION: STIndexedExpressionNode memberAccess = (STIndexedExpressionNode) exprOrAction; newLhsExpr = mergeQualifiedNameWithExpr(qualifiedName, memberAccess.containerExpression); return STNodeFactory.createIndexedExpressionNode(newLhsExpr, memberAccess.openBracket, memberAccess.keyExpression, memberAccess.closeBracket); case TYPE_TEST_EXPRESSION: STTypeTestExpressionNode typeTest = (STTypeTestExpressionNode) exprOrAction; newLhsExpr = mergeQualifiedNameWithExpr(qualifiedName, typeTest.expression); return STNodeFactory.createTypeTestExpressionNode(newLhsExpr, typeTest.isKeyword, typeTest.typeDescriptor); case ANNOT_ACCESS: STAnnotAccessExpressionNode annotAccess = (STAnnotAccessExpressionNode) exprOrAction; newLhsExpr = mergeQualifiedNameWithExpr(qualifiedName, annotAccess.expression); return STNodeFactory.createFieldAccessExpressionNode(newLhsExpr, annotAccess.annotChainingToken, annotAccess.annotTagReference); case OPTIONAL_FIELD_ACCESS: STOptionalFieldAccessExpressionNode optionalFieldAccess = (STOptionalFieldAccessExpressionNode) exprOrAction; 
newLhsExpr = mergeQualifiedNameWithExpr(qualifiedName, optionalFieldAccess.expression); return STNodeFactory.createFieldAccessExpressionNode(newLhsExpr, optionalFieldAccess.optionalChainingToken, optionalFieldAccess.fieldName); case CONDITIONAL_EXPRESSION: STConditionalExpressionNode conditionalExpr = (STConditionalExpressionNode) exprOrAction; newLhsExpr = mergeQualifiedNameWithExpr(qualifiedName, conditionalExpr.lhsExpression); return STNodeFactory.createConditionalExpressionNode(newLhsExpr, conditionalExpr.questionMarkToken, conditionalExpr.middleExpression, conditionalExpr.colonToken, conditionalExpr.endExpression); case REMOTE_METHOD_CALL_ACTION: STRemoteMethodCallActionNode remoteCall = (STRemoteMethodCallActionNode) exprOrAction; newLhsExpr = mergeQualifiedNameWithExpr(qualifiedName, remoteCall.expression); return STNodeFactory.createRemoteMethodCallActionNode(newLhsExpr, remoteCall.rightArrowToken, remoteCall.methodName, remoteCall.openParenToken, remoteCall.arguments, remoteCall.closeParenToken); case ASYNC_SEND_ACTION: STAsyncSendActionNode asyncSend = (STAsyncSendActionNode) exprOrAction; newLhsExpr = mergeQualifiedNameWithExpr(qualifiedName, asyncSend.expression); return STNodeFactory.createAsyncSendActionNode(newLhsExpr, asyncSend.rightArrowToken, asyncSend.peerWorker); case SYNC_SEND_ACTION: STSyncSendActionNode syncSend = (STSyncSendActionNode) exprOrAction; newLhsExpr = mergeQualifiedNameWithExpr(qualifiedName, syncSend.expression); return STNodeFactory.createAsyncSendActionNode(newLhsExpr, syncSend.syncSendToken, syncSend.peerWorker); default: return exprOrAction; } } private STNode mergeQualifiedNameWithTypeDesc(STNode qualifiedName, STNode typeDesc) { switch (typeDesc.kind) { case SIMPLE_NAME_REFERENCE: return qualifiedName; case ARRAY_TYPE_DESC: STArrayTypeDescriptorNode arrayTypeDesc = (STArrayTypeDescriptorNode) typeDesc; STNode newMemberType = mergeQualifiedNameWithTypeDesc(qualifiedName, arrayTypeDesc.memberTypeDesc); return 
STNodeFactory.createArrayTypeDescriptorNode(newMemberType, arrayTypeDesc.openBracket, arrayTypeDesc.arrayLength, arrayTypeDesc.closeBracket); case UNION_TYPE_DESC: STUnionTypeDescriptorNode unionTypeDesc = (STUnionTypeDescriptorNode) typeDesc; STNode newlhsType = mergeQualifiedNameWithTypeDesc(qualifiedName, unionTypeDesc.leftTypeDesc); return STNodeFactory.createUnionTypeDescriptorNode(newlhsType, unionTypeDesc.pipeToken, unionTypeDesc.rightTypeDesc); case INTERSECTION_TYPE_DESC: STIntersectionTypeDescriptorNode intersectionTypeDesc = (STIntersectionTypeDescriptorNode) typeDesc; newlhsType = mergeQualifiedNameWithTypeDesc(qualifiedName, intersectionTypeDesc.leftTypeDesc); return STNodeFactory.createUnionTypeDescriptorNode(newlhsType, intersectionTypeDesc.bitwiseAndToken, intersectionTypeDesc.rightTypeDesc); case OPTIONAL_TYPE_DESC: STOptionalTypeDescriptorNode optionalType = (STOptionalTypeDescriptorNode) typeDesc; newMemberType = mergeQualifiedNameWithTypeDesc(qualifiedName, optionalType.typeDescriptor); return STNodeFactory.createOptionalTypeDescriptorNode(newMemberType, optionalType.questionMarkToken); default: return typeDesc; } } private List<STNode> getTypeDescList(List<STNode> ambibuousList) { List<STNode> typeDescList = new ArrayList<STNode>(); for (STNode item : ambibuousList) { if (item.kind != SyntaxKind.BRACKETED_LIST) { typeDescList.add(item); continue; } STAmbiguousCollectionNode innerList = (STAmbiguousCollectionNode) item; STNode memberTypeDescList = STNodeFactory.createNodeList(getTypeDescList(innerList.members)); STNode typeDesc = STNodeFactory.createTupleTypeDescriptorNode(innerList.collectionStartToken, memberTypeDescList, innerList.collectionEndToken); typeDescList.add(typeDesc); } return typeDescList; } private List<STNode> getBindingPatternsList(List<STNode> ambibuousList) { List<STNode> bindingPatterns = new ArrayList<STNode>(); for (STNode item : ambibuousList) { bindingPatterns.add(getBindingPattern(item)); } return bindingPatterns; } 
private STNode getBindingPattern(STNode ambiguousNode) { if (isEmpty(ambiguousNode)) { return ambiguousNode; } switch (ambiguousNode.kind) { case SIMPLE_NAME_REFERENCE: STNode varName = ((STSimpleNameReferenceNode) ambiguousNode).name; return createCaptureOrWildcardBP(varName); case QUALIFIED_NAME_REFERENCE: STQualifiedNameReferenceNode qualifiedName = (STQualifiedNameReferenceNode) ambiguousNode; return STNodeFactory.createFieldBindingPatternFullNode(qualifiedName.modulePrefix, qualifiedName.colon, getBindingPattern(qualifiedName.identifier)); case BRACKETED_LIST: case LIST_BP_OR_LIST_CONSTRUCTOR: STAmbiguousCollectionNode innerList = (STAmbiguousCollectionNode) ambiguousNode; STNode memberBindingPatterns = STNodeFactory.createNodeList(getBindingPatternsList(innerList.members)); STNode restBindingPattern = STNodeFactory.createEmptyNode(); return STNodeFactory.createListBindingPatternNode(innerList.collectionStartToken, memberBindingPatterns, restBindingPattern, innerList.collectionEndToken); case MAPPING_BP_OR_MAPPING_CONSTRUCTOR: innerList = (STAmbiguousCollectionNode) ambiguousNode; memberBindingPatterns = STNodeFactory.createNodeList(getBindingPatternsList(innerList.members)); restBindingPattern = STNodeFactory.createEmptyNode(); return STNodeFactory.createMappingBindingPatternNode(innerList.collectionStartToken, memberBindingPatterns, restBindingPattern, innerList.collectionEndToken); case SPECIFIC_FIELD: STSpecificFieldNode field = (STSpecificFieldNode) ambiguousNode; return STNodeFactory.createFieldBindingPatternFullNode(field.fieldName, field.colon, getBindingPattern(field.valueExpr)); default: return ambiguousNode; } } private List<STNode> getExpressionList(List<STNode> ambibuousList) { List<STNode> exprList = new ArrayList<STNode>(); for (STNode item : ambibuousList) { exprList.add(getExpression(item)); } return exprList; } private STNode getExpression(STNode ambiguousNode) { if (isEmpty(ambiguousNode)) { return ambiguousNode; } switch 
(ambiguousNode.kind) { case BRACKETED_LIST: case LIST_BP_OR_LIST_CONSTRUCTOR: STAmbiguousCollectionNode innerList = (STAmbiguousCollectionNode) ambiguousNode; STNode memberExprs = STNodeFactory.createNodeList(getExpressionList(innerList.members)); return STNodeFactory.createListConstructorExpressionNode(innerList.collectionStartToken, memberExprs, innerList.collectionEndToken); case MAPPING_BP_OR_MAPPING_CONSTRUCTOR: innerList = (STAmbiguousCollectionNode) ambiguousNode; memberExprs = STNodeFactory.createNodeList(getExpressionList(innerList.members)); return STNodeFactory.createMappingConstructorExpressionNode(innerList.collectionStartToken, memberExprs, innerList.collectionEndToken); case REST_BINDING_PATTERN: STRestBindingPatternNode restBindingPattern = (STRestBindingPatternNode) ambiguousNode; return STNodeFactory.createSpreadFieldNode(restBindingPattern.ellipsisToken, restBindingPattern.variableName); case SPECIFIC_FIELD: STSpecificFieldNode field = (STSpecificFieldNode) ambiguousNode; return STNodeFactory.createSpecificFieldNode(field.fieldName, field.colon, getExpression(field.valueExpr)); case SIMPLE_NAME_REFERENCE: case QUALIFIED_NAME_REFERENCE: default: return ambiguousNode; } } private STNode getMappingField(STNode identifier, STNode colon, STNode bindingPatternOrExpr) { STNode simpleNameRef = STNodeFactory.createSimpleNameReferenceNode(identifier); switch (bindingPatternOrExpr.kind) { case LIST_BINDING_PATTERN: case MAPPING_BINDING_PATTERN: return STNodeFactory.createFieldBindingPatternFullNode(simpleNameRef, colon, bindingPatternOrExpr); case LIST_CONSTRUCTOR: case MAPPING_CONSTRUCTOR: return STNodeFactory.createSpecificFieldNode(simpleNameRef, colon, identifier); case LIST_BP_OR_LIST_CONSTRUCTOR: case MAPPING_BP_OR_MAPPING_CONSTRUCTOR: default: switchContext(ParserRuleContext.EXPRESSION_STATEMENT); startContext(ParserRuleContext.MAPPING_CONSTRUCTOR); return STNodeFactory.createSpecificFieldNode(identifier, colon, bindingPatternOrExpr); } } }
class BallerinaParser extends AbstractParser { private static final OperatorPrecedence DEFAULT_OP_PRECEDENCE = OperatorPrecedence.ACTION; protected BallerinaParser(AbstractTokenReader tokenReader) { super(tokenReader, new BallerinaParserErrorHandler(tokenReader)); } /** * Start parsing the given input. * * @return Parsed node */ @Override public STNode parse() { return parseCompUnit(); } /** * Start parsing the input from a given context. Supported starting points are: * <ul> * <li>Module part (a file)</li> * <li>Top level node</li> * <li>Statement</li> * <li>Expression</li> * </ul> * * @param context Context to start parsing * @return Parsed node */ public STNode parse(ParserRuleContext context) { switch (context) { case COMP_UNIT: return parseCompUnit(); case TOP_LEVEL_NODE: startContext(ParserRuleContext.COMP_UNIT); return parseTopLevelNode(); case STATEMENT: startContext(ParserRuleContext.COMP_UNIT); startContext(ParserRuleContext.FUNC_BODY_BLOCK); return parseStatement(); case EXPRESSION: startContext(ParserRuleContext.COMP_UNIT); startContext(ParserRuleContext.FUNC_BODY_BLOCK); startContext(ParserRuleContext.STATEMENT); return parseExpression(); default: throw new UnsupportedOperationException("Cannot start parsing from: " + context); } } /** * Resume the parsing from the given context. * * @param context Context to resume parsing * @param args Arguments that requires to continue parsing from the given parser context * @return Parsed node */ @Override public STNode resumeParsing(ParserRuleContext context, Object... 
args) { switch (context) { case COMP_UNIT: return parseCompUnit(); case EXTERNAL_FUNC_BODY: return parseExternalFunctionBody(); case FUNC_BODY: return parseFunctionBody((boolean) args[0]); case OPEN_BRACE: return parseOpenBrace(); case CLOSE_BRACE: return parseCloseBrace(); case FUNC_NAME: return parseFunctionName(); case OPEN_PARENTHESIS: case ARG_LIST_START: return parseOpenParenthesis((ParserRuleContext) args[0]); case SIMPLE_TYPE_DESCRIPTOR: return parseSimpleTypeDescriptor(); case ASSIGN_OP: return parseAssignOp(); case EXTERNAL_KEYWORD: return parseExternalKeyword(); case SEMICOLON: return parseSemicolon(); case CLOSE_PARENTHESIS: return parseCloseParenthesis(); case VARIABLE_NAME: return parseVariableName(); case TERMINAL_EXPRESSION: return parseTerminalExpression((STNode) args[0], (boolean) args[1], (boolean) args[2]); case STATEMENT: return parseStatement(); case STATEMENT_WITHOUT_ANNOTS: return parseStatement((STNode) args[0]); case EXPRESSION_RHS: return parseExpressionRhs((OperatorPrecedence) args[0], (STNode) args[1], (boolean) args[2], (boolean) args[3]); case PARAMETER_START: return parseParameter((SyntaxKind) args[0], (STNode) args[1], (int) args[2], (boolean) args[3]); case PARAMETER_WITHOUT_ANNOTS: return parseParamGivenAnnots((SyntaxKind) args[0], (STNode) args[1], (STNode) args[2], (int) args[3], (boolean) args[4]); case AFTER_PARAMETER_TYPE: return parseAfterParamType((SyntaxKind) args[0], (STNode) args[1], (STNode) args[2], (STNode) args[3], (STNode) args[4], (boolean) args[5]); case PARAMETER_NAME_RHS: return parseParameterRhs((SyntaxKind) args[0], (STNode) args[1], (STNode) args[2], (STNode) args[3], (STNode) args[4], (STNode) args[5]); case TOP_LEVEL_NODE: return parseTopLevelNode(); case TOP_LEVEL_NODE_WITHOUT_METADATA: return parseTopLevelNode((STNode) args[0]); case TOP_LEVEL_NODE_WITHOUT_MODIFIER: return parseTopLevelNode((STNode) args[0], (STNode) args[1]); case STATEMENT_START_IDENTIFIER: return parseStatementStartIdentifier(); case 
VAR_DECL_STMT_RHS: return parseVarDeclRhs((STNode) args[0], (STNode) args[1], (STNode) args[2], (boolean) args[3]); case TYPE_REFERENCE: return parseTypeReference(); case FIELD_DESCRIPTOR_RHS: return parseFieldDescriptorRhs((STNode) args[0], (STNode) args[1], (STNode) args[2]); case RECORD_BODY_START: return parseRecordBodyStartDelimiter(); case TYPE_DESCRIPTOR: return parseTypeDescriptorInternal((ParserRuleContext) args[0]); case OBJECT_MEMBER_START: return parseObjectMember(); case OBJECT_FUNC_OR_FIELD_WITHOUT_VISIBILITY: return parseObjectMethodOrField((STNode) args[0], (STNode) args[1]); case OBJECT_FIELD_RHS: return parseObjectFieldRhs((STNode) args[0], (STNode) args[1], (STNode) args[2], (STNode) args[3]); case OBJECT_TYPE_FIRST_QUALIFIER: return parseObjectTypeQualifiers(); case OBJECT_TYPE_SECOND_QUALIFIER: return parseObjectTypeSecondQualifier((STNode) args[0]); case OBJECT_KEYWORD: return parseObjectKeyword(); case TYPE_NAME: return parseTypeName(); case IF_KEYWORD: return parseIfKeyword(); case ELSE_KEYWORD: return parseElseKeyword(); case ELSE_BODY: return parseElseBody(); case WHILE_KEYWORD: return parseWhileKeyword(); case PANIC_KEYWORD: return parsePanicKeyword(); case MAJOR_VERSION: return parseMajorVersion(); case IMPORT_DECL_RHS: return parseImportDecl((STNode) args[0], (STNode) args[1]); case IMPORT_PREFIX: return parseImportPrefix(); case IMPORT_MODULE_NAME: case IMPORT_ORG_OR_MODULE_NAME: case VARIABLE_REF: case SERVICE_NAME: case IMPLICIT_ANON_FUNC_PARAM: return parseIdentifier(context); case IMPORT_KEYWORD: return parseImportKeyword(); case SLASH: return parseSlashToken(); case DOT: return parseDotToken(); case IMPORT_VERSION_DECL: return parseVersion(); case VERSION_KEYWORD: return parseVersionKeywrod(); case VERSION_NUMBER: return parseVersionNumber(); case DECIMAL_INTEGER_LITERAL: return parseDecimalIntLiteral(context); case IMPORT_SUB_VERSION: return parseSubVersion(context); case IMPORT_PREFIX_DECL: return parseImportPrefixDecl(); case 
AS_KEYWORD: return parseAsKeyword(); case CONTINUE_KEYWORD: return parseContinueKeyword(); case BREAK_KEYWORD: return parseBreakKeyword(); case RETURN_KEYWORD: return parseReturnKeyword(); case MAPPING_FIELD: case FIRST_MAPPING_FIELD: return parseMappingField((ParserRuleContext) args[0], (STNode) args[1]); case SPECIFIC_FIELD_RHS: return parseSpecificFieldRhs((STNode) args[0], (STNode) args[1]); case STRING_LITERAL: return parseStringLiteral(); case COLON: return parseColon(); case OPEN_BRACKET: return parseOpenBracket(); case RESOURCE_DEF: return parseResource(); case OPTIONAL_SERVICE_NAME: return parseServiceName(); case SERVICE_KEYWORD: return parseServiceKeyword(); case ON_KEYWORD: return parseOnKeyword(); case RESOURCE_KEYWORD: return parseResourceKeyword(); case LISTENER_KEYWORD: return parseListenerKeyword(); case NIL_TYPE_DESCRIPTOR: return parseNilTypeDescriptor(); case COMPOUND_ASSIGNMENT_STMT: return parseCompoundAssignmentStmt(); case TYPEOF_KEYWORD: return parseTypeofKeyword(); case ARRAY_TYPE_DESCRIPTOR: return parseArrayTypeDescriptor((STNode) args[0]); case ARRAY_LENGTH: return parseArrayLength(); case FUNC_DEF_OR_FUNC_TYPE: case REQUIRED_PARAM: case ANNOT_REFERENCE: return parseIdentifier(context); case IS_KEYWORD: return parseIsKeyword(); case STMT_START_WITH_EXPR_RHS: return parseStatementStartWithExpr((STNode) args[0], (STNode) args[1]); case COMMA: return parseComma(); case CONST_DECL_TYPE: return parseConstDecl((STNode) args[0], (STNode) args[1], (STNode) args[2]); case STMT_START_IDENTIFIER_RHS: return parseStatementStartIdentifierRhs((STNode) args[0], (STNode) args[1]); case LT: return parseLTToken(); case GT: return parseGTToken(); case NIL_LITERAL: return parseNilLiteral(); case RECORD_FIELD_OR_RECORD_END: return parseFieldOrRestDescriptor((boolean) args[0]); case ANNOTATION_KEYWORD: return parseAnnotationKeyword(); case ANNOT_DECL_OPTIONAL_TYPE: return parseAnnotationDeclFromType((STNode) args[0], (STNode) args[1], (STNode) args[2], 
(STNode) args[3]); case ANNOT_DECL_RHS: return parseAnnotationDeclRhs((STNode) args[0], (STNode) args[1], (STNode) args[2], (STNode) args[3], (STNode) args[4]); case ANNOT_OPTIONAL_ATTACH_POINTS: return parseAnnotationDeclAttachPoints((STNode) args[0], (STNode) args[1], (STNode) args[2], (STNode) args[3], (STNode) args[4], (STNode) args[5]); case SOURCE_KEYWORD: return parseSourceKeyword(); case ATTACH_POINT_IDENT: return parseAttachPointIdent((STNode) args[0]); case IDENT_AFTER_OBJECT_IDENT: return parseIdentAfterObjectIdent(); case FUNCTION_IDENT: return parseFunctionIdent(); case FIELD_IDENT: return parseFieldIdent(); case ATTACH_POINT_END: return parseAttachPointEnd(); case XMLNS_KEYWORD: return parseXMLNSKeyword(); case XML_NAMESPACE_PREFIX_DECL: return parseXMLDeclRhs((STNode) args[0], (STNode) args[1]); case NAMESPACE_PREFIX: return parseNamespacePrefix(); case WORKER_KEYWORD: return parseWorkerKeyword(); case WORKER_NAME: return parseWorkerName(); case FORK_KEYWORD: return parseForkKeyword(); case DECIMAL_FLOATING_POINT_LITERAL: return parseDecimalFloatingPointLiteral(); case HEX_FLOATING_POINT_LITERAL: return parseHexFloatingPointLiteral(); case TRAP_KEYWORD: return parseTrapKeyword(); case IN_KEYWORD: return parseInKeyword(); case FOREACH_KEYWORD: return parseForEachKeyword(); case TABLE_KEYWORD: return parseTableKeyword(); case KEY_KEYWORD: return parseKeyKeyword(); case TABLE_KEYWORD_RHS: return parseTableConstructorOrQuery((STNode) args[0], (boolean) args[1]); case ERROR_KEYWORD: return parseErrorKeyWord(); case LET_KEYWORD: return parseLetKeyword(); case STREAM_KEYWORD: return parseStreamKeyword(); case STREAM_TYPE_FIRST_PARAM_RHS: return parseStreamTypeParamsNode((STNode) args[0], (STNode) args[1]); case TEMPLATE_START: case TEMPLATE_END: return parseBacktickToken(context); case KEY_CONSTRAINTS_RHS: return parseKeyConstraint((STNode) args[0]); case FUNCTION_KEYWORD_RHS: return parseFunctionKeywordRhs((STNode) args[0], (STNode) args[1], (STNode) 
args[2], (boolean) args[3],
                        // FIX: was `(boolean) args[3]` twice. parseFunctionKeywordRhs takes two distinct
                        // booleans (see parseFuncDefinition: `..., true, isObjectMethod`), so the fifth
                        // argument must come from args[4], not a duplicate of args[3].
                        (boolean) args[4]);
            case FUNC_OPTIONAL_RETURNS:
                return parseFuncReturnTypeDescriptor();
            case RETURNS_KEYWORD:
                return parseReturnsKeyword();
            case NEW_KEYWORD_RHS:
                return parseNewKeywordRhs((STNode) args[0]);
            case NEW_KEYWORD:
                return parseNewKeyword();
            case IMPLICIT_NEW:
                return parseImplicitNewRhs((STNode) args[0]);
            case FROM_KEYWORD:
                return parseFromKeyword();
            case WHERE_KEYWORD:
                return parseWhereKeyword();
            case SELECT_KEYWORD:
                return parseSelectKeyword();
            case TABLE_CONSTRUCTOR_OR_QUERY_START:
                return parseTableConstructorOrQuery((boolean) args[0]);
            case TABLE_CONSTRUCTOR_OR_QUERY_RHS:
                return parseTableConstructorOrQueryRhs((STNode) args[0], (STNode) args[1], (boolean) args[2]);
            case QUERY_PIPELINE_RHS:
                return parseIntermediateClause((boolean) args[0]);
            case ANON_FUNC_BODY:
                return parseAnonFuncBody();
            case CLOSE_BRACKET:
                return parseCloseBracket();
            case ARG_START_OR_ARG_LIST_END:
                return parseArg((STNode) args[0]);
            case ARG_END:
                return parseArgEnd();
            case MAPPING_FIELD_END:
                return parseMappingFieldEnd();
            case FUNCTION_KEYWORD:
                return parseFunctionKeyword();
            case FIELD_OR_REST_DESCIPTOR_RHS:
                return parseFieldOrRestDescriptorRhs((STNode) args[0], (STNode) args[1]);
            case TYPE_DESC_IN_TUPLE_RHS:
                return parseTupleMemberRhs();
            case LIST_BINDING_PATTERN_END_OR_CONTINUE:
                return parseListBindingpatternRhs();
            case CONSTANT_EXPRESSION_START:
                return parseConstExprInternal();
            case LIST_CONSTRUCTOR_MEMBER_END:
                return parseListConstructorMemberEnd();
            case NIL_OR_PARENTHESISED_TYPE_DESC_RHS:
                return parseNilOrParenthesisedTypeDescRhs((STNode) args[0]);
            case ANON_FUNC_PARAM_RHS:
                return parseImplicitAnonFuncParamEnd();
            case CAPTURE_BINDING_PATTERN:
                return parseCaptureBindingPattern();
            case LIST_BINDING_PATTERN:
                return parseListBindingPattern();
            case BINDING_PATTERN:
                return parseBindingPattern();
            case PEER_WORKER_NAME:
                return parsePeerWorkerName();
            case SYNC_SEND_TOKEN:
                return parseSyncSendToken();
            case LEFT_ARROW_TOKEN:
                return parseLeftArrowToken();
            case
RECEIVE_WORKERS: return parseReceiveWorkers(); case WAIT_KEYWORD: return parseWaitKeyword(); case WAIT_FUTURE_EXPR_END: return parseWaitFutureExprEnd((int) args[0]); case WAIT_FIELD_NAME: return parseWaitField(); case WAIT_FIELD_END: return parseWaitFieldEnd(); case ANNOT_CHAINING_TOKEN: return parseAnnotChainingToken(); case FIELD_ACCESS_IDENTIFIER: return parseFieldAccessIdentifier(); case DO_KEYWORD: return parseDoKeyword(); case MEMBER_ACCESS_KEY_EXPR_END: return parseMemberAccessKeyExprEnd(); case OPTIONAL_CHAINING_TOKEN: return parseOptionalChainingToken(); case RETRY_KEYWORD_RHS: return parseRetryKeywordRhs((STNode) args[0]); case RETRY_TYPE_PARAM_RHS: return parseRetryTypeParamRhs((STNode) args[0], (STNode) args[1]); case TRANSACTION_KEYWORD: return parseTransactionKeyword(); case COMMIT_KEYWORD: return parseCommitKeyword(); case RETRY_KEYWORD: return parseRetryKeyword(); case ROLLBACK_KEYWORD: return parseRollbackKeyword(); case RETRY_BODY: return parseRetryBody(); case ENUM_MEMBER_INTERNAL_RHS: return parseEnumMemberInternalRhs((STNode) args[0], (STNode) args[1]); case ENUM_MEMBER_RHS: return parseEnumMemberRhs(); case ENUM_MEMBER_NAME: return parseEnumMember(); case BRACKETED_LIST_MEMBER_END: return parseBracketedListMemberEnd(); case LIST_BP_OR_TUPLE_TYPE_MEMBER: return parseListBindingPatternOrTupleTypeAmbiguousMember(); case TYPED_BINDING_PATTERN_TYPE_RHS: return parseTypedBindingPatternTypeRhs((STNode) args[0], (ParserRuleContext) args[1]); case BRACKETED_LIST_RHS: return parseTypedBindingPatternOrMemberAccessRhs((STNode) args[0], (STNode) args[1], (STNode) args[2], (STNode) args[3], (boolean) args[4], (ParserRuleContext) args[5]); case UNION_OR_INTERSECTION_TOKEN: return parseUnionOrIntersectionToken(); case BRACKETED_LIST_MEMBER: case LIST_BINDING_MEMBER_OR_ARRAY_LENGTH: return parseBracketedListMember((boolean) args[0]); case BASE16_KEYWORD: return parseBase16Keyword(); case BASE64_KEYWORD: return parseBase64Keyword(); case DOT_LT_TOKEN: return 
parseDotLTToken(); default: throw new IllegalStateException("cannot resume parsing the rule: " + context); } } /* * Private methods. */ /** * Parse a given input and returns the AST. Starts parsing from the top of a compilation unit. * * @return Parsed node */ private STNode parseCompUnit() { startContext(ParserRuleContext.COMP_UNIT); STToken token = peek(); List<STNode> otherDecls = new ArrayList<>(); List<STNode> importDecls = new ArrayList<>(); boolean processImports = true; while (token.kind != SyntaxKind.EOF_TOKEN) { STNode decl = parseTopLevelNode(token.kind); if (decl.kind == SyntaxKind.IMPORT_DECLARATION) { if (processImports) { importDecls.add(decl); } else { otherDecls.add(decl); this.errorHandler.reportInvalidNode(token, "imports must be declared before other declarations"); } } else { if (processImports) { processImports = false; } otherDecls.add(decl); } token = peek(); } STToken eof = consume(); endContext(); return STNodeFactory.createModulePartNode(STNodeFactory.createNodeList(importDecls), STNodeFactory.createNodeList(otherDecls), eof); } /** * Parse top level node having an optional modifier preceding it. 
* * @return Parsed node */ private STNode parseTopLevelNode() { STToken token = peek(); return parseTopLevelNode(token.kind); } protected STNode parseTopLevelNode(SyntaxKind tokenKind) { STNode metadata; switch (tokenKind) { case EOF_TOKEN: return consume(); case DOCUMENTATION_LINE: case AT_TOKEN: metadata = parseMetaData(tokenKind); return parseTopLevelNode(metadata); case IMPORT_KEYWORD: case FINAL_KEYWORD: case PUBLIC_KEYWORD: case FUNCTION_KEYWORD: case TYPE_KEYWORD: case LISTENER_KEYWORD: case CONST_KEYWORD: case ANNOTATION_KEYWORD: case XMLNS_KEYWORD: case SERVICE_KEYWORD: case ENUM_KEYWORD: metadata = createEmptyMetadata(); break; case IDENTIFIER_TOKEN: if (isModuleVarDeclStart(1)) { return parseModuleVarDecl(createEmptyMetadata(), null); } default: if (isTypeStartingToken(tokenKind) && tokenKind != SyntaxKind.IDENTIFIER_TOKEN) { metadata = createEmptyMetadata(); break; } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.TOP_LEVEL_NODE); if (solution.action == Action.KEEP) { metadata = STNodeFactory.createNodeList(new ArrayList<>()); break; } if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseTopLevelNode(solution.tokenKind); } return parseTopLevelNode(tokenKind, metadata); } /** * Parse top level node having an optional modifier preceding it, given the next token kind. 
* * @param metadata Next token kind * @return Parsed node */ private STNode parseTopLevelNode(STNode metadata) { STToken nextToken = peek(); return parseTopLevelNode(nextToken.kind, metadata); } private STNode parseTopLevelNode(SyntaxKind tokenKind, STNode metadata) { STNode qualifier = null; switch (tokenKind) { case EOF_TOKEN: if (metadata != null) { this.errorHandler.reportInvalidNode(null, "invalid metadata"); } return consume(); case PUBLIC_KEYWORD: qualifier = parseQualifier(); tokenKind = peek().kind; break; case FUNCTION_KEYWORD: case TYPE_KEYWORD: case LISTENER_KEYWORD: case CONST_KEYWORD: case FINAL_KEYWORD: case IMPORT_KEYWORD: case ANNOTATION_KEYWORD: case XMLNS_KEYWORD: case ENUM_KEYWORD: break; case IDENTIFIER_TOKEN: if (isModuleVarDeclStart(1)) { return parseModuleVarDecl(metadata, null); } default: if (isTypeStartingToken(tokenKind) && tokenKind != SyntaxKind.IDENTIFIER_TOKEN) { break; } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.TOP_LEVEL_NODE_WITHOUT_METADATA, metadata); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } if (solution.action == Action.KEEP) { qualifier = STNodeFactory.createEmptyNode(); break; } return parseTopLevelNode(solution.tokenKind, metadata); } return parseTopLevelNode(tokenKind, metadata, qualifier); } /** * Check whether the cursor is at the start of a module level var-decl. * * @param lookahead Offset of the token to to check * @return <code>true</code> if the cursor is at the start of a module level var-decl. * <code>false</code> otherwise. 
*/ private boolean isModuleVarDeclStart(int lookahead) { STToken nextToken = peek(lookahead + 1); switch (nextToken.kind) { case EQUAL_TOKEN: case OPEN_BRACKET_TOKEN: case QUESTION_MARK_TOKEN: case PIPE_TOKEN: case BITWISE_AND_TOKEN: return true; case IDENTIFIER_TOKEN: switch (peek(lookahead + 2).kind) { case EQUAL_TOKEN: case SEMICOLON_TOKEN: return true; default: return false; } case COLON_TOKEN: if (lookahead > 1) { return false; } if (peek(lookahead + 2).kind != SyntaxKind.IDENTIFIER_TOKEN) { return false; } return isModuleVarDeclStart(lookahead + 2); default: return false; } } /** * Parse import declaration. * <p> * <code>import-decl := import [org-name /] module-name [version sem-ver] [as import-prefix] ;</code> * * @return Parsed node */ private STNode parseImportDecl() { startContext(ParserRuleContext.IMPORT_DECL); this.tokenReader.startMode(ParserMode.IMPORT); STNode importKeyword = parseImportKeyword(); STNode identifier = parseIdentifier(ParserRuleContext.IMPORT_ORG_OR_MODULE_NAME); STToken token = peek(); STNode importDecl = parseImportDecl(token.kind, importKeyword, identifier); this.tokenReader.endMode(); endContext(); return importDecl; } /** * Parse import keyword. * * @return Parsed node */ private STNode parseImportKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.IMPORT_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.IMPORT_KEYWORD); return sol.recoveredNode; } } /** * Parse identifier. * * @return Parsed node */ private STNode parseIdentifier(ParserRuleContext currentCtx) { STToken token = peek(); if (token.kind == SyntaxKind.IDENTIFIER_TOKEN) { return consume(); } else { Solution sol = recover(token, currentCtx); return sol.recoveredNode; } } /** * Parse RHS of the import declaration. This includes the components after the * starting identifier (org-name/module-name) of the import decl. 
* * @param importKeyword Import keyword * @param identifier Org-name or the module name * @return Parsed node */ private STNode parseImportDecl(STNode importKeyword, STNode identifier) { STToken nextToken = peek(); return parseImportDecl(nextToken.kind, importKeyword, identifier); } private STNode parseImportDecl(SyntaxKind tokenKind, STNode importKeyword, STNode identifier) { STNode orgName; STNode moduleName; STNode version; STNode alias; switch (tokenKind) { case SLASH_TOKEN: STNode slash = parseSlashToken(); orgName = STNodeFactory.createImportOrgNameNode(identifier, slash); moduleName = parseModuleName(); version = parseVersion(); alias = parseImportPrefixDecl(); break; case DOT_TOKEN: case VERSION_KEYWORD: orgName = STNodeFactory.createEmptyNode(); moduleName = parseModuleName(tokenKind, identifier); version = parseVersion(); alias = parseImportPrefixDecl(); break; case AS_KEYWORD: orgName = STNodeFactory.createEmptyNode(); moduleName = parseModuleName(tokenKind, identifier); version = STNodeFactory.createEmptyNode(); alias = parseImportPrefixDecl(); break; case SEMICOLON_TOKEN: orgName = STNodeFactory.createEmptyNode(); moduleName = parseModuleName(tokenKind, identifier); version = STNodeFactory.createEmptyNode(); alias = STNodeFactory.createEmptyNode(); break; default: Solution solution = recover(peek(), ParserRuleContext.IMPORT_DECL_RHS, importKeyword, identifier); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseImportDecl(solution.tokenKind, importKeyword, identifier); } STNode semicolon = parseSemicolon(); return STNodeFactory.createImportDeclarationNode(importKeyword, orgName, moduleName, version, alias, semicolon); } /** * parse slash token. * * @return Parsed node */ private STNode parseSlashToken() { STToken token = peek(); if (token.kind == SyntaxKind.SLASH_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.SLASH); return sol.recoveredNode; } } /** * Parse dot token. 
* * @return Parsed node */ private STNode parseDotToken() { STToken nextToken = peek(); return parseDotToken(nextToken.kind); } private STNode parseDotToken(SyntaxKind tokenKind) { if (tokenKind == SyntaxKind.DOT_TOKEN) { return consume(); } else { Solution sol = recover(peek(), ParserRuleContext.DOT); return sol.recoveredNode; } } /** * Parse module name of a import declaration. * * @return Parsed node */ private STNode parseModuleName() { STNode moduleNameStart = parseIdentifier(ParserRuleContext.IMPORT_MODULE_NAME); return parseModuleName(peek().kind, moduleNameStart); } /** * Parse import module name of a import declaration, given the module name start identifier. * * @param moduleNameStart Starting identifier of the module name * @return Parsed node */ private STNode parseModuleName(SyntaxKind nextTokenKind, STNode moduleNameStart) { List<STNode> moduleNameParts = new ArrayList<>(); moduleNameParts.add(moduleNameStart); while (!isEndOfImportModuleName(nextTokenKind)) { moduleNameParts.add(parseDotToken()); moduleNameParts.add(parseIdentifier(ParserRuleContext.IMPORT_MODULE_NAME)); nextTokenKind = peek().kind; } return STNodeFactory.createNodeList(moduleNameParts); } private boolean isEndOfImportModuleName(SyntaxKind nextTokenKind) { return nextTokenKind != SyntaxKind.DOT_TOKEN && nextTokenKind != SyntaxKind.IDENTIFIER_TOKEN; } private boolean isEndOfImportDecl(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case SEMICOLON_TOKEN: case PUBLIC_KEYWORD: case FUNCTION_KEYWORD: case TYPE_KEYWORD: case ABSTRACT_KEYWORD: case CONST_KEYWORD: case EOF_TOKEN: case SERVICE_KEYWORD: case IMPORT_KEYWORD: case FINAL_KEYWORD: return true; default: return false; } } /** * Parse version component of a import declaration. 
* <p> * <code>version-decl := version sem-ver</code> * * @return Parsed node */ private STNode parseVersion() { STToken nextToken = peek(); return parseVersion(nextToken.kind); } private STNode parseVersion(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case VERSION_KEYWORD: STNode versionKeyword = parseVersionKeywrod(); STNode versionNumber = parseVersionNumber(); return STNodeFactory.createImportVersionNode(versionKeyword, versionNumber); case AS_KEYWORD: case SEMICOLON_TOKEN: return STNodeFactory.createEmptyNode(); default: if (isEndOfImportDecl(nextTokenKind)) { return STNodeFactory.createEmptyNode(); } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.IMPORT_VERSION_DECL); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseVersion(solution.tokenKind); } } /** * Parse version keywrod. * * @return Parsed node */ private STNode parseVersionKeywrod() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.VERSION_KEYWORD) { return consume(); } else { Solution sol = recover(peek(), ParserRuleContext.VERSION_KEYWORD); return sol.recoveredNode; } } /** * Parse version number. * <p> * <code>sem-ver := major-num [. minor-num [. 
patch-num]] * <br/> * major-num := DecimalNumber * <br/> * minor-num := DecimalNumber * <br/> * patch-num := DecimalNumber * </code> * * @return Parsed node */ private STNode parseVersionNumber() { STToken nextToken = peek(); return parseVersionNumber(nextToken.kind); } private STNode parseVersionNumber(SyntaxKind nextTokenKind) { STNode majorVersion; switch (nextTokenKind) { case DECIMAL_INTEGER_LITERAL: majorVersion = parseMajorVersion(); break; default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.VERSION_NUMBER); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseVersionNumber(solution.tokenKind); } List<STNode> versionParts = new ArrayList<>(); versionParts.add(majorVersion); STNode minorVersion = parseMinorVersion(); if (minorVersion != null) { versionParts.add(minorVersion); STNode patchVersion = parsePatchVersion(); if (patchVersion != null) { versionParts.add(patchVersion); } } return STNodeFactory.createNodeList(versionParts); } private STNode parseMajorVersion() { return parseDecimalIntLiteral(ParserRuleContext.MAJOR_VERSION); } private STNode parseMinorVersion() { return parseSubVersion(ParserRuleContext.MINOR_VERSION); } private STNode parsePatchVersion() { return parseSubVersion(ParserRuleContext.PATCH_VERSION); } /** * Parse decimal literal. * * @param context Context in which the decimal literal is used. * @return Parsed node */ private STNode parseDecimalIntLiteral(ParserRuleContext context) { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.DECIMAL_INTEGER_LITERAL) { return consume(); } else { Solution sol = recover(peek(), context); return sol.recoveredNode; } } /** * Parse sub version. i.e: minor-version/patch-version. * * @param context Context indicating what kind of sub-version is being parsed. 
* @return Parsed node */ private STNode parseSubVersion(ParserRuleContext context) { STToken nextToken = peek(); return parseSubVersion(nextToken.kind, context); } private STNode parseSubVersion(SyntaxKind nextTokenKind, ParserRuleContext context) { switch (nextTokenKind) { case AS_KEYWORD: case SEMICOLON_TOKEN: return null; case DOT_TOKEN: STNode leadingDot = parseDotToken(); STNode versionNumber = parseDecimalIntLiteral(context); return STNodeFactory.createImportSubVersionNode(leadingDot, versionNumber); default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.IMPORT_SUB_VERSION); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseSubVersion(solution.tokenKind, context); } } /** * Parse import prefix declaration. * <p> * <code>import-prefix-decl := as import-prefix * <br/> * import-prefix := a identifier | _ * </code> * * @return Parsed node */ private STNode parseImportPrefixDecl() { STToken token = peek(); return parseImportPrefixDecl(token.kind); } private STNode parseImportPrefixDecl(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case AS_KEYWORD: STNode asKeyword = parseAsKeyword(); STNode prefix = parseImportPrefix(); return STNodeFactory.createImportPrefixNode(asKeyword, prefix); case SEMICOLON_TOKEN: return STNodeFactory.createEmptyNode(); default: if (isEndOfImportDecl(nextTokenKind)) { return STNodeFactory.createEmptyNode(); } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.IMPORT_PREFIX_DECL); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseImportPrefixDecl(solution.tokenKind); } } /** * Parse <code>as</code> keyword. * * @return Parsed node */ private STNode parseAsKeyword() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.AS_KEYWORD) { return consume(); } else { Solution sol = recover(peek(), ParserRuleContext.AS_KEYWORD); return sol.recoveredNode; } } /** * Parse import prefix. 
     *
     * @return Parsed node
     */
    private STNode parseImportPrefix() {
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {
            return consume();
        } else {
            Solution sol = recover(peek(), ParserRuleContext.IMPORT_PREFIX);
            return sol.recoveredNode;
        }
    }

    /**
     * Parse top level node, given the modifier that precedes it.
     *
     * @param qualifier Qualifier that precedes the top level node
     * @return Parsed node
     */
    private STNode parseTopLevelNode(STNode metadata, STNode qualifier) {
        STToken token = peek();
        return parseTopLevelNode(token.kind, metadata, qualifier);
    }

    /**
     * Parse top level node given the next token kind and the modifier that precedes it.
     *
     * @param tokenKind Next token kind
     * @param qualifier Qualifier that precedes the top level node
     * @return Parsed top-level node
     */
    private STNode parseTopLevelNode(SyntaxKind tokenKind, STNode metadata, STNode qualifier) {
        switch (tokenKind) {
            case FUNCTION_KEYWORD:
                return parseFuncDefOrFuncTypeDesc(metadata, getQualifier(qualifier), false);
            case TYPE_KEYWORD:
                return parseModuleTypeDefinition(metadata, getQualifier(qualifier));
            case LISTENER_KEYWORD:
                return parseListenerDeclaration(metadata, getQualifier(qualifier));
            case CONST_KEYWORD:
                return parseConstantDeclaration(metadata, getQualifier(qualifier));
            case ANNOTATION_KEYWORD:
                STNode constKeyword = STNodeFactory.createEmptyNode();
                return parseAnnotationDeclaration(metadata, getQualifier(qualifier), constKeyword);
            case IMPORT_KEYWORD:
                reportInvalidQualifier(qualifier);
                return parseImportDecl();
            case XMLNS_KEYWORD:
                reportInvalidQualifier(qualifier);
                return parseXMLNamepsaceDeclaration();
            case FINAL_KEYWORD:
                reportInvalidQualifier(qualifier);
                STNode finalKeyword = parseFinalKeyword();
                return parseVariableDecl(metadata, finalKeyword, true);
            case SERVICE_KEYWORD:
                // 'service' may start a service declaration or a service-typed module
                // var-decl; look ahead to disambiguate.
                if (isServiceDeclStart(ParserRuleContext.TOP_LEVEL_NODE, 1)) {
                    reportInvalidQualifier(qualifier);
                    return parseServiceDecl(metadata);
                }
                return parseModuleVarDecl(metadata, qualifier);
            case ENUM_KEYWORD:
                return parseEnumDeclaration(metadata, getQualifier(qualifier));
            case IDENTIFIER_TOKEN:
                // Module-level var-decl starting with a user-defined type name.
                if (isModuleVarDeclStart(1)) {
                    return parseModuleVarDecl(metadata, qualifier);
                }
                // Else fall through to the default (recovery) branch.
            default:
                if (isTypeStartingToken(tokenKind) && tokenKind != SyntaxKind.IDENTIFIER_TOKEN) {
                    return parseModuleVarDecl(metadata, qualifier);
                }

                STToken token = peek();
                Solution solution = recover(token, ParserRuleContext.TOP_LEVEL_NODE_WITHOUT_MODIFIER, metadata,
                        qualifier);

                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }

                if (solution.action == Action.KEEP) {
                    return parseModuleVarDecl(metadata, qualifier);
                }

                return parseTopLevelNode(solution.tokenKind, metadata, qualifier);
        }
    }

    // Parse a module-level variable declaration; a qualifier is not valid here.
    private STNode parseModuleVarDecl(STNode metadata, STNode qualifier) {
        reportInvalidQualifier(qualifier);
        STNode finalKeyword = STNodeFactory.createEmptyNode();
        return parseVariableDecl(metadata, finalKeyword, true);
    }

    // Normalize a possibly-null qualifier to an empty node.
    private STNode getQualifier(STNode qualifier) {
        return qualifier == null ? STNodeFactory.createEmptyNode() : qualifier;
    }

    // Report an error when a qualifier was present where none is allowed.
    private void reportInvalidQualifier(STNode qualifier) {
        if (qualifier != null && qualifier.kind != SyntaxKind.NONE) {
            this.errorHandler.reportInvalidNode((STToken) qualifier, "invalid qualifier '" +
                    qualifier.toString().trim() + "'");
        }
    }

    /**
     * Parse access modifiers.
     *
     * @return Parsed node
     */
    private STNode parseQualifier() {
        STToken token = peek();
        if (token.kind == SyntaxKind.PUBLIC_KEYWORD) {
            return consume();
        } else {
            Solution sol = recover(token, ParserRuleContext.PUBLIC_KEYWORD);
            return sol.recoveredNode;
        }
    }

    // Parse a construct that is known up-front to be a function definition
    // (as opposed to the ambiguous func-def-or-func-type entry point below).
    private STNode parseFuncDefinition(STNode metadata, STNode visibilityQualifier, boolean isObjectMethod) {
        startContext(ParserRuleContext.FUNC_DEF);
        STNode functionKeyword = parseFunctionKeyword();
        STNode funcDef = parseFunctionKeywordRhs(metadata, visibilityQualifier, functionKeyword, true, isObjectMethod);
        return funcDef;
    }

    /**
     * Parse function definition for the function type descriptor.
* <p> * <code> * function-defn := FUNCTION identifier function-signature function-body * <br/> * function-type-descriptor := function function-signature * </code> * * @param metadata Metadata * @param visibilityQualifier Visibility qualifier * @return Parsed node */ private STNode parseFuncDefOrFuncTypeDesc(STNode metadata, STNode visibilityQualifier, boolean isObjectMethod) { startContext(ParserRuleContext.FUNC_DEF_OR_FUNC_TYPE); STNode functionKeyword = parseFunctionKeyword(); STNode funcDefOrType = parseFunctionKeywordRhs(metadata, visibilityQualifier, functionKeyword, false, isObjectMethod); return funcDefOrType; } private STNode parseFunctionKeywordRhs(STNode metadata, STNode visibilityQualifier, STNode functionKeyword, boolean isFuncDef, boolean isObjectMethod) { return parseFunctionKeywordRhs(peek().kind, metadata, visibilityQualifier, functionKeyword, isFuncDef, isObjectMethod); } private STNode parseFunctionKeywordRhs(SyntaxKind nextTokenKind, STNode metadata, STNode visibilityQualifier, STNode functionKeyword, boolean isFuncDef, boolean isObjectMethod) { STNode name; switch (nextTokenKind) { case IDENTIFIER_TOKEN: name = parseFunctionName(); isFuncDef = true; break; case OPEN_PAREN_TOKEN: name = STNodeFactory.createEmptyNode(); break; default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.FUNCTION_KEYWORD_RHS, metadata, visibilityQualifier, functionKeyword, isFuncDef, isObjectMethod); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseFunctionKeywordRhs(solution.tokenKind, metadata, visibilityQualifier, functionKeyword, isFuncDef, isObjectMethod); } if (isFuncDef) { switchContext(ParserRuleContext.FUNC_DEF); STNode funcSignature = parseFuncSignature(false); STNode funcDef = createFuncDefOrMethodDecl(metadata, visibilityQualifier, functionKeyword, isObjectMethod, name, funcSignature); endContext(); return funcDef; } STNode funcSignature = parseFuncSignature(true); return 
parseReturnTypeDescRhs(metadata, visibilityQualifier, functionKeyword, funcSignature, isObjectMethod); } private STNode createFuncDefOrMethodDecl(STNode metadata, STNode visibilityQualifier, STNode functionKeyword, boolean isObjectMethod, STNode name, STNode funcSignature) { STNode body = parseFunctionBody(isObjectMethod); if (body.kind == SyntaxKind.SEMICOLON_TOKEN) { return STNodeFactory.createFunctionDeclarationNode(metadata, visibilityQualifier, functionKeyword, name, funcSignature, body); } return STNodeFactory.createFunctionDefinitionNode(metadata, visibilityQualifier, functionKeyword, name, funcSignature, body); } /** * Parse function signature. * <p> * <code> * function-signature := ( param-list ) return-type-descriptor * <br/> * return-type-descriptor := [ returns [annots] type-descriptor ] * </code> * * @param isParamNameOptional Whether the parameter names are optional * @param isInExprContext Whether this function signature is occurred within an expression context * @return Function signature node */ private STNode parseFuncSignature(boolean isParamNameOptional) { STNode openParenthesis = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS); STNode parameters = parseParamList(isParamNameOptional); STNode closeParenthesis = parseCloseParenthesis(); endContext(); STNode returnTypeDesc = parseFuncReturnTypeDescriptor(); return STNodeFactory.createFunctionSignatureNode(openParenthesis, parameters, closeParenthesis, returnTypeDesc); } private STNode parseReturnTypeDescRhs(STNode metadata, STNode visibilityQualifier, STNode functionKeyword, STNode funcSignature, boolean isObjectMethod) { switch (peek().kind) { case SEMICOLON_TOKEN: case IDENTIFIER_TOKEN: case OPEN_BRACKET_TOKEN: endContext(); STNode typeDesc = STNodeFactory.createFunctionTypeDescriptorNode(functionKeyword, funcSignature); if (isObjectMethod) { STNode fieldName = parseVariableName(); return parseObjectFieldRhs(metadata, visibilityQualifier, typeDesc, fieldName); } 
startContext(ParserRuleContext.VAR_DECL_STMT); STNode typedBindingPattern = parseTypedBindingPatternTypeRhs(typeDesc, ParserRuleContext.VAR_DECL_STMT); STNode varDecl = parseVarDeclRhs(metadata, visibilityQualifier, typedBindingPattern, true); endContext(); return varDecl; case OPEN_BRACE_TOKEN: case EQUAL_TOKEN: break; default: break; } STNode name = errorHandler.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN, ERROR_MISSING_FUNCTION_NAME); funcSignature = validateAndGetFuncParams((STFunctionSignatureNode) funcSignature); STNode funcDef = createFuncDefOrMethodDecl(metadata, visibilityQualifier, functionKeyword, isObjectMethod, name, funcSignature); endContext(); return funcDef; } /** * Validate the param list and return. If there are params without param-name, * then this method will create a new set of params with missing param-name * and return. * * @param signature Function signature * @return */ private STNode validateAndGetFuncParams(STFunctionSignatureNode signature) { STNode parameters = signature.parameters; int paramCount = parameters.bucketCount(); int index = 0; for (; index < paramCount; index++) { STNode param = parameters.childInBucket(index); switch (param.kind) { case REQUIRED_PARAM: STRequiredParameterNode requiredParam = (STRequiredParameterNode) param; if (isEmpty(requiredParam.paramName)) { break; } continue; case DEFAULTABLE_PARAM: STDefaultableParameterNode defaultableParam = (STDefaultableParameterNode) param; if (isEmpty(defaultableParam.paramName)) { break; } continue; case REST_PARAM: STRestParameterNode restParam = (STRestParameterNode) param; if (isEmpty(restParam.paramName)) { break; } continue; default: continue; } break; } if (index == paramCount) { return signature; } STNode updatedParams = getUpdatedParamList(parameters, index); return STNodeFactory.createFunctionSignatureNode(signature.openParenToken, updatedParams, signature.closeParenToken, signature.returnTypeDesc); } private STNode getUpdatedParamList(STNode 
parameters, int index) { int paramCount = parameters.bucketCount(); int newIndex = 0; ArrayList<STNode> newParams = new ArrayList<>(); for (; newIndex < index; newIndex++) { newParams.add(parameters.childInBucket(index)); } for (; newIndex < paramCount; newIndex++) { STNode param = parameters.childInBucket(newIndex); STNode paramName = STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN); switch (param.kind) { case REQUIRED_PARAM: STRequiredParameterNode requiredParam = (STRequiredParameterNode) param; if (isEmpty(requiredParam.paramName)) { param = STNodeFactory.createRequiredParameterNode(requiredParam.leadingComma, requiredParam.annotations, requiredParam.visibilityQualifier, requiredParam.typeName, paramName); } break; case DEFAULTABLE_PARAM: STDefaultableParameterNode defaultableParam = (STDefaultableParameterNode) param; if (isEmpty(defaultableParam.paramName)) { param = STNodeFactory.createDefaultableParameterNode(defaultableParam.leadingComma, defaultableParam.annotations, defaultableParam.visibilityQualifier, defaultableParam.typeName, paramName, defaultableParam.equalsToken, defaultableParam.expression); } break; case REST_PARAM: STRestParameterNode restParam = (STRestParameterNode) param; if (isEmpty(restParam.paramName)) { param = STNodeFactory.createRestParameterNode(restParam.leadingComma, restParam.annotations, restParam.typeName, restParam.ellipsisToken, paramName); } break; default: break; } newParams.add(param); } return STNodeFactory.createNodeList(newParams); } private boolean isEmpty(STNode node) { return node == null; } /** * Parse function keyword. Need to validate the token before consuming, * since we can reach here while recovering. * * @return Parsed node */ private STNode parseFunctionKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.FUNCTION_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.FUNCTION_KEYWORD); return sol.recoveredNode; } } /** * Parse function name. 
     *
     * @return Parsed node
     */
    private STNode parseFunctionName() {
        STToken token = peek();
        if (token.kind == SyntaxKind.IDENTIFIER_TOKEN) {
            return consume();
        } else {
            Solution sol = recover(token, ParserRuleContext.FUNC_NAME);
            return sol.recoveredNode;
        }
    }

    /**
     * Parse open parenthesis.
     *
     * @param ctx Context of the parenthesis
     * @return Parsed node
     */
    private STNode parseOpenParenthesis(ParserRuleContext ctx) {
        STToken token = peek();
        if (token.kind == SyntaxKind.OPEN_PAREN_TOKEN) {
            return consume();
        } else {
            // NOTE(review): 'ctx' is passed both as the rule context and as a recovery
            // argument here — confirm this is intended.
            Solution sol = recover(token, ctx, ctx);
            return sol.recoveredNode;
        }
    }

    /**
     * Parse close parenthesis.
     *
     * @return Parsed node
     */
    private STNode parseCloseParenthesis() {
        STToken token = peek();
        if (token.kind == SyntaxKind.CLOSE_PAREN_TOKEN) {
            return consume();
        } else {
            Solution sol = recover(token, ParserRuleContext.CLOSE_PARENTHESIS);
            return sol.recoveredNode;
        }
    }

    /**
     * <p>
     * Parse parameter list.
     * </p>
     * <code>
     * param-list := required-params [, defaultable-params] [, rest-param]
     * <br/>&nbsp;| defaultable-params [, rest-param]
     * <br/>&nbsp;| [rest-param]
     * <br/><br/>
     * required-params := required-param (, required-param)*
     * <br/><br/>
     * required-param := [annots] [public] type-descriptor [param-name]
     * <br/><br/>
     * defaultable-params := defaultable-param (, defaultable-param)*
     * <br/><br/>
     * defaultable-param := [annots] [public] type-descriptor [param-name] default-value
     * <br/><br/>
     * rest-param := [annots] type-descriptor ... [param-name]
     * <br/><br/>
     * param-name := identifier
     * </code>
     *
     * @param isParamNameOptional Whether the param names in the signature is optional or not.
     * @return Parsed node
     */
    private STNode parseParamList(boolean isParamNameOptional) {
        startContext(ParserRuleContext.PARAM_LIST);

        ArrayList<STNode> paramsList = new ArrayList<>();
        STToken token = peek();
        if (isEndOfParametersList(token.kind)) {
            STNode params = STNodeFactory.createNodeList(paramsList);
            return params;
        }

        // The first param has no leading comma.
        STNode startingComma = STNodeFactory.createEmptyNode();
        startContext(ParserRuleContext.REQUIRED_PARAM);
        STNode firstParam = parseParameter(startingComma, SyntaxKind.REQUIRED_PARAM, isParamNameOptional);
        SyntaxKind prevParamKind = firstParam.kind;
        paramsList.add(firstParam);

        // Params must appear in the order: required, defaultable, rest. The kind of the
        // previous param drives both the parsing context and the ordering validation.
        token = peek();
        while (!isEndOfParametersList(token.kind)) {
            switch (prevParamKind) {
                case REST_PARAM:
                    // The rest-param must be the last param; anything after it is invalid.
                    this.errorHandler.reportInvalidNode(token, "cannot have more parameters after the rest-parameter");
                    startContext(ParserRuleContext.REQUIRED_PARAM);
                    break;
                case DEFAULTABLE_PARAM:
                    startContext(ParserRuleContext.DEFAULTABLE_PARAM);
                    break;
                case REQUIRED_PARAM:
                default:
                    startContext(ParserRuleContext.REQUIRED_PARAM);
                    break;
            }

            STNode paramEnd = parseParameterRhs(token.kind);
            if (paramEnd == null) {
                // Close-paren reached: the list is done.
                endContext();
                break;
            }

            STNode param = parseParameter(paramEnd, prevParamKind, isParamNameOptional);
            prevParamKind = param.kind;
            paramsList.add(param);
            token = peek();
        }

        STNode params = STNodeFactory.createNodeList(paramsList);
        return params;
    }

    // Parse the token that separates one param from the next: a comma, or null when
    // the close-paren terminates the whole list.
    private STNode parseParameterRhs(SyntaxKind tokenKind) {
        switch (tokenKind) {
            case COMMA_TOKEN:
                return parseComma();
            case CLOSE_PAREN_TOKEN:
                return null;
            default:
                STToken token = peek();
                Solution solution = recover(token, ParserRuleContext.PARAM_END);

                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }

                return parseParameterRhs(solution.tokenKind);
        }
    }

    /**
     * Parse a single parameter. Parameter can be a required parameter, a defaultable
     * parameter, or a rest parameter.
     *
     * @param prevParamKind Kind of the parameter that precedes current parameter
     * @param leadingComma Comma that occurs before the param
     * @param isParamNameOptional Whether the param names in the signature is optional or not.
     * @return Parsed node
     */
    private STNode parseParameter(STNode leadingComma, SyntaxKind prevParamKind, boolean isParamNameOptional) {
        STToken token = peek();
        return parseParameter(token.kind, prevParamKind, leadingComma, 1, isParamNameOptional);
    }

    private STNode parseParameter(SyntaxKind prevParamKind, STNode leadingComma, int nextTokenOffset,
                                  boolean isParamNameOptional) {
        return parseParameter(peek().kind, prevParamKind, leadingComma, nextTokenOffset, isParamNameOptional);
    }

    // Parse the [annots] part of a parameter, then hand over to the qualifier/type stage.
    private STNode parseParameter(SyntaxKind nextTokenKind, SyntaxKind prevParamKind, STNode leadingComma,
                                  int nextTokenOffset, boolean isParamNameOptional) {
        STNode annots;
        switch (nextTokenKind) {
            case AT_TOKEN:
                annots = parseAnnotations(nextTokenKind);
                nextTokenKind = peek().kind;
                break;
            case PUBLIC_KEYWORD:
            case IDENTIFIER_TOKEN:
                // No annotations present.
                annots = STNodeFactory.createNodeList(new ArrayList<>());
                break;
            default:
                if (nextTokenKind != SyntaxKind.IDENTIFIER_TOKEN && isTypeStartingToken(nextTokenKind)) {
                    annots = STNodeFactory.createNodeList(new ArrayList<>());
                    break;
                }

                STToken token = peek();
                Solution solution = recover(token, ParserRuleContext.PARAMETER_START, prevParamKind, leadingComma,
                        nextTokenOffset, isParamNameOptional);

                if (solution.action == Action.KEEP) {
                    // Treat the current token as the start of the param (no annotations).
                    annots = STNodeFactory.createNodeList(new ArrayList<>());
                    break;
                }

                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }

                // After an insertion the recovered token is at offset 0.
                return parseParameter(solution.tokenKind, prevParamKind, leadingComma, 0, isParamNameOptional);
        }

        // NOTE(review): the offset is hard-coded to 1 here rather than forwarding
        // nextTokenOffset — confirm this is intended.
        return parseParamGivenAnnots(nextTokenKind, prevParamKind, leadingComma, annots, 1, isParamNameOptional);
    }

    private STNode parseParamGivenAnnots(SyntaxKind prevParamKind, STNode leadingComma, STNode annots,
                                         int nextNextTokenOffset, boolean isFuncDef) {
        return parseParamGivenAnnots(peek().kind, prevParamKind, leadingComma, annots, nextNextTokenOffset, isFuncDef);
    }

    // Parse the optional [public] qualifier of a parameter, annotations already consumed.
    private STNode parseParamGivenAnnots(SyntaxKind nextTokenKind, SyntaxKind prevParamKind, STNode leadingComma,
                                         STNode annots, int nextTokenOffset, boolean isParamNameOptional) {
        STNode qualifier;
        switch (nextTokenKind) {
            case PUBLIC_KEYWORD:
                qualifier = parseQualifier();
                break;
            case IDENTIFIER_TOKEN:
                qualifier = STNodeFactory.createEmptyNode();
                break;
            case AT_TOKEN: // Annotations are already processed; an extra '@' falls through to recovery.
            default:
                if (isTypeStartingToken(nextTokenKind) && nextTokenKind != SyntaxKind.IDENTIFIER_TOKEN) {
                    qualifier = STNodeFactory.createEmptyNode();
                    break;
                }

                STToken token = peek();
                Solution solution = recover(token, ParserRuleContext.PARAMETER_WITHOUT_ANNOTS, prevParamKind,
                        leadingComma, annots, nextTokenOffset, isParamNameOptional);

                if (solution.action == Action.KEEP) {
                    qualifier = STNodeFactory.createEmptyNode();
                    break;
                }

                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }

                return parseParamGivenAnnots(solution.tokenKind, prevParamKind, leadingComma, annots, 0,
                        isParamNameOptional);
        }

        return parseParamGivenAnnotsAndQualifier(prevParamKind, leadingComma, annots, qualifier, isParamNameOptional);
    }

    // Parse the type descriptor and the rest of the parameter, then close the param context.
    private STNode parseParamGivenAnnotsAndQualifier(SyntaxKind prevParamKind, STNode leadingComma, STNode annots,
                                                     STNode qualifier, boolean isParamNameOptional) {
        STNode type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER);
        STNode param = parseAfterParamType(prevParamKind, leadingComma, annots, qualifier, type, isParamNameOptional);
        endContext();
        return param;
    }

    private STNode parseAfterParamType(SyntaxKind prevParamKind, STNode leadingComma, STNode annots, STNode qualifier,
                                       STNode type, boolean isParamNameOptional) {
        STToken token = peek();
        return parseAfterParamType(token.kind, prevParamKind, leadingComma, annots, qualifier, type,
                isParamNameOptional);
    }

    // Decide the parameter's shape from the token after its type: '...' makes it a
    // rest-param; an identifier (or, when names are optional, '=' / nothing) leads to a
    // required or defaultable param.
    private STNode parseAfterParamType(SyntaxKind tokenKind, SyntaxKind prevParamKind, STNode leadingComma,
                                       STNode annots, STNode qualifier, STNode type, boolean isParamNameOptional) {
        STNode paramName;
        switch (tokenKind) {
            case ELLIPSIS_TOKEN:
                switchContext(ParserRuleContext.REST_PARAM);
                // A rest-param cannot carry a visibility qualifier.
                reportInvalidQualifier(qualifier);
                STNode ellipsis = parseEllipsis();
                if (isParamNameOptional && peek().kind != SyntaxKind.IDENTIFIER_TOKEN) {
                    paramName = STNodeFactory.createEmptyNode();
                } else {
                    paramName = parseVariableName();
                }
                return STNodeFactory.createRestParameterNode(leadingComma, annots, type, ellipsis, paramName);
            case IDENTIFIER_TOKEN:
                paramName = parseVariableName();
                return parseParameterRhs(prevParamKind, leadingComma, annots, qualifier, type, paramName);
            case EQUAL_TOKEN:
                if (!isParamNameOptional) {
                    break;
                }

                // Nameless defaultable param (only allowed in function-type descriptors).
                paramName = STNodeFactory.createEmptyNode();
                return parseParameterRhs(prevParamKind, leadingComma, annots, qualifier, type, paramName);
            default:
                if (!isParamNameOptional) {
                    break;
                }

                // Nameless param: the type alone is the whole parameter.
                paramName = STNodeFactory.createEmptyNode();
                return parseParameterRhs(prevParamKind, leadingComma, annots, qualifier, type, paramName);
        }

        STToken token = peek();
        Solution solution = recover(token, ParserRuleContext.AFTER_PARAMETER_TYPE, prevParamKind, leadingComma, annots,
                qualifier, type, isParamNameOptional);

        if (solution.action == Action.REMOVE) {
            return solution.recoveredNode;
        }

        return parseAfterParamType(solution.tokenKind, prevParamKind, leadingComma, annots, qualifier, type,
                isParamNameOptional);
    }

    /**
     * Parse ellipsis.
     *
     * @return Parsed node
     */
    private STNode parseEllipsis() {
        STToken token = peek();
        if (token.kind == SyntaxKind.ELLIPSIS_TOKEN) {
            return consume();
        } else {
            Solution sol = recover(token, ParserRuleContext.ELLIPSIS);
            return sol.recoveredNode;
        }
    }

    /**
     * <p>
     * Parse the right hand side of a required/defaultable parameter.
     * </p>
     * <code>parameter-rhs := [= expression]</code>
     *
     * @param leadingComma Comma that precedes this parameter
     * @param prevParamKind Kind of the parameter that precedes current parameter
     * @param annots Annotations attached to the parameter
     * @param qualifier Visibility qualifier
     * @param type Type descriptor
     * @param paramName Name of the parameter
     * @return Parsed parameter node
     */
    private STNode parseParameterRhs(SyntaxKind prevParamKind, STNode leadingComma, STNode annots, STNode qualifier,
                                     STNode type, STNode paramName) {
        STToken token = peek();
        return parseParameterRhs(token.kind, prevParamKind, leadingComma, annots, qualifier, type, paramName);
    }

    private STNode parseParameterRhs(SyntaxKind tokenKind, SyntaxKind prevParamKind, STNode leadingComma,
                                     STNode annots, STNode qualifier, STNode type, STNode paramName) {
        // No default value: this is a required param. A required param is not allowed
        // to follow a defaultable param, so flag that ordering violation.
        if (isEndOfParameter(tokenKind)) {
            if (prevParamKind == SyntaxKind.DEFAULTABLE_PARAM) {
                this.errorHandler.reportInvalidNode(peek(),
                        "cannot have a required parameter after a defaultable parameter");
            }

            return STNodeFactory.createRequiredParameterNode(leadingComma, annots, qualifier, type, paramName);
        } else if (tokenKind == SyntaxKind.EQUAL_TOKEN) {
            // '=' introduces a default value, making this a defaultable param.
            if (prevParamKind == SyntaxKind.REQUIRED_PARAM) {
                switchContext(ParserRuleContext.DEFAULTABLE_PARAM);
            }

            STNode equal = parseAssignOp();
            STNode expr = parseExpression();
            return STNodeFactory.createDefaultableParameterNode(leadingComma, annots, qualifier, type, paramName,
                    equal, expr);
        } else {
            STToken token = peek();
            Solution solution = recover(token, ParserRuleContext.PARAMETER_NAME_RHS, prevParamKind, leadingComma,
                    annots, qualifier, type, paramName);

            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }

            return parseParameterRhs(solution.tokenKind, prevParamKind, leadingComma, annots, qualifier, type,
                    paramName);
        }
    }

    /**
     * Parse comma.
     *
     * @return Parsed node
     */
    private STNode parseComma() {
        STToken token = peek();
        if (token.kind == SyntaxKind.COMMA_TOKEN) {
            return consume();
        } else {
            Solution sol = recover(token, ParserRuleContext.COMMA);
            return sol.recoveredNode;
        }
    }

    /**
     * Parse return type descriptor of a function. A return type descriptor has the following structure.
     *
     * <code>return-type-descriptor := [ returns annots type-descriptor ]</code>
     *
     * @return Parsed node
     */
    private STNode parseFuncReturnTypeDescriptor() {
        return parseFuncReturnTypeDescriptor(peek().kind);
    }

    private STNode parseFuncReturnTypeDescriptor(SyntaxKind nextTokenKind) {
        switch (nextTokenKind) {
            case OPEN_BRACE_TOKEN: // function body block starts: no return type present
            case EQUAL_TOKEN: // external function body starts: no return type present
                return STNodeFactory.createEmptyNode();
            case RETURNS_KEYWORD:
                break;
            default:
                // Tolerate one stray token before 'returns' (e.g. a typo); otherwise
                // treat the return type descriptor as absent.
                STToken nextNextToken = getNextNextToken(nextTokenKind);
                if (nextNextToken.kind == SyntaxKind.RETURNS_KEYWORD) {
                    break;
                }

                return STNodeFactory.createEmptyNode();
        }

        STNode returnsKeyword = parseReturnsKeyword();
        STNode annot = parseAnnotations();
        STNode type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_RETURN_TYPE_DESC);
        return STNodeFactory.createReturnTypeDescriptorNode(returnsKeyword, annot, type);
    }

    /**
     * Parse 'returns' keyword.
     *
     * @return Return-keyword node
     */
    private STNode parseReturnsKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.RETURNS_KEYWORD) {
            return consume();
        } else {
            Solution sol = recover(token, ParserRuleContext.RETURNS_KEYWORD);
            return sol.recoveredNode;
        }
    }

    /**
     * <p>
     * Parse a type descriptor. A type descriptor has the following structure.
     * </p>
     * <code>type-descriptor :=
     * &nbsp;simple-type-descriptor<br/>
     * &nbsp;| structured-type-descriptor<br/>
     * &nbsp;| behavioral-type-descriptor<br/>
     * &nbsp;| singleton-type-descriptor<br/>
     * &nbsp;| union-type-descriptor<br/>
     * &nbsp;| optional-type-descriptor<br/>
     * &nbsp;| any-type-descriptor<br/>
     * &nbsp;| anydata-type-descriptor<br/>
     * &nbsp;| byte-type-descriptor<br/>
     * &nbsp;| json-type-descriptor<br/>
     * &nbsp;| type-descriptor-reference<br/>
     * &nbsp;| ( type-descriptor )
     * <br/>
     * type-descriptor-reference := qualified-identifier</code>
     *
     * @return Parsed node
     */
    private STNode parseTypeDescriptor(ParserRuleContext context) {
        return parseTypeDescriptor(context, false);
    }

    private STNode parseTypeDescriptor(ParserRuleContext context, boolean isTypedBindingPattern) {
        startContext(context);
        STNode typeDesc = parseTypeDescriptorInternal(context, isTypedBindingPattern);
        endContext();
        return typeDesc;
    }

    private STNode parseTypeDescriptorInternal(ParserRuleContext context) {
        return parseTypeDescriptorInternal(context, false);
    }

    private STNode parseTypeDescriptorInternal(ParserRuleContext context, boolean isTypedBindingPattern) {
        STToken token = peek();
        // Parse the base type, then fold in any trailing '?', '[]', '|' or '&' parts.
        STNode typeDesc = parseTypeDescriptorInternal(token.kind, context);
        return parseComplexTypeDescriptor(typeDesc, context, isTypedBindingPattern);
    }

    /**
     * This will handle the parsing of optional,array,union type desc to infinite length.
     *
     * @param typeDesc Base type descriptor already parsed
     *
     * @return Parsed type descriptor node
     */
    private STNode parseComplexTypeDescriptor(STNode typeDesc, ParserRuleContext context,
                                              boolean isTypedBindingPattern) {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case QUESTION_MARK_TOKEN:
                // Optional type descriptor: T?
                return parseComplexTypeDescriptor(parseOptionalTypeDescriptor(typeDesc), context,
                        isTypedBindingPattern);
            case OPEN_BRACKET_TOKEN:
                // In a typed-binding-pattern, '[' belongs to the binding pattern, not the type.
                if (isTypedBindingPattern) {
                    return typeDesc;
                }
                // Array type descriptor: T[]
                return parseComplexTypeDescriptor(parseArrayTypeDescriptor(typeDesc), context, isTypedBindingPattern);
            case PIPE_TOKEN:
                // Union type descriptor: T1 | T2
                return parseUnionTypeDescriptor(typeDesc, context);
            case BITWISE_AND_TOKEN:
                // Intersection type descriptor: T1 & T2
                return parseIntersectionTypeDescriptor(typeDesc, context);
            default:
                return typeDesc;
        }
    }

    /**
     * <p>
     * Parse a type descriptor, given the next token kind.
     * </p>
     * If the preceding token is <code>?</code> then it is an optional type descriptor
     *
     * @param tokenKind Next token kind
     * @param context Current context
     * @return Parsed node
     */
    private STNode parseTypeDescriptorInternal(SyntaxKind tokenKind, ParserRuleContext context) {
        switch (tokenKind) {
            case IDENTIFIER_TOKEN:
                return parseTypeReference();
            case RECORD_KEYWORD:
                return parseRecordTypeDescriptor();
            case OBJECT_KEYWORD:
            case ABSTRACT_KEYWORD:
            case CLIENT_KEYWORD:
                return parseObjectTypeDescriptor();
            case OPEN_PAREN_TOKEN:
                // Either the nil type '()' or a parenthesised type '(T)'.
                return parseNilOrParenthesisedTypeDesc();
            case MAP_KEYWORD: // map type desc
            case FUTURE_KEYWORD: // future type desc
            case TYPEDESC_KEYWORD: // typedesc type desc
                return parseParameterizedTypeDescriptor();
            case ERROR_KEYWORD:
                return parseErrorTypeDescriptor();
            case STREAM_KEYWORD:
                return parseStreamTypeDescriptor();
            case TABLE_KEYWORD:
                return parseTableTypeDescriptor();
            case FUNCTION_KEYWORD:
                return parseFunctionTypeDesc();
            case OPEN_BRACKET_TOKEN:
                return parseTupleTypeDesc();
            default:
                if (isSingletonTypeDescStart(tokenKind, true)) {
                    return parseSingletonTypeDesc();
                }
                if (isSimpleType(tokenKind)) {
                    return parseSimpleTypeDescriptor();
                }

                STToken token = peek();
                Solution solution = recover(token, ParserRuleContext.TYPE_DESCRIPTOR, context);

                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }

                return parseTypeDescriptorInternal(solution.tokenKind, context);
        }
    }

    // Parse a construct starting with '(': either the nil type '()' or '(T)'.
    private STNode parseNilOrParenthesisedTypeDesc() {
        STNode openParen = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS);
        return parseNilOrParenthesisedTypeDescRhs(openParen);
    }

    private STNode parseNilOrParenthesisedTypeDescRhs(STNode openParen) {
        return parseNilOrParenthesisedTypeDescRhs(peek().kind, openParen);
    }

    private STNode parseNilOrParenthesisedTypeDescRhs(SyntaxKind nextTokenKind, STNode openParen) {
        STNode closeParen;
        switch (nextTokenKind) {
            case CLOSE_PAREN_TOKEN:
                // '()' is the nil type descriptor.
                closeParen = parseCloseParenthesis();
                return STNodeFactory.createNilTypeDescriptorNode(openParen, closeParen);
            default:
                if (isTypeStartingToken(nextTokenKind)) {
                    STNode typedesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_PARENTHESIS);
                    closeParen = parseCloseParenthesis();
                    return STNodeFactory.createParenthesisedTypeDescriptorNode(openParen, typedesc, closeParen);
                }

                STToken token = peek();
                Solution solution = recover(token, ParserRuleContext.NIL_OR_PARENTHESISED_TYPE_DESC_RHS, openParen);

                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }

                return parseNilOrParenthesisedTypeDescRhs(solution.tokenKind, openParen);
        }
    }

    /**
     * Parse simple type descriptor.
     *
     * @return Parsed node
     */
    private STNode parseSimpleTypeDescriptor() {
        STToken node = peek();
        if (isSimpleType(node.kind)) {
            STToken token = consume();
            SyntaxKind typeKind = getTypeSyntaxKind(token.kind);
            return STNodeFactory.createBuiltinSimpleNameReferenceNode(typeKind, token);
        } else {
            Solution sol = recover(peek(), ParserRuleContext.SIMPLE_TYPE_DESCRIPTOR);
            return sol.recoveredNode;
        }
    }

    /**
     * <p>
     * Parse function body. A function body has the following structure.
* </p> * <code> * function-body := function-body-block | external-function-body * external-function-body := = annots external ; * function-body-block := { [default-worker-init, named-worker-decl+] default-worker } * </code> * * @param isObjectMethod Flag indicating whether this is an object-method * @return Parsed node */ private STNode parseFunctionBody(boolean isObjectMethod) { STToken token = peek(); return parseFunctionBody(token.kind, isObjectMethod); } /** * Parse function body, given the next token kind. * * @param tokenKind Next token kind * @param isObjectMethod Flag indicating whether this is an object-method * @return Parsed node */ protected STNode parseFunctionBody(SyntaxKind tokenKind, boolean isObjectMethod) { switch (tokenKind) { case EQUAL_TOKEN: return parseExternalFunctionBody(); case OPEN_BRACE_TOKEN: return parseFunctionBodyBlock(false); case RIGHT_DOUBLE_ARROW_TOKEN: return parseExpressionFuncBody(false); case SEMICOLON_TOKEN: if (isObjectMethod) { return parseSemicolon(); } default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.FUNC_BODY, isObjectMethod); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } if (solution.tokenKind == SyntaxKind.NONE) { return STNodeFactory.createMissingToken(solution.tokenKind); } return parseFunctionBody(solution.tokenKind, isObjectMethod); } } /** * <p> * Parse function body block. A function body block has the following structure. 
     * </p>
     *
     * <code>
     * function-body-block := { [default-worker-init, named-worker-decl+] default-worker }<br/>
     * default-worker-init := sequence-stmt<br/>
     * default-worker := sequence-stmt<br/>
     * named-worker-decl := worker worker-name return-type-descriptor { sequence-stmt }<br/>
     * worker-name := identifier<br/>
     * </code>
     *
     * @param isAnonFunc Flag indicating whether the func body belongs to an anonymous function
     * @return Parsed node
     */
    private STNode parseFunctionBodyBlock(boolean isAnonFunc) {
        startContext(ParserRuleContext.FUNC_BODY_BLOCK);
        STNode openBrace = parseOpenBrace();
        STToken token = peek();

        // Statements before any named worker: the default-worker init section.
        ArrayList<STNode> firstStmtList = new ArrayList<>();
        // Named worker declarations, if any.
        ArrayList<STNode> workers = new ArrayList<>();
        // Statements after the named workers: the default-worker section.
        ArrayList<STNode> secondStmtList = new ArrayList<>();

        // State machine: DEFAULT_WORKER_INIT -> NAMED_WORKERS -> DEFAULT_WORKER.
        ParserRuleContext currentCtx = ParserRuleContext.DEFAULT_WORKER_INIT;
        boolean hasNamedWorkers = false;
        while (!isEndOfFuncBodyBlock(token.kind, isAnonFunc)) {
            STNode stmt = parseStatement();
            if (stmt == null) {
                break;
            }

            switch (currentCtx) {
                case DEFAULT_WORKER_INIT:
                    if (stmt.kind != SyntaxKind.NAMED_WORKER_DECLARATION) {
                        firstStmtList.add(stmt);
                        break;
                    }
                    // First named worker seen: switch state and fall through to collect it.
                    currentCtx = ParserRuleContext.NAMED_WORKERS;
                    hasNamedWorkers = true;
                    // fall through
                case NAMED_WORKERS:
                    if (stmt.kind == SyntaxKind.NAMED_WORKER_DECLARATION) {
                        workers.add(stmt);
                        break;
                    }
                    // Non-worker statement ends the named-worker section; fall through.
                    currentCtx = ParserRuleContext.DEFAULT_WORKER;
                    // fall through
                case DEFAULT_WORKER:
                default:
                    // Named workers may not appear after the default-worker section starts.
                    if (stmt.kind == SyntaxKind.NAMED_WORKER_DECLARATION) {
                        this.errorHandler.reportInvalidNode(null, "named-workers are not allowed here");
                        break;
                    }
                    secondStmtList.add(stmt);
                    break;
            }
            token = peek();
        }

        STNode namedWorkersList;
        STNode statements;
        if (hasNamedWorkers) {
            STNode workerInitStatements = STNodeFactory.createNodeList(firstStmtList);
            STNode namedWorkers = STNodeFactory.createNodeList(workers);
            namedWorkersList = STNodeFactory.createNamedWorkerDeclarator(workerInitStatements, namedWorkers);
            statements = STNodeFactory.createNodeList(secondStmtList);
        } else {
            // No workers: everything collected so far is the default worker body.
            namedWorkersList = STNodeFactory.createEmptyNode();
            statements = STNodeFactory.createNodeList(firstStmtList);
        }

        STNode closeBrace = parseCloseBrace();
        endContext();
        return STNodeFactory.createFunctionBodyBlockNode(openBrace, namedWorkersList, statements, closeBrace);
    }

    /**
     * Check whether the next token ends a function body block. For anonymous functions,
     * additional delimiters (of the enclosing expression) also terminate the body.
     *
     * @param nextTokenKind Next token kind
     * @param isAnonFunc Whether the body belongs to an anonymous function
     * @return <code>true</code> if the token ends the body block
     */
    private boolean isEndOfFuncBodyBlock(SyntaxKind nextTokenKind, boolean isAnonFunc) {
        if (isAnonFunc) {
            switch (nextTokenKind) {
                case CLOSE_BRACE_TOKEN:
                case CLOSE_PAREN_TOKEN:
                case CLOSE_BRACKET_TOKEN:
                case OPEN_BRACE_TOKEN:
                case SEMICOLON_TOKEN:
                case COMMA_TOKEN:
                case PUBLIC_KEYWORD:
                case EOF_TOKEN:
                case EQUAL_TOKEN:
                case BACKTICK_TOKEN:
                    return true;
                default:
                    break;
            }
        }

        return isEndOfStatements();
    }

    // Checks whether the next token ends a record type descriptor body.
    private boolean isEndOfRecordTypeNode(SyntaxKind nextTokenKind) {
        switch (nextTokenKind) {
            // NOTE(review): these labels fall through to default, so every kind takes
            // the same path — the explicit cases are currently redundant; confirm
            // whether they were meant to return true directly.
            case TYPE_KEYWORD:
            case PUBLIC_KEYWORD:
            default:
                return endOfModuleLevelNode(1);
        }
    }

    // Checks whether the next token ends an object type descriptor body.
    private boolean isEndOfObjectTypeNode() {
        return endOfModuleLevelNode(1, true);
    }

    // Checks whether the next token ends a statement sequence.
    private boolean isEndOfStatements() {
        switch (peek().kind) {
            case RESOURCE_KEYWORD:
                return true;
            default:
                return endOfModuleLevelNode(1);
        }
    }

    // Convenience overload: not inside an object body.
    private boolean endOfModuleLevelNode(int peekIndex) {
        return endOfModuleLevelNode(peekIndex, false);
    }

    /**
     * Check whether the token at the given lookahead position starts a new module-level
     * construct (and hence terminates the node currently being parsed).
     *
     * @param peekIndex Lookahead position to inspect
     * @param isObject Whether the current node is an object body (where functions are members)
     * @return <code>true</code> if a module-level construct starts there
     */
    private boolean endOfModuleLevelNode(int peekIndex, boolean isObject) {
        switch (peek(peekIndex).kind) {
            case EOF_TOKEN:
            case CLOSE_BRACE_TOKEN:
            case CLOSE_BRACE_PIPE_TOKEN:
            case IMPORT_KEYWORD:
            case CONST_KEYWORD:
            case ANNOTATION_KEYWORD:
            case LISTENER_KEYWORD:
                return true;
            case SERVICE_KEYWORD:
                return isServiceDeclStart(ParserRuleContext.OBJECT_MEMBER, 1);
            case PUBLIC_KEYWORD:
                // Skip the visibility qualifier and check what follows it.
                return endOfModuleLevelNode(peekIndex + 1, isObject);
            case FUNCTION_KEYWORD:
                if (isObject) {
                    // Methods are object members, not module-level constructs.
                    return false;
                }

                // Only a named function (keyword followed by identifier) starts a new
                // module-level definition.
                return peek(peekIndex + 1).kind == SyntaxKind.IDENTIFIER_TOKEN;
            default:
                return false;
        }
    }

    /**
     * Check whether the given token is an end of a parameter.
     *
     * @param tokenKind Next token kind
     * @return <code>true</code> if the token represents an end of a parameter.
     *         <code>false</code> otherwise
     */
    private boolean isEndOfParameter(SyntaxKind tokenKind) {
        switch (tokenKind) {
            case CLOSE_PAREN_TOKEN:
            case CLOSE_BRACKET_TOKEN:
            case SEMICOLON_TOKEN:
            case COMMA_TOKEN:
            case RETURNS_KEYWORD:
            case TYPE_KEYWORD:
            case IF_KEYWORD:
            case WHILE_KEYWORD:
            case AT_TOKEN:
                return true;
            default:
                return endOfModuleLevelNode(1);
        }
    }

    /**
     * Check whether the given token is an end of a parameter-list.
     *
     * @param tokenKind Next token kind
     * @return <code>true</code> if the token represents an end of a parameter-list. <code>false</code> otherwise
     */
    private boolean isEndOfParametersList(SyntaxKind tokenKind) {
        switch (tokenKind) {
            case CLOSE_PAREN_TOKEN:
            case SEMICOLON_TOKEN:
            case RETURNS_KEYWORD:
            case TYPE_KEYWORD:
            case IF_KEYWORD:
            case WHILE_KEYWORD:
            case OPEN_BRACE_TOKEN:
                return true;
            default:
                return endOfModuleLevelNode(1);
        }
    }

    /**
     * Parse type reference or variable reference.
     *
     * @return Parsed node
     */
    private STNode parseStatementStartIdentifier() {
        return parseQualifiedIdentifier(ParserRuleContext.STATEMENT_START_IDENTIFIER);
    }

    /**
     * Parse variable name.
     *
     * @return Parsed node
     */
    private STNode parseVariableName() {
        STToken token = peek();
        return parseVariableName(token.kind);
    }

    /**
     * Parse variable name, given the next token kind.
     *
     * @return Parsed node
     */
    private STNode parseVariableName(SyntaxKind tokenKind) {
        if (tokenKind == SyntaxKind.IDENTIFIER_TOKEN) {
            return consume();
        } else {
            Solution sol = recover(peek(), ParserRuleContext.VARIABLE_NAME);
            return sol.recoveredNode;
        }
    }

    /**
     * Parse open brace.
     *
     * @return Parsed node
     */
    private STNode parseOpenBrace() {
        STToken token = peek();
        if (token.kind == SyntaxKind.OPEN_BRACE_TOKEN) {
            return consume();
        } else {
            Solution sol = recover(token, ParserRuleContext.OPEN_BRACE);
            return sol.recoveredNode;
        }
    }

    /**
     * Parse close brace.
* * @return Parsed node */ private STNode parseCloseBrace() { STToken token = peek(); if (token.kind == SyntaxKind.CLOSE_BRACE_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.CLOSE_BRACE); return sol.recoveredNode; } } /** * <p> * Parse external function body. An external function body has the following structure. * </p> * <code> * external-function-body := = annots external ; * </code> * * @return Parsed node */ private STNode parseExternalFunctionBody() { startContext(ParserRuleContext.EXTERNAL_FUNC_BODY); STNode assign = parseAssignOp(); STNode annotation = parseAnnotations(); STNode externalKeyword = parseExternalKeyword(); STNode semicolon = parseSemicolon(); endContext(); return STNodeFactory.createExternalFunctionBodyNode(assign, annotation, externalKeyword, semicolon); } /** * Parse semicolon. * * @return Parsed node */ private STNode parseSemicolon() { STToken token = peek(); if (token.kind == SyntaxKind.SEMICOLON_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.SEMICOLON); return sol.recoveredNode; } } /** * Parse <code>external</code> keyword. * * @return Parsed node */ private STNode parseExternalKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.EXTERNAL_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.EXTERNAL_KEYWORD); return sol.recoveredNode; } } /* * Operators */ /** * Parse assign operator. * * @return Parsed node */ private STNode parseAssignOp() { STToken token = peek(); if (token.kind == SyntaxKind.EQUAL_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.ASSIGN_OP); return sol.recoveredNode; } } /** * Parse binary operator. 
     *
     * @return Parsed node
     */
    private STNode parseBinaryOperator() {
        STToken token = peek();
        if (isBinaryOperator(token.kind)) {
            return consume();
        } else {
            Solution sol = recover(token, ParserRuleContext.BINARY_OPERATOR);
            return sol.recoveredNode;
        }
    }

    /**
     * Check whether the given token kind is a binary operator.
     *
     * @param kind STToken kind
     * @return <code>true</code> if the token kind refers to a binary operator. <code>false</code> otherwise
     */
    private boolean isBinaryOperator(SyntaxKind kind) {
        switch (kind) {
            case PLUS_TOKEN:
            case MINUS_TOKEN:
            case SLASH_TOKEN:
            case ASTERISK_TOKEN:
            case GT_TOKEN:
            case LT_TOKEN:
            case DOUBLE_EQUAL_TOKEN:
            case TRIPPLE_EQUAL_TOKEN:
            case LT_EQUAL_TOKEN:
            case GT_EQUAL_TOKEN:
            case NOT_EQUAL_TOKEN:
            case NOT_DOUBLE_EQUAL_TOKEN:
            case BITWISE_AND_TOKEN:
            case BITWISE_XOR_TOKEN:
            case PIPE_TOKEN:
            case LOGICAL_AND_TOKEN:
            case LOGICAL_OR_TOKEN:
            case PERCENT_TOKEN:
            case DOUBLE_LT_TOKEN:
            case DOUBLE_GT_TOKEN:
            case TRIPPLE_GT_TOKEN:
            case ELLIPSIS_TOKEN:
            case DOUBLE_DOT_LT_TOKEN:
            case ELVIS_TOKEN:
                return true;
            default:
                return false;
        }
    }

    /**
     * Get the precedence of a given operator.
     *
     * @param binaryOpKind Operator kind
     * @return Precedence of the given operator
     */
    private OperatorPrecedence getOpPrecedence(SyntaxKind binaryOpKind) {
        switch (binaryOpKind) {
            case ASTERISK_TOKEN: // multiplication
            case SLASH_TOKEN: // division
            case PERCENT_TOKEN: // remainder
                return OperatorPrecedence.MULTIPLICATIVE;
            case PLUS_TOKEN:
            case MINUS_TOKEN:
                return OperatorPrecedence.ADDITIVE;
            case GT_TOKEN:
            case LT_TOKEN:
            case GT_EQUAL_TOKEN:
            case LT_EQUAL_TOKEN:
            case IS_KEYWORD:
                return OperatorPrecedence.BINARY_COMPARE;
            case DOT_TOKEN:
            case OPEN_BRACKET_TOKEN:
            case OPEN_PAREN_TOKEN:
            case ANNOT_CHAINING_TOKEN:
            case OPTIONAL_CHAINING_TOKEN:
            case DOT_LT_TOKEN:
            case SLASH_LT_TOKEN:
            case DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN:
            case SLASH_ASTERISK_TOKEN:
                return OperatorPrecedence.MEMBER_ACCESS;
            case DOUBLE_EQUAL_TOKEN:
            case TRIPPLE_EQUAL_TOKEN:
            case NOT_EQUAL_TOKEN:
            case NOT_DOUBLE_EQUAL_TOKEN:
                return OperatorPrecedence.EQUALITY;
            case BITWISE_AND_TOKEN:
                return OperatorPrecedence.BITWISE_AND;
            case BITWISE_XOR_TOKEN:
                return OperatorPrecedence.BITWISE_XOR;
            case PIPE_TOKEN:
                return OperatorPrecedence.BITWISE_OR;
            case LOGICAL_AND_TOKEN:
                return OperatorPrecedence.LOGICAL_AND;
            case LOGICAL_OR_TOKEN:
                return OperatorPrecedence.LOGICAL_OR;
            case RIGHT_ARROW_TOKEN:
                return OperatorPrecedence.REMOTE_CALL_ACTION;
            case RIGHT_DOUBLE_ARROW_TOKEN:
            case SYNC_SEND_TOKEN:
                return OperatorPrecedence.ACTION;
            case DOUBLE_LT_TOKEN:
            case DOUBLE_GT_TOKEN:
            case TRIPPLE_GT_TOKEN:
                return OperatorPrecedence.SHIFT;
            case ELLIPSIS_TOKEN:
            case DOUBLE_DOT_LT_TOKEN:
                return OperatorPrecedence.RANGE;
            case ELVIS_TOKEN:
                return OperatorPrecedence.ELVIS_CONDITIONAL;
            case QUESTION_MARK_TOKEN:
            case COLON_TOKEN:
                return OperatorPrecedence.CONDITIONAL;
            default:
                throw new UnsupportedOperationException("Unsupported binary operator '" + binaryOpKind + "'");
        }
    }

    /**
     * <p>
     * Get the operator kind to insert during recovery, given the precedence level.
     * </p>
     *
     * @param opPrecedenceLevel Precedence of the given operator
     * @return Kind of the operator to insert
     */
    private SyntaxKind getBinaryOperatorKindToInsert(OperatorPrecedence opPrecedenceLevel) {
        switch (opPrecedenceLevel) {
            case UNARY:
            case ACTION:
            case EXPRESSION_ACTION:
            case REMOTE_CALL_ACTION:
            case ANON_FUNC_OR_LET:
            case QUERY:
                // Since these levels have no binary operator of their own, fall back to a
                // representative operator of the next applicable level.
            case MULTIPLICATIVE:
                return SyntaxKind.ASTERISK_TOKEN;
            case ADDITIVE:
                return SyntaxKind.PLUS_TOKEN;
            case SHIFT:
                return SyntaxKind.DOUBLE_LT_TOKEN;
            case RANGE:
                return SyntaxKind.ELLIPSIS_TOKEN;
            case BINARY_COMPARE:
                return SyntaxKind.LT_TOKEN;
            case EQUALITY:
                return SyntaxKind.DOUBLE_EQUAL_TOKEN;
            case BITWISE_AND:
                return SyntaxKind.BITWISE_AND_TOKEN;
            case BITWISE_XOR:
                return SyntaxKind.BITWISE_XOR_TOKEN;
            case BITWISE_OR:
                return SyntaxKind.PIPE_TOKEN;
            case LOGICAL_AND:
                return SyntaxKind.LOGICAL_AND_TOKEN;
            case LOGICAL_OR:
                return SyntaxKind.LOGICAL_OR_TOKEN;
            case ELVIS_CONDITIONAL:
                return SyntaxKind.ELVIS_TOKEN;
            default:
                throw new UnsupportedOperationException(
                        "Unsupported operator precedence level'" + opPrecedenceLevel + "'");
        }
    }

    /**
     * <p>
     * Parse a module type definition.
     * </p>
     * <code>module-type-defn := metadata [public] type identifier type-descriptor ;</code>
     *
     * @param metadata Metadata
     * @param qualifier Visibility qualifier
     * @return Parsed node
     */
    private STNode parseModuleTypeDefinition(STNode metadata, STNode qualifier) {
        startContext(ParserRuleContext.MODULE_TYPE_DEFINITION);
        STNode typeKeyword = parseTypeKeyword();
        STNode typeName = parseTypeName();
        STNode typeDescriptor = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_DEF);
        STNode semicolon = parseSemicolon();
        endContext();
        return STNodeFactory.createTypeDefinitionNode(metadata, qualifier, typeKeyword, typeName, typeDescriptor,
                semicolon);
    }

    /**
     * Parse type keyword.
     *
     * @return Parsed node
     */
    private STNode parseTypeKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.TYPE_KEYWORD) {
            return consume();
        } else {
            Solution sol = recover(token, ParserRuleContext.TYPE_KEYWORD);
            return sol.recoveredNode;
        }
    }

    /**
     * Parse type name.
     *
     * @return Parsed node
     */
    private STNode parseTypeName() {
        STToken token = peek();
        if (token.kind == SyntaxKind.IDENTIFIER_TOKEN) {
            return consume();
        } else {
            Solution sol = recover(token, ParserRuleContext.TYPE_NAME);
            return sol.recoveredNode;
        }
    }

    /**
     * <p>
     * Parse record type descriptor. A record type descriptor body has the following structure.
     * </p>
     *
     * <code>record-type-descriptor := inclusive-record-type-descriptor | exclusive-record-type-descriptor
     * <br/><br/>inclusive-record-type-descriptor := record { field-descriptor* }
     * <br/><br/>exclusive-record-type-descriptor := record {| field-descriptor* [record-rest-descriptor] |}
     * </code>
     *
     * @return Parsed node
     */
    private STNode parseRecordTypeDescriptor() {
        startContext(ParserRuleContext.RECORD_TYPE_DESCRIPTOR);
        STNode recordKeyword = parseRecordKeyword();
        STNode bodyStartDelimiter = parseRecordBodyStartDelimiter();

        // "{" starts an inclusive record; "{|" starts an exclusive (closed) record.
        boolean isInclusive = bodyStartDelimiter.kind == SyntaxKind.OPEN_BRACE_TOKEN;
        STNode fields = parseFieldDescriptors(isInclusive);

        STNode bodyEndDelimiter = parseRecordBodyCloseDelimiter(bodyStartDelimiter.kind);
        endContext();
        return STNodeFactory.createRecordTypeDescriptorNode(recordKeyword, bodyStartDelimiter, fields,
                bodyEndDelimiter);
    }

    /**
     * Parse record body start delimiter.
     *
     * @return Parsed node
     */
    private STNode parseRecordBodyStartDelimiter() {
        STToken token = peek();
        return parseRecordBodyStartDelimiter(token.kind);
    }

    // Parse the record body start delimiter ("{" or "{|"), given the next token kind.
    private STNode parseRecordBodyStartDelimiter(SyntaxKind kind) {
        switch (kind) {
            case OPEN_BRACE_PIPE_TOKEN:
                return parseClosedRecordBodyStart();
            case OPEN_BRACE_TOKEN:
                return parseOpenBrace();
            default:
                STToken token = peek();
                Solution solution = recover(token, ParserRuleContext.RECORD_BODY_START);

                // If the recovery action was token removal, the rule was re-parsed with the
                // next token, so the recovered node is the result.
                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }

                return parseRecordBodyStartDelimiter(solution.tokenKind);
        }
    }

    /**
     * Parse closed-record body start delimiter.
     *
     * @return Parsed node
     */
    private STNode parseClosedRecordBodyStart() {
        STToken token = peek();
        if (token.kind == SyntaxKind.OPEN_BRACE_PIPE_TOKEN) {
            return consume();
        } else {
            Solution sol = recover(token, ParserRuleContext.CLOSED_RECORD_BODY_START);
            return sol.recoveredNode;
        }
    }

    /**
     * Parse record body close delimiter, matching the delimiter that opened the body.
     *
     * @return Parsed node
     */
    private STNode parseRecordBodyCloseDelimiter(SyntaxKind startingDelimeter) {
        switch (startingDelimeter) {
            case OPEN_BRACE_PIPE_TOKEN:
                return parseClosedRecordBodyEnd();
            case OPEN_BRACE_TOKEN:
                return parseCloseBrace();
            default:
                STToken token = peek();
                Solution solution = recover(token, ParserRuleContext.RECORD_BODY_END);

                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }

                return parseRecordBodyCloseDelimiter(solution.tokenKind);
        }
    }

    /**
     * Parse closed-record body end delimiter.
     *
     * @return Parsed node
     */
    private STNode parseClosedRecordBodyEnd() {
        STToken token = peek();
        if (token.kind == SyntaxKind.CLOSE_BRACE_PIPE_TOKEN) {
            return consume();
        } else {
            Solution sol = recover(token, ParserRuleContext.CLOSED_RECORD_BODY_END);
            return sol.recoveredNode;
        }
    }

    /**
     * Parse record keyword.
     *
     * @return Parsed node
     */
    private STNode parseRecordKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.RECORD_KEYWORD) {
            return consume();
        } else {
            Solution sol = recover(token, ParserRuleContext.RECORD_KEYWORD);
            return sol.recoveredNode;
        }
    }

    /**
     * <p>
     * Parse field descriptors.
     * </p>
     *
     * @param isInclusive Whether the record body is inclusive ("{ }") rather than exclusive ("{| |}")
     * @return Parsed node
     */
    private STNode parseFieldDescriptors(boolean isInclusive) {
        ArrayList<STNode> recordFields = new ArrayList<>();
        STToken token = peek();
        boolean endOfFields = false;
        while (!isEndOfRecordTypeNode(token.kind)) {
            STNode field = parseFieldOrRestDescriptor(isInclusive);
            if (field == null) {
                endOfFields = true;
                break;
            }
            recordFields.add(field);
            token = peek();

            // A rest descriptor must be the last entry; stop collecting after it.
            if (field.kind == SyntaxKind.RECORD_REST_TYPE) {
                break;
            }
        }

        // Consume (and report) any fields that illegally follow the rest descriptor.
        while (!endOfFields && !isEndOfRecordTypeNode(token.kind)) {
            parseFieldOrRestDescriptor(isInclusive);
            this.errorHandler.reportInvalidNode(token, "cannot have more fields after the rest type descriptor");
            token = peek();
        }

        return STNodeFactory.createNodeList(recordFields);
    }

    /**
     * <p>
     * Parse field descriptor or rest descriptor.
     * </p>
     *
     * <code>
     * <br/><br/>field-descriptor := individual-field-descriptor | record-type-reference
     * <br/><br/><br/>individual-field-descriptor := metadata type-descriptor field-name [? | default-value] ;
     * <br/><br/>field-name := identifier
     * <br/><br/>default-value := = expression
     * <br/><br/>record-type-reference := * type-reference ;
     * <br/><br/>record-rest-descriptor := type-descriptor ...
     * ;
     * </code>
     *
     * @return Parsed node
     */
    private STNode parseFieldOrRestDescriptor(boolean isInclusive) {
        return parseFieldOrRestDescriptor(peek().kind, isInclusive);
    }

    // Parse a field descriptor or rest descriptor, given the next token kind.
    private STNode parseFieldOrRestDescriptor(SyntaxKind nextTokenKind, boolean isInclusive) {
        switch (nextTokenKind) {
            case CLOSE_BRACE_TOKEN:
            case CLOSE_BRACE_PIPE_TOKEN:
                // End of the record body: signal the caller with null.
                return null;
            case ASTERISK_TOKEN:
                // record-type-reference: "* type-reference ;"
                startContext(ParserRuleContext.RECORD_FIELD);
                STNode asterisk = consume();
                STNode type = parseTypeReference();
                STNode semicolonToken = parseSemicolon();
                endContext();
                return STNodeFactory.createTypeReferenceNode(asterisk, type, semicolonToken);
            case AT_TOKEN:
                // Annotated field: parse metadata first, then the field.
                startContext(ParserRuleContext.RECORD_FIELD);
                STNode metadata = parseMetaData(nextTokenKind);
                type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_RECORD_FIELD);
                STNode fieldOrRestDesc = parseFieldDescriptor(isInclusive, type, metadata);
                endContext();
                return fieldOrRestDesc;
            default:
                if (isTypeStartingToken(nextTokenKind)) {
                    // Plain field: no metadata.
                    startContext(ParserRuleContext.RECORD_FIELD);
                    metadata = createEmptyMetadata();
                    type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_RECORD_FIELD);
                    fieldOrRestDesc = parseFieldDescriptor(isInclusive, type, metadata);
                    endContext();
                    return fieldOrRestDesc;
                }

                STToken token = peek();
                Solution solution = recover(token, ParserRuleContext.RECORD_FIELD_OR_RECORD_END, isInclusive);

                // If the recovery action was token removal, the rule was re-parsed with the
                // next token, so the recovered node is the result.
                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }

                return parseFieldOrRestDescriptor(solution.tokenKind, isInclusive);
        }
    }

    // Parse the remainder of a field descriptor. Only exclusive records may contain a
    // rest descriptor, hence the different paths.
    private STNode parseFieldDescriptor(boolean isInclusive, STNode type, STNode metadata) {
        if (isInclusive) {
            STNode fieldName = parseVariableName();
            return parseFieldDescriptorRhs(metadata, type, fieldName);
        } else {
            return parseFieldOrRestDescriptorRhs(metadata, type);
        }
    }

    /**
     * Parse type reference.
     * <code>type-reference := identifier | qualified-identifier</code>
     *
     * @return Type reference node
     */
    private STNode parseTypeReference() {
        return parseQualifiedIdentifier(ParserRuleContext.TYPE_REFERENCE);
    }

    /**
     * Parse identifier or qualified identifier.
     *
     * @return Identifier node
     */
    private STNode parseQualifiedIdentifier(ParserRuleContext currentCtx) {
        STToken token = peek();
        STNode typeRefOrPkgRef;
        if (token.kind == SyntaxKind.IDENTIFIER_TOKEN) {
            typeRefOrPkgRef = consume();
        } else {
            Solution sol = recover(token, currentCtx);
            if (sol.action == Action.REMOVE) {
                return sol.recoveredNode;
            }

            typeRefOrPkgRef = sol.recoveredNode;
        }

        return parseQualifiedIdentifier(typeRefOrPkgRef);
    }

    /**
     * Parse identifier or qualified identifier, given the starting identifier.
     *
     * @param identifier Starting identifier
     * @return Parse node
     */
    private STNode parseQualifiedIdentifier(STNode identifier) {
        STToken nextToken = peek(1);
        if (nextToken.kind != SyntaxKind.COLON_TOKEN) {
            // No colon: a simple (unqualified) name reference.
            return STNodeFactory.createSimpleNameReferenceNode(identifier);
        }

        STToken nextNextToken = peek(2);
        if (nextNextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {
            STToken colon = consume();
            STToken varOrFuncName = consume();
            return STNodeFactory.createQualifiedNameReferenceNode(identifier, colon, varOrFuncName);
        } else {
            // "identifier :" not followed by an identifier — drop the invalid token and retry.
            this.errorHandler.removeInvalidToken();
            return parseQualifiedIdentifier(identifier);
        }
    }

    /**
     * Parse RHS of a field or rest type descriptor.
     *
     * @param metadata Metadata
     * @param type Type descriptor
     * @return Parsed node
     */
    private STNode parseFieldOrRestDescriptorRhs(STNode metadata, STNode type) {
        STToken token = peek();
        return parseFieldOrRestDescriptorRhs(token.kind, metadata, type);
    }

    // Decide between a rest descriptor ("type ... ;") and a named field, given the
    // token that follows the type descriptor.
    private STNode parseFieldOrRestDescriptorRhs(SyntaxKind kind, STNode metadata, STNode type) {
        switch (kind) {
            case ELLIPSIS_TOKEN:
                // record-rest-descriptor := type-descriptor ... ;
                STNode ellipsis = parseEllipsis();
                STNode semicolonToken = parseSemicolon();
                return STNodeFactory.createRecordRestDescriptorNode(type, ellipsis, semicolonToken);
            case IDENTIFIER_TOKEN:
                STNode fieldName = parseVariableName();
                return parseFieldDescriptorRhs(metadata, type, fieldName);
            default:
                STToken token = peek();
                Solution solution = recover(token, ParserRuleContext.FIELD_OR_REST_DESCIPTOR_RHS, metadata, type);

                // If the recovery action was token removal, the rule was re-parsed with the
                // next token, so the recovered node is the result.
                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }

                return parseFieldOrRestDescriptorRhs(solution.tokenKind, metadata, type);
        }
    }

    /**
     * <p>
     * Parse field descriptor rhs.
     * </p>
     *
     * @param metadata Metadata
     * @param type Type descriptor
     * @param fieldName Field name
     * @return Parsed node
     */
    private STNode parseFieldDescriptorRhs(STNode metadata, STNode type, STNode fieldName) {
        STToken token = peek();
        return parseFieldDescriptorRhs(token.kind, metadata, type, fieldName);
    }

    /**
     * <p>
     * Parse field descriptor rhs.
     * </p>
     *
     * <code>
     * field-descriptor := [?
     * | default-value] ;
     * <br/>default-value := = expression
     * </code>
     *
     * @param kind Kind of the next token
     * @param metadata Metadata
     * @param type Type descriptor
     * @param fieldName Field name
     * @return Parsed node
     */
    private STNode parseFieldDescriptorRhs(SyntaxKind kind, STNode metadata, STNode type, STNode fieldName) {
        switch (kind) {
            case SEMICOLON_TOKEN:
                // Required field: "type name;"
                STNode questionMarkToken = STNodeFactory.createEmptyNode();
                STNode semicolonToken = parseSemicolon();
                return STNodeFactory.createRecordFieldNode(metadata, type, fieldName, questionMarkToken,
                        semicolonToken);
            case QUESTION_MARK_TOKEN:
                // Optional field: "type name?;"
                questionMarkToken = parseQuestionMark();
                semicolonToken = parseSemicolon();
                return STNodeFactory.createRecordFieldNode(metadata, type, fieldName, questionMarkToken,
                        semicolonToken);
            case EQUAL_TOKEN:
                // Field with default value: "type name = expr;"
                STNode equalsToken = parseAssignOp();
                STNode expression = parseExpression();
                semicolonToken = parseSemicolon();
                return STNodeFactory.createRecordFieldWithDefaultValueNode(metadata, type, fieldName, equalsToken,
                        expression, semicolonToken);
            default:
                STToken token = peek();
                Solution solution = recover(token, ParserRuleContext.FIELD_DESCRIPTOR_RHS, metadata, type, fieldName);

                // If the recovery action was token removal, the rule was re-parsed with the
                // next token, so the recovered node is the result.
                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }

                return parseFieldDescriptorRhs(solution.tokenKind, metadata, type, fieldName);
        }
    }

    /**
     * Parse question mark.
     *
     * @return Parsed node
     */
    private STNode parseQuestionMark() {
        STToken token = peek();
        if (token.kind == SyntaxKind.QUESTION_MARK_TOKEN) {
            return consume();
        } else {
            Solution sol = recover(token, ParserRuleContext.QUESTION_MARK);
            return sol.recoveredNode;
        }
    }

    /*
     * Statements
     */

    /**
     * Parse statements, until an end of a block is reached.
     *
     * @return Parsed node
     */
    private STNode parseStatements() {
        ArrayList<STNode> stmts = new ArrayList<>();
        while (!isEndOfStatements()) {
            STNode stmt = parseStatement();
            if (stmt == null) {
                break;
            }

            // Named workers are only valid at the start of a function body block, which is
            // handled separately; reject them in any other statement sequence.
            if (stmt.kind == SyntaxKind.NAMED_WORKER_DECLARATION) {
                this.errorHandler.reportInvalidNode(null, "named-workers are not allowed here");
                break;
            }
            stmts.add(stmt);
        }

        return STNodeFactory.createNodeList(stmts);
    }

    /**
     * Parse a single statement.
     *
     * @return Parsed node
     */
    protected STNode parseStatement() {
        STToken token = peek();
        return parseStatement(token.kind, 1);
    }

    /**
     * Parse a single statement, given the next token kind. First consumes any leading
     * annotations, then delegates to the annotation-aware overload.
     *
     * @param tokenKind Next token kind
     * @param nextTokenIndex Lookahead index used by expression-start checks
     * @return Parsed node
     */
    private STNode parseStatement(SyntaxKind tokenKind, int nextTokenIndex) {
        STNode annots = null;
        switch (tokenKind) {
            case CLOSE_BRACE_TOKEN:
                // Returning null marks the end of the statement sequence.
                return null;
            case SEMICOLON_TOKEN:
                // Stray semicolon: drop it and try again.
                this.errorHandler.removeInvalidToken();
                return parseStatement();
            case AT_TOKEN:
                annots = parseAnnotations(tokenKind);
                tokenKind = peek().kind;
                break;
            case FINAL_KEYWORD:

                // Statements starting with keywords
            case IF_KEYWORD:
            case WHILE_KEYWORD:
            case PANIC_KEYWORD:
            case CONTINUE_KEYWORD:
            case BREAK_KEYWORD:
            case RETURN_KEYWORD:
            case TYPE_KEYWORD:
            case LOCK_KEYWORD:
            case OPEN_BRACE_TOKEN:
            case FORK_KEYWORD:
            case FOREACH_KEYWORD:
            case XMLNS_KEYWORD:
            case TRANSACTION_KEYWORD:
            case RETRY_KEYWORD:
            case ROLLBACK_KEYWORD:

                // Action statements
            case CHECK_KEYWORD:
            case CHECKPANIC_KEYWORD:
            case TRAP_KEYWORD:
            case START_KEYWORD:
            case FLUSH_KEYWORD:
            case LEFT_ARROW_TOKEN:
            case WAIT_KEYWORD:
            case COMMIT_KEYWORD:
            case WORKER_KEYWORD:
                break;
            default:
                // Statements starting with a type or an expression.
                if (isTypeStartingToken(tokenKind)) {
                    break;
                }

                if (isValidExpressionStart(tokenKind, nextTokenIndex)) {
                    break;
                }

                STToken token = peek();
                Solution solution = recover(token, ParserRuleContext.STATEMENT, nextTokenIndex);

                if (solution.action == Action.KEEP) {
                    // Token can start a statement after all; proceed with it.
                    break;
                }

                // If the recovery action was token removal, the rule was re-parsed with the
                // next token, so the recovered node is the result.
                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }

                return parseStatement(solution.tokenKind, nextTokenIndex);
        }

        return parseStatement(tokenKind, annots, nextTokenIndex);
    }

    // Returns the given annotation list, or an empty node-list when none was parsed.
    // NOTE(review): parameter name "nullbaleAnnot" is a typo for "nullableAnnot".
    private STNode getAnnotations(STNode nullbaleAnnot) {
        if (nullbaleAnnot != null) {
            return nullbaleAnnot;
        }

        return STNodeFactory.createNodeList(new ArrayList<>());
    }

    private STNode parseStatement(STNode annots) {
        return parseStatement(peek().kind, annots, 1);
    }

    /**
     * Parse a single statement, given the next token kind.
     *
     * @param tokenKind Next token kind
     * @return Parsed node
     */
    private STNode parseStatement(SyntaxKind tokenKind, STNode annots, int nextTokenIndex) {
        switch (tokenKind) {
            case CLOSE_BRACE_TOKEN:
                // Annotations were consumed but no statement follows.
                this.errorHandler.reportInvalidNode(null, "invalid annotations");
                return null;
            case SEMICOLON_TOKEN:
                this.errorHandler.removeInvalidToken();
                return parseStatement(annots);
            case FINAL_KEYWORD:
                STNode finalKeyword = parseFinalKeyword();
                return parseVariableDecl(getAnnotations(annots), finalKeyword, false);
            case IF_KEYWORD:
                return parseIfElseBlock();
            case WHILE_KEYWORD:
                return parseWhileStatement();
            case PANIC_KEYWORD:
                return parsePanicStatement();
            case CONTINUE_KEYWORD:
                return parseContinueStatement();
            case BREAK_KEYWORD:
                return parseBreakStatement();
            case RETURN_KEYWORD:
                return parseReturnStatement();
            case TYPE_KEYWORD:
                return parseLocalTypeDefinitionStatement(getAnnotations(annots));
            case IDENTIFIER_TOKEN:
                // Could be a var-decl (type starts with an identifier), an assignment, or a
                // call statement; disambiguated downstream.
                return parseStatementStartsWithIdentifier(getAnnotations(annots));
            case LOCK_KEYWORD:
                return parseLockStatement();
            case OPEN_BRACE_TOKEN:
                return parseBlockNode();
            case WORKER_KEYWORD:
                return parseNamedWorkerDeclaration(getAnnotations(annots));
            case FORK_KEYWORD:
                return parseForkStatement();
            case FOREACH_KEYWORD:
                return parseForEachStatement();
            case START_KEYWORD:
            case CHECK_KEYWORD:
            case CHECKPANIC_KEYWORD:
            case TRAP_KEYWORD:
            case FLUSH_KEYWORD:
            case LEFT_ARROW_TOKEN:
            case WAIT_KEYWORD:
            case FROM_KEYWORD:
            case COMMIT_KEYWORD:
                return parseExpressionStament(tokenKind, getAnnotations(annots));
            case XMLNS_KEYWORD:
                return parseXMLNamepsaceDeclaration();
            case TRANSACTION_KEYWORD:
                return parseTransactionStatement();
            case RETRY_KEYWORD:
                return parseRetryStatement();
            case ROLLBACK_KEYWORD:
                return parseRollbackStatement();
            case OPEN_BRACKET_TOKEN:
                // Either a list-binding-pattern var-decl or a destructuring assignment.
                return parseDestructureAssignmentOrVarDecl(getAnnotations(annots));
            default:
                if (isTypeStartingToken(tokenKind)) {
                    // Variable declaration with a type descriptor and no "final".
                    finalKeyword = STNodeFactory.createEmptyNode();
                    return parseVariableDecl(getAnnotations(annots), finalKeyword, false);
                }

                if (isValidExpressionStart(tokenKind, nextTokenIndex)) {
                    return parseStamentStartWithExpr(tokenKind, getAnnotations(annots));
                }

                STToken token = peek();
                Solution solution = recover(token, ParserRuleContext.STATEMENT_WITHOUT_ANNOTS, annots, nextTokenIndex);

                if (solution.action == Action.KEEP) {
                    finalKeyword = STNodeFactory.createEmptyNode();
                    return parseVariableDecl(getAnnotations(annots), finalKeyword, false);
                }

                // If the recovery action was token removal, the rule was re-parsed with the
                // next token, so the recovered node is the result.
                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }

                // NOTE(review): this retry passes nextTokenIndex - 1 whereas the annotation-
                // free overload retries with nextTokenIndex unchanged — confirm intended.
                return parseStatement(solution.tokenKind, annots, nextTokenIndex - 1);
        }
    }

    /**
     * <p>
     * Parse variable declaration. Variable declaration can be a local or module level.
     * </p>
     *
     * <code>
     * local-var-decl-stmt := local-init-var-decl-stmt | local-no-init-var-decl-stmt
     * <br/><br/>
     * local-init-var-decl-stmt := [annots] [final] typed-binding-pattern = action-or-expr ;
     * <br/><br/>
     * local-no-init-var-decl-stmt := [annots] [final] type-descriptor variable-name ;
     * </code>
     *
     * @param annots Annotations or metadata
     * @param finalKeyword Final keyword
     * @return Parsed node
     */
    private STNode parseVariableDecl(STNode annots, STNode finalKeyword, boolean isModuleVar) {
        startContext(ParserRuleContext.VAR_DECL_STMT);
        STNode typeBindingPattern = parseTypedBindingPattern(ParserRuleContext.VAR_DECL_STMT);
        STNode varDecl = parseVarDeclRhs(annots, finalKeyword, typeBindingPattern, isModuleVar);
        endContext();
        return varDecl;
    }

    /**
     * Parse the component after the type-desc, of a typed-binding-pattern.
     *
     * @param typeDesc Starting type-desc of the typed-binding-pattern
     * @return Typed-binding pattern
     */
    private STNode parseTypedBindingPatternTypeRhs(STNode typeDesc, ParserRuleContext context) {
        return parseTypedBindingPatternTypeRhs(peek().kind, typeDesc, context);
    }

    // Parse the binding pattern that follows the type descriptor, given the next token kind.
    private STNode parseTypedBindingPatternTypeRhs(SyntaxKind nextTokenKind, STNode typeDesc,
                                                   ParserRuleContext context) {
        switch (nextTokenKind) {
            case IDENTIFIER_TOKEN: // Capture/Functional binding pattern: T x or T func(..)
            case OPEN_BRACE_TOKEN: // Map binding pattern: T { }
            case ERROR_KEYWORD: // Functional binding pattern: T error(..)
                STNode bindingPattern = parseBindingPattern(nextTokenKind);
                return STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);
            case OPEN_BRACKET_TOKEN:
                // T[..] ..: ambiguous between array-type-desc and list-binding-pattern;
                // resolved by parseTypedBindingPatternOrMemberAccess.
                STNode typedBindingPattern = parseTypedBindingPatternOrMemberAccess(typeDesc, true, context);
                assert typedBindingPattern.kind == SyntaxKind.TYPED_BINDING_PATTERN;
                return typedBindingPattern;
            default:
                Solution solution =
                        recover(peek(), ParserRuleContext.TYPED_BINDING_PATTERN_TYPE_RHS, typeDesc, context);

                // If the recovery action was token removal, the rule was re-parsed with the
                // next token, so the recovered node is the result.
                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }

                return parseTypedBindingPatternTypeRhs(solution.tokenKind, typeDesc, context);
        }
    }

    /**
     * Parse typed-binding pattern with list, array-type-desc, or member-access-expr.
     *
     * @param typeDescOrExpr Type desc or the expression at the start
     * @param isTypedBindingPattern Whether this is a typed-binding-pattern.
     * @return Parsed node
     */
    private STNode parseTypedBindingPatternOrMemberAccess(STNode typeDescOrExpr, boolean isTypedBindingPattern,
                                                          ParserRuleContext context) {
        startContext(ParserRuleContext.BRACKETED_LIST);
        STNode openBracket = parseOpenBracket();

        // "T[]" — empty brackets can only be an array-type-desc.
        if (isBracketedListEnd(peek().kind)) {
            return parseAsArrayTypeDesc(typeDescOrExpr, openBracket, STNodeFactory.createEmptyNode(), context);
        }

        // Parse the first member and see whether it already disambiguates the construct.
        STNode member = parseBracketedListMember(isTypedBindingPattern);
        SyntaxKind currentNodeType = getBracketedListNodeType(member);
        switch (currentNodeType) {
            case ARRAY_TYPE_DESC:
                STNode typedBindingPattern = parseAsArrayTypeDesc(typeDescOrExpr, openBracket, member, context);
                return typedBindingPattern;
            case LIST_BINDING_PATTERN:
                // The member is a binding pattern, so this must be a list-binding-pattern.
                STNode bindingPattern = parseAsListBindingPattern(openBracket, new ArrayList<>(), member);
                STNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);
                return STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);
            case INDEXED_EXPRESSION:
                return parseAsMemberAccessExpr(typeDescOrExpr, openBracket, member);
            case NONE:
            default:
                // Still ambiguous; continue below.
                break;
        }

        // A comma after the first member means multiple members, hence a list-binding-pattern.
        STNode memberEnd = parseBracketedListMemberEnd();
        if (memberEnd != null) {
            List<STNode> memberList = new ArrayList<>();
            memberList.add(member);
            memberList.add(memberEnd);
            STNode bindingPattern = parseAsListBindingPattern(openBracket, memberList);
            STNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);
            return STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);
        }

        // Single ambiguous member: decide based on what follows the close bracket.
        STNode closeBracket = parseCloseBracket();
        endContext();
        return parseTypedBindingPatternOrMemberAccessRhs(typeDescOrExpr, openBracket, member, closeBracket,
                isTypedBindingPattern, context);
    }

    // Treat the bracketed construct as a member-access expression and continue parsing
    // the rest of the expression.
    private STNode parseAsMemberAccessExpr(STNode typeNameOrExpr, STNode openBracket, STNode member) {
        STNode closeBracket = parseCloseBracket();
        endContext();
        STNode keyExpr = STNodeFactory.createNodeList(member);
        STNode memberAccessExpr =
                STNodeFactory.createIndexedExpressionNode(typeNameOrExpr, openBracket, keyExpr, closeBracket);
        return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, memberAccessExpr, true, false);
    }

    // Checks whether the next token terminates a bracketed list.
    private boolean isBracketedListEnd(SyntaxKind nextTokenKind) {
        switch (nextTokenKind) {
            case EOF_TOKEN:
            case CLOSE_BRACKET_TOKEN:
                return true;
            default:
                return false;
        }
    }

    private STNode parseBracketedListMember(boolean isTypedBindingPattern) {
        return parseBracketedListMember(peek().kind, isTypedBindingPattern);
    }

    /**
     * Parse a member of an ambiguous bracketed list. This member could be:
     * 1) Array length
     * 2) Key expression of a member-access-expr
     * 3) A member-binding pattern of a list-binding-pattern.
     *
     * @param nextTokenKind Kind of the next token
     * @param isTypedBindingPattern Is this in a definite typed-binding pattern
     * @return Parsed member node
     */
    private STNode parseBracketedListMember(SyntaxKind nextTokenKind, boolean isTypedBindingPattern) {
        switch (nextTokenKind) {
            case DECIMAL_INTEGER_LITERAL:
            case HEX_INTEGER_LITERAL:
            case ASTERISK_TOKEN:
                // Could be an array length (e.g. "T[5]", "T[*]").
                return parseBasicLiteral();
            case CLOSE_BRACKET_TOKEN:
                return STNodeFactory.createEmptyNode();
            case ERROR_KEYWORD: // functional binding pattern
            case ELLIPSIS_TOKEN: // rest binding pattern
                return parseListBindingPatternMember();
            case IDENTIFIER_TOKEN:
                if (isTypedBindingPattern) {
                    STNode identifier = parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);
                    nextTokenKind = peek().kind;
                    if (nextTokenKind == SyntaxKind.OPEN_PAREN_TOKEN) {
                        // Functional binding pattern: "T [foo(..)".
                        return parseListBindingPatternMember();
                    }
                    return identifier;
                }
                break;
            default:
                if (!isTypedBindingPattern && isValidExpressionStart(nextTokenKind, 1)) {
                    break;
                }

                ParserRuleContext recoverContext =
                        isTypedBindingPattern ? ParserRuleContext.LIST_BINDING_MEMBER_OR_ARRAY_LENGTH
                                : ParserRuleContext.BRACKETED_LIST_MEMBER;
                Solution solution = recover(peek(), recoverContext, isTypedBindingPattern);

                // If the recovery action was token removal, the rule was re-parsed with the
                // next token, so the recovered node is the result.
                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }

                return parseBracketedListMember(solution.tokenKind, isTypedBindingPattern);
        }

        // Ambiguous path: parse as an expression, then reinterpret if it turns out to be
        // a binding pattern.
        STNode expr = parseExpression();
        if (isWildcardBP(expr)) {
            return STNodeFactory.createCaptureBindingPatternNode(expr);
        }
        if (expr.kind == SyntaxKind.SIMPLE_NAME_REFERENCE || expr.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {
            nextTokenKind = peek().kind;
            if (nextTokenKind == SyntaxKind.OPEN_PAREN_TOKEN) {
                // Functional binding pattern.
                return parseListBindingPatternMember();
            }
        }

        return expr;
    }

    /**
     * Treat the current node as an array, and parse the remainder of the binding pattern.
     *
     * @param typeDesc Type-desc
     * @param openBracket Open bracket
     * @param member Member
     * @return Parsed node
     */
    private STNode parseAsArrayTypeDesc(STNode typeDesc, STNode openBracket, STNode member,
                                        ParserRuleContext context) {
        typeDesc = getTypeDescFromExpr(typeDesc);
        STNode closeBracket = parseCloseBracket();
        endContext();
        return parseTypedBindingPatternOrMemberAccessRhs(typeDesc, openBracket, member, closeBracket, true, context);
    }

    private STNode parseBracketedListMemberEnd() {
        return parseBracketedListMemberEnd(peek().kind);
    }

    // Parse the separator after a bracketed-list member: a comma continues the list,
    // a close bracket (returned as null) ends it.
    private STNode parseBracketedListMemberEnd(SyntaxKind nextTokenKind) {
        switch (nextTokenKind) {
            case COMMA_TOKEN:
                return parseComma();
            case CLOSE_BRACKET_TOKEN:
                return null;
            default:
                Solution solution = recover(peek(), ParserRuleContext.BRACKETED_LIST_MEMBER_END);

                // If the recovery action was token removal, the rule was re-parsed with the
                // next token, so the recovered node is the result.
                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }

                return parseBracketedListMemberEnd(solution.tokenKind);
        }
    }

    /**
     * We reach here to break ambiguity of T[a]. This could be:
     * 1) Array Type Desc
     * 2) Member access on LHS
     * 3) Typed-binding-pattern
     *
     * @param typeDescOrExpr Type name or the expr that precedes the open-bracket.
 * @param openBracket Open bracket
 * @param member Member
 * @param closeBracket Close bracket
 * @param isTypedBindingPattern Is this a typed-binding-pattern.
 * @return Specific node that matches to T[a], after solving ambiguity.
 */
private STNode parseTypedBindingPatternOrMemberAccessRhs(STNode typeDescOrExpr, STNode openBracket, STNode member,
                                                         STNode closeBracket, boolean isTypedBindingPattern,
                                                         ParserRuleContext context) {
    STToken nextToken = peek();
    return parseTypedBindingPatternOrMemberAccessRhs(nextToken.kind, typeDescOrExpr, openBracket, member,
            closeBracket, isTypedBindingPattern, context);
}

// Disambiguates "T[a]" by looking at the token that follows the close-bracket.
private STNode parseTypedBindingPatternOrMemberAccessRhs(SyntaxKind nextTokenKind, STNode typeDescOrExpr,
                                                         STNode openBracket, STNode member, STNode closeBracket,
                                                         boolean isTypedBindingPattern, ParserRuleContext context) {
    switch (nextTokenKind) {
        case IDENTIFIER_TOKEN:
        case OPEN_BRACE_TOKEN:
        case ERROR_KEYWORD:
            // A binding pattern follows, so "T[a]" is an array type-desc.
            STNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);
            STNode arrayTypeDesc = STNodeFactory.createArrayTypeDescriptorNode(typeDesc, openBracket, member, closeBracket);
            return parseTypedBindingPatternTypeRhs(arrayTypeDesc, context);
        case OPEN_BRACKET_TOKEN:
            if (isTypedBindingPattern) {
                typeDesc = getTypeDescFromExpr(typeDescOrExpr);
                arrayTypeDesc = STNodeFactory.createArrayTypeDescriptorNode(typeDesc, openBracket, member, closeBracket);
                return parseTypedBindingPatternTypeRhs(arrayTypeDesc, context);
            }
            // Still ambiguous ("T[a][b]"): treat as member access and keep resolving.
            STNode keyExpr = STNodeFactory.createNodeList(member);
            STNode expr = STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr, closeBracket);
            return parseTypedBindingPatternOrMemberAccess(expr, false, context);
        case QUESTION_MARK_TOKEN:
            // Optional type-desc "T[a]?": definitely a type.
            typeDesc = getTypeDescFromExpr(typeDescOrExpr);
            arrayTypeDesc = STNodeFactory.createArrayTypeDescriptorNode(typeDesc, openBracket, member, closeBracket);
            typeDesc = parseComplexTypeDescriptor(arrayTypeDesc, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);
            return parseTypedBindingPatternTypeRhs(typeDesc, context);
        case PIPE_TOKEN:
        case BITWISE_AND_TOKEN:
            // Union/intersection that involves the array type-desc on the lhs.
            return parseComplexTypeDescInTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket, context);
        case IN_KEYWORD:
            // "in" is only expected in a foreach; otherwise recover below.
            if (context != ParserRuleContext.FOREACH_STMT) {
                break;
            }
            return createTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket);
        case EQUAL_TOKEN:
            if (context == ParserRuleContext.FOREACH_STMT) {
                break;
            }
            if (isTypedBindingPattern) {
                return createTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket);
            }
            // "T[a] = ..." where not a binding pattern: member-access lvalue.
            keyExpr = STNodeFactory.createNodeList(member);
            return STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr, closeBracket);
        case SEMICOLON_TOKEN:
            if (context == ParserRuleContext.FOREACH_STMT) {
                break;
            }
            return createTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket);
        default:
            if (isValidExprRhsStart(nextTokenKind)) {
                // An operator follows, so "T[a]" was a member-access expression.
                keyExpr = STNodeFactory.createNodeList(member);
                return STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr, closeBracket);
            }
            break;
    }
    Solution solution = recover(peek(), ParserRuleContext.BRACKETED_LIST_RHS, typeDescOrExpr, openBracket, member,
            closeBracket, true, context);
    if (solution.action == Action.REMOVE) {
        return solution.recoveredNode;
    }
    return parseTypedBindingPatternOrMemberAccessRhs(solution.tokenKind, typeDescOrExpr, openBracket, member,
            closeBracket, true, context);
}

// Builds a typed-binding-pattern node, treating the bracketed member (if any) as a
// capture binding pattern inside a list-binding-pattern.
private STNode createTypedBindingPattern(STNode typeDescOrExpr, STNode openBracket, STNode member,
                                         STNode closeBracket) {
    STNode bindingPatterns;
    if (isEmpty(member)) {
        bindingPatterns = STNodeFactory.createNodeList();
    } else {
        bindingPatterns = STNodeFactory.createNodeList(STNodeFactory.createCaptureBindingPatternNode(member));
    }
    STNode restBindingPattern = STNodeFactory.createEmptyNode();
    STNode bindingPattern = STNodeFactory.createListBindingPatternNode(openBracket, bindingPatterns,
            restBindingPattern, closeBracket);
    STNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);
    return STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);
}

/**
 * Parse a union or intersection type that involves array type-desc in lhs.
 * <p>
 * e.g: <code>(T[a] & R..)</code> or <code>(T[a] | R.. )</code>
 * <p>
 * Complexity occurs in scenarios such as <code>T[a] |/& R[b]</code>. If the token after this
 * is another binding-pattern, then <code>(T[a] |/& R[b])</code> becomes the type-desc. However,
 * if the token follows this is an equal or semicolon, then <code>(T[a] |/& R)</code> becomes
 * the type-desc, and <code>[b]</code> becomes the binding pattern.
 *
 * @param typeDescOrExpr Type desc or the expression
 * @param openBracket Open bracket
 * @param member Member
 * @param closeBracket Close bracket
 * @param context Context in which the typed binding pattern occurs
 * @return Parsed node
 */
private STNode parseComplexTypeDescInTypedBindingPattern(STNode typeDescOrExpr, STNode openBracket, STNode member,
                                                         STNode closeBracket, ParserRuleContext context) {
    STNode lhsTypeDesc = getTypeDescFromExpr(typeDescOrExpr);
    lhsTypeDesc = STNodeFactory.createArrayTypeDescriptorNode(lhsTypeDesc, openBracket, member, closeBracket);
    STNode pipeOrAndToken = parseUnionOrIntersectionToken();
    // Parse the rhs as a full typed-binding-pattern, then fold its type-desc into
    // the union/intersection and keep its binding pattern.
    STTypedBindingPatternNode rhsTypedBindingPattern = (STTypedBindingPatternNode) parseTypedBindingPattern(context);
    STNode newTypeDesc;
    if (pipeOrAndToken.kind == SyntaxKind.PIPE_TOKEN) {
        newTypeDesc = STNodeFactory.createUnionTypeDescriptorNode(lhsTypeDesc, pipeOrAndToken,
                rhsTypedBindingPattern.typeDescriptor);
    } else {
        newTypeDesc = STNodeFactory.createIntersectionTypeDescriptorNode(lhsTypeDesc, pipeOrAndToken,
                rhsTypedBindingPattern.typeDescriptor);
    }
    return STNodeFactory.createTypedBindingPatternNode(newTypeDesc, rhsTypedBindingPattern.bindingPattern);
}

/**
 * Parse union (|) or intersection (&) type operator.
* * @return pipe or bitwise and token */ private STNode parseUnionOrIntersectionToken() { STToken token = peek(); if (token.kind == SyntaxKind.PIPE_TOKEN || token.kind == SyntaxKind.BITWISE_AND_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.UNION_OR_INTERSECTION_TOKEN); return sol.recoveredNode; } } /** * Infer the type of the ambiguous bracketed list, based on the type of the member. * * @param memberNode Member node * @return Inferred type of the bracketed list */ private SyntaxKind getBracketedListNodeType(STNode memberNode) { if (isEmpty(memberNode)) { return SyntaxKind.NONE; } if (memberNode.kind.compareTo(SyntaxKind.TYPE_DESC) >= 0 && memberNode.kind.compareTo(SyntaxKind.SINGLETON_TYPE_DESC) <= 0) { return SyntaxKind.TUPLE_TYPE_DESC; } switch (memberNode.kind) { case ASTERISK_TOKEN: return SyntaxKind.ARRAY_TYPE_DESC; case CAPTURE_BINDING_PATTERN: case LIST_BINDING_PATTERN: case REST_BINDING_PATTERN: return SyntaxKind.LIST_BINDING_PATTERN; case QUALIFIED_NAME_REFERENCE: case REST_TYPE: return SyntaxKind.TUPLE_TYPE_DESC; case DECIMAL_INTEGER_LITERAL: case HEX_INTEGER_LITERAL: case SIMPLE_NAME_REFERENCE: case LIST_BP_OR_TUPLE_TYPE_DESC: return SyntaxKind.NONE; default: return SyntaxKind.INDEXED_EXPRESSION; } } /** * Create a type-desc out of an expression. * * @param expression Expression * @return Type descriptor */ private STNode getTypeDescFromExpr(STNode expression) { switch (expression.kind) { case INDEXED_EXPRESSION: break; case SIMPLE_NAME_REFERENCE: case QUALIFIED_NAME_REFERENCE: default: return expression; } STIndexedExpressionNode indexedExpr = (STIndexedExpressionNode) expression; STNode memberTypeDesc = getTypeDescFromExpr(indexedExpr.containerExpression); STNode arrayLength = getArrayLength((STNodeList) indexedExpr.keyExpression); return STNodeFactory.createArrayTypeDescriptorNode(memberTypeDesc, indexedExpr.openBracket, arrayLength, indexedExpr.closeBracket); } /** * Parse final keyword. 
 *
 * @return Parsed node
 */
private STNode parseFinalKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.FINAL_KEYWORD) {
        return consume();
    } else {
        Solution sol = recover(token, ParserRuleContext.FINAL_KEYWORD);
        return sol.recoveredNode;
    }
}

/**
 * <p>
 * Parse the right hand side of a variable declaration statement.
 * </p>
 * <code>
 * var-decl-rhs := ; | = action-or-expr ;
 * </code>
 *
 * @param metadata metadata
 * @param finalKeyword Final keyword
 * @param typedBindingPattern Typed binding pattern
 * @return Parsed node
 */
private STNode parseVarDeclRhs(STNode metadata, STNode finalKeyword, STNode typedBindingPattern,
                               boolean isModuleVar) {
    STToken token = peek();
    return parseVarDeclRhs(token.kind, metadata, finalKeyword, typedBindingPattern, isModuleVar);
}

/**
 * Parse the right hand side of a variable declaration statement, given the
 * next token kind.
 *
 * @param tokenKind Next token kind
 * @param metadata Metadata
 * @param finalKeyword Final keyword
 * @param typedBindingPattern Typed binding pattern
 * @param isModuleVar flag indicating whether the var is module level
 * @return Parsed node
 */
private STNode parseVarDeclRhs(SyntaxKind tokenKind, STNode metadata, STNode finalKeyword,
                               STNode typedBindingPattern, boolean isModuleVar) {
    STNode assign;
    STNode expr;
    STNode semicolon;
    switch (tokenKind) {
        case EQUAL_TOKEN:
            assign = parseAssignOp();
            if (isModuleVar) {
                // Module-level vars take plain expressions; actions are not allowed.
                expr = parseExpression();
            } else {
                expr = parseActionOrExpression();
            }
            semicolon = parseSemicolon();
            break;
        case SEMICOLON_TOKEN:
            // Declaration without an initializer.
            assign = STNodeFactory.createEmptyNode();
            expr = STNodeFactory.createEmptyNode();
            semicolon = parseSemicolon();
            break;
        default:
            STToken token = peek();
            Solution solution = recover(token, ParserRuleContext.VAR_DECL_STMT_RHS, metadata, finalKeyword,
                    typedBindingPattern, isModuleVar);
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }
            return parseVarDeclRhs(solution.tokenKind, metadata, finalKeyword, typedBindingPattern, isModuleVar);
    }
    if (isModuleVar) {
        return STNodeFactory.createModuleVariableDeclarationNode(metadata, finalKeyword, typedBindingPattern,
                assign, expr, semicolon);
    }
    return STNodeFactory.createVariableDeclarationNode(metadata, finalKeyword, typedBindingPattern, assign, expr,
            semicolon);
}

/**
 * <p>
 * Parse the RHS portion of the assignment.
 * </p>
 * <code>assignment-stmt-rhs := = action-or-expr ;</code>
 *
 * @param lvExpr LHS expression
 * @return Parsed node
 */
private STNode parseAssignmentStmtRhs(STNode lvExpr) {
    validateLVExpr(lvExpr);
    STNode assign = parseAssignOp();
    STNode expr = parseActionOrExpression();
    STNode semicolon = parseSemicolon();
    return STNodeFactory.createAssignmentStatementNode(lvExpr, assign, expr, semicolon);
}

/*
 * Expressions
 */

/**
 * Parse expression. This will start parsing expressions from the lowest level of precedence.
 *
 * @return Parsed node
 */
protected STNode parseExpression() {
    return parseExpression(DEFAULT_OP_PRECEDENCE, true, false);
}

/**
 * Parse action or expression. This will start parsing actions or expressions from the lowest level of precedence.
 *
 * @return Parsed node
 */
private STNode parseActionOrExpression() {
    return parseExpression(DEFAULT_OP_PRECEDENCE, true, true);
}

// Same as parseActionOrExpression, but for the lhs: forwards any already-parsed
// annotations and marks the expression as a non-rhs expression.
private STNode parseActionOrExpressionInLhs(SyntaxKind tokenKind, STNode annots) {
    return parseExpression(tokenKind, DEFAULT_OP_PRECEDENCE, annots, false, true);
}

/**
 * Parse expression.
* * @param isRhsExpr Flag indicating whether this is a rhs expression * @return Parsed node */ private STNode parseExpression(boolean isRhsExpr) { return parseExpression(DEFAULT_OP_PRECEDENCE, isRhsExpr, false); } private void validateLVExpr(STNode expression) { if (isValidLVExpr(expression)) { return; } this.errorHandler.reportInvalidNode(null, "invalid expression for assignment lhs"); } private boolean isValidLVExpr(STNode expression) { switch (expression.kind) { case SIMPLE_NAME_REFERENCE: case QUALIFIED_NAME_REFERENCE: case LIST_BINDING_PATTERN: return true; case FIELD_ACCESS: return isValidLVExpr(((STFieldAccessExpressionNode) expression).expression); case INDEXED_EXPRESSION: return isValidLVExpr(((STIndexedExpressionNode) expression).containerExpression); default: return (expression instanceof STMissingToken); } } /** * Parse an expression that has an equal or higher precedence than a given level. * * @param precedenceLevel Precedence level of expression to be parsed * @param isRhsExpr Flag indicating whether this is a rhs expression * @param allowActions Flag indicating whether the current context support actions * @return Parsed node */ private STNode parseExpression(OperatorPrecedence precedenceLevel, boolean isRhsExpr, boolean allowActions) { STToken token = peek(); return parseExpression(token.kind, precedenceLevel, isRhsExpr, allowActions); } private STNode parseExpression(SyntaxKind kind, OperatorPrecedence precedenceLevel, boolean isRhsExpr, boolean allowActions) { STNode expr = parseTerminalExpression(kind, isRhsExpr, allowActions); return parseExpressionRhs(precedenceLevel, expr, isRhsExpr, allowActions); } private STNode parseExpression(SyntaxKind kind, OperatorPrecedence precedenceLevel, STNode annots, boolean isRhsExpr, boolean allowActions) { STNode expr = parseTerminalExpression(kind, annots, isRhsExpr, allowActions); return parseExpressionRhs(precedenceLevel, expr, isRhsExpr, allowActions); } /** * Parse terminal expressions. 
A terminal expression has the highest precedence level
 * out of all expressions, and will be at the leaves of an expression tree.
 *
 * @param annots Annotations
 * @param isRhsExpr Is a rhs expression
 * @param allowActions Allow actions
 * @return Parsed node
 */
private STNode parseTerminalExpression(STNode annots, boolean isRhsExpr, boolean allowActions) {
    return parseTerminalExpression(peek().kind, annots, isRhsExpr, allowActions);
}

// Parses leading annotations (if any) before dispatching to the annotated variant.
// Annotations are only valid on start-actions; anything else gets a diagnostic.
private STNode parseTerminalExpression(SyntaxKind kind, boolean isRhsExpr, boolean allowActions) {
    STNode annots;
    if (kind == SyntaxKind.AT_TOKEN) {
        annots = parseAnnotations();
        kind = peek().kind;
    } else {
        annots = STNodeFactory.createEmptyNode();
    }
    STNode expr = parseTerminalExpression(kind, annots, isRhsExpr, allowActions);
    if (!isEmpty(annots) && expr.kind != SyntaxKind.START_ACTION) {
        this.errorHandler.reportInvalidNode(null, "annotations are not supported for expressions");
    }
    return expr;
}

// Main dispatch for terminal (leaf) expressions, keyed on the leading token.
private STNode parseTerminalExpression(SyntaxKind kind, STNode annots, boolean isRhsExpr, boolean allowActions) {
    switch (kind) {
        case DECIMAL_INTEGER_LITERAL:
        case HEX_INTEGER_LITERAL:
        case STRING_LITERAL:
        case NULL_KEYWORD:
        case TRUE_KEYWORD:
        case FALSE_KEYWORD:
        case DECIMAL_FLOATING_POINT_LITERAL:
        case HEX_FLOATING_POINT_LITERAL:
            return parseBasicLiteral();
        case IDENTIFIER_TOKEN:
            return parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);
        case OPEN_PAREN_TOKEN:
            STToken nextNextToken = getNextNextToken(kind);
            // "()" is the nil literal; otherwise a braced expression.
            if (nextNextToken.kind == SyntaxKind.CLOSE_PAREN_TOKEN) {
                return parseNilLiteral();
            }
            return parseBracedExpression(isRhsExpr, allowActions);
        case CHECK_KEYWORD:
        case CHECKPANIC_KEYWORD:
            return parseCheckExpression(isRhsExpr, allowActions);
        case OPEN_BRACE_TOKEN:
            return parseMappingConstructorExpr();
        case TYPEOF_KEYWORD:
            return parseTypeofExpression(isRhsExpr);
        case PLUS_TOKEN:
        case MINUS_TOKEN:
        case NEGATION_TOKEN:
        case EXCLAMATION_MARK_TOKEN:
            return parseUnaryExpression(isRhsExpr);
        case TRAP_KEYWORD:
            return parseTrapExpression(isRhsExpr, allowActions);
        case OPEN_BRACKET_TOKEN:
            return parseListConstructorExpr();
        case LT_TOKEN:
            return parseTypeCastExpr(isRhsExpr);
        case TABLE_KEYWORD:
        case STREAM_KEYWORD:
        case FROM_KEYWORD:
            return parseTableConstructorOrQuery(isRhsExpr);
        case ERROR_KEYWORD:
            return parseErrorConstructorExpr();
        case LET_KEYWORD:
            return parseLetExpression(isRhsExpr);
        case BACKTICK_TOKEN:
            return parseTemplateExpression();
        case XML_KEYWORD:
            // "xml `...`" template; a bare "xml" falls through to recovery.
            nextNextToken = getNextNextToken(kind);
            if (nextNextToken.kind == SyntaxKind.BACKTICK_TOKEN) {
                return parseXMLTemplateExpression();
            }
            break;
        case STRING_KEYWORD:
            nextNextToken = getNextNextToken(kind);
            if (nextNextToken.kind == SyntaxKind.BACKTICK_TOKEN) {
                return parseStringTemplateExpression();
            }
            break;
        case FUNCTION_KEYWORD:
            return parseExplicitFunctionExpression(annots);
        case AT_TOKEN:
            // Annotation in an unexpected position: fall through to recovery.
            break;
        case NEW_KEYWORD:
            return parseNewExpression();
        case START_KEYWORD:
            return parseStartAction(annots);
        case FLUSH_KEYWORD:
            return parseFlushAction();
        case LEFT_ARROW_TOKEN:
            return parseReceiveAction();
        case WAIT_KEYWORD:
            return parseWaitAction();
        case COMMIT_KEYWORD:
            return parseCommitAction();
        case TRANSACTIONAL_KEYWORD:
            return parseTransactionalExpression();
        case SERVICE_KEYWORD:
            return parseServiceConstructorExpression(annots);
        case BASE16_KEYWORD:
        case BASE64_KEYWORD:
            return parseByteArrayLiteral(kind);
        default:
            break;
    }
    Solution solution = recover(peek(), ParserRuleContext.TERMINAL_EXPRESSION, annots, isRhsExpr, allowActions);
    if (solution.action == Action.REMOVE) {
        return solution.recoveredNode;
    }
    if (solution.action == Action.KEEP) {
        // KEEP means the bare "xml"/"string" keyword is acceptable as a template start.
        if (kind == SyntaxKind.XML_KEYWORD) {
            return parseXMLTemplateExpression();
        }
        return parseStringTemplateExpression();
    }
    switch (solution.tokenKind) {
        case IDENTIFIER_TOKEN:
            this.errorHandler.reportMissingTokenError("missing " + solution.ctx);
            return parseQualifiedIdentifier(solution.recoveredNode);
        case DECIMAL_INTEGER_LITERAL:
        case HEX_INTEGER_LITERAL:
        case STRING_LITERAL:
        case NULL_KEYWORD:
        case TRUE_KEYWORD:
        case FALSE_KEYWORD:
        case DECIMAL_FLOATING_POINT_LITERAL:
        case HEX_FLOATING_POINT_LITERAL:
            this.errorHandler.reportMissingTokenError("missing " + solution.ctx);
            return solution.recoveredNode;
        default:
            return parseTerminalExpression(solution.tokenKind, annots, isRhsExpr, allowActions);
    }
}

// Returns true if the token can begin a terminal expression.
// NOTE(review): keep this list in sync with the switch in parseTerminalExpression.
private boolean isValidExprStart(SyntaxKind tokenKind) {
    switch (tokenKind) {
        case DECIMAL_INTEGER_LITERAL:
        case HEX_INTEGER_LITERAL:
        case STRING_LITERAL:
        case NULL_KEYWORD:
        case TRUE_KEYWORD:
        case FALSE_KEYWORD:
        case DECIMAL_FLOATING_POINT_LITERAL:
        case HEX_FLOATING_POINT_LITERAL:
        case IDENTIFIER_TOKEN:
        case OPEN_PAREN_TOKEN:
        case CHECK_KEYWORD:
        case CHECKPANIC_KEYWORD:
        case OPEN_BRACE_TOKEN:
        case TYPEOF_KEYWORD:
        case PLUS_TOKEN:
        case MINUS_TOKEN:
        case NEGATION_TOKEN:
        case EXCLAMATION_MARK_TOKEN:
        case TRAP_KEYWORD:
        case OPEN_BRACKET_TOKEN:
        case LT_TOKEN:
        case TABLE_KEYWORD:
        case STREAM_KEYWORD:
        case FROM_KEYWORD:
        case ERROR_KEYWORD:
        case LET_KEYWORD:
        case BACKTICK_TOKEN:
        case XML_KEYWORD:
        case STRING_KEYWORD:
        case FUNCTION_KEYWORD:
        case AT_TOKEN:
        case NEW_KEYWORD:
        case START_KEYWORD:
        case FLUSH_KEYWORD:
        case LEFT_ARROW_TOKEN:
        case WAIT_KEYWORD:
        case SERVICE_KEYWORD:
            return true;
        default:
            return false;
    }
}

/**
 * <p>
 * Parse a new expression.
 * </p>
 * <code>
 * new-expr := explicit-new-expr | implicit-new-expr
 * <br/>
 * explicit-new-expr := new type-descriptor ( arg-list )
 * <br/>
 * implicit-new-expr := new [( arg-list )]
 * </code>
 *
 * @return Parsed NewExpression node.
 */
private STNode parseNewExpression() {
    STNode newKeyword = parseNewKeyword();
    return parseNewKeywordRhs(newKeyword);
}

/**
 * <p>
 * Parse `new` keyword.
 * </p>
 *
 * @return Parsed NEW_KEYWORD Token.
*/ private STNode parseNewKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.NEW_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.NEW_KEYWORD); return sol.recoveredNode; } } private STNode parseNewKeywordRhs(STNode newKeyword) { STNode token = peek(); return parseNewKeywordRhs(token.kind, newKeyword); } /** * <p> * Parse an implicit or explicit expression. * </p> * * @param kind next token kind. * @param newKeyword parsed node for `new` keyword. * @return Parsed new-expression node. */ private STNode parseNewKeywordRhs(SyntaxKind kind, STNode newKeyword) { switch (kind) { case OPEN_PAREN_TOKEN: return parseImplicitNewRhs(newKeyword); case SEMICOLON_TOKEN: break; case IDENTIFIER_TOKEN: case OBJECT_KEYWORD: return parseTypeDescriptorInNewExpr(newKeyword); default: break; } return STNodeFactory.createImplicitNewExpressionNode(newKeyword, STNodeFactory.createEmptyNode()); } /** * <p> * Parse an Explicit New expression. * </p> * <code> * explicit-new-expr := new type-descriptor ( arg-list ) * </code> * * @param newKeyword Parsed `new` keyword. * @return the Parsed Explicit New Expression. */ private STNode parseTypeDescriptorInNewExpr(STNode newKeyword) { STNode typeDescriptor = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_NEW_EXPR); STNode parenthesizedArgsList = parseParenthesizedArgList(); return STNodeFactory.createExplicitNewExpressionNode(newKeyword, typeDescriptor, parenthesizedArgsList); } /** * <p> * Parse an <code>implicit-new-expr</code> with arguments. * </p> * * @param newKeyword Parsed `new` keyword. * @return Parsed implicit-new-expr. */ private STNode parseImplicitNewRhs(STNode newKeyword) { STNode implicitNewArgList = parseParenthesizedArgList(); return STNodeFactory.createImplicitNewExpressionNode(newKeyword, implicitNewArgList); } /** * <p> * Parse the parenthesized argument list for a <code>new-expr</code>. * </p> * * @return Parsed parenthesized rhs of <code>new-expr</code>. 
*/ private STNode parseParenthesizedArgList() { STNode openParan = parseOpenParenthesis(ParserRuleContext.ARG_LIST_START); STNode arguments = parseArgsList(); STNode closeParan = parseCloseParenthesis(); return STNodeFactory.createParenthesizedArgList(openParan, arguments, closeParan); } /** * <p> * Parse the right-hand-side of an expression. * </p> * <code>expr-rhs := (binary-op expression * | dot identifier * | open-bracket expression close-bracket * )*</code> * * @param precedenceLevel Precedence level of the expression that is being parsed currently * @param lhsExpr LHS expression of the expression * @param isRhsExpr Flag indicating whether this is on a rhsExpr of a statement * @param allowActions Flag indicating whether the current context support actions * @return Parsed node */ private STNode parseExpressionRhs(OperatorPrecedence precedenceLevel, STNode lhsExpr, boolean isRhsExpr, boolean allowActions) { STToken token = peek(); return parseExpressionRhs(token.kind, precedenceLevel, lhsExpr, isRhsExpr, allowActions); } /** * Parse the right hand side of an expression given the next token kind. 
 *
 * @param tokenKind Next token kind
 * @param currentPrecedenceLevel Precedence level of the expression that is being parsed currently
 * @param lhsExpr LHS expression
 * @param isRhsExpr Flag indicating whether this is a rhs expr or not
 * @param allowActions Flag indicating whether to allow actions or not
 * @return Parsed node
 */
// Precedence-climbing loop: repeatedly extends lhsExpr with binary operators and
// postfix forms (call, member/field access, type-test, actions) while the next
// operator binds at least as tightly as currentPrecedenceLevel.
private STNode parseExpressionRhs(SyntaxKind tokenKind, OperatorPrecedence currentPrecedenceLevel, STNode lhsExpr,
                                  boolean isRhsExpr, boolean allowActions) {
    if (isEndOfExpression(tokenKind, isRhsExpr)) {
        return lhsExpr;
    }
    if (lhsExpr != null && lhsExpr.kind == SyntaxKind.ASYNC_SEND_ACTION) {
        // An async-send must be the last action in the chain; stop extending it.
        return lhsExpr;
    }
    if (!isValidExprRhsStart(tokenKind)) {
        STToken token = peek();
        Solution solution = recover(token, ParserRuleContext.EXPRESSION_RHS, currentPrecedenceLevel, lhsExpr,
                isRhsExpr, allowActions);
        if (solution.action == Action.REMOVE) {
            return solution.recoveredNode;
        }
        if (solution.ctx == ParserRuleContext.BINARY_OPERATOR) {
            // Insert a binary operator that matches the current precedence level so
            // the tree shape stays consistent with what was being parsed.
            SyntaxKind binaryOpKind = getBinaryOperatorKindToInsert(currentPrecedenceLevel);
            return parseExpressionRhs(binaryOpKind, currentPrecedenceLevel, lhsExpr, isRhsExpr, allowActions);
        } else {
            return parseExpressionRhs(solution.tokenKind, currentPrecedenceLevel, lhsExpr, isRhsExpr, allowActions);
        }
    }
    // ">>" and ">>>" are lexed as separate GT tokens; fuse them here.
    if (tokenKind == SyntaxKind.GT_TOKEN && peek(2).kind == SyntaxKind.GT_TOKEN) {
        if (peek(3).kind == SyntaxKind.GT_TOKEN) {
            tokenKind = SyntaxKind.TRIPPLE_GT_TOKEN;
        } else {
            tokenKind = SyntaxKind.DOUBLE_GT_TOKEN;
        }
    }
    OperatorPrecedence nextOperatorPrecedence = getOpPrecedence(tokenKind);
    if (currentPrecedenceLevel.isHigherThan(nextOperatorPrecedence, allowActions)) {
        // The next operator binds more loosely; let an outer call consume it.
        return lhsExpr;
    }
    STNode newLhsExpr;
    STNode operator;
    switch (tokenKind) {
        case OPEN_PAREN_TOKEN:
            newLhsExpr = parseFuncCall(lhsExpr);
            break;
        case OPEN_BRACKET_TOKEN:
            newLhsExpr = parseMemberAccessExpr(lhsExpr, isRhsExpr);
            break;
        case DOT_TOKEN:
            newLhsExpr = parseFieldAccessOrMethodCall(lhsExpr);
            break;
        case IS_KEYWORD:
            newLhsExpr = parseTypeTestExpression(lhsExpr);
            break;
        case RIGHT_ARROW_TOKEN:
            newLhsExpr = parseRemoteMethodCallOrAsyncSendAction(lhsExpr, isRhsExpr);
            if (!allowActions) {
                this.errorHandler.reportInvalidNode(null, "actions are not allowed here");
            }
            break;
        case SYNC_SEND_TOKEN:
            newLhsExpr = parseSyncSendAction(lhsExpr);
            if (!allowActions) {
                this.errorHandler.reportInvalidNode(null, "actions are not allowed here");
            }
            break;
        case RIGHT_DOUBLE_ARROW_TOKEN:
            newLhsExpr = parseImplicitAnonFunc(lhsExpr);
            break;
        case ANNOT_CHAINING_TOKEN:
            newLhsExpr = parseAnnotAccessExpression(lhsExpr);
            break;
        case OPTIONAL_CHAINING_TOKEN:
            newLhsExpr = parseOptionalFieldAccessExpression(lhsExpr);
            break;
        case QUESTION_MARK_TOKEN:
            newLhsExpr = parseConditionalExpression(lhsExpr);
            break;
        case DOT_LT_TOKEN:
            newLhsExpr = parseXMLFilterExpression(lhsExpr);
            break;
        case SLASH_LT_TOKEN:
        case DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN:
        case SLASH_ASTERISK_TOKEN:
            newLhsExpr = parseXMLStepExpression(lhsExpr);
            break;
        default:
            if (tokenKind == SyntaxKind.DOUBLE_GT_TOKEN) {
                operator = parseSignedRightShiftToken();
            } else if (tokenKind == SyntaxKind.TRIPPLE_GT_TOKEN) {
                operator = parseUnsignedRightShiftToken();
            } else {
                operator = parseBinaryOperator();
            }
            // Parse the rhs at the operator's own precedence; actions never appear
            // on a binary rhs.
            STNode rhsExpr = parseExpression(nextOperatorPrecedence, isRhsExpr, false);
            newLhsExpr = STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, lhsExpr, operator,
                    rhsExpr);
            break;
    }
    // Continue extending with whatever follows, at the original precedence level.
    return parseExpressionRhs(currentPrecedenceLevel, newLhsExpr, isRhsExpr, allowActions);
}

// Returns true if the token can continue an expression (binary operator or a
// postfix construct such as call, member access, or chaining).
private boolean isValidExprRhsStart(SyntaxKind tokenKind) {
    switch (tokenKind) {
        case OPEN_PAREN_TOKEN:
        case DOT_TOKEN:
        case OPEN_BRACKET_TOKEN:
        case IS_KEYWORD:
        case RIGHT_ARROW_TOKEN:
        case RIGHT_DOUBLE_ARROW_TOKEN:
        case SYNC_SEND_TOKEN:
        case ANNOT_CHAINING_TOKEN:
        case OPTIONAL_CHAINING_TOKEN:
        case QUESTION_MARK_TOKEN:
        case COLON_TOKEN:
        case DOT_LT_TOKEN:
        case SLASH_LT_TOKEN:
        case DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN:
        case SLASH_ASTERISK_TOKEN:
            return true;
        default:
            return isBinaryOperator(tokenKind);
    }
}

/**
 * Parse
member access expression.
 *
 * @param lhsExpr Container expression
 * @param isRhsExpr Is this is a rhs expression
 * @return Member access expression
 */
private STNode parseMemberAccessExpr(STNode lhsExpr, boolean isRhsExpr) {
    startContext(ParserRuleContext.MEMBER_ACCESS_KEY_EXPR);
    STNode openBracket = parseOpenBracket();
    STNode keyExpr = parseMemberAccessKeyExprs(isRhsExpr);
    STNode closeBracket = parseCloseBracket();
    endContext();
    return STNodeFactory.createIndexedExpressionNode(lhsExpr, openBracket, keyExpr, closeBracket);
}

/**
 * Parse key expression of a member access expression. A type descriptor
 * that starts with a type-ref (e.g: T[a][b]) also goes through this
 * method.
 * <p>
 * <code>key-expression := single-key-expression | multi-key-expression</code>
 *
 * @param isRhsExpr Is this is a rhs expression
 * @return Key expression
 */
private STNode parseMemberAccessKeyExprs(boolean isRhsExpr) {
    List<STNode> exprList = new ArrayList<>();
    STNode keyExpr;
    STNode keyExprEnd;
    while (!isEndOfTypeList(peek().kind)) {
        keyExpr = parseKeyExpr(isRhsExpr);
        exprList.add(keyExpr);
        keyExprEnd = parseMemberAccessKeyExprEnd();
        if (keyExprEnd == null) {
            break;
        }
        exprList.add(keyExprEnd);
    }
    // rhs expressions require at least one key; lhs (type-desc position) allows
    // at most one.
    if (isRhsExpr && exprList.isEmpty()) {
        exprList.add(STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN));
        this.errorHandler.reportInvalidNode(null, "missing key expression");
    }
    if (!isRhsExpr && exprList.size() > 1) {
        this.errorHandler.reportInvalidNode(null, "cannot have multiple keys");
    }
    return STNodeFactory.createNodeList(exprList);
}

// Parses a single key expression. In type-desc position (lhs), a bare '*' is
// accepted as an inferred array length.
private STNode parseKeyExpr(boolean isRhsExpr) {
    if (!isRhsExpr && peek().kind == SyntaxKind.ASTERISK_TOKEN) {
        return STNodeFactory.createBasicLiteralNode(SyntaxKind.ASTERISK_TOKEN, consume());
    }
    return parseExpression(isRhsExpr);
}

private STNode parseMemberAccessKeyExprEnd() {
    return parseMemberAccessKeyExprEnd(peek().kind);
}

// Parses the separator after a key expression: a comma, or null at the close-bracket.
private STNode parseMemberAccessKeyExprEnd(SyntaxKind nextTokenKind) {
    switch (nextTokenKind) {
        case COMMA_TOKEN:
            return parseComma();
        case CLOSE_BRACKET_TOKEN:
            return null;
        default:
            Solution solution = recover(peek(), ParserRuleContext.MEMBER_ACCESS_KEY_EXPR_END);
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }
            return parseMemberAccessKeyExprEnd(solution.tokenKind);
    }
}

/**
 * Parse close bracket.
 *
 * @return Parsed node
 */
private STNode parseCloseBracket() {
    STToken token = peek();
    if (token.kind == SyntaxKind.CLOSE_BRACKET_TOKEN) {
        return consume();
    } else {
        Solution sol = recover(token, ParserRuleContext.CLOSE_BRACKET);
        return sol.recoveredNode;
    }
}

/**
 * Parse field access, xml required attribute access expressions or method call expression.
 * <p>
 * <code>
 * field-access-expr := expression . field-name
 * <br/>
 * xml-required-attribute-access-expr := expression . xml-attribute-name
 * <br/>
 * xml-attribute-name := xml-qualified-name | qualified-identifier | identifier
 * <br/>
 * method-call-expr := expression . method-name ( arg-list )
 * </code>
 *
 * @param lhsExpr Preceding expression of the field access or method call
 * @return One of <code>field-access-expression</code> or <code>method-call-expression</code>.
 */
private STNode parseFieldAccessOrMethodCall(STNode lhsExpr) {
    STNode dotToken = parseDotToken();
    STNode fieldOrMethodName = parseFieldAccessIdentifier();
    if (fieldOrMethodName.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {
        // Qualified names cannot be method names; always a field/attribute access.
        return STNodeFactory.createFieldAccessExpressionNode(lhsExpr, dotToken, fieldOrMethodName);
    }
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.OPEN_PAREN_TOKEN) {
        // Method call: "expr.name(args)".
        STNode openParen = parseOpenParenthesis(ParserRuleContext.ARG_LIST_START);
        STNode args = parseArgsList();
        STNode closeParen = parseCloseParenthesis();
        return STNodeFactory.createMethodCallExpressionNode(lhsExpr, dotToken, fieldOrMethodName, openParen, args,
                closeParen);
    }
    return STNodeFactory.createFieldAccessExpressionNode(lhsExpr, dotToken, fieldOrMethodName);
}

/**
 * <p>
 * Parse braced expression.
* </p> * <code>braced-expr := ( expression )</code> * * @param isRhsExpr Flag indicating whether this is on a rhsExpr of a statement * @param allowActions Allow actions * @return Parsed node */ private STNode parseBracedExpression(boolean isRhsExpr, boolean allowActions) { STNode openParen = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS); startContext(ParserRuleContext.BRACED_EXPR_OR_ANON_FUNC_PARAMS); STNode expr; if (allowActions) { expr = parseExpression(DEFAULT_OP_PRECEDENCE, isRhsExpr, true); } else { expr = parseExpression(isRhsExpr); } if (expr.kind == SyntaxKind.SIMPLE_NAME_REFERENCE) { return parseBracedExprOrAnonFuncParamRhs(peek().kind, openParen, expr); } STNode closeParen = parseCloseParenthesis(); endContext(); if (isAction(expr)) { return STNodeFactory.createBracedExpressionNode(SyntaxKind.BRACED_ACTION, openParen, expr, closeParen); } return STNodeFactory.createBracedExpressionNode(SyntaxKind.BRACED_EXPRESSION, openParen, expr, closeParen); } private STNode parseBracedExprOrAnonFuncParamRhs(SyntaxKind nextTokenKind, STNode openParen, STNode expr) { switch (nextTokenKind) { case CLOSE_PAREN_TOKEN: STNode closeParen = parseCloseParenthesis(); STNode bracedEXprOrAnonFuncParam; if (isAction(expr)) { bracedEXprOrAnonFuncParam = STNodeFactory.createBracedExpressionNode(SyntaxKind.BRACED_ACTION, openParen, expr, closeParen); } else { bracedEXprOrAnonFuncParam = STNodeFactory.createBracedExpressionNode(SyntaxKind.BRACED_EXPRESSION, openParen, expr, closeParen); } endContext(); return bracedEXprOrAnonFuncParam; case COMMA_TOKEN: return parseImplicitAnonFunc(openParen, expr); default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.BRACED_EXPR_OR_ANON_FUNC_PARAM_RHS); if (solution.action == Action.REMOVE) { endContext(); return solution.recoveredNode; } return parseBracedExprOrAnonFuncParamRhs(solution.tokenKind, openParen, expr); } } /** * Check whether a given node is an action node. 
* * @param node Node to check * @return <code>true</code> if the node is an action node. <code>false</code> otherwise */ private boolean isAction(STNode node) { switch (node.kind) { case REMOTE_METHOD_CALL_ACTION: case BRACED_ACTION: case CHECK_ACTION: case START_ACTION: case TRAP_ACTION: return true; default: return false; } } /** * Check whether the given token is an end of a expression. * * @param tokenKind Token to check * @param isRhsExpr Flag indicating whether this is on a rhsExpr of a statement * @return <code>true</code> if the token represents an end of a block. <code>false</code> otherwise */ private boolean isEndOfExpression(SyntaxKind tokenKind, boolean isRhsExpr) { if (!isRhsExpr) { if (isCompoundBinaryOperator(tokenKind)) { return true; } return !isValidExprRhsStart(tokenKind); } switch (tokenKind) { case CLOSE_BRACE_TOKEN: case OPEN_BRACE_TOKEN: case CLOSE_PAREN_TOKEN: case CLOSE_BRACKET_TOKEN: case SEMICOLON_TOKEN: case COMMA_TOKEN: case PUBLIC_KEYWORD: case EOF_TOKEN: case CONST_KEYWORD: case LISTENER_KEYWORD: case EQUAL_TOKEN: case AT_TOKEN: case DOCUMENTATION_LINE: case AS_KEYWORD: case IN_KEYWORD: case BACKTICK_TOKEN: case FROM_KEYWORD: case WHERE_KEYWORD: case LET_KEYWORD: case SELECT_KEYWORD: case DO_KEYWORD: return true; default: return isSimpleType(tokenKind); } } /** * Parse basic literals. It is assumed that we come here after validation. * * @return Parsed node */ private STNode parseBasicLiteral() { STToken literalToken = consume(); return STNodeFactory.createBasicLiteralNode(literalToken.kind, literalToken); } /** * Parse function call expression. 
* <code>function-call-expr := function-reference ( arg-list ) * function-reference := variable-reference</code> * * @param identifier Function name * @return Function call expression */ private STNode parseFuncCall(STNode identifier) { STNode openParen = parseOpenParenthesis(ParserRuleContext.ARG_LIST_START); STNode args = parseArgsList(); STNode closeParen = parseCloseParenthesis(); return STNodeFactory.createFunctionCallExpressionNode(identifier, openParen, args, closeParen); } /** * <p> * Parse error constructor expression. * </p> * <code> * error-constructor-expr := error ( arg-list ) * </code> * * @return Error constructor expression */ private STNode parseErrorConstructorExpr() { return parseFuncCall(parseErrorKeyWord()); } /** * Parse function call argument list. * * @return Parsed args list */ private STNode parseArgsList() { startContext(ParserRuleContext.ARG_LIST); ArrayList<STNode> argsList = new ArrayList<>(); STToken token = peek(); if (isEndOfParametersList(token.kind)) { STNode args = STNodeFactory.createNodeList(argsList); endContext(); return args; } STNode leadingComma = STNodeFactory.createEmptyNode(); STNode arg = parseArg(leadingComma); if (arg == null) { STNode args = STNodeFactory.createNodeList(argsList); endContext(); return args; } SyntaxKind lastProcessedArgKind; if (SyntaxKind.POSITIONAL_ARG.ordinal() <= arg.kind.ordinal()) { argsList.add(arg); lastProcessedArgKind = arg.kind; } else { reportInvalidOrderOfArgs(peek(), SyntaxKind.POSITIONAL_ARG, arg.kind); lastProcessedArgKind = SyntaxKind.POSITIONAL_ARG; } parseFollowUpArgs(argsList, lastProcessedArgKind); STNode args = STNodeFactory.createNodeList(argsList); endContext(); return args; } /** * Parse follow up arguments. 
* * @param argsList Arguments list to which the parsed argument must be added * @param lastProcessedArgKind Kind of the argument processed prior to this */ private void parseFollowUpArgs(ArrayList<STNode> argsList, SyntaxKind lastProcessedArgKind) { STToken nextToken = peek(); while (!isEndOfParametersList(nextToken.kind)) { STNode argEnd = parseArgEnd(nextToken.kind); if (argEnd == null) { break; } nextToken = peek(); if (isEndOfParametersList(nextToken.kind)) { this.errorHandler.reportInvalidNode((STToken) argEnd, "invalid token " + argEnd); break; } STNode arg = parseArg(nextToken.kind, argEnd); if (lastProcessedArgKind.ordinal() <= arg.kind.ordinal()) { if (lastProcessedArgKind == SyntaxKind.REST_ARG && arg.kind == SyntaxKind.REST_ARG) { this.errorHandler.reportInvalidNode(nextToken, "cannot more than one rest arg"); } else { argsList.add(arg); lastProcessedArgKind = arg.kind; } } else { reportInvalidOrderOfArgs(nextToken, lastProcessedArgKind, arg.kind); } nextToken = peek(); } } private STNode parseArgEnd() { return parseArgEnd(peek().kind); } private STNode parseArgEnd(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case COMMA_TOKEN: return parseComma(); case CLOSE_PAREN_TOKEN: return null; default: Solution solution = recover(peek(), ParserRuleContext.ARG_END); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseArgEnd(solution.tokenKind); } } /** * Report invalid order of args. * * @param token Staring token of the arg. * @param lastArgKind Kind of the previously processed arg * @param argKind Current arg */ private void reportInvalidOrderOfArgs(STToken token, SyntaxKind lastArgKind, SyntaxKind argKind) { this.errorHandler.reportInvalidNode(token, "cannot have a " + argKind + " after the " + lastArgKind); } /** * Parse function call argument. 
* * @param leadingComma Comma that occurs before the param * @return Parsed argument node */ private STNode parseArg(STNode leadingComma) { STToken token = peek(); return parseArg(token.kind, leadingComma); } private STNode parseArg(SyntaxKind kind, STNode leadingComma) { STNode arg; switch (kind) { case ELLIPSIS_TOKEN: STToken ellipsis = consume(); STNode expr = parseExpression(); arg = STNodeFactory.createRestArgumentNode(leadingComma, ellipsis, expr); break; case IDENTIFIER_TOKEN: arg = parseNamedOrPositionalArg(leadingComma, kind); break; case CLOSE_PAREN_TOKEN: return null; default: if (isValidExprStart(kind)) { expr = parseExpression(); arg = STNodeFactory.createPositionalArgumentNode(leadingComma, expr); break; } Solution solution = recover(peek(), ParserRuleContext.ARG_START_OR_ARG_LIST_END, leadingComma); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseArg(solution.tokenKind, leadingComma); } return arg; } /** * Parse positional or named arg. This method assumed peek()/peek(1) * is always an identifier. * * @param leadingComma Comma that occurs before the param * @return Parsed argument node */ private STNode parseNamedOrPositionalArg(STNode leadingComma, SyntaxKind nextTokenKind) { STNode argNameOrExpr = parseTerminalExpression(peek().kind, true, false); STToken secondToken = peek(); switch (secondToken.kind) { case EQUAL_TOKEN: STNode equal = parseAssignOp(); STNode valExpr = parseExpression(); return STNodeFactory.createNamedArgumentNode(leadingComma, argNameOrExpr, equal, valExpr); case COMMA_TOKEN: case CLOSE_PAREN_TOKEN: return STNodeFactory.createPositionalArgumentNode(leadingComma, argNameOrExpr); default: argNameOrExpr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, argNameOrExpr, false, false); return STNodeFactory.createPositionalArgumentNode(leadingComma, argNameOrExpr); } } /** * Parse object type descriptor. 
* * @return Parsed node */ private STNode parseObjectTypeDescriptor() { startContext(ParserRuleContext.OBJECT_TYPE_DESCRIPTOR); STNode objectTypeQualifiers = parseObjectTypeQualifiers(); STNode objectKeyword = parseObjectKeyword(); STNode openBrace = parseOpenBrace(); STNode objectMembers = parseObjectMembers(); STNode closeBrace = parseCloseBrace(); endContext(); return STNodeFactory.createObjectTypeDescriptorNode(objectTypeQualifiers, objectKeyword, openBrace, objectMembers, closeBrace); } /** * Parse object type qualifiers. * * @return Parsed node */ private STNode parseObjectTypeQualifiers() { STToken nextToken = peek(); return parseObjectTypeQualifiers(nextToken.kind); } private STNode parseObjectTypeQualifiers(SyntaxKind kind) { List<STNode> qualifiers = new ArrayList<>(); STNode firstQualifier; switch (kind) { case CLIENT_KEYWORD: STNode clientKeyword = parseClientKeyword(); firstQualifier = clientKeyword; break; case ABSTRACT_KEYWORD: STNode abstractKeyword = parseAbstractKeyword(); firstQualifier = abstractKeyword; break; case OBJECT_KEYWORD: return STNodeFactory.createNodeList(qualifiers); default: Solution solution = recover(peek(), ParserRuleContext.OBJECT_TYPE_FIRST_QUALIFIER); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseObjectTypeQualifiers(solution.tokenKind); } STNode secondQualifier = parseObjectTypeSecondQualifier(firstQualifier); qualifiers.add(firstQualifier); if (secondQualifier != null) { qualifiers.add(secondQualifier); } return STNodeFactory.createNodeList(qualifiers); } private STNode parseObjectTypeSecondQualifier(STNode firstQualifier) { STToken nextToken = peek(); return parseObjectTypeSecondQualifier(nextToken.kind, firstQualifier); } private STNode parseObjectTypeSecondQualifier(SyntaxKind kind, STNode firstQualifier) { if (firstQualifier.kind != kind) { switch (kind) { case CLIENT_KEYWORD: return parseClientKeyword(); case ABSTRACT_KEYWORD: return parseAbstractKeyword(); case OBJECT_KEYWORD: 
return null; default: break; } } Solution solution = recover(peek(), ParserRuleContext.OBJECT_TYPE_SECOND_QUALIFIER, firstQualifier); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseObjectTypeSecondQualifier(solution.tokenKind, firstQualifier); } /** * Parse client keyword. * * @return Parsed node */ private STNode parseClientKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.CLIENT_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.CLIENT_KEYWORD); return sol.recoveredNode; } } /** * Parse abstract keyword. * * @return Parsed node */ private STNode parseAbstractKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.ABSTRACT_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.ABSTRACT_KEYWORD); return sol.recoveredNode; } } /** * Parse object keyword. * * @return Parsed node */ private STNode parseObjectKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.OBJECT_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.OBJECT_KEYWORD); return sol.recoveredNode; } } /** * Parse object members. 
* * @return Parsed node */ private STNode parseObjectMembers() { ArrayList<STNode> objectMembers = new ArrayList<>(); while (!isEndOfObjectTypeNode()) { startContext(ParserRuleContext.OBJECT_MEMBER); STNode member = parseObjectMember(peek().kind); endContext(); if (member == null) { break; } objectMembers.add(member); } return STNodeFactory.createNodeList(objectMembers); } private STNode parseObjectMember() { STToken nextToken = peek(); return parseObjectMember(nextToken.kind); } private STNode parseObjectMember(SyntaxKind nextTokenKind) { STNode metadata; switch (nextTokenKind) { case EOF_TOKEN: case CLOSE_BRACE_TOKEN: return null; case ASTERISK_TOKEN: case PUBLIC_KEYWORD: case PRIVATE_KEYWORD: case REMOTE_KEYWORD: case FUNCTION_KEYWORD: metadata = createEmptyMetadata(); break; case DOCUMENTATION_LINE: case AT_TOKEN: metadata = parseMetaData(nextTokenKind); nextTokenKind = peek().kind; break; default: if (isTypeStartingToken(nextTokenKind)) { metadata = createEmptyMetadata(); break; } Solution solution = recover(peek(), ParserRuleContext.OBJECT_MEMBER_START); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseObjectMember(solution.tokenKind); } return parseObjectMember(nextTokenKind, metadata); } private STNode parseObjectMember(SyntaxKind nextTokenKind, STNode metadata) { STNode member; switch (nextTokenKind) { case EOF_TOKEN: case CLOSE_BRACE_TOKEN: return null; case ASTERISK_TOKEN: STNode asterisk = consume(); STNode type = parseTypeReference(); STNode semicolonToken = parseSemicolon(); member = STNodeFactory.createTypeReferenceNode(asterisk, type, semicolonToken); break; case PUBLIC_KEYWORD: case PRIVATE_KEYWORD: STNode visibilityQualifier = parseObjectMemberVisibility(); member = parseObjectMethodOrField(metadata, visibilityQualifier); break; case REMOTE_KEYWORD: member = parseObjectMethodOrField(metadata, STNodeFactory.createEmptyNode()); break; case FUNCTION_KEYWORD: member = parseObjectMethod(metadata, 
STNodeFactory.createEmptyNode()); break; default: if (isTypeStartingToken(nextTokenKind)) { member = parseObjectField(metadata, STNodeFactory.createEmptyNode()); break; } Solution solution = recover(peek(), ParserRuleContext.OBJECT_MEMBER_WITHOUT_METADATA); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseObjectMember(solution.tokenKind); } return member; } private STNode parseObjectMethodOrField(STNode metadata, STNode methodQualifiers) { STToken nextToken = peek(1); STToken nextNextToken = peek(2); return parseObjectMethodOrField(nextToken.kind, nextNextToken.kind, metadata, methodQualifiers); } /** * Parse an object member, given the visibility modifier. Object member can have * only one visibility qualifier. This mean the methodQualifiers list can have * one qualifier at-most. * * @param visibilityQualifiers Visibility qualifiers. A modifier can be * a syntax node with either 'PUBLIC' or 'PRIVATE'. * @param nextTokenKind Next token kind * @param nextNextTokenKind Kind of the token after the * @param metadata Metadata * @param visibilityQualifiers Visibility qualifiers * @return Parse object member node */ private STNode parseObjectMethodOrField(SyntaxKind nextTokenKind, SyntaxKind nextNextTokenKind, STNode metadata, STNode visibilityQualifiers) { switch (nextTokenKind) { case REMOTE_KEYWORD: STNode remoteKeyword = parseRemoteKeyword(); ArrayList<STNode> methodQualifiers = new ArrayList<>(); if (!isEmpty(visibilityQualifiers)) { methodQualifiers.add(visibilityQualifiers); } methodQualifiers.add(remoteKeyword); return parseObjectMethod(metadata, STNodeFactory.createNodeList(methodQualifiers)); case FUNCTION_KEYWORD: return parseObjectMethod(metadata, visibilityQualifiers); case IDENTIFIER_TOKEN: if (nextNextTokenKind != SyntaxKind.OPEN_PAREN_TOKEN) { return parseObjectField(metadata, visibilityQualifiers); } break; default: if (isTypeStartingToken(nextTokenKind)) { return parseObjectField(metadata, visibilityQualifiers); } 
break; } Solution solution = recover(peek(), ParserRuleContext.OBJECT_FUNC_OR_FIELD_WITHOUT_VISIBILITY, metadata, visibilityQualifiers); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseObjectMethodOrField(solution.tokenKind, nextTokenKind, metadata, visibilityQualifiers); } /** * Parse object visibility. Visibility can be <code>public</code> or <code>private</code>. * * @return Parsed node */ private STNode parseObjectMemberVisibility() { STToken token = peek(); if (token.kind == SyntaxKind.PUBLIC_KEYWORD || token.kind == SyntaxKind.PRIVATE_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.PUBLIC_KEYWORD); return sol.recoveredNode; } } private STNode parseRemoteKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.REMOTE_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.REMOTE_KEYWORD); return sol.recoveredNode; } } private STNode parseObjectField(STNode metadata, STNode methodQualifiers) { STNode type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER); STNode fieldName = parseVariableName(); return parseObjectFieldRhs(metadata, methodQualifiers, type, fieldName); } /** * Parse object field rhs, and complete the object field parsing. Returns the parsed object field. * * @param metadata Metadata * @param visibilityQualifier Visibility qualifier * @param type Type descriptor * @param fieldName Field name * @return Parsed object field */ private STNode parseObjectFieldRhs(STNode metadata, STNode visibilityQualifier, STNode type, STNode fieldName) { STToken nextToken = peek(); return parseObjectFieldRhs(nextToken.kind, metadata, visibilityQualifier, type, fieldName); } /** * Parse object field rhs, and complete the object field parsing. Returns the parsed object field. 
* * @param nextTokenKind Kind of the next token * @param metadata Metadata * @param visibilityQualifier Visibility qualifier * @param type Type descriptor * @param fieldName Field name * @return Parsed object field */ private STNode parseObjectFieldRhs(SyntaxKind nextTokenKind, STNode metadata, STNode visibilityQualifier, STNode type, STNode fieldName) { STNode equalsToken; STNode expression; STNode semicolonToken; switch (nextTokenKind) { case SEMICOLON_TOKEN: equalsToken = STNodeFactory.createEmptyNode(); expression = STNodeFactory.createEmptyNode(); semicolonToken = parseSemicolon(); break; case EQUAL_TOKEN: equalsToken = parseAssignOp(); expression = parseExpression(); semicolonToken = parseSemicolon(); break; default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.OBJECT_FIELD_RHS, metadata, visibilityQualifier, type, fieldName); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseObjectFieldRhs(solution.tokenKind, metadata, visibilityQualifier, type, fieldName); } return STNodeFactory.createObjectFieldNode(metadata, visibilityQualifier, type, fieldName, equalsToken, expression, semicolonToken); } private STNode parseObjectMethod(STNode metadata, STNode methodQualifiers) { return parseFuncDefOrFuncTypeDesc(metadata, methodQualifiers, true); } /** * Parse if-else statement. * <code> * if-else-stmt := if expression block-stmt [else-block] * </code> * * @return If-else block */ private STNode parseIfElseBlock() { startContext(ParserRuleContext.IF_BLOCK); STNode ifKeyword = parseIfKeyword(); STNode condition = parseExpression(); STNode ifBody = parseBlockNode(); endContext(); STNode elseBody = parseElseBlock(); return STNodeFactory.createIfElseStatementNode(ifKeyword, condition, ifBody, elseBody); } /** * Parse if-keyword. 
* * @return Parsed if-keyword node */ private STNode parseIfKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.IF_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.IF_KEYWORD); return sol.recoveredNode; } } /** * Parse else-keyword. * * @return Parsed else keyword node */ private STNode parseElseKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.ELSE_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.ELSE_KEYWORD); return sol.recoveredNode; } } /** * Parse block node. * <code> * block-stmt := { sequence-stmt } * sequence-stmt := statement* * </code> * * @return Parse block node */ private STNode parseBlockNode() { startContext(ParserRuleContext.BLOCK_STMT); STNode openBrace = parseOpenBrace(); STNode stmts = parseStatements(); STNode closeBrace = parseCloseBrace(); endContext(); return STNodeFactory.createBlockStatementNode(openBrace, stmts, closeBrace); } /** * Parse else block. * <code>else-block := else (if-else-stmt | block-stmt)</code> * * @return Else block */ private STNode parseElseBlock() { STToken nextToken = peek(); if (nextToken.kind != SyntaxKind.ELSE_KEYWORD) { return STNodeFactory.createEmptyNode(); } STNode elseKeyword = parseElseKeyword(); STNode elseBody = parseElseBody(); return STNodeFactory.createElseBlockNode(elseKeyword, elseBody); } /** * Parse else node body. 
* <code>else-body := if-else-stmt | block-stmt</code> * * @return Else node body */ private STNode parseElseBody() { STToken nextToken = peek(); return parseElseBody(nextToken.kind); } private STNode parseElseBody(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case IF_KEYWORD: return parseIfElseBlock(); case OPEN_BRACE_TOKEN: return parseBlockNode(); default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.ELSE_BODY); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseElseBody(solution.tokenKind); } } /** * Parse while statement. * <code>while-stmt := while expression block-stmt</code> * * @return While statement */ private STNode parseWhileStatement() { startContext(ParserRuleContext.WHILE_BLOCK); STNode whileKeyword = parseWhileKeyword(); STNode condition = parseExpression(); STNode whileBody = parseBlockNode(); endContext(); return STNodeFactory.createWhileStatementNode(whileKeyword, condition, whileBody); } /** * Parse while-keyword. * * @return While-keyword node */ private STNode parseWhileKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.WHILE_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.WHILE_KEYWORD); return sol.recoveredNode; } } /** * Parse panic statement. * <code>panic-stmt := panic expression ;</code> * * @return Panic statement */ private STNode parsePanicStatement() { startContext(ParserRuleContext.PANIC_STMT); STNode panicKeyword = parsePanicKeyword(); STNode expression = parseExpression(); STNode semicolon = parseSemicolon(); endContext(); return STNodeFactory.createPanicStatementNode(panicKeyword, expression, semicolon); } /** * Parse panic-keyword. 
* * @return Panic-keyword node */ private STNode parsePanicKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.PANIC_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.PANIC_KEYWORD); return sol.recoveredNode; } } /** * Parse check expression. This method is used to parse both check expression * as well as check action. * * <p> * <code> * checking-expr := checking-keyword expression * checking-action := checking-keyword action * </code> * * @param allowActions Allow actions * @param isRhsExpr Is rhs expression * @return Check expression node */ private STNode parseCheckExpression(boolean isRhsExpr, boolean allowActions) { STNode checkingKeyword = parseCheckingKeyword(); STNode expr = parseExpression(OperatorPrecedence.EXPRESSION_ACTION, isRhsExpr, allowActions); if (isAction(expr)) { return STNodeFactory.createCheckExpressionNode(SyntaxKind.CHECK_ACTION, checkingKeyword, expr); } else { return STNodeFactory.createCheckExpressionNode(SyntaxKind.CHECK_EXPRESSION, checkingKeyword, expr); } } /** * Parse checking keyword. * <p> * <code> * checking-keyword := check | checkpanic * </code> * * @return Parsed node */ private STNode parseCheckingKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.CHECK_KEYWORD || token.kind == SyntaxKind.CHECKPANIC_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.CHECKING_KEYWORD); return sol.recoveredNode; } } /** * * Parse continue statement. * <code>continue-stmt := continue ; </code> * * @return continue statement */ private STNode parseContinueStatement() { startContext(ParserRuleContext.CONTINUE_STATEMENT); STNode continueKeyword = parseContinueKeyword(); STNode semicolon = parseSemicolon(); endContext(); return STNodeFactory.createContinueStatementNode(continueKeyword, semicolon); } /** * Parse continue-keyword. 
* * @return continue-keyword node */ private STNode parseContinueKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.CONTINUE_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.CONTINUE_KEYWORD); return sol.recoveredNode; } } /** * Parse return statement. * <code>return-stmt := return [ action-or-expr ] ;</code> * * @return Return statement */ private STNode parseReturnStatement() { startContext(ParserRuleContext.RETURN_STMT); STNode returnKeyword = parseReturnKeyword(); STNode returnRhs = parseReturnStatementRhs(returnKeyword); endContext(); return returnRhs; } /** * Parse return-keyword. * * @return Return-keyword node */ private STNode parseReturnKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.RETURN_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.RETURN_KEYWORD); return sol.recoveredNode; } } /** * Parse break statement. * <code>break-stmt := break ; </code> * * @return break statement */ private STNode parseBreakStatement() { startContext(ParserRuleContext.BREAK_STATEMENT); STNode breakKeyword = parseBreakKeyword(); STNode semicolon = parseSemicolon(); endContext(); return STNodeFactory.createBreakStatementNode(breakKeyword, semicolon); } /** * Parse break-keyword. * * @return break-keyword node */ private STNode parseBreakKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.BREAK_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.BREAK_KEYWORD); return sol.recoveredNode; } } /** * <p> * Parse the right hand side of a return statement. 
* </p> * <code> * return-stmt-rhs := ; | action-or-expr ; * </code> * * @return Parsed node */ private STNode parseReturnStatementRhs(STNode returnKeyword) { STNode expr; STToken token = peek(); switch (token.kind) { case SEMICOLON_TOKEN: expr = STNodeFactory.createEmptyNode(); break; default: expr = parseActionOrExpression(); break; } STNode semicolon = parseSemicolon(); return STNodeFactory.createReturnStatementNode(returnKeyword, expr, semicolon); } /** * Parse mapping constructor expression. * <p> * <code>mapping-constructor-expr := { [field (, field)*] }</code> * * @return Parsed node */ private STNode parseMappingConstructorExpr() { startContext(ParserRuleContext.MAPPING_CONSTRUCTOR); STNode openBrace = parseOpenBrace(); STNode fields = parseMappingConstructorFields(); STNode closeBrace = parseCloseBrace(); endContext(); return STNodeFactory.createMappingConstructorExpressionNode(openBrace, fields, closeBrace); } /** * Parse mapping constructor fields. * * @return Parsed node */ private STNode parseMappingConstructorFields() { List<STNode> fields = new ArrayList<>(); STToken nextToken = peek(); if (isEndOfMappingConstructor(nextToken.kind)) { return STNodeFactory.createNodeList(fields); } STNode mappingFieldEnd = STNodeFactory.createEmptyNode(); STNode field = parseMappingField(ParserRuleContext.FIRST_MAPPING_FIELD, mappingFieldEnd); fields.add(field); nextToken = peek(); while (!isEndOfMappingConstructor(nextToken.kind)) { mappingFieldEnd = parseMappingFieldEnd(nextToken.kind); if (mappingFieldEnd == null) { break; } field = parseMappingField(ParserRuleContext.MAPPING_FIELD, mappingFieldEnd); fields.add(field); nextToken = peek(); } return STNodeFactory.createNodeList(fields); } private STNode parseMappingFieldEnd() { return parseMappingFieldEnd(peek().kind); } private STNode parseMappingFieldEnd(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case COMMA_TOKEN: return parseComma(); case CLOSE_BRACE_TOKEN: return null; default: STToken token = peek(); 
Solution solution = recover(token, ParserRuleContext.MAPPING_FIELD_END); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseMappingFieldEnd(solution.tokenKind); } } private boolean isEndOfMappingConstructor(SyntaxKind tokenKind) { switch (tokenKind) { case IDENTIFIER_TOKEN: return false; case EOF_TOKEN: case AT_TOKEN: case DOCUMENTATION_LINE: case CLOSE_BRACE_TOKEN: case SEMICOLON_TOKEN: case PUBLIC_KEYWORD: case PRIVATE_KEYWORD: case FUNCTION_KEYWORD: case RETURNS_KEYWORD: case SERVICE_KEYWORD: case TYPE_KEYWORD: case LISTENER_KEYWORD: case CONST_KEYWORD: case FINAL_KEYWORD: case RESOURCE_KEYWORD: return true; default: return isSimpleType(tokenKind); } } /** * Parse mapping constructor field. * <p> * <code>field := specific-field | computed-name-field | spread-field</code> * * @param fieldContext Context of the mapping field * @param leadingComma Leading comma * @return Parsed node */ private STNode parseMappingField(ParserRuleContext fieldContext, STNode leadingComma) { STToken nextToken = peek(); return parseMappingField(nextToken.kind, fieldContext, leadingComma); } private STNode parseMappingField(SyntaxKind tokenKind, ParserRuleContext fieldContext, STNode leadingComma) { switch (tokenKind) { case IDENTIFIER_TOKEN: return parseSpecificFieldWithOptionValue(leadingComma); case STRING_LITERAL: STNode key = parseStringLiteral(); STNode colon = parseColon(); STNode valueExpr = parseExpression(); return STNodeFactory.createSpecificFieldNode(leadingComma, key, colon, valueExpr); case OPEN_BRACKET_TOKEN: return parseComputedField(leadingComma); case ELLIPSIS_TOKEN: STNode ellipsis = parseEllipsis(); STNode expr = parseExpression(); return STNodeFactory.createSpreadFieldNode(leadingComma, ellipsis, expr); case CLOSE_BRACE_TOKEN: if (fieldContext == ParserRuleContext.FIRST_MAPPING_FIELD) { return null; } default: STToken token = peek(); Solution solution = recover(token, fieldContext, fieldContext, leadingComma); if (solution.action == 
Action.REMOVE) { return solution.recoveredNode; } return parseMappingField(solution.tokenKind, fieldContext, leadingComma); } } /** * Parse mapping constructor specific-field with an optional value. * * @param leadingComma * @return Parsed node */ private STNode parseSpecificFieldWithOptionValue(STNode leadingComma) { STNode key = parseIdentifier(ParserRuleContext.MAPPING_FIELD_NAME); return parseSpecificFieldRhs(leadingComma, key); } private STNode parseSpecificFieldRhs(STNode leadingComma, STNode key) { STToken nextToken = peek(); return parseSpecificFieldRhs(nextToken.kind, leadingComma, key); } private STNode parseSpecificFieldRhs(SyntaxKind tokenKind, STNode leadingComma, STNode key) { STNode colon; STNode valueExpr; switch (tokenKind) { case COLON_TOKEN: colon = parseColon(); valueExpr = parseExpression(); break; case COMMA_TOKEN: colon = STNodeFactory.createEmptyNode(); valueExpr = STNodeFactory.createEmptyNode(); break; default: if (isEndOfMappingConstructor(tokenKind)) { colon = STNodeFactory.createEmptyNode(); valueExpr = STNodeFactory.createEmptyNode(); break; } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.SPECIFIC_FIELD_RHS, leadingComma, key); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseSpecificFieldRhs(solution.tokenKind, leadingComma, key); } return STNodeFactory.createSpecificFieldNode(leadingComma, key, colon, valueExpr); } /** * Parse string literal. * * @return Parsed node */ private STNode parseStringLiteral() { STToken token = peek(); if (token.kind == SyntaxKind.STRING_LITERAL) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.STRING_LITERAL); return sol.recoveredNode; } } /** * Parse colon token. 
* * @return Parsed node */ private STNode parseColon() { STToken token = peek(); if (token.kind == SyntaxKind.COLON_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.COLON); return sol.recoveredNode; } } /** * Parse computed-name-field of a mapping constructor expression. * <p> * <code>computed-name-field := [ field-name-expr ] : value-expr</code> * * @param leadingComma Leading comma * @return Parsed node */ private STNode parseComputedField(STNode leadingComma) { startContext(ParserRuleContext.COMPUTED_FIELD_NAME); STNode openBracket = parseOpenBracket(); STNode fieldNameExpr = parseExpression(); STNode closeBracket = parseCloseBracket(); endContext(); STNode colon = parseColon(); STNode valueExpr = parseExpression(); return STNodeFactory.createComputedNameFieldNode(leadingComma, openBracket, fieldNameExpr, closeBracket, colon, valueExpr); } /** * Parse open bracket. * * @return Parsed node */ private STNode parseOpenBracket() { STToken token = peek(); if (token.kind == SyntaxKind.OPEN_BRACKET_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.OPEN_BRACKET); return sol.recoveredNode; } } /** * <p> * Parse compound assignment statement, which takes the following format. * </p> * <code>assignment-stmt := lvexpr CompoundAssignmentOperator action-or-expr ;</code> * * @return Parsed node */ private STNode parseCompoundAssignmentStmt() { startContext(ParserRuleContext.COMPOUND_ASSIGNMENT_STMT); STNode varName = parseVariableName(); STNode compoundAssignmentStmt = parseCompoundAssignmentStmtRhs(varName); endContext(); return compoundAssignmentStmt; } /** * <p> * Parse the RHS portion of the compound assignment. 
* </p>
 * <code>compound-assignment-stmt-rhs := CompoundAssignmentOperator action-or-expr ;</code>
 *
 * @param lvExpr LHS expression
 * @return Parsed node
 */
private STNode parseCompoundAssignmentStmtRhs(STNode lvExpr) {
    validateLVExpr(lvExpr);
    STNode binaryOperator = parseCompoundBinaryOperator();
    STNode equalsToken = parseAssignOp();
    STNode expr = parseActionOrExpression();
    STNode semicolon = parseSemicolon();
    return STNodeFactory.createCompoundAssignmentStatementNode(lvExpr, binaryOperator, equalsToken, expr,
            semicolon);
}

/**
 * Parse compound binary operator.
 * <code>BinaryOperator := + | - | * | / | & | | | ^ | << | >> | >>></code>
 *
 * @return Parsed node
 */
private STNode parseCompoundBinaryOperator() {
    STToken token = peek();
    if (isCompoundBinaryOperator(token.kind)) {
        return consume();
    } else {
        // Not a compound operator at the cursor: delegate to the error handler to recover.
        Solution sol = recover(token, ParserRuleContext.COMPOUND_BINARY_OPERATOR);
        return sol.recoveredNode;
    }
}

/**
 * Parse service declaration.
 * <p>
 * <code>
 * service-decl := metadata service [variable-name] on expression-list service-body-block
 * <br/>
 * expression-list := expression (, expression)*
 * </code>
 *
 * @param metadata Metadata
 * @return Parsed node
 */
private STNode parseServiceDecl(STNode metadata) {
    startContext(ParserRuleContext.SERVICE_DECL);
    STNode serviceKeyword = parseServiceKeyword();
    STNode serviceDecl = parseServiceRhs(metadata, serviceKeyword);
    endContext();
    return serviceDecl;
}

/**
 * Parse rhs of the service declaration.
 * <p>
 * <code>
 * service-rhs := [variable-name] on expression-list service-body-block
 * </code>
 *
 * @param metadata Metadata
 * @param serviceKeyword Service keyword
 * @return Parsed node
 */
private STNode parseServiceRhs(STNode metadata, STNode serviceKeyword) {
    STNode serviceName = parseServiceName();
    STNode onKeyword = parseOnKeyword();
    STNode expressionList = parseListeners();
    STNode serviceBody = parseServiceBody();
    STNode service = STNodeFactory.createServiceDeclarationNode(metadata, serviceKeyword, serviceName, onKeyword,
            expressionList, serviceBody);
    return service;
}

/**
 * Parse the optional service name. Dispatches on the next token kind.
 *
 * @return Parsed node
 */
private STNode parseServiceName() {
    STToken nextToken = peek();
    return parseServiceName(nextToken.kind);
}

/**
 * Parse the optional service name, given the next token kind. Returns an empty
 * node when the name is absent (i.e. the next token is already <code>on</code>).
 *
 * @param kind Next token kind
 * @return Parsed node
 */
private STNode parseServiceName(SyntaxKind kind) {
    switch (kind) {
        case IDENTIFIER_TOKEN:
            return parseIdentifier(ParserRuleContext.SERVICE_NAME);
        case ON_KEYWORD:
            // Service name is optional; 'on' means it was omitted.
            return STNodeFactory.createEmptyNode();
        default:
            STToken token = peek();
            Solution solution = recover(token, ParserRuleContext.OPTIONAL_SERVICE_NAME);
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }
            return parseServiceName(solution.tokenKind);
    }
}

/**
 * Parse service keyword.
 *
 * @return Parsed node
 */
private STNode parseServiceKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.SERVICE_KEYWORD) {
        return consume();
    } else {
        Solution sol = recover(token, ParserRuleContext.SERVICE_KEYWORD);
        return sol.recoveredNode;
    }
}

/**
 * Check whether the given token kind is a compound binary operator.
 * <p>
 * <code>compound-binary-operator := + | - | * | / | & | | | ^ | << | >> | >>></code>
 * <p>
 * NOTE(review): the shift operators (<code><< >> >>></code>) listed in the grammar
 * above are not matched by this method — TODO confirm they are handled elsewhere.
 *
 * @param tokenKind STToken kind
 * @return <code>true</code> if the token kind refers to a binary operator. <code>false</code> otherwise
 */
private boolean isCompoundBinaryOperator(SyntaxKind tokenKind) {
    switch (tokenKind) {
        case PLUS_TOKEN:
        case MINUS_TOKEN:
        case SLASH_TOKEN:
        case ASTERISK_TOKEN:
        case BITWISE_AND_TOKEN:
        case BITWISE_XOR_TOKEN:
        case PIPE_TOKEN:
            // Only a compound operator when immediately followed by '='.
            return getNextNextToken(tokenKind).kind == SyntaxKind.EQUAL_TOKEN;
        default:
            return false;
    }
}

/**
 * Parse on keyword.
 *
 * @return Parsed node
 */
private STNode parseOnKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.ON_KEYWORD) {
        return consume();
    } else {
        Solution sol = recover(token, ParserRuleContext.ON_KEYWORD);
        return sol.recoveredNode;
    }
}

/**
 * Parse listener references.
 * <p>
 * <code>expression-list := expression (, expression)*</code>
 *
 * @return Parsed node
 */
private STNode parseListeners() {
    startContext(ParserRuleContext.LISTENERS_LIST);
    List<STNode> listeners = new ArrayList<>();

    STToken nextToken = peek();
    if (isEndOfExpressionsList(nextToken.kind)) {
        // At least one listener expression is mandatory.
        endContext();
        this.errorHandler.reportMissingTokenError("missing expression");
        return STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
    }

    // First item has no leading comma.
    STNode leadingComma = STNodeFactory.createEmptyNode();
    STNode exprListItem = parseExpressionListItem(leadingComma);
    listeners.add(exprListItem);

    nextToken = peek();
    while (!isEndOfExpressionsList(nextToken.kind)) {
        leadingComma = parseComma();
        exprListItem = parseExpressionListItem(leadingComma);
        listeners.add(exprListItem);
        nextToken = peek();
    }

    endContext();
    return STNodeFactory.createNodeList(listeners);
}

/**
 * Check whether the given token kind terminates an expression list.
 *
 * @param tokenKind Next token kind
 * @return <code>true</code> if the list ends here. <code>false</code> otherwise
 */
private boolean isEndOfExpressionsList(SyntaxKind tokenKind) {
    switch (tokenKind) {
        case COMMA_TOKEN:
            return false;
        case EOF_TOKEN:
        case SEMICOLON_TOKEN:
        case CLOSE_BRACKET_TOKEN:
        case CLOSE_BRACE_TOKEN:
        case CLOSE_PAREN_TOKEN:
        case OPEN_BRACE_TOKEN:
            return true;
        default:
            // Anything that cannot start an expression ends the list.
            return !isValidExprStart(tokenKind);
    }
}

/**
 * Parse expression list item.
 *
 * @param leadingComma Leading comma
 * @return Parsed node
 */
private STNode parseExpressionListItem(STNode leadingComma) {
    STNode expr = parseExpression();
    return STNodeFactory.createExpressionListItemNode(leadingComma, expr);
}

/**
 * Parse service body.
 * <p>
 * <code>
 * service-body-block := { service-method-defn* }
 * </code>
 *
 * @return Parsed node
 */
private STNode parseServiceBody() {
    STNode openBrace = parseOpenBrace();
    STNode resources = parseResources();
    STNode closeBrace = parseCloseBrace();
    return STNodeFactory.createServiceBodyNode(openBrace, resources, closeBrace);
}

/**
 * Parse service resource definitions.
 *
 * @return Parsed node
 */
private STNode parseResources() {
    List<STNode> resources = new ArrayList<>();
    STToken nextToken = peek();
    while (!isEndOfServiceDecl(nextToken.kind)) {
        STNode serviceMethod = parseResource();
        if (serviceMethod == null) {
            break;
        }
        resources.add(serviceMethod);
        nextToken = peek();
    }
    return STNodeFactory.createNodeList(resources);
}

/**
 * Check whether the given token kind terminates a service declaration body.
 *
 * @param tokenKind Next token kind
 * @return <code>true</code> if the service declaration ends here. <code>false</code> otherwise
 */
private boolean isEndOfServiceDecl(SyntaxKind tokenKind) {
    switch (tokenKind) {
        case CLOSE_BRACE_TOKEN:
        case EOF_TOKEN:
        case CLOSE_BRACE_PIPE_TOKEN:
        case TYPE_KEYWORD:
        case SERVICE_KEYWORD:
            return true;
        default:
            return false;
    }
}

/**
 * Parse resource definition (i.e. service-method-defn).
 * <p>
 * <code>
 * service-body-block := { service-method-defn* }
 * <br/>
 * service-method-defn := metadata [resource] function identifier function-signature method-defn-body
 * </code>
 *
 * @return Parsed node
 */
private STNode parseResource() {
    STToken nextToken = peek();
    return parseResource(nextToken.kind);
}

/**
 * Parse resource definition, given the next token kind. Consumes any leading
 * metadata (documentation/annotations) first.
 *
 * @param nextTokenKind Next token kind
 * @return Parsed node, or <code>null</code> when the service body ends
 */
private STNode parseResource(SyntaxKind nextTokenKind) {
    STNode metadata;
    switch (nextTokenKind) {
        case RESOURCE_KEYWORD:
        case FUNCTION_KEYWORD:
            metadata = createEmptyMetadata();
            break;
        case DOCUMENTATION_LINE:
        case AT_TOKEN:
            metadata = parseMetaData(nextTokenKind);
            nextTokenKind = peek().kind;
            break;
        default:
            if (isEndOfServiceDecl(nextTokenKind)) {
                return null;
            }
            STToken token = peek();
            Solution solution = recover(token, ParserRuleContext.RESOURCE_DEF);
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }
            return parseResource(solution.tokenKind);
    }
    return parseResource(nextTokenKind, metadata);
}

/**
 * Parse resource definition, given the already-parsed metadata.
 *
 * @param nextTokenKind Next token kind
 * @param metadata Parsed metadata
 * @return Parsed node
 */
private STNode parseResource(SyntaxKind nextTokenKind, STNode metadata) {
    switch (nextTokenKind) {
        case RESOURCE_KEYWORD:
            STNode resourceKeyword = parseResourceKeyword();
            return parseFuncDefinition(metadata, resourceKeyword, false);
        case FUNCTION_KEYWORD:
            // 'resource' qualifier is optional.
            return parseFuncDefinition(metadata, STNodeFactory.createEmptyNode(), false);
        default:
            STToken token = peek();
            Solution solution = recover(token, ParserRuleContext.RESOURCE_DEF, metadata);
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }
            return parseResource(solution.tokenKind, metadata);
    }
}

/**
 * Parse resource keyword.
 *
 * @return Parsed node
 */
private STNode parseResourceKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.RESOURCE_KEYWORD) {
        return consume();
    } else {
        Solution sol = recover(token, ParserRuleContext.RESOURCE_KEYWORD);
        return sol.recoveredNode;
    }
}

/**
 * Check whether next construct is a service declaration or not. This method is
 * used to determine whether an end-of-block is reached, if the next token is
 * a service-keyword. Because service-keyword can be used in statements as well
 * as in top-level node (service-decl). We have reached a service-decl, then
 * it could be due to missing close-brace at the end of the current block.
 *
 * @return <code>true</code> if the next construct is a service declaration.
 *         <code>false</code> otherwise
 */
/**
 * A recursive-descent parser for Ballerina, built on top of {@link AbstractParser}.
 * Parsing can start from a whole compilation unit or from a sub-rule, and can be
 * resumed mid-rule after error recovery via {@link #resumeParsing}.
 */
class BallerinaParser extends AbstractParser {

    private static final OperatorPrecedence DEFAULT_OP_PRECEDENCE = OperatorPrecedence.ACTION;

    protected BallerinaParser(AbstractTokenReader tokenReader) {
        super(tokenReader, new BallerinaParserErrorHandler(tokenReader));
    }

    /**
     * Start parsing the given input.
     *
     * @return Parsed node
     */
    @Override
    public STNode parse() {
        return parseCompUnit();
    }

    /**
     * Start parsing the input from a given context. Supported starting points are:
     * <ul>
     * <li>Module part (a file)</li>
     * <li>Top level node</li>
     * <li>Statement</li>
     * <li>Expression</li>
     * </ul>
     *
     * @param context Context to start parsing
     * @return Parsed node
     */
    public STNode parse(ParserRuleContext context) {
        switch (context) {
            case COMP_UNIT:
                return parseCompUnit();
            case TOP_LEVEL_NODE:
                startContext(ParserRuleContext.COMP_UNIT);
                return parseTopLevelNode();
            case STATEMENT:
                // Open the enclosing contexts a statement would normally sit inside.
                startContext(ParserRuleContext.COMP_UNIT);
                startContext(ParserRuleContext.FUNC_BODY_BLOCK);
                return parseStatement();
            case EXPRESSION:
                // Open the enclosing contexts an expression would normally sit inside.
                startContext(ParserRuleContext.COMP_UNIT);
                startContext(ParserRuleContext.FUNC_BODY_BLOCK);
                startContext(ParserRuleContext.STATEMENT);
                return parseExpression();
            default:
                throw new UnsupportedOperationException("Cannot start parsing from: " + context);
        }
    }

    /**
     * Resume the parsing from the given context. Each case re-enters the parse method
     * for that rule, restoring any partially-parsed operands from {@code args} (the
     * casts must mirror the corresponding method's parameter list exactly).
     *
     * @param context Context to resume parsing
     * @param args Arguments that requires to continue parsing from the given parser context
     * @return Parsed node
     */
    @Override
    public STNode resumeParsing(ParserRuleContext context, Object... args) {
        switch (context) {
            case COMP_UNIT:
                return parseCompUnit();
            case EXTERNAL_FUNC_BODY:
                return parseExternalFunctionBody();
            case FUNC_BODY:
                return parseFunctionBody((boolean) args[0]);
            case OPEN_BRACE:
                return parseOpenBrace();
            case CLOSE_BRACE:
                return parseCloseBrace();
            case FUNC_NAME:
                return parseFunctionName();
            case OPEN_PARENTHESIS:
            case ARG_LIST_START:
                return parseOpenParenthesis((ParserRuleContext) args[0]);
            case SIMPLE_TYPE_DESCRIPTOR:
                return parseSimpleTypeDescriptor();
            case ASSIGN_OP:
                return parseAssignOp();
            case EXTERNAL_KEYWORD:
                return parseExternalKeyword();
            case SEMICOLON:
                return parseSemicolon();
            case CLOSE_PARENTHESIS:
                return parseCloseParenthesis();
            case VARIABLE_NAME:
                return parseVariableName();
            case TERMINAL_EXPRESSION:
                return parseTerminalExpression((STNode) args[0], (boolean) args[1], (boolean) args[2]);
            case STATEMENT:
                return parseStatement();
            case STATEMENT_WITHOUT_ANNOTS:
                return parseStatement((STNode) args[0]);
            case EXPRESSION_RHS:
                return parseExpressionRhs((OperatorPrecedence) args[0], (STNode) args[1], (boolean) args[2],
                        (boolean) args[3]);
            case PARAMETER_START:
                return parseParameter((SyntaxKind) args[0], (STNode) args[1], (int) args[2], (boolean) args[3]);
            case PARAMETER_WITHOUT_ANNOTS:
                return parseParamGivenAnnots((SyntaxKind) args[0], (STNode) args[1], (STNode) args[2],
                        (int) args[3], (boolean) args[4]);
            case AFTER_PARAMETER_TYPE:
                return parseAfterParamType((SyntaxKind) args[0], (STNode) args[1], (STNode) args[2],
                        (STNode) args[3], (STNode) args[4], (boolean) args[5]);
            case PARAMETER_NAME_RHS:
                return parseParameterRhs((SyntaxKind) args[0], (STNode) args[1], (STNode) args[2], (STNode) args[3],
                        (STNode) args[4], (STNode) args[5]);
            case TOP_LEVEL_NODE:
                return parseTopLevelNode();
            case TOP_LEVEL_NODE_WITHOUT_METADATA:
                return parseTopLevelNode((STNode) args[0]);
            case TOP_LEVEL_NODE_WITHOUT_MODIFIER:
                return parseTopLevelNode((STNode) args[0], (STNode) args[1]);
            case STATEMENT_START_IDENTIFIER:
                return parseStatementStartIdentifier();
            case VAR_DECL_STMT_RHS:
                return parseVarDeclRhs((STNode) args[0], (STNode) args[1], (STNode) args[2], (boolean) args[3]);
            case TYPE_REFERENCE:
                return parseTypeReference();
            case FIELD_DESCRIPTOR_RHS:
                return parseFieldDescriptorRhs((STNode) args[0], (STNode) args[1], (STNode) args[2]);
            case RECORD_BODY_START:
                return parseRecordBodyStartDelimiter();
            case TYPE_DESCRIPTOR:
                return parseTypeDescriptorInternal((ParserRuleContext) args[0]);
            case OBJECT_MEMBER_START:
                return parseObjectMember();
            case OBJECT_FUNC_OR_FIELD_WITHOUT_VISIBILITY:
                return parseObjectMethodOrField((STNode) args[0], (STNode) args[1]);
            case OBJECT_FIELD_RHS:
                return parseObjectFieldRhs((STNode) args[0], (STNode) args[1], (STNode) args[2], (STNode) args[3]);
            case OBJECT_TYPE_FIRST_QUALIFIER:
                return parseObjectTypeQualifiers();
            case OBJECT_TYPE_SECOND_QUALIFIER:
                return parseObjectTypeSecondQualifier((STNode) args[0]);
            case OBJECT_KEYWORD:
                return parseObjectKeyword();
            case TYPE_NAME:
                return parseTypeName();
            case IF_KEYWORD:
                return parseIfKeyword();
            case ELSE_KEYWORD:
                return parseElseKeyword();
            case ELSE_BODY:
                return parseElseBody();
            case WHILE_KEYWORD:
                return parseWhileKeyword();
            case PANIC_KEYWORD:
                return parsePanicKeyword();
            case MAJOR_VERSION:
                return parseMajorVersion();
            case IMPORT_DECL_RHS:
                return parseImportDecl((STNode) args[0], (STNode) args[1]);
            case IMPORT_PREFIX:
                return parseImportPrefix();
            case IMPORT_MODULE_NAME:
            case IMPORT_ORG_OR_MODULE_NAME:
            case VARIABLE_REF:
            case SERVICE_NAME:
            case IMPLICIT_ANON_FUNC_PARAM:
                return parseIdentifier(context);
            case IMPORT_KEYWORD:
                return parseImportKeyword();
            case SLASH:
                return parseSlashToken();
            case DOT:
                return parseDotToken();
            case IMPORT_VERSION_DECL:
                return parseVersion();
            case VERSION_KEYWORD:
                return parseVersionKeywrod();
            case VERSION_NUMBER:
                return parseVersionNumber();
            case DECIMAL_INTEGER_LITERAL:
                return parseDecimalIntLiteral(context);
            case IMPORT_SUB_VERSION:
                return parseSubVersion(context);
            case IMPORT_PREFIX_DECL:
                return parseImportPrefixDecl();
            case AS_KEYWORD:
                return parseAsKeyword();
            case CONTINUE_KEYWORD:
                return parseContinueKeyword();
            case BREAK_KEYWORD:
                return parseBreakKeyword();
            case RETURN_KEYWORD:
                return parseReturnKeyword();
            case MAPPING_FIELD:
            case FIRST_MAPPING_FIELD:
                return parseMappingField((ParserRuleContext) args[0]);
            case SPECIFIC_FIELD_RHS:
                return parseSpecificFieldRhs((STNode) args[0]);
            case STRING_LITERAL:
                return parseStringLiteral();
            case COLON:
                return parseColon();
            case OPEN_BRACKET:
                return parseOpenBracket();
            case RESOURCE_DEF:
                return parseResource();
            case OPTIONAL_SERVICE_NAME:
                return parseServiceName();
            case SERVICE_KEYWORD:
                return parseServiceKeyword();
            case ON_KEYWORD:
                return parseOnKeyword();
            case RESOURCE_KEYWORD:
                return parseResourceKeyword();
            case LISTENER_KEYWORD:
                return parseListenerKeyword();
            case NIL_TYPE_DESCRIPTOR:
                return parseNilTypeDescriptor();
            case COMPOUND_ASSIGNMENT_STMT:
                return parseCompoundAssignmentStmt();
            case TYPEOF_KEYWORD:
                return parseTypeofKeyword();
            case ARRAY_TYPE_DESCRIPTOR:
                return parseArrayTypeDescriptor((STNode) args[0]);
            case ARRAY_LENGTH:
                return parseArrayLength();
            case FUNC_DEF_OR_FUNC_TYPE:
            case REQUIRED_PARAM:
            case ANNOT_REFERENCE:
                return parseIdentifier(context);
            case IS_KEYWORD:
                return parseIsKeyword();
            case STMT_START_WITH_EXPR_RHS:
                return parseStatementStartWithExpr((STNode) args[0]);
            case COMMA:
                return parseComma();
            case CONST_DECL_TYPE:
                return parseConstDecl((STNode) args[0], (STNode) args[1], (STNode) args[2]);
            case STMT_START_IDENTIFIER_RHS:
                return parseStatementStartIdentifierRhs((STNode) args[0], (STNode) args[1]);
            case LT:
                return parseLTToken();
            case GT:
                return parseGTToken();
            case NIL_LITERAL:
                return parseNilLiteral();
            case RECORD_FIELD_OR_RECORD_END:
                return parseFieldOrRestDescriptor((boolean) args[0]);
            case ANNOTATION_KEYWORD:
                return parseAnnotationKeyword();
            case ANNOT_DECL_OPTIONAL_TYPE:
                return parseAnnotationDeclFromType((STNode) args[0], (STNode) args[1], (STNode) args[2],
                        (STNode) args[3]);
            case ANNOT_DECL_RHS:
                return parseAnnotationDeclRhs((STNode) args[0], (STNode) args[1], (STNode) args[2],
                        (STNode) args[3], (STNode) args[4]);
            case ANNOT_OPTIONAL_ATTACH_POINTS:
                return parseAnnotationDeclAttachPoints((STNode) args[0], (STNode) args[1], (STNode) args[2],
                        (STNode) args[3], (STNode) args[4], (STNode) args[5]);
            case SOURCE_KEYWORD:
                return parseSourceKeyword();
            case ATTACH_POINT_IDENT:
                return parseAttachPointIdent((STNode) args[0]);
            case IDENT_AFTER_OBJECT_IDENT:
                return parseIdentAfterObjectIdent();
            case FUNCTION_IDENT:
                return parseFunctionIdent();
            case FIELD_IDENT:
                return parseFieldIdent();
            case ATTACH_POINT_END:
                return parseAttachPointEnd();
            case XMLNS_KEYWORD:
                return parseXMLNSKeyword();
            case XML_NAMESPACE_PREFIX_DECL:
                return parseXMLDeclRhs((STNode) args[0], (STNode) args[1]);
            case NAMESPACE_PREFIX:
                return parseNamespacePrefix();
            case WORKER_KEYWORD:
                return parseWorkerKeyword();
            case WORKER_NAME:
                return parseWorkerName();
            case FORK_KEYWORD:
                return parseForkKeyword();
            case DECIMAL_FLOATING_POINT_LITERAL:
                return parseDecimalFloatingPointLiteral();
            case HEX_FLOATING_POINT_LITERAL:
                return parseHexFloatingPointLiteral();
            case TRAP_KEYWORD:
                return parseTrapKeyword();
            case IN_KEYWORD:
                return parseInKeyword();
            case FOREACH_KEYWORD:
                return parseForEachKeyword();
            case TABLE_KEYWORD:
                return parseTableKeyword();
            case KEY_KEYWORD:
                return parseKeyKeyword();
            case TABLE_KEYWORD_RHS:
                return parseTableConstructorOrQuery((STNode) args[0], (boolean) args[1]);
            case ERROR_KEYWORD:
                return parseErrorKeyWord();
            case LET_KEYWORD:
                return parseLetKeyword();
            case STREAM_KEYWORD:
                return parseStreamKeyword();
            case STREAM_TYPE_FIRST_PARAM_RHS:
                return parseStreamTypeParamsNode((STNode) args[0], (STNode) args[1]);
            case TEMPLATE_START:
            case TEMPLATE_END:
                return parseBacktickToken(context);
            case KEY_CONSTRAINTS_RHS:
                return parseKeyConstraint((STNode) args[0]);
            case FUNCTION_KEYWORD_RHS:
                // Bug fix: the last argument previously re-used args[3] for both booleans,
                // dropping the caller-supplied isObjectMethod flag in args[4]. See
                // parseFuncDefinition(), which invokes parseFunctionKeywordRhs with five
                // distinct arguments.
                return parseFunctionKeywordRhs((STNode) args[0], (STNode) args[1], (STNode) args[2],
                        (boolean) args[3], (boolean) args[4]);
            case FUNC_OPTIONAL_RETURNS:
                return parseFuncReturnTypeDescriptor();
            case RETURNS_KEYWORD:
                return parseReturnsKeyword();
            case NEW_KEYWORD_RHS:
                return parseNewKeywordRhs((STNode) args[0]);
            case NEW_KEYWORD:
                return parseNewKeyword();
            case IMPLICIT_NEW:
                return parseImplicitNewRhs((STNode) args[0]);
            case FROM_KEYWORD:
                return parseFromKeyword();
            case WHERE_KEYWORD:
                return parseWhereKeyword();
            case SELECT_KEYWORD:
                return parseSelectKeyword();
            case TABLE_CONSTRUCTOR_OR_QUERY_START:
                return parseTableConstructorOrQuery((boolean) args[0]);
            case TABLE_CONSTRUCTOR_OR_QUERY_RHS:
                return parseTableConstructorOrQueryRhs((STNode) args[0], (STNode) args[1], (boolean) args[2]);
            case QUERY_PIPELINE_RHS:
                return parseIntermediateClause((boolean) args[0]);
            case ANON_FUNC_BODY:
                return parseAnonFuncBody();
            case CLOSE_BRACKET:
                return parseCloseBracket();
            case ARG_START_OR_ARG_LIST_END:
                return parseArg((STNode) args[0]);
            case ARG_END:
                return parseArgEnd();
            case MAPPING_FIELD_END:
                return parseMappingFieldEnd();
            case FUNCTION_KEYWORD:
                return parseFunctionKeyword();
            case FIELD_OR_REST_DESCIPTOR_RHS:
                return parseFieldOrRestDescriptorRhs((STNode) args[0], (STNode) args[1]);
            case TYPE_DESC_IN_TUPLE_RHS:
                return parseTupleMemberRhs();
            case LIST_BINDING_PATTERN_END_OR_CONTINUE:
                return parseListBindingpatternRhs();
            case MAPPING_BINDING_PATTERN_END:
                return parseMappingBindingpatternEnd();
            case FIELD_BINDING_PATTERN_NAME:
            case FIELD_BINDING_PATTERN:
                return parseFieldBindingPattern();
            case CONSTANT_EXPRESSION_START:
                return parseConstExprInternal();
            case LIST_CONSTRUCTOR_MEMBER_END:
                return parseListConstructorMemberEnd();
            case NIL_OR_PARENTHESISED_TYPE_DESC_RHS:
                return parseNilOrParenthesisedTypeDescRhs((STNode) args[0]);
            case ANON_FUNC_PARAM_RHS:
                return parseImplicitAnonFuncParamEnd();
            case CAPTURE_BINDING_PATTERN:
                return parseCaptureOrWildcardBindingPattern();
            case LIST_BINDING_PATTERN:
                return parseListBindingPattern();
            case BINDING_PATTERN:
                return parseBindingPattern();
            case PEER_WORKER_NAME:
                return parsePeerWorkerName();
            case SYNC_SEND_TOKEN:
                return parseSyncSendToken();
            case LEFT_ARROW_TOKEN:
                return parseLeftArrowToken();
            case RECEIVE_WORKERS:
                return parseReceiveWorkers();
            case WAIT_KEYWORD:
                return parseWaitKeyword();
            case WAIT_FUTURE_EXPR_END:
                return parseWaitFutureExprEnd((int) args[0]);
            case WAIT_FIELD_NAME:
                return parseWaitField();
            case WAIT_FIELD_END:
                return parseWaitFieldEnd();
            case ANNOT_CHAINING_TOKEN:
                return parseAnnotChainingToken();
            case FIELD_ACCESS_IDENTIFIER:
                return parseFieldAccessIdentifier();
            case DO_KEYWORD:
                return parseDoKeyword();
            case MEMBER_ACCESS_KEY_EXPR_END:
                return parseMemberAccessKeyExprEnd();
            case OPTIONAL_CHAINING_TOKEN:
                return parseOptionalChainingToken();
            case RETRY_KEYWORD_RHS:
                return parseRetryKeywordRhs((STNode) args[0]);
            case RETRY_TYPE_PARAM_RHS:
                return parseRetryTypeParamRhs((STNode) args[0], (STNode) args[1]);
            case TRANSACTION_KEYWORD:
                return parseTransactionKeyword();
            case COMMIT_KEYWORD:
                return parseCommitKeyword();
            case RETRY_KEYWORD:
                return parseRetryKeyword();
            case ROLLBACK_KEYWORD:
                return parseRollbackKeyword();
            case RETRY_BODY:
                return parseRetryBody();
            case ENUM_MEMBER_END:
                return parseEnumMemberEnd();
            case ENUM_MEMBER_NAME:
                return parseEnumMember();
            case BRACKETED_LIST_MEMBER_END:
                return parseBracketedListMemberEnd();
            case STMT_START_BRACKETED_LIST_MEMBER:
                return parseStatementStartBracketedListMember();
            case TYPED_BINDING_PATTERN_TYPE_RHS:
                return parseTypedBindingPatternTypeRhs((STNode) args[0], (ParserRuleContext) args[1]);
            case BRACKETED_LIST_RHS:
                return parseTypedBindingPatternOrMemberAccessRhs((STNode) args[0], (STNode) args[1],
                        (STNode) args[2], (STNode) args[3], (boolean) args[4], (ParserRuleContext) args[5]);
            case UNION_OR_INTERSECTION_TOKEN:
                return parseUnionOrIntersectionToken();
            case BRACKETED_LIST_MEMBER:
            case LIST_BINDING_MEMBER_OR_ARRAY_LENGTH:
                return parseBracketedListMember((boolean) args[0]);
            case BASE16_KEYWORD:
                return parseBase16Keyword();
            case BASE64_KEYWORD:
                return parseBase64Keyword();
            case DOT_LT_TOKEN:
                return parseDotLTToken();
            case SLASH_LT_TOKEN:
                return parseSlashLTToken();
            case DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN:
                return parseDoubleSlashDoubleAsteriskLTToken();
            case XML_ATOMIC_NAME_PATTERN_START:
                return parseXMLAtomicNamePatternBody();
            default:
                throw new IllegalStateException("cannot resume parsing the rule: " + context);
        }
    }

    /*
     * Private methods.
     */

    /**
     * Parse a given input and returns the AST. Starts parsing from the top of a compilation unit.
     *
     * @return Parsed node
     */
    private STNode parseCompUnit() {
        startContext(ParserRuleContext.COMP_UNIT);
        STToken token = peek();
        List<STNode> otherDecls = new ArrayList<>();
        List<STNode> importDecls = new ArrayList<>();

        boolean processImports = true;
        while (token.kind != SyntaxKind.EOF_TOKEN) {
            STNode decl = parseTopLevelNode(token.kind);
            if (decl == null) {
                break;
            }
            if (decl.kind == SyntaxKind.IMPORT_DECLARATION) {
                if (processImports) {
                    importDecls.add(decl);
                } else {
                    // Imports after any other declaration are kept in the tree, but flagged.
                    otherDecls.add(decl);
                    this.errorHandler.reportInvalidNode(token, "imports must be declared before other declarations");
                }
            } else {
                if (processImports) {
                    // First non-import declaration ends the import section.
                    processImports = false;
                }
                otherDecls.add(decl);
            }
            token = peek();
        }

        STToken eof = consume();
        endContext();

        return STNodeFactory.createModulePartNode(STNodeFactory.createNodeList(importDecls),
                STNodeFactory.createNodeList(otherDecls), eof);
    }

    /**
     * Parse top level node having an optional modifier preceding it.
*
 * @return Parsed node
 */
private STNode parseTopLevelNode() {
    STToken token = peek();
    return parseTopLevelNode(token.kind);
}

/**
 * Parse top level node, given the next token kind. Consumes any leading metadata
 * (documentation/annotations) before dispatching on the declaration keyword.
 *
 * @param tokenKind Next token kind
 * @return Parsed node
 */
protected STNode parseTopLevelNode(SyntaxKind tokenKind) {
    STNode metadata;
    switch (tokenKind) {
        case EOF_TOKEN:
            return consume();
        case DOCUMENTATION_LINE:
        case AT_TOKEN:
            metadata = parseMetaData(tokenKind);
            return parseTopLevelNode(metadata);
        case IMPORT_KEYWORD:
        case FINAL_KEYWORD:
        case PUBLIC_KEYWORD:
        case FUNCTION_KEYWORD:
        case TYPE_KEYWORD:
        case LISTENER_KEYWORD:
        case CONST_KEYWORD:
        case ANNOTATION_KEYWORD:
        case XMLNS_KEYWORD:
        case SERVICE_KEYWORD:
        case ENUM_KEYWORD:
            metadata = createEmptyMetadata();
            break;
        case IDENTIFIER_TOKEN:
            if (isModuleVarDeclStart(1)) {
                return parseModuleVarDecl(createEmptyMetadata(), null);
            }
            // fall through: an identifier that is not a var-decl start is handled as a type start
        default:
            if (isTypeStartingToken(tokenKind) && tokenKind != SyntaxKind.IDENTIFIER_TOKEN) {
                metadata = createEmptyMetadata();
                break;
            }

            STToken token = peek();
            Solution solution = recover(token, ParserRuleContext.TOP_LEVEL_NODE);

            if (solution.action == Action.KEEP) {
                metadata = STNodeFactory.createNodeList(new ArrayList<>());
                break;
            }

            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }

            return parseTopLevelNode(solution.tokenKind);
    }

    return parseTopLevelNode(tokenKind, metadata);
}

/**
 * Parse top level node having an optional modifier preceding it, given the next token kind.
 *
 * @param metadata Metadata
 * @return Parsed node
 */
private STNode parseTopLevelNode(STNode metadata) {
    STToken nextToken = peek();
    return parseTopLevelNode(nextToken.kind, metadata);
}

/**
 * Parse top level node, given the parsed metadata and the next token kind.
 * Consumes an optional visibility qualifier before dispatching further.
 *
 * @param tokenKind Next token kind
 * @param metadata Metadata
 * @return Parsed node
 */
private STNode parseTopLevelNode(SyntaxKind tokenKind, STNode metadata) {
    STNode qualifier = null;
    switch (tokenKind) {
        case EOF_TOKEN:
            if (metadata != null) {
                this.errorHandler.reportInvalidNode(null, "invalid metadata");
            }
            return null;
        case PUBLIC_KEYWORD:
            qualifier = parseQualifier();
            tokenKind = peek().kind;
            break;
        case FUNCTION_KEYWORD:
        case TYPE_KEYWORD:
        case LISTENER_KEYWORD:
        case CONST_KEYWORD:
        case FINAL_KEYWORD:
        case IMPORT_KEYWORD:
        case ANNOTATION_KEYWORD:
        case XMLNS_KEYWORD:
        case ENUM_KEYWORD:
            break;
        case IDENTIFIER_TOKEN:
            if (isModuleVarDeclStart(1)) {
                return parseModuleVarDecl(metadata, null);
            }
            // fall through: an identifier that is not a var-decl start is handled as a type start
        default:
            if (isTypeStartingToken(tokenKind) && tokenKind != SyntaxKind.IDENTIFIER_TOKEN) {
                break;
            }

            STToken token = peek();
            Solution solution = recover(token, ParserRuleContext.TOP_LEVEL_NODE_WITHOUT_METADATA, metadata);

            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }

            if (solution.action == Action.KEEP) {
                qualifier = STNodeFactory.createEmptyNode();
                break;
            }

            return parseTopLevelNode(solution.tokenKind, metadata);
    }

    return parseTopLevelNode(tokenKind, metadata, qualifier);
}

/**
 * Check whether the cursor is at the start of a module level var-decl.
 *
 * @param lookahead Offset of the token to check
 * @return <code>true</code> if the cursor is at the start of a module level var-decl.
 *         <code>false</code> otherwise.
 */
private boolean isModuleVarDeclStart(int lookahead) {
    STToken nextToken = peek(lookahead + 1);
    switch (nextToken.kind) {
        case EQUAL_TOKEN:
        case OPEN_BRACKET_TOKEN:
        case QUESTION_MARK_TOKEN:
        case PIPE_TOKEN:
        case BITWISE_AND_TOKEN:
            return true;
        case IDENTIFIER_TOKEN:
            switch (peek(lookahead + 2).kind) {
                case EQUAL_TOKEN:
                case SEMICOLON_TOKEN:
                    return true;
                default:
                    return false;
            }
        case COLON_TOKEN:
            // A qualified identifier (mod:Type) is only considered at the first lookahead.
            if (lookahead > 1) {
                return false;
            }
            if (peek(lookahead + 2).kind != SyntaxKind.IDENTIFIER_TOKEN) {
                return false;
            }
            return isModuleVarDeclStart(lookahead + 2);
        default:
            return false;
    }
}

/**
 * Parse import declaration.
 * <p>
 * <code>import-decl := import [org-name /] module-name [version sem-ver] [as import-prefix] ;</code>
 *
 * @return Parsed node
 */
private STNode parseImportDecl() {
    startContext(ParserRuleContext.IMPORT_DECL);
    // Switch the lexer into import mode for the duration of the declaration.
    this.tokenReader.startMode(ParserMode.IMPORT);
    STNode importKeyword = parseImportKeyword();
    STNode identifier = parseIdentifier(ParserRuleContext.IMPORT_ORG_OR_MODULE_NAME);

    STToken token = peek();
    STNode importDecl = parseImportDecl(token.kind, importKeyword, identifier);
    this.tokenReader.endMode();
    endContext();
    return importDecl;
}

/**
 * Parse import keyword.
 *
 * @return Parsed node
 */
private STNode parseImportKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.IMPORT_KEYWORD) {
        return consume();
    } else {
        Solution sol = recover(token, ParserRuleContext.IMPORT_KEYWORD);
        return sol.recoveredNode;
    }
}

/**
 * Parse identifier.
 *
 * @param currentCtx Context used for error recovery when an identifier is missing
 * @return Parsed node
 */
private STNode parseIdentifier(ParserRuleContext currentCtx) {
    STToken token = peek();
    if (token.kind == SyntaxKind.IDENTIFIER_TOKEN) {
        return consume();
    } else {
        Solution sol = recover(token, currentCtx);
        return sol.recoveredNode;
    }
}

/**
 * Parse RHS of the import declaration. This includes the components after the
 * starting identifier (org-name/module-name) of the import decl.
*
 * @param importKeyword Import keyword
 * @param identifier Org-name or the module name
 * @return Parsed node
 */
private STNode parseImportDecl(STNode importKeyword, STNode identifier) {
    STToken nextToken = peek();
    return parseImportDecl(nextToken.kind, importKeyword, identifier);
}

/**
 * Parse RHS of the import declaration, given the next token kind. The token after
 * the first identifier decides whether that identifier was an org-name or a
 * module-name, and which optional parts (version, prefix) follow.
 *
 * @param tokenKind Next token kind
 * @param importKeyword Import keyword
 * @param identifier Org-name or the module name
 * @return Parsed node
 */
private STNode parseImportDecl(SyntaxKind tokenKind, STNode importKeyword, STNode identifier) {
    STNode orgName;
    STNode moduleName;
    STNode version;
    STNode alias;

    switch (tokenKind) {
        case SLASH_TOKEN:
            // 'identifier' was the org-name; module name follows the slash.
            STNode slash = parseSlashToken();
            orgName = STNodeFactory.createImportOrgNameNode(identifier, slash);
            moduleName = parseModuleName();
            version = parseVersion();
            alias = parseImportPrefixDecl();
            break;
        case DOT_TOKEN:
        case VERSION_KEYWORD:
            orgName = STNodeFactory.createEmptyNode();
            moduleName = parseModuleName(tokenKind, identifier);
            version = parseVersion();
            alias = parseImportPrefixDecl();
            break;
        case AS_KEYWORD:
            orgName = STNodeFactory.createEmptyNode();
            moduleName = parseModuleName(tokenKind, identifier);
            version = STNodeFactory.createEmptyNode();
            alias = parseImportPrefixDecl();
            break;
        case SEMICOLON_TOKEN:
            orgName = STNodeFactory.createEmptyNode();
            moduleName = parseModuleName(tokenKind, identifier);
            version = STNodeFactory.createEmptyNode();
            alias = STNodeFactory.createEmptyNode();
            break;
        default:
            Solution solution = recover(peek(), ParserRuleContext.IMPORT_DECL_RHS, importKeyword, identifier);
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }
            return parseImportDecl(solution.tokenKind, importKeyword, identifier);
    }

    STNode semicolon = parseSemicolon();
    return STNodeFactory.createImportDeclarationNode(importKeyword, orgName, moduleName, version, alias, semicolon);
}

/**
 * parse slash token.
 *
 * @return Parsed node
 */
private STNode parseSlashToken() {
    STToken token = peek();
    if (token.kind == SyntaxKind.SLASH_TOKEN) {
        return consume();
    } else {
        Solution sol = recover(token, ParserRuleContext.SLASH);
        return sol.recoveredNode;
    }
}

/**
 * Parse dot token.
 *
 * @return Parsed node
 */
private STNode parseDotToken() {
    STToken nextToken = peek();
    return parseDotToken(nextToken.kind);
}

/**
 * Parse dot token, given the next token kind.
 *
 * @param tokenKind Next token kind
 * @return Parsed node
 */
private STNode parseDotToken(SyntaxKind tokenKind) {
    if (tokenKind == SyntaxKind.DOT_TOKEN) {
        return consume();
    } else {
        Solution sol = recover(peek(), ParserRuleContext.DOT);
        return sol.recoveredNode;
    }
}

/**
 * Parse module name of an import declaration.
 *
 * @return Parsed node
 */
private STNode parseModuleName() {
    STNode moduleNameStart = parseIdentifier(ParserRuleContext.IMPORT_MODULE_NAME);
    return parseModuleName(peek().kind, moduleNameStart);
}

/**
 * Parse import module name of an import declaration, given the module name start identifier.
 *
 * @param nextTokenKind Next token kind
 * @param moduleNameStart Starting identifier of the module name
 * @return Parsed node
 */
private STNode parseModuleName(SyntaxKind nextTokenKind, STNode moduleNameStart) {
    List<STNode> moduleNameParts = new ArrayList<>();
    moduleNameParts.add(moduleNameStart);

    // Collect dot-separated name parts: identifier (. identifier)*
    while (!isEndOfImportModuleName(nextTokenKind)) {
        moduleNameParts.add(parseDotToken());
        moduleNameParts.add(parseIdentifier(ParserRuleContext.IMPORT_MODULE_NAME));
        nextTokenKind = peek().kind;
    }

    return STNodeFactory.createNodeList(moduleNameParts);
}

/**
 * Check whether the module name part of an import declaration ends at the given token kind.
 *
 * @param nextTokenKind Next token kind
 * @return <code>true</code> if the module name ends here. <code>false</code> otherwise
 */
private boolean isEndOfImportModuleName(SyntaxKind nextTokenKind) {
    return nextTokenKind != SyntaxKind.DOT_TOKEN && nextTokenKind != SyntaxKind.IDENTIFIER_TOKEN;
}

/**
 * Check whether the import declaration ends at the given token kind. Any token that
 * can start the next top-level construct also terminates the import.
 *
 * @param nextTokenKind Next token kind
 * @return <code>true</code> if the import declaration ends here. <code>false</code> otherwise
 */
private boolean isEndOfImportDecl(SyntaxKind nextTokenKind) {
    switch (nextTokenKind) {
        case SEMICOLON_TOKEN:
        case PUBLIC_KEYWORD:
        case FUNCTION_KEYWORD:
        case TYPE_KEYWORD:
        case ABSTRACT_KEYWORD:
        case CONST_KEYWORD:
        case EOF_TOKEN:
        case SERVICE_KEYWORD:
        case IMPORT_KEYWORD:
        case FINAL_KEYWORD:
            return true;
        default:
            return false;
    }
}

/**
 * Parse version component of an import declaration.
 * <p>
 * <code>version-decl := version sem-ver</code>
 *
 * @return Parsed node
 */
private STNode parseVersion() {
    STToken nextToken = peek();
    return parseVersion(nextToken.kind);
}

/**
 * Parse version component of an import declaration, given the next token kind.
 * Returns an empty node when no version clause is present.
 *
 * @param nextTokenKind Next token kind
 * @return Parsed node
 */
private STNode parseVersion(SyntaxKind nextTokenKind) {
    switch (nextTokenKind) {
        case VERSION_KEYWORD:
            STNode versionKeyword = parseVersionKeywrod();
            STNode versionNumber = parseVersionNumber();
            return STNodeFactory.createImportVersionNode(versionKeyword, versionNumber);
        case AS_KEYWORD:
        case SEMICOLON_TOKEN:
            // Version clause is optional.
            return STNodeFactory.createEmptyNode();
        default:
            if (isEndOfImportDecl(nextTokenKind)) {
                return STNodeFactory.createEmptyNode();
            }

            STToken token = peek();
            Solution solution = recover(token, ParserRuleContext.IMPORT_VERSION_DECL);

            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }

            return parseVersion(solution.tokenKind);
    }
}

/**
 * Parse version keyword.
 * <p>
 * NOTE(review): method name is misspelled ("Keywrod"); kept as-is because callers
 * (e.g. resumeParsing) reference it by this name.
 *
 * @return Parsed node
 */
private STNode parseVersionKeywrod() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.VERSION_KEYWORD) {
        return consume();
    } else {
        Solution sol = recover(peek(), ParserRuleContext.VERSION_KEYWORD);
        return sol.recoveredNode;
    }
}

/**
 * Parse version number.
 * <p>
 * <code>sem-ver := major-num [. minor-num [.
patch-num]]
 * <br/>
 * major-num := DecimalNumber
 * <br/>
 * minor-num := DecimalNumber
 * <br/>
 * patch-num := DecimalNumber
 * </code>
 *
 * @return Parsed node
 */
private STNode parseVersionNumber() {
    STToken nextToken = peek();
    return parseVersionNumber(nextToken.kind);
}

/**
 * Parse version number, given the next token kind. The major version is mandatory;
 * minor and patch versions are appended only when present.
 *
 * @param nextTokenKind Next token kind
 * @return Parsed node
 */
private STNode parseVersionNumber(SyntaxKind nextTokenKind) {
    STNode majorVersion;
    switch (nextTokenKind) {
        case DECIMAL_INTEGER_LITERAL:
            majorVersion = parseMajorVersion();
            break;
        default:
            STToken token = peek();
            Solution solution = recover(token, ParserRuleContext.VERSION_NUMBER);

            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }

            return parseVersionNumber(solution.tokenKind);
    }

    List<STNode> versionParts = new ArrayList<>();
    versionParts.add(majorVersion);

    STNode minorVersion = parseMinorVersion();
    if (minorVersion != null) {
        versionParts.add(minorVersion);

        // Patch version is only meaningful when a minor version exists.
        STNode patchVersion = parsePatchVersion();
        if (patchVersion != null) {
            versionParts.add(patchVersion);
        }
    }

    return STNodeFactory.createNodeList(versionParts);
}

// Major version: a plain decimal int literal.
private STNode parseMajorVersion() {
    return parseDecimalIntLiteral(ParserRuleContext.MAJOR_VERSION);
}

// Minor version: optional '.' DecimalNumber; null when absent.
private STNode parseMinorVersion() {
    return parseSubVersion(ParserRuleContext.MINOR_VERSION);
}

// Patch version: optional '.' DecimalNumber; null when absent.
private STNode parsePatchVersion() {
    return parseSubVersion(ParserRuleContext.PATCH_VERSION);
}

/**
 * Parse decimal literal.
 *
 * @param context Context in which the decimal literal is used.
 * @return Parsed node
 */
private STNode parseDecimalIntLiteral(ParserRuleContext context) {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.DECIMAL_INTEGER_LITERAL) {
        return consume();
    } else {
        Solution sol = recover(peek(), context);
        return sol.recoveredNode;
    }
}

/**
 * Parse sub version. i.e: minor-version/patch-version.
 *
 * @param context Context indicating what kind of sub-version is being parsed.
 * @return Parsed node
 */
private STNode parseSubVersion(ParserRuleContext context) {
    STToken nextToken = peek();
    return parseSubVersion(nextToken.kind, context);
}

/**
 * Parse sub version, given the next token kind. Returns <code>null</code> when the
 * sub-version is absent (i.e. the import continues with <code>as</code> or ends).
 *
 * @param nextTokenKind Next token kind
 * @param context Context indicating what kind of sub-version is being parsed.
 * @return Parsed node, or <code>null</code> when absent
 */
private STNode parseSubVersion(SyntaxKind nextTokenKind, ParserRuleContext context) {
    switch (nextTokenKind) {
        case AS_KEYWORD:
        case SEMICOLON_TOKEN:
            return null;
        case DOT_TOKEN:
            STNode leadingDot = parseDotToken();
            STNode versionNumber = parseDecimalIntLiteral(context);
            return STNodeFactory.createImportSubVersionNode(leadingDot, versionNumber);
        default:
            STToken token = peek();
            Solution solution = recover(token, ParserRuleContext.IMPORT_SUB_VERSION);

            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }

            return parseSubVersion(solution.tokenKind, context);
    }
}

/**
 * Parse import prefix declaration.
 * <p>
 * <code>import-prefix-decl := as import-prefix
 * <br/>
 * import-prefix := a identifier | _
 * </code>
 *
 * @return Parsed node
 */
private STNode parseImportPrefixDecl() {
    STToken token = peek();
    return parseImportPrefixDecl(token.kind);
}

/**
 * Parse import prefix declaration, given the next token kind. Returns an empty
 * node when no <code>as</code> clause is present.
 *
 * @param nextTokenKind Next token kind
 * @return Parsed node
 */
private STNode parseImportPrefixDecl(SyntaxKind nextTokenKind) {
    switch (nextTokenKind) {
        case AS_KEYWORD:
            STNode asKeyword = parseAsKeyword();
            STNode prefix = parseImportPrefix();
            return STNodeFactory.createImportPrefixNode(asKeyword, prefix);
        case SEMICOLON_TOKEN:
            return STNodeFactory.createEmptyNode();
        default:
            if (isEndOfImportDecl(nextTokenKind)) {
                return STNodeFactory.createEmptyNode();
            }

            STToken token = peek();
            Solution solution = recover(token, ParserRuleContext.IMPORT_PREFIX_DECL);

            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }

            return parseImportPrefixDecl(solution.tokenKind);
    }
}

/**
 * Parse <code>as</code> keyword.
 *
 * @return Parsed node
 */
private STNode parseAsKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.AS_KEYWORD) {
        return consume();
    } else {
        Solution sol = recover(peek(), ParserRuleContext.AS_KEYWORD);
        return sol.recoveredNode;
    }
}

/**
 * Parse import prefix.
* * @return Parsed node */ private STNode parseImportPrefix() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) { return consume(); } else { Solution sol = recover(peek(), ParserRuleContext.IMPORT_PREFIX); return sol.recoveredNode; } } /** * Parse top level node, given the modifier that precedes it. * * @param qualifier Qualifier that precedes the top level node * @return Parsed node */ private STNode parseTopLevelNode(STNode metadata, STNode qualifier) { STToken token = peek(); return parseTopLevelNode(token.kind, metadata, qualifier); } /** * Parse top level node given the next token kind and the modifier that precedes it. * * @param tokenKind Next token kind * @param qualifier Qualifier that precedes the top level node * @return Parsed top-level node */ private STNode parseTopLevelNode(SyntaxKind tokenKind, STNode metadata, STNode qualifier) { switch (tokenKind) { case FUNCTION_KEYWORD: return parseFuncDefOrFuncTypeDesc(metadata, getQualifier(qualifier), false); case TYPE_KEYWORD: return parseModuleTypeDefinition(metadata, getQualifier(qualifier)); case LISTENER_KEYWORD: return parseListenerDeclaration(metadata, getQualifier(qualifier)); case CONST_KEYWORD: return parseConstantDeclaration(metadata, getQualifier(qualifier)); case ANNOTATION_KEYWORD: STNode constKeyword = STNodeFactory.createEmptyNode(); return parseAnnotationDeclaration(metadata, getQualifier(qualifier), constKeyword); case IMPORT_KEYWORD: reportInvalidQualifier(qualifier); return parseImportDecl(); case XMLNS_KEYWORD: reportInvalidQualifier(qualifier); return parseXMLNamepsaceDeclaration(); case FINAL_KEYWORD: reportInvalidQualifier(qualifier); STNode finalKeyword = parseFinalKeyword(); return parseVariableDecl(metadata, finalKeyword, true); case SERVICE_KEYWORD: if (isServiceDeclStart(ParserRuleContext.TOP_LEVEL_NODE, 1)) { reportInvalidQualifier(qualifier); return parseServiceDecl(metadata); } return parseModuleVarDecl(metadata, qualifier); case ENUM_KEYWORD: 
return parseEnumDeclaration(metadata, getQualifier(qualifier)); case IDENTIFIER_TOKEN: if (isModuleVarDeclStart(1)) { return parseModuleVarDecl(metadata, qualifier); } default: if (isTypeStartingToken(tokenKind) && tokenKind != SyntaxKind.IDENTIFIER_TOKEN) { return parseModuleVarDecl(metadata, qualifier); } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.TOP_LEVEL_NODE_WITHOUT_MODIFIER, metadata, qualifier); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } if (solution.action == Action.KEEP) { return parseModuleVarDecl(metadata, qualifier); } return parseTopLevelNode(solution.tokenKind, metadata, qualifier); } } private STNode parseModuleVarDecl(STNode metadata, STNode qualifier) { reportInvalidQualifier(qualifier); STNode finalKeyword = STNodeFactory.createEmptyNode(); return parseVariableDecl(metadata, finalKeyword, true); } private STNode getQualifier(STNode qualifier) { return qualifier == null ? STNodeFactory.createEmptyNode() : qualifier; } private void reportInvalidQualifier(STNode qualifier) { if (qualifier != null && qualifier.kind != SyntaxKind.NONE) { this.errorHandler.reportInvalidNode((STToken) qualifier, "invalid qualifier '" + qualifier.toString().trim() + "'"); } } /** * Parse access modifiers. * * @return Parsed node */ private STNode parseQualifier() { STToken token = peek(); if (token.kind == SyntaxKind.PUBLIC_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.PUBLIC_KEYWORD); return sol.recoveredNode; } } private STNode parseFuncDefinition(STNode metadata, STNode visibilityQualifier, boolean isObjectMethod) { startContext(ParserRuleContext.FUNC_DEF); STNode functionKeyword = parseFunctionKeyword(); STNode funcDef = parseFunctionKeywordRhs(metadata, visibilityQualifier, functionKeyword, true, isObjectMethod); return funcDef; } /** * Parse function definition for the function type descriptor. 
* <p> * <code> * function-defn := FUNCTION identifier function-signature function-body * <br/> * function-type-descriptor := function function-signature * </code> * * @param metadata Metadata * @param visibilityQualifier Visibility qualifier * @return Parsed node */ private STNode parseFuncDefOrFuncTypeDesc(STNode metadata, STNode visibilityQualifier, boolean isObjectMethod) { startContext(ParserRuleContext.FUNC_DEF_OR_FUNC_TYPE); STNode functionKeyword = parseFunctionKeyword(); STNode funcDefOrType = parseFunctionKeywordRhs(metadata, visibilityQualifier, functionKeyword, false, isObjectMethod); return funcDefOrType; } private STNode parseFunctionKeywordRhs(STNode metadata, STNode visibilityQualifier, STNode functionKeyword, boolean isFuncDef, boolean isObjectMethod) { return parseFunctionKeywordRhs(peek().kind, metadata, visibilityQualifier, functionKeyword, isFuncDef, isObjectMethod); } private STNode parseFunctionKeywordRhs(SyntaxKind nextTokenKind, STNode metadata, STNode visibilityQualifier, STNode functionKeyword, boolean isFuncDef, boolean isObjectMethod) { STNode name; switch (nextTokenKind) { case IDENTIFIER_TOKEN: name = parseFunctionName(); isFuncDef = true; break; case OPEN_PAREN_TOKEN: name = STNodeFactory.createEmptyNode(); break; default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.FUNCTION_KEYWORD_RHS, metadata, visibilityQualifier, functionKeyword, isFuncDef, isObjectMethod); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseFunctionKeywordRhs(solution.tokenKind, metadata, visibilityQualifier, functionKeyword, isFuncDef, isObjectMethod); } if (isFuncDef) { switchContext(ParserRuleContext.FUNC_DEF); STNode funcSignature = parseFuncSignature(false); STNode funcDef = createFuncDefOrMethodDecl(metadata, visibilityQualifier, functionKeyword, isObjectMethod, name, funcSignature); endContext(); return funcDef; } STNode funcSignature = parseFuncSignature(true); return 
parseReturnTypeDescRhs(metadata, visibilityQualifier, functionKeyword, funcSignature, isObjectMethod); } private STNode createFuncDefOrMethodDecl(STNode metadata, STNode visibilityQualifier, STNode functionKeyword, boolean isObjectMethod, STNode name, STNode funcSignature) { STNode body = parseFunctionBody(isObjectMethod); if (body.kind == SyntaxKind.SEMICOLON_TOKEN) { return STNodeFactory.createFunctionDeclarationNode(metadata, visibilityQualifier, functionKeyword, name, funcSignature, body); } return STNodeFactory.createFunctionDefinitionNode(metadata, visibilityQualifier, functionKeyword, name, funcSignature, body); } /** * Parse function signature. * <p> * <code> * function-signature := ( param-list ) return-type-descriptor * <br/> * return-type-descriptor := [ returns [annots] type-descriptor ] * </code> * * @param isParamNameOptional Whether the parameter names are optional * @param isInExprContext Whether this function signature is occurred within an expression context * @return Function signature node */ private STNode parseFuncSignature(boolean isParamNameOptional) { STNode openParenthesis = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS); STNode parameters = parseParamList(isParamNameOptional); STNode closeParenthesis = parseCloseParenthesis(); endContext(); STNode returnTypeDesc = parseFuncReturnTypeDescriptor(); return STNodeFactory.createFunctionSignatureNode(openParenthesis, parameters, closeParenthesis, returnTypeDesc); } private STNode parseReturnTypeDescRhs(STNode metadata, STNode visibilityQualifier, STNode functionKeyword, STNode funcSignature, boolean isObjectMethod) { switch (peek().kind) { case SEMICOLON_TOKEN: case IDENTIFIER_TOKEN: case OPEN_BRACKET_TOKEN: endContext(); STNode typeDesc = STNodeFactory.createFunctionTypeDescriptorNode(functionKeyword, funcSignature); if (isObjectMethod) { STNode fieldName = parseVariableName(); return parseObjectFieldRhs(metadata, visibilityQualifier, typeDesc, fieldName); } 
startContext(ParserRuleContext.VAR_DECL_STMT); STNode typedBindingPattern = parseTypedBindingPatternTypeRhs(typeDesc, ParserRuleContext.VAR_DECL_STMT); return parseVarDeclRhs(metadata, visibilityQualifier, typedBindingPattern, true); case OPEN_BRACE_TOKEN: case EQUAL_TOKEN: break; default: break; } STNode name = errorHandler.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN, DiagnosticErrorCode.ERROR_MISSING_FUNCTION_NAME); funcSignature = validateAndGetFuncParams((STFunctionSignatureNode) funcSignature); STNode funcDef = createFuncDefOrMethodDecl(metadata, visibilityQualifier, functionKeyword, isObjectMethod, name, funcSignature); endContext(); return funcDef; } /** * Validate the param list and return. If there are params without param-name, * then this method will create a new set of params with missing param-name * and return. * * @param signature Function signature * @return */ private STNode validateAndGetFuncParams(STFunctionSignatureNode signature) { STNode parameters = signature.parameters; int paramCount = parameters.bucketCount(); int index = 0; for (; index < paramCount; index++) { STNode param = parameters.childInBucket(index); switch (param.kind) { case REQUIRED_PARAM: STRequiredParameterNode requiredParam = (STRequiredParameterNode) param; if (isEmpty(requiredParam.paramName)) { break; } continue; case DEFAULTABLE_PARAM: STDefaultableParameterNode defaultableParam = (STDefaultableParameterNode) param; if (isEmpty(defaultableParam.paramName)) { break; } continue; case REST_PARAM: STRestParameterNode restParam = (STRestParameterNode) param; if (isEmpty(restParam.paramName)) { break; } continue; default: continue; } break; } if (index == paramCount) { return signature; } STNode updatedParams = getUpdatedParamList(parameters, index); return STNodeFactory.createFunctionSignatureNode(signature.openParenToken, updatedParams, signature.closeParenToken, signature.returnTypeDesc); } private STNode getUpdatedParamList(STNode parameters, int index) { 
int paramCount = parameters.bucketCount(); int newIndex = 0; ArrayList<STNode> newParams = new ArrayList<>(); for (; newIndex < index; newIndex++) { newParams.add(parameters.childInBucket(index)); } for (; newIndex < paramCount; newIndex++) { STNode param = parameters.childInBucket(newIndex); STNode paramName = STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN); switch (param.kind) { case REQUIRED_PARAM: STRequiredParameterNode requiredParam = (STRequiredParameterNode) param; if (isEmpty(requiredParam.paramName)) { param = STNodeFactory.createRequiredParameterNode(requiredParam.leadingComma, requiredParam.annotations, requiredParam.visibilityQualifier, requiredParam.typeName, paramName); } break; case DEFAULTABLE_PARAM: STDefaultableParameterNode defaultableParam = (STDefaultableParameterNode) param; if (isEmpty(defaultableParam.paramName)) { param = STNodeFactory.createDefaultableParameterNode(defaultableParam.leadingComma, defaultableParam.annotations, defaultableParam.visibilityQualifier, defaultableParam.typeName, paramName, defaultableParam.equalsToken, defaultableParam.expression); } break; case REST_PARAM: STRestParameterNode restParam = (STRestParameterNode) param; if (isEmpty(restParam.paramName)) { param = STNodeFactory.createRestParameterNode(restParam.leadingComma, restParam.annotations, restParam.typeName, restParam.ellipsisToken, paramName); } break; default: break; } newParams.add(param); } return STNodeFactory.createNodeList(newParams); } private boolean isEmpty(STNode node) { return node == null; } /** * Parse function keyword. Need to validate the token before consuming, * since we can reach here while recovering. * * @return Parsed node */ private STNode parseFunctionKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.FUNCTION_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.FUNCTION_KEYWORD); return sol.recoveredNode; } } /** * Parse function name. 
* * @return Parsed node */ private STNode parseFunctionName() { STToken token = peek(); if (token.kind == SyntaxKind.IDENTIFIER_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.FUNC_NAME); return sol.recoveredNode; } } /** * Parse open parenthesis. * * @param ctx Context of the parenthesis * @return Parsed node */ private STNode parseOpenParenthesis(ParserRuleContext ctx) { STToken token = peek(); if (token.kind == SyntaxKind.OPEN_PAREN_TOKEN) { return consume(); } else { Solution sol = recover(token, ctx, ctx); return sol.recoveredNode; } } /** * Parse close parenthesis. * * @return Parsed node */ private STNode parseCloseParenthesis() { STToken token = peek(); if (token.kind == SyntaxKind.CLOSE_PAREN_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.CLOSE_PARENTHESIS); return sol.recoveredNode; } } /** * <p> * Parse parameter list. * </p> * <code> * param-list := required-params [, defaultable-params] [, rest-param] * <br/>&nbsp;| defaultable-params [, rest-param] * <br/>&nbsp;| [rest-param] * <br/><br/> * required-params := required-param (, required-param)* * <br/><br/> * required-param := [annots] [public] type-descriptor [param-name] * <br/><br/> * defaultable-params := defaultable-param (, defaultable-param)* * <br/><br/> * defaultable-param := [annots] [public] type-descriptor [param-name] default-value * <br/><br/> * rest-param := [annots] type-descriptor ... [param-name] * <br/><br/> * param-name := identifier * </code> * * @param isParamNameOptional Whether the param names in the signature is optional or not. 
     * @return Parsed node
     */
    private STNode parseParamList(boolean isParamNameOptional) {
        startContext(ParserRuleContext.PARAM_LIST);
        ArrayList<STNode> paramsList = new ArrayList<>();

        // Empty parameter list, e.g. "()".
        STToken token = peek();
        if (isEndOfParametersList(token.kind)) {
            STNode params = STNodeFactory.createNodeList(paramsList);
            return params;
        }

        // The first parameter has no leading comma.
        STNode startingComma = STNodeFactory.createEmptyNode();
        startContext(ParserRuleContext.REQUIRED_PARAM);
        STNode firstParam = parseParameter(startingComma, SyntaxKind.REQUIRED_PARAM, isParamNameOptional);
        SyntaxKind prevParamKind = firstParam.kind;
        paramsList.add(firstParam);

        // Parse the remaining parameters. The kind of the previously parsed parameter
        // decides which context the next one is parsed in (required/defaultable/rest),
        // so that ordering violations can be reported.
        token = peek();
        while (!isEndOfParametersList(token.kind)) {
            switch (prevParamKind) {
                case REST_PARAM:
                    // A rest parameter must be the last one; report and keep parsing
                    // so the rest of the list is still captured in the tree.
                    this.errorHandler.reportInvalidNode(token,
                            "cannot have more parameters after the rest-parameter");
                    startContext(ParserRuleContext.REQUIRED_PARAM);
                    break;
                case DEFAULTABLE_PARAM:
                    startContext(ParserRuleContext.DEFAULTABLE_PARAM);
                    break;
                case REQUIRED_PARAM:
                default:
                    startContext(ParserRuleContext.REQUIRED_PARAM);
                    break;
            }
            STNode paramEnd = parseParameterRhs(token.kind);
            if (paramEnd == null) {
                // End of the parameter list reached; close the context opened above.
                endContext();
                break;
            }
            STNode param = parseParameter(paramEnd, prevParamKind, isParamNameOptional);
            prevParamKind = param.kind;
            paramsList.add(param);
            token = peek();
        }

        STNode params = STNodeFactory.createNodeList(paramsList);
        return params;
    }

    /**
     * Parse the token that separates/ends a parameter: a comma continues the list,
     * a close-paren ends it (signalled by returning <code>null</code>).
     *
     * @param tokenKind Next token kind
     * @return The separator node, or <code>null</code> at the end of the list
     */
    private STNode parseParameterRhs(SyntaxKind tokenKind) {
        switch (tokenKind) {
            case COMMA_TOKEN:
                return parseComma();
            case CLOSE_PAREN_TOKEN:
                return null;
            default:
                STToken token = peek();
                Solution solution = recover(token, ParserRuleContext.PARAM_END);
                // If the solution is to remove a token, that token is already consumed;
                // use the recovered node directly.
                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }
                return parseParameterRhs(solution.tokenKind);
        }
    }

    /**
     * Parse a single parameter. Parameter can be a required parameter, a defaultable
     * parameter, or a rest parameter.
* * @param prevParamKind Kind of the parameter that precedes current parameter * @param leadingComma Comma that occurs before the param * @param isParamNameOptional Whether the param names in the signature is optional or not. * @return Parsed node */ private STNode parseParameter(STNode leadingComma, SyntaxKind prevParamKind, boolean isParamNameOptional) { STToken token = peek(); return parseParameter(token.kind, prevParamKind, leadingComma, 1, isParamNameOptional); } private STNode parseParameter(SyntaxKind prevParamKind, STNode leadingComma, int nextTokenOffset, boolean isParamNameOptional) { return parseParameter(peek().kind, prevParamKind, leadingComma, nextTokenOffset, isParamNameOptional); } private STNode parseParameter(SyntaxKind nextTokenKind, SyntaxKind prevParamKind, STNode leadingComma, int nextTokenOffset, boolean isParamNameOptional) { STNode annots; switch (nextTokenKind) { case AT_TOKEN: annots = parseAnnotations(nextTokenKind); nextTokenKind = peek().kind; break; case PUBLIC_KEYWORD: case IDENTIFIER_TOKEN: annots = STNodeFactory.createNodeList(new ArrayList<>()); break; default: if (isTypeStartingToken(nextTokenKind)) { annots = STNodeFactory.createNodeList(new ArrayList<>()); break; } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.PARAMETER_START, prevParamKind, leadingComma, nextTokenOffset, isParamNameOptional); if (solution.action == Action.KEEP) { annots = STNodeFactory.createNodeList(new ArrayList<>()); break; } if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseParameter(solution.tokenKind, prevParamKind, leadingComma, 0, isParamNameOptional); } return parseParamGivenAnnots(nextTokenKind, prevParamKind, leadingComma, annots, 1, isParamNameOptional); } private STNode parseParamGivenAnnots(SyntaxKind prevParamKind, STNode leadingComma, STNode annots, int nextNextTokenOffset, boolean isFuncDef) { return parseParamGivenAnnots(peek().kind, prevParamKind, leadingComma, annots, 
nextNextTokenOffset, isFuncDef); } private STNode parseParamGivenAnnots(SyntaxKind nextTokenKind, SyntaxKind prevParamKind, STNode leadingComma, STNode annots, int nextTokenOffset, boolean isParamNameOptional) { STNode qualifier; switch (nextTokenKind) { case PUBLIC_KEYWORD: qualifier = parseQualifier(); break; case IDENTIFIER_TOKEN: qualifier = STNodeFactory.createEmptyNode(); break; case AT_TOKEN: default: if (isTypeStartingToken(nextTokenKind) && nextTokenKind != SyntaxKind.IDENTIFIER_TOKEN) { qualifier = STNodeFactory.createEmptyNode(); break; } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.PARAMETER_WITHOUT_ANNOTS, prevParamKind, leadingComma, annots, nextTokenOffset, isParamNameOptional); if (solution.action == Action.KEEP) { qualifier = STNodeFactory.createEmptyNode(); break; } if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseParamGivenAnnots(solution.tokenKind, prevParamKind, leadingComma, annots, 0, isParamNameOptional); } return parseParamGivenAnnotsAndQualifier(prevParamKind, leadingComma, annots, qualifier, isParamNameOptional); } private STNode parseParamGivenAnnotsAndQualifier(SyntaxKind prevParamKind, STNode leadingComma, STNode annots, STNode qualifier, boolean isParamNameOptional) { STNode type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER); STNode param = parseAfterParamType(prevParamKind, leadingComma, annots, qualifier, type, isParamNameOptional); endContext(); return param; } private STNode parseAfterParamType(SyntaxKind prevParamKind, STNode leadingComma, STNode annots, STNode qualifier, STNode type, boolean isParamNameOptional) { STToken token = peek(); return parseAfterParamType(token.kind, prevParamKind, leadingComma, annots, qualifier, type, isParamNameOptional); } private STNode parseAfterParamType(SyntaxKind tokenKind, SyntaxKind prevParamKind, STNode leadingComma, STNode annots, STNode qualifier, STNode type, boolean isParamNameOptional) { STNode 
paramName; switch (tokenKind) { case ELLIPSIS_TOKEN: switchContext(ParserRuleContext.REST_PARAM); reportInvalidQualifier(qualifier); STNode ellipsis = parseEllipsis(); if (isParamNameOptional && peek().kind != SyntaxKind.IDENTIFIER_TOKEN) { paramName = STNodeFactory.createEmptyNode(); } else { paramName = parseVariableName(); } return STNodeFactory.createRestParameterNode(leadingComma, annots, type, ellipsis, paramName); case IDENTIFIER_TOKEN: paramName = parseVariableName(); return parseParameterRhs(prevParamKind, leadingComma, annots, qualifier, type, paramName); case EQUAL_TOKEN: if (!isParamNameOptional) { break; } paramName = STNodeFactory.createEmptyNode(); return parseParameterRhs(prevParamKind, leadingComma, annots, qualifier, type, paramName); default: if (!isParamNameOptional) { break; } paramName = STNodeFactory.createEmptyNode(); return parseParameterRhs(prevParamKind, leadingComma, annots, qualifier, type, paramName); } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.AFTER_PARAMETER_TYPE, prevParamKind, leadingComma, annots, qualifier, type, isParamNameOptional); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseAfterParamType(solution.tokenKind, prevParamKind, leadingComma, annots, qualifier, type, isParamNameOptional); } /** * Parse ellipsis. * * @return Parsed node */ private STNode parseEllipsis() { STToken token = peek(); if (token.kind == SyntaxKind.ELLIPSIS_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.ELLIPSIS); return sol.recoveredNode; } } /** * <p> * Parse the right hand side of a required/defaultable parameter. 
* </p> * <code>parameter-rhs := [= expression]</code> * * @param leadingComma Comma that precedes this parameter * @param prevParamKind Kind of the parameter that precedes current parameter * @param annots Annotations attached to the parameter * @param qualifier Visibility qualifier * @param type Type descriptor * @param paramName Name of the parameter * @return Parsed parameter node */ private STNode parseParameterRhs(SyntaxKind prevParamKind, STNode leadingComma, STNode annots, STNode qualifier, STNode type, STNode paramName) { STToken token = peek(); return parseParameterRhs(token.kind, prevParamKind, leadingComma, annots, qualifier, type, paramName); } private STNode parseParameterRhs(SyntaxKind tokenKind, SyntaxKind prevParamKind, STNode leadingComma, STNode annots, STNode qualifier, STNode type, STNode paramName) { if (isEndOfParameter(tokenKind)) { if (prevParamKind == SyntaxKind.DEFAULTABLE_PARAM) { this.errorHandler.reportInvalidNode(peek(), "cannot have a required parameter after a defaultable parameter"); } return STNodeFactory.createRequiredParameterNode(leadingComma, annots, qualifier, type, paramName); } else if (tokenKind == SyntaxKind.EQUAL_TOKEN) { if (prevParamKind == SyntaxKind.REQUIRED_PARAM) { switchContext(ParserRuleContext.DEFAULTABLE_PARAM); } STNode equal = parseAssignOp(); STNode expr = parseExpression(); return STNodeFactory.createDefaultableParameterNode(leadingComma, annots, qualifier, type, paramName, equal, expr); } else { STToken token = peek(); Solution solution = recover(token, ParserRuleContext.PARAMETER_NAME_RHS, prevParamKind, leadingComma, annots, qualifier, type, paramName); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseParameterRhs(solution.tokenKind, prevParamKind, leadingComma, annots, qualifier, type, paramName); } } /** * Parse comma. 
     *
     * @return Parsed node
     */
    private STNode parseComma() {
        STToken token = peek();
        if (token.kind == SyntaxKind.COMMA_TOKEN) {
            return consume();
        } else {
            Solution sol = recover(token, ParserRuleContext.COMMA);
            return sol.recoveredNode;
        }
    }

    /**
     * Parse return type descriptor of a function. A return type descriptor has the following structure.
     *
     * <code>return-type-descriptor := [ returns annots type-descriptor ]</code>
     *
     * @return Parsed node, or an empty node if no return type descriptor is present
     */
    private STNode parseFuncReturnTypeDescriptor() {
        return parseFuncReturnTypeDescriptor(peek().kind);
    }

    private STNode parseFuncReturnTypeDescriptor(SyntaxKind nextTokenKind) {
        switch (nextTokenKind) {
            case OPEN_BRACE_TOKEN: // function body block starts; no return type descriptor
            case EQUAL_TOKEN: // external function body starts; no return type descriptor
                return STNodeFactory.createEmptyNode();
            case RETURNS_KEYWORD:
                break;
            default:
                // The 'returns' keyword may appear one token ahead (e.g. after an
                // annotation start); only then continue parsing the descriptor.
                STToken nextNextToken = getNextNextToken(nextTokenKind);
                if (nextNextToken.kind == SyntaxKind.RETURNS_KEYWORD) {
                    break;
                }
                return STNodeFactory.createEmptyNode();
        }

        STNode returnsKeyword = parseReturnsKeyword();
        STNode annot = parseAnnotations();
        STNode type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_RETURN_TYPE_DESC);
        return STNodeFactory.createReturnTypeDescriptorNode(returnsKeyword, annot, type);
    }

    /**
     * Parse 'returns' keyword.
     *
     * @return Return-keyword node
     */
    private STNode parseReturnsKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.RETURNS_KEYWORD) {
            return consume();
        } else {
            Solution sol = recover(token, ParserRuleContext.RETURNS_KEYWORD);
            return sol.recoveredNode;
        }
    }

    /**
     * <p>
     * Parse a type descriptor. A type descriptor has the following structure.
* </p> * <code>type-descriptor := * &nbsp;simple-type-descriptor<br/> * &nbsp;| structured-type-descriptor<br/> * &nbsp;| behavioral-type-descriptor<br/> * &nbsp;| singleton-type-descriptor<br/> * &nbsp;| union-type-descriptor<br/> * &nbsp;| optional-type-descriptor<br/> * &nbsp;| any-type-descriptor<br/> * &nbsp;| anydata-type-descriptor<br/> * &nbsp;| byte-type-descriptor<br/> * &nbsp;| json-type-descriptor<br/> * &nbsp;| type-descriptor-reference<br/> * &nbsp;| ( type-descriptor ) * <br/> * type-descriptor-reference := qualified-identifier</code> * * @return Parsed node */ private STNode parseTypeDescriptor(ParserRuleContext context) { return parseTypeDescriptor(context, false); } private STNode parseTypeDescriptor(ParserRuleContext context, boolean isTypedBindingPattern) { startContext(context); STNode typeDesc = parseTypeDescriptorInternal(context, isTypedBindingPattern); endContext(); return typeDesc; } private STNode parseTypeDescriptorInternal(ParserRuleContext context) { return parseTypeDescriptorInternal(context, false); } private STNode parseTypeDescriptorInternal(ParserRuleContext context, boolean isTypedBindingPattern) { STToken token = peek(); STNode typeDesc = parseTypeDescriptorInternal(token.kind, context); return parseComplexTypeDescriptor(typeDesc, context, isTypedBindingPattern); } /** * This will handle the parsing of optional,array,union type desc to infinite length. 
* * @param typeDesc * * @return Parsed type descriptor node */ private STNode parseComplexTypeDescriptor(STNode typeDesc, ParserRuleContext context, boolean isTypedBindingPattern) { STToken nextToken = peek(); switch (nextToken.kind) { case QUESTION_MARK_TOKEN: return parseComplexTypeDescriptor(parseOptionalTypeDescriptor(typeDesc), context, isTypedBindingPattern); case OPEN_BRACKET_TOKEN: if (isTypedBindingPattern) { return typeDesc; } return parseComplexTypeDescriptor(parseArrayTypeDescriptor(typeDesc), context, isTypedBindingPattern); case PIPE_TOKEN: return parseUnionTypeDescriptor(typeDesc, context); case BITWISE_AND_TOKEN: return parseIntersectionTypeDescriptor(typeDesc, context); default: return typeDesc; } } /** * <p> * Parse a type descriptor, given the next token kind. * </p> * If the preceding token is <code>?</code> then it is an optional type descriptor * * @param tokenKind Next token kind * @param context Current context * @return Parsed node */ private STNode parseTypeDescriptorInternal(SyntaxKind tokenKind, ParserRuleContext context) { switch (tokenKind) { case IDENTIFIER_TOKEN: return parseTypeReference(); case RECORD_KEYWORD: return parseRecordTypeDescriptor(); case OBJECT_KEYWORD: case ABSTRACT_KEYWORD: case CLIENT_KEYWORD: return parseObjectTypeDescriptor(); case OPEN_PAREN_TOKEN: return parseNilOrParenthesisedTypeDesc(); case MAP_KEYWORD: case FUTURE_KEYWORD: case TYPEDESC_KEYWORD: return parseParameterizedTypeDescriptor(); case ERROR_KEYWORD: return parseErrorTypeDescriptor(); case STREAM_KEYWORD: return parseStreamTypeDescriptor(); case TABLE_KEYWORD: return parseTableTypeDescriptor(); case FUNCTION_KEYWORD: return parseFunctionTypeDesc(); case OPEN_BRACKET_TOKEN: return parseTupleTypeDesc(); default: if (isSingletonTypeDescStart(tokenKind, true)) { return parseSingletonTypeDesc(); } if (isSimpleType(tokenKind)) { return parseSimpleTypeDescriptor(); } STToken token = peek(); Solution solution = recover(token, 
ParserRuleContext.TYPE_DESCRIPTOR, context); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseTypeDescriptorInternal(solution.tokenKind, context); } } private STNode parseNilOrParenthesisedTypeDesc() { STNode openParen = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS); return parseNilOrParenthesisedTypeDescRhs(openParen); } private STNode parseNilOrParenthesisedTypeDescRhs(STNode openParen) { return parseNilOrParenthesisedTypeDescRhs(peek().kind, openParen); } private STNode parseNilOrParenthesisedTypeDescRhs(SyntaxKind nextTokenKind, STNode openParen) { STNode closeParen; switch (nextTokenKind) { case CLOSE_PAREN_TOKEN: closeParen = parseCloseParenthesis(); return STNodeFactory.createNilTypeDescriptorNode(openParen, closeParen); default: if (isTypeStartingToken(nextTokenKind)) { STNode typedesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_PARENTHESIS); closeParen = parseCloseParenthesis(); return STNodeFactory.createParenthesisedTypeDescriptorNode(openParen, typedesc, closeParen); } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.NIL_OR_PARENTHESISED_TYPE_DESC_RHS, openParen); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseNilOrParenthesisedTypeDescRhs(solution.tokenKind, openParen); } } /** * Parse simple type descriptor. * * @return Parsed node */ private STNode parseSimpleTypeDescriptor() { STToken node = peek(); if (isSimpleType(node.kind)) { STToken token = consume(); SyntaxKind typeKind = getTypeSyntaxKind(token.kind); return STNodeFactory.createBuiltinSimpleNameReferenceNode(typeKind, token); } else { Solution sol = recover(peek(), ParserRuleContext.SIMPLE_TYPE_DESCRIPTOR); return sol.recoveredNode; } } /** * <p> * Parse function body. A function body has the following structure. 
 * </p>
 * <code>
 * function-body := function-body-block | external-function-body
 * external-function-body := = annots external ;
 * function-body-block := { [default-worker-init, named-worker-decl+] default-worker }
 * </code>
 *
 * @param isObjectMethod Flag indicating whether this is an object-method
 * @return Parsed node
 */
private STNode parseFunctionBody(boolean isObjectMethod) {
    STToken token = peek();
    return parseFunctionBody(token.kind, isObjectMethod);
}

/**
 * Parse function body, given the next token kind.
 *
 * @param tokenKind Next token kind
 * @param isObjectMethod Flag indicating whether this is an object-method
 * @return Parsed node
 */
protected STNode parseFunctionBody(SyntaxKind tokenKind, boolean isObjectMethod) {
    switch (tokenKind) {
        case EQUAL_TOKEN:
            return parseExternalFunctionBody();
        case OPEN_BRACE_TOKEN:
            return parseFunctionBodyBlock(false);
        case RIGHT_DOUBLE_ARROW_TOKEN:
            return parseExpressionFuncBody(false);
        case SEMICOLON_TOKEN:
            if (isObjectMethod) {
                // A bare semicolon is only a valid body for object method declarations.
                return parseSemicolon();
            }
            // Intentional fall-through: a semicolon outside an object method is an error,
            // so recover like any other unexpected token.
        default:
            STToken token = peek();
            Solution solution = recover(token, ParserRuleContext.FUNC_BODY, isObjectMethod);
            // If the parser recovered by removing a token, simply re-parse the same rule.
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }
            // NOTE(review): this creates a missing token whose kind is NONE
            // (solution.tokenKind is NONE inside this branch) — confirm intended.
            if (solution.tokenKind == SyntaxKind.NONE) {
                return STNodeFactory.createMissingToken(solution.tokenKind);
            }
            return parseFunctionBody(solution.tokenKind, isObjectMethod);
    }
}

/**
 * <p>
 * Parse function body block. A function body block has the following structure.
 * </p>
 *
 * <code>
 * function-body-block := { [default-worker-init, named-worker-decl+] default-worker }<br/>
 * default-worker-init := sequence-stmt<br/>
 * default-worker := sequence-stmt<br/>
 * named-worker-decl := worker worker-name return-type-descriptor { sequence-stmt }<br/>
 * worker-name := identifier<br/>
 * </code>
 *
 * @param isAnonFunc Flag indicating whether the func body belongs to an anonymous function
 * @return Parsed node
 */
private STNode parseFunctionBodyBlock(boolean isAnonFunc) {
    startContext(ParserRuleContext.FUNC_BODY_BLOCK);
    STNode openBrace = parseOpenBrace();
    STToken token = peek();

    // Statements before any named worker (default-worker-init).
    ArrayList<STNode> firstStmtList = new ArrayList<>();
    // Named worker declarations.
    ArrayList<STNode> workers = new ArrayList<>();
    // Statements after the named workers (default-worker).
    ArrayList<STNode> secondStmtList = new ArrayList<>();

    ParserRuleContext currentCtx = ParserRuleContext.DEFAULT_WORKER_INIT;
    boolean hasNamedWorkers = false;
    while (!isEndOfFuncBodyBlock(token.kind, isAnonFunc)) {
        STNode stmt = parseStatement();
        if (stmt == null) {
            break;
        }

        // This switch is a small state machine over the three body sections;
        // the missing breaks below are intentional fall-throughs that advance the state.
        switch (currentCtx) {
            case DEFAULT_WORKER_INIT:
                if (stmt.kind != SyntaxKind.NAMED_WORKER_DECLARATION) {
                    firstStmtList.add(stmt);
                    break;
                }
                // First named worker seen: move to the NAMED_WORKERS section.
                currentCtx = ParserRuleContext.NAMED_WORKERS;
                hasNamedWorkers = true;
                // fall through
            case NAMED_WORKERS:
                if (stmt.kind == SyntaxKind.NAMED_WORKER_DECLARATION) {
                    workers.add(stmt);
                    break;
                }
                // First non-worker statement after workers: default-worker section starts.
                currentCtx = ParserRuleContext.DEFAULT_WORKER;
                // fall through
            case DEFAULT_WORKER:
            default:
                if (stmt.kind == SyntaxKind.NAMED_WORKER_DECLARATION) {
                    // Workers may not appear after the default-worker section has begun.
                    this.errorHandler.reportInvalidNode(null, "named-workers are not allowed here");
                    break;
                }
                secondStmtList.add(stmt);
                break;
        }
        token = peek();
    }

    STNode namedWorkersList;
    STNode statements;
    if (hasNamedWorkers) {
        STNode workerInitStatements = STNodeFactory.createNodeList(firstStmtList);
        STNode namedWorkers = STNodeFactory.createNodeList(workers);
        namedWorkersList = STNodeFactory.createNamedWorkerDeclarator(workerInitStatements, namedWorkers);
        statements = STNodeFactory.createNodeList(secondStmtList);
    } else {
        // No workers: everything collected so far is the default worker body.
        namedWorkersList = STNodeFactory.createEmptyNode();
        statements = STNodeFactory.createNodeList(firstStmtList);
    }

    STNode closeBrace = parseCloseBrace();
    endContext();
    return STNodeFactory.createFunctionBodyBlockNode(openBrace, namedWorkersList, statements, closeBrace);
}

// Returns true when the next token terminates a function body block. Anonymous
// function bodies additionally end at tokens that can follow an expression.
private boolean isEndOfFuncBodyBlock(SyntaxKind nextTokenKind, boolean isAnonFunc) {
    if (isAnonFunc) {
        switch (nextTokenKind) {
            case CLOSE_BRACE_TOKEN:
            case CLOSE_PAREN_TOKEN:
            case CLOSE_BRACKET_TOKEN:
            case OPEN_BRACE_TOKEN:
            case SEMICOLON_TOKEN:
            case COMMA_TOKEN:
            case PUBLIC_KEYWORD:
            case EOF_TOKEN:
            case EQUAL_TOKEN:
            case BACKTICK_TOKEN:
                return true;
            default:
                break;
        }
    }

    return isEndOfStatements();
}

// Returns true when the next token terminates a record type descriptor.
// NOTE(review): the TYPE_KEYWORD/PUBLIC_KEYWORD labels fall into default and are
// therefore redundant — possibly a leftover from an earlier version; confirm.
private boolean isEndOfRecordTypeNode(SyntaxKind nextTokenKind) {
    switch (nextTokenKind) {
        case TYPE_KEYWORD:
        case PUBLIC_KEYWORD:
        default:
            return endOfModuleLevelNode(1);
    }
}

// Returns true when the next token terminates an object type descriptor body.
private boolean isEndOfObjectTypeNode() {
    return endOfModuleLevelNode(1, true);
}

// Returns true when the next token cannot start another statement.
private boolean isEndOfStatements() {
    switch (peek().kind) {
        case RESOURCE_KEYWORD:
            return true;
        default:
            return endOfModuleLevelNode(1);
    }
}

private boolean endOfModuleLevelNode(int peekIndex) {
    return endOfModuleLevelNode(peekIndex, false);
}

// Returns true when the token at the given lookahead index starts a new
// module-level construct (i.e. the current node must have ended).
private boolean endOfModuleLevelNode(int peekIndex, boolean isObject) {
    switch (peek(peekIndex).kind) {
        case EOF_TOKEN:
        case CLOSE_BRACE_TOKEN:
        case CLOSE_BRACE_PIPE_TOKEN:
        case IMPORT_KEYWORD:
        case CONST_KEYWORD:
        case ANNOTATION_KEYWORD:
        case LISTENER_KEYWORD:
            return true;
        case SERVICE_KEYWORD:
            return isServiceDeclStart(ParserRuleContext.OBJECT_MEMBER, 1);
        case PUBLIC_KEYWORD:
            // Skip a leading visibility qualifier and check what follows it.
            return endOfModuleLevelNode(peekIndex + 1, isObject);
        case FUNCTION_KEYWORD:
            if (isObject) {
                // Methods are legal inside an object body, so this is not an end.
                return false;
            }

            // Treat "function identifier" as the start of a new module-level function.
            return peek(peekIndex + 1).kind == SyntaxKind.IDENTIFIER_TOKEN;
        default:
            return false;
    }
}

/**
 * Check whether the given token is an end of a parameter.
 *
 * @param tokenKind Next token kind
 * @return <code>true</code> if the token represents an end of a parameter. <code>false</code> otherwise
 */
private boolean isEndOfParameter(SyntaxKind tokenKind) {
    switch (tokenKind) {
        case CLOSE_PAREN_TOKEN:
        case CLOSE_BRACKET_TOKEN:
        case SEMICOLON_TOKEN:
        case COMMA_TOKEN:
        case RETURNS_KEYWORD:
        case TYPE_KEYWORD:
        case IF_KEYWORD:
        case WHILE_KEYWORD:
        case AT_TOKEN:
            return true;
        default:
            return endOfModuleLevelNode(1);
    }
}

/**
 * Check whether the given token is an end of a parameter-list.
 *
 * @param tokenKind Next token kind
 * @return <code>true</code> if the token represents an end of a parameter-list. <code>false</code> otherwise
 */
private boolean isEndOfParametersList(SyntaxKind tokenKind) {
    switch (tokenKind) {
        case CLOSE_PAREN_TOKEN:
        case SEMICOLON_TOKEN:
        case RETURNS_KEYWORD:
        case TYPE_KEYWORD:
        case IF_KEYWORD:
        case WHILE_KEYWORD:
        case OPEN_BRACE_TOKEN:
            return true;
        default:
            return endOfModuleLevelNode(1);
    }
}

/**
 * Parse type reference or variable reference.
 *
 * @return Parsed node
 */
private STNode parseStatementStartIdentifier() {
    return parseQualifiedIdentifier(ParserRuleContext.STATEMENT_START_IDENTIFIER);
}

/**
 * Parse variable name.
 *
 * @return Parsed node
 */
private STNode parseVariableName() {
    STToken token = peek();
    return parseVariableName(token.kind);
}

/**
 * Parse variable name, given the next token kind.
 *
 * @return Parsed node
 */
private STNode parseVariableName(SyntaxKind tokenKind) {
    if (tokenKind == SyntaxKind.IDENTIFIER_TOKEN) {
        return consume();
    } else {
        Solution sol = recover(peek(), ParserRuleContext.VARIABLE_NAME);
        return sol.recoveredNode;
    }
}

/**
 * Parse open brace.
 *
 * @return Parsed node
 */
private STNode parseOpenBrace() {
    STToken token = peek();
    if (token.kind == SyntaxKind.OPEN_BRACE_TOKEN) {
        return consume();
    } else {
        Solution sol = recover(token, ParserRuleContext.OPEN_BRACE);
        return sol.recoveredNode;
    }
}

/**
 * Parse close brace.
 *
 * @return Parsed node
 */
private STNode parseCloseBrace() {
    STToken token = peek();
    if (token.kind == SyntaxKind.CLOSE_BRACE_TOKEN) {
        return consume();
    } else {
        Solution sol = recover(token, ParserRuleContext.CLOSE_BRACE);
        return sol.recoveredNode;
    }
}

/**
 * <p>
 * Parse external function body. An external function body has the following structure.
 * </p>
 * <code>
 * external-function-body := = annots external ;
 * </code>
 *
 * @return Parsed node
 */
private STNode parseExternalFunctionBody() {
    startContext(ParserRuleContext.EXTERNAL_FUNC_BODY);
    STNode assign = parseAssignOp();
    STNode annotation = parseAnnotations();
    STNode externalKeyword = parseExternalKeyword();
    STNode semicolon = parseSemicolon();
    endContext();
    return STNodeFactory.createExternalFunctionBodyNode(assign, annotation, externalKeyword, semicolon);
}

/**
 * Parse semicolon.
 *
 * @return Parsed node
 */
private STNode parseSemicolon() {
    STToken token = peek();
    if (token.kind == SyntaxKind.SEMICOLON_TOKEN) {
        return consume();
    } else {
        Solution sol = recover(token, ParserRuleContext.SEMICOLON);
        return sol.recoveredNode;
    }
}

/**
 * Parse <code>external</code> keyword.
 *
 * @return Parsed node
 */
private STNode parseExternalKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.EXTERNAL_KEYWORD) {
        return consume();
    } else {
        Solution sol = recover(token, ParserRuleContext.EXTERNAL_KEYWORD);
        return sol.recoveredNode;
    }
}

/*
 * Operators
 */

/**
 * Parse assign operator.
 *
 * @return Parsed node
 */
private STNode parseAssignOp() {
    STToken token = peek();
    if (token.kind == SyntaxKind.EQUAL_TOKEN) {
        return consume();
    } else {
        Solution sol = recover(token, ParserRuleContext.ASSIGN_OP);
        return sol.recoveredNode;
    }
}

/**
 * Parse binary operator.
 *
 * @return Parsed node
 */
private STNode parseBinaryOperator() {
    STToken token = peek();
    if (isBinaryOperator(token.kind)) {
        return consume();
    } else {
        Solution sol = recover(token, ParserRuleContext.BINARY_OPERATOR);
        return sol.recoveredNode;
    }
}

/**
 * Check whether the given token kind is a binary operator.
 *
 * @param kind STToken kind
 * @return <code>true</code> if the token kind refers to a binary operator. <code>false</code> otherwise
 */
private boolean isBinaryOperator(SyntaxKind kind) {
    switch (kind) {
        case PLUS_TOKEN:
        case MINUS_TOKEN:
        case SLASH_TOKEN:
        case ASTERISK_TOKEN:
        case GT_TOKEN:
        case LT_TOKEN:
        case DOUBLE_EQUAL_TOKEN:
        case TRIPPLE_EQUAL_TOKEN:
        case LT_EQUAL_TOKEN:
        case GT_EQUAL_TOKEN:
        case NOT_EQUAL_TOKEN:
        case NOT_DOUBLE_EQUAL_TOKEN:
        case BITWISE_AND_TOKEN:
        case BITWISE_XOR_TOKEN:
        case PIPE_TOKEN:
        case LOGICAL_AND_TOKEN:
        case LOGICAL_OR_TOKEN:
        case PERCENT_TOKEN:
        case DOUBLE_LT_TOKEN:
        case DOUBLE_GT_TOKEN:
        case TRIPPLE_GT_TOKEN:
        case ELLIPSIS_TOKEN:
        case DOUBLE_DOT_LT_TOKEN:
        case ELVIS_TOKEN:
            return true;
        default:
            return false;
    }
}

/**
 * Get the precedence of a given operator.
 *
 * @param binaryOpKind Operator kind
 * @return Precedence of the given operator
 */
private OperatorPrecedence getOpPrecedence(SyntaxKind binaryOpKind) {
    switch (binaryOpKind) {
        case ASTERISK_TOKEN:
        case SLASH_TOKEN:
        case PERCENT_TOKEN:
            return OperatorPrecedence.MULTIPLICATIVE;
        case PLUS_TOKEN:
        case MINUS_TOKEN:
            return OperatorPrecedence.ADDITIVE;
        case GT_TOKEN:
        case LT_TOKEN:
        case GT_EQUAL_TOKEN:
        case LT_EQUAL_TOKEN:
        case IS_KEYWORD:
            return OperatorPrecedence.BINARY_COMPARE;
        case DOT_TOKEN:
        case OPEN_BRACKET_TOKEN:
        case OPEN_PAREN_TOKEN:
        case ANNOT_CHAINING_TOKEN:
        case OPTIONAL_CHAINING_TOKEN:
        case DOT_LT_TOKEN:
        case SLASH_LT_TOKEN:
        case DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN:
        case SLASH_ASTERISK_TOKEN:
            return OperatorPrecedence.MEMBER_ACCESS;
        case DOUBLE_EQUAL_TOKEN:
        case TRIPPLE_EQUAL_TOKEN:
        case NOT_EQUAL_TOKEN:
        case NOT_DOUBLE_EQUAL_TOKEN:
            return OperatorPrecedence.EQUALITY;
        case BITWISE_AND_TOKEN:
            return OperatorPrecedence.BITWISE_AND;
        case BITWISE_XOR_TOKEN:
            return OperatorPrecedence.BITWISE_XOR;
        case PIPE_TOKEN:
            return OperatorPrecedence.BITWISE_OR;
        case LOGICAL_AND_TOKEN:
            return OperatorPrecedence.LOGICAL_AND;
        case LOGICAL_OR_TOKEN:
            return OperatorPrecedence.LOGICAL_OR;
        case RIGHT_ARROW_TOKEN:
            return OperatorPrecedence.REMOTE_CALL_ACTION;
        case RIGHT_DOUBLE_ARROW_TOKEN:
        case SYNC_SEND_TOKEN:
            return OperatorPrecedence.ACTION;
        case DOUBLE_LT_TOKEN:
        case DOUBLE_GT_TOKEN:
        case TRIPPLE_GT_TOKEN:
            return OperatorPrecedence.SHIFT;
        case ELLIPSIS_TOKEN:
        case DOUBLE_DOT_LT_TOKEN:
            return OperatorPrecedence.RANGE;
        case ELVIS_TOKEN:
            return OperatorPrecedence.ELVIS_CONDITIONAL;
        case QUESTION_MARK_TOKEN:
        case COLON_TOKEN:
            return OperatorPrecedence.CONDITIONAL;
        default:
            throw new UnsupportedOperationException("Unsupported binary operator '" + binaryOpKind + "'");
    }
}

/**
 * <p>
 * Get the operator kind to insert during recovery, given the precedence level.
 * </p>
 *
 * @param opPrecedenceLevel Precedence of the given operator
 * @return Kind of the operator to insert
 */
private SyntaxKind getBinaryOperatorKindToInsert(OperatorPrecedence opPrecedenceLevel) {
    switch (opPrecedenceLevel) {
        case UNARY:
        case ACTION:
        case EXPRESSION_ACTION:
        case REMOTE_CALL_ACTION:
        case ANON_FUNC_OR_LET:
        case QUERY:
            // Since these levels do not have a binary operator of their own,
            // fall back to the next-lower binary-operator level (multiplicative).
        case MULTIPLICATIVE:
            return SyntaxKind.ASTERISK_TOKEN;
        case ADDITIVE:
            return SyntaxKind.PLUS_TOKEN;
        case SHIFT:
            return SyntaxKind.DOUBLE_LT_TOKEN;
        case RANGE:
            return SyntaxKind.ELLIPSIS_TOKEN;
        case BINARY_COMPARE:
            return SyntaxKind.LT_TOKEN;
        case EQUALITY:
            return SyntaxKind.DOUBLE_EQUAL_TOKEN;
        case BITWISE_AND:
            return SyntaxKind.BITWISE_AND_TOKEN;
        case BITWISE_XOR:
            return SyntaxKind.BITWISE_XOR_TOKEN;
        case BITWISE_OR:
            return SyntaxKind.PIPE_TOKEN;
        case LOGICAL_AND:
            return SyntaxKind.LOGICAL_AND_TOKEN;
        case LOGICAL_OR:
            return SyntaxKind.LOGICAL_OR_TOKEN;
        case ELVIS_CONDITIONAL:
            return SyntaxKind.ELVIS_TOKEN;
        default:
            throw new UnsupportedOperationException(
                    "Unsupported operator precedence level'" + opPrecedenceLevel + "'");
    }
}

/**
 * <p>
 * Parse a module type definition.
 * </p>
 * <code>module-type-defn := metadata [public] type identifier type-descriptor ;</code>
 *
 * @param metadata Metadata
 * @param qualifier Visibility qualifier
 * @return Parsed node
 */
private STNode parseModuleTypeDefinition(STNode metadata, STNode qualifier) {
    startContext(ParserRuleContext.MODULE_TYPE_DEFINITION);
    STNode typeKeyword = parseTypeKeyword();
    STNode typeName = parseTypeName();
    STNode typeDescriptor = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_DEF);
    STNode semicolon = parseSemicolon();
    endContext();
    return STNodeFactory.createTypeDefinitionNode(metadata, qualifier, typeKeyword, typeName, typeDescriptor,
            semicolon);
}

/**
 * Parse type keyword.
 *
 * @return Parsed node
 */
private STNode parseTypeKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.TYPE_KEYWORD) {
        return consume();
    } else {
        Solution sol = recover(token, ParserRuleContext.TYPE_KEYWORD);
        return sol.recoveredNode;
    }
}

/**
 * Parse type name.
 *
 * @return Parsed node
 */
private STNode parseTypeName() {
    STToken token = peek();
    if (token.kind == SyntaxKind.IDENTIFIER_TOKEN) {
        return consume();
    } else {
        Solution sol = recover(token, ParserRuleContext.TYPE_NAME);
        return sol.recoveredNode;
    }
}

/**
 * <p>
 * Parse record type descriptor. A record type descriptor body has the following structure.
 * </p>
 *
 * <code>record-type-descriptor := inclusive-record-type-descriptor | exclusive-record-type-descriptor
 * <br/><br/>inclusive-record-type-descriptor := record { field-descriptor* }
 * <br/><br/>exclusive-record-type-descriptor := record {| field-descriptor* [record-rest-descriptor] |}
 * </code>
 *
 * @return Parsed node
 */
private STNode parseRecordTypeDescriptor() {
    startContext(ParserRuleContext.RECORD_TYPE_DESCRIPTOR);
    STNode recordKeyword = parseRecordKeyword();
    STNode bodyStartDelimiter = parseRecordBodyStartDelimiter();

    // "{" opens an inclusive record; "{|" opens an exclusive (closed) record.
    boolean isInclusive = bodyStartDelimiter.kind == SyntaxKind.OPEN_BRACE_TOKEN;
    STNode fields = parseFieldDescriptors(isInclusive);

    STNode bodyEndDelimiter = parseRecordBodyCloseDelimiter(bodyStartDelimiter.kind);
    endContext();
    return STNodeFactory.createRecordTypeDescriptorNode(recordKeyword, bodyStartDelimiter, fields,
            bodyEndDelimiter);
}

/**
 * Parse record body start delimiter.
 *
 * @return Parsed node
 */
private STNode parseRecordBodyStartDelimiter() {
    STToken token = peek();
    return parseRecordBodyStartDelimiter(token.kind);
}

private STNode parseRecordBodyStartDelimiter(SyntaxKind kind) {
    switch (kind) {
        case OPEN_BRACE_PIPE_TOKEN:
            return parseClosedRecordBodyStart();
        case OPEN_BRACE_TOKEN:
            return parseOpenBrace();
        default:
            STToken token = peek();
            Solution solution = recover(token, ParserRuleContext.RECORD_BODY_START);
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }

            return parseRecordBodyStartDelimiter(solution.tokenKind);
    }
}

/**
 * Parse closed-record body start delimiter.
 *
 * @return Parsed node
 */
private STNode parseClosedRecordBodyStart() {
    STToken token = peek();
    if (token.kind == SyntaxKind.OPEN_BRACE_PIPE_TOKEN) {
        return consume();
    } else {
        Solution sol = recover(token, ParserRuleContext.CLOSED_RECORD_BODY_START);
        return sol.recoveredNode;
    }
}

/**
 * Parse record body close delimiter, matching the delimiter that opened the body.
 *
 * @return Parsed node
 */
private STNode parseRecordBodyCloseDelimiter(SyntaxKind startingDelimeter) {
    switch (startingDelimeter) {
        case OPEN_BRACE_PIPE_TOKEN:
            return parseClosedRecordBodyEnd();
        case OPEN_BRACE_TOKEN:
            return parseCloseBrace();
        default:
            STToken token = peek();
            Solution solution = recover(token, ParserRuleContext.RECORD_BODY_END);
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }

            return parseRecordBodyCloseDelimiter(solution.tokenKind);
    }
}

/**
 * Parse closed-record body end delimiter.
 *
 * @return Parsed node
 */
private STNode parseClosedRecordBodyEnd() {
    STToken token = peek();
    if (token.kind == SyntaxKind.CLOSE_BRACE_PIPE_TOKEN) {
        return consume();
    } else {
        Solution sol = recover(token, ParserRuleContext.CLOSED_RECORD_BODY_END);
        return sol.recoveredNode;
    }
}

/**
 * Parse record keyword.
 *
 * @return Parsed node
 */
private STNode parseRecordKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.RECORD_KEYWORD) {
        return consume();
    } else {
        Solution sol = recover(token, ParserRuleContext.RECORD_KEYWORD);
        return sol.recoveredNode;
    }
}

/**
 * <p>
 * Parse field descriptors.
 * </p>
 *
 * @return Parsed node
 */
private STNode parseFieldDescriptors(boolean isInclusive) {
    ArrayList<STNode> recordFields = new ArrayList<>();
    STToken token = peek();
    boolean endOfFields = false;
    while (!isEndOfRecordTypeNode(token.kind)) {
        STNode field = parseFieldOrRestDescriptor(isInclusive);
        if (field == null) {
            endOfFields = true;
            break;
        }
        recordFields.add(field);
        token = peek();

        // A rest descriptor must be the last descriptor; stop collecting after it.
        if (field.kind == SyntaxKind.RECORD_REST_TYPE) {
            break;
        }
    }

    // Drain (and report) any fields that illegally appear after the rest descriptor,
    // discarding them from the result list.
    while (!endOfFields && !isEndOfRecordTypeNode(token.kind)) {
        parseFieldOrRestDescriptor(isInclusive);
        this.errorHandler.reportInvalidNode(token, "cannot have more fields after the rest type descriptor");
        token = peek();
    }
    return STNodeFactory.createNodeList(recordFields);
}

/**
 * <p>
 * Parse field descriptor or rest descriptor.
 * </p>
 *
 * <code>
 * <br/><br/>field-descriptor := individual-field-descriptor | record-type-reference
 * <br/><br/><br/>individual-field-descriptor := metadata type-descriptor field-name [? | default-value] ;
 * <br/><br/>field-name := identifier
 * <br/><br/>default-value := = expression
 * <br/><br/>record-type-reference := * type-reference ;
 * <br/><br/>record-rest-descriptor := type-descriptor ... ;
 * </code>
 *
 * @return Parsed node
 */
private STNode parseFieldOrRestDescriptor(boolean isInclusive) {
    return parseFieldOrRestDescriptor(peek().kind, isInclusive);
}

private STNode parseFieldOrRestDescriptor(SyntaxKind nextTokenKind, boolean isInclusive) {
    switch (nextTokenKind) {
        case CLOSE_BRACE_TOKEN:
        case CLOSE_BRACE_PIPE_TOKEN:
            // End of the record body; null signals "no more fields" to the caller.
            return null;
        case ASTERISK_TOKEN:
            // record-type-reference: "*<type-reference>;"
            startContext(ParserRuleContext.RECORD_FIELD);
            STNode asterisk = consume();
            STNode type = parseTypeReference();
            STNode semicolonToken = parseSemicolon();
            endContext();
            return STNodeFactory.createTypeReferenceNode(asterisk, type, semicolonToken);
        case AT_TOKEN:
            // Field with leading annotations/metadata.
            startContext(ParserRuleContext.RECORD_FIELD);
            STNode metadata = parseMetaData(nextTokenKind);
            type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_RECORD_FIELD);
            STNode fieldOrRestDesc = parseFieldDescriptor(isInclusive, type, metadata);
            endContext();
            return fieldOrRestDesc;
        default:
            if (isTypeStartingToken(nextTokenKind)) {
                // Plain field: type descriptor without metadata.
                startContext(ParserRuleContext.RECORD_FIELD);
                metadata = createEmptyMetadata();
                type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_RECORD_FIELD);
                fieldOrRestDesc = parseFieldDescriptor(isInclusive, type, metadata);
                endContext();
                return fieldOrRestDesc;
            }

            STToken token = peek();
            Solution solution = recover(token, ParserRuleContext.RECORD_FIELD_OR_RECORD_END, isInclusive);
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }

            return parseFieldOrRestDescriptor(solution.tokenKind, isInclusive);
    }
}

// Inclusive records cannot have a rest descriptor, so the next token must be a
// field name; exclusive records may still produce a rest descriptor here.
private STNode parseFieldDescriptor(boolean isInclusive, STNode type, STNode metadata) {
    if (isInclusive) {
        STNode fieldName = parseVariableName();
        return parseFieldDescriptorRhs(metadata, type, fieldName);
    } else {
        return parseFieldOrRestDescriptorRhs(metadata, type);
    }
}

/**
 * Parse type reference.
 * <code>type-reference := identifier | qualified-identifier</code>
 *
 * @return Type reference node
 */
private STNode parseTypeReference() {
    return parseQualifiedIdentifier(ParserRuleContext.TYPE_REFERENCE);
}

/**
 * Parse identifier or qualified identifier.
 *
 * @return Identifier node
 */
private STNode parseQualifiedIdentifier(ParserRuleContext currentCtx) {
    STToken token = peek();
    STNode typeRefOrPkgRef;
    if (token.kind == SyntaxKind.IDENTIFIER_TOKEN) {
        typeRefOrPkgRef = consume();
    } else {
        Solution sol = recover(token, currentCtx);
        if (sol.action == Action.REMOVE) {
            return sol.recoveredNode;
        }

        typeRefOrPkgRef = sol.recoveredNode;
    }

    return parseQualifiedIdentifier(typeRefOrPkgRef);
}

/**
 * Parse identifier or qualified identifier, given the starting identifier.
 *
 * @param identifier Starting identifier
 * @return Parse node
 */
private STNode parseQualifiedIdentifier(STNode identifier) {
    STToken nextToken = peek(1);
    if (nextToken.kind != SyntaxKind.COLON_TOKEN) {
        // No ":" follows, so this is a simple (unqualified) name reference.
        return STNodeFactory.createSimpleNameReferenceNode(identifier);
    }

    STToken nextNextToken = peek(2);
    if (nextNextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {
        STToken colon = consume();
        STToken varOrFuncName = consume();
        return STNodeFactory.createQualifiedNameReferenceNode(identifier, colon, varOrFuncName);
    } else {
        // ":" not followed by an identifier: drop the invalid token and retry.
        this.errorHandler.removeInvalidToken();
        return parseQualifiedIdentifier(identifier);
    }
}

/**
 * Parse RHS of a field or rest type descriptor.
 *
 * @param metadata Metadata
 * @param type Type descriptor
 * @return Parsed node
 */
private STNode parseFieldOrRestDescriptorRhs(STNode metadata, STNode type) {
    STToken token = peek();
    return parseFieldOrRestDescriptorRhs(token.kind, metadata, type);
}

private STNode parseFieldOrRestDescriptorRhs(SyntaxKind kind, STNode metadata, STNode type) {
    switch (kind) {
        case ELLIPSIS_TOKEN:
            // record-rest-descriptor: "<type> ... ;"
            STNode ellipsis = parseEllipsis();
            STNode semicolonToken = parseSemicolon();
            return STNodeFactory.createRecordRestDescriptorNode(type, ellipsis, semicolonToken);
        case IDENTIFIER_TOKEN:
            STNode fieldName = parseVariableName();
            return parseFieldDescriptorRhs(metadata, type, fieldName);
        default:
            STToken token = peek();
            Solution solution = recover(token, ParserRuleContext.FIELD_OR_REST_DESCIPTOR_RHS, metadata, type);
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }

            return parseFieldOrRestDescriptorRhs(solution.tokenKind, metadata, type);
    }
}

/**
 * <p>
 * Parse field descriptor rhs.
 * </p>
 *
 * @param metadata Metadata
 * @param type Type descriptor
 * @param fieldName Field name
 * @return Parsed node
 */
private STNode parseFieldDescriptorRhs(STNode metadata, STNode type, STNode fieldName) {
    STToken token = peek();
    return parseFieldDescriptorRhs(token.kind, metadata, type, fieldName);
}

/**
 * <p>
 * Parse field descriptor rhs.
 * </p>
 *
 * <code>
 * field-descriptor := [? | default-value] ;
 * <br/>default-value := = expression
 * </code>
 *
 * @param kind Kind of the next token
 * @param metadata Metadata
 * @param type Type descriptor
 * @param fieldName Field name
 * @return Parsed node
 */
private STNode parseFieldDescriptorRhs(SyntaxKind kind, STNode metadata, STNode type, STNode fieldName) {
    switch (kind) {
        case SEMICOLON_TOKEN:
            // Required field with no default: "<type> <name>;"
            STNode questionMarkToken = STNodeFactory.createEmptyNode();
            STNode semicolonToken = parseSemicolon();
            return STNodeFactory.createRecordFieldNode(metadata, type, fieldName, questionMarkToken,
                    semicolonToken);
        case QUESTION_MARK_TOKEN:
            // Optional field: "<type> <name>?;"
            questionMarkToken = parseQuestionMark();
            semicolonToken = parseSemicolon();
            return STNodeFactory.createRecordFieldNode(metadata, type, fieldName, questionMarkToken,
                    semicolonToken);
        case EQUAL_TOKEN:
            // Field with default value: "<type> <name> = <expr>;"
            STNode equalsToken = parseAssignOp();
            STNode expression = parseExpression();
            semicolonToken = parseSemicolon();
            return STNodeFactory.createRecordFieldWithDefaultValueNode(metadata, type, fieldName, equalsToken,
                    expression, semicolonToken);
        default:
            STToken token = peek();
            Solution solution = recover(token, ParserRuleContext.FIELD_DESCRIPTOR_RHS, metadata, type, fieldName);
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }

            return parseFieldDescriptorRhs(solution.tokenKind, metadata, type, fieldName);
    }
}

/**
 * Parse question mark.
 *
 * @return Parsed node
 */
private STNode parseQuestionMark() {
    STToken token = peek();
    if (token.kind == SyntaxKind.QUESTION_MARK_TOKEN) {
        return consume();
    } else {
        Solution sol = recover(token, ParserRuleContext.QUESTION_MARK);
        return sol.recoveredNode;
    }
}

/*
 * Statements
 */

/**
 * Parse statements, until an end of a block is reached.
 *
 * @return Parsed node
 */
private STNode parseStatements() {
    ArrayList<STNode> stmts = new ArrayList<>();
    return parseStatements(stmts);
}

private STNode parseStatements(ArrayList<STNode> stmts) {
    while (!isEndOfStatements()) {
        STNode stmt = parseStatement();
        if (stmt == null) {
            break;
        }

        // Named workers are only valid inside a function body block, not in
        // arbitrary statement lists.
        if (stmt.kind == SyntaxKind.NAMED_WORKER_DECLARATION) {
            this.errorHandler.reportInvalidNode(null, "named-workers are not allowed here");
            break;
        }
        stmts.add(stmt);
    }

    return STNodeFactory.createNodeList(stmts);
}

/**
 * Parse a single statement.
 *
 * @return Parsed node
 */
protected STNode parseStatement() {
    STToken token = peek();
    return parseStatement(token.kind, 1);
}

// First-stage statement dispatch: consumes leading annotations (if any) and
// filters out tokens that cannot start a statement before delegating to the
// annotated overload below.
private STNode parseStatement(SyntaxKind tokenKind, int nextTokenIndex) {
    STNode annots = null;
    switch (tokenKind) {
        case CLOSE_BRACE_TOKEN:
            // Returning null marks the end of the statement list to the caller.
            return null;
        case SEMICOLON_TOKEN:
            // A stray semicolon is not a statement; drop it and retry.
            this.errorHandler.removeInvalidToken();
            return parseStatement();
        case AT_TOKEN:
            annots = parseAnnotations(tokenKind);
            tokenKind = peek().kind;
            break;
        case FINAL_KEYWORD:
        case IF_KEYWORD:
        case WHILE_KEYWORD:
        case PANIC_KEYWORD:
        case CONTINUE_KEYWORD:
        case BREAK_KEYWORD:
        case RETURN_KEYWORD:
        case TYPE_KEYWORD:
        case LOCK_KEYWORD:
        case OPEN_BRACE_TOKEN:
        case FORK_KEYWORD:
        case FOREACH_KEYWORD:
        case XMLNS_KEYWORD:
        case TRANSACTION_KEYWORD:
        case RETRY_KEYWORD:
        case ROLLBACK_KEYWORD:
        case CHECK_KEYWORD:
        case CHECKPANIC_KEYWORD:
        case TRAP_KEYWORD:
        case START_KEYWORD:
        case FLUSH_KEYWORD:
        case LEFT_ARROW_TOKEN:
        case WAIT_KEYWORD:
        case COMMIT_KEYWORD:
        case WORKER_KEYWORD:
            break;
        default:
            if (isTypeStartingToken(tokenKind)) {
                break;
            }

            if (isValidExpressionStart(tokenKind, nextTokenIndex)) {
                break;
            }

            STToken token = peek();
            Solution solution = recover(token, ParserRuleContext.STATEMENT, nextTokenIndex);
            if (solution.action == Action.KEEP) {
                break;
            }

            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }

            return parseStatement(solution.tokenKind, nextTokenIndex);
    }

    return parseStatement(tokenKind, annots, nextTokenIndex);
}

// Normalizes a possibly-null annotation node to an empty node-list so downstream
// factory calls never see null.
private STNode getAnnotations(STNode nullbaleAnnot) {
    if (nullbaleAnnot != null) {
        return nullbaleAnnot;
    }

    return STNodeFactory.createNodeList(new ArrayList<>());
}

private STNode parseStatement(STNode annots) {
    return parseStatement(peek().kind, annots, 1);
}

/**
 * Parse a single statement, given the next token kind.
 *
 * @param tokenKind Next token kind
 * @return Parsed node
 */
private STNode parseStatement(SyntaxKind tokenKind, STNode annots, int nextTokenIndex) {
    switch (tokenKind) {
        case CLOSE_BRACE_TOKEN:
            // Annotations were consumed but there is no statement to attach them to.
            this.errorHandler.reportInvalidNode(null, "invalid annotations");
            return null;
        case SEMICOLON_TOKEN:
            this.errorHandler.removeInvalidToken();
            return parseStatement(annots);
        case FINAL_KEYWORD:
            STNode finalKeyword = parseFinalKeyword();
            return parseVariableDecl(getAnnotations(annots), finalKeyword, false);
        case IF_KEYWORD:
            return parseIfElseBlock();
        case WHILE_KEYWORD:
            return parseWhileStatement();
        case PANIC_KEYWORD:
            return parsePanicStatement();
        case CONTINUE_KEYWORD:
            return parseContinueStatement();
        case BREAK_KEYWORD:
            return parseBreakStatement();
        case RETURN_KEYWORD:
            return parseReturnStatement();
        case TYPE_KEYWORD:
            return parseLocalTypeDefinitionStatement(getAnnotations(annots));
        case IDENTIFIER_TOKEN:
            // Could be a var-decl, assignment, or call statement; resolved downstream.
            return parseStatementStartsWithIdentifier(getAnnotations(annots));
        case LOCK_KEYWORD:
            return parseLockStatement();
        case OPEN_BRACE_TOKEN:
            return parseStatementStartsWithOpenBrace();
        case WORKER_KEYWORD:
            return parseNamedWorkerDeclaration(getAnnotations(annots));
        case FORK_KEYWORD:
            return parseForkStatement();
        case FOREACH_KEYWORD:
            return parseForEachStatement();
        case START_KEYWORD:
        case CHECK_KEYWORD:
        case CHECKPANIC_KEYWORD:
        case TRAP_KEYWORD:
        case FLUSH_KEYWORD:
        case LEFT_ARROW_TOKEN:
        case WAIT_KEYWORD:
        case FROM_KEYWORD:
        case COMMIT_KEYWORD:
            return parseExpressionStament(tokenKind, getAnnotations(annots));
        case XMLNS_KEYWORD:
            return parseXMLNamepsaceDeclaration();
        case TRANSACTION_KEYWORD:
            return parseTransactionStatement();
        case RETRY_KEYWORD:
            return parseRetryStatement();
        case ROLLBACK_KEYWORD:
            return parseRollbackStatement();
        case OPEN_BRACKET_TOKEN:
            return parseStatementStartsWithOpenBracket(getAnnotations(annots), false);
        default:
            if (isTypeStartingToken(tokenKind)) {
                finalKeyword = STNodeFactory.createEmptyNode();
                return parseVariableDecl(getAnnotations(annots), finalKeyword, false);
            }

            if (isValidExpressionStart(tokenKind, nextTokenIndex)) {
                return parseStamentStartWithExpr(tokenKind, getAnnotations(annots));
            }

            STToken token = peek();
            Solution solution = recover(token, ParserRuleContext.STATEMENT_WITHOUT_ANNOTS, annots, nextTokenIndex);
            if (solution.action == Action.KEEP) {
                // Keep the token and treat it as the start of a variable declaration.
                finalKeyword = STNodeFactory.createEmptyNode();
                return parseVariableDecl(getAnnotations(annots), finalKeyword, false);
            }

            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }

            return parseStatement(solution.tokenKind, annots, nextTokenIndex - 1);
    }
}

/**
 * <p>
 * Parse variable declaration. Variable declaration can be a local or module level.
 * </p>
 *
 * <code>
 * local-var-decl-stmt := local-init-var-decl-stmt | local-no-init-var-decl-stmt
 * <br/><br/>
 * local-init-var-decl-stmt := [annots] [final] typed-binding-pattern = action-or-expr ;
 * <br/><br/>
 * local-no-init-var-decl-stmt := [annots] [final] type-descriptor variable-name ;
 * </code>
 *
 * @param annots Annotations or metadata
 * @param finalKeyword Final keyword
 * @return Parsed node
 */
private STNode parseVariableDecl(STNode annots, STNode finalKeyword, boolean isModuleVar) {
    startContext(ParserRuleContext.VAR_DECL_STMT);
    STNode typeBindingPattern = parseTypedBindingPattern(ParserRuleContext.VAR_DECL_STMT);
    return parseVarDeclRhs(annots, finalKeyword, typeBindingPattern, isModuleVar);
}

/**
 * Parse final keyword.
 *
 * @return Parsed node
 */
private STNode parseFinalKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.FINAL_KEYWORD) {
        return consume();
    } else {
        Solution sol = recover(token, ParserRuleContext.FINAL_KEYWORD);
        return sol.recoveredNode;
    }
}

/**
 * <p>
 * Parse the right hand side of a variable declaration statement.
 * </p>
 * <code>
 * var-decl-rhs := ; | = action-or-expr ;
 * </code>
 *
 * @param metadata metadata
 * @param finalKeyword Final keyword
 * @param typedBindingPattern Typed binding pattern
 * @return Parsed node
 */
private STNode parseVarDeclRhs(STNode metadata, STNode finalKeyword, STNode typedBindingPattern,
                               boolean isModuleVar) {
    STToken token = peek();
    return parseVarDeclRhs(token.kind, metadata, finalKeyword, typedBindingPattern, isModuleVar);
}

/**
 * Parse the right hand side of a variable declaration statement, given the
 * next token kind.
 *
 * @param tokenKind Next token kind
 * @param metadata Metadata
 * @param finalKeyword Final keyword
 * @param typedBindingPattern Typed binding pattern
 * @param isModuleVar flag indicating whether the var is module level
 * @return Parsed node
 */
private STNode parseVarDeclRhs(SyntaxKind tokenKind, STNode metadata, STNode finalKeyword,
                               STNode typedBindingPattern, boolean isModuleVar) {
    STNode assign;
    STNode expr;
    STNode semicolon;
    switch (tokenKind) {
        case EQUAL_TOKEN:
            assign = parseAssignOp();
            if (isModuleVar) {
                // Module-level initializers may not contain actions.
                expr = parseExpression();
            } else {
                expr = parseActionOrExpression();
            }
            semicolon = parseSemicolon();
            break;
        case SEMICOLON_TOKEN:
            // Declaration without an initializer.
            assign = STNodeFactory.createEmptyNode();
            expr = STNodeFactory.createEmptyNode();
            semicolon = parseSemicolon();
            break;
        default:
            STToken token = peek();
            Solution solution = recover(token, ParserRuleContext.VAR_DECL_STMT_RHS, metadata, finalKeyword,
                    typedBindingPattern, isModuleVar);
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }

            return parseVarDeclRhs(solution.tokenKind, metadata, finalKeyword, typedBindingPattern, isModuleVar);
    }

    endContext();
    if (isModuleVar) {
        return STNodeFactory.createModuleVariableDeclarationNode(metadata, finalKeyword, typedBindingPattern,
                assign, expr, semicolon);
    }

    return STNodeFactory.createVariableDeclarationNode(metadata, finalKeyword, typedBindingPattern, assign, expr,
            semicolon);
}

/**
 * <p>
 * Parse the RHS portion of the assignment.
 * </p>
 * <code>assignment-stmt-rhs := = action-or-expr ;</code>
 *
 * @param lvExpr LHS expression
 * @return Parsed node
 */
private STNode parseAssignmentStmtRhs(STNode lvExpr) {
    validateLVExpr(lvExpr);
    STNode assign = parseAssignOp();
    STNode expr = parseActionOrExpression();
    STNode semicolon = parseSemicolon();
    endContext();
    return STNodeFactory.createAssignmentStatementNode(lvExpr, assign, expr, semicolon);
}

/*
 * Expressions
 */

/**
 * Parse expression. This will start parsing expressions from the lowest level of precedence.
 *
 * @return Parsed node
 */
protected STNode parseExpression() {
    return parseExpression(DEFAULT_OP_PRECEDENCE, true, false);
}

/**
 * Parse action or expression. This will start parsing actions or expressions from the lowest level of precedence.
 *
 * @return Parsed node
 */
private STNode parseActionOrExpression() {
    return parseExpression(DEFAULT_OP_PRECEDENCE, true, true);
}

private STNode parseActionOrExpressionInLhs(SyntaxKind tokenKind, STNode annots) {
    return parseExpression(tokenKind, DEFAULT_OP_PRECEDENCE, annots, false, true);
}

/**
 * Parse expression.
 *
 * @param isRhsExpr Flag indicating whether this is a rhs expression
 * @return Parsed node
 */
private STNode parseExpression(boolean isRhsExpr) {
    return parseExpression(DEFAULT_OP_PRECEDENCE, isRhsExpr, false);
}

// Reports an error if the given expression cannot appear on the LHS of an assignment.
private void validateLVExpr(STNode expression) {
    if (isValidLVExpr(expression)) {
        return;
    }
    this.errorHandler.reportInvalidNode(null, "invalid expression for assignment lhs");
}

// An LV-expression is a name reference, a binding pattern, or a field/index
// access chain that ultimately bottoms out in one of those. Missing tokens are
// accepted to avoid cascading errors after recovery.
private boolean isValidLVExpr(STNode expression) {
    switch (expression.kind) {
        case SIMPLE_NAME_REFERENCE:
        case QUALIFIED_NAME_REFERENCE:
        case LIST_BINDING_PATTERN:
        case MAPPING_BINDING_PATTERN:
            return true;
        case FIELD_ACCESS:
            return isValidLVExpr(((STFieldAccessExpressionNode) expression).expression);
        case INDEXED_EXPRESSION:
            return isValidLVExpr(((STIndexedExpressionNode) expression).containerExpression);
        default:
            return (expression instanceof STMissingToken);
    }
}

/**
 * Parse an expression that has an equal or higher precedence than a given level.
 *
 * @param precedenceLevel Precedence level of expression to be parsed
 * @param isRhsExpr Flag indicating whether this is a rhs expression
 * @param allowActions Flag indicating whether the current context support actions
 * @return Parsed node
 */
private STNode parseExpression(OperatorPrecedence precedenceLevel, boolean isRhsExpr, boolean allowActions) {
    STToken token = peek();
    return parseExpression(token.kind, precedenceLevel, isRhsExpr, allowActions);
}

private STNode parseExpression(SyntaxKind kind, OperatorPrecedence precedenceLevel, boolean isRhsExpr,
                               boolean allowActions) {
    STNode expr = parseTerminalExpression(kind, isRhsExpr, allowActions);
    return parseExpressionRhs(precedenceLevel, expr, isRhsExpr, allowActions);
}

private STNode parseExpression(SyntaxKind kind, OperatorPrecedence precedenceLevel, STNode annots,
                               boolean isRhsExpr, boolean allowActions) {
    STNode expr = parseTerminalExpression(kind, annots, isRhsExpr, allowActions);
    return parseExpressionRhs(precedenceLevel, expr, isRhsExpr, allowActions);
}

/**
 * Parse terminal expressions. A terminal expression has the highest precedence level
 * out of all expressions, and will be at the leaves of an expression tree.
 *
 * @param annots Annotations
 * @param isRhsExpr Is a rhs expression
 * @param allowActions Allow actions
 * @return Parsed node
 */
private STNode parseTerminalExpression(STNode annots, boolean isRhsExpr, boolean allowActions) {
    return parseTerminalExpression(peek().kind, annots, isRhsExpr, allowActions);
}

private STNode parseTerminalExpression(SyntaxKind kind, boolean isRhsExpr, boolean allowActions) {
    STNode annots;
    if (kind == SyntaxKind.AT_TOKEN) {
        annots = parseAnnotations();
        kind = peek().kind;
    } else {
        annots = STNodeFactory.createEmptyNode();
    }

    STNode expr = parseTerminalExpression(kind, annots, isRhsExpr, allowActions);
    // Of the expression kinds reachable here, only start-actions may carry annotations.
    if (!isEmpty(annots) && expr.kind != SyntaxKind.START_ACTION) {
        this.errorHandler.reportInvalidNode(null, "annotations are not supported for expressions");
    }

    return expr;
}

private STNode parseTerminalExpression(SyntaxKind kind, STNode annots, boolean isRhsExpr, boolean allowActions) {
    switch (kind) {
        case DECIMAL_INTEGER_LITERAL:
        case HEX_INTEGER_LITERAL:
        case STRING_LITERAL:
        case NULL_KEYWORD:
        case TRUE_KEYWORD:
        case FALSE_KEYWORD:
        case DECIMAL_FLOATING_POINT_LITERAL:
        case HEX_FLOATING_POINT_LITERAL:
            return parseBasicLiteral();
        case IDENTIFIER_TOKEN:
            return parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);
        case OPEN_PAREN_TOKEN:
            return parseBracedExpression(isRhsExpr, allowActions);
        case CHECK_KEYWORD:
        case CHECKPANIC_KEYWORD:
            return parseCheckExpression(isRhsExpr, allowActions);
        case OPEN_BRACE_TOKEN:
            return parseMappingConstructorExpr();
        case TYPEOF_KEYWORD:
            return parseTypeofExpression(isRhsExpr);
        case PLUS_TOKEN:
        case MINUS_TOKEN:
        case NEGATION_TOKEN:
        case EXCLAMATION_MARK_TOKEN:
            return parseUnaryExpression(isRhsExpr);
        case TRAP_KEYWORD:
            return parseTrapExpression(isRhsExpr, allowActions);
        case OPEN_BRACKET_TOKEN:
            return parseListConstructorExpr();
        case LT_TOKEN:
            return parseTypeCastExpr(isRhsExpr);
        case TABLE_KEYWORD:
        case STREAM_KEYWORD:
        case FROM_KEYWORD:
            return parseTableConstructorOrQuery(isRhsExpr);
        case ERROR_KEYWORD:
            return parseErrorConstructorExpr();
        case LET_KEYWORD:
            return parseLetExpression(isRhsExpr);
        case BACKTICK_TOKEN:
            return parseTemplateExpression();
        case XML_KEYWORD:
            // "xml" is only a template start when followed by a backtick;
            // otherwise fall out of the switch to the recovery path below.
            STToken nextNextToken = getNextNextToken(kind);
            if (nextNextToken.kind == SyntaxKind.BACKTICK_TOKEN) {
                return parseXMLTemplateExpression();
            }
            break;
        case STRING_KEYWORD:
            // Same treatment as "xml" above for string templates.
            nextNextToken = getNextNextToken(kind);
            if (nextNextToken.kind == SyntaxKind.BACKTICK_TOKEN) {
                return parseStringTemplateExpression();
            }
            break;
        case FUNCTION_KEYWORD:
            return parseExplicitFunctionExpression(annots);
        case AT_TOKEN:
            // Annotation at the start of an expression; drop to recovery below.
            break;
        case NEW_KEYWORD:
            return parseNewExpression();
        case START_KEYWORD:
            return parseStartAction(annots);
        case FLUSH_KEYWORD:
            return parseFlushAction();
        case LEFT_ARROW_TOKEN:
            return parseReceiveAction();
        case WAIT_KEYWORD:
            return parseWaitAction();
        case COMMIT_KEYWORD:
            return parseCommitAction();
        case TRANSACTIONAL_KEYWORD:
            return parseTransactionalExpression();
        case SERVICE_KEYWORD:
            return parseServiceConstructorExpression(annots);
        case BASE16_KEYWORD:
        case BASE64_KEYWORD:
            return parseByteArrayLiteral(kind);
        default:
            break;
    }

    Solution solution = recover(peek(), ParserRuleContext.TERMINAL_EXPRESSION, annots, isRhsExpr, allowActions);
    if (solution.action == Action.REMOVE) {
        return solution.recoveredNode;
    }

    if (solution.action == Action.KEEP) {
        // KEEP here means the xml/string keyword was valid after all: the
        // recovery engine decided the template form should be parsed.
        if (kind == SyntaxKind.XML_KEYWORD) {
            return parseXMLTemplateExpression();
        }

        return parseStringTemplateExpression();
    }

    switch (solution.tokenKind) {
        case IDENTIFIER_TOKEN:
            return parseQualifiedIdentifier(solution.recoveredNode);
        case DECIMAL_INTEGER_LITERAL:
        case HEX_INTEGER_LITERAL:
        case STRING_LITERAL:
        case NULL_KEYWORD:
        case TRUE_KEYWORD:
        case FALSE_KEYWORD:
        case DECIMAL_FLOATING_POINT_LITERAL:
        case HEX_FLOATING_POINT_LITERAL:
            return solution.recoveredNode;
        default:
            return
parseTerminalExpression(solution.tokenKind, annots, isRhsExpr, allowActions); } } private boolean isValidExprStart(SyntaxKind tokenKind) { switch (tokenKind) { case DECIMAL_INTEGER_LITERAL: case HEX_INTEGER_LITERAL: case STRING_LITERAL: case NULL_KEYWORD: case TRUE_KEYWORD: case FALSE_KEYWORD: case DECIMAL_FLOATING_POINT_LITERAL: case HEX_FLOATING_POINT_LITERAL: case IDENTIFIER_TOKEN: case OPEN_PAREN_TOKEN: case CHECK_KEYWORD: case CHECKPANIC_KEYWORD: case OPEN_BRACE_TOKEN: case TYPEOF_KEYWORD: case PLUS_TOKEN: case MINUS_TOKEN: case NEGATION_TOKEN: case EXCLAMATION_MARK_TOKEN: case TRAP_KEYWORD: case OPEN_BRACKET_TOKEN: case LT_TOKEN: case TABLE_KEYWORD: case STREAM_KEYWORD: case FROM_KEYWORD: case ERROR_KEYWORD: case LET_KEYWORD: case BACKTICK_TOKEN: case XML_KEYWORD: case STRING_KEYWORD: case FUNCTION_KEYWORD: case AT_TOKEN: case NEW_KEYWORD: case START_KEYWORD: case FLUSH_KEYWORD: case LEFT_ARROW_TOKEN: case WAIT_KEYWORD: case SERVICE_KEYWORD: return true; default: return false; } } /** * <p> * Parse a new expression. * </p> * <code> * new-expr := explicit-new-expr | implicit-new-expr * <br/> * explicit-new-expr := new type-descriptor ( arg-list ) * <br/> * implicit-new-expr := new [( arg-list )] * </code> * * @return Parsed NewExpression node. */ private STNode parseNewExpression() { STNode newKeyword = parseNewKeyword(); return parseNewKeywordRhs(newKeyword); } /** * <p> * Parse `new` keyword. * </p> * * @return Parsed NEW_KEYWORD Token. */ private STNode parseNewKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.NEW_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.NEW_KEYWORD); return sol.recoveredNode; } } private STNode parseNewKeywordRhs(STNode newKeyword) { STNode token = peek(); return parseNewKeywordRhs(token.kind, newKeyword); } /** * <p> * Parse an implicit or explicit new expression. * </p> * * @param kind next token kind. * @param newKeyword parsed node for `new` keyword. 
* @return Parsed new-expression node. */ private STNode parseNewKeywordRhs(SyntaxKind kind, STNode newKeyword) { switch (kind) { case OPEN_PAREN_TOKEN: return parseImplicitNewRhs(newKeyword); case SEMICOLON_TOKEN: break; case IDENTIFIER_TOKEN: case OBJECT_KEYWORD: return parseTypeDescriptorInNewExpr(newKeyword); default: break; } return STNodeFactory.createImplicitNewExpressionNode(newKeyword, STNodeFactory.createEmptyNode()); } /** * <p> * Parse an Explicit New expression. * </p> * <code> * explicit-new-expr := new type-descriptor ( arg-list ) * </code> * * @param newKeyword Parsed `new` keyword. * @return the Parsed Explicit New Expression. */ private STNode parseTypeDescriptorInNewExpr(STNode newKeyword) { STNode typeDescriptor = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_NEW_EXPR); STNode parenthesizedArgsList = parseParenthesizedArgList(); return STNodeFactory.createExplicitNewExpressionNode(newKeyword, typeDescriptor, parenthesizedArgsList); } /** * <p> * Parse an <code>implicit-new-expr</code> with arguments. * </p> * * @param newKeyword Parsed `new` keyword. * @return Parsed implicit-new-expr. */ private STNode parseImplicitNewRhs(STNode newKeyword) { STNode implicitNewArgList = parseParenthesizedArgList(); return STNodeFactory.createImplicitNewExpressionNode(newKeyword, implicitNewArgList); } /** * <p> * Parse the parenthesized argument list for a <code>new-expr</code>. * </p> * * @return Parsed parenthesized rhs of <code>new-expr</code>. */ private STNode parseParenthesizedArgList() { STNode openParan = parseOpenParenthesis(ParserRuleContext.ARG_LIST_START); STNode arguments = parseArgsList(); STNode closeParan = parseCloseParenthesis(); return STNodeFactory.createParenthesizedArgList(openParan, arguments, closeParan); } /** * <p> * Parse the right-hand-side of an expression. 
* </p> * <code>expr-rhs := (binary-op expression * | dot identifier * | open-bracket expression close-bracket * )*</code> * * @param precedenceLevel Precedence level of the expression that is being parsed currently * @param lhsExpr LHS expression of the expression * @param isRhsExpr Flag indicating whether this is on a rhsExpr of a statement * @param allowActions Flag indicating whether the current context support actions * @return Parsed node */ private STNode parseExpressionRhs(OperatorPrecedence precedenceLevel, STNode lhsExpr, boolean isRhsExpr, boolean allowActions) { STToken token = peek(); return parseExpressionRhs(token.kind, precedenceLevel, lhsExpr, isRhsExpr, allowActions); } /** * Parse the right hand side of an expression given the next token kind. * * @param tokenKind Next token kind * @param currentPrecedenceLevel Precedence level of the expression that is being parsed currently * @param lhsExpr LHS expression * @param isRhsExpr Flag indicating whether this is a rhs expr or not * @param allowActions Flag indicating whether to allow actions or not * @return Parsed node */ private STNode parseExpressionRhs(SyntaxKind tokenKind, OperatorPrecedence currentPrecedenceLevel, STNode lhsExpr, boolean isRhsExpr, boolean allowActions) { if (isEndOfExpression(tokenKind, isRhsExpr)) { return lhsExpr; } if (lhsExpr.kind == SyntaxKind.ASYNC_SEND_ACTION) { return lhsExpr; } if (!isValidExprRhsStart(tokenKind)) { STToken token = peek(); Solution solution = recover(token, ParserRuleContext.EXPRESSION_RHS, currentPrecedenceLevel, lhsExpr, isRhsExpr, allowActions); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } if (solution.ctx == ParserRuleContext.BINARY_OPERATOR) { SyntaxKind binaryOpKind = getBinaryOperatorKindToInsert(currentPrecedenceLevel); return parseExpressionRhs(binaryOpKind, currentPrecedenceLevel, lhsExpr, isRhsExpr, allowActions); } else { return parseExpressionRhs(solution.tokenKind, currentPrecedenceLevel, lhsExpr, isRhsExpr, 
allowActions); } } if (tokenKind == SyntaxKind.GT_TOKEN && peek(2).kind == SyntaxKind.GT_TOKEN) { if (peek(3).kind == SyntaxKind.GT_TOKEN) { tokenKind = SyntaxKind.TRIPPLE_GT_TOKEN; } else { tokenKind = SyntaxKind.DOUBLE_GT_TOKEN; } } OperatorPrecedence nextOperatorPrecedence = getOpPrecedence(tokenKind); if (currentPrecedenceLevel.isHigherThan(nextOperatorPrecedence, allowActions)) { return lhsExpr; } STNode newLhsExpr; STNode operator; switch (tokenKind) { case OPEN_PAREN_TOKEN: newLhsExpr = parseFuncCall(lhsExpr); break; case OPEN_BRACKET_TOKEN: newLhsExpr = parseMemberAccessExpr(lhsExpr, isRhsExpr); break; case DOT_TOKEN: newLhsExpr = parseFieldAccessOrMethodCall(lhsExpr); break; case IS_KEYWORD: newLhsExpr = parseTypeTestExpression(lhsExpr); break; case RIGHT_ARROW_TOKEN: newLhsExpr = parseRemoteMethodCallOrAsyncSendAction(lhsExpr, isRhsExpr); if (!allowActions) { this.errorHandler.reportInvalidNode(null, "actions are not allowed here"); } break; case SYNC_SEND_TOKEN: newLhsExpr = parseSyncSendAction(lhsExpr); if (!allowActions) { this.errorHandler.reportInvalidNode(null, "actions are not allowed here"); } break; case RIGHT_DOUBLE_ARROW_TOKEN: newLhsExpr = parseImplicitAnonFunc(lhsExpr); break; case ANNOT_CHAINING_TOKEN: newLhsExpr = parseAnnotAccessExpression(lhsExpr); break; case OPTIONAL_CHAINING_TOKEN: newLhsExpr = parseOptionalFieldAccessExpression(lhsExpr); break; case QUESTION_MARK_TOKEN: newLhsExpr = parseConditionalExpression(lhsExpr); break; case DOT_LT_TOKEN: newLhsExpr = parseXMLFilterExpression(lhsExpr); break; case SLASH_LT_TOKEN: case DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN: case SLASH_ASTERISK_TOKEN: newLhsExpr = parseXMLStepExpression(lhsExpr); break; default: if (tokenKind == SyntaxKind.DOUBLE_GT_TOKEN) { operator = parseSignedRightShiftToken(); } else if (tokenKind == SyntaxKind.TRIPPLE_GT_TOKEN) { operator = parseUnsignedRightShiftToken(); } else { operator = parseBinaryOperator(); } STNode rhsExpr = parseExpression(nextOperatorPrecedence, 
isRhsExpr, false); newLhsExpr = STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, lhsExpr, operator, rhsExpr); break; } return parseExpressionRhs(currentPrecedenceLevel, newLhsExpr, isRhsExpr, allowActions); } private boolean isValidExprRhsStart(SyntaxKind tokenKind) { switch (tokenKind) { case OPEN_PAREN_TOKEN: case DOT_TOKEN: case OPEN_BRACKET_TOKEN: case IS_KEYWORD: case RIGHT_ARROW_TOKEN: case RIGHT_DOUBLE_ARROW_TOKEN: case SYNC_SEND_TOKEN: case ANNOT_CHAINING_TOKEN: case OPTIONAL_CHAINING_TOKEN: case QUESTION_MARK_TOKEN: case COLON_TOKEN: case DOT_LT_TOKEN: case SLASH_LT_TOKEN: case DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN: case SLASH_ASTERISK_TOKEN: return true; default: return isBinaryOperator(tokenKind); } } /** * Parse member access expression. * * @param lhsExpr Container expression * @param isRhsExpr Is this is a rhs expression * @return Member access expression */ private STNode parseMemberAccessExpr(STNode lhsExpr, boolean isRhsExpr) { startContext(ParserRuleContext.MEMBER_ACCESS_KEY_EXPR); STNode openBracket = parseOpenBracket(); STNode keyExpr = parseMemberAccessKeyExprs(isRhsExpr); STNode closeBracket = parseCloseBracket(); endContext(); return STNodeFactory.createIndexedExpressionNode(lhsExpr, openBracket, keyExpr, closeBracket); } /** * Parse key expression of a member access expression. A type descriptor * that starts with a type-ref (e.g: T[a][b]) also goes through this * method. 
* <p> * <code>key-expression := single-key-expression | multi-key-expression</code> * * @param isRhsExpr Is this is a rhs expression * @return Key expression */ private STNode parseMemberAccessKeyExprs(boolean isRhsExpr) { List<STNode> exprList = new ArrayList<>(); STNode keyExpr; STNode keyExprEnd; while (!isEndOfTypeList(peek().kind)) { keyExpr = parseKeyExpr(isRhsExpr); exprList.add(keyExpr); keyExprEnd = parseMemberAccessKeyExprEnd(); if (keyExprEnd == null) { break; } exprList.add(keyExprEnd); } if (isRhsExpr && exprList.isEmpty()) { exprList.add(STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN)); this.errorHandler.reportInvalidNode(null, "missing key expression"); } if (!isRhsExpr && exprList.size() > 1) { this.errorHandler.reportInvalidNode(null, "cannot have multiple keys"); } return STNodeFactory.createNodeList(exprList); } private STNode parseKeyExpr(boolean isRhsExpr) { if (!isRhsExpr && peek().kind == SyntaxKind.ASTERISK_TOKEN) { return STNodeFactory.createBasicLiteralNode(SyntaxKind.ASTERISK_TOKEN, consume()); } return parseExpression(isRhsExpr); } private STNode parseMemberAccessKeyExprEnd() { return parseMemberAccessKeyExprEnd(peek().kind); } private STNode parseMemberAccessKeyExprEnd(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case COMMA_TOKEN: return parseComma(); case CLOSE_BRACKET_TOKEN: return null; default: Solution solution = recover(peek(), ParserRuleContext.MEMBER_ACCESS_KEY_EXPR_END); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseMemberAccessKeyExprEnd(solution.tokenKind); } } /** * Parse close bracket. * * @return Parsed node */ private STNode parseCloseBracket() { STToken token = peek(); if (token.kind == SyntaxKind.CLOSE_BRACKET_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.CLOSE_BRACKET); return sol.recoveredNode; } } /** * Parse field access, xml required attribute access expressions or method call expression. 
* <p> * <code> * field-access-expr := expression . field-name * <br/> * xml-required-attribute-access-expr := expression . xml-attribute-name * <br/> * xml-attribute-name := xml-qualified-name | qualified-identifier | identifier * <br/> * method-call-expr := expression . method-name ( arg-list ) * </code> * * @param lhsExpr Preceding expression of the field access or method call * @return One of <code>field-access-expression</code> or <code>method-call-expression</code>. */ private STNode parseFieldAccessOrMethodCall(STNode lhsExpr) { STNode dotToken = parseDotToken(); STNode fieldOrMethodName = parseFieldAccessIdentifier(); if (fieldOrMethodName.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) { return STNodeFactory.createFieldAccessExpressionNode(lhsExpr, dotToken, fieldOrMethodName); } STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.OPEN_PAREN_TOKEN) { STNode openParen = parseOpenParenthesis(ParserRuleContext.ARG_LIST_START); STNode args = parseArgsList(); STNode closeParen = parseCloseParenthesis(); return STNodeFactory.createMethodCallExpressionNode(lhsExpr, dotToken, fieldOrMethodName, openParen, args, closeParen); } return STNodeFactory.createFieldAccessExpressionNode(lhsExpr, dotToken, fieldOrMethodName); } /** * <p> * Parse braced expression. 
* </p> * <code>braced-expr := ( expression )</code> * * @param isRhsExpr Flag indicating whether this is on a rhsExpr of a statement * @param allowActions Allow actions * @return Parsed node */ private STNode parseBracedExpression(boolean isRhsExpr, boolean allowActions) { STNode openParen = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS); startContext(ParserRuleContext.BRACED_EXPR_OR_ANON_FUNC_PARAMS); STToken nextToken = peek(); STNode expr; if (nextToken.kind == SyntaxKind.CLOSE_PAREN_TOKEN) { return parseNilLiteralOrEmptyAnonFuncParamRhs(openParen); } if (allowActions) { expr = parseExpression(DEFAULT_OP_PRECEDENCE, isRhsExpr, true); } else { expr = parseExpression(isRhsExpr); } if (expr.kind == SyntaxKind.SIMPLE_NAME_REFERENCE) { return parseBracedExprOrAnonFuncParamRhs(peek().kind, openParen, expr); } STNode closeParen = parseCloseParenthesis(); endContext(); if (isAction(expr)) { return STNodeFactory.createBracedExpressionNode(SyntaxKind.BRACED_ACTION, openParen, expr, closeParen); } return STNodeFactory.createBracedExpressionNode(SyntaxKind.BRACED_EXPRESSION, openParen, expr, closeParen); } private STNode parseNilLiteralOrEmptyAnonFuncParamRhs(STNode openParen) { STNode closeParen = parseCloseParenthesis(); STToken nextToken = peek(); if (nextToken.kind != SyntaxKind.RIGHT_DOUBLE_ARROW_TOKEN) { endContext(); return createNilLiteral(openParen, closeParen); } else { STNode params = STNodeFactory.createNodeList(); STNode anonFuncParam = STNodeFactory.createImplicitAnonymousFunctionParameters(openParen, params, closeParen); endContext(); return anonFuncParam; } } private STNode parseBracedExprOrAnonFuncParamRhs(SyntaxKind nextTokenKind, STNode openParen, STNode expr) { switch (nextTokenKind) { case CLOSE_PAREN_TOKEN: STNode closeParen = parseCloseParenthesis(); STNode bracedEXprOrAnonFuncParam; if (isAction(expr)) { bracedEXprOrAnonFuncParam = STNodeFactory.createBracedExpressionNode(SyntaxKind.BRACED_ACTION, openParen, expr, closeParen); } else { 
bracedEXprOrAnonFuncParam = STNodeFactory.createBracedExpressionNode(SyntaxKind.BRACED_EXPRESSION, openParen, expr, closeParen); } endContext(); return bracedEXprOrAnonFuncParam; case COMMA_TOKEN: return parseImplicitAnonFunc(openParen, expr); default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.BRACED_EXPR_OR_ANON_FUNC_PARAM_RHS); if (solution.action == Action.REMOVE) { endContext(); return solution.recoveredNode; } return parseBracedExprOrAnonFuncParamRhs(solution.tokenKind, openParen, expr); } } /** * Check whether a given node is an action node. * * @param node Node to check * @return <code>true</code> if the node is an action node. <code>false</code> otherwise */ private boolean isAction(STNode node) { switch (node.kind) { case REMOTE_METHOD_CALL_ACTION: case BRACED_ACTION: case CHECK_ACTION: case START_ACTION: case TRAP_ACTION: return true; default: return false; } } /** * Check whether the given token is an end of a expression. * * @param tokenKind Token to check * @param isRhsExpr Flag indicating whether this is on a rhsExpr of a statement * @return <code>true</code> if the token represents an end of a block. <code>false</code> otherwise */ private boolean isEndOfExpression(SyntaxKind tokenKind, boolean isRhsExpr) { if (!isRhsExpr) { if (isCompoundBinaryOperator(tokenKind)) { return true; } return !isValidExprRhsStart(tokenKind); } switch (tokenKind) { case CLOSE_BRACE_TOKEN: case OPEN_BRACE_TOKEN: case CLOSE_PAREN_TOKEN: case CLOSE_BRACKET_TOKEN: case SEMICOLON_TOKEN: case COMMA_TOKEN: case PUBLIC_KEYWORD: case EOF_TOKEN: case CONST_KEYWORD: case LISTENER_KEYWORD: case EQUAL_TOKEN: case AT_TOKEN: case DOCUMENTATION_LINE: case AS_KEYWORD: case IN_KEYWORD: case BACKTICK_TOKEN: case FROM_KEYWORD: case WHERE_KEYWORD: case LET_KEYWORD: case SELECT_KEYWORD: case DO_KEYWORD: return true; default: return isSimpleType(tokenKind); } } /** * Parse basic literals. It is assumed that we come here after validation. 
     *
     * @return Parsed node
     */
    private STNode parseBasicLiteral() {
        STToken literalToken = consume();
        return STNodeFactory.createBasicLiteralNode(literalToken.kind, literalToken);
    }

    /**
     * Parse function call expression.
     * <code>function-call-expr := function-reference ( arg-list )
     * function-reference := variable-reference</code>
     *
     * @param identifier Function name
     * @return Function call expression
     */
    private STNode parseFuncCall(STNode identifier) {
        STNode openParen = parseOpenParenthesis(ParserRuleContext.ARG_LIST_START);
        STNode args = parseArgsList();
        STNode closeParen = parseCloseParenthesis();
        return STNodeFactory.createFunctionCallExpressionNode(identifier, openParen, args, closeParen);
    }

    /**
     * <p>
     * Parse error constructor expression.
     * </p>
     * <code>
     * error-constructor-expr := error ( arg-list )
     * </code>
     *
     * @return Error constructor expression
     */
    private STNode parseErrorConstructorExpr() {
        // An error constructor is syntactically a function call whose "function name"
        // is the `error` keyword, so the arg-list parsing is shared with parseFuncCall.
        return parseFuncCall(parseErrorKeyWord());
    }

    /**
     * Parse function call argument list.
     *
     * @return Parsed args list
     */
    private STNode parseArgsList() {
        startContext(ParserRuleContext.ARG_LIST);
        ArrayList<STNode> argsList = new ArrayList<>();

        STToken token = peek();
        if (isEndOfParametersList(token.kind)) {
            // Empty arg list: e.g. `foo()`.
            STNode args = STNodeFactory.createNodeList(argsList);
            endContext();
            return args;
        }

        STNode leadingComma = STNodeFactory.createEmptyNode();
        STNode arg = parseArg(leadingComma);
        if (arg == null) {
            STNode args = STNodeFactory.createNodeList(argsList);
            endContext();
            return args;
        }

        // The first arg establishes the kind ordering baseline (positional < named < rest).
        // NOTE(review): this relies on the relative ordinal order of the arg kinds in
        // SyntaxKind — confirm if that enum is reordered.
        SyntaxKind lastProcessedArgKind;
        if (SyntaxKind.POSITIONAL_ARG.ordinal() <= arg.kind.ordinal()) {
            argsList.add(arg);
            lastProcessedArgKind = arg.kind;
        } else {
            reportInvalidOrderOfArgs(peek(), SyntaxKind.POSITIONAL_ARG, arg.kind);
            lastProcessedArgKind = SyntaxKind.POSITIONAL_ARG;
        }

        parseFollowUpArgs(argsList, lastProcessedArgKind);

        STNode args = STNodeFactory.createNodeList(argsList);
        endContext();
        return args;
    }

    /**
     * Parse follow up arguments.
* * @param argsList Arguments list to which the parsed argument must be added * @param lastProcessedArgKind Kind of the argument processed prior to this */ private void parseFollowUpArgs(ArrayList<STNode> argsList, SyntaxKind lastProcessedArgKind) { STToken nextToken = peek(); while (!isEndOfParametersList(nextToken.kind)) { STNode argEnd = parseArgEnd(nextToken.kind); if (argEnd == null) { break; } nextToken = peek(); if (isEndOfParametersList(nextToken.kind)) { this.errorHandler.reportInvalidNode((STToken) argEnd, "invalid token " + argEnd); break; } STNode arg = parseArg(nextToken.kind, argEnd); if (lastProcessedArgKind.ordinal() <= arg.kind.ordinal()) { if (lastProcessedArgKind == SyntaxKind.REST_ARG && arg.kind == SyntaxKind.REST_ARG) { this.errorHandler.reportInvalidNode(nextToken, "cannot more than one rest arg"); } else { argsList.add(arg); lastProcessedArgKind = arg.kind; } } else { reportInvalidOrderOfArgs(nextToken, lastProcessedArgKind, arg.kind); } nextToken = peek(); } } private STNode parseArgEnd() { return parseArgEnd(peek().kind); } private STNode parseArgEnd(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case COMMA_TOKEN: return parseComma(); case CLOSE_PAREN_TOKEN: return null; default: Solution solution = recover(peek(), ParserRuleContext.ARG_END); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseArgEnd(solution.tokenKind); } } /** * Report invalid order of args. * * @param token Staring token of the arg. * @param lastArgKind Kind of the previously processed arg * @param argKind Current arg */ private void reportInvalidOrderOfArgs(STToken token, SyntaxKind lastArgKind, SyntaxKind argKind) { this.errorHandler.reportInvalidNode(token, "cannot have a " + argKind + " after the " + lastArgKind); } /** * Parse function call argument. 
     *
     * @param leadingComma Comma that occurs before the param
     * @return Parsed argument node
     */
    private STNode parseArg(STNode leadingComma) {
        STToken token = peek();
        return parseArg(token.kind, leadingComma);
    }

    /**
     * Parse a single argument: a rest arg (<code>...expr</code>), a named or
     * positional arg starting with an identifier, or any other expression as a
     * positional arg. Returns <code>null</code> when the arg list is already closed.
     *
     * @param kind Kind of the next token
     * @param leadingComma Comma that occurs before the arg
     * @return Parsed argument node, or <code>null</code> at the close paren
     */
    private STNode parseArg(SyntaxKind kind, STNode leadingComma) {
        STNode arg;
        switch (kind) {
            case ELLIPSIS_TOKEN:
                STToken ellipsis = consume();
                STNode expr = parseExpression();
                arg = STNodeFactory.createRestArgumentNode(leadingComma, ellipsis, expr);
                break;
            case IDENTIFIER_TOKEN:
                // An identifier can start either a named arg (`name = expr`)
                // or a positional arg; disambiguate on the following token.
                arg = parseNamedOrPositionalArg(leadingComma, kind);
                break;
            case CLOSE_PAREN_TOKEN:
                return null;
            default:
                if (isValidExprStart(kind)) {
                    expr = parseExpression();
                    arg = STNodeFactory.createPositionalArgumentNode(leadingComma, expr);
                    break;
                }

                Solution solution = recover(peek(), ParserRuleContext.ARG_START_OR_ARG_LIST_END, leadingComma);

                // If the parser recovered by removing a token, use the recovered node;
                // otherwise retry with the recovered token kind.
                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }

                return parseArg(solution.tokenKind, leadingComma);
        }

        return arg;
    }

    /**
     * Parse positional or named arg. This method assumes peek()/peek(1)
     * is always an identifier.
     *
     * @param leadingComma Comma that occurs before the param
     * @param nextTokenKind Kind of the next token (always an identifier here)
     * @return Parsed argument node
     */
    private STNode parseNamedOrPositionalArg(STNode leadingComma, SyntaxKind nextTokenKind) {
        STNode argNameOrExpr = parseTerminalExpression(peek().kind, true, false);
        STToken secondToken = peek();
        switch (secondToken.kind) {
            case EQUAL_TOKEN:
                // `name = expr` is a named arg.
                STNode equal = parseAssignOp();
                STNode valExpr = parseExpression();
                return STNodeFactory.createNamedArgumentNode(leadingComma, argNameOrExpr, equal, valExpr);
            case COMMA_TOKEN:
            case CLOSE_PAREN_TOKEN:
                // Bare identifier followed by a separator/close: positional arg.
                return STNodeFactory.createPositionalArgumentNode(leadingComma, argNameOrExpr);
            default:
                // Identifier is the start of a larger expression (e.g. `a + b`):
                // finish the expression and treat it as a positional arg.
                argNameOrExpr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, argNameOrExpr, false, false);
                return STNodeFactory.createPositionalArgumentNode(leadingComma, argNameOrExpr);
        }
    }

    /**
     * Parse object type descriptor.
* * @return Parsed node */ private STNode parseObjectTypeDescriptor() { startContext(ParserRuleContext.OBJECT_TYPE_DESCRIPTOR); STNode objectTypeQualifiers = parseObjectTypeQualifiers(); STNode objectKeyword = parseObjectKeyword(); STNode openBrace = parseOpenBrace(); STNode objectMembers = parseObjectMembers(); STNode closeBrace = parseCloseBrace(); endContext(); return STNodeFactory.createObjectTypeDescriptorNode(objectTypeQualifiers, objectKeyword, openBrace, objectMembers, closeBrace); } /** * Parse object type qualifiers. * * @return Parsed node */ private STNode parseObjectTypeQualifiers() { STToken nextToken = peek(); return parseObjectTypeQualifiers(nextToken.kind); } private STNode parseObjectTypeQualifiers(SyntaxKind kind) { List<STNode> qualifiers = new ArrayList<>(); STNode firstQualifier; switch (kind) { case CLIENT_KEYWORD: STNode clientKeyword = parseClientKeyword(); firstQualifier = clientKeyword; break; case ABSTRACT_KEYWORD: STNode abstractKeyword = parseAbstractKeyword(); firstQualifier = abstractKeyword; break; case OBJECT_KEYWORD: return STNodeFactory.createNodeList(qualifiers); default: Solution solution = recover(peek(), ParserRuleContext.OBJECT_TYPE_FIRST_QUALIFIER); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseObjectTypeQualifiers(solution.tokenKind); } STNode secondQualifier = parseObjectTypeSecondQualifier(firstQualifier); qualifiers.add(firstQualifier); if (secondQualifier != null) { qualifiers.add(secondQualifier); } return STNodeFactory.createNodeList(qualifiers); } private STNode parseObjectTypeSecondQualifier(STNode firstQualifier) { STToken nextToken = peek(); return parseObjectTypeSecondQualifier(nextToken.kind, firstQualifier); } private STNode parseObjectTypeSecondQualifier(SyntaxKind kind, STNode firstQualifier) { if (firstQualifier.kind != kind) { switch (kind) { case CLIENT_KEYWORD: return parseClientKeyword(); case ABSTRACT_KEYWORD: return parseAbstractKeyword(); case OBJECT_KEYWORD: 
                    return null;
                default:
                    break;
            }
        }

        // Either the qualifier is repeated (first == next) or the token is
        // unexpected: let the error handler recover.
        Solution solution = recover(peek(), ParserRuleContext.OBJECT_TYPE_SECOND_QUALIFIER, firstQualifier);
        if (solution.action == Action.REMOVE) {
            return solution.recoveredNode;
        }

        return parseObjectTypeSecondQualifier(solution.tokenKind, firstQualifier);
    }

    /**
     * Parse client keyword.
     *
     * @return Parsed node
     */
    private STNode parseClientKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.CLIENT_KEYWORD) {
            return consume();
        } else {
            Solution sol = recover(token, ParserRuleContext.CLIENT_KEYWORD);
            return sol.recoveredNode;
        }
    }

    /**
     * Parse abstract keyword.
     *
     * @return Parsed node
     */
    private STNode parseAbstractKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.ABSTRACT_KEYWORD) {
            return consume();
        } else {
            Solution sol = recover(token, ParserRuleContext.ABSTRACT_KEYWORD);
            return sol.recoveredNode;
        }
    }

    /**
     * Parse object keyword.
     *
     * @return Parsed node
     */
    private STNode parseObjectKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.OBJECT_KEYWORD) {
            return consume();
        } else {
            Solution sol = recover(token, ParserRuleContext.OBJECT_KEYWORD);
            return sol.recoveredNode;
        }
    }

    /**
     * Parse object members.
* * @return Parsed node */ private STNode parseObjectMembers() { ArrayList<STNode> objectMembers = new ArrayList<>(); while (!isEndOfObjectTypeNode()) { startContext(ParserRuleContext.OBJECT_MEMBER); STNode member = parseObjectMember(peek().kind); endContext(); if (member == null) { break; } objectMembers.add(member); } return STNodeFactory.createNodeList(objectMembers); } private STNode parseObjectMember() { STToken nextToken = peek(); return parseObjectMember(nextToken.kind); } private STNode parseObjectMember(SyntaxKind nextTokenKind) { STNode metadata; switch (nextTokenKind) { case EOF_TOKEN: case CLOSE_BRACE_TOKEN: return null; case ASTERISK_TOKEN: case PUBLIC_KEYWORD: case PRIVATE_KEYWORD: case REMOTE_KEYWORD: case FUNCTION_KEYWORD: metadata = createEmptyMetadata(); break; case DOCUMENTATION_LINE: case AT_TOKEN: metadata = parseMetaData(nextTokenKind); nextTokenKind = peek().kind; break; default: if (isTypeStartingToken(nextTokenKind)) { metadata = createEmptyMetadata(); break; } Solution solution = recover(peek(), ParserRuleContext.OBJECT_MEMBER_START); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseObjectMember(solution.tokenKind); } return parseObjectMember(nextTokenKind, metadata); } private STNode parseObjectMember(SyntaxKind nextTokenKind, STNode metadata) { STNode member; switch (nextTokenKind) { case EOF_TOKEN: case CLOSE_BRACE_TOKEN: return null; case ASTERISK_TOKEN: STNode asterisk = consume(); STNode type = parseTypeReference(); STNode semicolonToken = parseSemicolon(); member = STNodeFactory.createTypeReferenceNode(asterisk, type, semicolonToken); break; case PUBLIC_KEYWORD: case PRIVATE_KEYWORD: STNode visibilityQualifier = parseObjectMemberVisibility(); member = parseObjectMethodOrField(metadata, visibilityQualifier); break; case REMOTE_KEYWORD: member = parseObjectMethodOrField(metadata, STNodeFactory.createEmptyNode()); break; case FUNCTION_KEYWORD: member = parseObjectMethod(metadata, 
STNodeFactory.createEmptyNode()); break; default: if (isTypeStartingToken(nextTokenKind)) { member = parseObjectField(metadata, STNodeFactory.createEmptyNode()); break; } Solution solution = recover(peek(), ParserRuleContext.OBJECT_MEMBER_WITHOUT_METADATA); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseObjectMember(solution.tokenKind); } return member; } private STNode parseObjectMethodOrField(STNode metadata, STNode methodQualifiers) { STToken nextToken = peek(1); STToken nextNextToken = peek(2); return parseObjectMethodOrField(nextToken.kind, nextNextToken.kind, metadata, methodQualifiers); } /** * Parse an object member, given the visibility modifier. Object member can have * only one visibility qualifier. This mean the methodQualifiers list can have * one qualifier at-most. * * @param visibilityQualifiers Visibility qualifiers. A modifier can be * a syntax node with either 'PUBLIC' or 'PRIVATE'. * @param nextTokenKind Next token kind * @param nextNextTokenKind Kind of the token after the * @param metadata Metadata * @param visibilityQualifiers Visibility qualifiers * @return Parse object member node */ private STNode parseObjectMethodOrField(SyntaxKind nextTokenKind, SyntaxKind nextNextTokenKind, STNode metadata, STNode visibilityQualifiers) { switch (nextTokenKind) { case REMOTE_KEYWORD: STNode remoteKeyword = parseRemoteKeyword(); ArrayList<STNode> methodQualifiers = new ArrayList<>(); if (!isEmpty(visibilityQualifiers)) { methodQualifiers.add(visibilityQualifiers); } methodQualifiers.add(remoteKeyword); return parseObjectMethod(metadata, STNodeFactory.createNodeList(methodQualifiers)); case FUNCTION_KEYWORD: return parseObjectMethod(metadata, visibilityQualifiers); case IDENTIFIER_TOKEN: if (nextNextTokenKind != SyntaxKind.OPEN_PAREN_TOKEN) { return parseObjectField(metadata, visibilityQualifiers); } break; default: if (isTypeStartingToken(nextTokenKind)) { return parseObjectField(metadata, visibilityQualifiers); } 
break; } Solution solution = recover(peek(), ParserRuleContext.OBJECT_FUNC_OR_FIELD_WITHOUT_VISIBILITY, metadata, visibilityQualifiers); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseObjectMethodOrField(solution.tokenKind, nextTokenKind, metadata, visibilityQualifiers); } /** * Parse object visibility. Visibility can be <code>public</code> or <code>private</code>. * * @return Parsed node */ private STNode parseObjectMemberVisibility() { STToken token = peek(); if (token.kind == SyntaxKind.PUBLIC_KEYWORD || token.kind == SyntaxKind.PRIVATE_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.PUBLIC_KEYWORD); return sol.recoveredNode; } } private STNode parseRemoteKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.REMOTE_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.REMOTE_KEYWORD); return sol.recoveredNode; } } private STNode parseObjectField(STNode metadata, STNode methodQualifiers) { STNode type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER); STNode fieldName = parseVariableName(); return parseObjectFieldRhs(metadata, methodQualifiers, type, fieldName); } /** * Parse object field rhs, and complete the object field parsing. Returns the parsed object field. * * @param metadata Metadata * @param visibilityQualifier Visibility qualifier * @param type Type descriptor * @param fieldName Field name * @return Parsed object field */ private STNode parseObjectFieldRhs(STNode metadata, STNode visibilityQualifier, STNode type, STNode fieldName) { STToken nextToken = peek(); return parseObjectFieldRhs(nextToken.kind, metadata, visibilityQualifier, type, fieldName); } /** * Parse object field rhs, and complete the object field parsing. Returns the parsed object field. 
* * @param nextTokenKind Kind of the next token * @param metadata Metadata * @param visibilityQualifier Visibility qualifier * @param type Type descriptor * @param fieldName Field name * @return Parsed object field */ private STNode parseObjectFieldRhs(SyntaxKind nextTokenKind, STNode metadata, STNode visibilityQualifier, STNode type, STNode fieldName) { STNode equalsToken; STNode expression; STNode semicolonToken; switch (nextTokenKind) { case SEMICOLON_TOKEN: equalsToken = STNodeFactory.createEmptyNode(); expression = STNodeFactory.createEmptyNode(); semicolonToken = parseSemicolon(); break; case EQUAL_TOKEN: equalsToken = parseAssignOp(); expression = parseExpression(); semicolonToken = parseSemicolon(); break; default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.OBJECT_FIELD_RHS, metadata, visibilityQualifier, type, fieldName); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseObjectFieldRhs(solution.tokenKind, metadata, visibilityQualifier, type, fieldName); } return STNodeFactory.createObjectFieldNode(metadata, visibilityQualifier, type, fieldName, equalsToken, expression, semicolonToken); } private STNode parseObjectMethod(STNode metadata, STNode methodQualifiers) { return parseFuncDefOrFuncTypeDesc(metadata, methodQualifiers, true); } /** * Parse if-else statement. * <code> * if-else-stmt := if expression block-stmt [else-block] * </code> * * @return If-else block */ private STNode parseIfElseBlock() { startContext(ParserRuleContext.IF_BLOCK); STNode ifKeyword = parseIfKeyword(); STNode condition = parseExpression(); STNode ifBody = parseBlockNode(); endContext(); STNode elseBody = parseElseBlock(); return STNodeFactory.createIfElseStatementNode(ifKeyword, condition, ifBody, elseBody); } /** * Parse if-keyword. 
* * @return Parsed if-keyword node */ private STNode parseIfKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.IF_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.IF_KEYWORD); return sol.recoveredNode; } } /** * Parse else-keyword. * * @return Parsed else keyword node */ private STNode parseElseKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.ELSE_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.ELSE_KEYWORD); return sol.recoveredNode; } } /** * Parse block node. * <code> * block-stmt := { sequence-stmt } * sequence-stmt := statement* * </code> * * @return Parse block node */ private STNode parseBlockNode() { startContext(ParserRuleContext.BLOCK_STMT); STNode openBrace = parseOpenBrace(); STNode stmts = parseStatements(); STNode closeBrace = parseCloseBrace(); endContext(); return STNodeFactory.createBlockStatementNode(openBrace, stmts, closeBrace); } /** * Parse else block. * <code>else-block := else (if-else-stmt | block-stmt)</code> * * @return Else block */ private STNode parseElseBlock() { STToken nextToken = peek(); if (nextToken.kind != SyntaxKind.ELSE_KEYWORD) { return STNodeFactory.createEmptyNode(); } STNode elseKeyword = parseElseKeyword(); STNode elseBody = parseElseBody(); return STNodeFactory.createElseBlockNode(elseKeyword, elseBody); } /** * Parse else node body. 
* <code>else-body := if-else-stmt | block-stmt</code> * * @return Else node body */ private STNode parseElseBody() { STToken nextToken = peek(); return parseElseBody(nextToken.kind); } private STNode parseElseBody(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case IF_KEYWORD: return parseIfElseBlock(); case OPEN_BRACE_TOKEN: return parseBlockNode(); default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.ELSE_BODY); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseElseBody(solution.tokenKind); } } /** * Parse while statement. * <code>while-stmt := while expression block-stmt</code> * * @return While statement */ private STNode parseWhileStatement() { startContext(ParserRuleContext.WHILE_BLOCK); STNode whileKeyword = parseWhileKeyword(); STNode condition = parseExpression(); STNode whileBody = parseBlockNode(); endContext(); return STNodeFactory.createWhileStatementNode(whileKeyword, condition, whileBody); } /** * Parse while-keyword. * * @return While-keyword node */ private STNode parseWhileKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.WHILE_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.WHILE_KEYWORD); return sol.recoveredNode; } } /** * Parse panic statement. * <code>panic-stmt := panic expression ;</code> * * @return Panic statement */ private STNode parsePanicStatement() { startContext(ParserRuleContext.PANIC_STMT); STNode panicKeyword = parsePanicKeyword(); STNode expression = parseExpression(); STNode semicolon = parseSemicolon(); endContext(); return STNodeFactory.createPanicStatementNode(panicKeyword, expression, semicolon); } /** * Parse panic-keyword. 
* * @return Panic-keyword node */ private STNode parsePanicKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.PANIC_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.PANIC_KEYWORD); return sol.recoveredNode; } } /** * Parse check expression. This method is used to parse both check expression * as well as check action. * * <p> * <code> * checking-expr := checking-keyword expression * checking-action := checking-keyword action * </code> * * @param allowActions Allow actions * @param isRhsExpr Is rhs expression * @return Check expression node */ private STNode parseCheckExpression(boolean isRhsExpr, boolean allowActions) { STNode checkingKeyword = parseCheckingKeyword(); STNode expr = parseExpression(OperatorPrecedence.EXPRESSION_ACTION, isRhsExpr, allowActions); if (isAction(expr)) { return STNodeFactory.createCheckExpressionNode(SyntaxKind.CHECK_ACTION, checkingKeyword, expr); } else { return STNodeFactory.createCheckExpressionNode(SyntaxKind.CHECK_EXPRESSION, checkingKeyword, expr); } } /** * Parse checking keyword. * <p> * <code> * checking-keyword := check | checkpanic * </code> * * @return Parsed node */ private STNode parseCheckingKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.CHECK_KEYWORD || token.kind == SyntaxKind.CHECKPANIC_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.CHECKING_KEYWORD); return sol.recoveredNode; } } /** * * Parse continue statement. * <code>continue-stmt := continue ; </code> * * @return continue statement */ private STNode parseContinueStatement() { startContext(ParserRuleContext.CONTINUE_STATEMENT); STNode continueKeyword = parseContinueKeyword(); STNode semicolon = parseSemicolon(); endContext(); return STNodeFactory.createContinueStatementNode(continueKeyword, semicolon); } /** * Parse continue-keyword. 
* * @return continue-keyword node */ private STNode parseContinueKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.CONTINUE_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.CONTINUE_KEYWORD); return sol.recoveredNode; } } /** * Parse return statement. * <code>return-stmt := return [ action-or-expr ] ;</code> * * @return Return statement */ private STNode parseReturnStatement() { startContext(ParserRuleContext.RETURN_STMT); STNode returnKeyword = parseReturnKeyword(); STNode returnRhs = parseReturnStatementRhs(returnKeyword); endContext(); return returnRhs; } /** * Parse return-keyword. * * @return Return-keyword node */ private STNode parseReturnKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.RETURN_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.RETURN_KEYWORD); return sol.recoveredNode; } } /** * Parse break statement. * <code>break-stmt := break ; </code> * * @return break statement */ private STNode parseBreakStatement() { startContext(ParserRuleContext.BREAK_STATEMENT); STNode breakKeyword = parseBreakKeyword(); STNode semicolon = parseSemicolon(); endContext(); return STNodeFactory.createBreakStatementNode(breakKeyword, semicolon); } /** * Parse break-keyword. * * @return break-keyword node */ private STNode parseBreakKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.BREAK_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.BREAK_KEYWORD); return sol.recoveredNode; } } /** * <p> * Parse the right hand side of a return statement. 
* </p> * <code> * return-stmt-rhs := ; | action-or-expr ; * </code> * * @return Parsed node */ private STNode parseReturnStatementRhs(STNode returnKeyword) { STNode expr; STToken token = peek(); switch (token.kind) { case SEMICOLON_TOKEN: expr = STNodeFactory.createEmptyNode(); break; default: expr = parseActionOrExpression(); break; } STNode semicolon = parseSemicolon(); return STNodeFactory.createReturnStatementNode(returnKeyword, expr, semicolon); } /** * Parse mapping constructor expression. * <p> * <code>mapping-constructor-expr := { [field (, field)*] }</code> * * @return Parsed node */ private STNode parseMappingConstructorExpr() { startContext(ParserRuleContext.MAPPING_CONSTRUCTOR); STNode openBrace = parseOpenBrace(); STNode fields = parseMappingConstructorFields(); STNode closeBrace = parseCloseBrace(); endContext(); return STNodeFactory.createMappingConstructorExpressionNode(openBrace, fields, closeBrace); } /** * Parse mapping constructor fields. * * @return Parsed node */ private STNode parseMappingConstructorFields() { List<STNode> fields = new ArrayList<>(); STToken nextToken = peek(); if (isEndOfMappingConstructor(nextToken.kind)) { return STNodeFactory.createNodeList(fields); } STNode field = parseMappingField(ParserRuleContext.FIRST_MAPPING_FIELD); fields.add(field); return parseMappingConstructorFields(fields); } private STNode parseMappingConstructorFields(List<STNode> fields) { STToken nextToken; STNode mappingFieldEnd; nextToken = peek(); while (!isEndOfMappingConstructor(nextToken.kind)) { mappingFieldEnd = parseMappingFieldEnd(nextToken.kind); if (mappingFieldEnd == null) { break; } fields.add(mappingFieldEnd); STNode field = parseMappingField(ParserRuleContext.MAPPING_FIELD); fields.add(field); nextToken = peek(); } return STNodeFactory.createNodeList(fields); } private STNode parseMappingFieldEnd() { return parseMappingFieldEnd(peek().kind); } private STNode parseMappingFieldEnd(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case 
COMMA_TOKEN: return parseComma(); case CLOSE_BRACE_TOKEN: return null; default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.MAPPING_FIELD_END); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseMappingFieldEnd(solution.tokenKind); } } private boolean isEndOfMappingConstructor(SyntaxKind tokenKind) { switch (tokenKind) { case IDENTIFIER_TOKEN: return false; case EOF_TOKEN: case AT_TOKEN: case DOCUMENTATION_LINE: case CLOSE_BRACE_TOKEN: case SEMICOLON_TOKEN: case PUBLIC_KEYWORD: case PRIVATE_KEYWORD: case FUNCTION_KEYWORD: case RETURNS_KEYWORD: case SERVICE_KEYWORD: case TYPE_KEYWORD: case LISTENER_KEYWORD: case CONST_KEYWORD: case FINAL_KEYWORD: case RESOURCE_KEYWORD: return true; default: return isSimpleType(tokenKind); } } /** * Parse mapping constructor field. * <p> * <code>field := specific-field | computed-name-field | spread-field</code> * * @param fieldContext Context of the mapping field * @param leadingComma Leading comma * @return Parsed node */ private STNode parseMappingField(ParserRuleContext fieldContext) { STToken nextToken = peek(); return parseMappingField(nextToken.kind, fieldContext); } private STNode parseMappingField(SyntaxKind tokenKind, ParserRuleContext fieldContext) { switch (tokenKind) { case IDENTIFIER_TOKEN: return parseSpecificFieldWithOptionValue(); case STRING_LITERAL: STNode key = parseStringLiteral(); STNode colon = parseColon(); STNode valueExpr = parseExpression(); return STNodeFactory.createSpecificFieldNode(key, colon, valueExpr); case OPEN_BRACKET_TOKEN: return parseComputedField(); case ELLIPSIS_TOKEN: STNode ellipsis = parseEllipsis(); STNode expr = parseExpression(); return STNodeFactory.createSpreadFieldNode(ellipsis, expr); case CLOSE_BRACE_TOKEN: if (fieldContext == ParserRuleContext.FIRST_MAPPING_FIELD) { return null; } default: STToken token = peek(); Solution solution = recover(token, fieldContext, fieldContext); if (solution.action == Action.REMOVE) { 
return solution.recoveredNode; } return parseMappingField(solution.tokenKind, fieldContext); } } /** * Parse mapping constructor specific-field with an optional value. * * @param leadingComma * @return Parsed node */ private STNode parseSpecificFieldWithOptionValue() { STNode key = parseIdentifier(ParserRuleContext.MAPPING_FIELD_NAME); return parseSpecificFieldRhs(key); } private STNode parseSpecificFieldRhs(STNode key) { STToken nextToken = peek(); return parseSpecificFieldRhs(nextToken.kind, key); } private STNode parseSpecificFieldRhs(SyntaxKind tokenKind, STNode key) { STNode colon; STNode valueExpr; switch (tokenKind) { case COLON_TOKEN: colon = parseColon(); valueExpr = parseExpression(); break; case COMMA_TOKEN: colon = STNodeFactory.createEmptyNode(); valueExpr = STNodeFactory.createEmptyNode(); break; default: if (isEndOfMappingConstructor(tokenKind)) { colon = STNodeFactory.createEmptyNode(); valueExpr = STNodeFactory.createEmptyNode(); break; } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.SPECIFIC_FIELD_RHS, key); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseSpecificFieldRhs(solution.tokenKind, key); } return STNodeFactory.createSpecificFieldNode(key, colon, valueExpr); } /** * Parse string literal. * * @return Parsed node */ private STNode parseStringLiteral() { STToken token = peek(); if (token.kind == SyntaxKind.STRING_LITERAL) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.STRING_LITERAL); return sol.recoveredNode; } } /** * Parse colon token. * * @return Parsed node */ private STNode parseColon() { STToken token = peek(); if (token.kind == SyntaxKind.COLON_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.COLON); return sol.recoveredNode; } } /** * Parse computed-name-field of a mapping constructor expression. 
* <p> * <code>computed-name-field := [ field-name-expr ] : value-expr</code> * * @param leadingComma Leading comma * @return Parsed node */ private STNode parseComputedField() { startContext(ParserRuleContext.COMPUTED_FIELD_NAME); STNode openBracket = parseOpenBracket(); STNode fieldNameExpr = parseExpression(); STNode closeBracket = parseCloseBracket(); endContext(); STNode colon = parseColon(); STNode valueExpr = parseExpression(); return STNodeFactory.createComputedNameFieldNode(openBracket, fieldNameExpr, closeBracket, colon, valueExpr); } /** * Parse open bracket. * * @return Parsed node */ private STNode parseOpenBracket() { STToken token = peek(); if (token.kind == SyntaxKind.OPEN_BRACKET_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.OPEN_BRACKET); return sol.recoveredNode; } } /** * <p> * Parse compound assignment statement, which takes the following format. * </p> * <code>assignment-stmt := lvexpr CompoundAssignmentOperator action-or-expr ;</code> * * @return Parsed node */ private STNode parseCompoundAssignmentStmt() { startContext(ParserRuleContext.COMPOUND_ASSIGNMENT_STMT); STNode varName = parseVariableName(); STNode compoundAssignmentStmt = parseCompoundAssignmentStmtRhs(varName); endContext(); return compoundAssignmentStmt; } /** * <p> * Parse the RHS portion of the compound assignment. * </p> * <code>compound-assignment-stmt-rhs := CompoundAssignmentOperator action-or-expr ;</code> * * @param lvExpr LHS expression * @return Parsed node */ private STNode parseCompoundAssignmentStmtRhs(STNode lvExpr) { validateLVExpr(lvExpr); STNode binaryOperator = parseCompoundBinaryOperator(); STNode equalsToken = parseAssignOp(); STNode expr = parseActionOrExpression(); STNode semicolon = parseSemicolon(); endContext(); return STNodeFactory.createCompoundAssignmentStatementNode(lvExpr, binaryOperator, equalsToken, expr, semicolon); } /** * Parse compound binary operator. 
* <code>BinaryOperator := + | - | * | / | & | | | ^ | << | >> | >>></code> * * @return Parsed node */ private STNode parseCompoundBinaryOperator() { STToken token = peek(); if (isCompoundBinaryOperator(token.kind)) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.COMPOUND_BINARY_OPERATOR); return sol.recoveredNode; } } /** * Parse service declaration. * <p> * <code> * service-decl := metadata service [variable-name] on expression-list service-body-block * <br/> * expression-list := expression (, expression)* * </code> * * @param metadata Metadata * @return Parsed node */ private STNode parseServiceDecl(STNode metadata) { startContext(ParserRuleContext.SERVICE_DECL); STNode serviceKeyword = parseServiceKeyword(); STNode serviceDecl = parseServiceRhs(metadata, serviceKeyword); endContext(); return serviceDecl; } /** * Parse rhs of the service declaration. * <p> * <code> * service-rhs := [variable-name] on expression-list service-body-block * </code> * * @param metadata Metadata * @param serviceKeyword Service keyword * @return Parsed node */ private STNode parseServiceRhs(STNode metadata, STNode serviceKeyword) { STNode serviceName = parseServiceName(); STNode onKeyword = parseOnKeyword(); STNode expressionList = parseListeners(); STNode serviceBody = parseServiceBody(); onKeyword = addDiagnosticIfListEmpty(expressionList, onKeyword, DiagnosticErrorCode.ERROR_MISSING_EXPRESSION); return STNodeFactory.createServiceDeclarationNode(metadata, serviceKeyword, serviceName, onKeyword, expressionList, serviceBody); } private STNode parseServiceName() { STToken nextToken = peek(); return parseServiceName(nextToken.kind); } private STNode parseServiceName(SyntaxKind kind) { switch (kind) { case IDENTIFIER_TOKEN: return parseIdentifier(ParserRuleContext.SERVICE_NAME); case ON_KEYWORD: return STNodeFactory.createEmptyNode(); default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.OPTIONAL_SERVICE_NAME); if 
(solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseServiceName(solution.tokenKind); } } /** * Parse service keyword. * * @return Parsed node */ private STNode parseServiceKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.SERVICE_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.SERVICE_KEYWORD); return sol.recoveredNode; } } /** * Check whether the given token kind is a compound binary operator. * <p> * <code>compound-binary-operator := + | - | * | / | & | | | ^ | << | >> | >>></code> * * @param tokenKind STToken kind * @return <code>true</code> if the token kind refers to a binary operator. <code>false</code> otherwise */ private boolean isCompoundBinaryOperator(SyntaxKind tokenKind) { switch (tokenKind) { case PLUS_TOKEN: case MINUS_TOKEN: case SLASH_TOKEN: case ASTERISK_TOKEN: case BITWISE_AND_TOKEN: case BITWISE_XOR_TOKEN: case PIPE_TOKEN: return getNextNextToken(tokenKind).kind == SyntaxKind.EQUAL_TOKEN; default: return false; } } /** * Parse on keyword. * * @return Parsed node */ private STNode parseOnKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.ON_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.ON_KEYWORD); return sol.recoveredNode; } } /** * Parse listener references. 
* <p> * <code>expression-list := expression (, expression)*</code> * * @return Parsed node */ private STNode parseListeners() { startContext(ParserRuleContext.LISTENERS_LIST); List<STNode> listeners = new ArrayList<>(); STToken nextToken = peek(); if (isEndOfExpressionsList(nextToken.kind)) { endContext(); return STNodeFactory.createEmptyNodeList(); } STNode leadingComma = STNodeFactory.createEmptyNode(); STNode exprListItem = parseExpressionListItem(leadingComma); listeners.add(exprListItem); nextToken = peek(); while (!isEndOfExpressionsList(nextToken.kind)) { leadingComma = parseComma(); exprListItem = parseExpressionListItem(leadingComma); listeners.add(exprListItem); nextToken = peek(); } endContext(); return STNodeFactory.createNodeList(listeners); } private boolean isEndOfExpressionsList(SyntaxKind tokenKind) { switch (tokenKind) { case COMMA_TOKEN: return false; case EOF_TOKEN: case SEMICOLON_TOKEN: case CLOSE_BRACKET_TOKEN: case CLOSE_BRACE_TOKEN: case CLOSE_PAREN_TOKEN: case OPEN_BRACE_TOKEN: return true; default: return !isValidExprStart(tokenKind); } } /** * Parse expression list item. * * @param leadingComma Leading comma * @return Parsed node */ private STNode parseExpressionListItem(STNode leadingComma) { STNode expr = parseExpression(); return STNodeFactory.createExpressionListItemNode(leadingComma, expr); } /** * Parse service body. * <p> * <code> * service-body-block := { service-method-defn* } * </code> * * @return Parsed node */ private STNode parseServiceBody() { STNode openBrace = parseOpenBrace(); STNode resources = parseResources(); STNode closeBrace = parseCloseBrace(); return STNodeFactory.createServiceBodyNode(openBrace, resources, closeBrace); } /** * Parse service resource definitions. 
* * @return Parsed node */ private STNode parseResources() { List<STNode> resources = new ArrayList<>(); STToken nextToken = peek(); while (!isEndOfServiceDecl(nextToken.kind)) { STNode serviceMethod = parseResource(); if (serviceMethod == null) { break; } resources.add(serviceMethod); nextToken = peek(); } return STNodeFactory.createNodeList(resources); } private boolean isEndOfServiceDecl(SyntaxKind tokenKind) { switch (tokenKind) { case CLOSE_BRACE_TOKEN: case EOF_TOKEN: case CLOSE_BRACE_PIPE_TOKEN: case TYPE_KEYWORD: case SERVICE_KEYWORD: return true; default: return false; } } /** * Parse resource definition (i.e. service-method-defn). * <p> * <code> * service-body-block := { service-method-defn* } * <br/> * service-method-defn := metadata [resource] function identifier function-signature method-defn-body * </code> * * @return Parsed node */ private STNode parseResource() { STToken nextToken = peek(); return parseResource(nextToken.kind); } private STNode parseResource(SyntaxKind nextTokenKind) { STNode metadata; switch (nextTokenKind) { case RESOURCE_KEYWORD: case FUNCTION_KEYWORD: metadata = createEmptyMetadata(); break; case DOCUMENTATION_LINE: case AT_TOKEN: metadata = parseMetaData(nextTokenKind); nextTokenKind = peek().kind; break; default: if (isEndOfServiceDecl(nextTokenKind)) { return null; } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.RESOURCE_DEF); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseResource(solution.tokenKind); } return parseResource(nextTokenKind, metadata); } private STNode parseResource(SyntaxKind nextTokenKind, STNode metadata) { switch (nextTokenKind) { case RESOURCE_KEYWORD: STNode resourceKeyword = parseResourceKeyword(); return parseFuncDefinition(metadata, resourceKeyword, false); case FUNCTION_KEYWORD: return parseFuncDefinition(metadata, STNodeFactory.createEmptyNode(), false); default: STToken token = peek(); Solution solution = recover(token, 
ParserRuleContext.RESOURCE_DEF, metadata); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseResource(solution.tokenKind, metadata); } } /** * Parse resource keyword. * * @return Parsed node */ private STNode parseResourceKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.RESOURCE_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.RESOURCE_KEYWORD); return sol.recoveredNode; } } /** * Check whether next construct is a service declaration or not. This method is * used to determine whether an end-of-block is reached, if the next token is * a service-keyword. Because service-keyword can be used in statements as well * as in top-level node (service-decl). We have reached a service-decl, then * it could be due to missing close-brace at the end of the current block. * * @return <code>true</code> if the next construct is a service declaration. * <code>false</code> otherwise */
There is no such decorator. So, until it's implemented, we can't use it.
/**
 * Translates the given platform-specific {@link PlatformConfiguration} into dekorate
 * decorators and registers each of them on the session, scoped to the given deployment
 * target (labels, annotations, env vars, probes, volumes, init/sidecar containers, ...).
 *
 * @param session the dekorate session that collects resource decorators
 * @param target  the deployment target the decorators are scoped to (e.g. kubernetes, openshift, knative)
 * @param name    the application/resource name the decorators refer to
 * @param config  the user-supplied platform configuration to translate
 */
private void applyConfig(Session session, String target, String name, PlatformConfiguration config) {
    // Map.forEach is the idiomatic (and cheaper) form of entrySet().stream().forEach(...).
    config.getLabels().forEach(
            (k, v) -> session.resources().decorate(target, new AddLabelDecorator(new Label(k, v))));
    config.getAnnotations().forEach(
            (k, v) -> session.resources().decorate(target, new AddAnnotationDecorator(new Annotation(k, v))));
    // EnvConverter consumes the whole Map.Entry, so iterate the entry set directly — no stream needed.
    config.getEnvVars().entrySet().forEach(
            e -> session.resources().decorate(target, new AddEnvVarDecorator(EnvConverter.convert(e))));
    // NOTE(review): the configured working dir 'w' is ignored and the DEPLOY constant is passed
    // instead. This matches the related review comment ("there is no such decorator"), but
    // confirm it is still intentional.
    config.getWorkingDir().ifPresent(
            w -> session.resources().decorate(target, new ApplyWorkingDirDecorator(name, DEPLOY)));
    // toArray(new String[0]) is the idiomatic form and at least as fast on modern JVMs.
    config.getCommand().ifPresent(
            c -> session.resources().decorate(target,
                    new ApplyCommandDecorator(name, c.toArray(new String[0]))));
    config.getArguments().ifPresent(
            a -> session.resources().decorate(target,
                    new ApplyArgsDecorator(name, a.toArray(new String[0]))));
    config.getServiceAccount().ifPresent(
            s -> session.resources().decorate(target, new ApplyServiceAccountNamedDecorator(name, s)));
    // NOTE(review): unlike every other decorator here, this one is registered without 'target' —
    // verify whether the image pull policy is really meant to apply to all targets.
    session.resources().decorate(new ApplyImagePullPolicyDecorator(config.getImagePullPolicy()));
    config.getImagePullSecrets().ifPresent(
            l -> l.forEach(s -> session.resources().decorate(target, new AddImagePullSecretDecorator(name, s))));
    config.getLivenessProbe().ifPresent(
            p -> session.resources().decorate(target,
                    new AddLivenessProbeDecorator(name, ProbeConverter.convert(p))));
    config.getReadinessProbe().ifPresent(
            p -> session.resources().decorate(target,
                    new AddReadinessProbeDecorator(name, ProbeConverter.convert(p))));
    config.getMounts().entrySet().forEach(
            e -> session.resources().decorate(target, new AddMountDecorator(MountConverter.convert(e))));
    config.getSecretVolumes().entrySet().forEach(
            e -> session.resources().decorate(target,
                    new AddSecretVolumeDecorator(SecretVolumeConverter.convert(e))));
    config.getConfigMapVolumes().entrySet().forEach(
            e -> session.resources().decorate(target,
                    new AddConfigMapVolumeDecorator(ConfigMapVolumeConverter.convert(e))));
    config.getPvcVolumes().entrySet().forEach(
            e -> session.resources().decorate(target,
                    new AddPvcVolumeDecorator(PvcVolumeConverter.convert(e))));
    config.getAwsElasticBlockStoreVolumes().entrySet().forEach(
            e -> session.resources().decorate(target,
                    new AddAwsElasticBlockStoreVolumeDecorator(AwsElasticBlockStoreVolumeConverter.convert(e))));
    config.getAzureFileVolumes().entrySet().forEach(
            e -> session.resources().decorate(target,
                    new AddAzureFileVolumeDecorator(AzureFileVolumeConverter.convert(e))));
    config.getAzureDiskVolumes().entrySet().forEach(
            e -> session.resources().decorate(target,
                    new AddAzureDiskVolumeDecorator(AzureDiskVolumeConverter.convert(e))));
    config.getInitContainers().entrySet().forEach(
            e -> session.resources().decorate(target,
                    new AddInitContainerDecorator(name, ContainerConverter.convert(e))));
    config.getContainers().entrySet().forEach(
            e -> session.resources().decorate(target,
                    new AddSidecarDecorator(name, ContainerConverter.convert(e))));
}
session.resources().decorate(target, new AddSecretVolumeDecorator(SecretVolumeConverter.convert(e)));
/**
 * Registers one dekorate decorator per configured item of the given
 * {@link PlatformConfiguration} on the session, all scoped to the given deployment target.
 *
 * @param session the dekorate session collecting the decorators
 * @param target  the deployment target the decorators are applied to
 * @param name    the resource name the decorators refer to
 * @param config  the platform configuration being translated
 */
private void applyConfig(Session session, String target, String name, PlatformConfiguration config) {
    config.getLabels().forEach(
            (labelKey, labelValue) -> session.resources().decorate(target,
                    new AddLabelDecorator(new Label(labelKey, labelValue))));
    config.getAnnotations().forEach(
            (annotationKey, annotationValue) -> session.resources().decorate(target,
                    new AddAnnotationDecorator(new Annotation(annotationKey, annotationValue))));
    config.getEnvVars().entrySet().forEach(
            envEntry -> session.resources().decorate(target, new AddEnvVarDecorator(EnvConverter.convert(envEntry))));
    // The configured value is not forwarded here; the DEPLOY constant is used instead —
    // presumably deliberate (see the surrounding review note), confirm before changing.
    config.getWorkingDir().ifPresent(
            workingDir -> session.resources().decorate(target, new ApplyWorkingDirDecorator(name, DEPLOY)));
    config.getCommand().ifPresent(
            command -> session.resources().decorate(target,
                    new ApplyCommandDecorator(name, command.toArray(new String[command.size()]))));
    config.getArguments().ifPresent(
            arguments -> session.resources().decorate(target,
                    new ApplyArgsDecorator(name, arguments.toArray(new String[arguments.size()]))));
    config.getServiceAccount().ifPresent(
            account -> session.resources().decorate(target,
                    new ApplyServiceAccountNamedDecorator(name, account)));
    // Registered without a target, unlike the other decorators.
    session.resources().decorate(new ApplyImagePullPolicyDecorator(config.getImagePullPolicy()));
    config.getImagePullSecrets().ifPresent(
            secretNames -> secretNames.forEach(
                    secretName -> session.resources().decorate(target,
                            new AddImagePullSecretDecorator(name, secretName))));
    config.getLivenessProbe().ifPresent(
            probe -> session.resources().decorate(target,
                    new AddLivenessProbeDecorator(name, ProbeConverter.convert(probe))));
    config.getReadinessProbe().ifPresent(
            probe -> session.resources().decorate(target,
                    new AddReadinessProbeDecorator(name, ProbeConverter.convert(probe))));
    config.getMounts().entrySet().forEach(
            mountEntry -> session.resources().decorate(target,
                    new AddMountDecorator(MountConverter.convert(mountEntry))));
    config.getSecretVolumes().entrySet().forEach(
            volumeEntry -> session.resources().decorate(target,
                    new AddSecretVolumeDecorator(SecretVolumeConverter.convert(volumeEntry))));
    config.getConfigMapVolumes().entrySet().forEach(
            volumeEntry -> session.resources().decorate(target,
                    new AddConfigMapVolumeDecorator(ConfigMapVolumeConverter.convert(volumeEntry))));
    config.getPvcVolumes().entrySet().forEach(
            volumeEntry -> session.resources().decorate(target,
                    new AddPvcVolumeDecorator(PvcVolumeConverter.convert(volumeEntry))));
    config.getAwsElasticBlockStoreVolumes().entrySet().forEach(
            volumeEntry -> session.resources().decorate(target,
                    new AddAwsElasticBlockStoreVolumeDecorator(AwsElasticBlockStoreVolumeConverter.convert(volumeEntry))));
    config.getAzureFileVolumes().entrySet().forEach(
            volumeEntry -> session.resources().decorate(target,
                    new AddAzureFileVolumeDecorator(AzureFileVolumeConverter.convert(volumeEntry))));
    config.getAzureDiskVolumes().entrySet().forEach(
            volumeEntry -> session.resources().decorate(target,
                    new AddAzureDiskVolumeDecorator(AzureDiskVolumeConverter.convert(volumeEntry))));
    config.getInitContainers().entrySet().forEach(
            containerEntry -> session.resources().decorate(target,
                    new AddInitContainerDecorator(name, ContainerConverter.convert(containerEntry))));
    config.getContainers().entrySet().forEach(
            containerEntry -> session.resources().decorate(target,
                    new AddSidecarDecorator(name, ContainerConverter.convert(containerEntry))));
}
class KubernetesProcessor { private static final String PROPERTY_PREFIX = "dekorate."; private static final Set<String> ALLOWED_GENERATORS = new HashSet( Arrays.asList("kubernetes", "openshift", "knative", "docker", "s2i")); private static final Set<String> IMAGE_GENERATORS = new HashSet(Arrays.asList("docker", "s2i")); private static final String DOCKER_REGISTRY_PROPERTY = PROPERTY_PREFIX + "docker.registry"; private static final String APP_GROUP_PROPERTY = "app.group"; private static final String OUTPUT_ARTIFACT_FORMAT = "%s%s.jar"; @BuildStep(onlyIf = IsNormal.class) public void build(ApplicationInfoBuildItem applicationInfo, ArchiveRootBuildItem archiveRootBuildItem, OutputTargetBuildItem outputTargetBuildItem, PackageConfig packageConfig, KubernetesConfig kubernetesConfig, OpenshiftConfig openshiftConfig, KnativeConfig knativeConfig, List<KubernetesEnvVarBuildItem> kubernetesEnvBuildItems, List<KubernetesRoleBuildItem> kubernetesRoleBuildItems, List<KubernetesPortBuildItem> kubernetesPortBuildItems, Optional<BaseImageInfoBuildItem> baseImageBuildItem, Optional<ContainerImageInfoBuildItem> containerImageBuildItem, Optional<KubernetesCommandBuildItem> commandBuildItem, Optional<KubernetesHealthLivenessPathBuildItem> kubernetesHealthLivenessPathBuildItem, Optional<KubernetesHealthReadinessPathBuildItem> kubernetesHealthReadinessPathBuildItem, BuildProducer<GeneratedFileSystemResourceBuildItem> generatedResourceProducer, BuildProducer<FeatureBuildItem> featureProducer) throws UnsupportedEncodingException { if (kubernetesPortBuildItems.isEmpty()) { return; } final Path root; try { root = Files.createTempDirectory("quarkus-kubernetes"); } catch (IOException e) { throw new RuntimeException("Unable to setup environment for generating Kubernetes resources", e); } Map<String, Object> config = KubernetesConfigUtil.toMap(); config.entrySet().forEach(e -> System.out.println(e.getKey() + "=" + e.getValue())); Set<String> deploymentTargets = new HashSet<>(); 
deploymentTargets.addAll(KubernetesConfigUtil.getDeploymentTargets(config)); deploymentTargets.addAll(kubernetesConfig.getDeploymentTarget().stream().map(Enum::name).map(String::toLowerCase) .collect(Collectors.toList())); Optional<String> dockerRegistry = KubernetesConfigUtil.getDockerRegistry(config); dockerRegistry.ifPresent(v -> System.setProperty(DOCKER_REGISTRY_PROPERTY, v)); Optional<String> kubernetesGroup = KubernetesConfigUtil.getGroup(config); kubernetesGroup.ifPresent(v -> System.setProperty(APP_GROUP_PROPERTY, v)); String name = KubernetesConfigUtil.getName(config).orElse(applicationInfo.getName()); Path artifactPath = archiveRootBuildItem.getArchiveRoot().resolve( String.format(OUTPUT_ARTIFACT_FORMAT, outputTargetBuildItem.getBaseName(), packageConfig.runnerSuffix)); final Map<String, String> generatedResourcesMap; try { final SessionWriter sessionWriter = new SimpleFileWriter(root, false); Project project = createProject(applicationInfo, artifactPath); sessionWriter.setProject(project); final Session session = Session.getSession(); session.setWriter(sessionWriter); session.feed(Maps.fromProperties(config)); applyGlobalConfig(session, kubernetesConfig); applyConfig(session, KUBERNETES, name, kubernetesConfig); applyConfig(session, OPENSHIFT, name, openshiftConfig); applyConfig(session, KNATIVE, name, knativeConfig); applyBuildItems(session, name, deploymentTargets, kubernetesEnvBuildItems, kubernetesRoleBuildItems, kubernetesPortBuildItems, baseImageBuildItem, containerImageBuildItem, commandBuildItem, kubernetesHealthLivenessPathBuildItem, kubernetesHealthReadinessPathBuildItem); generatedResourcesMap = session.close(); } finally { if (kubernetesGroup.isPresent()) { System.clearProperty(APP_GROUP_PROPERTY); } if (dockerRegistry.isPresent()) { System.clearProperty(DOCKER_REGISTRY_PROPERTY); } } for (Map.Entry<String, String> resourceEntry : generatedResourcesMap.entrySet()) { String fileName = 
resourceEntry.getKey().replace(root.toAbsolutePath().toString(), ""); String relativePath = resourceEntry.getKey().replace(root.toAbsolutePath().toString(), "kubernetes"); if (fileName.endsWith(".yml") || fileName.endsWith(".json")) { String target = fileName.substring(0, fileName.lastIndexOf(".")); if (target.startsWith(File.separator)) { target = target.substring(1); } if (!deploymentTargets.contains(target)) { continue; } } generatedResourceProducer.produce( new GeneratedFileSystemResourceBuildItem( relativePath, resourceEntry.getValue().getBytes(StandardCharsets.UTF_8))); } featureProducer.produce(new FeatureBuildItem(FeatureBuildItem.KUBERNETES)); } /** * Apply global changes * * @param session The session to apply the changes * @param config The {@link KubernetesConfig} instance */ private void applyGlobalConfig(Session session, KubernetesConfig config) { config.getPorts().entrySet().stream().forEach(e -> session.configurators().add(new AddPort(PortConverter.convert(e)))); } /** * Apply changes to the target resource group * * @param session The session to apply the changes * @param target The deployment target (e.g. 
kubernetes, openshift, knative) * @param name The name of the resource to accept the configuration * @param config The {@link PlatformConfiguration} instance */ private void applyBuildItems(Session session, String name, Set<String> deploymentTargets, List<KubernetesEnvVarBuildItem> kubernetesEnvBuildItems, List<KubernetesRoleBuildItem> kubernetesRoleBuildItems, List<KubernetesPortBuildItem> kubernetesPortBuildItems, Optional<BaseImageInfoBuildItem> baseImageBuildItem, Optional<ContainerImageInfoBuildItem> containerImageBuildItem, Optional<KubernetesCommandBuildItem> commandBuildItem, Optional<KubernetesHealthLivenessPathBuildItem> kubernetesHealthLivenessPathBuildItem, Optional<KubernetesHealthReadinessPathBuildItem> kubernetesHealthReadinessPathBuildItem) { containerImageBuildItem.ifPresent(c -> session.resources() .decorate(new ApplyImageDecorator(name, c.getImage()))); kubernetesEnvBuildItems.forEach(e -> session.resources() .decorate(new AddEnvVarDecorator(new EnvBuilder().withName(e.getName()).withValue(e.getValue()).build()))); commandBuildItem.ifPresent(c -> { session.resources().decorate(new ApplyCommandDecorator(name, new String[] { c.getCommand() })); session.resources().decorate(new ApplyArgsDecorator(name, c.getArgs())); }); final Map<String, Integer> ports = verifyPorts(kubernetesPortBuildItems); ports.entrySet().stream() .map(e -> new PortBuilder().withName(e.getKey()).withContainerPort(e.getValue()).build()) .forEach(p -> session.configurators().add(new AddPort(p))); if (!kubernetesPortBuildItems.isEmpty()) { session.resources().decorate(new ApplyServiceAccountNamedDecorator()); session.resources().decorate(new AddServiceAccountResourceDecorator()); kubernetesRoleBuildItems .forEach(r -> session.resources().decorate(new AddRoleBindingResourceDecorator(r.getRole()))); } if (deploymentTargets.contains(DeploymentTarget.OPENSHIFT.name().toLowerCase())) { baseImageBuildItem.map(BaseImageInfoBuildItem::getImage).ifPresent(builderImage -> { S2iBuildConfig 
s2iBuildConfig = new S2iBuildConfigBuilder().withBuilderImage(builderImage).build(); session.resources().decorate(DeploymentTarget.OPENSHIFT.name().toLowerCase(), new AddBuilderImageStreamResourceDecorator(s2iBuildConfig)); session.resources().decorate(new ApplyBuilderImageDecorator(name, builderImage)); }); } kubernetesHealthLivenessPathBuildItem .ifPresent(l -> session.resources() .decorate(new AddLivenessProbeDecorator(name, new ProbeBuilder() .withHttpActionPath(l.getPath()) .build()))); kubernetesHealthReadinessPathBuildItem .ifPresent(r -> session.resources() .decorate(new AddReadinessProbeDecorator(name, new ProbeBuilder() .withHttpActionPath(r.getPath()) .build()))); } private Map<String, Integer> verifyPorts(List<KubernetesPortBuildItem> kubernetesPortBuildItems) { final Map<String, Integer> result = new HashMap<>(); final Set<Integer> usedPorts = new HashSet<>(); for (KubernetesPortBuildItem entry : kubernetesPortBuildItems) { final String name = entry.getName(); if (result.containsKey(name)) { throw new IllegalArgumentException( "All Kubernetes ports must have unique names - " + name + "has been used multiple times"); } final Integer port = entry.getPort(); if (usedPorts.contains(port)) { throw new IllegalArgumentException( "All Kubernetes ports must be unique - " + port + "has been used multiple times"); } result.put(name, port); usedPorts.add(port); } return result; } private Project createProject(ApplicationInfoBuildItem app, Path artifactPath) { Project project = FileProjectFactory.create(artifactPath.toFile()); BuildInfo buildInfo = new BuildInfo(app.getName(), app.getVersion(), "jar", project.getBuildInfo().getBuildTool(), artifactPath, project.getBuildInfo().getOutputFile(), project.getBuildInfo().getClassOutputDir()); return new Project(project.getRoot(), buildInfo, project.getScmInfo()); } }
class KubernetesProcessor { private static final String PROPERTY_PREFIX = "dekorate."; private static final String DOCKER_REGISTRY_PROPERTY = PROPERTY_PREFIX + "docker.registry"; private static final String APP_GROUP_PROPERTY = "app.group"; private static final String OUTPUT_ARTIFACT_FORMAT = "%s%s.jar"; @BuildStep(onlyIf = IsNormal.class) public void build(ApplicationInfoBuildItem applicationInfo, ArchiveRootBuildItem archiveRootBuildItem, OutputTargetBuildItem outputTargetBuildItem, PackageConfig packageConfig, KubernetesConfig kubernetesConfig, OpenshiftConfig openshiftConfig, KnativeConfig knativeConfig, List<KubernetesEnvVarBuildItem> kubernetesEnvBuildItems, List<KubernetesRoleBuildItem> kubernetesRoleBuildItems, List<KubernetesPortBuildItem> kubernetesPortBuildItems, Optional<BaseImageInfoBuildItem> baseImageBuildItem, Optional<ContainerImageInfoBuildItem> containerImageBuildItem, Optional<KubernetesCommandBuildItem> commandBuildItem, Optional<KubernetesHealthLivenessPathBuildItem> kubernetesHealthLivenessPathBuildItem, Optional<KubernetesHealthReadinessPathBuildItem> kubernetesHealthReadinessPathBuildItem, BuildProducer<GeneratedFileSystemResourceBuildItem> generatedResourceProducer, BuildProducer<FeatureBuildItem> featureProducer) throws UnsupportedEncodingException { if (kubernetesPortBuildItems.isEmpty()) { return; } final Path root; try { root = Files.createTempDirectory("quarkus-kubernetes"); } catch (IOException e) { throw new RuntimeException("Unable to setup environment for generating Kubernetes resources", e); } Map<String, Object> config = KubernetesConfigUtil.toMap(); Set<String> deploymentTargets = new HashSet<>(); deploymentTargets.addAll(KubernetesConfigUtil.getDeploymentTargets(config)); deploymentTargets.addAll(kubernetesConfig.deploymentTarget.stream().map(Enum::name).map(String::toLowerCase) .collect(Collectors.toList())); Optional<String> dockerRegistry = KubernetesConfigUtil.getDockerRegistry(config); dockerRegistry.ifPresent(v -> 
System.setProperty(DOCKER_REGISTRY_PROPERTY, v)); Optional<String> kubernetesGroup = KubernetesConfigUtil.getGroup(config); kubernetesGroup.ifPresent(v -> System.setProperty(APP_GROUP_PROPERTY, v)); String name = KubernetesConfigUtil.getName(config).orElse(applicationInfo.getName()); Path artifactPath = archiveRootBuildItem.getArchiveRoot().resolve( String.format(OUTPUT_ARTIFACT_FORMAT, outputTargetBuildItem.getBaseName(), packageConfig.runnerSuffix)); final Map<String, String> generatedResourcesMap; try { final SessionWriter sessionWriter = new SimpleFileWriter(root, false); Project project = createProject(applicationInfo, artifactPath); sessionWriter.setProject(project); final Session session = Session.getSession(); session.setWriter(sessionWriter); session.feed(Maps.fromProperties(config)); applyGlobalConfig(session, kubernetesConfig); applyConfig(session, KUBERNETES, name, kubernetesConfig); applyConfig(session, OPENSHIFT, name, openshiftConfig); applyConfig(session, KNATIVE, name, knativeConfig); applyBuildItems(session, name, deploymentTargets, kubernetesEnvBuildItems, kubernetesRoleBuildItems, kubernetesPortBuildItems, baseImageBuildItem, containerImageBuildItem, commandBuildItem, kubernetesHealthLivenessPathBuildItem, kubernetesHealthReadinessPathBuildItem); generatedResourcesMap = session.close(); } finally { if (kubernetesGroup.isPresent()) { System.clearProperty(APP_GROUP_PROPERTY); } if (dockerRegistry.isPresent()) { System.clearProperty(DOCKER_REGISTRY_PROPERTY); } } for (Map.Entry<String, String> resourceEntry : generatedResourcesMap.entrySet()) { String fileName = resourceEntry.getKey().replace(root.toAbsolutePath().toString(), ""); String relativePath = resourceEntry.getKey().replace(root.toAbsolutePath().toString(), "kubernetes"); if (fileName.endsWith(".yml") || fileName.endsWith(".json")) { String target = fileName.substring(0, fileName.lastIndexOf(".")); if (target.startsWith(File.separator)) { target = target.substring(1); } if 
(!deploymentTargets.contains(target)) { continue; } } generatedResourceProducer.produce( new GeneratedFileSystemResourceBuildItem( relativePath, resourceEntry.getValue().getBytes(StandardCharsets.UTF_8))); } featureProducer.produce(new FeatureBuildItem(FeatureBuildItem.KUBERNETES)); } /** * Apply global changes * * @param session The session to apply the changes * @param config The {@link KubernetesConfig} instance */ private void applyGlobalConfig(Session session, KubernetesConfig config) { config.ports.entrySet().forEach(e -> session.configurators().add(new AddPort(PortConverter.convert(e)))); } /** * Apply changes to the target resource group * * @param session The session to apply the changes * @param target The deployment target (e.g. kubernetes, openshift, knative) * @param name The name of the resource to accept the configuration * @param config The {@link PlatformConfiguration} instance */ private void applyBuildItems(Session session, String name, Set<String> deploymentTargets, List<KubernetesEnvVarBuildItem> kubernetesEnvBuildItems, List<KubernetesRoleBuildItem> kubernetesRoleBuildItems, List<KubernetesPortBuildItem> kubernetesPortBuildItems, Optional<BaseImageInfoBuildItem> baseImageBuildItem, Optional<ContainerImageInfoBuildItem> containerImageBuildItem, Optional<KubernetesCommandBuildItem> commandBuildItem, Optional<KubernetesHealthLivenessPathBuildItem> kubernetesHealthLivenessPathBuildItem, Optional<KubernetesHealthReadinessPathBuildItem> kubernetesHealthReadinessPathBuildItem) { containerImageBuildItem.ifPresent(c -> session.resources() .decorate(new ApplyImageDecorator(name, c.getImage()))); kubernetesEnvBuildItems.forEach(e -> session.resources() .decorate(new AddEnvVarDecorator(new EnvBuilder().withName(e.getName()).withValue(e.getValue()).build()))); commandBuildItem.ifPresent(c -> { session.resources().decorate(new ApplyCommandDecorator(name, new String[] { c.getCommand() })); session.resources().decorate(new ApplyArgsDecorator(name, 
c.getArgs())); }); final Map<String, Integer> ports = verifyPorts(kubernetesPortBuildItems); ports.entrySet().stream() .map(e -> new PortBuilder().withName(e.getKey()).withContainerPort(e.getValue()).build()) .forEach(p -> session.configurators().add(new AddPort(p))); if (!kubernetesPortBuildItems.isEmpty()) { session.resources().decorate(new ApplyServiceAccountNamedDecorator()); session.resources().decorate(new AddServiceAccountResourceDecorator()); kubernetesRoleBuildItems .forEach(r -> session.resources().decorate(new AddRoleBindingResourceDecorator(r.getRole()))); } if (deploymentTargets.contains(DeploymentTarget.OPENSHIFT.name().toLowerCase())) { baseImageBuildItem.map(BaseImageInfoBuildItem::getImage).ifPresent(builderImage -> { S2iBuildConfig s2iBuildConfig = new S2iBuildConfigBuilder().withBuilderImage(builderImage).build(); session.resources().decorate(DeploymentTarget.OPENSHIFT.name().toLowerCase(), new AddBuilderImageStreamResourceDecorator(s2iBuildConfig)); session.resources().decorate(new ApplyBuilderImageDecorator(name, builderImage)); }); } kubernetesHealthLivenessPathBuildItem .ifPresent(l -> session.resources() .decorate(new AddLivenessProbeDecorator(name, new ProbeBuilder() .withHttpActionPath(l.getPath()) .build()))); kubernetesHealthReadinessPathBuildItem .ifPresent(r -> session.resources() .decorate(new AddReadinessProbeDecorator(name, new ProbeBuilder() .withHttpActionPath(r.getPath()) .build()))); } private Map<String, Integer> verifyPorts(List<KubernetesPortBuildItem> kubernetesPortBuildItems) { final Map<String, Integer> result = new HashMap<>(); final Set<Integer> usedPorts = new HashSet<>(); for (KubernetesPortBuildItem entry : kubernetesPortBuildItems) { final String name = entry.getName(); if (result.containsKey(name)) { throw new IllegalArgumentException( "All Kubernetes ports must have unique names - " + name + "has been used multiple times"); } final Integer port = entry.getPort(); if (usedPorts.contains(port)) { throw new 
IllegalArgumentException( "All Kubernetes ports must be unique - " + port + "has been used multiple times"); } result.put(name, port); usedPorts.add(port); } return result; } private Project createProject(ApplicationInfoBuildItem app, Path artifactPath) { Project project = FileProjectFactory.create(artifactPath.toFile()); BuildInfo buildInfo = new BuildInfo(app.getName(), app.getVersion(), "jar", project.getBuildInfo().getBuildTool(), artifactPath, project.getBuildInfo().getOutputFile(), project.getBuildInfo().getClassOutputDir()); return new Project(project.getRoot(), buildInfo, project.getScmInfo()); } }
One already created bean, which generally won't happen until runtime when you actually have vert.x. Currently if you fail in static init then this method is called, but vert.x is not ready yet (as it is a runtime bean), so you get an extra unrelated exception in your logs (and possible unclean shutdown).
void undeployVerticles(@Observes @BeforeDestroyed(ApplicationScoped.class) Object event, BeanManager beanManager) { Set<Bean<?>> beans = beanManager.getBeans(AbstractVerticle.class, Any.Literal.INSTANCE); Context applicationContext = beanManager.getContext(ApplicationScoped.class); for (Bean<?> bean : beans) { if (ApplicationScoped.class.equals(bean.getScope())) { Object instance = applicationContext.get(bean); if (instance != null) { try { AbstractVerticle verticle = (AbstractVerticle) instance; io.vertx.mutiny.core.Vertx mutiny = beanManager.createInstance() .select(io.vertx.mutiny.core.Vertx.class).get(); mutiny.undeploy(verticle.deploymentID()).await().indefinitely(); LOGGER.debugf("Undeployed verticle: %s", instance.getClass()); } catch (Exception e) { LOGGER.debugf("Unable to undeploy verticle %s: %s", instance.getClass(), e.toString()); } } } } }
io.vertx.mutiny.core.Vertx mutiny = beanManager.createInstance()
void undeployVerticles(@Observes @BeforeDestroyed(ApplicationScoped.class) Object event, BeanManager beanManager) { Set<Bean<?>> beans = beanManager.getBeans(AbstractVerticle.class, Any.Literal.INSTANCE); Context applicationContext = beanManager.getContext(ApplicationScoped.class); for (Bean<?> bean : beans) { if (ApplicationScoped.class.equals(bean.getScope())) { Object instance = applicationContext.get(bean); if (instance != null) { try { AbstractVerticle verticle = (AbstractVerticle) instance; io.vertx.mutiny.core.Vertx mutiny = beanManager.createInstance() .select(io.vertx.mutiny.core.Vertx.class).get(); mutiny.undeploy(verticle.deploymentID()).await().indefinitely(); LOGGER.debugf("Undeployed verticle: %s", instance.getClass()); } catch (Exception e) { LOGGER.debugf("Unable to undeploy verticle %s: %s", instance.getClass(), e.toString()); } } } } }
class VertxProducer { private static final Logger LOGGER = Logger.getLogger(VertxProducer.class); @Singleton @Produces public EventBus eventbus(Vertx vertx) { return vertx.eventBus(); } @Singleton @Produces public io.vertx.mutiny.core.Vertx mutiny(Vertx vertx) { return io.vertx.mutiny.core.Vertx.newInstance(vertx); } @Singleton @Produces @Deprecated public io.vertx.axle.core.Vertx axle(Vertx vertx) { LOGGER.warn( "`io.vertx.axle.core.Vertx` is deprecated and will be removed in a future version - it is " + "recommended to switch to `io.vertx.mutiny.core.Vertx`"); return io.vertx.axle.core.Vertx.newInstance(vertx); } @Singleton @Produces @Deprecated public io.vertx.reactivex.core.Vertx rx(Vertx vertx) { LOGGER.warn( "`io.vertx.reactivex.core.Vertx` is deprecated and will be removed in a future version - it is " + "recommended to switch to `io.vertx.mutiny.core.Vertx`"); return io.vertx.reactivex.core.Vertx.newInstance(vertx); } @Singleton @Produces @Deprecated public io.vertx.axle.core.eventbus.EventBus axleEventBus(io.vertx.axle.core.Vertx axle) { LOGGER.warn( "`io.vertx.axle.core.eventbus.EventBus` is deprecated and will be removed in a future version - it is " + "recommended to switch to `io.vertx.mutiny.core.eventbus.EventBus`"); return axle.eventBus(); } @Singleton @Produces @Deprecated public io.vertx.reactivex.core.eventbus.EventBus rxEventBus(io.vertx.reactivex.core.Vertx rx) { LOGGER.warn( "`io.vertx.reactivex.core.eventbus.EventBus` is deprecated and will be removed in a future version - it " + "is recommended to switch to `io.vertx.mutiny.core.eventbus.EventBus`"); return rx.eventBus(); } @Singleton @Produces public io.vertx.mutiny.core.eventbus.EventBus mutinyEventBus(io.vertx.mutiny.core.Vertx mutiny) { return mutiny.eventBus(); } /** * Undeploy verticles backed by contextual instances of {@link ApplicationScoped} beans before the application context is * destroyed. Otherwise Vertx may attempt to stop the verticles after the CDI container is shut down. 
* * @param event * @param beanManager */ }
class VertxProducer { private static final Logger LOGGER = Logger.getLogger(VertxProducer.class); @Singleton @Produces public EventBus eventbus(Vertx vertx) { return vertx.eventBus(); } @Singleton @Produces public io.vertx.mutiny.core.Vertx mutiny(Vertx vertx) { return io.vertx.mutiny.core.Vertx.newInstance(vertx); } @Singleton @Produces @Deprecated public io.vertx.axle.core.Vertx axle(Vertx vertx) { LOGGER.warn( "`io.vertx.axle.core.Vertx` is deprecated and will be removed in a future version - it is " + "recommended to switch to `io.vertx.mutiny.core.Vertx`"); return io.vertx.axle.core.Vertx.newInstance(vertx); } @Singleton @Produces @Deprecated public io.vertx.reactivex.core.Vertx rx(Vertx vertx) { LOGGER.warn( "`io.vertx.reactivex.core.Vertx` is deprecated and will be removed in a future version - it is " + "recommended to switch to `io.vertx.mutiny.core.Vertx`"); return io.vertx.reactivex.core.Vertx.newInstance(vertx); } @Singleton @Produces @Deprecated public io.vertx.axle.core.eventbus.EventBus axleEventBus(io.vertx.axle.core.Vertx axle) { LOGGER.warn( "`io.vertx.axle.core.eventbus.EventBus` is deprecated and will be removed in a future version - it is " + "recommended to switch to `io.vertx.mutiny.core.eventbus.EventBus`"); return axle.eventBus(); } @Singleton @Produces @Deprecated public io.vertx.reactivex.core.eventbus.EventBus rxEventBus(io.vertx.reactivex.core.Vertx rx) { LOGGER.warn( "`io.vertx.reactivex.core.eventbus.EventBus` is deprecated and will be removed in a future version - it " + "is recommended to switch to `io.vertx.mutiny.core.eventbus.EventBus`"); return rx.eventBus(); } @Singleton @Produces public io.vertx.mutiny.core.eventbus.EventBus mutinyEventBus(io.vertx.mutiny.core.Vertx mutiny) { return mutiny.eventBus(); } /** * Undeploy verticles backed by contextual instances of {@link ApplicationScoped} beans before the application context is * destroyed. Otherwise Vertx may attempt to stop the verticles after the CDI container is shut down. 
* * @param event * @param beanManager */ }
In here the effectiveTypeDescriptor method gives the correct result. We are using that type symbol to get the langlibs. So this is not an issue right? ``` type Type1 object { }; type Type2 readonly & Type1; type Type3 Type2; type Type4 Type3; function name() { Type4 t = object {}; t.<cursor> } ``` I tried with the above example.
public List<FunctionSymbol> langLibMethods() { if (this.langLibFunctions == null) { if (this.effectiveTypeDescriptor().typeKind() == TypeDescKind.OBJECT) { this.langLibFunctions = this.effectiveTypeDescriptor().langLibMethods(); } else if (this.effectiveTypeDescriptor().typeKind() == TypeDescKind.TYPE_REFERENCE) { TypeReferenceTypeSymbol typeRef = (TypeReferenceTypeSymbol) this.effectiveTypeDescriptor(); this.langLibFunctions = typeRef.typeDescriptor().langLibMethods(); } else { LangLibrary langLibrary = LangLibrary.getInstance(this.context); List<FunctionSymbol> functions = langLibrary.getMethods( ((AbstractTypeSymbol) this.effectiveTypeDescriptor()).getBType()); this.langLibFunctions = filterLangLibMethods(functions, this.getBType()); } } return this.langLibFunctions; }
TypeReferenceTypeSymbol typeRef = (TypeReferenceTypeSymbol) this.effectiveTypeDescriptor();
public List<FunctionSymbol> langLibMethods() { if (this.langLibFunctions == null) { if (this.effectiveTypeDescriptor().typeKind() == TypeDescKind.OBJECT) { this.langLibFunctions = this.effectiveTypeDescriptor().langLibMethods(); } else if (this.effectiveTypeDescriptor().typeKind() == TypeDescKind.TYPE_REFERENCE) { TypeReferenceTypeSymbol typeRef = (TypeReferenceTypeSymbol) this.effectiveTypeDescriptor(); this.langLibFunctions = typeRef.typeDescriptor().langLibMethods(); } else { LangLibrary langLibrary = LangLibrary.getInstance(this.context); List<FunctionSymbol> functions = langLibrary.getMethods( ((AbstractTypeSymbol) this.effectiveTypeDescriptor()).getBType()); this.langLibFunctions = filterLangLibMethods(functions, this.getBType()); } } return this.langLibFunctions; }
class BallerinaIntersectionTypeSymbol extends AbstractTypeSymbol implements IntersectionTypeSymbol { private List<TypeSymbol> memberTypes; private TypeSymbol effectiveType; private String signature; public BallerinaIntersectionTypeSymbol(CompilerContext context, BIntersectionType intersectionType) { super(context, TypeDescKind.INTERSECTION, intersectionType); } @Override public List<TypeSymbol> memberTypeDescriptors() { if (this.memberTypes != null) { return this.memberTypes; } List<TypeSymbol> members = new ArrayList<>(); TypesFactory typesFactory = TypesFactory.getInstance(this.context); for (BType memberType : ((BIntersectionType) this.getBType()).getConstituentTypes()) { members.add(typesFactory.getTypeDescriptor(memberType)); } this.memberTypes = Collections.unmodifiableList(members); return this.memberTypes; } @Override public TypeSymbol effectiveTypeDescriptor() { if (this.effectiveType != null) { return this.effectiveType; } TypesFactory typesFactory = TypesFactory.getInstance(this.context); BType effectiveType = ((BIntersectionType) this.getBType()).effectiveType; this.effectiveType = typesFactory.getTypeDescriptor(effectiveType, effectiveType != null ? effectiveType.tsymbol : null, false, false, true); return this.effectiveType; } @Override @Override public String signature() { if (this.signature != null) { return this.signature; } List<TypeSymbol> memberTypes = this.memberTypeDescriptors(); StringJoiner joiner = new StringJoiner(" & "); memberTypes.forEach(typeDescriptor -> joiner.add(typeDescriptor.signature())); this.signature = joiner.toString(); return this.signature; } @Override public void accept(SymbolVisitor visitor) { visitor.visit(this); } @Override public <T> T apply(SymbolTransformer<T> transformer) { return transformer.transform(this); } }
class BallerinaIntersectionTypeSymbol extends AbstractTypeSymbol implements IntersectionTypeSymbol { private List<TypeSymbol> memberTypes; private TypeSymbol effectiveType; private String signature; public BallerinaIntersectionTypeSymbol(CompilerContext context, BIntersectionType intersectionType) { super(context, TypeDescKind.INTERSECTION, intersectionType); } @Override public List<TypeSymbol> memberTypeDescriptors() { if (this.memberTypes != null) { return this.memberTypes; } List<TypeSymbol> members = new ArrayList<>(); TypesFactory typesFactory = TypesFactory.getInstance(this.context); for (BType memberType : ((BIntersectionType) this.getBType()).getConstituentTypes()) { members.add(typesFactory.getTypeDescriptor(memberType)); } this.memberTypes = Collections.unmodifiableList(members); return this.memberTypes; } @Override public TypeSymbol effectiveTypeDescriptor() { if (this.effectiveType != null) { return this.effectiveType; } TypesFactory typesFactory = TypesFactory.getInstance(this.context); BType effectiveType = ((BIntersectionType) this.getBType()).effectiveType; this.effectiveType = typesFactory.getTypeDescriptor(effectiveType, effectiveType != null ? effectiveType.tsymbol : null, false, false, true); return this.effectiveType; } @Override @Override public String signature() { if (this.signature != null) { return this.signature; } List<TypeSymbol> memberTypes = this.memberTypeDescriptors(); StringJoiner joiner = new StringJoiner(" & "); memberTypes.forEach(typeDescriptor -> joiner.add(typeDescriptor.signature())); this.signature = joiner.toString(); return this.signature; } @Override public void accept(SymbolVisitor visitor) { visitor.visit(this); } @Override public <T> T apply(SymbolTransformer<T> transformer) { return transformer.transform(this); } }
AFAIK the generated subclass has a "forward method" for every intercepted method and this method is invoked if you do `IC.proceed()` in the last interceptor from the chain.
void forEachMethod(ClassInfo clazz, Consumer<MethodInfo> action) { for (MethodInfo method : clazz.methods()) { if (method.name().startsWith("<")) { continue; } if (Modifier.isPrivate(method.flags())) { continue; } if (Modifier.isFinal(method.flags())) { continue; } if (method.isSynthetic()) { continue; } action.accept(method); } DotName parentClassName = clazz.superName(); if (parentClassName == null || parentClassName.equals(DotNames.OBJECT)) { return; } ClassInfo parentClass = index.getClassByName(parentClassName); if (parentClass == null) { return; } forEachMethod(parentClass, action); }
void forEachMethod(ClassInfo clazz, Consumer<MethodInfo> action) { for (MethodInfo method : clazz.methods()) { if (method.name().startsWith("<")) { continue; } if (method.isSynthetic()) { continue; } action.accept(method); } DotName parentClassName = clazz.superName(); if (parentClassName == null || parentClassName.equals(DotNames.OBJECT)) { return; } ClassInfo parentClass = index.getClassByName(parentClassName); if (parentClass == null) { return; } forEachMethod(parentClass, action); }
class FaultToleranceScanner { private final IndexView index; private final AnnotationStore annotationStore; private final AnnotationProxyBuildItem proxy; private final ClassOutput output; FaultToleranceScanner(IndexView index, AnnotationStore annotationStore, AnnotationProxyBuildItem proxy, ClassOutput output) { this.index = index; this.annotationStore = annotationStore; this.proxy = proxy; this.output = output; } boolean hasFTAnnotations(ClassInfo clazz) { if (annotationStore.hasAnyAnnotation(clazz, DotNames.FT_ANNOTATIONS)) { return true; } for (MethodInfo method : clazz.methods()) { if (annotationStore.hasAnyAnnotation(method, DotNames.FT_ANNOTATIONS)) { return true; } } DotName parentClassName = clazz.superName(); if (parentClassName == null || parentClassName.equals(DotNames.OBJECT)) { return false; } ClassInfo parentClass = index.getClassByName(parentClassName); if (parentClass == null) { return false; } return hasFTAnnotations(parentClass); } FaultToleranceMethod createFaultToleranceMethod(ClassInfo beanClass, MethodInfo method) { Set<Class<? 
extends Annotation>> annotationsPresentDirectly = new HashSet<>(); FaultToleranceMethod result = new FaultToleranceMethod(); result.beanClass = JandexReflection.load(beanClass.name()); result.method = createMethodDescriptor(method); result.asynchronous = getAnnotation(Asynchronous.class, method, beanClass, annotationsPresentDirectly); result.bulkhead = getAnnotation(Bulkhead.class, method, beanClass, annotationsPresentDirectly); result.circuitBreaker = getAnnotation(CircuitBreaker.class, method, beanClass, annotationsPresentDirectly); result.fallback = getAnnotation(Fallback.class, method, beanClass, annotationsPresentDirectly); result.retry = getAnnotation(Retry.class, method, beanClass, annotationsPresentDirectly); result.timeout = getAnnotation(Timeout.class, method, beanClass, annotationsPresentDirectly); result.circuitBreakerName = getAnnotation(CircuitBreakerName.class, method, beanClass, annotationsPresentDirectly); result.customBackoff = getAnnotation(CustomBackoff.class, method, beanClass, annotationsPresentDirectly); result.exponentialBackoff = getAnnotation(ExponentialBackoff.class, method, beanClass, annotationsPresentDirectly); result.fibonacciBackoff = getAnnotation(FibonacciBackoff.class, method, beanClass, annotationsPresentDirectly); result.blocking = getAnnotation(Blocking.class, method, beanClass, annotationsPresentDirectly); result.nonBlocking = getAnnotation(NonBlocking.class, method, beanClass, annotationsPresentDirectly); result.annotationsPresentDirectly = annotationsPresentDirectly; return result; } private MethodDescriptor createMethodDescriptor(MethodInfo method) { MethodDescriptor result = new MethodDescriptor(); result.declaringClass = JandexReflection.load(method.declaringClass().name()); result.name = method.name(); result.parameterTypes = method.parameters() .stream() .map(JandexReflection::loadRawType) .toArray(Class[]::new); result.returnType = JandexReflection.loadRawType(method.returnType()); return result; } private <A extends 
Annotation> A getAnnotation(Class<A> annotationType, MethodInfo method, ClassInfo beanClass, Set<Class<? extends Annotation>> directlyPresent) { DotName annotationName = DotName.createSimple(annotationType.getName()); if (annotationStore.hasAnnotation(method, annotationName)) { directlyPresent.add(annotationType); AnnotationInstance annotation = annotationStore.getAnnotation(method, annotationName); return createAnnotation(annotationType, annotation); } return getAnnotationFromClass(annotationType, beanClass); } private <A extends Annotation> A getAnnotationFromClass(Class<A> annotationType, ClassInfo clazz) { DotName annotationName = DotName.createSimple(annotationType.getName()); if (annotationStore.hasAnnotation(clazz, annotationName)) { AnnotationInstance annotation = annotationStore.getAnnotation(clazz, annotationName); return createAnnotation(annotationType, annotation); } DotName parentClassName = clazz.superName(); if (parentClassName == null || parentClassName.equals(DotNames.OBJECT)) { return null; } ClassInfo parentClass = index.getClassByName(parentClassName); if (parentClass == null) { return null; } return getAnnotationFromClass(annotationType, parentClass); } private <A extends Annotation> A createAnnotation(Class<A> annotationType, AnnotationInstance instance) { return proxy.builder(instance, annotationType).build(output); } }
class FaultToleranceScanner { private final IndexView index; private final AnnotationStore annotationStore; private final AnnotationProxyBuildItem proxy; private final ClassOutput output; FaultToleranceScanner(IndexView index, AnnotationStore annotationStore, AnnotationProxyBuildItem proxy, ClassOutput output) { this.index = index; this.annotationStore = annotationStore; this.proxy = proxy; this.output = output; } boolean hasFTAnnotations(ClassInfo clazz) { if (annotationStore.hasAnyAnnotation(clazz, DotNames.FT_ANNOTATIONS)) { return true; } for (MethodInfo method : clazz.methods()) { if (annotationStore.hasAnyAnnotation(method, DotNames.FT_ANNOTATIONS)) { return true; } } DotName parentClassName = clazz.superName(); if (parentClassName == null || parentClassName.equals(DotNames.OBJECT)) { return false; } ClassInfo parentClass = index.getClassByName(parentClassName); if (parentClass == null) { return false; } return hasFTAnnotations(parentClass); } FaultToleranceMethod createFaultToleranceMethod(ClassInfo beanClass, MethodInfo method) { Set<Class<? 
extends Annotation>> annotationsPresentDirectly = new HashSet<>(); FaultToleranceMethod result = new FaultToleranceMethod(); result.beanClass = load(beanClass.name()); result.method = createMethodDescriptor(method); result.asynchronous = getAnnotation(Asynchronous.class, method, beanClass, annotationsPresentDirectly); result.bulkhead = getAnnotation(Bulkhead.class, method, beanClass, annotationsPresentDirectly); result.circuitBreaker = getAnnotation(CircuitBreaker.class, method, beanClass, annotationsPresentDirectly); result.fallback = getAnnotation(Fallback.class, method, beanClass, annotationsPresentDirectly); result.retry = getAnnotation(Retry.class, method, beanClass, annotationsPresentDirectly); result.timeout = getAnnotation(Timeout.class, method, beanClass, annotationsPresentDirectly); result.circuitBreakerName = getAnnotation(CircuitBreakerName.class, method, beanClass, annotationsPresentDirectly); result.customBackoff = getAnnotation(CustomBackoff.class, method, beanClass, annotationsPresentDirectly); result.exponentialBackoff = getAnnotation(ExponentialBackoff.class, method, beanClass, annotationsPresentDirectly); result.fibonacciBackoff = getAnnotation(FibonacciBackoff.class, method, beanClass, annotationsPresentDirectly); result.blocking = getAnnotation(Blocking.class, method, beanClass, annotationsPresentDirectly); result.nonBlocking = getAnnotation(NonBlocking.class, method, beanClass, annotationsPresentDirectly); result.annotationsPresentDirectly = annotationsPresentDirectly; return result; } private MethodDescriptor createMethodDescriptor(MethodInfo method) { MethodDescriptor result = new MethodDescriptor(); result.declaringClass = load(method.declaringClass().name()); result.name = method.name(); result.parameterTypes = method.parameters() .stream() .map(JandexUtil::loadRawType) .toArray(Class[]::new); result.returnType = JandexUtil.loadRawType(method.returnType()); return result; } private <A extends Annotation> A getAnnotation(Class<A> 
annotationType, MethodInfo method, ClassInfo beanClass, Set<Class<? extends Annotation>> directlyPresent) { DotName annotationName = DotName.createSimple(annotationType.getName()); if (annotationStore.hasAnnotation(method, annotationName)) { directlyPresent.add(annotationType); AnnotationInstance annotation = annotationStore.getAnnotation(method, annotationName); return createAnnotation(annotationType, annotation); } return getAnnotationFromClass(annotationType, beanClass); } private <A extends Annotation> A getAnnotationFromClass(Class<A> annotationType, ClassInfo clazz) { DotName annotationName = DotName.createSimple(annotationType.getName()); if (annotationStore.hasAnnotation(clazz, annotationName)) { AnnotationInstance annotation = annotationStore.getAnnotation(clazz, annotationName); return createAnnotation(annotationType, annotation); } DotName parentClassName = clazz.superName(); if (parentClassName == null || parentClassName.equals(DotNames.OBJECT)) { return null; } ClassInfo parentClass = index.getClassByName(parentClassName); if (parentClass == null) { return null; } return getAnnotationFromClass(annotationType, parentClass); } private <A extends Annotation> A createAnnotation(Class<A> annotationType, AnnotationInstance instance) { return proxy.builder(instance, annotationType).build(output); } private static Class<?> load(DotName name) { try { return Thread.currentThread().getContextClassLoader().loadClass(name.toString()); } catch (ClassNotFoundException e) { throw new RuntimeException(e); } } }
Not sure why those weren't added before (they also exist prior to 3.0) and I was seeing a (non-blocking?) NPE w.r.t. to `mariadb.properties`, so I added [both](https://github.com/mariadb-corporation/mariadb-connector-j/tree/3.0.3/src/main/resources).
void addNativeImageResources(BuildProducer<NativeImageResourceBuildItem> resources) { resources.produce(new NativeImageResourceBuildItem("mariadb.properties", "driver.properties")); }
resources.produce(new NativeImageResourceBuildItem("mariadb.properties", "driver.properties"));
void addNativeImageResources(BuildProducer<NativeImageResourceBuildItem> resources) { resources.produce(new NativeImageResourceBuildItem("mariadb.properties")); }
class JDBCMariaDBProcessor { @BuildStep FeatureBuildItem feature() { return new FeatureBuildItem(Feature.JDBC_MARIADB); } @BuildStep void registerDriver(BuildProducer<JdbcDriverBuildItem> jdbcDriver) { jdbcDriver.produce( new JdbcDriverBuildItem(DatabaseKind.MARIADB, "org.mariadb.jdbc.Driver", "org.mariadb.jdbc.MariaDbDataSource")); } @BuildStep DevServicesDatasourceConfigurationHandlerBuildItem devDbHandler() { return DevServicesDatasourceConfigurationHandlerBuildItem.jdbc(DatabaseKind.MARIADB); } @BuildStep void configureAgroalConnection(BuildProducer<AdditionalBeanBuildItem> additionalBeans, Capabilities capabilities) { if (capabilities.isPresent(Capability.AGROAL)) { additionalBeans.produce(new AdditionalBeanBuildItem.Builder().addBeanClass(MariaDBAgroalConnectionConfigurer.class) .setDefaultScope(BuiltinScope.APPLICATION.getName()) .setUnremovable() .build()); } } @BuildStep void registerServiceBinding(Capabilities capabilities, BuildProducer<ServiceProviderBuildItem> serviceProvider, BuildProducer<DefaultDataSourceDbKindBuildItem> dbKind) { if (capabilities.isPresent(Capability.KUBERNETES_SERVICE_BINDING)) { serviceProvider.produce( new ServiceProviderBuildItem("io.quarkus.kubernetes.service.binding.runtime.ServiceBindingConverter", MariaDBServiceBindingConverter.class.getName())); } dbKind.produce(new DefaultDataSourceDbKindBuildItem(DatabaseKind.MARIADB)); } @BuildStep(onlyIf = NativeOrNativeSourcesBuild.class) }
class JDBCMariaDBProcessor { @BuildStep FeatureBuildItem feature() { return new FeatureBuildItem(Feature.JDBC_MARIADB); } @BuildStep void registerDriver(BuildProducer<JdbcDriverBuildItem> jdbcDriver, BuildProducer<DefaultDataSourceDbKindBuildItem> dbKind) { jdbcDriver.produce( new JdbcDriverBuildItem(DatabaseKind.MARIADB, "org.mariadb.jdbc.Driver", "org.mariadb.jdbc.MariaDbDataSource")); dbKind.produce(new DefaultDataSourceDbKindBuildItem(DatabaseKind.MARIADB)); } @BuildStep DevServicesDatasourceConfigurationHandlerBuildItem devDbHandler() { return DevServicesDatasourceConfigurationHandlerBuildItem.jdbc(DatabaseKind.MARIADB); } @BuildStep void configureAgroalConnection(BuildProducer<AdditionalBeanBuildItem> additionalBeans, Capabilities capabilities) { if (capabilities.isPresent(Capability.AGROAL)) { additionalBeans.produce(new AdditionalBeanBuildItem.Builder().addBeanClass(MariaDBAgroalConnectionConfigurer.class) .setDefaultScope(BuiltinScope.APPLICATION.getName()) .setUnremovable() .build()); } } @BuildStep void registerAuthenticationPlugins(BuildProducer<ServiceProviderBuildItem> serviceProvider) { serviceProvider .produce(ServiceProviderBuildItem.allProvidersFromClassPath("org.mariadb.jdbc.plugin.AuthenticationPlugin")); } @BuildStep(onlyIf = NativeOrNativeSourcesBuild.class) @BuildStep void registerServiceBinding(Capabilities capabilities, BuildProducer<ServiceProviderBuildItem> serviceProvider) { if (capabilities.isPresent(Capability.KUBERNETES_SERVICE_BINDING)) { serviceProvider.produce( new ServiceProviderBuildItem("io.quarkus.kubernetes.service.binding.runtime.ServiceBindingConverter", MariaDBServiceBindingConverter.class.getName())); } } }
JDBC maybe does not have these time attributes
public List<String> supportedProperties() { List<String> properties = new ArrayList<>(); properties.add(CONNECTOR_DRIVER_NAME); properties.add(CONNECTOR_URL); properties.add(CONNECTOR_TABLE_NAME); properties.add(CONNECTOR_USERNAME); properties.add(CONNECTOR_PASSWORD); properties.add(CONNECTOR_SCAN_PARTITION_COLUMN_NAME); properties.add(CONNECTOR_SCAN_PARTITION_SIZE); properties.add(CONNECTOR_SCAN_PARTITION_LOWER); properties.add(CONNECTOR_SCAN_PARTITION_UPPER); properties.add(CONNECTOR_LOOKUP_CACHE_MAX_SIZE); properties.add(CONNECTOR_LOOKUP_CACHE_EXPIRE_MS); properties.add(CONNECTOR_LOOKUP_MAX_RETRY_TIMES); properties.add(CONNECTOR_SINK_UPDATE_MODE); properties.add(CONNECTOR_SINK_FLUSH_MAX_SIZE); properties.add(CONNECTOR_SINK_FLUSH_INTERVAL_MS); properties.add(CONNECTOR_SINK_MAX_RETRY_TIMES); properties.add(SCHEMA + ". properties.add(SCHEMA + ". properties.add(SCHEMA + ". properties.add(SCHEMA + ". properties.add(SCHEMA + ". properties.add(SCHEMA + ". properties.add(SCHEMA + ". properties.add(SCHEMA + ". properties.add(SCHEMA + ". properties.add(SCHEMA + ". properties.add(SCHEMA + ". properties.add(SCHEMA + ". return properties; }
properties.add(SCHEMA + ".
public List<String> supportedProperties() { List<String> properties = new ArrayList<>(); properties.add(CONNECTOR_DRIVER); properties.add(CONNECTOR_URL); properties.add(CONNECTOR_TABLE); properties.add(CONNECTOR_USERNAME); properties.add(CONNECTOR_PASSWORD); properties.add(CONNECTOR_READ_PARTITION_COLUMN); properties.add(CONNECTOR_READ_PARTITION_NUM); properties.add(CONNECTOR_READ_PARTITION_LOWER_BOUND); properties.add(CONNECTOR_READ_PARTITION_UPPER_BOUND); properties.add(CONNECTOR_READ_FETCH_SIZE); properties.add(CONNECTOR_LOOKUP_CACHE_MAX_ROWS); properties.add(CONNECTOR_LOOKUP_CACHE_TTL); properties.add(CONNECTOR_LOOKUP_MAX_RETRIES); properties.add(CONNECTOR_WRITE_FLUSH_MAX_ROWS); properties.add(CONNECTOR_WRITE_FLUSH_INTERVAL); properties.add(CONNECTOR_WRITE_MAX_RETRIES); properties.add(SCHEMA + ". properties.add(SCHEMA + ". return properties; }
class JDBCTableSourceSinkFactory implements StreamTableSourceFactory<Row>, StreamTableSinkFactory<Tuple2<Boolean, Row>> { public static final String CONNECTOR_DRIVER_NAME = "connector.driver.name"; public static final String CONNECTOR_URL = "connector.url"; public static final String CONNECTOR_TABLE_NAME = "connector.table.name"; public static final String CONNECTOR_USERNAME = "connector.username"; public static final String CONNECTOR_PASSWORD = "connector.password"; public static final String CONNECTOR_SCAN_PARTITION_COLUMN_NAME = "connector.scan.partition.column.name"; public static final String CONNECTOR_SCAN_PARTITION_SIZE = "connector.scan.partition.size"; public static final String CONNECTOR_SCAN_PARTITION_LOWER = "connector.scan.partition.lower"; public static final String CONNECTOR_SCAN_PARTITION_UPPER = "connector.scan.partition.upper"; public static final String CONNECTOR_LOOKUP_CACHE_MAX_SIZE = "connector.lookup.cache.max-size"; public static final String CONNECTOR_LOOKUP_CACHE_EXPIRE_MS = "connector.lookup.cache.expire-ms"; public static final String CONNECTOR_LOOKUP_MAX_RETRY_TIMES = "connector.lookup.max-retry-times"; public static final String CONNECTOR_SINK_UPDATE_MODE = "connector.sink.update-mode"; public static final String CONNECTOR_SINK_FLUSH_MAX_SIZE = "connector.sink.flush.max-size"; public static final String CONNECTOR_SINK_FLUSH_INTERVAL_MS = "connector.sink.flush.interval-ms"; public static final String CONNECTOR_SINK_MAX_RETRY_TIMES = "connector.sink.max-retry-times"; @Override public Map<String, String> requiredContext() { Map<String, String> context = new HashMap<>(); context.put(CONNECTOR_TYPE, "jdbc"); context.put(CONNECTOR_PROPERTY_VERSION, "1"); return context; } @Override @Override public StreamTableSource<Row> createStreamTableSource(Map<String, String> properties) { final DescriptorProperties descriptorProperties = getValidatedProperties(properties); final JDBCOptions options = getJDBCOptions(descriptorProperties); final 
Optional<String> partitionColumnName = descriptorProperties.getOptionalString(CONNECTOR_SCAN_PARTITION_COLUMN_NAME); final Optional<String> partitionSize = descriptorProperties.getOptionalString(CONNECTOR_SCAN_PARTITION_SIZE); final Optional<String> partitionLower = descriptorProperties.getOptionalString(CONNECTOR_SCAN_PARTITION_LOWER); final Optional<String> partitionUpper = descriptorProperties.getOptionalString(CONNECTOR_SCAN_PARTITION_UPPER); final JDBCScanOptions scanOptions; if (partitionColumnName.isPresent()) { Preconditions.checkArgument( partitionSize.isPresent() && partitionLower.isPresent() && partitionUpper.isPresent(), "Either all or none of the properties of scan options should be provided"); scanOptions = JDBCScanOptions.builder() .setPartitionColumnName(partitionColumnName.get()) .setPartitionSize(getLongOrThrow(partitionSize.get(), CONNECTOR_SCAN_PARTITION_SIZE)) .setPartitionLowerBound(getLongOrThrow(partitionLower.get(), CONNECTOR_SCAN_PARTITION_LOWER)) .setPartitionUpperBound(getLongOrThrow(partitionUpper.get(), CONNECTOR_SCAN_PARTITION_UPPER)) .build(); } else { scanOptions = null; } final Optional<String> lookupCacheMaxSize = descriptorProperties.getOptionalString(CONNECTOR_LOOKUP_CACHE_MAX_SIZE); final Optional<String> lookupCacheExpireMs = descriptorProperties.getOptionalString(CONNECTOR_LOOKUP_CACHE_EXPIRE_MS); final Optional<String> lookupMaxRetryTimes = descriptorProperties.getOptionalString(CONNECTOR_LOOKUP_MAX_RETRY_TIMES); JDBCLookupOptions.Builder lookupOptionsBuilder = JDBCLookupOptions.builder(); if (lookupCacheMaxSize.isPresent()) { lookupOptionsBuilder = lookupOptionsBuilder.setCacheMaxSize( getLongOrThrow(lookupCacheMaxSize.get(), CONNECTOR_LOOKUP_CACHE_MAX_SIZE)); } if (lookupCacheExpireMs.isPresent()) { lookupOptionsBuilder = lookupOptionsBuilder.setCacheExpireMs( getLongOrThrow(lookupCacheExpireMs.get(), CONNECTOR_LOOKUP_CACHE_EXPIRE_MS)); } if (lookupMaxRetryTimes.isPresent()) { lookupOptionsBuilder = 
lookupOptionsBuilder.setMaxRetryTimes( getIntOrThrow(lookupMaxRetryTimes.get(), CONNECTOR_LOOKUP_MAX_RETRY_TIMES)); } final JDBCLookupOptions lookupOptions = lookupOptionsBuilder.build(); return JDBCTableSource.builder() .setOptions(options) .setScanOptions(scanOptions) .setLookupOptions(lookupOptions) .setSchema(descriptorProperties.getTableSchema(SCHEMA)) .build(); } @Override public StreamTableSink<Tuple2<Boolean, Row>> createStreamTableSink(Map<String, String> properties) { final DescriptorProperties descriptorProperties = getValidatedProperties(properties); JDBCOptions options = getJDBCOptions(descriptorProperties); JDBCUpsertTableSink.Builder builder = JDBCUpsertTableSink.builder() .setOptions(options) .setTableSchema(descriptorProperties.getTableSchema(SCHEMA)); Optional<String> size = descriptorProperties.getOptionalString(CONNECTOR_SINK_FLUSH_MAX_SIZE); Optional<String> time = descriptorProperties.getOptionalString(CONNECTOR_SINK_FLUSH_INTERVAL_MS); Optional<String> retry = descriptorProperties.getOptionalString(CONNECTOR_SINK_MAX_RETRY_TIMES); if (size.isPresent()) { builder = builder.setFlushMaxSize(getIntOrThrow(size.get(), CONNECTOR_SINK_FLUSH_MAX_SIZE)); } if (time.isPresent()) { builder = builder.setFlushIntervalMills(getLongOrThrow(time.get(), CONNECTOR_SINK_FLUSH_INTERVAL_MS)); } if (retry.isPresent()) { builder = builder.setMaxRetryTimes(getIntOrThrow(retry.get(), CONNECTOR_SINK_MAX_RETRY_TIMES)); } JDBCUpsertTableSink sink = builder.build(); sink.setIsAppendOnly(descriptorProperties.getString(CONNECTOR_SINK_UPDATE_MODE).equals("append")); return sink; } private DescriptorProperties getValidatedProperties(Map<String, String> properties) { final DescriptorProperties descriptorProperties = new DescriptorProperties(true); descriptorProperties.putProperties(properties); new SchemaValidator(true, false, false).validate(descriptorProperties); return descriptorProperties; } private JDBCOptions getJDBCOptions(DescriptorProperties descriptorProperties) { 
final String driverName = descriptorProperties.getString(CONNECTOR_DRIVER_NAME); final String url = descriptorProperties.getString(CONNECTOR_URL); final String tableName = descriptorProperties.getString(CONNECTOR_TABLE_NAME); final Optional<String> username = descriptorProperties.getOptionalString(CONNECTOR_USERNAME); final Optional<String> password = descriptorProperties.getOptionalString(CONNECTOR_PASSWORD); final Optional<JDBCDialect> dialect = JDBCDialects.get(url); Preconditions.checkState(dialect.isPresent(), "Cannot handle such jdbc url: " + url); JDBCOptions.Builder builder = JDBCOptions.builder() .setDriverName(driverName) .setDBUrl(url) .setTableName(tableName) .setDialect(dialect.get()); if (username.isPresent()) { Preconditions.checkState( password.isPresent(), "Both db username and password should be provided, or none should be provided"); builder = builder.setUsername(username.get()).setPassword(password.get()); } return builder.build(); } private int getIntOrThrow(String str, String name) { int ret; try { ret = Integer.valueOf(str); } catch (NumberFormatException e) { throw new IllegalArgumentException(name + " should be an integer value"); } return ret; } private long getLongOrThrow(String str, String name) { long ret; try { ret = Long.valueOf(str); } catch (NumberFormatException e) { throw new IllegalArgumentException(name + " should be a long integer value"); } return ret; } }
class JDBCTableSourceSinkFactory implements StreamTableSourceFactory<Row>, StreamTableSinkFactory<Tuple2<Boolean, Row>> { @Override public Map<String, String> requiredContext() { Map<String, String> context = new HashMap<>(); context.put(CONNECTOR_TYPE, CONNECTOR_TYPE_VALUE_JDBC); context.put(CONNECTOR_PROPERTY_VERSION, "1"); return context; } @Override @Override public StreamTableSource<Row> createStreamTableSource(Map<String, String> properties) { final DescriptorProperties descriptorProperties = getValidatedProperties(properties); return JDBCTableSource.builder() .setOptions(getJDBCOptions(descriptorProperties)) .setReadOptions(getJDBCReadOptions(descriptorProperties)) .setLookupOptions(getJDBCLookupOptions(descriptorProperties)) .setSchema(descriptorProperties.getTableSchema(SCHEMA)) .build(); } @Override public StreamTableSink<Tuple2<Boolean, Row>> createStreamTableSink(Map<String, String> properties) { final DescriptorProperties descriptorProperties = getValidatedProperties(properties); final JDBCUpsertTableSink.Builder builder = JDBCUpsertTableSink.builder() .setOptions(getJDBCOptions(descriptorProperties)) .setTableSchema(descriptorProperties.getTableSchema(SCHEMA)); descriptorProperties.getOptionalInt(CONNECTOR_WRITE_FLUSH_MAX_ROWS).ifPresent(builder::setFlushMaxSize); descriptorProperties.getOptionalDuration(CONNECTOR_WRITE_FLUSH_INTERVAL).ifPresent( s -> builder.setFlushIntervalMills(s.toMillis())); descriptorProperties.getOptionalInt(CONNECTOR_WRITE_MAX_RETRIES).ifPresent(builder::setMaxRetryTimes); return builder.build(); } private DescriptorProperties getValidatedProperties(Map<String, String> properties) { final DescriptorProperties descriptorProperties = new DescriptorProperties(true); descriptorProperties.putProperties(properties); new SchemaValidator(true, false, false).validate(descriptorProperties); new JDBCValidator().validate(descriptorProperties); return descriptorProperties; } private JDBCOptions getJDBCOptions(DescriptorProperties 
descriptorProperties) { final String url = descriptorProperties.getString(CONNECTOR_URL); final JDBCOptions.Builder builder = JDBCOptions.builder() .setDBUrl(url) .setTableName(descriptorProperties.getString(CONNECTOR_TABLE)) .setDialect(JDBCDialects.get(url).get()); descriptorProperties.getOptionalString(CONNECTOR_DRIVER).ifPresent(builder::setDriverName); descriptorProperties.getOptionalString(CONNECTOR_USERNAME).ifPresent(builder::setUsername); descriptorProperties.getOptionalString(CONNECTOR_PASSWORD).ifPresent(builder::setPassword); return builder.build(); } private JDBCReadOptions getJDBCReadOptions(DescriptorProperties descriptorProperties) { final Optional<String> partitionColumnName = descriptorProperties.getOptionalString(CONNECTOR_READ_PARTITION_COLUMN); final Optional<Long> partitionLower = descriptorProperties.getOptionalLong(CONNECTOR_READ_PARTITION_LOWER_BOUND); final Optional<Long> partitionUpper = descriptorProperties.getOptionalLong(CONNECTOR_READ_PARTITION_UPPER_BOUND); final Optional<Integer> numPartitions = descriptorProperties.getOptionalInt(CONNECTOR_READ_PARTITION_NUM); final JDBCReadOptions.Builder builder = JDBCReadOptions.builder(); if (partitionColumnName.isPresent()) { builder.setPartitionColumnName(partitionColumnName.get()); builder.setPartitionLowerBound(partitionLower.get()); builder.setPartitionUpperBound(partitionUpper.get()); builder.setNumPartitions(numPartitions.get()); } descriptorProperties.getOptionalInt(CONNECTOR_READ_FETCH_SIZE).ifPresent(builder::setFetchSize); return builder.build(); } private JDBCLookupOptions getJDBCLookupOptions(DescriptorProperties descriptorProperties) { final JDBCLookupOptions.Builder builder = JDBCLookupOptions.builder(); descriptorProperties.getOptionalLong(CONNECTOR_LOOKUP_CACHE_MAX_ROWS).ifPresent(builder::setCacheMaxSize); descriptorProperties.getOptionalDuration(CONNECTOR_LOOKUP_CACHE_TTL).ifPresent( s -> builder.setCacheExpireMs(s.toMillis())); 
descriptorProperties.getOptionalInt(CONNECTOR_LOOKUP_MAX_RETRIES).ifPresent(builder::setMaxRetryTimes); return builder.build(); } }
Got two more confirmed. Currently there's some issue with FunctionAppTest for ACA, reported to service team. Already confirmed with Vinay that the related resources has been deleted. Guess it's fine to update recordings afterwards.
private void addSanitizers() { List<TestProxySanitizer> sanitizers = new ArrayList<>(Arrays.asList( new TestProxySanitizer("(?<=/subscriptions/)([^/?]+)", ZERO_UUID, TestProxySanitizerType.URL), new TestProxySanitizer("(?<=%2Fsubscriptions%2F)([^/?]+)", ZERO_UUID, TestProxySanitizerType.URL), new TestProxySanitizer("Retry-After", null, "0", TestProxySanitizerType.HEADER), new TestProxySanitizer("$..secretText", null, REDACTED_VALUE, TestProxySanitizerType.BODY_KEY), new TestProxySanitizer("$..keys[*].value", null, REDACTED_VALUE, TestProxySanitizerType.BODY_KEY), new TestProxySanitizer("$..adminPassword", null, REDACTED_VALUE, TestProxySanitizerType.BODY_KEY), new TestProxySanitizer("$..Password", null, REDACTED_VALUE, TestProxySanitizerType.BODY_KEY), new TestProxySanitizer("$..accessSAS", null, REDACTED_VALUE, TestProxySanitizerType.BODY_KEY), new TestProxySanitizer("$.properties.osProfile.customData", null, REDACTED_VALUE, TestProxySanitizerType.BODY_KEY), new TestProxySanitizer("$..administratorLoginPassword", null, REDACTED_VALUE, TestProxySanitizerType.BODY_KEY), new TestProxySanitizer("$..hubDatabasePassword", null, REDACTED_VALUE, TestProxySanitizerType.BODY_KEY), new TestProxySanitizer("$..aliasPrimaryConnectionString", null, REDACTED_VALUE, TestProxySanitizerType.BODY_KEY), new TestProxySanitizer("$..aliasSecondaryConnectionString", null, REDACTED_VALUE, TestProxySanitizerType.BODY_KEY), new TestProxySanitizer("$..primaryKey", null, REDACTED_VALUE, TestProxySanitizerType.BODY_KEY), new TestProxySanitizer("$..secondaryKey", null, REDACTED_VALUE, TestProxySanitizerType.BODY_KEY), new TestProxySanitizer("$..primaryMasterKey", null, REDACTED_VALUE, TestProxySanitizerType.BODY_KEY), new TestProxySanitizer("$..secondaryMasterKey", null, REDACTED_VALUE, TestProxySanitizerType.BODY_KEY), new TestProxySanitizer("$..primaryReadonlyMasterKey", null, REDACTED_VALUE, TestProxySanitizerType.BODY_KEY), new TestProxySanitizer("$..secondaryReadonlyMasterKey", null, 
REDACTED_VALUE, TestProxySanitizerType.BODY_KEY), new TestProxySanitizer("$..passwords[*].value", null, REDACTED_VALUE, TestProxySanitizerType.BODY_KEY), new TestProxySanitizer("$..secret", null, REDACTED_VALUE, TestProxySanitizerType.BODY_KEY), new TestProxySanitizer("$.properties.siteConfig.machineKey.decryptionKey", null, REDACTED_VALUE, TestProxySanitizerType.BODY_KEY), new TestProxySanitizer("(?:AccountKey=)(?<accountKey>.*?)(?:;)", REDACTED_VALUE, TestProxySanitizerType.BODY_REGEX).setGroupForReplace("accountKey"), new TestProxySanitizer("$.properties.WEBSITE_AUTH_ENCRYPTION_KEY", null, REDACTED_VALUE, TestProxySanitizerType.BODY_KEY), new TestProxySanitizer("$.properties.DOCKER_REGISTRY_SERVER_PASSWORD", null, REDACTED_VALUE, TestProxySanitizerType.BODY_KEY) )); sanitizers.addAll(this.sanitizers); interceptorManager.addSanitizers(sanitizers); }
new TestProxySanitizer("$.properties.WEBSITE_AUTH_ENCRYPTION_KEY", null, REDACTED_VALUE, TestProxySanitizerType.BODY_KEY),
private void addSanitizers() { List<TestProxySanitizer> sanitizers = new ArrayList<>(Arrays.asList( new TestProxySanitizer("(?<=/subscriptions/)([^/?]+)", ZERO_UUID, TestProxySanitizerType.URL), new TestProxySanitizer("(?<=%2Fsubscriptions%2F)([^/?]+)", ZERO_UUID, TestProxySanitizerType.URL), new TestProxySanitizer("Retry-After", null, "0", TestProxySanitizerType.HEADER), new TestProxySanitizer("$..secretText", null, REDACTED_VALUE, TestProxySanitizerType.BODY_KEY), new TestProxySanitizer("$..keys[*].value", null, REDACTED_VALUE, TestProxySanitizerType.BODY_KEY), new TestProxySanitizer("$..adminPassword", null, REDACTED_VALUE, TestProxySanitizerType.BODY_KEY), new TestProxySanitizer("$..Password", null, REDACTED_VALUE, TestProxySanitizerType.BODY_KEY), new TestProxySanitizer("$..accessSAS", null, REDACTED_VALUE, TestProxySanitizerType.BODY_KEY), new TestProxySanitizer("$.properties.osProfile.customData", null, REDACTED_VALUE, TestProxySanitizerType.BODY_KEY), new TestProxySanitizer("$..administratorLoginPassword", null, REDACTED_VALUE, TestProxySanitizerType.BODY_KEY), new TestProxySanitizer("$..hubDatabasePassword", null, REDACTED_VALUE, TestProxySanitizerType.BODY_KEY), new TestProxySanitizer("$..aliasPrimaryConnectionString", null, REDACTED_VALUE, TestProxySanitizerType.BODY_KEY), new TestProxySanitizer("$..aliasSecondaryConnectionString", null, REDACTED_VALUE, TestProxySanitizerType.BODY_KEY), new TestProxySanitizer("$..primaryKey", null, REDACTED_VALUE, TestProxySanitizerType.BODY_KEY), new TestProxySanitizer("$..secondaryKey", null, REDACTED_VALUE, TestProxySanitizerType.BODY_KEY), new TestProxySanitizer("$..primaryMasterKey", null, REDACTED_VALUE, TestProxySanitizerType.BODY_KEY), new TestProxySanitizer("$..secondaryMasterKey", null, REDACTED_VALUE, TestProxySanitizerType.BODY_KEY), new TestProxySanitizer("$..primaryReadonlyMasterKey", null, REDACTED_VALUE, TestProxySanitizerType.BODY_KEY), new TestProxySanitizer("$..secondaryReadonlyMasterKey", null, 
REDACTED_VALUE, TestProxySanitizerType.BODY_KEY), new TestProxySanitizer("$..passwords[*].value", null, REDACTED_VALUE, TestProxySanitizerType.BODY_KEY), new TestProxySanitizer("$..secret", null, REDACTED_VALUE, TestProxySanitizerType.BODY_KEY), new TestProxySanitizer("$.properties.siteConfig.machineKey.decryptionKey", null, REDACTED_VALUE, TestProxySanitizerType.BODY_KEY), new TestProxySanitizer("(?:AccountKey=)(?<accountKey>.*?)(?:;)", REDACTED_VALUE, TestProxySanitizerType.BODY_REGEX).setGroupForReplace("accountKey"), new TestProxySanitizer("$.properties.WEBSITE_AUTH_ENCRYPTION_KEY", null, REDACTED_VALUE, TestProxySanitizerType.BODY_KEY), new TestProxySanitizer("$.properties.DOCKER_REGISTRY_SERVER_PASSWORD", null, REDACTED_VALUE, TestProxySanitizerType.BODY_KEY) )); sanitizers.addAll(this.sanitizers); interceptorManager.addSanitizers(sanitizers); }
class of the manager * @param httpPipeline the http pipeline * @param profile the azure profile * @param <T> the type of the manager * @return the manager instance * @throws RuntimeException when field cannot be found or set. */ protected <T> T buildManager(Class<T> manager, HttpPipeline httpPipeline, AzureProfile profile) { try { Constructor<T> constructor = manager.getDeclaredConstructor(httpPipeline.getClass(), profile.getClass()); setAccessible(constructor); return constructor.newInstance(httpPipeline, profile); } catch (ReflectiveOperationException ex) { throw LOGGER.logExceptionAsError(new RuntimeException(ex)); } }
class of the manager * @param httpPipeline the http pipeline * @param profile the azure profile * @param <T> the type of the manager * @return the manager instance * @throws RuntimeException when field cannot be found or set. */ protected <T> T buildManager(Class<T> manager, HttpPipeline httpPipeline, AzureProfile profile) { try { Constructor<T> constructor = manager.getDeclaredConstructor(httpPipeline.getClass(), profile.getClass()); setAccessible(constructor); return constructor.newInstance(httpPipeline, profile); } catch (ReflectiveOperationException ex) { throw LOGGER.logExceptionAsError(new RuntimeException(ex)); } }
Yeah verified it in Windows now, seems to be working fine.
void generateJavaBindings() throws BindgenException { outStream.println("\nResolving maven dependencies..."); resolvePlatformLibraries(); if ((mvnGroupId != null) && (mvnArtifactId != null) && (mvnVersion != null)) { new BindgenMvnResolver(outStream, env).mavenResolver(mvnGroupId, mvnArtifactId, mvnVersion, env.getProjectRoot(), true); } ClassLoader classLoader = setClassLoader(); if (classLoader != null) { setDirectoryPaths(); outStream.println("\nGenerating bindings for: "); generateBindings(classNames, classLoader, modulePath); if (!env.getSuperClasses().isEmpty()) { env.setAllJavaClasses(env.getSuperClasses()); env.getSuperClasses().removeAll(classNames); generateBindings(new HashSet<>(env.getSuperClasses()), classLoader, modulePath); } if (!env.getClassListForLooping().isEmpty()) { outStream.println("\nGenerating dependency bindings for: "); env.setDirectJavaClass(false); } while (!env.getClassListForLooping().isEmpty()) { Set<String> newSet = new HashSet<>(env.getClassListForLooping()); newSet.removeAll(classNames); newSet.removeAll(env.getSuperClasses()); env.setAllJavaClasses(newSet); env.clearClassListForLooping(); generateBindings(newSet, classLoader, dependenciesPath); } generateUtilFiles(); if (env.getFailedClassGens() != null) { handleFailedClassGens(); } if (env.getFailedMethodGens() != null) { for (String entry : env.getFailedMethodGens()) { errStream.println(entry); } } try { ((URLClassLoader) classLoader).close(); } catch (IOException e) { outStream.println("\nError while exiting the classloader:\n" + e.getMessage()); } catch (ClassCastException ignore) { } } }
outStream.println("\nResolving maven dependencies...");
void generateJavaBindings() throws BindgenException { outStream.println("\nResolving maven dependencies..."); resolvePlatformLibraries(); if ((mvnGroupId != null) && (mvnArtifactId != null) && (mvnVersion != null)) { new BindgenMvnResolver(outStream, env).mavenResolver(mvnGroupId, mvnArtifactId, mvnVersion, env.getProjectRoot(), true); } ClassLoader classLoader = setClassLoader(); if (classLoader != null) { setDirectoryPaths(); outStream.println("\nGenerating bindings for: "); generateBindings(classNames, classLoader, modulePath); if (!env.getSuperClasses().isEmpty()) { env.setAllJavaClasses(env.getSuperClasses()); env.getSuperClasses().removeAll(classNames); generateBindings(new HashSet<>(env.getSuperClasses()), classLoader, modulePath); } if (!env.getClassListForLooping().isEmpty()) { outStream.println("\nGenerating dependency bindings for: "); env.setDirectJavaClass(false); } while (!env.getClassListForLooping().isEmpty()) { Set<String> newSet = new HashSet<>(env.getClassListForLooping()); newSet.removeAll(classNames); newSet.removeAll(env.getSuperClasses()); env.setAllJavaClasses(newSet); env.clearClassListForLooping(); generateBindings(newSet, classLoader, dependenciesPath); } generateUtilFiles(); if (env.getFailedClassGens() != null) { handleFailedClassGens(); } if (env.getFailedMethodGens() != null) { for (String entry : env.getFailedMethodGens()) { errStream.println(entry); } } try { ((URLClassLoader) classLoader).close(); } catch (IOException e) { outStream.println("\nError while exiting the classloader:\n" + e.getMessage()); } catch (ClassCastException ignore) { } } }
/**
 * Coordinates Ballerina bindgen: resolves platform/maven dependencies declared
 * in the project, loads the target Java classes, and writes the generated
 * binding files. The configuration setters are invoked by the CLI before
 * generation starts.
 */
class BindingsGenerator {
    private final BindgenEnv env;
    private Path modulePath;
    private Path dependenciesPath;
    private Path utilsDirPath;
    private String mvnGroupId;
    private String mvnArtifactId;
    private String mvnVersion;
    private PrintStream errStream;
    private PrintStream outStream;
    private Set<String> classNames = new HashSet<>();
    // Default output root, taken from the USER_DIR system property; may be
    // redirected by setDirectoryPaths().
    private Path userDir = Paths.get(System.getProperty(USER_DIR));

    BindingsGenerator(PrintStream out, PrintStream err) {
        this.outStream = out;
        this.errStream = err;
        this.env = new BindgenEnv();
    }

    // Reads the platform dependencies from the project manifest (when a project
    // root is known) and adds each to the classpath, resolving maven
    // coordinates when a path is not given.
    private void resolvePlatformLibraries() throws BindgenException {
        if (env.getProjectRoot() != null) {
            TomlDocument tomlDocument = env.getTomlDocument();
            PackageManifest.Platform platform = getPackagePlatform(tomlDocument);
            if (platform != null && platform.dependencies() != null) {
                for (Map<String, Object> library : platform.dependencies()) {
                    if (library.get("path") != null) {
                        handlePathDependency(library.get("path").toString());
                    } else if (library.get("groupId") != null && library.get("artifactId") != null
                            && library.get("version") != null) {
                        resolveMvnDependency(library.get("groupId").toString(),
                                library.get("artifactId").toString(), library.get("version").toString());
                    }
                }
            }
        }
    }

    // Returns the JAVA_11 platform section of the package manifest, or null
    // when the manifest or section is unavailable.
    private PackageManifest.Platform getPackagePlatform(TomlDocument tomlDocument) {
        if (tomlDocument != null) {
            PackageManifest packageManifest = ManifestBuilder.from(tomlDocument, null,
                    env.getProjectRoot()).packageManifest();
            if (packageManifest != null) {
                return packageManifest.platform(JvmTarget.JAVA_11.code());
            }
        }
        return null;
    }

    // Adds a path-type platform library to the classpath; relative paths are
    // resolved against the project root.
    private void handlePathDependency(String libPath) {
        Path libraryPath;
        if (Paths.get(libPath).isAbsolute()) {
            libraryPath = Paths.get(libPath);
        } else {
            libraryPath = Paths.get(env.getProjectRoot().toString(), libPath);
        }
        env.addClasspath(libraryPath.toString());
    }

    // Resolves a maven-coordinate platform library. Note the trailing flag is
    // false here, unlike the CLI-coordinate path which passes true; the flag's
    // semantics are defined by BindgenMvnResolver.
    private void resolveMvnDependency(String mvnGroupId, String mvnArtifactId, String mvnVersion)
            throws BindgenException {
        new BindgenMvnResolver(outStream, env).mavenResolver(mvnGroupId, mvnArtifactId, mvnVersion,
                env.getProjectRoot(), false);
    }

    // Builds a class loader over the collected classpaths; falls back to this
    // class's own loader when no classpaths were provided.
    private ClassLoader setClassLoader() throws BindgenException {
        ClassLoader classLoader;
        if (!env.getClassPaths().isEmpty()) {
            classLoader = getClassLoader(env.getClassPaths(), this.getClass().getClassLoader());
        } else {
            outStream.println("\nNo classpaths were detected.");
            classLoader = this.getClass().getClassLoader();
        }
        return classLoader;
    }

    // Decides where generated files go: the modules directory, an explicit
    // output path (which must exist), or the current working directory. All
    // three target paths share one root.
    private void setDirectoryPaths() throws BindgenException {
        String userPath = userDir.toString();
        String outputPath = env.getOutputPath();
        if (env.getModulesFlag()) {
            userPath = Paths.get(userPath, MODULES_DIR).toString();
        } else if (outputPath != null) {
            if (!Paths.get(outputPath).toFile().exists()) {
                throw new BindgenException("error: output path provided could not be found: " + outputPath);
            }
            userPath = outputPath;
        }
        utilsDirPath = dependenciesPath = modulePath = Paths.get(userPath);
    }

    // Reports generation failures, but only for classes the user explicitly requested.
    private void handleFailedClassGens() {
        errStream.print("\n");
        for (Map.Entry<String, String> entry : env.getFailedClassGens().entrySet()) {
            if (classNames.contains(entry.getKey())) {
                errStream.println("error: unable to generate the '" + entry.getKey()
                        + "' binding class: " + entry.getValue());
            }
        }
    }

    // Writes the error-type binding files collected during generation.
    private void generateUtilFiles() throws BindgenException {
        String utilsDirStrPath = utilsDirPath.toString();
        createDirectory(utilsDirStrPath);
        for (JError jError : env.getExceptionList()) {
            String fileName = jError.getShortExceptionName() + BAL_EXTENSION;
            if (env.getModulesFlag()) {
                // With the modules flag, utils are emitted per Java package directory.
                utilsDirStrPath = Paths.get(modulePath.toString(), jError.getPackageName()).toString();
                createDirectory(utilsDirStrPath);
            }
            outputSyntaxTreeFile(jError, env, Paths.get(utilsDirStrPath, fileName).toString(), false);
        }
    }

    void setOutputPath(String output) {
        this.env.setOutputPath(output);
    }

    void setDependentJars(String[] jarPaths) {
        for (String path : jarPaths) {
            env.addClasspath(path);
        }
    }

    void setClassNames(List<String> classNames) {
        this.classNames = new HashSet<>(classNames);
    }

    // Generates one binding file per public class in classList. Existing files
    // are kept for indirectly referenced classes; load failures are recorded in
    // the environment rather than thrown.
    private void generateBindings(Set<String> classList, ClassLoader classLoader, Path modulePath)
            throws BindgenException {
        createDirectory(modulePath.toString());
        for (String c : classList) {
            try {
                if (classLoader != null) {
                    Class classInstance = classLoader.loadClass(c);
                    if (classInstance != null && isPublicClass(classInstance)) {
                        JClass jClass = new JClass(classInstance, env);
                        Path filePath;
                        if (env.getModulesFlag()) {
                            String outputFile = Paths.get(modulePath.toString(), jClass.getPackageName()).toString();
                            createDirectory(outputFile);
                            filePath = Paths.get(outputFile, jClass.getShortClassName() + BAL_EXTENSION);
                        } else {
                            filePath = Paths.get(modulePath.toString(), jClass.getShortClassName() + BAL_EXTENSION);
                        }
                        if (Files.exists(filePath) && !env.isDirectJavaClass()) {
                            continue;
                        }
                        outputSyntaxTreeFile(jClass, env, filePath.toString(), false);
                        outStream.println("\t" + c);
                    }
                }
            } catch (ClassNotFoundException | NoClassDefFoundError e) {
                env.setFailedClassGens(c, e.toString());
            }
        }
    }

    void setMvnGroupId(String mvnGroupId) {
        this.mvnGroupId = mvnGroupId;
    }

    void setMvnArtifactId(String mvnArtifactId) {
        this.mvnArtifactId = mvnArtifactId;
    }

    void setMvnVersion(String mvnVersion) {
        this.mvnVersion = mvnVersion;
    }

    void setPublic() {
        this.env.setPublicFlag(true);
    }

    // NOTE(review): this also forces the public flag on whenever the modules
    // flag is set at all — presumably intentional, but worth confirming.
    void setModulesFlag(boolean modulesFlag) {
        this.env.setPublicFlag(true);
        this.env.setModulesFlag(modulesFlag);
    }

    void setProject(Project project) {
        this.env.setProject(project);
    }
}
/**
 * Coordinates Ballerina bindgen: resolves platform/maven dependencies declared
 * in the project, loads the target Java classes, and writes the generated
 * binding files. The configuration setters are invoked by the CLI before
 * generation starts.
 */
class BindingsGenerator {
    private final BindgenEnv env;
    private Path modulePath;
    private Path dependenciesPath;
    private Path utilsDirPath;
    private String mvnGroupId;
    private String mvnArtifactId;
    private String mvnVersion;
    private PrintStream errStream;
    private PrintStream outStream;
    private Set<String> classNames = new HashSet<>();
    // Default output root, taken from the USER_DIR system property; may be
    // redirected by setDirectoryPaths().
    private Path userDir = Paths.get(System.getProperty(USER_DIR));

    BindingsGenerator(PrintStream out, PrintStream err) {
        this.outStream = out;
        this.errStream = err;
        this.env = new BindgenEnv();
    }

    // Reads the platform dependencies from the project manifest (when a project
    // root is known) and adds each to the classpath, resolving maven
    // coordinates when a path is not given.
    private void resolvePlatformLibraries() throws BindgenException {
        if (env.getProjectRoot() != null) {
            TomlDocument tomlDocument = env.getTomlDocument();
            PackageManifest.Platform platform = getPackagePlatform(tomlDocument);
            if (platform != null && platform.dependencies() != null) {
                for (Map<String, Object> library : platform.dependencies()) {
                    if (library.get("path") != null) {
                        handlePathDependency(library.get("path").toString());
                    } else if (library.get("groupId") != null && library.get("artifactId") != null
                            && library.get("version") != null) {
                        resolveMvnDependency(library.get("groupId").toString(),
                                library.get("artifactId").toString(), library.get("version").toString());
                    }
                }
            }
        }
    }

    // Returns the JAVA_11 platform section of the package manifest, or null
    // when the manifest or section is unavailable.
    private PackageManifest.Platform getPackagePlatform(TomlDocument tomlDocument) {
        if (tomlDocument != null) {
            PackageManifest packageManifest = ManifestBuilder.from(tomlDocument, null,
                    env.getProjectRoot()).packageManifest();
            if (packageManifest != null) {
                return packageManifest.platform(JvmTarget.JAVA_11.code());
            }
        }
        return null;
    }

    // Adds a path-type platform library to the classpath; relative paths are
    // resolved against the project root.
    private void handlePathDependency(String libPath) {
        Path libraryPath;
        if (Paths.get(libPath).isAbsolute()) {
            libraryPath = Paths.get(libPath);
        } else {
            libraryPath = Paths.get(env.getProjectRoot().toString(), libPath);
        }
        env.addClasspath(libraryPath.toString());
    }

    // Resolves a maven-coordinate platform library. Note the trailing flag is
    // false here, unlike the CLI-coordinate path which passes true; the flag's
    // semantics are defined by BindgenMvnResolver.
    private void resolveMvnDependency(String mvnGroupId, String mvnArtifactId, String mvnVersion)
            throws BindgenException {
        new BindgenMvnResolver(outStream, env).mavenResolver(mvnGroupId, mvnArtifactId, mvnVersion,
                env.getProjectRoot(), false);
    }

    // Builds a class loader over the collected classpaths; falls back to this
    // class's own loader when no classpaths were provided.
    private ClassLoader setClassLoader() throws BindgenException {
        ClassLoader classLoader;
        if (!env.getClassPaths().isEmpty()) {
            classLoader = getClassLoader(env.getClassPaths(), this.getClass().getClassLoader());
        } else {
            outStream.println("\nNo classpaths were detected.");
            classLoader = this.getClass().getClassLoader();
        }
        return classLoader;
    }

    // Decides where generated files go: the modules directory, an explicit
    // output path (which must exist), or the current working directory. All
    // three target paths share one root.
    private void setDirectoryPaths() throws BindgenException {
        String userPath = userDir.toString();
        String outputPath = env.getOutputPath();
        if (env.getModulesFlag()) {
            userPath = Paths.get(userPath, MODULES_DIR).toString();
        } else if (outputPath != null) {
            if (!Paths.get(outputPath).toFile().exists()) {
                throw new BindgenException("error: output path provided could not be found: " + outputPath);
            }
            userPath = outputPath;
        }
        utilsDirPath = dependenciesPath = modulePath = Paths.get(userPath);
    }

    // Reports generation failures, but only for classes the user explicitly requested.
    private void handleFailedClassGens() {
        errStream.print("\n");
        for (Map.Entry<String, String> entry : env.getFailedClassGens().entrySet()) {
            if (classNames.contains(entry.getKey())) {
                errStream.println("error: unable to generate the '" + entry.getKey()
                        + "' binding class: " + entry.getValue());
            }
        }
    }

    // Writes the error-type binding files collected during generation.
    private void generateUtilFiles() throws BindgenException {
        String utilsDirStrPath = utilsDirPath.toString();
        createDirectory(utilsDirStrPath);
        for (JError jError : env.getExceptionList()) {
            String fileName = jError.getShortExceptionName() + BAL_EXTENSION;
            if (env.getModulesFlag()) {
                // With the modules flag, utils are emitted per Java package directory.
                utilsDirStrPath = Paths.get(modulePath.toString(), jError.getPackageName()).toString();
                createDirectory(utilsDirStrPath);
            }
            outputSyntaxTreeFile(jError, env, Paths.get(utilsDirStrPath, fileName).toString(), false);
        }
    }

    void setOutputPath(String output) {
        this.env.setOutputPath(output);
    }

    void setDependentJars(String[] jarPaths) {
        for (String path : jarPaths) {
            env.addClasspath(path);
        }
    }

    void setClassNames(List<String> classNames) {
        this.classNames = new HashSet<>(classNames);
    }

    // Generates one binding file per public class in classList. Existing files
    // are kept for indirectly referenced classes; load failures are recorded in
    // the environment rather than thrown.
    private void generateBindings(Set<String> classList, ClassLoader classLoader, Path modulePath)
            throws BindgenException {
        createDirectory(modulePath.toString());
        for (String c : classList) {
            try {
                if (classLoader != null) {
                    Class classInstance = classLoader.loadClass(c);
                    if (classInstance != null && isPublicClass(classInstance)) {
                        JClass jClass = new JClass(classInstance, env);
                        Path filePath;
                        if (env.getModulesFlag()) {
                            String outputFile = Paths.get(modulePath.toString(), jClass.getPackageName()).toString();
                            createDirectory(outputFile);
                            filePath = Paths.get(outputFile, jClass.getShortClassName() + BAL_EXTENSION);
                        } else {
                            filePath = Paths.get(modulePath.toString(), jClass.getShortClassName() + BAL_EXTENSION);
                        }
                        if (Files.exists(filePath) && !env.isDirectJavaClass()) {
                            continue;
                        }
                        outputSyntaxTreeFile(jClass, env, filePath.toString(), false);
                        outStream.println("\t" + c);
                    }
                }
            } catch (ClassNotFoundException | NoClassDefFoundError e) {
                env.setFailedClassGens(c, e.toString());
            }
        }
    }

    void setMvnGroupId(String mvnGroupId) {
        this.mvnGroupId = mvnGroupId;
    }

    void setMvnArtifactId(String mvnArtifactId) {
        this.mvnArtifactId = mvnArtifactId;
    }

    void setMvnVersion(String mvnVersion) {
        this.mvnVersion = mvnVersion;
    }

    void setPublic() {
        this.env.setPublicFlag(true);
    }

    // NOTE(review): this also forces the public flag on whenever the modules
    // flag is set at all — presumably intentional, but worth confirming.
    void setModulesFlag(boolean modulesFlag) {
        this.env.setPublicFlag(true);
        this.env.setModulesFlag(modulesFlag);
    }

    void setProject(Project project) {
        this.env.setProject(project);
    }
}
+1, I'll make that change across all the commands.
public void execute() { if (this.helpFlag) { String commandUsageInfo = BLauncherCmd.getCommandUsageInfo(Constants.RUN_COMMAND); this.errStream.println(commandUsageInfo); return; } if (this.argList == null || this.argList.size() == 0) { CommandUtil.printError(this.errStream, "no ballerina program given.", "ballerina run {<bal-file> | <module-name>}", true); Runtime.getRuntime().exit(1); return; } if (null != this.debugPort) { System.setProperty(SYSTEM_PROP_BAL_DEBUG, this.debugPort); } String[] programArgs = this.getProgramArgs(this.argList); Path sourceRootPath = this.sourceRoot == null ? Paths.get(System.getProperty("user.dir")) : Paths.get(this.sourceRoot); Path sourcePath; Path targetPath; if (this.argList.get(0).endsWith(BLangConstants.BLANG_SRC_FILE_SUFFIX)) { if (Paths.get(this.argList.get(0)).isAbsolute()) { sourcePath = Paths.get(this.argList.get(0)); sourceRootPath = sourcePath.getParent(); } else { sourcePath = sourceRootPath.resolve(this.argList.get(0)); } if (Files.notExists(sourcePath)) { CommandUtil.printError(this.errStream, "'" + sourcePath + "' Ballerina file does not exist.", null, false); Runtime.getRuntime().exit(1); return; } if (!Files.isRegularFile(sourcePath)) { CommandUtil.printError(this.errStream, "'" + sourcePath + "' is not a Ballerina file. 
check if it is a symlink or a shortcut.", null, false); Runtime.getRuntime().exit(1); return; } try { targetPath = Files.createTempDirectory("ballerina-run-" + System.nanoTime()); } catch (IOException e) { throw LauncherUtils.createLauncherException("error occurred when creating executable."); } } else if (Files.exists( sourceRootPath.resolve(ProjectDirConstants.SOURCE_DIR_NAME).resolve(this.argList.get(0))) && Files.isDirectory( sourceRootPath.resolve(ProjectDirConstants.SOURCE_DIR_NAME).resolve(this.argList.get(0)))) { if (!RepoUtils.isBallerinaProject(sourceRootPath)) { CommandUtil.printError(this.errStream, "you are trying to run a module that is not inside a project.", null, false); Runtime.getRuntime().exit(1); return; } if (Paths.get(argList.get(0)).isAbsolute()) { CommandUtil.printError(this.errStream, "you are trying to run a module by giving the absolute path. you only need give " + "the name of the module.", "ballerina run <module-name>", true); Runtime.getRuntime().exit(1); return; } String moduleName = argList.get(0); if (moduleName.endsWith("/")) { moduleName = moduleName.substring(0, moduleName.length() - 1); } sourcePath = Paths.get(moduleName); if (Files.notExists(sourceRootPath.resolve(ProjectDirConstants.SOURCE_DIR_NAME).resolve(sourcePath))) { CommandUtil.printError(this.errStream, "'" + sourcePath + "' module does not exist.", "ballerina run <module-name>", true); Runtime.getRuntime().exit(1); return; } targetPath = sourceRootPath.resolve(ProjectDirConstants.TARGET_DIR_NAME); } else { CommandUtil.printError(this.errStream, "invalid Ballerina source path, it should either be a module name in a Ballerina project, a " + "file with a \'" + BLangConstants.BLANG_SRC_FILE_SUFFIX + "\' extension or a .jar file.", "ballerina run {<bal-file> | <module-name>}", true); Runtime.getRuntime().exit(1); return; } sourceRootPath = sourceRootPath.normalize(); sourcePath = sourcePath == null ? 
null : sourcePath.normalize(); targetPath = targetPath.normalize(); CompilerContext compilerContext = new CompilerContext(); CompilerOptions options = CompilerOptions.getInstance(compilerContext); options.put(PROJECT_DIR, sourceRootPath.toString()); options.put(OFFLINE, Boolean.toString(this.offline)); options.put(COMPILER_PHASE, CompilerPhase.BIR_GEN.toString()); options.put(LOCK_ENABLED, Boolean.toString(true)); options.put(SKIP_TESTS, Boolean.toString(true)); options.put(TEST_ENABLED, "true"); options.put(EXPERIMENTAL_FEATURES_ENABLED, Boolean.toString(this.experimentalFlag)); BuildContext buildContext = new BuildContext(sourceRootPath, targetPath, sourcePath, compilerContext); buildContext.setOut(this.outStream); buildContext.setErr(this.errStream); boolean isSingleFileBuild = buildContext.getSourceType().equals(SINGLE_BAL_FILE); TaskExecutor taskExecutor = new TaskExecutor.TaskBuilder() .addTask(new CleanTargetDirTask(), isSingleFileBuild) .addTask(new CreateTargetDirTask()) .addTask(new CompileTask()) .addTask(new CreateBaloTask(), isSingleFileBuild) .addTask(new CreateBirTask()) .addTask(new CopyNativeLibTask()) .addTask(new CreateJarTask(false)) .addTask(new CopyModuleJarTask()) .addTask(new CreateExecutableTask()) .addTask(new PrintExecutablePathTask(), isSingleFileBuild) .addTask(new PrintRunningExecutableTask(!isSingleFileBuild)) .addTask(new RunExecutableTask(programArgs)) .build(); taskExecutor.executeTasks(buildContext); }
"invalid Ballerina source path, it should either be a module name in a Ballerina project, a " +
/**
 * Executes the "ballerina run" command: validates the given source (a single
 * .bal file or a module inside a Ballerina project), compiles it, and runs the
 * resulting executable with the remaining program arguments.
 * <p>
 * Invalid input is reported to the error stream and terminates the VM with
 * exit code 1.
 */
public void execute() {
    if (this.helpFlag) {
        String commandUsageInfo = BLauncherCmd.getCommandUsageInfo(Constants.RUN_COMMAND);
        this.errStream.println(commandUsageInfo);
        return;
    }
    if (this.argList == null || this.argList.size() == 0) {
        CommandUtil.printError(this.errStream, "no ballerina program given.",
                "ballerina run {<bal-file> | <module-name> | <executable-jar>}", true);
        Runtime.getRuntime().exit(1);
        return;
    }
    if (null != this.debugPort) {
        System.setProperty(SYSTEM_PROP_BAL_DEBUG, this.debugPort);
    }
    String[] programArgs = this.getProgramArgs(this.argList);
    Path sourceRootPath = this.sourceRoot == null ? Paths.get(System.getProperty("user.dir"))
            : Paths.get(this.sourceRoot);
    Path sourcePath;
    Path targetPath;
    if (this.argList.get(0).endsWith(BLangConstants.BLANG_SRC_FILE_SUFFIX)) {
        // Single .bal file: built into a fresh temporary target directory.
        if (Paths.get(this.argList.get(0)).isAbsolute()) {
            sourcePath = Paths.get(this.argList.get(0));
            sourceRootPath = sourcePath.getParent();
        } else {
            sourcePath = sourceRootPath.resolve(this.argList.get(0));
        }
        if (Files.notExists(sourcePath)) {
            CommandUtil.printError(this.errStream,
                    "'" + sourcePath + "' Ballerina file does not exist.", null, false);
            Runtime.getRuntime().exit(1);
            return;
        }
        if (!Files.isRegularFile(sourcePath)) {
            CommandUtil.printError(this.errStream,
                    "'" + sourcePath + "' is not a Ballerina file. check if it is a symlink or a shortcut.",
                    null, false);
            Runtime.getRuntime().exit(1);
            return;
        }
        try {
            targetPath = Files.createTempDirectory("ballerina-run-" + System.nanoTime());
        } catch (IOException e) {
            throw LauncherUtils.createLauncherException("error occurred when creating executable.");
        }
    } else if (Files.exists(
            sourceRootPath.resolve(ProjectDirConstants.SOURCE_DIR_NAME).resolve(this.argList.get(0)))
            && Files.isDirectory(
            sourceRootPath.resolve(ProjectDirConstants.SOURCE_DIR_NAME).resolve(this.argList.get(0)))) {
        // Module inside a Ballerina project: built into the project's target directory.
        if (!RepoUtils.isBallerinaProject(sourceRootPath)) {
            CommandUtil.printError(this.errStream,
                    "you are trying to run a module that is not inside a project.", null, false);
            Runtime.getRuntime().exit(1);
            return;
        }
        if (Paths.get(argList.get(0)).isAbsolute()) {
            CommandUtil.printError(this.errStream,
                    "you are trying to run a module by giving the absolute path. you only need give "
                            + "the name of the module.",
                    "ballerina run <module-name>", true);
            Runtime.getRuntime().exit(1);
            return;
        }
        String moduleName = argList.get(0);
        if (moduleName.endsWith("/")) {
            moduleName = moduleName.substring(0, moduleName.length() - 1);
        }
        sourcePath = Paths.get(moduleName);
        if (Files.notExists(sourceRootPath.resolve(ProjectDirConstants.SOURCE_DIR_NAME).resolve(sourcePath))) {
            CommandUtil.printError(this.errStream, "'" + sourcePath + "' module does not exist.",
                    "ballerina run <module-name>", true);
            Runtime.getRuntime().exit(1);
            return;
        }
        targetPath = sourceRootPath.resolve(ProjectDirConstants.TARGET_DIR_NAME);
    } else {
        CommandUtil.printError(this.errStream,
                "invalid Ballerina source path. It should either be a name of a module in a Ballerina project, "
                        + "a file with a '" + BLangConstants.BLANG_SRC_FILE_SUFFIX
                        + "' extension, or an executable '" + BLANG_COMPILED_JAR_EXT + "' file.",
                "ballerina run {<bal-file> | <module-name> | <executable-jar>}", true);
        Runtime.getRuntime().exit(1);
        return;
    }
    sourceRootPath = sourceRootPath.normalize();
    sourcePath = sourcePath == null ? null : sourcePath.normalize();
    targetPath = targetPath.normalize();
    // Configure a BIR_GEN compilation with tests skipped; locking stays enabled.
    CompilerContext compilerContext = new CompilerContext();
    CompilerOptions options = CompilerOptions.getInstance(compilerContext);
    options.put(PROJECT_DIR, sourceRootPath.toString());
    options.put(OFFLINE, Boolean.toString(this.offline));
    options.put(COMPILER_PHASE, CompilerPhase.BIR_GEN.toString());
    options.put(LOCK_ENABLED, Boolean.toString(true));
    options.put(SKIP_TESTS, Boolean.toString(true));
    options.put(TEST_ENABLED, "true");
    options.put(EXPERIMENTAL_FEATURES_ENABLED, Boolean.toString(this.experimentalFlag));
    BuildContext buildContext = new BuildContext(sourceRootPath, targetPath, sourcePath, compilerContext);
    buildContext.setOut(this.outStream);
    buildContext.setErr(this.errStream);
    // Some tasks only apply to one of the two build modes (single file vs module).
    boolean isSingleFileBuild = buildContext.getSourceType().equals(SINGLE_BAL_FILE);
    TaskExecutor taskExecutor = new TaskExecutor.TaskBuilder()
            .addTask(new CleanTargetDirTask(), isSingleFileBuild)
            .addTask(new CreateTargetDirTask())
            .addTask(new CompileTask())
            .addTask(new CreateBaloTask(), isSingleFileBuild)
            .addTask(new CreateBirTask())
            .addTask(new CopyNativeLibTask())
            .addTask(new CreateJarTask(false))
            .addTask(new CopyModuleJarTask())
            .addTask(new CreateExecutableTask())
            .addTask(new PrintExecutablePathTask(), isSingleFileBuild)
            .addTask(new PrintRunningExecutableTask(!isSingleFileBuild))
            .addTask(new RunExecutableTask(programArgs))
            .build();
    taskExecutor.executeTasks(buildContext);
}
/**
 * The "ballerina run" launcher command: declares the CLI parameters/options
 * and the help text printed for the command.
 */
class RunCommand implements BLauncherCmd {
    private final PrintStream outStream;
    private final PrintStream errStream;

    @CommandLine.Parameters(description = "Program arguments")
    private List<String> argList;

    @CommandLine.Option(names = {"--sourceroot"},
            description = "Path to the directory containing source files and modules")
    private String sourceRoot;

    @CommandLine.Option(names = {"--help", "-h", "?"}, hidden = true)
    private boolean helpFlag;

    @CommandLine.Option(names = {"--offline"}, description = "Builds offline without downloading dependencies and "
            + "then run.")
    private boolean offline;

    @CommandLine.Option(names = "--debug", hidden = true)
    private String debugPort;

    @CommandLine.Option(names = "--experimental", description = "Enable experimental language features.")
    private boolean experimentalFlag;

    public RunCommand() {
        // Default constructor routes both streams to stderr so stdout stays
        // reserved for the running program's own output.
        this.outStream = System.err;
        this.errStream = System.err;
    }

    public RunCommand(PrintStream outStream, PrintStream errStream) {
        this.outStream = outStream;
        this.errStream = errStream;
    }

    /**
     * Get the program args from the passed argument list.
     *
     * @param argList The argument list.
     * @return An array of program args.
     */
    private String[] getProgramArgs(List<String> argList) {
        // The first element is the program/module name; the rest is forwarded
        // to the program being run.
        String[] argsArray = argList.toArray(new String[0]);
        return Arrays.copyOfRange(argsArray, 1, argsArray.length);
    }

    @Override
    public String getName() {
        return BallerinaCliCommands.RUN;
    }

    @Override
    public void printLongDesc(StringBuilder out) {
        out.append("Run command runs a compiled Ballerina program. \n");
        out.append("\n");
        out.append("If a Ballerina source file or a module is given, \n");
        out.append("run command compiles and runs it. \n");
        out.append("\n");
        out.append("By default, 'ballerina run' executes the main function. \n");
        out.append("If the main function is not there, it executes services. \n");
        out.append("\n");
        out.append("If the -s flag is given, 'ballerina run' executes\n");
        out.append("services instead of the main function.\n");
    }

    @Override
    public void printUsage(StringBuilder out) {
        out.append(" ballerina run [--offline]\n"
                + " [--sourceroot]\n"
                + " {<balfile> | module-name | executable-jar} [(--key=value)...] "
                + "[--] [args...] \n");
    }

    @Override
    public void setParentCmdParser(CommandLine parentCmdParser) {
        // No parent-parser state is needed for this command.
    }
}
/**
 * The "ballerina run" launcher command: declares the CLI parameters/options
 * and the help text printed for the command.
 */
class RunCommand implements BLauncherCmd {
    private final PrintStream outStream;
    private final PrintStream errStream;

    @CommandLine.Parameters(description = "Program arguments")
    private List<String> argList;

    @CommandLine.Option(names = {"--sourceroot"},
            description = "Path to the directory containing source files and modules")
    private String sourceRoot;

    @CommandLine.Option(names = {"--help", "-h", "?"}, hidden = true)
    private boolean helpFlag;

    @CommandLine.Option(names = {"--offline"}, description = "Builds offline without downloading dependencies and "
            + "then run.")
    private boolean offline;

    @CommandLine.Option(names = "--debug", hidden = true)
    private String debugPort;

    @CommandLine.Option(names = "--experimental", description = "Enable experimental language features.")
    private boolean experimentalFlag;

    public RunCommand() {
        // Default constructor routes both streams to stderr so stdout stays
        // reserved for the running program's own output.
        this.outStream = System.err;
        this.errStream = System.err;
    }

    public RunCommand(PrintStream outStream, PrintStream errStream) {
        this.outStream = outStream;
        this.errStream = errStream;
    }

    /**
     * Get the program args from the passed argument list.
     *
     * @param argList The argument list.
     * @return An array of program args.
     */
    private String[] getProgramArgs(List<String> argList) {
        // The first element is the program/module name; the rest is forwarded
        // to the program being run.
        String[] argsArray = argList.toArray(new String[0]);
        return Arrays.copyOfRange(argsArray, 1, argsArray.length);
    }

    @Override
    public String getName() {
        return BallerinaCliCommands.RUN;
    }

    @Override
    public void printLongDesc(StringBuilder out) {
        out.append("Run command runs a compiled Ballerina program. \n");
        out.append("\n");
        out.append("If a Ballerina source file or a module is given, \n");
        out.append("run command compiles and runs it. \n");
        out.append("\n");
        out.append("By default, 'ballerina run' executes the main function. \n");
        out.append("If the main function is not there, it executes services. \n");
        out.append("\n");
        out.append("If the -s flag is given, 'ballerina run' executes\n");
        out.append("services instead of the main function.\n");
    }

    @Override
    public void printUsage(StringBuilder out) {
        out.append(" ballerina run [--offline]\n"
                + " [--sourceroot]\n"
                + " {<balfile> | module-name | executable-jar} [(--key=value)...] "
                + "[--] [args...] \n");
    }

    @Override
    public void setParentCmdParser(CommandLine parentCmdParser) {
        // No parent-parser state is needed for this command.
    }
}
> Ah! Not sure if inferring makes the code more readable here though. I can see why it is confusing - I had the type here explicitly but IDEA told me it isn't needed. Let me add it back :)
/**
 * Creates the synthetic bean, injecting the list of all SomeBean instances.
 * The type parameter is spelled out explicitly: TypeLiteral captures the
 * injected type from the anonymous subclass's generic superclass, so writing
 * TypeLiteral&lt;List&lt;SomeBean&gt;&gt; keeps the captured type unambiguous
 * instead of relying on diamond inference.
 */
public SyntheticBean create(SyntheticCreationalContext<SyntheticBean> context) {
    return new SyntheticBean(context.getInjectedReference(new TypeLiteral<List<SomeBean>>() {
    }, All.Literal.INSTANCE));
}
return new SyntheticBean(context.getInjectedReference(new TypeLiteral<>() {
/**
 * Builds the synthetic bean from the injected list of all SomeBean instances.
 */
public SyntheticBean create(SyntheticCreationalContext<SyntheticBean> context) {
    // Name the type literal so the injection call stays on one short line.
    TypeLiteral<List<SomeBean>> allSomeBeans = new TypeLiteral<List<SomeBean>>() {
    };
    return new SyntheticBean(context.getInjectedReference(allSomeBeans, All.Literal.INSTANCE));
}
// Creator for the synthetic bean. NOTE(review): the overridden create(...)
// body is absent here — this appears to be an extraction artifact of the
// review context, leaving a dangling @Override.
class SynthBeanCreator implements BeanCreator<SyntheticBean> {
    @Override
}
// Creator for the synthetic bean. NOTE(review): the overridden create(...)
// body is absent here — this appears to be an extraction artifact of the
// review context, leaving a dangling @Override.
class SynthBeanCreator implements BeanCreator<SyntheticBean> {
    @Override
}
> Perhaps we should worry more about how many tasks can concurrently access HMS than about how often they access it. Maybe we need to introduce a new mechanism by which the source can notify all the join subtasks when to update in Flink SQL.
/**
 * Validates the lookup-related table options and derives the interval at which
 * the hive table is reloaded.
 * <p>
 * Streaming source: the consume-start-offset option must not be combined with
 * 'latest' partition include, and a monitor interval shorter than the
 * recommended default only triggers a warning, since frequent reloads put
 * pressure on the hive metastore. Batch source: only 'all' partitions are
 * supported, and the reload interval comes from the lookup join cache TTL.
 */
private void validateLookupConfigurations() {
    String partitionInclude = configuration.get(STREAMING_SOURCE_PARTITION_INCLUDE);
    if (isStreamingSource()) {
        Preconditions.checkArgument(
                !configuration.contains(STREAMING_SOURCE_CONSUME_START_OFFSET),
                String.format(
                        "The '%s' is not supported when set '%s' to 'latest'",
                        STREAMING_SOURCE_CONSUME_START_OFFSET.key(),
                        STREAMING_SOURCE_PARTITION_INCLUDE.key()));
        Duration monitorInterval = configuration.get(STREAMING_SOURCE_MONITOR_INTERVAL) == null
                ? DEFAULT_LOOKUP_MONITOR_INTERVAL
                : configuration.get(STREAMING_SOURCE_MONITOR_INTERVAL);
        if (monitorInterval.toMillis() < DEFAULT_LOOKUP_MONITOR_INTERVAL.toMillis()) {
            // Message fix: the old text said the recommended value "is bigger
            // than default value", contradicting it being a lower bound.
            LOG.warn(String.format(
                    "Currently the recommended value of '%s' is at least '%s' when set '%s' to 'latest',"
                            + " but actual is '%s', this may produce big pressure to hive metastore.",
                    STREAMING_SOURCE_MONITOR_INTERVAL.key(),
                    DEFAULT_LOOKUP_MONITOR_INTERVAL.toMillis(),
                    STREAMING_SOURCE_PARTITION_INCLUDE.key(),
                    monitorInterval.toMillis()));
        }
        hiveTableReloadInterval = monitorInterval;
    } else {
        Preconditions.checkArgument(
                "all".equals(partitionInclude),
                String.format("The only supported %s for lookup is '%s' in batch source,"
                                + " but actual is '%s'",
                        STREAMING_SOURCE_PARTITION_INCLUDE.key(), "all", partitionInclude));
        hiveTableReloadInterval = configuration.get(LOOKUP_JOIN_CACHE_TTL);
    }
}
"Currently the recommended value of '%s' is bigger than default value '%s' " +
/**
 * Validates the lookup-related table options and derives how often the hive
 * table is reloaded for lookup joins.
 */
private void validateLookupConfigurations() {
    String partitionInclude = configuration.get(STREAMING_SOURCE_PARTITION_INCLUDE);
    if (!isStreamingSource()) {
        // Batch source: only the 'all' partition-include mode is supported, and
        // the reload interval is taken from the lookup join cache TTL.
        Preconditions.checkArgument(
                "all".equals(partitionInclude),
                String.format("The only supported %s for lookup is '%s' in batch source,"
                                + " but actual is '%s'",
                        STREAMING_SOURCE_PARTITION_INCLUDE.key(), "all", partitionInclude));
        hiveTableReloadInterval = configuration.get(LOOKUP_JOIN_CACHE_TTL);
        return;
    }
    // Streaming source: a consume-start-offset cannot be combined with 'latest'.
    Preconditions.checkArgument(
            !configuration.contains(STREAMING_SOURCE_CONSUME_START_OFFSET),
            String.format(
                    "The '%s' is not supported when set '%s' to 'latest'",
                    STREAMING_SOURCE_CONSUME_START_OFFSET.key(),
                    STREAMING_SOURCE_PARTITION_INCLUDE.key()));
    Duration monitorInterval = configuration.get(STREAMING_SOURCE_MONITOR_INTERVAL);
    if (monitorInterval == null) {
        monitorInterval = DEFAULT_LOOKUP_MONITOR_INTERVAL;
    }
    if (monitorInterval.toMillis() < DEFAULT_LOOKUP_MONITOR_INTERVAL.toMillis()) {
        // Reloading more often than the recommended default stresses the metastore.
        LOG.warn(String.format(
                "Currently the recommended value of '%s' is at least '%s' when set '%s' to 'latest',"
                        + " but actual is '%s', this may produce big pressure to hive metastore.",
                STREAMING_SOURCE_MONITOR_INTERVAL.key(),
                DEFAULT_LOOKUP_MONITOR_INTERVAL.toMillis(),
                STREAMING_SOURCE_PARTITION_INCLUDE.key(),
                monitorInterval.toMillis()));
    }
    hiveTableReloadInterval = monitorInterval;
}
class HiveLookupTableSource extends HiveTableSource implements LookupTableSource { private static final Logger LOG = LoggerFactory.getLogger(HiveLookupTableSource.class); private static final Duration DEFAULT_LOOKUP_MONITOR_INTERVAL = Duration.ofHours(1L); private final Configuration configuration; private Duration hiveTableReloadInterval; public HiveLookupTableSource( JobConf jobConf, ReadableConfig flinkConf, ObjectPath tablePath, CatalogTable catalogTable) { super(jobConf, flinkConf, tablePath, catalogTable); this.configuration = new Configuration(); catalogTable.getOptions().forEach(configuration::setString); validateLookupConfigurations(); } @Override public LookupRuntimeProvider getLookupRuntimeProvider(LookupContext context) { return TableFunctionProvider.of(getLookupFunction(context.getKeys())); } @VisibleForTesting TableFunction<RowData> getLookupFunction(int[][] keys) { int[] keyIndices = new int[keys.length]; int i = 0; for (int[] key : keys) { if (key.length > 1) { throw new UnsupportedOperationException("Hive lookup can not support nested key now."); } keyIndices[i] = key[0]; i++; } return getLookupFunction(keyIndices); } private TableFunction<RowData> getLookupFunction(int[] keys) { final String defaultPartitionName = jobConf.get(HiveConf.ConfVars.DEFAULTPARTITIONNAME.varname, HiveConf.ConfVars.DEFAULTPARTITIONNAME.defaultStrVal); PartitionFetcher.Context<HiveTablePartition> fetcherContext = new HiveTablePartitionFetcherContext( tablePath, hiveShim, new JobConfWrapper(jobConf), catalogTable.getPartitionKeys(), getProducedTableSchema().getFieldDataTypes(), getProducedTableSchema().getFieldNames(), configuration, defaultPartitionName); final PartitionFetcher<HiveTablePartition> partitionFetcher; final ObjectPath tableFullPath = tablePath; if (catalogTable.getPartitionKeys().isEmpty()) { partitionFetcher = context -> { List<HiveTablePartition> partValueList = new ArrayList<>(); partValueList.add(context .getPartition(new ArrayList<>()) .orElseThrow(() -> 
new IllegalArgumentException( String.format("Fetch partition fail for hive table %s.", tableFullPath))) ); return partValueList; }; } else if (isStreamingSource()) { partitionFetcher = context -> { List<HiveTablePartition> partValueList = new ArrayList<>(); List<PartitionFetcher.Context.ComparablePartitionValue> comparablePartitionValues = context.getComparablePartitionValueList(); if (comparablePartitionValues.size() > 0) { comparablePartitionValues.sort((o1, o2) -> o2.getComparator().compareTo(o1.getComparator())); PartitionFetcher.Context.ComparablePartitionValue maxPartition = comparablePartitionValues.get(0); partValueList.add(context .getPartition((List<String>) maxPartition.getPartitionValue()) .orElseThrow(() -> new IllegalArgumentException( String.format("Fetch partition fail for hive table %s.", tableFullPath))) ); } else { throw new IllegalArgumentException( String.format("At least one partition is required when set '%s' to 'latest' in temporal join," + " but actual partition number is '%s' for hive table %s", STREAMING_SOURCE_PARTITION_INCLUDE.key(), comparablePartitionValues.size(), tableFullPath)); } return partValueList; }; } else { partitionFetcher = context -> { List<HiveTablePartition> partValueList = new ArrayList<>(); List<PartitionFetcher.Context.ComparablePartitionValue> comparablePartitionValues = context.getComparablePartitionValueList(); for (PartitionFetcher.Context.ComparablePartitionValue comparablePartitionValue : comparablePartitionValues) { partValueList.add(context .getPartition((List<String>) comparablePartitionValue.getPartitionValue()) .orElseThrow(() -> new IllegalArgumentException( String.format("Fetch partition fail for hive table %s.", tableFullPath))) ); } return partValueList; }; } PartitionReader<HiveTablePartition, RowData> partitionReader = new HiveInputFormatPartitionReader( jobConf, hiveVersion, tablePath, getProducedTableSchema().getFieldDataTypes(), getProducedTableSchema().getFieldNames(), 
catalogTable.getPartitionKeys(), projectedFields, flinkConf.get(HiveOptions.TABLE_EXEC_HIVE_FALLBACK_MAPRED_READER)); return new FileSystemLookupFunction<>( partitionFetcher, fetcherContext, partitionReader, (RowType) getProducedTableSchema().toRowDataType().getLogicalType(), keys, hiveTableReloadInterval); } /** * PartitionFetcher.Context for {@link HiveTablePartition}. */ static class HiveTablePartitionFetcherContext extends HivePartitionFetcherContextBase<HiveTablePartition> { private static final long serialVersionUID = 1L; public HiveTablePartitionFetcherContext( ObjectPath tablePath, HiveShim hiveShim, JobConfWrapper confWrapper, List<String> partitionKeys, DataType[] fieldTypes, String[] fieldNames, Configuration configuration, String defaultPartitionName) { super( tablePath, hiveShim, confWrapper, partitionKeys, fieldTypes, fieldNames, configuration, defaultPartitionName); } @Override public Optional<HiveTablePartition> getPartition(List<String> partValues) throws Exception { Preconditions.checkArgument( partitionKeys.size() == partValues.size(), String.format( "The partition keys length should equal to partition values length, " + "but partition keys length is %s and partition values length is %s", partitionKeys.size(), partValues.size())); if (partitionKeys.isEmpty()) { return Optional.of(new HiveTablePartition(tableSd, tableProps)); } else { try { Partition partition = metaStoreClient.getPartition( tablePath.getDatabaseName(), tablePath.getObjectName(), partValues); HiveTablePartition hiveTablePartition = HivePartitionUtils.toHiveTablePartition( partitionKeys, fieldNames, fieldTypes, hiveShim, tableProps, defaultPartitionName, partition); return Optional.of(hiveTablePartition); } catch (NoSuchObjectException e) { return Optional.empty(); } } } } }
class HiveLookupTableSource extends HiveTableSource implements LookupTableSource { private static final Logger LOG = LoggerFactory.getLogger(HiveLookupTableSource.class); private static final Duration DEFAULT_LOOKUP_MONITOR_INTERVAL = Duration.ofHours(1L); private final Configuration configuration; private Duration hiveTableReloadInterval; public HiveLookupTableSource( JobConf jobConf, ReadableConfig flinkConf, ObjectPath tablePath, CatalogTable catalogTable) { super(jobConf, flinkConf, tablePath, catalogTable); this.configuration = new Configuration(); catalogTable.getOptions().forEach(configuration::setString); validateLookupConfigurations(); } @Override public LookupRuntimeProvider getLookupRuntimeProvider(LookupContext context) { return TableFunctionProvider.of(getLookupFunction(context.getKeys())); } @VisibleForTesting TableFunction<RowData> getLookupFunction(int[][] keys) { int[] keyIndices = new int[keys.length]; int i = 0; for (int[] key : keys) { if (key.length > 1) { throw new UnsupportedOperationException("Hive lookup can not support nested key now."); } keyIndices[i] = key[0]; i++; } return getLookupFunction(keyIndices); } private TableFunction<RowData> getLookupFunction(int[] keys) { final String defaultPartitionName = jobConf.get(HiveConf.ConfVars.DEFAULTPARTITIONNAME.varname, HiveConf.ConfVars.DEFAULTPARTITIONNAME.defaultStrVal); PartitionFetcher.Context<HiveTablePartition> fetcherContext = new HiveTablePartitionFetcherContext( tablePath, hiveShim, new JobConfWrapper(jobConf), catalogTable.getPartitionKeys(), getProducedTableSchema().getFieldDataTypes(), getProducedTableSchema().getFieldNames(), configuration, defaultPartitionName); final PartitionFetcher<HiveTablePartition> partitionFetcher; final ObjectPath tableFullPath = tablePath; if (catalogTable.getPartitionKeys().isEmpty()) { partitionFetcher = context -> { List<HiveTablePartition> partValueList = new ArrayList<>(); partValueList.add(context .getPartition(new ArrayList<>()) .orElseThrow(() -> 
new IllegalArgumentException( String.format("Fetch partition fail for hive table %s.", tableFullPath))) ); return partValueList; }; } else if (isStreamingSource()) { partitionFetcher = context -> { List<HiveTablePartition> partValueList = new ArrayList<>(); List<PartitionFetcher.Context.ComparablePartitionValue> comparablePartitionValues = context.getComparablePartitionValueList(); if (comparablePartitionValues.size() > 0) { comparablePartitionValues.sort((o1, o2) -> o2.getComparator().compareTo(o1.getComparator())); PartitionFetcher.Context.ComparablePartitionValue maxPartition = comparablePartitionValues.get(0); partValueList.add(context .getPartition((List<String>) maxPartition.getPartitionValue()) .orElseThrow(() -> new IllegalArgumentException( String.format("Fetch partition fail for hive table %s.", tableFullPath))) ); } else { throw new IllegalArgumentException( String.format("At least one partition is required when set '%s' to 'latest' in temporal join," + " but actual partition number is '%s' for hive table %s", STREAMING_SOURCE_PARTITION_INCLUDE.key(), comparablePartitionValues.size(), tableFullPath)); } return partValueList; }; } else { partitionFetcher = context -> { List<HiveTablePartition> partValueList = new ArrayList<>(); List<PartitionFetcher.Context.ComparablePartitionValue> comparablePartitionValues = context.getComparablePartitionValueList(); for (PartitionFetcher.Context.ComparablePartitionValue comparablePartitionValue : comparablePartitionValues) { partValueList.add(context .getPartition((List<String>) comparablePartitionValue.getPartitionValue()) .orElseThrow(() -> new IllegalArgumentException( String.format("Fetch partition fail for hive table %s.", tableFullPath))) ); } return partValueList; }; } PartitionReader<HiveTablePartition, RowData> partitionReader = new HiveInputFormatPartitionReader( jobConf, hiveVersion, tablePath, getProducedTableSchema().getFieldDataTypes(), getProducedTableSchema().getFieldNames(), 
catalogTable.getPartitionKeys(), projectedFields, flinkConf.get(HiveOptions.TABLE_EXEC_HIVE_FALLBACK_MAPRED_READER)); return new FileSystemLookupFunction<>( partitionFetcher, fetcherContext, partitionReader, (RowType) getProducedTableSchema().toRowDataType().getLogicalType(), keys, hiveTableReloadInterval); } /** * PartitionFetcher.Context for {@link HiveTablePartition}. */ static class HiveTablePartitionFetcherContext extends HivePartitionFetcherContextBase<HiveTablePartition> { private static final long serialVersionUID = 1L; public HiveTablePartitionFetcherContext( ObjectPath tablePath, HiveShim hiveShim, JobConfWrapper confWrapper, List<String> partitionKeys, DataType[] fieldTypes, String[] fieldNames, Configuration configuration, String defaultPartitionName) { super( tablePath, hiveShim, confWrapper, partitionKeys, fieldTypes, fieldNames, configuration, defaultPartitionName); } @Override public Optional<HiveTablePartition> getPartition(List<String> partValues) throws Exception { Preconditions.checkArgument( partitionKeys.size() == partValues.size(), String.format( "The partition keys length should equal to partition values length, " + "but partition keys length is %s and partition values length is %s", partitionKeys.size(), partValues.size())); if (partitionKeys.isEmpty()) { return Optional.of(new HiveTablePartition(tableSd, tableProps)); } else { try { Partition partition = metaStoreClient.getPartition( tablePath.getDatabaseName(), tablePath.getObjectName(), partValues); HiveTablePartition hiveTablePartition = HivePartitionUtils.toHiveTablePartition( partitionKeys, fieldNames, fieldTypes, hiveShim, tableProps, defaultPartitionName, partition); return Optional.of(hiveTablePartition); } catch (NoSuchObjectException e) { return Optional.empty(); } } } } }
Let's invert this case and just have if -> throw and everything else outside of an if/else block
private void unpackAndValidateId(String keyId) { if (keyId != null && keyId.length() > 0) { try { URL url = new URL(keyId); String[] tokens = url.getPath().split("/"); String endpoint = url.getProtocol() + ": String keyName = (tokens.length >= 3 ? tokens[2] : null); version = (tokens.length >= 4 ? tokens[3] : null); if(Strings.isNullOrEmpty(endpoint)) { throw new IllegalArgumentException("Key endpoint in key id is invalid"); } else if (Strings.isNullOrEmpty(keyName)) { throw new IllegalArgumentException("Key name in key id is invalid"); } else if(Strings.isNullOrEmpty(version)) { throw new IllegalArgumentException("Key version in key id is invalid"); } } catch (MalformedURLException e) { e.printStackTrace(); } } else { throw new IllegalArgumentException("Key Id is invalid"); } }
if (keyId != null && keyId.length() > 0) {
private void unpackAndValidateId(String keyId) { if (ImplUtils.isNullOrEmpty(keyId)) { throw new IllegalArgumentException("Key Id is invalid"); } try { URL url = new URL(keyId); String[] tokens = url.getPath().split("/"); String endpoint = url.getProtocol() + ": String keyName = (tokens.length >= 3 ? tokens[2] : null); String version = (tokens.length >= 4 ? tokens[3] : null); if (Strings.isNullOrEmpty(endpoint)) { throw new IllegalArgumentException("Key endpoint in key id is invalid"); } else if (Strings.isNullOrEmpty(keyName)) { throw new IllegalArgumentException("Key name in key id is invalid"); } else if (Strings.isNullOrEmpty(version)) { throw new IllegalArgumentException("Key version in key id is invalid"); } } catch (MalformedURLException e) { throw new IllegalArgumentException("The key identifier is malformed", e); } }
class CryptographyAsyncClient { private JsonWebKey key; private CryptographyService service; private String version; private EcKeyCryptographyClient ecKeyCryptographyClient; private RsaKeyCryptographyClient rsaKeyCryptographyClient; private CryptographyServiceClient cryptographyServiceClient; private SymmetricKeyCryptographyClient symmetricKeyCryptographyClient; private final ClientLogger logger = new ClientLogger(CryptographyAsyncClient.class); /** * Creates a CryptographyAsyncClient that uses {@code pipeline} to service requests * * @param key the JsonWebKey to use for cryptography operations. * @param pipeline HttpPipeline that the HTTP requests and responses flow through. */ CryptographyAsyncClient(JsonWebKey key, HttpPipeline pipeline) { Objects.requireNonNull(key); this.key = key; service = RestProxy.create(CryptographyService.class, pipeline); if(!Strings.isNullOrEmpty(key.kid())) { unpackAndValidateId(key.kid()); cryptographyServiceClient = new CryptographyServiceClient(key.kid(), service); } initializeCryptoClients(); } /** * Creates a CryptographyAsyncClient that uses {@code pipeline} to service requests * * @param kid THe Azure Key vault key identifier to use for cryptography operations. * @param pipeline HttpPipeline that the HTTP requests and responses flow through. 
*/ CryptographyAsyncClient(String kid, HttpPipeline pipeline) { unpackAndValidateId(kid); service = RestProxy.create(CryptographyService.class, pipeline); cryptographyServiceClient = new CryptographyServiceClient(kid, service); ecKeyCryptographyClient = new EcKeyCryptographyClient(cryptographyServiceClient); rsaKeyCryptographyClient = new RsaKeyCryptographyClient(cryptographyServiceClient); symmetricKeyCryptographyClient = new SymmetricKeyCryptographyClient(cryptographyServiceClient); } private void initializeCryptoClients() { switch(key.kty()){ case RSA: case RSA_HSM: rsaKeyCryptographyClient = new RsaKeyCryptographyClient(key, cryptographyServiceClient); break; case EC: case EC_HSM: ecKeyCryptographyClient = new EcKeyCryptographyClient(key, cryptographyServiceClient); break; case OCT: symmetricKeyCryptographyClient = new SymmetricKeyCryptographyClient(key, cryptographyServiceClient); break; default: throw new IllegalArgumentException(String.format("The Json Web Key Type: %s is not supported.", key.kty().toString())); } } /** * Gets the public part of the configured key. The get key operation is applicable to all key types and it requires the {@code keys/get} permission. * * @throws ResourceNotFoundException when the configured key doesn't exist in the key vault. * @return A {@link Mono} containing a {@link Response} whose {@link Response */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<Response<Key>> getKey() { return withContext(context -> getKey(context)); } Mono<Response<Key>> getKey(Context context) { return cryptographyServiceClient.getKey(context); } /** * Encrypts an arbitrary sequence of bytes using the configured key. Note that the encrypt operation only supports a * single block of data, the size of which is dependent on the target key and the encryption algorithm to be used. The encrypt * operation is supported for both symmetric keys and asymmetric keys. In case of asymmetric keys public portion of the key is used * for encryption. 
This operation requires the keys/encrypt permission. * * <p>The {@link EncryptionAlgorithm encryption algorithm} indicates the type of algorithm to use for decrypting the specified encrypted content. Possible values * for assymetric keys include: {@link EncryptionAlgorithm * Possible values for symmetric keys include: {@link EncryptionAlgorithm * {@link EncryptionAlgorithm * * @param algorithm The algorithm to be used for encryption. * @param plaintext The content to be encrypted. * @throws ResourceNotFoundException if the key cannot be found for encryption. * @return A {@link Mono} containing a {@link EncryptResult} whose {@link EncryptResult */ public Mono<EncryptResult> encrypt(EncryptionAlgorithm algorithm, byte[] plaintext) { return withContext(context -> encrypt(algorithm, plaintext, context, null, null)); } /** * Encrypts an arbitrary sequence of bytes using the configured key. Note that the encrypt operation only supports a * single block of data, the size of which is dependent on the target key and the encryption algorithm to be used. The encrypt * operation is supported for both symmetric keys and asymmetric keys. In case of asymmetric keys public portion of the key is used * for encryption. This operation requires the keys/encrypt permission. * * <p>The {@link EncryptionAlgorithm encryption algorithm} indicates the type of algorithm to use for decrypting the specified encrypted content. Possible values * for assymetric keys include: {@link EncryptionAlgorithm * Possible values for symmetric keys include: {@link EncryptionAlgorithm * {@link EncryptionAlgorithm * * @param algorithm The algorithm to be used for encryption. * @param plaintext The content to be encrypted. * @param iv The initialization vector * @param authenticationData The authentication data * @throws ResourceNotFoundException if the key cannot be found for encryption. 
* @return A {@link Mono} containing a {@link EncryptResult} whose {@link EncryptResult */ public Mono<EncryptResult> encrypt(EncryptionAlgorithm algorithm, byte[] plaintext, byte[] iv, byte[] authenticationData) { return withContext(context -> encrypt(algorithm, plaintext, context, iv, authenticationData)); } Mono<EncryptResult> encrypt(EncryptionAlgorithm algorithm, byte[] plaintext, Context context, byte[] iv, byte[] authenticationData) { Objects.requireNonNull(algorithm); boolean keyAvailableLocally = ensureValidKeyAvailable(); if(!keyAvailableLocally) { return cryptographyServiceClient.encrypt(algorithm, plaintext, context); } if (!checkKeyPermissions(this.key.keyOps(), KeyOperation.ENCRYPT)){ return Mono.error(new UnsupportedOperationException(String.format("Encrypt Operation is not supported for key with id %s", key.kid()))); } switch(key.kty()){ case RSA: case RSA_HSM: return rsaKeyCryptographyClient.encryptAsync(algorithm, plaintext, iv, authenticationData, context, key); case EC: case EC_HSM: return ecKeyCryptographyClient.encryptAsync(algorithm, plaintext, iv, authenticationData, context, key); case OCT: return symmetricKeyCryptographyClient.encryptAsync(algorithm, plaintext, iv, authenticationData, context, key); default: throw new UnsupportedOperationException(String.format("Encrypt Async is not allowed for Key Type: %s", key.kty().toString())); } } /** * Decrypts a single block of encrypted data using the configured key and specified algorithm. Note that only a single block of data may be * decrypted, the size of this block is dependent on the target key and the algorithm to be used. The decrypt operation * is supported for both asymmetric and symmetric keys. This operation requires the keys/decrypt permission. * * <p>The {@link EncryptionAlgorithm encryption algorithm} indicates the type of algorithm to use for decrypting the specified encrypted content. 
Possible values * for assymetric keys include: {@link EncryptionAlgorithm * Possible values for symmetric keys include: {@link EncryptionAlgorithm * {@link EncryptionAlgorithm * * @param algorithm The algorithm to be used for decryption. * @param cipherText The content to be decrypted. * @throws ResourceNotFoundException if the key cannot be found for decryption. * @return A {@link Mono} containing the decrypted blob. */ public Mono<DecryptResult> decrypt(EncryptionAlgorithm algorithm, byte[] cipherText) { return withContext(context -> decrypt(algorithm, cipherText, null, null, null, context)); } /** * Decrypts a single block of encrypted data using the configured key and specified algorithm. Note that only a single block of data may be * decrypted, the size of this block is dependent on the target key and the algorithm to be used. The decrypt operation * is supported for both asymmetric and symmetric keys. This operation requires the keys/decrypt permission. * * <p>The {@link EncryptionAlgorithm encryption algorithm} indicates the type of algorithm to use for decrypting the specified encrypted content. Possible values * for assymetric keys include: {@link EncryptionAlgorithm * Possible values for symmetric keys include: {@link EncryptionAlgorithm * {@link EncryptionAlgorithm * * @param algorithm The algorithm to be used for decryption. * @param cipherText The content to be decrypted. * @throws ResourceNotFoundException if the key cannot be found for decryption. * @return A {@link Mono} containing the decrypted blob. 
*/ public Mono<DecryptResult> decrypt(EncryptionAlgorithm algorithm, byte[] cipherText, byte[] iv, byte[] authenticationData, byte[] authenticationTag) { return withContext(context -> decrypt(algorithm, cipherText, iv, authenticationData, authenticationTag, context)); } Mono<DecryptResult> decrypt(EncryptionAlgorithm algorithm, byte[] cipherText, byte[] iv, byte[] authenticationData, byte[] authenticationTag, Context context) { Objects.requireNonNull(algorithm); boolean keyAvailableLocally = ensureValidKeyAvailable(); if(!keyAvailableLocally) { return cryptographyServiceClient.decrypt(algorithm, cipherText, context); } if (!checkKeyPermissions(this.key.keyOps(), KeyOperation.DECRYPT)){ return Mono.error(new UnsupportedOperationException(String.format("Decrypt Operation is not allowed for key with id %s", key.kid()))); } switch(key.kty()){ case RSA: case RSA_HSM: return rsaKeyCryptographyClient.decryptAsync(algorithm, cipherText, iv, authenticationData, authenticationTag, context, key); case EC: case EC_HSM: return ecKeyCryptographyClient.decryptAsync(algorithm, cipherText, iv, authenticationData, authenticationTag, context, key); case OCT: return symmetricKeyCryptographyClient.decryptAsync(algorithm, cipherText, iv, authenticationData, authenticationTag, context, key); default: return Mono.error(new UnsupportedOperationException(String.format("Decrypt operation is not supported for Key Type: %s", key.kty().toString()))); } } /** * Creates a signature from a digest using the configured key. The sign operation supports both asymmetric and * symmetric keys. This operation requires the keys/sign permission. * * <p>The {@link SignatureAlgorithm signature algorithm} indicates the type of algorithm to use to create the signature from the digest. Possible values include: * {@link SignatureAlgorithm * {@link SignatureAlgorithm * {@link SignatureAlgorithm * {@link SignatureAlgorithm * * @param algorithm The algorithm to use for signing. 
* @param digest The content from which signature is to be created. * @throws ResourceNotFoundException if the key cannot be found for signing. * @return A {@link Mono} containing a {@link SignResult} whose {@link SignResult */ public Mono<SignResult> sign(SignatureAlgorithm algorithm, byte[] digest) { return withContext(context -> sign(algorithm, digest, context)); } Mono<SignResult> sign(SignatureAlgorithm algorithm, byte[] digest, Context context) { Objects.requireNonNull(algorithm); boolean keyAvailableLocally = ensureValidKeyAvailable(); if(!keyAvailableLocally) { return cryptographyServiceClient.sign(algorithm, digest, context); } if (!checkKeyPermissions(this.key.keyOps(), KeyOperation.SIGN)){ return Mono.error(new UnsupportedOperationException(String.format("Sign Operation is not allowed for key with id %s", key.kid()))); } switch(this.key.kty()){ case RSA: case RSA_HSM: return rsaKeyCryptographyClient.signAsync(algorithm, digest, context, key); case EC: case EC_HSM: return ecKeyCryptographyClient.signAsync(algorithm, digest, context, key); case OCT: return symmetricKeyCryptographyClient.signAsync(algorithm, digest, context, key); default: return Mono.error(new UnsupportedOperationException(String.format("Sign operaiton is not supported for Key Type: %s", key.kty().toString()))); } } /** * Verifies a signature using the configured key. The verify operation supports both symmetric keys and asymmetric keys. * In case of asymmetric keys public portion of the key is used to verify the signature . This operation requires the keys/verify permission. * * <p>The {@link SignatureAlgorithm signature algorithm} indicates the type of algorithm to use to verify the signature. Possible values include: * {@link SignatureAlgorithm * {@link SignatureAlgorithm * {@link SignatureAlgorithm * {@link SignatureAlgorithm * * @param algorithm The algorithm to use for signing. * @param digest The content from which signature is to be created. 
* @param signature The signature to be verified. * @throws ResourceNotFoundException if the key cannot be found for verifying. * @return A {@link Mono} containing a {@link Boolean} indicating the signature verification result. */ public Mono<VerifyResult> verify(SignatureAlgorithm algorithm, byte[] digest, byte[] signature) { return withContext(context -> verify(algorithm, digest, signature, context)); } Mono<VerifyResult> verify(SignatureAlgorithm algorithm, byte[] digest, byte[] signature, Context context) { boolean keyAvailableLocally = ensureValidKeyAvailable(); if(!keyAvailableLocally) { return cryptographyServiceClient.verify(algorithm, digest, signature, context); } if (!checkKeyPermissions(this.key.keyOps(), KeyOperation.VERIFY)){ return Mono.error(new UnsupportedOperationException(String.format("Verify Operation is not allowed for key with id %s", key.kid()))); } switch(this.key.kty()){ case RSA: case RSA_HSM: return rsaKeyCryptographyClient.verifyAsync(algorithm, digest, signature, context, key); case EC: case EC_HSM: return ecKeyCryptographyClient.verifyAsync(algorithm, digest, signature, context, key); case OCT: return symmetricKeyCryptographyClient.verifyAsync(algorithm, digest, signature, context, key); default: return Mono.error(new UnsupportedOperationException(String.format("Verify operation is not supported for Key Type: %s", key.kty().toString()))); } } /** * Wraps a symmetric key using the configured key. The wrap operation supports wrapping a symmetric key with both * symmetric and asymmetric keys. This operation requires the keys/wrapKey permission. * * <p>The {@link KeyWrapAlgorithm wrap algorithm} indicates the type of algorithm to use for wrapping the specified key content. Possible values include: * {@link KeyWrapAlgorithm * * @param algorithm The encryption algorithm to use for wrapping the key. * @param key The key content to be wrapped * @throws ResourceNotFoundException if the key cannot be found for wrap operation. 
* @return A {@link Mono} containing a {@link KeyWrapResult} whose {@link KeyWrapResult */ public Mono<KeyWrapResult> wrapKey(KeyWrapAlgorithm algorithm, byte[] key) { return withContext(context -> wrapKey(algorithm, key, context)); } Mono<KeyWrapResult> wrapKey(KeyWrapAlgorithm algorithm, byte[] key, Context context) { boolean keyAvailableLocally = ensureValidKeyAvailable(); if(!keyAvailableLocally) { return cryptographyServiceClient.wrapKey(algorithm, key, context); } if (!checkKeyPermissions(this.key.keyOps(), KeyOperation.WRAP_KEY)){ return Mono.error(new UnsupportedOperationException(String.format("Wrap Key Operation is not allowed for key with id %s", this.key.kid()))); } switch(this.key.kty()){ case RSA: case RSA_HSM: return rsaKeyCryptographyClient.wrapKeyAsync(algorithm, key, context, this.key); case EC: case EC_HSM: return ecKeyCryptographyClient.wrapKeyAsync(algorithm, key, context, this.key); case OCT: return symmetricKeyCryptographyClient.wrapKeyAsync(algorithm, key, context, this.key); default: return Mono.error(new UnsupportedOperationException(String.format("Encrypt Async is not supported for Key Type: %s", this.key.kty().toString()))); } } /** * Unwraps a symmetric key using the configured key that was initially used for wrapping that key. This operation is the reverse of the wrap operation. * The unwrap operation supports asymmetric and symmetric keys to unwrap. This operation requires the keys/unwrapKey permission. * * <p>The {@link KeyWrapAlgorithm wrap algorithm} indicates the type of algorithm to use for wrapping the specified key content. Possible values for asymmetric keys include: * {@link KeyWrapAlgorithm * Possible values for symmetric keys include: {@link KeyWrapAlgorithm * * @param algorithm The encryption algorithm to use for wrapping the key. * @param encryptedKey The encrypted key content to unwrap. * @throws ResourceNotFoundException if the key cannot be found for wrap operation. 
* @return A {@link Mono} containing a the unwrapped key content. */ public Mono<KeyUnwrapResult> unwrapKey(KeyWrapAlgorithm algorithm, byte[] encryptedKey) { return withContext(context -> unwrapKey(algorithm, encryptedKey, context)); } Mono<KeyUnwrapResult> unwrapKey(KeyWrapAlgorithm algorithm, byte[] encryptedKey, Context context) { Objects.requireNonNull(algorithm); boolean keyAvailableLocally = ensureValidKeyAvailable(); if(!keyAvailableLocally) { return cryptographyServiceClient.unwrapKey(algorithm, encryptedKey, context); } if (!checkKeyPermissions(this.key.keyOps(), KeyOperation.WRAP_KEY)){ return Mono.error(new UnsupportedOperationException(String.format("Unwrap Key Operation is not allowed for key with id %s", this.key.kid()))); } switch(this.key.kty()){ case RSA: case RSA_HSM: return rsaKeyCryptographyClient.unwrapKeyAsync(algorithm, encryptedKey, context, key); case EC: case EC_HSM: return ecKeyCryptographyClient.unwrapKeyAsync(algorithm, encryptedKey, context, key); case OCT: return symmetricKeyCryptographyClient.unwrapKeyAsync(algorithm, encryptedKey, context, key); default: return Mono.error(new UnsupportedOperationException(String.format("Encrypt Async is not supported for Key Type: %s", key.kty().toString()))); } } /** * Creates a signature from the raw data using the configured key. The sign data operation supports both asymmetric and * symmetric keys. This operation requires the keys/sign permission. * * <p>The {@link SignatureAlgorithm signature algorithm} indicates the type of algorithm to use to create the signature from the digest. Possible values include: * {@link SignatureAlgorithm * {@link SignatureAlgorithm * {@link SignatureAlgorithm * {@link SignatureAlgorithm * * @param algorithm The algorithm to use for signing. * @param data The content from which signature is to be created. * @throws ResourceNotFoundException if the key cannot be found for signing. 
* @return A {@link Mono} containing a {@link SignResult} whose {@link SignResult */ public Mono<SignResult> signData(SignatureAlgorithm algorithm, byte[] data) { return withContext(context -> signData(algorithm, data, context)); } Mono<SignResult> signData(SignatureAlgorithm algorithm, byte[] data, Context context) { Objects.requireNonNull(algorithm); boolean keyAvailableLocally = ensureValidKeyAvailable(); if(!keyAvailableLocally) { return cryptographyServiceClient.signData(algorithm, data, context); } if (!checkKeyPermissions(this.key.keyOps(), KeyOperation.SIGN)){ return Mono.error(new UnsupportedOperationException(String.format("Sign Operation is not allowed for key with id %s", this.key.kid()))); } switch(this.key.kty()){ case RSA: case RSA_HSM: return rsaKeyCryptographyClient.signDataAsync(algorithm, data, context, key); case EC: case EC_HSM: return ecKeyCryptographyClient.signDataAsync(algorithm, data, context, key); default: return Mono.error(new UnsupportedOperationException(String.format("Encrypt Async is not supported for Key Type: %s", key.kty().toString()))); } } /** * Verifies a signature against the raw data using the configured key. The verify operation supports both symmetric keys and asymmetric keys. * In case of asymmetric keys public portion of the key is used to verify the signature . This operation requires the keys/verify permission. * * <p>The {@link SignatureAlgorithm signature algorithm} indicates the type of algorithm to use to verify the signature. Possible values include: * {@link SignatureAlgorithm * {@link SignatureAlgorithm * {@link SignatureAlgorithm * {@link SignatureAlgorithm * * @param algorithm The algorithm to use for signing. * @param data The raw content against which signature is to be verified. * @param signature The signature to be verified. * @throws ResourceNotFoundException if the key cannot be found for verifying. * @return The {@link Boolean} indicating the signature verification result. 
*/ public Mono<VerifyResult> verifyData(SignatureAlgorithm algorithm, byte[] data, byte[] signature) { return withContext(context -> verifyData(algorithm, data, signature, context)); } Mono<VerifyResult> verifyData(SignatureAlgorithm algorithm, byte[] data, byte[] signature, Context context) { Objects.requireNonNull(algorithm); boolean keyAvailableLocally = ensureValidKeyAvailable(); if(!keyAvailableLocally) { return cryptographyServiceClient.verifyData(algorithm, data, signature, context); } if (!checkKeyPermissions(this.key.keyOps(), KeyOperation.VERIFY)){ return Mono.error(new UnsupportedOperationException(String.format("Verify Operation is not allowed for key with id %s", this.key.kid()))); } switch(this.key.kty()){ case RSA: case RSA_HSM: return rsaKeyCryptographyClient.verifyDataAsync(algorithm, data, signature, context, key); case EC: case EC_HSM: return ecKeyCryptographyClient.verifyDataAsync(algorithm, data, signature, context, key); default: return Mono.error(new UnsupportedOperationException(String.format("Encrypt Async is not supported for Key Type: %s", key.kty().toString()))); } } private boolean checkKeyPermissions(List<KeyOperation> operations, KeyOperation keyOperation) { if (operations.contains(keyOperation)) { return true; } return false; } private boolean ensureValidKeyAvailable() { boolean keyAvailableLocally = true; if(key == null) { try { this.key = getKey().block().value().keyMaterial(); keyAvailableLocally = this.key.isValid(); } catch (HttpResponseException e) { logger.info("Failed to retrieve key from key vault"); keyAvailableLocally = false; } } return keyAvailableLocally; } }
class CryptographyAsyncClient { static final String KEY_VAULT_SCOPE = "https: private JsonWebKey key; private final CryptographyService service; private final CryptographyServiceClient cryptographyServiceClient; private LocalKeyCryptographyClient localKeyCryptographyClient; private final ClientLogger logger = new ClientLogger(CryptographyAsyncClient.class); /** * Creates a CryptographyAsyncClient that uses {@code pipeline} to service requests * * @param key the JsonWebKey to use for cryptography operations. * @param pipeline HttpPipeline that the HTTP requests and responses flow through. */ CryptographyAsyncClient(JsonWebKey key, HttpPipeline pipeline) { Objects.requireNonNull(key); if (!key.isValid()) { throw new IllegalArgumentException("Json Web Key is not valid"); } if (key.keyOps() == null) { throw new IllegalArgumentException("Json Web Key's key operations property is not configured"); } if (key.kty() == null) { throw new IllegalArgumentException("Json Web Key's key type property is not configured"); } this.key = key; service = RestProxy.create(CryptographyService.class, pipeline); if (!Strings.isNullOrEmpty(key.kid())) { unpackAndValidateId(key.kid()); cryptographyServiceClient = new CryptographyServiceClient(key.kid(), service); } else { cryptographyServiceClient = null; } initializeCryptoClients(); } /** * Creates a CryptographyAsyncClient that uses {@code pipeline} to service requests * * @param kid THe Azure Key vault key identifier to use for cryptography operations. * @param pipeline HttpPipeline that the HTTP requests and responses flow through. 
*/ CryptographyAsyncClient(String kid, HttpPipeline pipeline) { unpackAndValidateId(kid); service = RestProxy.create(CryptographyService.class, pipeline); cryptographyServiceClient = new CryptographyServiceClient(kid, service); this.key = null; } private void initializeCryptoClients() { if (localKeyCryptographyClient != null) { return; } switch (key.kty()) { case RSA: case RSA_HSM: localKeyCryptographyClient = new RsaKeyCryptographyClient(key, cryptographyServiceClient); break; case EC: case EC_HSM: localKeyCryptographyClient = new EcKeyCryptographyClient(key, cryptographyServiceClient); break; case OCT: localKeyCryptographyClient = new SymmetricKeyCryptographyClient(key, cryptographyServiceClient); break; default: throw new IllegalArgumentException(String.format("The Json Web Key Type: %s is not supported.", key.kty().toString())); } } /** * Gets the public part of the configured key. The get key operation is applicable to all key types and it requires the {@code keys/get} permission. * * <p><strong>Code Samples</strong></p> * <p>Gets the configured key in the client. Subscribes to the call asynchronously and prints out the returned key details when a response has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.async.cryptographyclient.getKeyWithResponse} * * @throws ResourceNotFoundException when the configured key doesn't exist in the key vault. * @return A {@link Mono} containing a {@link Response} whose {@link Response */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<Response<Key>> getKeyWithResponse() { return withContext(context -> getKeyWithResponse(context)); } /** * Gets the public part of the configured key. The get key operation is applicable to all key types and it requires the {@code keys/get} permission. * * <p><strong>Code Samples</strong></p> * <p>Gets the configured key in the client. 
Subscribes to the call asynchronously and prints out the returned key details when a response has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.cryptographyclient.getKey} * * @throws ResourceNotFoundException when the configured key doesn't exist in the key vault. * @return A {@link Mono} containing the requested {@link Key key}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<Key> getKey() { return getKeyWithResponse().flatMap(FluxUtil::toMono); } Mono<Response<Key>> getKeyWithResponse(Context context) { return cryptographyServiceClient.getKey(context); } /** * Encrypts an arbitrary sequence of bytes using the configured key. Note that the encrypt operation only supports a * single block of data, the size of which is dependent on the target key and the encryption algorithm to be used. The encrypt * operation is supported for both symmetric keys and asymmetric keys. In case of asymmetric keys public portion of the key is used * for encryption. This operation requires the keys/encrypt permission. * * <p>The {@link EncryptionAlgorithm encryption algorithm} indicates the type of algorithm to use for encrypting the specified {@code plaintext}. Possible values * for assymetric keys include: {@link EncryptionAlgorithm * Possible values for symmetric keys include: {@link EncryptionAlgorithm * {@link EncryptionAlgorithm * * <p><strong>Code Samples</strong></p> * <p>Encrypts the content. Subscribes to the call asynchronously and prints out the encrypted content details when a response has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.async.cryptographyclient.encrypt * * @param algorithm The algorithm to be used for encryption. * @param plaintext The content to be encrypted. * @throws ResourceNotFoundException if the key cannot be found for encryption. * @throws NullPointerException if {@code algorithm} or {@code plainText} is null. 
* @return A {@link Mono} containing a {@link EncryptResult} whose {@link EncryptResult */ public Mono<EncryptResult> encrypt(EncryptionAlgorithm algorithm, byte[] plaintext) { return withContext(context -> encrypt(algorithm, plaintext, context, null, null)); } /** * Encrypts an arbitrary sequence of bytes using the configured key. Note that the encrypt operation only supports a * single block of data, the size of which is dependent on the target key and the encryption algorithm to be used. The encrypt * operation is supported for both symmetric keys and asymmetric keys. In case of asymmetric keys public portion of the key is used * for encryption. This operation requires the keys/encrypt permission. * * <p>The {@link EncryptionAlgorithm encryption algorithm} indicates the type of algorithm to use for encrypting the specified {@code plaintext}. Possible values * for assymetric keys include: {@link EncryptionAlgorithm * Possible values for symmetric keys include: {@link EncryptionAlgorithm * {@link EncryptionAlgorithm * * <p><strong>Code Samples</strong></p> * <p>Encrypts the content. Subscribes to the call asynchronously and prints out the encrypted content details when a response has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.async.cryptographyclient.encrypt * * @param algorithm The algorithm to be used for encryption. * @param plaintext The content to be encrypted. * @param iv The initialization vector * @param authenticationData The authentication data * @throws ResourceNotFoundException if the key cannot be found for encryption. * @throws NullPointerException if {@code algorithm} or {@code plainText} is null. 
* @return A {@link Mono} containing a {@link EncryptResult} whose {@link EncryptResult */ public Mono<EncryptResult> encrypt(EncryptionAlgorithm algorithm, byte[] plaintext, byte[] iv, byte[] authenticationData) { return withContext(context -> encrypt(algorithm, plaintext, context, iv, authenticationData)); } Mono<EncryptResult> encrypt(EncryptionAlgorithm algorithm, byte[] plaintext, Context context, byte[] iv, byte[] authenticationData) { Objects.requireNonNull(algorithm, "Encryption algorithm cannot be null."); Objects.requireNonNull(plaintext, "Plain text content to be encrypted cannot be null."); boolean keyAvailableLocally = ensureValidKeyAvailable(); if (!keyAvailableLocally) { return cryptographyServiceClient.encrypt(algorithm, plaintext, context); } if (!checkKeyPermissions(this.key.keyOps(), KeyOperation.ENCRYPT)) { return Mono.error(new UnsupportedOperationException(String.format("Encrypt Operation is missing permission/not supported for key with id %s", key.kid()))); } return localKeyCryptographyClient.encryptAsync(algorithm, plaintext, iv, authenticationData, context, key); } /** * Decrypts a single block of encrypted data using the configured key and specified algorithm. Note that only a single block of data may be * decrypted, the size of this block is dependent on the target key and the algorithm to be used. The decrypt operation * is supported for both asymmetric and symmetric keys. This operation requires the keys/decrypt permission. * * <p>The {@link EncryptionAlgorithm encryption algorithm} indicates the type of algorithm to use for decrypting the specified encrypted content. Possible values * for assymetric keys include: {@link EncryptionAlgorithm * Possible values for symmetric keys include: {@link EncryptionAlgorithm * {@link EncryptionAlgorithm * * <p><strong>Code Samples</strong></p> * <p>Decrypts the encrypted content. 
Subscribes to the call asynchronously and prints out the decrypted content details when a response has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.async.cryptographyclient.decrypt * * @param algorithm The algorithm to be used for decryption. * @param cipherText The content to be decrypted. * @throws ResourceNotFoundException if the key cannot be found for decryption. * @throws NullPointerException if {@code algorithm} or {@code cipherText} is null. * @return A {@link Mono} containing the decrypted blob. */ public Mono<DecryptResult> decrypt(EncryptionAlgorithm algorithm, byte[] cipherText) { return withContext(context -> decrypt(algorithm, cipherText, null, null, null, context)); } /** * Decrypts a single block of encrypted data using the configured key and specified algorithm. Note that only a single block of data may be * decrypted, the size of this block is dependent on the target key and the algorithm to be used. The decrypt operation * is supported for both asymmetric and symmetric keys. This operation requires the keys/decrypt permission. * * <p>The {@link EncryptionAlgorithm encryption algorithm} indicates the type of algorithm to use for decrypting the specified encrypted content. Possible values * for assymetric keys include: {@link EncryptionAlgorithm * Possible values for symmetric keys include: {@link EncryptionAlgorithm * {@link EncryptionAlgorithm * * <p><strong>Code Samples</strong></p> * <p>Decrypts the encrypted content. Subscribes to the call asynchronously and prints out the decrypted content details when a response has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.async.cryptographyclient.decrypt * * @param algorithm The algorithm to be used for decryption. * @param cipherText The content to be decrypted. * @param iv The initialization vector. * @param authenticationData The authentication data. * @param authenticationTag The authentication tag. 
* @throws ResourceNotFoundException if the key cannot be found for decryption. * @throws NullPointerException if {@code algorithm} or {@code cipherText} is null. * @return A {@link Mono} containing the decrypted blob. */ public Mono<DecryptResult> decrypt(EncryptionAlgorithm algorithm, byte[] cipherText, byte[] iv, byte[] authenticationData, byte[] authenticationTag) { return withContext(context -> decrypt(algorithm, cipherText, iv, authenticationData, authenticationTag, context)); } Mono<DecryptResult> decrypt(EncryptionAlgorithm algorithm, byte[] cipherText, byte[] iv, byte[] authenticationData, byte[] authenticationTag, Context context) { Objects.requireNonNull(algorithm, "Encryption algorithm cannot be null."); Objects.requireNonNull(cipherText, "Cipher text content to be decrypted cannot be null."); boolean keyAvailableLocally = ensureValidKeyAvailable(); if (!keyAvailableLocally) { return cryptographyServiceClient.decrypt(algorithm, cipherText, context); } if (!checkKeyPermissions(this.key.keyOps(), KeyOperation.DECRYPT)) { return Mono.error(new UnsupportedOperationException(String.format("Decrypt Operation is not allowed for key with id %s", key.kid()))); } return localKeyCryptographyClient.decryptAsync(algorithm, cipherText, iv, authenticationData, authenticationTag, context, key); } /** * Creates a signature from a digest using the configured key. The sign operation supports both asymmetric and * symmetric keys. This operation requires the keys/sign permission. * * <p>The {@link SignatureAlgorithm signature algorithm} indicates the type of algorithm to use to create the signature from the digest. Possible values include: * {@link SignatureAlgorithm * {@link SignatureAlgorithm * {@link SignatureAlgorithm * {@link SignatureAlgorithm * * <p><strong>Code Samples</strong></p> * <p>Sings the digest. 
Subscribes to the call asynchronously and prints out the signature details when a response has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.async.cryptographyclient.sign} * * @param algorithm The algorithm to use for signing. * @param digest The content from which signature is to be created. * @throws ResourceNotFoundException if the key cannot be found for signing. * @throws NullPointerException if {@code algorithm} or {@code digest} is null. * @return A {@link Mono} containing a {@link SignResult} whose {@link SignResult */ public Mono<SignResult> sign(SignatureAlgorithm algorithm, byte[] digest) { return withContext(context -> sign(algorithm, digest, context)); } Mono<SignResult> sign(SignatureAlgorithm algorithm, byte[] digest, Context context) { Objects.requireNonNull(algorithm, "Signature algorithm cannot be null."); Objects.requireNonNull(digest, "Digest content to be signed cannot be null."); boolean keyAvailableLocally = ensureValidKeyAvailable(); if (!keyAvailableLocally) { return cryptographyServiceClient.sign(algorithm, digest, context); } if (!checkKeyPermissions(this.key.keyOps(), KeyOperation.SIGN)) { return Mono.error(new UnsupportedOperationException(String.format("Sign Operation is not allowed for key with id %s", key.kid()))); } return localKeyCryptographyClient.signAsync(algorithm, digest, context, key); } /** * Verifies a signature using the configured key. The verify operation supports both symmetric keys and asymmetric keys. * In case of asymmetric keys public portion of the key is used to verify the signature . This operation requires the keys/verify permission. * * <p>The {@link SignatureAlgorithm signature algorithm} indicates the type of algorithm to use to verify the signature. 
Possible values include: * {@link SignatureAlgorithm * {@link SignatureAlgorithm * {@link SignatureAlgorithm * {@link SignatureAlgorithm * * <p><strong>Code Samples</strong></p> * <p>Verifies the signature against the specified digest. Subscribes to the call asynchronously and prints out the verification details when a response has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.async.cryptographyclient.verify} * * @param algorithm The algorithm to use for signing. * @param digest The content from which signature is to be created. * @param signature The signature to be verified. * @throws ResourceNotFoundException if the key cannot be found for verifying. * @throws NullPointerException if {@code algorithm}, {@code digest} or {@code signature} is null. * @return A {@link Mono} containing a {@link Boolean} indicating the signature verification result. */ public Mono<VerifyResult> verify(SignatureAlgorithm algorithm, byte[] digest, byte[] signature) { return withContext(context -> verify(algorithm, digest, signature, context)); } Mono<VerifyResult> verify(SignatureAlgorithm algorithm, byte[] digest, byte[] signature, Context context) { Objects.requireNonNull(algorithm, "Signature algorithm cannot be null."); Objects.requireNonNull(digest, "Digest content cannot be null."); Objects.requireNonNull(signature, "Signature to be verified cannot be null."); boolean keyAvailableLocally = ensureValidKeyAvailable(); if (!keyAvailableLocally) { return cryptographyServiceClient.verify(algorithm, digest, signature, context); } if (!checkKeyPermissions(this.key.keyOps(), KeyOperation.VERIFY)) { return Mono.error(new UnsupportedOperationException(String.format("Verify Operation is not allowed for key with id %s", key.kid()))); } return localKeyCryptographyClient.verifyAsync(algorithm, digest, signature, context, key); } /** * Wraps a symmetric key using the configured key. 
The wrap operation supports wrapping a symmetric key with both * symmetric and asymmetric keys. This operation requires the keys/wrapKey permission. * * <p>The {@link KeyWrapAlgorithm wrap algorithm} indicates the type of algorithm to use for wrapping the specified key content. Possible values include: * {@link KeyWrapAlgorithm * * <p><strong>Code Samples</strong></p> * <p>Wraps the key content. Subscribes to the call asynchronously and prints out the wrapped key details when a response has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.async.cryptographyclient.wrap-key} * * @param algorithm The encryption algorithm to use for wrapping the key. * @param key The key content to be wrapped * @throws ResourceNotFoundException if the key cannot be found for wrap operation. * @throws NullPointerException if {@code algorithm} or {@code key} is null. * @return A {@link Mono} containing a {@link KeyWrapResult} whose {@link KeyWrapResult */ public Mono<KeyWrapResult> wrapKey(KeyWrapAlgorithm algorithm, byte[] key) { return withContext(context -> wrapKey(algorithm, key, context)); } Mono<KeyWrapResult> wrapKey(KeyWrapAlgorithm algorithm, byte[] key, Context context) { Objects.requireNonNull(algorithm, "Key Wrap algorithm cannot be null."); Objects.requireNonNull(key, "Key content to be wrapped cannot be null."); boolean keyAvailableLocally = ensureValidKeyAvailable(); if (!keyAvailableLocally) { return cryptographyServiceClient.wrapKey(algorithm, key, context); } if (!checkKeyPermissions(this.key.keyOps(), KeyOperation.WRAP_KEY)) { return Mono.error(new UnsupportedOperationException(String.format("Wrap Key Operation is not allowed for key with id %s", this.key.kid()))); } return localKeyCryptographyClient.wrapKeyAsync(algorithm, key, context, this.key); } /** * Unwraps a symmetric key using the configured key that was initially used for wrapping that key. This operation is the reverse of the wrap operation. 
* The unwrap operation supports asymmetric and symmetric keys to unwrap. This operation requires the keys/unwrapKey permission. * * <p>The {@link KeyWrapAlgorithm wrap algorithm} indicates the type of algorithm to use for unwrapping the specified encrypted key content. Possible values for asymmetric keys include: * {@link KeyWrapAlgorithm * Possible values for symmetric keys include: {@link KeyWrapAlgorithm * * <p><strong>Code Samples</strong></p> * <p>Unwraps the key content. Subscribes to the call asynchronously and prints out the unwrapped key details when a response has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.async.cryptographyclient.unwrap-key} * * @param algorithm The encryption algorithm to use for wrapping the key. * @param encryptedKey The encrypted key content to unwrap. * @throws ResourceNotFoundException if the key cannot be found for wrap operation. * @throws NullPointerException if {@code algorithm} or {@code encryptedKey} is null. * @return A {@link Mono} containing a the unwrapped key content. 
*/ public Mono<KeyUnwrapResult> unwrapKey(KeyWrapAlgorithm algorithm, byte[] encryptedKey) { return withContext(context -> unwrapKey(algorithm, encryptedKey, context)); } Mono<KeyUnwrapResult> unwrapKey(KeyWrapAlgorithm algorithm, byte[] encryptedKey, Context context) { Objects.requireNonNull(algorithm, "Key Wrap algorithm cannot be null."); Objects.requireNonNull(encryptedKey, "Encrypted key content to be unwrapped cannot be null."); boolean keyAvailableLocally = ensureValidKeyAvailable(); if (!keyAvailableLocally) { return cryptographyServiceClient.unwrapKey(algorithm, encryptedKey, context); } if (!checkKeyPermissions(this.key.keyOps(), KeyOperation.WRAP_KEY)) { return Mono.error(new UnsupportedOperationException(String.format("Unwrap Key Operation is not allowed for key with id %s", this.key.kid()))); } return localKeyCryptographyClient.unwrapKeyAsync(algorithm, encryptedKey, context, key); } /** * Creates a signature from the raw data using the configured key. The sign data operation supports both asymmetric and * symmetric keys. This operation requires the keys/sign permission. * * <p>The {@link SignatureAlgorithm signature algorithm} indicates the type of algorithm to use to sign the digest. Possible values include: * {@link SignatureAlgorithm * {@link SignatureAlgorithm * {@link SignatureAlgorithm * {@link SignatureAlgorithm * * <p><strong>Code Samples</strong></p> * <p>Signs the raw data. Subscribes to the call asynchronously and prints out the signature details when a response has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.async.cryptographyclient.sign-data} * * @param algorithm The algorithm to use for signing. * @param data The content from which signature is to be created. * @throws ResourceNotFoundException if the key cannot be found for signing. * @throws NullPointerException if {@code algorithm} or {@code data} is null. 
* @return A {@link Mono} containing a {@link SignResult} whose {@link SignResult */ public Mono<SignResult> signData(SignatureAlgorithm algorithm, byte[] data) { return withContext(context -> signData(algorithm, data, context)); } Mono<SignResult> signData(SignatureAlgorithm algorithm, byte[] data, Context context) { Objects.requireNonNull(algorithm, "Signature algorithm cannot be null."); Objects.requireNonNull(data, "Data to be signed cannot be null."); boolean keyAvailableLocally = ensureValidKeyAvailable(); if (!keyAvailableLocally) { return cryptographyServiceClient.signData(algorithm, data, context); } if (!checkKeyPermissions(this.key.keyOps(), KeyOperation.SIGN)) { return Mono.error(new UnsupportedOperationException(String.format("Sign Operation is not allowed for key with id %s", this.key.kid()))); } return localKeyCryptographyClient.signDataAsync(algorithm, data, context, key); } /** * Verifies a signature against the raw data using the configured key. The verify operation supports both symmetric keys and asymmetric keys. * In case of asymmetric keys public portion of the key is used to verify the signature . This operation requires the keys/verify permission. * * <p>The {@link SignatureAlgorithm signature algorithm} indicates the type of algorithm to use to verify the signature. Possible values include: * {@link SignatureAlgorithm * {@link SignatureAlgorithm * {@link SignatureAlgorithm * {@link SignatureAlgorithm * * <p><strong>Code Samples</strong></p> * <p>Verifies the signature against the raw data. Subscribes to the call asynchronously and prints out the verification details when a response has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.async.cryptographyclient.verify-data} * * @param algorithm The algorithm to use for signing. * @param data The raw content against which signature is to be verified. * @param signature The signature to be verified. 
* @throws ResourceNotFoundException if the key cannot be found for verifying. * @throws NullPointerException if {@code algorithm}, {@code data} or {@code signature} is null. * @return The {@link Boolean} indicating the signature verification result. */ public Mono<VerifyResult> verifyData(SignatureAlgorithm algorithm, byte[] data, byte[] signature) { return withContext(context -> verifyData(algorithm, data, signature, context)); } Mono<VerifyResult> verifyData(SignatureAlgorithm algorithm, byte[] data, byte[] signature, Context context) { Objects.requireNonNull(algorithm, "Signature algorithm cannot be null."); Objects.requireNonNull(data, "Data cannot be null."); Objects.requireNonNull(signature, "Signature to be verified cannot be null."); boolean keyAvailableLocally = ensureValidKeyAvailable(); if (!keyAvailableLocally) { return cryptographyServiceClient.verifyData(algorithm, data, signature, context); } if (!checkKeyPermissions(this.key.keyOps(), KeyOperation.VERIFY)) { return Mono.error(new UnsupportedOperationException(String.format("Verify Operation is not allowed for key with id %s", this.key.kid()))); } return localKeyCryptographyClient.verifyDataAsync(algorithm, data, signature, context, key); } private boolean checkKeyPermissions(List<KeyOperation> operations, KeyOperation keyOperation) { return operations.contains(keyOperation); } private boolean ensureValidKeyAvailable() { boolean keyAvailableLocally = true; if (this.key == null) { try { this.key = getKey().block().keyMaterial(); keyAvailableLocally = this.key.isValid(); initializeCryptoClients(); } catch (HttpResponseException | NullPointerException e) { logger.info("Failed to retrieve key from key vault"); keyAvailableLocally = false; } } return keyAvailableLocally; } CryptographyServiceClient getCryptographyServiceClient() { return cryptographyServiceClient; } }
I'm curious why there isn't any sort of TokenCredential or AzureSasCredential policy here. Does this service not have any authentication? I can't find one in the .NET repo either.
/**
 * Create a {@link ModelsRepositoryAsyncClient} based on the builder settings.
 *
 * @return the created asynchronous ModelsRepositoryAsyncClient.
 */
public ModelsRepositoryAsyncClient buildAsyncClient() {
    Objects.requireNonNull(endpoint, "'endpoint' cannot be null");

    // Resolve effective options: explicit builder settings win, library defaults otherwise.
    Configuration effectiveConfiguration = this.configuration != null
        ? this.configuration
        : Configuration.getGlobalConfiguration().clone();

    ModelsRepositoryServiceVersion effectiveServiceVersion = this.serviceVersion != null
        ? this.serviceVersion
        : ModelsRepositoryServiceVersion.getLatest();

    RetryPolicy effectiveRetryPolicy = this.retryPolicy != null
        ? this.retryPolicy
        : DEFAULT_RETRY_POLICY;

    // A pipeline supplied via pipeline(...) takes precedence; otherwise assemble one from the
    // configured pieces and cache it on the builder for subsequent build calls.
    if (this.httpPipeline == null) {
        this.httpPipeline = buildPipeline(
            this.endpoint,
            this.httpLogOptions,
            this.clientOptions,
            this.httpClient,
            this.additionalPolicies,
            effectiveRetryPolicy,
            effectiveConfiguration,
            this.properties);
    }

    return new ModelsRepositoryAsyncClient(
        this.endpoint, this.httpPipeline, effectiveServiceVersion, this.jsonSerializer);
}
Objects.requireNonNull(endpoint, "'endpoint' cannot be null");
/**
 * Create a {@link ModelsRepositoryAsyncClient} based on the builder settings.
 *
 * @return the created asynchronous ModelsRepositoryAsyncClient.
 */
public ModelsRepositoryAsyncClient buildAsyncClient() {
    Objects.requireNonNull(endpoint, "'endpoint' cannot be null");

    // Fall back to the global (cloned) configuration when none was supplied.
    Configuration buildConfiguration = this.configuration;
    if (buildConfiguration == null) {
        buildConfiguration = Configuration.getGlobalConfiguration().clone();
    }

    // Default to the latest known service version when none was supplied.
    ModelsRepositoryServiceVersion serviceVersion = this.serviceVersion;
    if (serviceVersion == null) {
        serviceVersion = ModelsRepositoryServiceVersion.getLatest();
    }

    // Default retry policy when none was supplied.
    RetryPolicy retryPolicy = this.retryPolicy;
    if (retryPolicy == null) {
        retryPolicy = DEFAULT_RETRY_POLICY;
    }

    // An explicitly supplied pipeline wins; otherwise build one and cache it on the builder,
    // so repeated build calls reuse the same pipeline.
    if (this.httpPipeline == null) {
        this.httpPipeline = buildPipeline(
            this.endpoint,
            this.httpLogOptions,
            this.clientOptions,
            this.httpClient,
            this.additionalPolicies,
            retryPolicy,
            buildConfiguration,
            this.properties);
    }

    return new ModelsRepositoryAsyncClient(this.endpoint, this.httpPipeline, serviceVersion, this.jsonSerializer);
}
/**
 * Fluent builder for creating {@code ModelsRepotioryClient} and
 * {@link ModelsRepositoryAsyncClient} instances.
 */
class ModelsRepositoryClientBuilder {
    private static final String MODELS_REPOSITORY_PROPERTIES = "azure-iot-modelsrepository.properties";
    private static final String SDK_NAME = "name";
    private static final String SDK_VERSION = "version";

    private final List<HttpPipelinePolicy> additionalPolicies;
    private String endpoint;
    private ModelsRepositoryServiceVersion serviceVersion;
    private ClientOptions clientOptions;
    private HttpPipeline httpPipeline;
    private HttpClient httpClient;
    private HttpLogOptions httpLogOptions;
    private RetryPolicy retryPolicy;
    private JsonSerializer jsonSerializer;

    // Typed nulls exist solely to select the RetryPolicy(String, ChronoUnit) constructor.
    // Renamed to UPPER_SNAKE_CASE: these are private static final constants.
    private static final String RETRY_AFTER_HEADER = null;
    private static final ChronoUnit RETRY_AFTER_TIME_UNIT = null;
    private static final RetryPolicy DEFAULT_RETRY_POLICY = new RetryPolicy(RETRY_AFTER_HEADER, RETRY_AFTER_TIME_UNIT);

    private final Map<String, String> properties;
    private Configuration configuration;

    /**
     * The public constructor for ModelsRepositoryClientBuilder.
     */
    public ModelsRepositoryClientBuilder() {
        additionalPolicies = new ArrayList<>();
        properties = CoreUtils.getProperties(MODELS_REPOSITORY_PROPERTIES);
        httpLogOptions = new HttpLogOptions();
    }

    /**
     * Assembles the HTTP pipeline used by built clients: user agent, request id, before-retry
     * policies, retry, date, caller-supplied policies, optional client-option headers,
     * after-retry policies and logging.
     * NOTE(review): the {@code endpoint} parameter is unused here; kept because the caller
     * passes it — candidate for cleanup alongside the call site.
     */
    private static HttpPipeline buildPipeline(String endpoint, HttpLogOptions httpLogOptions,
                                              ClientOptions clientOptions, HttpClient httpClient,
                                              List<HttpPipelinePolicy> additionalPolicies,
                                              RetryPolicy retryPolicy, Configuration configuration,
                                              Map<String, String> properties) {
        List<HttpPipelinePolicy> policies = new ArrayList<>();

        String clientName = properties.getOrDefault(SDK_NAME, "UnknownName");
        String clientVersion = properties.getOrDefault(SDK_VERSION, "UnknownVersion");
        // ClientOptions' applicationId takes precedence over the deprecated HttpLogOptions one.
        String applicationId = clientOptions == null
            ? httpLogOptions.getApplicationId()
            : clientOptions.getApplicationId();

        policies.add(new UserAgentPolicy(applicationId, clientName, clientVersion, configuration));
        policies.add(new RequestIdPolicy());
        HttpPolicyProviders.addBeforeRetryPolicies(policies);
        policies.add(retryPolicy);
        policies.add(new AddDatePolicy());
        policies.addAll(additionalPolicies);

        if (clientOptions != null) {
            List<HttpHeader> httpHeaderList = new ArrayList<>();
            clientOptions.getHeaders().forEach(header ->
                httpHeaderList.add(new HttpHeader(header.getName(), header.getValue())));
            policies.add(new AddHeadersPolicy(new HttpHeaders(httpHeaderList)));
        }

        HttpPolicyProviders.addAfterRetryPolicies(policies);
        policies.add(new HttpLoggingPolicy(httpLogOptions));

        return new HttpPipelineBuilder()
            .policies(policies.toArray(new HttpPipelinePolicy[0]))
            .httpClient(httpClient)
            .build();
    }

    /**
     * Create a {@link ModelsRepotioryClient} based on the builder settings.
     * NOTE(review): the client type name is misspelled ("Repotiory"); renaming it is a
     * cross-file change and cannot be done in this builder alone.
     *
     * @return the created synchronous ModelsRepotioryClient.
     */
    public ModelsRepotioryClient buildClient() {
        return new ModelsRepotioryClient(buildAsyncClient());
    }

    /**
     * Set the service endpoint that the built client will communicate with. This field is
     * mandatory to set.
     *
     * @param endpoint URL of the service.
     * @return the updated ModelsRepositoryClientBuilder instance for fluent building.
     */
    public ModelsRepositoryClientBuilder endpoint(String endpoint) {
        this.endpoint = endpoint;
        return this;
    }

    /**
     * Sets the {@link ModelsRepositoryServiceVersion} that is used when making API requests.
     * <p>
     * If a service version is not provided, the latest service version known to this client
     * library is used; upgrading the library may therefore move to a newer service version.
     * Targeting a specific service version may also mean that the service will return an error
     * for newer APIs.
     *
     * @param serviceVersion The service API version to use.
     * @return the updated ModelsRepositoryClientBuilder instance for fluent building.
     */
    public ModelsRepositoryClientBuilder serviceVersion(ModelsRepositoryServiceVersion serviceVersion) {
        this.serviceVersion = serviceVersion;
        return this;
    }

    /**
     * Sets the {@link HttpClient} to use for sending and receiving requests to and from the service.
     *
     * @param httpClient HttpClient to use for requests.
     * @return the updated ModelsRepositoryClientBuilder instance for fluent building.
     */
    public ModelsRepositoryClientBuilder httpClient(HttpClient httpClient) {
        this.httpClient = httpClient;
        return this;
    }

    /**
     * Sets the {@link HttpLogOptions} for service requests.
     *
     * @param logOptions The logging configuration to use when sending and receiving HTTP
     * requests/responses.
     * @return the updated ModelsRepositoryClientBuilder instance for fluent building.
     */
    public ModelsRepositoryClientBuilder httpLogOptions(HttpLogOptions logOptions) {
        this.httpLogOptions = logOptions;
        return this;
    }

    /**
     * Adds a pipeline policy to apply on each request sent. The policy will be added after the
     * retry policy. If the method is called multiple times, all policies will be added and their
     * order preserved.
     *
     * @param pipelinePolicy a pipeline policy.
     * @return the updated ModelsRepositoryClientBuilder instance for fluent building.
     * @throws NullPointerException If {@code pipelinePolicy} is {@code null}.
     */
    public ModelsRepositoryClientBuilder addPolicy(HttpPipelinePolicy pipelinePolicy) {
        this.additionalPolicies.add(Objects.requireNonNull(pipelinePolicy, "'pipelinePolicy' cannot be null"));
        return this;
    }

    /**
     * Sets the {@link HttpPipelinePolicy} that is used as the retry policy for each request that
     * is sent. A default {@link RetryPolicy} is used if none is provided.
     *
     * @param retryPolicy the retry policy applied to each request.
     * @return The updated ModelsRepositoryClientBuilder object.
     */
    public ModelsRepositoryClientBuilder retryPolicy(RetryPolicy retryPolicy) {
        this.retryPolicy = retryPolicy;
        return this;
    }

    /**
     * Sets the {@link HttpPipeline} to use for the service client.
     * <p>
     * If {@code pipeline} is set, all other pipeline-related settings are ignored.
     *
     * @param httpPipeline HttpPipeline to use for sending service requests and receiving responses.
     * @return the updated ModelsRepositoryClientBuilder instance for fluent building.
     */
    public ModelsRepositoryClientBuilder pipeline(HttpPipeline httpPipeline) {
        this.httpPipeline = httpPipeline;
        return this;
    }

    /**
     * Sets the configuration store that is used during construction of the service client.
     * The default is a clone of the global {@link Configuration}.
     *
     * @param configuration The configuration store to use.
     * @return The updated ModelsRepositoryClientBuilder object for fluent building.
     */
    public ModelsRepositoryClientBuilder configuration(Configuration configuration) {
        this.configuration = configuration;
        return this;
    }

    /**
     * Custom JSON serializer that is used to handle model types that are not contained in the
     * Azure Models Repository library.
     *
     * @param jsonSerializer The serializer to deserialize response payloads into user defined models.
     * @return The updated ModelsRepositoryClientBuilder object.
     */
    public ModelsRepositoryClientBuilder serializer(JsonSerializer jsonSerializer) {
        this.jsonSerializer = jsonSerializer;
        return this;
    }

    /**
     * Sets the {@link ClientOptions} which enables various options to be set on the client, such
     * as an {@code applicationId} used in the {@link UserAgentPolicy} for telemetry/monitoring.
     *
     * @param clientOptions the {@link ClientOptions} to be set on the client.
     * @return The updated ModelsRepositoryClientBuilder object.
     */
    public ModelsRepositoryClientBuilder clientOptions(ClientOptions clientOptions) {
        this.clientOptions = clientOptions;
        return this;
    }
}
/**
 * Fluent builder for creating {@link ModelsRepotioryClient} and {@code ModelsRepositoryAsyncClient}
 * instances that talk to an Azure IoT Models Repository endpoint.
 *
 * NOTE(review): "ModelsRepotioryClient" looks like a misspelling of "ModelsRepositoryClient";
 * the name is kept here because the type is declared elsewhere — confirm against the SDK.
 */
class ModelsRepositoryClientBuilder {
    // Properties file carrying the SDK name/version reported in the User-Agent header.
    private static final String MODELS_REPOSITORY_PROPERTIES = "azure-iot-modelsrepository.properties";
    private static final String SDK_NAME = "name";
    private static final String SDK_VERSION = "version";

    // Custom policies added via addPolicy(); applied after the retry policy in the pipeline.
    private final List<HttpPipelinePolicy> additionalPolicies;
    private String endpoint;
    private ModelsRepositoryServiceVersion serviceVersion;
    private ClientOptions clientOptions;
    private HttpPipeline httpPipeline;
    private HttpClient httpClient;
    private HttpLogOptions httpLogOptions;
    private RetryPolicy retryPolicy;
    private JsonSerializer jsonSerializer;

    // Null header/time-unit delegate the retry-after handling to RetryPolicy's own defaults.
    private static final String retryAfterHeader = null;
    private static final ChronoUnit retryAfterTimeUnit = null;
    private static final RetryPolicy DEFAULT_RETRY_POLICY = new RetryPolicy(retryAfterHeader, retryAfterTimeUnit);
    private final Map<String, String> properties;
    private Configuration configuration;

    /**
     * The public constructor for ModelsRepositoryClientBuilder.
     */
    public ModelsRepositoryClientBuilder() {
        additionalPolicies = new ArrayList<>();
        properties = CoreUtils.getProperties(MODELS_REPOSITORY_PROPERTIES);
        httpLogOptions = new HttpLogOptions();
    }

    /**
     * Assembles the HTTP pipeline in the standard azure-core policy order:
     * user agent, request id, before-retry providers, retry, date, custom policies,
     * client-option headers, after-retry providers, logging.
     *
     * NOTE(review): the {@code endpoint} parameter is not used in this method body —
     * confirm whether it is needed or can be dropped.
     */
    private static HttpPipeline buildPipeline(String endpoint, HttpLogOptions httpLogOptions,
        ClientOptions clientOptions, HttpClient httpClient, List<HttpPipelinePolicy> additionalPolicies,
        RetryPolicy retryPolicy, Configuration configuration, Map<String, String> properties) {
        List<HttpPipelinePolicy> policies = new ArrayList<>();
        String clientName = properties.getOrDefault(SDK_NAME, "UnknownName");
        String clientVersion = properties.getOrDefault(SDK_VERSION, "UnknownVersion");
        // The applicationId from ClientOptions takes precedence over the HttpLogOptions one.
        String applicationId = clientOptions == null
            ? httpLogOptions.getApplicationId()
            : clientOptions.getApplicationId();
        policies.add(new UserAgentPolicy(applicationId, clientName, clientVersion, configuration));
        policies.add(new RequestIdPolicy());
        HttpPolicyProviders.addBeforeRetryPolicies(policies);
        policies.add(retryPolicy);
        policies.add(new AddDatePolicy());
        policies.addAll(additionalPolicies);
        if (clientOptions != null) {
            // Propagate any caller-supplied headers on every request.
            List<HttpHeader> httpHeaderList = new ArrayList<>();
            clientOptions.getHeaders().forEach(header ->
                httpHeaderList.add(new HttpHeader(header.getName(), header.getValue())));
            policies.add(new AddHeadersPolicy(new HttpHeaders(httpHeaderList)));
        }
        HttpPolicyProviders.addAfterRetryPolicies(policies);
        policies.add(new HttpLoggingPolicy(httpLogOptions));
        return new HttpPipelineBuilder()
            .policies(policies.toArray(new HttpPipelinePolicy[0]))
            .httpClient(httpClient)
            .build();
    }

    /**
     * Create a {@link ModelsRepotioryClient} based on the builder settings.
     *
     * @return the created synchronous ModelsRepotioryClient
     */
    public ModelsRepotioryClient buildClient() {
        return new ModelsRepotioryClient(buildAsyncClient());
    }

    // NOTE(review): the original carried a javadoc block for buildAsyncClient()
    // ("Create a ModelsRepositoryAsyncClient based on the builder settings") but the
    // method body itself is not present in this excerpt — confirm against the full source.

    /**
     * Set the service endpoint that the built client will communicate with. This field is
     * mandatory to set.
     *
     * @param endpoint URL of the service.
     * @return the updated ModelsRepositoryClientBuilder instance for fluent building.
     */
    public ModelsRepositoryClientBuilder endpoint(String endpoint) {
        this.endpoint = endpoint;
        return this;
    }

    /**
     * Sets the {@link ModelsRepositoryServiceVersion} that is used when making API requests.
     *
     * <p>If a service version is not provided, the latest service version known to this client
     * library version is used, so upgrading the library may move to a newer service version.
     * Targeting a specific service version may also mean that the service will return an error
     * for newer APIs.
     *
     * @param serviceVersion The service API version to use.
     * @return the updated ModelsRepositoryClientBuilder instance for fluent building.
     */
    public ModelsRepositoryClientBuilder serviceVersion(ModelsRepositoryServiceVersion serviceVersion) {
        this.serviceVersion = serviceVersion;
        return this;
    }

    /**
     * Sets the {@link HttpClient} to use for sending and receiving requests to and from the
     * service.
     *
     * @param httpClient HttpClient to use for requests.
     * @return the updated ModelsRepositoryClientBuilder instance for fluent building.
     */
    public ModelsRepositoryClientBuilder httpClient(HttpClient httpClient) {
        this.httpClient = httpClient;
        return this;
    }

    /**
     * Sets the {@link HttpLogOptions} for service requests.
     *
     * @param logOptions The logging configuration to use when sending and receiving HTTP
     *     requests/responses.
     * @return the updated ModelsRepositoryClientBuilder instance for fluent building.
     */
    public ModelsRepositoryClientBuilder httpLogOptions(HttpLogOptions logOptions) {
        this.httpLogOptions = logOptions;
        return this;
    }

    /**
     * Adds a pipeline policy to apply on each request sent. The policy will be added after the
     * retry policy. If the method is called multiple times, all policies will be added and their
     * order preserved.
     *
     * @param pipelinePolicy a pipeline policy.
     * @return the updated ModelsRepositoryClientBuilder instance for fluent building.
     * @throws NullPointerException If {@code pipelinePolicy} is {@code null}.
     */
    public ModelsRepositoryClientBuilder addPolicy(HttpPipelinePolicy pipelinePolicy) {
        this.additionalPolicies.add(Objects.requireNonNull(pipelinePolicy, "'pipelinePolicy' cannot be null"));
        return this;
    }

    /**
     * Sets the {@link HttpPipelinePolicy} that is used as the retry policy for each request that
     * is sent. The default {@link RetryPolicy} will be used if none is provided.
     *
     * @param retryPolicy the retry policy applied to each request.
     * @return The updated ModelsRepositoryClientBuilder object.
     */
    public ModelsRepositoryClientBuilder retryPolicy(RetryPolicy retryPolicy) {
        this.retryPolicy = retryPolicy;
        return this;
    }

    /**
     * Sets the {@link HttpPipeline} to use for the service client.
     *
     * <p>If {@code pipeline} is set, all other pipeline-related settings are ignored.
     *
     * @param httpPipeline HttpPipeline to use for sending service requests and receiving
     *     responses.
     * @return the updated ModelsRepositoryClientBuilder instance for fluent building.
     */
    public ModelsRepositoryClientBuilder pipeline(HttpPipeline httpPipeline) {
        this.httpPipeline = httpPipeline;
        return this;
    }

    /**
     * Sets the configuration store that is used during construction of the service client.
     *
     * @param configuration The configuration store to use.
     * @return The updated ModelsRepositoryClientBuilder object for fluent building.
     */
    public ModelsRepositoryClientBuilder configuration(Configuration configuration) {
        this.configuration = configuration;
        return this;
    }

    /**
     * Custom JSON serializer that is used to handle model types that are not contained in the
     * Azure Models Repository library.
     *
     * @param jsonSerializer The serializer to deserialize response payloads into user defined
     *     models.
     * @return The updated ModelsRepositoryClientBuilder object.
     */
    public ModelsRepositoryClientBuilder serializer(JsonSerializer jsonSerializer) {
        this.jsonSerializer = jsonSerializer;
        return this;
    }

    /**
     * Sets the {@link ClientOptions} which enables various options to be set on the client.
     * For example, an {@code applicationId} set here is used in the {@link UserAgentPolicy}
     * for telemetry/monitoring purposes.
     *
     * @param clientOptions the {@link ClientOptions} to be set on the client.
     * @return The updated ModelsRepositoryClientBuilder object.
     */
    public ModelsRepositoryClientBuilder clientOptions(ClientOptions clientOptions) {
        this.clientOptions = clientOptions;
        return this;
    }
}
I think this is best left to the user, as latency can change and the limit can be changed by GCP project owners.
/**
 * Repackages the grouped {@code AnnotateImageRequest}s of one element into a single
 * list and emits it, so a downstream step can issue one batched Vision API call.
 *
 * @param context process context; the element value is the iterable of grouped requests.
 */
public void processElement(ProcessContext context) {
    Iterable<AnnotateImageRequest> value = context.element().getValue();
    // Program to the interface: only List behavior is needed from the batch.
    List<AnnotateImageRequest> annotateImageRequests = new ArrayList<>();
    value.forEach(annotateImageRequests::add);
    context.output(annotateImageRequests);
}
ArrayList<AnnotateImageRequest> annotateImageRequests = new ArrayList<>();
/**
 * Extracts the (required, non-null) request batch from the current element, performs
 * the batched annotation call, and emits the resulting response list.
 *
 * @param context process context carrying the request batch as the element value.
 */
public void processElement(ProcessContext context) {
    Iterable<AnnotateImageRequest> requests = Objects.requireNonNull(context.element().getValue());
    List<AnnotateImageResponse> responses = getResponse(requests);
    context.output(responses);
}
/**
 * A {@code DoFn} that sends batched {@code AnnotateImageRequest}s to the Cloud Vision API
 * and emits the per-image {@code AnnotateImageResponse} list.
 *
 * <p>The {@code ImageAnnotatorClient} is not serializable, so the field is {@code transient}
 * and the client is created once per DoFn instance in {@code @Setup}; creating it in
 * {@code @StartBundle} (as before) would reopen the connection for every bundle.
 */
class PerformImageAnnotation extends DoFn<List<AnnotateImageRequest>, List<AnnotateImageResponse>> {

    // transient: DoFn instances are serialized by the runner and the gRPC client cannot be.
    private transient ImageAnnotatorClient imageAnnotatorClient;

    public PerformImageAnnotation() {}

    /**
     * Parametrized constructor to make mock injection easier in testing.
     *
     * @param imageAnnotatorClient the (possibly mocked) Vision API client to use.
     */
    public PerformImageAnnotation(ImageAnnotatorClient imageAnnotatorClient) {
        this.imageAnnotatorClient = imageAnnotatorClient;
    }

    @Setup
    public void setup() throws IOException {
        imageAnnotatorClient = ImageAnnotatorClient.create();
    }

    @Teardown
    public void teardown() {
        // Guard against teardown being invoked when setup never ran (or failed part-way).
        if (imageAnnotatorClient != null) {
            imageAnnotatorClient.close();
        }
    }

    @ProcessElement
    // NOTE(review): the @ProcessElement-annotated process method appears to be elided from
    // this excerpt; the annotation should sit on that method rather than on getResponse.
    /**
     * Performs the batched Vision API call. Default access for testing.
     *
     * @param requests request list.
     * @return response list, one entry per request.
     */
    List<AnnotateImageResponse> getResponse(List<AnnotateImageRequest> requests) {
        BatchAnnotateImagesResponse batchAnnotateImagesResponse = imageAnnotatorClient.batchAnnotateImages(requests);
        return batchAnnotateImagesResponse.getResponsesList();
    }
}
/**
 * A {@code DoFn} that performs one batched Cloud Vision API call per input element.
 * Input elements are KV pairs whose value is the group of requests to batch together;
 * the output is the matching list of responses.
 */
class PerformImageAnnotation extends DoFn<KV<Integer, Iterable<AnnotateImageRequest>>, List<AnnotateImageResponse>> {

    // transient: DoFn instances are serialized by the runner; the gRPC client is not serializable.
    private transient ImageAnnotatorClient imageAnnotatorClient;

    public PerformImageAnnotation() {}

    /**
     * Parametrized constructor to make mock injection easier in testing.
     *
     * @param imageAnnotatorClient the (possibly mocked) client to use for annotation calls.
     */
    public PerformImageAnnotation(ImageAnnotatorClient imageAnnotatorClient) {
        this.imageAnnotatorClient = imageAnnotatorClient;
    }

    // Created once per DoFn instance (not per bundle) to avoid reopening the connection.
    @Setup
    public void setup() throws IOException {
        imageAnnotatorClient = ImageAnnotatorClient.create();
    }

    @Teardown
    public void teardown() {
        imageAnnotatorClient.close();
    }

    @ProcessElement
    // NOTE(review): the @ProcessElement process method appears elided from this excerpt;
    // the annotation presumably belongs on it rather than on getResponse — confirm.
    /**
     * Performs the call to the Cloud Vision API using a client library. Default access for
     * testing.
     *
     * @param requests request list.
     * @return response list.
     */
    List<AnnotateImageResponse> getResponse(Iterable<AnnotateImageRequest> requests) {
        List<AnnotateImageRequest> requestList = new ArrayList<>();
        requests.forEach(requestList::add);
        BatchAnnotateImagesResponse batchAnnotateImagesResponse = imageAnnotatorClient.batchAnnotateImages(requestList);
        return batchAnnotateImagesResponse.getResponsesList();
    }
}
Since the default limit is 30 QPS but the latency will depend a lot on the size of the input, let's be cautious: assume smaller-sized input data and use something small, like 5 keys. We should make this choice clear in the documentation of the class and of the constructor that sets the default, and recommend that users update it to better reflect their desired QPS.
/**
 * Copies the grouped requests for the current element into a list and emits it
 * downstream as a single batch.
 *
 * @param context process context whose element value holds the grouped requests.
 */
public void processElement(ProcessContext context) {
    ArrayList<AnnotateImageRequest> batch = new ArrayList<>();
    for (AnnotateImageRequest request : context.element().getValue()) {
        batch.add(request);
    }
    context.output(batch);
}
ArrayList<AnnotateImageRequest> annotateImageRequests = new ArrayList<>();
/**
 * Emits the Vision API responses for the current element's request batch.
 * The element value must be non-null (enforced via {@code Objects.requireNonNull}).
 *
 * @param context process context carrying the request batch as the element value.
 */
public void processElement(ProcessContext context) {
    context.output(getResponse(Objects.requireNonNull(context.element().getValue())));
}
class PerformImageAnnotation extends DoFn<List<AnnotateImageRequest>, List<AnnotateImageResponse>> { private ImageAnnotatorClient imageAnnotatorClient; public PerformImageAnnotation() {} /** * Parametrized constructor to make mock injection easier in testing. * * @param imageAnnotatorClient */ public PerformImageAnnotation(ImageAnnotatorClient imageAnnotatorClient) { this.imageAnnotatorClient = imageAnnotatorClient; } @StartBundle public void startBundle() throws IOException { imageAnnotatorClient = ImageAnnotatorClient.create(); } @Teardown public void teardown() { imageAnnotatorClient.close(); } @ProcessElement /** * Performs the call itself. Default access for testing. * * @param requests request list. * @return response list. */ List<AnnotateImageResponse> getResponse(List<AnnotateImageRequest> requests) { BatchAnnotateImagesResponse batchAnnotateImagesResponse = imageAnnotatorClient.batchAnnotateImages(requests); return batchAnnotateImagesResponse.getResponsesList(); } }
/**
 * Beam transform step that batch-annotates images via the Cloud Vision API.
 * Consumes KV elements whose value holds the grouped requests and produces the
 * matching list of responses.
 */
class PerformImageAnnotation extends DoFn<KV<Integer, Iterable<AnnotateImageRequest>>, List<AnnotateImageResponse>> {

    // Not serializable, hence transient; recreated on each worker in setup().
    private transient ImageAnnotatorClient imageAnnotatorClient;

    public PerformImageAnnotation() {}

    /**
     * Injection-friendly constructor, primarily for supplying a mock client in tests.
     *
     * @param imageAnnotatorClient client instance to use instead of creating one.
     */
    public PerformImageAnnotation(ImageAnnotatorClient imageAnnotatorClient) {
        this.imageAnnotatorClient = imageAnnotatorClient;
    }

    @Setup
    public void setup() throws IOException {
        imageAnnotatorClient = ImageAnnotatorClient.create();
    }

    @Teardown
    public void teardown() {
        imageAnnotatorClient.close();
    }

    @ProcessElement
    /**
     * Issues the batched annotation call. Package-private so tests can invoke it directly.
     *
     * @param requests the requests to annotate in a single batch.
     * @return one response per request, in order.
     */
    List<AnnotateImageResponse> getResponse(Iterable<AnnotateImageRequest> requests) {
        List<AnnotateImageRequest> batch = new ArrayList<>();
        for (AnnotateImageRequest request : requests) {
            batch.add(request);
        }
        return imageAnnotatorClient.batchAnnotateImages(batch).getResponsesList();
    }
}
@fndejong OK, looks good then. We need tests though: could you try to add some integration tests in the undertow/deployment module? There are already a couple of integration tests there. You can use REST Assured to test things, no need for curl. I'm also wondering if you would be interested in writing some documentation there: https://github.com/quarkusio/quarkus/blob/master/docs/src/main/asciidoc/rest-json-guide.adoc . A new section just before the conclusion would be nice. It's not the perfect place in the long term, but for now it will do.
/**
 * Builds an {@code SSLContext} from this server configuration.
 *
 * <p>Key material is taken from one of two sources:
 * a PEM certificate file plus a PEM private key file (loaded into an in-memory keystore
 * under the alias {@code "default"}), or a keystore file whose type is taken from
 * configuration or guessed from the file extension (.jks/.jceks/.p12/.pkcs12/.pfx,
 * defaulting to jks).
 *
 * @return the configured {@code SSLContext}, or {@code null} if no usable key material is
 *         configured (SSL should then not be set up)
 * @throws GeneralSecurityException if keystore, key manager, or context setup fails
 * @throws IOException if reading the certificate, key, or keystore file fails
 */
public SSLContext toSSLContext() throws GeneralSecurityException, IOException {
    Logger log = Logger.getLogger("io.quarkus.configuration.ssl");
    final Optional<Path> certFile = certificate.file;
    final Optional<Path> keyFile = certificate.keyFile;
    final Optional<Path> keyStoreFile = certificate.keyStoreFile;
    final String keystorePassword = certificate.keyStorePassword;
    final KeyStore keyStore;
    if (certFile.isPresent() && keyFile.isPresent()) {
        // PEM cert + key: build an in-memory keystore holding the single entry.
        keyStore = KeyStore.getInstance(KeyStore.getDefaultType());
        keyStore.load(null, keystorePassword.toCharArray());
        final Path certPath = certFile.get();
        final Iterator<PemEntry<?>> certItr = Pem.parsePemContent(load(certPath));
        final ArrayList<X509Certificate> certList = new ArrayList<>();
        // Collect every certificate in the file (i.e. the whole chain); warn on anything else.
        while (certItr.hasNext()) {
            final PemEntry<?> item = certItr.next();
            final X509Certificate cert = item.tryCast(X509Certificate.class);
            if (cert != null) {
                certList.add(cert);
            } else {
                log.warnf("Ignoring non-certificate in certificate file \"%s\" (the type was %s)", certPath,
                    item.getEntry().getClass());
            }
        }
        if (certList.isEmpty()) {
            log.warnf("No certificate found in file \"%s\"", certPath);
        }
        final Path keyPath = keyFile.get();
        final Iterator<PemEntry<?>> keyItr = Pem.parsePemContent(load(keyPath));
        final PrivateKey privateKey;
        // Use the first private key found; if the file has none, SSL is not configured.
        for (;;) {
            if (!keyItr.hasNext()) {
                log.warnf("No key found in file \"%s\"", keyPath);
                return null;
            }
            final PemEntry<?> next = keyItr.next();
            final PrivateKey entryKey = next.tryCast(PrivateKey.class);
            if (entryKey != null) {
                privateKey = entryKey;
                break;
            }
            log.warnf("Ignoring non-key in key file \"%s\" (the type was %s)", keyPath,
                next.getEntry().getClass());
        }
        if (keyItr.hasNext()) {
            log.warnf("Ignoring extra content in key file \"%s\"", keyPath);
        }
        keyStore.setEntry("default",
            new KeyStore.PrivateKeyEntry(privateKey, certList.toArray(new X509Certificate[0])),
            new KeyStore.PasswordProtection(keystorePassword.toCharArray()));
    } else if (keyStoreFile.isPresent()) {
        final Path keyStorePath = keyStoreFile.get();
        final Optional<String> keyStoreFileType = certificate.keyStoreFileType;
        final String type;
        if (keyStoreFileType.isPresent()) {
            type = keyStoreFileType.get();
        } else {
            // No explicit type configured: guess from the file extension, defaulting to jks.
            final String pathName = keyStorePath.toString();
            if (pathName.endsWith(".jks")) {
                type = "jks";
            } else if (pathName.endsWith(".jceks")) {
                type = "jceks";
            } else if (pathName.endsWith(".p12") || pathName.endsWith(".pkcs12") || pathName.endsWith(".pfx")) {
                type = "pkcs12";
            } else {
                type = "jks";
            }
        }
        keyStore = KeyStore.getInstance(type);
        // NOTE(review): the keystore is loaded with a null password (no integrity check);
        // confirm this is intentional rather than using keystorePassword here.
        try (InputStream is = Files.newInputStream(keyStorePath)) {
            keyStore.load(is, null);
        }
    } else {
        // Neither PEM material nor a keystore configured: SSL is not set up.
        return null;
    }
    final KeyManagerFactory keyManagerFactory = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm());
    keyManagerFactory.init(keyStore, keystorePassword.toCharArray());
    final SSLContextBuilder sslContextBuilder = new SSLContextBuilder();
    sslContextBuilder.setCipherSuiteSelector(cipherSuites.orElse(CipherSuiteSelector.openSslDefault()));
    ProtocolSelector protocolSelector;
    if (protocols.isEmpty()) {
        protocolSelector = ProtocolSelector.defaultProtocols();
    } else {
        protocolSelector = ProtocolSelector.empty().add(protocols.toArray(new Protocol[0]));
    }
    sslContextBuilder.setProtocolSelector(protocolSelector);
    sslContextBuilder.setKeyManager((X509ExtendedKeyManager) keyManagerFactory.getKeyManagers()[0]);
    if (sessionCacheSize.isPresent()) {
        sslContextBuilder.setSessionCacheSize(sessionCacheSize.getAsInt());
    }
    if (sessionTimeout.isPresent()) {
        // Clamp to int range; the builder takes the timeout in seconds as an int.
        sslContextBuilder.setSessionTimeout((int) min(Integer.MAX_VALUE, sessionTimeout.get().getSeconds()));
    }
    if (providerName.isPresent()) {
        sslContextBuilder.setProviderName(providerName.get());
    }
    return sslContextBuilder.build().create();
}
new KeyStore.PasswordProtection(keystorePassword.toCharArray()));
/**
 * Builds an {@code SSLContext} from this server configuration.
 *
 * <p>Key material is taken from a PEM certificate/key pair, or from a keystore file; the
 * keystore is looked up first as a classpath resource and then on the filesystem.
 *
 * @return the configured {@code SSLContext}, or {@code null} if no usable key material is
 *         configured (SSL should then not be set up)
 * @throws GeneralSecurityException if keystore, key manager, or context setup fails
 * @throws IOException if reading the certificate, key, or keystore fails
 */
public SSLContext toSSLContext() throws GeneralSecurityException, IOException {
    Logger log = Logger.getLogger("io.quarkus.configuration.ssl");
    final Optional<Path> certFile = certificate.file;
    final Optional<Path> keyFile = certificate.keyFile;
    final Optional<Path> keyStoreFile = certificate.keyStoreFile;
    final String keystorePassword = certificate.keyStorePassword;
    final KeyStore keyStore;
    if (certFile.isPresent() && keyFile.isPresent()) {
        // PEM cert + key: build an in-memory keystore holding the single "default" entry.
        keyStore = KeyStore.getInstance(KeyStore.getDefaultType());
        keyStore.load(null, keystorePassword.toCharArray());
        final Path certPath = certFile.get();
        final Iterator<PemEntry<?>> certItr = Pem.parsePemContent(load(certPath));
        final ArrayList<X509Certificate> certList = new ArrayList<>();
        // Collect the whole certificate chain; warn on non-certificate PEM entries.
        while (certItr.hasNext()) {
            final PemEntry<?> item = certItr.next();
            final X509Certificate cert = item.tryCast(X509Certificate.class);
            if (cert != null) {
                certList.add(cert);
            } else {
                log.warnf("Ignoring non-certificate in certificate file \"%s\" (the type was %s)", certPath,
                    item.getEntry().getClass());
            }
        }
        if (certList.isEmpty()) {
            log.warnf("No certificate found in file \"%s\"", certPath);
        }
        final Path keyPath = keyFile.get();
        final Iterator<PemEntry<?>> keyItr = Pem.parsePemContent(load(keyPath));
        final PrivateKey privateKey;
        // Use the first private key found; bail out (null) if the file contains none.
        for (;;) {
            if (!keyItr.hasNext()) {
                log.warnf("No key found in file \"%s\"", keyPath);
                return null;
            }
            final PemEntry<?> next = keyItr.next();
            final PrivateKey entryKey = next.tryCast(PrivateKey.class);
            if (entryKey != null) {
                privateKey = entryKey;
                break;
            }
            log.warnf("Ignoring non-key in key file \"%s\" (the type was %s)", keyPath,
                next.getEntry().getClass());
        }
        if (keyItr.hasNext()) {
            log.warnf("Ignoring extra content in key file \"%s\"", keyPath);
        }
        keyStore.setEntry("default",
            new KeyStore.PrivateKeyEntry(privateKey, certList.toArray(new X509Certificate[0])),
            new KeyStore.PasswordProtection(keystorePassword.toCharArray()));
    } else if (keyStoreFile.isPresent()) {
        final Path keyStorePath = keyStoreFile.get();
        final Optional<String> keyStoreFileType = certificate.keyStoreFileType;
        final String type;
        if (keyStoreFileType.isPresent()) {
            type = keyStoreFileType.get();
        } else {
            // No explicit type configured: guess from the file extension, defaulting to jks.
            final String pathName = keyStorePath.toString();
            if (pathName.endsWith(".jks")) {
                type = "jks";
            } else if (pathName.endsWith(".jceks")) {
                type = "jceks";
            } else if (pathName.endsWith(".p12") || pathName.endsWith(".pkcs12") || pathName.endsWith(".pfx")) {
                type = "pkcs12";
            } else {
                type = "jks";
            }
        }
        keyStore = KeyStore.getInstance(type);
        // Prefer a classpath resource with the configured name; fall back to the filesystem.
        final InputStream keystoreAsResource = this.getClass().getClassLoader()
            .getResourceAsStream(keyStorePath.toString());
        if (keystoreAsResource != null) {
            try (InputStream is = keystoreAsResource) {
                keyStore.load(is, null);
            }
        } else {
            try (InputStream is = Files.newInputStream(keyStorePath)) {
                keyStore.load(is, null);
            }
        }
    } else {
        // Neither PEM material nor a keystore configured: SSL is not set up.
        return null;
    }
    final KeyManagerFactory keyManagerFactory = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm());
    keyManagerFactory.init(keyStore, keystorePassword.toCharArray());
    final SSLContextBuilder sslContextBuilder = new SSLContextBuilder();
    sslContextBuilder.setCipherSuiteSelector(cipherSuites.orElse(CipherSuiteSelector.openSslDefault()));
    ProtocolSelector protocolSelector;
    if (protocols.isEmpty()) {
        protocolSelector = ProtocolSelector.defaultProtocols();
    } else {
        protocolSelector = ProtocolSelector.empty().add(protocols.toArray(new Protocol[0]));
    }
    sslContextBuilder.setProtocolSelector(protocolSelector);
    sslContextBuilder.setKeyManager((X509ExtendedKeyManager) keyManagerFactory.getKeyManagers()[0]);
    if (sessionCacheSize.isPresent()) {
        sslContextBuilder.setSessionCacheSize(sessionCacheSize.getAsInt());
    }
    if (sessionTimeout.isPresent()) {
        // Clamp to int range; the builder takes the timeout in seconds as an int.
        sslContextBuilder.setSessionTimeout((int) min(Integer.MAX_VALUE, sessionTimeout.get().getSeconds()));
    }
    if (providerName.isPresent()) {
        sslContextBuilder.setProviderName(providerName.get());
    }
    return sslContextBuilder.build().create();
}
/**
 * Server-side SSL configuration.
 */
class ServerSslConfig {
    /** The server certificate configuration. */
    public CertificateConfig certificate;

    /** The cipher suites to use. If none is given, a reasonable default is selected. */
    @ConfigItem
    public Optional<CipherSuiteSelector> cipherSuites;

    /** The list of protocols to explicitly enable. */
    @ConfigItem(defaultValue = "TLSv1.3,TLSv1.2")
    public List<Protocol> protocols;

    /** The SSL provider name to use. If none is given, the platform default is used. */
    @ConfigItem
    public Optional<String> providerName;

    /** The SSL session cache size. If not given, the platform default is used. */
    @ConfigItem
    public OptionalInt sessionCacheSize;

    /** The SSL session cache timeout. If not given, the platform default is used. */
    @ConfigItem
    public Optional<Duration> sessionTimeout;

    // NOTE(review): the javadoc originally here described a toSSLContext() method ("Get an
    // SSLContext for this server configuration") that is not present in this excerpt —
    // confirm against the full source.

    /**
     * Reads the whole file at {@code path} as UTF-8 text.
     *
     * @param path the file to read
     * @return a code point iterator over the decoded characters
     * @throws IOException if the file cannot be read
     */
    static CodePointIterator load(final Path path) throws IOException {
        // The byte count is an upper bound on the decoded UTF-16 char count for UTF-8 input.
        final int size = Math.toIntExact(Files.size(path));
        char[] chars = new char[size];
        int c = 0;
        try (InputStream is = Files.newInputStream(path)) {
            try (InputStreamReader isr = new InputStreamReader(is, StandardCharsets.UTF_8)) {
                while (c < size) {
                    final int res = isr.read(chars, c, size - c);
                    // EOF before reaching the byte-count bound is normal for multi-byte text.
                    if (res == -1) break;
                    c += res;
                }
            }
        }
        return CodePointIterator.ofChars(chars, 0, c);
    }
}
/**
 * Server-side SSL configuration values.
 */
class ServerSslConfig {
    /** The server certificate configuration. */
    public CertificateConfig certificate;

    /** The cipher suites to use. If none is given, a reasonable default is selected. */
    @ConfigItem
    public Optional<CipherSuiteSelector> cipherSuites;

    /** The list of protocols to explicitly enable. */
    @ConfigItem(defaultValue = "TLSv1.3,TLSv1.2")
    public List<Protocol> protocols;

    /** The SSL provider name to use. If none is given, the platform default is used. */
    @ConfigItem
    public Optional<String> providerName;

    /** The SSL session cache size. If not given, the platform default is used. */
    @ConfigItem
    public OptionalInt sessionCacheSize;

    /** The SSL session cache timeout. If not given, the platform default is used. */
    @ConfigItem
    public Optional<Duration> sessionTimeout;

    /**
     * Decodes the file at {@code path} as UTF-8 and exposes it as a code point iterator.
     *
     * @param path file to load
     * @return iterator over the file's decoded characters
     * @throws IOException on any read failure
     */
    static CodePointIterator load(final Path path) throws IOException {
        // Byte count bounds the decoded char count for UTF-8, so it sizes the buffer.
        final int byteCount = Math.toIntExact(Files.size(path));
        final char[] buffer = new char[byteCount];
        int filled = 0;
        try (InputStream stream = Files.newInputStream(path)) {
            try (InputStreamReader reader = new InputStreamReader(stream, StandardCharsets.UTF_8)) {
                int read;
                while (filled < byteCount
                        && (read = reader.read(buffer, filled, byteCount - filled)) != -1) {
                    filled += read;
                }
            }
        }
        return CodePointIterator.ofChars(buffer, 0, filled);
    }
}
ok, done (and there goes the 2-line patch :( )
/**
 * Requests the consumed subpartition from every input channel, exactly once per gate.
 *
 * <p>The request round is guarded by {@code requestLock} so it cannot race with runtime
 * channel updates; {@code requestedPartitionsFlag} makes repeated calls no-ops.
 *
 * @throws IOException if a channel's subpartition request fails.
 * @throws InterruptedException if the requesting thread is interrupted.
 * @throws IllegalStateException if the gate was already released or mis-configured.
 */
public void requestPartitions() throws IOException, InterruptedException {
    if (!requestedPartitionsFlag) {
        synchronized (requestLock) {
            if (isReleased) {
                throw new IllegalStateException("Already released.");
            }
            if (numberOfInputChannels != inputChannels.size()) {
                // Fixed message: the concatenated literals were missing a separating space
                // after "between".
                throw new IllegalStateException("Bug in input gate setup logic: mismatch between "
                    + "number of total input channels and the currently set number of input "
                    + "channels.");
            }
            for (InputChannel inputChannel : inputChannels.values()) {
                inputChannel.requestSubpartition(consumedSubpartitionIndex);
            }
        }
        requestedPartitionsFlag = true;
    }
}
if (!requestedPartitionsFlag) {
/**
 * Requests the consumed subpartition from every input channel, exactly once per gate.
 *
 * <p>Early-returns if the request round already happened; otherwise runs it under
 * {@code requestLock} so it cannot race with runtime channel updates.
 *
 * @throws IOException if a channel's subpartition request fails.
 * @throws InterruptedException if the requesting thread is interrupted.
 * @throws IllegalStateException if the gate was already released or mis-configured.
 */
public void requestPartitions() throws IOException, InterruptedException {
    if (requestedPartitionsFlag) {
        return;
    }
    synchronized (requestLock) {
        if (isReleased) {
            throw new IllegalStateException("Already released.");
        }
        if (numberOfInputChannels != inputChannels.size()) {
            // Fixed message: the concatenated literals were missing a separating space
            // after "between".
            throw new IllegalStateException("Bug in input gate setup logic: mismatch between "
                + "number of total input channels and the currently set number of input "
                + "channels.");
        }
        for (InputChannel inputChannel : inputChannels.values()) {
            inputChannel.requestSubpartition(consumedSubpartitionIndex);
        }
    }
    requestedPartitionsFlag = true;
}
/**
 * An input gate that consumes one intermediate result, with one input channel per consumed
 * partition. Channels that have data available enqueue themselves into
 * {@code inputChannelsWithData}, from which {@link #getNextBufferOrEvent()} serves the consumer.
 */
class SingleInputGate implements InputGate {

    private static final Logger LOG = LoggerFactory.getLogger(SingleInputGate.class);

    /** Lock object to guard partition requests and runtime channel updates. */
    private final Object requestLock = new Object();

    /** The name of the owning task, for logging purposes. */
    private final String owningTaskName;

    /** The job ID of the owning task. */
    private final JobID jobId;

    /**
     * The ID of the consumed intermediate result. Each input gate consumes partitions of the
     * intermediate result specified by this ID. This ID also identifies the input gate at the
     * consuming task.
     */
    private final IntermediateDataSetID consumedResultId;

    /** The type of the partition the input gate is consuming. */
    private final ResultPartitionType consumedPartitionType;

    /**
     * The index of the consumed subpartition of each consumed partition. This index depends on the
     * {@link DistributionPattern} and the subtask indices of the producing and consuming task.
     */
    private final int consumedSubpartitionIndex;

    /** The number of input channels (equivalent to the number of consumed partitions). */
    private final int numberOfInputChannels;

    /**
     * Input channels. There is one input channel for each consumed intermediate result partition.
     * We store this in a map for runtime updates of single channels.
     */
    private final Map<IntermediateResultPartitionID, InputChannel> inputChannels;

    /** Channels, which notified this input gate about available data. */
    private final ArrayDeque<InputChannel> inputChannelsWithData = new ArrayDeque<>();

    /**
     * Field guaranteeing uniqueness for the inputChannelsWithData queue. Both of those fields
     * should be unified onto one.
     */
    private final BitSet enqueuedInputChannelsWithData;

    /** Channels from which an EndOfPartitionEvent has already been received. */
    private final BitSet channelsWithEndOfPartitionEvents;

    /** The partition state listener listening to failed partition requests. */
    private final TaskActions taskActions;

    /**
     * Buffer pool for incoming buffers. Incoming data from remote channels is copied to buffers
     * from this pool.
     */
    private BufferPool bufferPool;

    /** Global network buffer pool to request and recycle exclusive buffers (only for credit-based). */
    private NetworkBufferPool networkBufferPool;

    private final boolean isCreditBased;

    /** Set once every channel has delivered its EndOfPartitionEvent. */
    private boolean hasReceivedAllEndOfPartitionEvents;

    /** Flag indicating whether partitions have been requested. */
    private boolean requestedPartitionsFlag;

    /** Flag indicating whether all resources have been released. */
    private volatile boolean isReleased;

    /** Registered listener to forward buffer notifications to. */
    private volatile InputGateListener inputGateListener;

    /** Task events still to be forwarded to channels that are not yet initialized. */
    private final List<TaskEvent> pendingEvents = new ArrayList<>();

    private int numberOfUninitializedChannels;

    /** Number of network buffers to use for each remote input channel. */
    private int networkBuffersPerChannel;

    /** A timer to retrigger local partition requests. Only initialized if actually needed. */
    private Timer retriggerLocalRequestTimer;

    public SingleInputGate(
            String owningTaskName,
            JobID jobId,
            IntermediateDataSetID consumedResultId,
            final ResultPartitionType consumedPartitionType,
            int consumedSubpartitionIndex,
            int numberOfInputChannels,
            TaskActions taskActions,
            TaskIOMetricGroup metrics,
            boolean isCreditBased) {

        this.owningTaskName = checkNotNull(owningTaskName);
        this.jobId = checkNotNull(jobId);
        this.consumedResultId = checkNotNull(consumedResultId);
        this.consumedPartitionType = checkNotNull(consumedPartitionType);

        checkArgument(consumedSubpartitionIndex >= 0);
        this.consumedSubpartitionIndex = consumedSubpartitionIndex;

        checkArgument(numberOfInputChannels > 0);
        this.numberOfInputChannels = numberOfInputChannels;

        this.inputChannels = new HashMap<>(numberOfInputChannels);
        this.channelsWithEndOfPartitionEvents = new BitSet(numberOfInputChannels);
        this.enqueuedInputChannelsWithData = new BitSet(numberOfInputChannels);

        this.taskActions = checkNotNull(taskActions);
        this.isCreditBased = isCreditBased;
    }

    @Override
    public int getNumberOfInputChannels() {
        return numberOfInputChannels;
    }

    public IntermediateDataSetID getConsumedResultId() {
        return consumedResultId;
    }

    /**
     * Returns the type of this input channel's consumed result partition.
     *
     * @return consumed result partition type
     */
    public ResultPartitionType getConsumedPartitionType() {
        return consumedPartitionType;
    }

    BufferProvider getBufferProvider() {
        return bufferPool;
    }

    public BufferPool getBufferPool() {
        return bufferPool;
    }

    @Override
    public int getPageSize() {
        if (bufferPool != null) {
            return bufferPool.getMemorySegmentSize();
        }
        else {
            throw new IllegalStateException("Input gate has not been initialized with buffers.");
        }
    }

    /**
     * Sums the queued buffers of all remote input channels. Best effort, for metrics only:
     * iteration over {@code inputChannels} is unsynchronized and may race with runtime channel
     * updates, so failures are retried a few times and then reported as 0 instead of being
     * propagated.
     */
    public int getNumberOfQueuedBuffers() {
        for (int retry = 0; retry < 3; retry++) {
            try {
                int totalBuffers = 0;
                for (InputChannel channel : inputChannels.values()) {
                    if (channel instanceof RemoteInputChannel) {
                        totalBuffers += ((RemoteInputChannel) channel).getNumberOfQueuedBuffers();
                    }
                }
                return totalBuffers;
            }
            catch (Exception ignored) {
                // Deliberately swallowed: this value feeds metrics and must not fail the task.
            }
        }
        return 0;
    }

    @Override
    public String getOwningTaskName() {
        return owningTaskName;
    }

    public void setBufferPool(BufferPool bufferPool) {
        // Fixed message: the original concatenation was missing a space ("hasalready").
        checkState(this.bufferPool == null, "Bug in input gate setup logic: buffer pool has " +
                "already been set for this input gate.");

        this.bufferPool = checkNotNull(bufferPool);
    }

    /**
     * Assign the exclusive buffers to all remote input channels directly for credit-based mode.
     *
     * @param networkBufferPool The global pool to request and recycle exclusive buffers
     * @param networkBuffersPerChannel The number of exclusive buffers for each channel
     */
    public void assignExclusiveSegments(NetworkBufferPool networkBufferPool, int networkBuffersPerChannel) throws IOException {
        checkState(this.isCreditBased, "Bug in input gate setup logic: exclusive buffers only exist with credit-based flow control.");
        // Fixed message: the original concatenation was missing a space ("hasalready").
        checkState(this.networkBufferPool == null, "Bug in input gate setup logic: global buffer pool has " +
                "already been set for this input gate.");

        this.networkBufferPool = checkNotNull(networkBufferPool);
        this.networkBuffersPerChannel = networkBuffersPerChannel;

        synchronized (requestLock) {
            for (InputChannel inputChannel : inputChannels.values()) {
                if (inputChannel instanceof RemoteInputChannel) {
                    ((RemoteInputChannel) inputChannel).assignExclusiveSegments(
                            networkBufferPool.requestMemorySegments(networkBuffersPerChannel));
                }
            }
        }
    }

    /**
     * The exclusive segments are recycled to the network buffer pool directly when an input
     * channel is released.
     *
     * @param segments The exclusive segments that need to be recycled
     */
    public void returnExclusiveSegments(List<MemorySegment> segments) throws IOException {
        networkBufferPool.recycleMemorySegments(segments);
    }

    public void setInputChannel(IntermediateResultPartitionID partitionId, InputChannel inputChannel) {
        synchronized (requestLock) {
            if (inputChannels.put(checkNotNull(partitionId), checkNotNull(inputChannel)) == null
                    && inputChannel instanceof UnknownInputChannel) {

                numberOfUninitializedChannels++;
            }
        }
    }

    /** Replaces an unknown input channel by a local or remote one, once the producer location is known. */
    public void updateInputChannel(InputChannelDeploymentDescriptor icdd) throws IOException, InterruptedException {
        synchronized (requestLock) {
            if (isReleased) {
                // The gate was concurrently released; nothing to update.
                return;
            }

            final IntermediateResultPartitionID partitionId = icdd.getConsumedPartitionId().getPartitionId();

            InputChannel current = inputChannels.get(partitionId);

            if (current instanceof UnknownInputChannel) {
                UnknownInputChannel unknownChannel = (UnknownInputChannel) current;
                InputChannel newChannel;
                ResultPartitionLocation partitionLocation = icdd.getConsumedPartitionLocation();
                if (partitionLocation.isLocal()) {
                    newChannel = unknownChannel.toLocalInputChannel();
                }
                else if (partitionLocation.isRemote()) {
                    newChannel = unknownChannel.toRemoteInputChannel(partitionLocation.getConnectionId());

                    if (this.isCreditBased) {
                        checkState(this.networkBufferPool != null, "Bug in input gate setup logic: " +
                                "global buffer pool has not been set for this input gate.");
                        ((RemoteInputChannel) newChannel).assignExclusiveSegments(
                                networkBufferPool.requestMemorySegments(networkBuffersPerChannel));
                    }
                }
                else {
                    throw new IllegalStateException("Tried to update unknown channel with unknown channel.");
                }

                LOG.debug("{}: Updated unknown input channel to {}.", owningTaskName, newChannel);

                inputChannels.put(partitionId, newChannel);

                if (requestedPartitionsFlag) {
                    newChannel.requestSubpartition(consumedSubpartitionIndex);
                }

                for (TaskEvent event : pendingEvents) {
                    newChannel.sendTaskEvent(event);
                }

                if (--numberOfUninitializedChannels == 0) {
                    pendingEvents.clear();
                }
            }
        }
    }

    /**
     * Retriggers a partition request.
     */
    public void retriggerPartitionRequest(IntermediateResultPartitionID partitionId) throws IOException, InterruptedException {
        synchronized (requestLock) {
            if (!isReleased) {
                final InputChannel ch = inputChannels.get(partitionId);
                checkNotNull(ch, "Unknown input channel with ID " + partitionId);

                LOG.debug("{}: Retriggering partition request {}:{}.", owningTaskName, ch.partitionId, consumedSubpartitionIndex);

                if (ch.getClass() == RemoteInputChannel.class) {
                    final RemoteInputChannel rch = (RemoteInputChannel) ch;
                    rch.retriggerSubpartitionRequest(consumedSubpartitionIndex);
                }
                else if (ch.getClass() == LocalInputChannel.class) {
                    final LocalInputChannel ich = (LocalInputChannel) ch;

                    if (retriggerLocalRequestTimer == null) {
                        retriggerLocalRequestTimer = new Timer(true);
                    }

                    ich.retriggerSubpartitionRequest(retriggerLocalRequestTimer, consumedSubpartitionIndex);
                }
                else {
                    throw new IllegalStateException(
                            "Unexpected type of channel to retrigger partition: " + ch.getClass());
                }
            }
        }
    }

    public void releaseAllResources() throws IOException {
        boolean released = false;
        synchronized (requestLock) {
            if (!isReleased) {
                try {
                    LOG.debug("{}: Releasing {}.", owningTaskName, this);

                    if (retriggerLocalRequestTimer != null) {
                        retriggerLocalRequestTimer.cancel();
                    }

                    for (InputChannel inputChannel : inputChannels.values()) {
                        try {
                            inputChannel.releaseAllResources();
                        }
                        catch (IOException e) {
                            LOG.warn("{}: Error during release of channel resources: {}.",
                                    owningTaskName, e.getMessage(), e);
                        }
                    }

                    if (bufferPool != null) {
                        bufferPool.lazyDestroy();
                    }
                }
                finally {
                    isReleased = true;
                    released = true;
                }
            }
        }

        if (released) {
            // Wake up blocked getNextBufferOrEvent() callers so they observe the released state.
            synchronized (inputChannelsWithData) {
                inputChannelsWithData.notifyAll();
            }
        }
    }

    @Override
    public boolean isFinished() {
        synchronized (requestLock) {
            for (InputChannel inputChannel : inputChannels.values()) {
                if (!inputChannel.isReleased()) {
                    return false;
                }
            }
        }

        return true;
    }

    // Fixed: the annotation was duplicated ("@Override @Override"), which is a compile-time
    // error because @Override is not a repeatable annotation type.
    @Override
    public Optional<BufferOrEvent> getNextBufferOrEvent() throws IOException, InterruptedException {
        return getNextBufferOrEvent(true);
    }

    @Override
    public Optional<BufferOrEvent> pollNextBufferOrEvent() throws IOException, InterruptedException {
        return getNextBufferOrEvent(false);
    }

    private Optional<BufferOrEvent> getNextBufferOrEvent(boolean blocking) throws IOException, InterruptedException {
        if (hasReceivedAllEndOfPartitionEvents) {
            return Optional.empty();
        }

        if (isReleased) {
            throw new IllegalStateException("Released");
        }

        // NOTE(review): requestPartitions() is not part of this excerpt; it is assumed to be
        // defined elsewhere in this class -- confirm against the full source.
        requestPartitions();

        InputChannel currentChannel;
        boolean moreAvailable;
        Optional<BufferAndAvailability> result = Optional.empty();

        do {
            synchronized (inputChannelsWithData) {
                while (inputChannelsWithData.size() == 0) {
                    if (isReleased) {
                        throw new IllegalStateException("Released");
                    }

                    if (blocking) {
                        inputChannelsWithData.wait();
                    }
                    else {
                        return Optional.empty();
                    }
                }

                currentChannel = inputChannelsWithData.remove();
                enqueuedInputChannelsWithData.clear(currentChannel.getChannelIndex());
                moreAvailable = !inputChannelsWithData.isEmpty();
            }

            result = currentChannel.getNextBuffer();
        } while (!result.isPresent());

        // If the channel still has data, re-enqueue it so subsequent calls can pick it up.
        if (result.get().moreAvailable()) {
            queueChannel(currentChannel);
            moreAvailable = true;
        }

        final Buffer buffer = result.get().buffer();
        if (buffer.isBuffer()) {
            return Optional.of(new BufferOrEvent(buffer, currentChannel.getChannelIndex(), moreAvailable));
        }
        else {
            final AbstractEvent event = EventSerializer.fromBuffer(buffer, getClass().getClassLoader());

            if (event.getClass() == EndOfPartitionEvent.class) {
                channelsWithEndOfPartitionEvents.set(currentChannel.getChannelIndex());

                if (channelsWithEndOfPartitionEvents.cardinality() == numberOfInputChannels) {
                    // Sanity check: no more data may be available once all partitions ended.
                    checkState(!moreAvailable || !pollNextBufferOrEvent().isPresent());
                    moreAvailable = false;
                    hasReceivedAllEndOfPartitionEvents = true;
                }

                currentChannel.notifySubpartitionConsumed();
                currentChannel.releaseAllResources();
            }

            return Optional.of(new BufferOrEvent(event, currentChannel.getChannelIndex(), moreAvailable));
        }
    }

    @Override
    public void sendTaskEvent(TaskEvent event) throws IOException {
        synchronized (requestLock) {
            for (InputChannel inputChannel : inputChannels.values()) {
                inputChannel.sendTaskEvent(event);
            }

            // Remember the event for channels that are not yet initialized.
            if (numberOfUninitializedChannels > 0) {
                pendingEvents.add(event);
            }
        }
    }

    @Override
    public void registerListener(InputGateListener inputGateListener) {
        if (this.inputGateListener == null) {
            this.inputGateListener = inputGateListener;
        }
        else {
            throw new IllegalStateException("Multiple listeners");
        }
    }

    void notifyChannelNonEmpty(InputChannel channel) {
        queueChannel(checkNotNull(channel));
    }

    void triggerPartitionStateCheck(ResultPartitionID partitionId) {
        taskActions.triggerPartitionProducerStateCheck(jobId, consumedResultId, partitionId);
    }

    /** Enqueues the channel (at most once) and notifies waiters/listener if the gate became non-empty. */
    private void queueChannel(InputChannel channel) {
        int availableChannels;

        synchronized (inputChannelsWithData) {
            if (enqueuedInputChannelsWithData.get(channel.getChannelIndex())) {
                return;
            }
            availableChannels = inputChannelsWithData.size();

            inputChannelsWithData.add(channel);
            enqueuedInputChannelsWithData.set(channel.getChannelIndex());

            if (availableChannels == 0) {
                inputChannelsWithData.notifyAll();
            }
        }

        if (availableChannels == 0) {
            InputGateListener listener = inputGateListener;
            if (listener != null) {
                listener.notifyInputGateNonEmpty(this);
            }
        }
    }

    Map<IntermediateResultPartitionID, InputChannel> getInputChannels() {
        return inputChannels;
    }

    /**
     * Creates an input gate and all of its input channels.
     */
    public static SingleInputGate create(
            String owningTaskName,
            JobID jobId,
            ExecutionAttemptID executionId,
            InputGateDeploymentDescriptor igdd,
            NetworkEnvironment networkEnvironment,
            TaskActions taskActions,
            TaskIOMetricGroup metrics) {

        final IntermediateDataSetID consumedResultId = checkNotNull(igdd.getConsumedResultId());
        final ResultPartitionType consumedPartitionType = checkNotNull(igdd.getConsumedPartitionType());

        final int consumedSubpartitionIndex = igdd.getConsumedSubpartitionIndex();
        checkArgument(consumedSubpartitionIndex >= 0);

        final InputChannelDeploymentDescriptor[] icdd = checkNotNull(igdd.getInputChannelDeploymentDescriptors());

        final SingleInputGate inputGate = new SingleInputGate(
                owningTaskName, jobId, consumedResultId, consumedPartitionType,
                consumedSubpartitionIndex, icdd.length, taskActions, metrics,
                networkEnvironment.isCreditBased());

        final InputChannel[] inputChannels = new InputChannel[icdd.length];

        int numLocalChannels = 0;
        int numRemoteChannels = 0;
        int numUnknownChannels = 0;

        for (int i = 0; i < inputChannels.length; i++) {
            final ResultPartitionID partitionId = icdd[i].getConsumedPartitionId();
            final ResultPartitionLocation partitionLocation = icdd[i].getConsumedPartitionLocation();

            if (partitionLocation.isLocal()) {
                inputChannels[i] = new LocalInputChannel(inputGate, i, partitionId,
                        networkEnvironment.getResultPartitionManager(),
                        networkEnvironment.getTaskEventDispatcher(),
                        networkEnvironment.getPartitionRequestInitialBackoff(),
                        networkEnvironment.getPartitionRequestMaxBackoff(),
                        metrics);

                numLocalChannels++;
            }
            else if (partitionLocation.isRemote()) {
                inputChannels[i] = new RemoteInputChannel(inputGate, i, partitionId,
                        partitionLocation.getConnectionId(),
                        networkEnvironment.getConnectionManager(),
                        networkEnvironment.getPartitionRequestInitialBackoff(),
                        networkEnvironment.getPartitionRequestMaxBackoff(),
                        metrics);

                numRemoteChannels++;
            }
            else if (partitionLocation.isUnknown()) {
                inputChannels[i] = new UnknownInputChannel(inputGate, i, partitionId,
                        networkEnvironment.getResultPartitionManager(),
                        networkEnvironment.getTaskEventDispatcher(),
                        networkEnvironment.getConnectionManager(),
                        networkEnvironment.getPartitionRequestInitialBackoff(),
                        networkEnvironment.getPartitionRequestMaxBackoff(),
                        metrics);

                numUnknownChannels++;
            }
            else {
                throw new IllegalStateException("Unexpected partition location.");
            }

            inputGate.setInputChannel(partitionId.getPartitionId(), inputChannels[i]);
        }

        LOG.debug("{}: Created {} input channels (local: {}, remote: {}, unknown: {}).",
                owningTaskName, inputChannels.length, numLocalChannels, numRemoteChannels, numUnknownChannels);

        return inputGate;
    }
}
/**
 * An input gate that consumes one intermediate result, with one input channel per consumed
 * partition. Channels that have data available enqueue themselves into
 * {@code inputChannelsWithData}, from which {@link #getNextBufferOrEvent()} serves the consumer.
 */
class SingleInputGate implements InputGate {

    private static final Logger LOG = LoggerFactory.getLogger(SingleInputGate.class);

    /** Lock object to guard partition requests and runtime channel updates. */
    private final Object requestLock = new Object();

    /** The name of the owning task, for logging purposes. */
    private final String owningTaskName;

    /** The job ID of the owning task. */
    private final JobID jobId;

    /**
     * The ID of the consumed intermediate result. Each input gate consumes partitions of the
     * intermediate result specified by this ID. This ID also identifies the input gate at the
     * consuming task.
     */
    private final IntermediateDataSetID consumedResultId;

    /** The type of the partition the input gate is consuming. */
    private final ResultPartitionType consumedPartitionType;

    /**
     * The index of the consumed subpartition of each consumed partition. This index depends on the
     * {@link DistributionPattern} and the subtask indices of the producing and consuming task.
     */
    private final int consumedSubpartitionIndex;

    /** The number of input channels (equivalent to the number of consumed partitions). */
    private final int numberOfInputChannels;

    /**
     * Input channels. There is one input channel for each consumed intermediate result partition.
     * We store this in a map for runtime updates of single channels.
     */
    private final Map<IntermediateResultPartitionID, InputChannel> inputChannels;

    /** Channels, which notified this input gate about available data. */
    private final ArrayDeque<InputChannel> inputChannelsWithData = new ArrayDeque<>();

    /**
     * Field guaranteeing uniqueness for the inputChannelsWithData queue. Both of those fields
     * should be unified onto one.
     */
    private final BitSet enqueuedInputChannelsWithData;

    /** Channels from which an EndOfPartitionEvent has already been received. */
    private final BitSet channelsWithEndOfPartitionEvents;

    /** The partition state listener listening to failed partition requests. */
    private final TaskActions taskActions;

    /**
     * Buffer pool for incoming buffers. Incoming data from remote channels is copied to buffers
     * from this pool.
     */
    private BufferPool bufferPool;

    /** Global network buffer pool to request and recycle exclusive buffers (only for credit-based). */
    private NetworkBufferPool networkBufferPool;

    private final boolean isCreditBased;

    /** Set once every channel has delivered its EndOfPartitionEvent. */
    private boolean hasReceivedAllEndOfPartitionEvents;

    /** Flag indicating whether partitions have been requested. */
    private boolean requestedPartitionsFlag;

    /** Flag indicating whether all resources have been released. */
    private volatile boolean isReleased;

    /** Registered listener to forward buffer notifications to. */
    private volatile InputGateListener inputGateListener;

    /** Task events still to be forwarded to channels that are not yet initialized. */
    private final List<TaskEvent> pendingEvents = new ArrayList<>();

    private int numberOfUninitializedChannels;

    /** Number of network buffers to use for each remote input channel. */
    private int networkBuffersPerChannel;

    /** A timer to retrigger local partition requests. Only initialized if actually needed. */
    private Timer retriggerLocalRequestTimer;

    public SingleInputGate(
            String owningTaskName,
            JobID jobId,
            IntermediateDataSetID consumedResultId,
            final ResultPartitionType consumedPartitionType,
            int consumedSubpartitionIndex,
            int numberOfInputChannels,
            TaskActions taskActions,
            TaskIOMetricGroup metrics,
            boolean isCreditBased) {

        this.owningTaskName = checkNotNull(owningTaskName);
        this.jobId = checkNotNull(jobId);
        this.consumedResultId = checkNotNull(consumedResultId);
        this.consumedPartitionType = checkNotNull(consumedPartitionType);

        checkArgument(consumedSubpartitionIndex >= 0);
        this.consumedSubpartitionIndex = consumedSubpartitionIndex;

        checkArgument(numberOfInputChannels > 0);
        this.numberOfInputChannels = numberOfInputChannels;

        this.inputChannels = new HashMap<>(numberOfInputChannels);
        this.channelsWithEndOfPartitionEvents = new BitSet(numberOfInputChannels);
        this.enqueuedInputChannelsWithData = new BitSet(numberOfInputChannels);

        this.taskActions = checkNotNull(taskActions);
        this.isCreditBased = isCreditBased;
    }

    @Override
    public int getNumberOfInputChannels() {
        return numberOfInputChannels;
    }

    public IntermediateDataSetID getConsumedResultId() {
        return consumedResultId;
    }

    /**
     * Returns the type of this input channel's consumed result partition.
     *
     * @return consumed result partition type
     */
    public ResultPartitionType getConsumedPartitionType() {
        return consumedPartitionType;
    }

    BufferProvider getBufferProvider() {
        return bufferPool;
    }

    public BufferPool getBufferPool() {
        return bufferPool;
    }

    @Override
    public int getPageSize() {
        if (bufferPool != null) {
            return bufferPool.getMemorySegmentSize();
        }
        else {
            throw new IllegalStateException("Input gate has not been initialized with buffers.");
        }
    }

    /**
     * Sums the queued buffers of all remote input channels. Best effort, for metrics only:
     * iteration over {@code inputChannels} is unsynchronized and may race with runtime channel
     * updates, so failures are retried a few times and then reported as 0 instead of being
     * propagated.
     */
    public int getNumberOfQueuedBuffers() {
        for (int retry = 0; retry < 3; retry++) {
            try {
                int totalBuffers = 0;
                for (InputChannel channel : inputChannels.values()) {
                    if (channel instanceof RemoteInputChannel) {
                        totalBuffers += ((RemoteInputChannel) channel).getNumberOfQueuedBuffers();
                    }
                }
                return totalBuffers;
            }
            catch (Exception ignored) {
                // Deliberately swallowed: this value feeds metrics and must not fail the task.
            }
        }
        return 0;
    }

    @Override
    public String getOwningTaskName() {
        return owningTaskName;
    }

    public void setBufferPool(BufferPool bufferPool) {
        // Fixed message: the original concatenation was missing a space ("hasalready").
        checkState(this.bufferPool == null, "Bug in input gate setup logic: buffer pool has " +
                "already been set for this input gate.");

        this.bufferPool = checkNotNull(bufferPool);
    }

    /**
     * Assign the exclusive buffers to all remote input channels directly for credit-based mode.
     *
     * @param networkBufferPool The global pool to request and recycle exclusive buffers
     * @param networkBuffersPerChannel The number of exclusive buffers for each channel
     */
    public void assignExclusiveSegments(NetworkBufferPool networkBufferPool, int networkBuffersPerChannel) throws IOException {
        checkState(this.isCreditBased, "Bug in input gate setup logic: exclusive buffers only exist with credit-based flow control.");
        // Fixed message: the original concatenation was missing a space ("hasalready").
        checkState(this.networkBufferPool == null, "Bug in input gate setup logic: global buffer pool has " +
                "already been set for this input gate.");

        this.networkBufferPool = checkNotNull(networkBufferPool);
        this.networkBuffersPerChannel = networkBuffersPerChannel;

        synchronized (requestLock) {
            for (InputChannel inputChannel : inputChannels.values()) {
                if (inputChannel instanceof RemoteInputChannel) {
                    ((RemoteInputChannel) inputChannel).assignExclusiveSegments(
                            networkBufferPool.requestMemorySegments(networkBuffersPerChannel));
                }
            }
        }
    }

    /**
     * The exclusive segments are recycled to the network buffer pool directly when an input
     * channel is released.
     *
     * @param segments The exclusive segments that need to be recycled
     */
    public void returnExclusiveSegments(List<MemorySegment> segments) throws IOException {
        networkBufferPool.recycleMemorySegments(segments);
    }

    public void setInputChannel(IntermediateResultPartitionID partitionId, InputChannel inputChannel) {
        synchronized (requestLock) {
            if (inputChannels.put(checkNotNull(partitionId), checkNotNull(inputChannel)) == null
                    && inputChannel instanceof UnknownInputChannel) {

                numberOfUninitializedChannels++;
            }
        }
    }

    /** Replaces an unknown input channel by a local or remote one, once the producer location is known. */
    public void updateInputChannel(InputChannelDeploymentDescriptor icdd) throws IOException, InterruptedException {
        synchronized (requestLock) {
            if (isReleased) {
                // The gate was concurrently released; nothing to update.
                return;
            }

            final IntermediateResultPartitionID partitionId = icdd.getConsumedPartitionId().getPartitionId();

            InputChannel current = inputChannels.get(partitionId);

            if (current instanceof UnknownInputChannel) {
                UnknownInputChannel unknownChannel = (UnknownInputChannel) current;
                InputChannel newChannel;
                ResultPartitionLocation partitionLocation = icdd.getConsumedPartitionLocation();
                if (partitionLocation.isLocal()) {
                    newChannel = unknownChannel.toLocalInputChannel();
                }
                else if (partitionLocation.isRemote()) {
                    newChannel = unknownChannel.toRemoteInputChannel(partitionLocation.getConnectionId());

                    if (this.isCreditBased) {
                        checkState(this.networkBufferPool != null, "Bug in input gate setup logic: " +
                                "global buffer pool has not been set for this input gate.");
                        ((RemoteInputChannel) newChannel).assignExclusiveSegments(
                                networkBufferPool.requestMemorySegments(networkBuffersPerChannel));
                    }
                }
                else {
                    throw new IllegalStateException("Tried to update unknown channel with unknown channel.");
                }

                LOG.debug("{}: Updated unknown input channel to {}.", owningTaskName, newChannel);

                inputChannels.put(partitionId, newChannel);

                if (requestedPartitionsFlag) {
                    newChannel.requestSubpartition(consumedSubpartitionIndex);
                }

                for (TaskEvent event : pendingEvents) {
                    newChannel.sendTaskEvent(event);
                }

                if (--numberOfUninitializedChannels == 0) {
                    pendingEvents.clear();
                }
            }
        }
    }

    /**
     * Retriggers a partition request.
     */
    public void retriggerPartitionRequest(IntermediateResultPartitionID partitionId) throws IOException, InterruptedException {
        synchronized (requestLock) {
            if (!isReleased) {
                final InputChannel ch = inputChannels.get(partitionId);
                checkNotNull(ch, "Unknown input channel with ID " + partitionId);

                LOG.debug("{}: Retriggering partition request {}:{}.", owningTaskName, ch.partitionId, consumedSubpartitionIndex);

                if (ch.getClass() == RemoteInputChannel.class) {
                    final RemoteInputChannel rch = (RemoteInputChannel) ch;
                    rch.retriggerSubpartitionRequest(consumedSubpartitionIndex);
                }
                else if (ch.getClass() == LocalInputChannel.class) {
                    final LocalInputChannel ich = (LocalInputChannel) ch;

                    if (retriggerLocalRequestTimer == null) {
                        retriggerLocalRequestTimer = new Timer(true);
                    }

                    ich.retriggerSubpartitionRequest(retriggerLocalRequestTimer, consumedSubpartitionIndex);
                }
                else {
                    throw new IllegalStateException(
                            "Unexpected type of channel to retrigger partition: " + ch.getClass());
                }
            }
        }
    }

    public void releaseAllResources() throws IOException {
        boolean released = false;
        synchronized (requestLock) {
            if (!isReleased) {
                try {
                    LOG.debug("{}: Releasing {}.", owningTaskName, this);

                    if (retriggerLocalRequestTimer != null) {
                        retriggerLocalRequestTimer.cancel();
                    }

                    for (InputChannel inputChannel : inputChannels.values()) {
                        try {
                            inputChannel.releaseAllResources();
                        }
                        catch (IOException e) {
                            LOG.warn("{}: Error during release of channel resources: {}.",
                                    owningTaskName, e.getMessage(), e);
                        }
                    }

                    if (bufferPool != null) {
                        bufferPool.lazyDestroy();
                    }
                }
                finally {
                    isReleased = true;
                    released = true;
                }
            }
        }

        if (released) {
            // Wake up blocked getNextBufferOrEvent() callers so they observe the released state.
            synchronized (inputChannelsWithData) {
                inputChannelsWithData.notifyAll();
            }
        }
    }

    @Override
    public boolean isFinished() {
        synchronized (requestLock) {
            for (InputChannel inputChannel : inputChannels.values()) {
                if (!inputChannel.isReleased()) {
                    return false;
                }
            }
        }

        return true;
    }

    // Fixed: the annotation was duplicated ("@Override @Override"), which is a compile-time
    // error because @Override is not a repeatable annotation type.
    @Override
    public Optional<BufferOrEvent> getNextBufferOrEvent() throws IOException, InterruptedException {
        return getNextBufferOrEvent(true);
    }

    @Override
    public Optional<BufferOrEvent> pollNextBufferOrEvent() throws IOException, InterruptedException {
        return getNextBufferOrEvent(false);
    }

    private Optional<BufferOrEvent> getNextBufferOrEvent(boolean blocking) throws IOException, InterruptedException {
        if (hasReceivedAllEndOfPartitionEvents) {
            return Optional.empty();
        }

        if (isReleased) {
            throw new IllegalStateException("Released");
        }

        // NOTE(review): requestPartitions() is not part of this excerpt; it is assumed to be
        // defined elsewhere in this class -- confirm against the full source.
        requestPartitions();

        InputChannel currentChannel;
        boolean moreAvailable;
        Optional<BufferAndAvailability> result = Optional.empty();

        do {
            synchronized (inputChannelsWithData) {
                while (inputChannelsWithData.size() == 0) {
                    if (isReleased) {
                        throw new IllegalStateException("Released");
                    }

                    if (blocking) {
                        inputChannelsWithData.wait();
                    }
                    else {
                        return Optional.empty();
                    }
                }

                currentChannel = inputChannelsWithData.remove();
                enqueuedInputChannelsWithData.clear(currentChannel.getChannelIndex());
                moreAvailable = !inputChannelsWithData.isEmpty();
            }

            result = currentChannel.getNextBuffer();
        } while (!result.isPresent());

        // If the channel still has data, re-enqueue it so subsequent calls can pick it up.
        if (result.get().moreAvailable()) {
            queueChannel(currentChannel);
            moreAvailable = true;
        }

        final Buffer buffer = result.get().buffer();
        if (buffer.isBuffer()) {
            return Optional.of(new BufferOrEvent(buffer, currentChannel.getChannelIndex(), moreAvailable));
        }
        else {
            final AbstractEvent event = EventSerializer.fromBuffer(buffer, getClass().getClassLoader());

            if (event.getClass() == EndOfPartitionEvent.class) {
                channelsWithEndOfPartitionEvents.set(currentChannel.getChannelIndex());

                if (channelsWithEndOfPartitionEvents.cardinality() == numberOfInputChannels) {
                    // Sanity check: no more data may be available once all partitions ended.
                    checkState(!moreAvailable || !pollNextBufferOrEvent().isPresent());
                    moreAvailable = false;
                    hasReceivedAllEndOfPartitionEvents = true;
                }

                currentChannel.notifySubpartitionConsumed();
                currentChannel.releaseAllResources();
            }

            return Optional.of(new BufferOrEvent(event, currentChannel.getChannelIndex(), moreAvailable));
        }
    }

    @Override
    public void sendTaskEvent(TaskEvent event) throws IOException {
        synchronized (requestLock) {
            for (InputChannel inputChannel : inputChannels.values()) {
                inputChannel.sendTaskEvent(event);
            }

            // Remember the event for channels that are not yet initialized.
            if (numberOfUninitializedChannels > 0) {
                pendingEvents.add(event);
            }
        }
    }

    @Override
    public void registerListener(InputGateListener inputGateListener) {
        if (this.inputGateListener == null) {
            this.inputGateListener = inputGateListener;
        }
        else {
            throw new IllegalStateException("Multiple listeners");
        }
    }

    void notifyChannelNonEmpty(InputChannel channel) {
        queueChannel(checkNotNull(channel));
    }

    void triggerPartitionStateCheck(ResultPartitionID partitionId) {
        taskActions.triggerPartitionProducerStateCheck(jobId, consumedResultId, partitionId);
    }

    /** Enqueues the channel (at most once) and notifies waiters/listener if the gate became non-empty. */
    private void queueChannel(InputChannel channel) {
        int availableChannels;

        synchronized (inputChannelsWithData) {
            if (enqueuedInputChannelsWithData.get(channel.getChannelIndex())) {
                return;
            }
            availableChannels = inputChannelsWithData.size();

            inputChannelsWithData.add(channel);
            enqueuedInputChannelsWithData.set(channel.getChannelIndex());

            if (availableChannels == 0) {
                inputChannelsWithData.notifyAll();
            }
        }

        if (availableChannels == 0) {
            InputGateListener listener = inputGateListener;
            if (listener != null) {
                listener.notifyInputGateNonEmpty(this);
            }
        }
    }

    Map<IntermediateResultPartitionID, InputChannel> getInputChannels() {
        return inputChannels;
    }

    /**
     * Creates an input gate and all of its input channels.
     */
    public static SingleInputGate create(
            String owningTaskName,
            JobID jobId,
            ExecutionAttemptID executionId,
            InputGateDeploymentDescriptor igdd,
            NetworkEnvironment networkEnvironment,
            TaskActions taskActions,
            TaskIOMetricGroup metrics) {

        final IntermediateDataSetID consumedResultId = checkNotNull(igdd.getConsumedResultId());
        final ResultPartitionType consumedPartitionType = checkNotNull(igdd.getConsumedPartitionType());

        final int consumedSubpartitionIndex = igdd.getConsumedSubpartitionIndex();
        checkArgument(consumedSubpartitionIndex >= 0);

        final InputChannelDeploymentDescriptor[] icdd = checkNotNull(igdd.getInputChannelDeploymentDescriptors());

        final SingleInputGate inputGate = new SingleInputGate(
                owningTaskName, jobId, consumedResultId, consumedPartitionType,
                consumedSubpartitionIndex, icdd.length, taskActions, metrics,
                networkEnvironment.isCreditBased());

        final InputChannel[] inputChannels = new InputChannel[icdd.length];

        int numLocalChannels = 0;
        int numRemoteChannels = 0;
        int numUnknownChannels = 0;

        for (int i = 0; i < inputChannels.length; i++) {
            final ResultPartitionID partitionId = icdd[i].getConsumedPartitionId();
            final ResultPartitionLocation partitionLocation = icdd[i].getConsumedPartitionLocation();

            if (partitionLocation.isLocal()) {
                inputChannels[i] = new LocalInputChannel(inputGate, i, partitionId,
                        networkEnvironment.getResultPartitionManager(),
                        networkEnvironment.getTaskEventDispatcher(),
                        networkEnvironment.getPartitionRequestInitialBackoff(),
                        networkEnvironment.getPartitionRequestMaxBackoff(),
                        metrics);

                numLocalChannels++;
            }
            else if (partitionLocation.isRemote()) {
                inputChannels[i] = new RemoteInputChannel(inputGate, i, partitionId,
                        partitionLocation.getConnectionId(),
                        networkEnvironment.getConnectionManager(),
                        networkEnvironment.getPartitionRequestInitialBackoff(),
                        networkEnvironment.getPartitionRequestMaxBackoff(),
                        metrics);

                numRemoteChannels++;
            }
            else if (partitionLocation.isUnknown()) {
                inputChannels[i] = new UnknownInputChannel(inputGate, i, partitionId,
                        networkEnvironment.getResultPartitionManager(),
                        networkEnvironment.getTaskEventDispatcher(),
                        networkEnvironment.getConnectionManager(),
                        networkEnvironment.getPartitionRequestInitialBackoff(),
                        networkEnvironment.getPartitionRequestMaxBackoff(),
                        metrics);

                numUnknownChannels++;
            }
            else {
                throw new IllegalStateException("Unexpected partition location.");
            }

            inputGate.setInputChannel(partitionId.getPartitionId(), inputChannels[i]);
        }

        LOG.debug("{}: Created {} input channels (local: {}, remote: {}, unknown: {}).",
                owningTaskName, inputChannels.length, numLocalChannels, numRemoteChannels, numUnknownChannels);

        return inputGate;
    }
}
Should we also set a compile version for non-Java projects, then? We could assign the controller version at submission time, or use the one from deployment.xml when it is set.
/**
 * Rejects the package if its deployment spec uses elements that were deprecated on a major
 * version older than the package's compile version.
 *
 * <p>If no compile version is available, validation is skipped entirely because there is no
 * major version to compare against.
 * NOTE(review): consider falling back to the major version pinned in deployment.xml, or to the
 * current controller version, instead of skipping -- TODO confirm desired policy.
 *
 * @throws IllegalArgumentException if a deprecated element is used
 */
private void validateDeprecatedElements(ApplicationPackage applicationPackage) {
    // Hoisted out of the loop: the compile version does not change per element.
    if (applicationPackage.compileVersion().isEmpty()) return;

    int compileMajor = applicationPackage.compileVersion().get().getMajor();
    for (var deprecatedElement : applicationPackage.deploymentSpec().deprecatedElements()) {
        if (deprecatedElement.majorVersion() < compileMajor)
            throw new IllegalArgumentException(deprecatedElement.humanReadableString());
    }
}
if (applicationPackage.compileVersion().isEmpty()) continue;
/**
 * Rejects the package if its deployment spec uses elements that were deprecated on a major
 * version older than the major version the package targets.
 *
 * <p>The wanted major version is resolved, in order, from the package's compile version, the
 * major version pinned in deployment.xml, and the current controller version.
 *
 * @throws IllegalArgumentException if a deprecated element is used, or if no wanted major
 *         version can be determined
 */
private void validateDeprecatedElements(ApplicationPackage applicationPackage) {
    int wantedMajor = applicationPackage.compileVersion().map(Version::getMajor)
                                        .or(() -> applicationPackage.deploymentSpec().majorVersion())
                                        .or(() -> controller.readVersionStatus().controllerVersion()
                                                            .map(VespaVersion::versionNumber)
                                                            .map(Version::getMajor))
                                        .orElseThrow(() -> new IllegalArgumentException("Could not determine wanted major version"));
    for (var deprecatedElement : applicationPackage.deploymentSpec().deprecatedElements()) {
        // Fixed: a leftover `if (applicationPackage.compileVersion().isEmpty()) continue;` guard
        // here skipped all validation exactly when no compile version was set, defeating the
        // fallback chain used to compute wantedMajor above.
        if (deprecatedElement.majorVersion() >= wantedMajor) continue;
        throw new IllegalArgumentException(deprecatedElement.humanReadableString());
    }
}
class ApplicationPackageValidator { private final Controller controller; private final ListFlag<String> cloudAccountsFlag; public ApplicationPackageValidator(Controller controller) { this.controller = Objects.requireNonNull(controller, "controller must be non-null"); this.cloudAccountsFlag = PermanentFlags.CLOUD_ACCOUNTS.bindTo(controller.flagSource()); } /** * Validate the given application package * * @throws IllegalArgumentException if any validations fail */ public void validate(Application application, ApplicationPackage applicationPackage, Instant instant) { validateSteps(applicationPackage.deploymentSpec()); validateCloudAccounts(application, applicationPackage.deploymentSpec()); validateEndpointRegions(applicationPackage.deploymentSpec()); validateEndpointChange(application, applicationPackage, instant); validateCompactedEndpoint(applicationPackage); validateSecurityClientsPem(applicationPackage); validateDeprecatedElements(applicationPackage); } /** Verify that deployment spec does not use elements deprecated on a major version older than compile version */ /** Verify that we have the security/clients.pem file for public systems */ private void validateSecurityClientsPem(ApplicationPackage applicationPackage) { if (!controller.system().isPublic() || applicationPackage.deploymentSpec().steps().isEmpty()) return; if (applicationPackage.trustedCertificates().isEmpty()) throw new IllegalArgumentException("Missing required file 'security/clients.pem'"); } /** Verify that each of the production zones listed in the deployment spec exist in this system */ private void validateSteps(DeploymentSpec deploymentSpec) { for (var spec : deploymentSpec.instances()) { new DeploymentSteps(spec, controller.zoneRegistry()).jobs(); spec.zones().stream() .filter(zone -> zone.environment() == Environment.prod) .forEach(zone -> { if ( ! 
controller.zoneRegistry().hasZone(ZoneId.from(zone.environment(), zone.region().orElseThrow()))) { throw new IllegalArgumentException("Zone " + zone + " in deployment spec was not found in this system!"); } }); } } /** Verify that no single endpoint contains regions in different clouds */ private void validateEndpointRegions(DeploymentSpec deploymentSpec) { for (var instance : deploymentSpec.instances()) { for (var endpoint : instance.endpoints()) { var clouds = new HashSet<CloudName>(); for (var region : endpoint.regions()) { for (ZoneApi zone : controller.zoneRegistry().zones().all().in(Environment.prod).in(region).zones()) { clouds.add(zone.getCloudName()); } } if (clouds.size() != 1) { throw new IllegalArgumentException("Endpoint '" + endpoint.endpointId() + "' in " + instance + " cannot contain regions in different clouds: " + endpoint.regions().stream().sorted().collect(Collectors.toList())); } } } } /** Verify endpoint configuration of given application package */ private void validateEndpointChange(Application application, ApplicationPackage applicationPackage, Instant instant) { applicationPackage.deploymentSpec().instances().forEach(instance -> validateEndpointChange(application, instance.name(), applicationPackage, instant)); } /** Verify that compactable endpoint parts (instance name and endpoint ID) do not clash */ private void validateCompactedEndpoint(ApplicationPackage applicationPackage) { Map<List<String>, InstanceEndpoint> instanceEndpoints = new HashMap<>(); for (var instanceSpec : applicationPackage.deploymentSpec().instances()) { for (var endpoint : instanceSpec.endpoints()) { List<String> nonCompactableIds = nonCompactableIds(instanceSpec.name(), endpoint); InstanceEndpoint instanceEndpoint = new InstanceEndpoint(instanceSpec.name(), endpoint.endpointId()); InstanceEndpoint existingEndpoint = instanceEndpoints.get(nonCompactableIds); if (existingEndpoint != null) { throw new IllegalArgumentException("Endpoint with ID '" + 
endpoint.endpointId() + "' in instance '" + instanceSpec.name().value() + "' clashes with endpoint '" + existingEndpoint.endpointId + "' in instance '" + existingEndpoint.instance + "'"); } instanceEndpoints.put(nonCompactableIds, instanceEndpoint); } } } /** Verify changes to endpoint configuration by comparing given application package to the existing one, if any */ private void validateEndpointChange(Application application, InstanceName instanceName, ApplicationPackage applicationPackage, Instant instant) { var validationId = ValidationId.globalEndpointChange; if (applicationPackage.validationOverrides().allows(validationId, instant)) return; var endpoints = application.deploymentSpec().instance(instanceName) .map(ApplicationPackageValidator::allEndpointsOf) .orElseGet(List::of); var newEndpoints = allEndpointsOf(applicationPackage.deploymentSpec().requireInstance(instanceName)); if (newEndpoints.containsAll(endpoints)) return; if (containsAllDestinationsOf(endpoints, newEndpoints)) return; var removedEndpoints = new ArrayList<>(endpoints); removedEndpoints.removeAll(newEndpoints); newEndpoints.removeAll(endpoints); throw new IllegalArgumentException(validationId.value() + ": application '" + application.id() + (instanceName.isDefault() ? "" : "." + instanceName.value()) + "' has endpoints " + endpoints + ", but does not include all of these in deployment.xml. Deploying given " + "deployment.xml will remove " + removedEndpoints + (newEndpoints.isEmpty() ? "" : " and add " + newEndpoints) + ". 
" + ValidationOverrides.toAllowMessage(validationId)); } /** Verify that declared cloud accounts are allowed to be used by the tenant */ private void validateCloudAccounts(Application application, DeploymentSpec deploymentSpec) { TenantName tenant = application.id().tenant(); Set<CloudAccount> validAccounts = cloudAccountsFlag.with(FetchVector.Dimension.TENANT_ID, tenant.value()) .value().stream() .map(CloudAccount::new) .collect(Collectors.toSet()); for (var spec : deploymentSpec.instances()) { for (var zone : spec.zones()) { if (!zone.environment().isProduction()) continue; Optional<CloudAccount> cloudAccount = spec.cloudAccount(zone.environment(), zone.region().get()); if (cloudAccount.isEmpty()) continue; if (validAccounts.contains(cloudAccount.get())) continue; throw new IllegalArgumentException("Cloud account '" + cloudAccount.get().value() + "' is not valid for tenant '" + tenant + "'"); } } } /** Returns whether newEndpoints contains all destinations in endpoints */ private static boolean containsAllDestinationsOf(List<Endpoint> endpoints, List<Endpoint> newEndpoints) { var containsAllRegions = true; var hasSameCluster = true; for (var endpoint : endpoints) { var endpointContainsAllRegions = false; var endpointHasSameCluster = false; for (var newEndpoint : newEndpoints) { if (endpoint.endpointId().equals(newEndpoint.endpointId())) { endpointContainsAllRegions = newEndpoint.regions().containsAll(endpoint.regions()); endpointHasSameCluster = newEndpoint.containerId().equals(endpoint.containerId()); } } containsAllRegions &= endpointContainsAllRegions; hasSameCluster &= endpointHasSameCluster; } return containsAllRegions && hasSameCluster; } /** Returns all configued endpoints of given deployment instance spec */ private static List<Endpoint> allEndpointsOf(DeploymentInstanceSpec deploymentInstanceSpec) { var endpoints = new ArrayList<>(deploymentInstanceSpec.endpoints()); legacyEndpoint(deploymentInstanceSpec).ifPresent(endpoints::add); return endpoints; } 
/** Returns global service ID as an endpoint, if any global service ID is set */ private static Optional<Endpoint> legacyEndpoint(DeploymentInstanceSpec instance) { return instance.globalServiceId().map(globalServiceId -> { var targets = instance.zones().stream() .filter(zone -> zone.environment().isProduction()) .flatMap(zone -> zone.region().stream()) .distinct() .map(region -> new Endpoint.Target(region, instance.name(), 1)) .collect(Collectors.toList()); return new Endpoint(EndpointId.defaultId().id(), globalServiceId, Endpoint.Level.instance, targets); }); } /** Returns a list of the non-compactable IDs of given instance and endpoint */ private static List<String> nonCompactableIds(InstanceName instance, Endpoint endpoint) { List<String> ids = new ArrayList<>(2); if (!instance.isDefault()) { ids.add(instance.value()); } if (!"default".equals(endpoint.endpointId())) { ids.add(endpoint.endpointId()); } return ids; } private static class InstanceEndpoint { private final InstanceName instance; private final String endpointId; public InstanceEndpoint(InstanceName instance, String endpointId) { this.instance = instance; this.endpointId = endpointId; } } }
class ApplicationPackageValidator { private final Controller controller; private final ListFlag<String> cloudAccountsFlag; public ApplicationPackageValidator(Controller controller) { this.controller = Objects.requireNonNull(controller, "controller must be non-null"); this.cloudAccountsFlag = PermanentFlags.CLOUD_ACCOUNTS.bindTo(controller.flagSource()); } /** * Validate the given application package * * @throws IllegalArgumentException if any validations fail */ public void validate(Application application, ApplicationPackage applicationPackage, Instant instant) { validateSteps(applicationPackage.deploymentSpec()); validateCloudAccounts(application, applicationPackage.deploymentSpec()); validateEndpointRegions(applicationPackage.deploymentSpec()); validateEndpointChange(application, applicationPackage, instant); validateCompactedEndpoint(applicationPackage); validateSecurityClientsPem(applicationPackage); validateDeprecatedElements(applicationPackage); } /** Verify that deployment spec does not use elements deprecated on a major version older than wanted major version */ /** Verify that we have the security/clients.pem file for public systems */ private void validateSecurityClientsPem(ApplicationPackage applicationPackage) { if (!controller.system().isPublic() || applicationPackage.deploymentSpec().steps().isEmpty()) return; if (applicationPackage.trustedCertificates().isEmpty()) throw new IllegalArgumentException("Missing required file 'security/clients.pem'"); } /** Verify that each of the production zones listed in the deployment spec exist in this system */ private void validateSteps(DeploymentSpec deploymentSpec) { for (var spec : deploymentSpec.instances()) { new DeploymentSteps(spec, controller.zoneRegistry()).jobs(); spec.zones().stream() .filter(zone -> zone.environment() == Environment.prod) .forEach(zone -> { if ( ! 
controller.zoneRegistry().hasZone(ZoneId.from(zone.environment(), zone.region().orElseThrow()))) { throw new IllegalArgumentException("Zone " + zone + " in deployment spec was not found in this system!"); } }); } } /** Verify that no single endpoint contains regions in different clouds */ private void validateEndpointRegions(DeploymentSpec deploymentSpec) { for (var instance : deploymentSpec.instances()) { for (var endpoint : instance.endpoints()) { var clouds = new HashSet<CloudName>(); for (var region : endpoint.regions()) { for (ZoneApi zone : controller.zoneRegistry().zones().all().in(Environment.prod).in(region).zones()) { clouds.add(zone.getCloudName()); } } if (clouds.size() != 1) { throw new IllegalArgumentException("Endpoint '" + endpoint.endpointId() + "' in " + instance + " cannot contain regions in different clouds: " + endpoint.regions().stream().sorted().collect(Collectors.toList())); } } } } /** Verify endpoint configuration of given application package */ private void validateEndpointChange(Application application, ApplicationPackage applicationPackage, Instant instant) { applicationPackage.deploymentSpec().instances().forEach(instance -> validateEndpointChange(application, instance.name(), applicationPackage, instant)); } /** Verify that compactable endpoint parts (instance name and endpoint ID) do not clash */ private void validateCompactedEndpoint(ApplicationPackage applicationPackage) { Map<List<String>, InstanceEndpoint> instanceEndpoints = new HashMap<>(); for (var instanceSpec : applicationPackage.deploymentSpec().instances()) { for (var endpoint : instanceSpec.endpoints()) { List<String> nonCompactableIds = nonCompactableIds(instanceSpec.name(), endpoint); InstanceEndpoint instanceEndpoint = new InstanceEndpoint(instanceSpec.name(), endpoint.endpointId()); InstanceEndpoint existingEndpoint = instanceEndpoints.get(nonCompactableIds); if (existingEndpoint != null) { throw new IllegalArgumentException("Endpoint with ID '" + 
endpoint.endpointId() + "' in instance '" + instanceSpec.name().value() + "' clashes with endpoint '" + existingEndpoint.endpointId + "' in instance '" + existingEndpoint.instance + "'"); } instanceEndpoints.put(nonCompactableIds, instanceEndpoint); } } } /** Verify changes to endpoint configuration by comparing given application package to the existing one, if any */ private void validateEndpointChange(Application application, InstanceName instanceName, ApplicationPackage applicationPackage, Instant instant) { var validationId = ValidationId.globalEndpointChange; if (applicationPackage.validationOverrides().allows(validationId, instant)) return; var endpoints = application.deploymentSpec().instance(instanceName) .map(ApplicationPackageValidator::allEndpointsOf) .orElseGet(List::of); var newEndpoints = allEndpointsOf(applicationPackage.deploymentSpec().requireInstance(instanceName)); if (newEndpoints.containsAll(endpoints)) return; if (containsAllDestinationsOf(endpoints, newEndpoints)) return; var removedEndpoints = new ArrayList<>(endpoints); removedEndpoints.removeAll(newEndpoints); newEndpoints.removeAll(endpoints); throw new IllegalArgumentException(validationId.value() + ": application '" + application.id() + (instanceName.isDefault() ? "" : "." + instanceName.value()) + "' has endpoints " + endpoints + ", but does not include all of these in deployment.xml. Deploying given " + "deployment.xml will remove " + removedEndpoints + (newEndpoints.isEmpty() ? "" : " and add " + newEndpoints) + ". 
" + ValidationOverrides.toAllowMessage(validationId)); } /** Verify that declared cloud accounts are allowed to be used by the tenant */ private void validateCloudAccounts(Application application, DeploymentSpec deploymentSpec) { TenantName tenant = application.id().tenant(); Set<CloudAccount> validAccounts = cloudAccountsFlag.with(FetchVector.Dimension.TENANT_ID, tenant.value()) .value().stream() .map(CloudAccount::new) .collect(Collectors.toSet()); for (var spec : deploymentSpec.instances()) { for (var zone : spec.zones()) { if (!zone.environment().isProduction()) continue; Optional<CloudAccount> cloudAccount = spec.cloudAccount(zone.environment(), zone.region().get()); if (cloudAccount.isEmpty()) continue; if (validAccounts.contains(cloudAccount.get())) continue; throw new IllegalArgumentException("Cloud account '" + cloudAccount.get().value() + "' is not valid for tenant '" + tenant + "'"); } } } /** Returns whether newEndpoints contains all destinations in endpoints */ private static boolean containsAllDestinationsOf(List<Endpoint> endpoints, List<Endpoint> newEndpoints) { var containsAllRegions = true; var hasSameCluster = true; for (var endpoint : endpoints) { var endpointContainsAllRegions = false; var endpointHasSameCluster = false; for (var newEndpoint : newEndpoints) { if (endpoint.endpointId().equals(newEndpoint.endpointId())) { endpointContainsAllRegions = newEndpoint.regions().containsAll(endpoint.regions()); endpointHasSameCluster = newEndpoint.containerId().equals(endpoint.containerId()); } } containsAllRegions &= endpointContainsAllRegions; hasSameCluster &= endpointHasSameCluster; } return containsAllRegions && hasSameCluster; } /** Returns all configued endpoints of given deployment instance spec */ private static List<Endpoint> allEndpointsOf(DeploymentInstanceSpec deploymentInstanceSpec) { var endpoints = new ArrayList<>(deploymentInstanceSpec.endpoints()); legacyEndpoint(deploymentInstanceSpec).ifPresent(endpoints::add); return endpoints; } 
/** Returns global service ID as an endpoint, if any global service ID is set */ private static Optional<Endpoint> legacyEndpoint(DeploymentInstanceSpec instance) { return instance.globalServiceId().map(globalServiceId -> { var targets = instance.zones().stream() .filter(zone -> zone.environment().isProduction()) .flatMap(zone -> zone.region().stream()) .distinct() .map(region -> new Endpoint.Target(region, instance.name(), 1)) .collect(Collectors.toList()); return new Endpoint(EndpointId.defaultId().id(), globalServiceId, Endpoint.Level.instance, targets); }); } /** Returns a list of the non-compactable IDs of given instance and endpoint */ private static List<String> nonCompactableIds(InstanceName instance, Endpoint endpoint) { List<String> ids = new ArrayList<>(2); if (!instance.isDefault()) { ids.add(instance.value()); } if (!"default".equals(endpoint.endpointId())) { ids.add(endpoint.endpointId()); } return ids; } private static class InstanceEndpoint { private final InstanceName instance; private final String endpointId; public InstanceEndpoint(InstanceName instance, String endpointId) { this.instance = instance; this.endpointId = endpointId; } } }
Looks to me there is not need to have the process to allocate slot1, offer slot and release it. The slot offering only would be enough to add a free slot. I think we can simplify it, maybe in a separate commit.
public void testAllocateWithFreeSlot() throws Exception { final CompletableFuture<SlotRequest> slotRequestFuture = new CompletableFuture<>(); resourceManagerGateway.setRequestSlotConsumer(slotRequestFuture::complete); try (SlotPoolImpl slotPool = createAndSetUpSlotPool()) { slotPool.registerTaskManager(taskManagerLocation.getResourceID()); SlotRequestId requestId1 = new SlotRequestId(); CompletableFuture<PhysicalSlot> future1 = requestNewAllocatedSlot( slotPool, requestId1 ); assertFalse(future1.isDone()); final SlotRequest slotRequest = slotRequestFuture.get(timeout.toMilliseconds(), TimeUnit.MILLISECONDS); final SlotOffer slotOffer = new SlotOffer( slotRequest.getAllocationId(), 0, DEFAULT_TESTING_PROFILE); assertTrue(slotPool.offerSlot(taskManagerLocation, taskManagerGateway, slotOffer)); PhysicalSlot slot1 = future1.get(1, TimeUnit.SECONDS); assertTrue(future1.isDone()); slotPool.releaseSlot(requestId1, null); assertEquals(1, slotPool.getAvailableSlots().size()); assertEquals(0, slotPool.getAllocatedSlots().size()); Optional<PhysicalSlot> optional = slotPool.allocateAvailableSlot( new SlotRequestId(), slotRequest.getAllocationId() ); assertTrue(optional.isPresent()); PhysicalSlot slot2 = optional.get(); assertEquals(slot1, slot2); } }
slotPool.releaseSlot(requestId1, null);
public void testAllocateWithFreeSlot() throws Exception { final CompletableFuture<SlotRequest> slotRequestFuture = new CompletableFuture<>(); resourceManagerGateway.setRequestSlotConsumer(slotRequestFuture::complete); try (SlotPoolImpl slotPool = createAndSetUpSlotPool(resourceManagerGateway)) { final AllocationID allocationId = new AllocationID(); assertTrue(registerAndOfferSlot(taskManagerLocation, slotPool, allocationId)); assertEquals(1, slotPool.getAvailableSlots().size()); assertEquals(0, slotPool.getAllocatedSlots().size()); Optional<PhysicalSlot> physicalSlot = slotPool.allocateAvailableSlot( new SlotRequestId(), allocationId); assertTrue(physicalSlot.isPresent()); assertEquals(0, slotPool.getAvailableSlots().size()); assertEquals(1, slotPool.getAllocatedSlots().size()); } }
class SlotPoolImplTest extends TestLogger { private final Time timeout = Time.seconds(10L); private JobID jobId; private TaskManagerLocation taskManagerLocation; private SimpleAckingTaskManagerGateway taskManagerGateway; private TestingResourceManagerGateway resourceManagerGateway; private ComponentMainThreadExecutor mainThreadExecutor = ComponentMainThreadExecutorServiceAdapter.forMainThread(); @Before public void setUp() throws Exception { this.jobId = new JobID(); taskManagerLocation = new LocalTaskManagerLocation(); taskManagerGateway = new SimpleAckingTaskManagerGateway(); resourceManagerGateway = new TestingResourceManagerGateway(); } @Test public void testAllocateSimpleSlot() throws Exception { CompletableFuture<SlotRequest> slotRequestFuture = new CompletableFuture<>(); resourceManagerGateway.setRequestSlotConsumer(slotRequestFuture::complete); try (SlotPoolImpl slotPool = createAndSetUpSlotPool()) { slotPool.registerTaskManager(taskManagerLocation.getResourceID()); SlotRequestId requestId = new SlotRequestId(); CompletableFuture<PhysicalSlot> future = requestNewAllocatedSlot(slotPool, requestId); assertFalse(future.isDone()); final SlotRequest slotRequest = slotRequestFuture.get(timeout.toMilliseconds(), TimeUnit.MILLISECONDS); final SlotOffer slotOffer = new SlotOffer( slotRequest.getAllocationId(), 0, DEFAULT_TESTING_PROFILE); assertTrue(slotPool.offerSlot(taskManagerLocation, taskManagerGateway, slotOffer)); PhysicalSlot physicalSlot = future.get(1, TimeUnit.SECONDS); assertTrue(future.isDone()); assertEquals(taskManagerLocation, physicalSlot.getTaskManagerLocation()); assertEquals(slotRequest.getAllocationId(), physicalSlot.getAllocationId()); } } @Nonnull private SlotPoolImpl createSlotPoolImpl() { return new TestingSlotPoolImpl(jobId); } @Test public void testAllocationFulfilledByReturnedSlot() throws Exception { final ArrayBlockingQueue<SlotRequest> slotRequestQueue = new ArrayBlockingQueue<>(2); 
resourceManagerGateway.setRequestSlotConsumer(slotRequest -> { while (!slotRequestQueue.offer(slotRequest)) { } }); try (SlotPoolImpl slotPool = createAndSetUpSlotPool()) { slotPool.registerTaskManager(taskManagerLocation.getResourceID()); SlotRequestId requestId1 = new SlotRequestId(); CompletableFuture<PhysicalSlot> future1 = requestNewAllocatedSlot( slotPool, requestId1 ); SlotRequestId requestId2 = new SlotRequestId(); CompletableFuture<PhysicalSlot> future2 = requestNewAllocatedSlot( slotPool, requestId2 ); assertFalse(future1.isDone()); assertFalse(future2.isDone()); final List<SlotRequest> slotRequests = new ArrayList<>(2); for (int i = 0; i < 2; i++) { slotRequests.add(slotRequestQueue.poll(timeout.toMilliseconds(), TimeUnit.MILLISECONDS)); } final SlotOffer slotOffer = new SlotOffer( slotRequests.get(0).getAllocationId(), 0, DEFAULT_TESTING_PROFILE); assertTrue(slotPool.offerSlot(taskManagerLocation, taskManagerGateway, slotOffer)); PhysicalSlot slot1 = future1.get(1, TimeUnit.SECONDS); assertTrue(future1.isDone()); assertFalse(future2.isDone()); slotPool.releaseSlot(requestId1, null); PhysicalSlot slot2 = future2.get(1, TimeUnit.SECONDS); assertTrue(future2.isDone()); assertEquals(slot1, slot2); } } @Test @Test public void testOfferSlot() throws Exception { final CompletableFuture<SlotRequest> slotRequestFuture = new CompletableFuture<>(); resourceManagerGateway.setRequestSlotConsumer(slotRequestFuture::complete); try (SlotPoolImpl slotPool = createAndSetUpSlotPool()) { slotPool.registerTaskManager(taskManagerLocation.getResourceID()); SlotRequestId requestId = new SlotRequestId(); CompletableFuture<PhysicalSlot> future = requestNewAllocatedSlot( slotPool, requestId ); assertFalse(future.isDone()); final SlotRequest slotRequest = slotRequestFuture.get(timeout.toMilliseconds(), TimeUnit.MILLISECONDS); final SlotOffer slotOffer = new SlotOffer( slotRequest.getAllocationId(), 0, DEFAULT_TESTING_PROFILE); final TaskManagerLocation invalidTaskManagerLocation = 
new LocalTaskManagerLocation(); assertFalse(slotPool.offerSlot(invalidTaskManagerLocation, taskManagerGateway, slotOffer)); final SlotOffer nonRequestedSlotOffer = new SlotOffer( new AllocationID(), 0, DEFAULT_TESTING_PROFILE); assertTrue(slotPool.offerSlot(taskManagerLocation, taskManagerGateway, nonRequestedSlotOffer)); assertTrue(slotPool.offerSlot(taskManagerLocation, taskManagerGateway, slotOffer)); PhysicalSlot slot = future.get(timeout.toMilliseconds(), TimeUnit.MILLISECONDS); assertEquals(1, slotPool.getAvailableSlots().size()); assertEquals(1, slotPool.getAllocatedSlots().size()); assertEquals(taskManagerLocation, slot.getTaskManagerLocation()); assertEquals(nonRequestedSlotOffer.getAllocationId(), slot.getAllocationId()); assertTrue(slotPool.offerSlot(taskManagerLocation, taskManagerGateway, slotOffer)); assertEquals(1, slotPool.getAllocatedSlots().size()); assertEquals(nonRequestedSlotOffer.getAllocationId(), slot.getAllocationId()); final SlotOffer anotherSlotOfferWithSameAllocationId = new SlotOffer( slotRequest.getAllocationId(), 1, DEFAULT_TESTING_PROFILE); assertFalse(slotPool.offerSlot(taskManagerLocation, taskManagerGateway, anotherSlotOfferWithSameAllocationId)); TaskManagerLocation anotherTaskManagerLocation = new LocalTaskManagerLocation(); assertFalse(slotPool.offerSlot(anotherTaskManagerLocation, taskManagerGateway, slotOffer)); slotPool.releaseSlot(requestId, null); assertTrue(slotPool.offerSlot(taskManagerLocation, taskManagerGateway, slotOffer)); assertFalse(slotPool.offerSlot(taskManagerLocation, taskManagerGateway, anotherSlotOfferWithSameAllocationId)); assertFalse(slotPool.offerSlot(anotherTaskManagerLocation, taskManagerGateway, slotOffer)); } } @Test public void testReleaseResource() throws Exception { final CompletableFuture<SlotRequest> slotRequestFuture = new CompletableFuture<>(); resourceManagerGateway.setRequestSlotConsumer(slotRequestFuture::complete); try (SlotPoolImpl slotPool = createAndSetUpSlotPool()) { 
slotPool.registerTaskManager(taskManagerLocation.getResourceID()); SlotRequestId requestId1 = new SlotRequestId(); CompletableFuture<PhysicalSlot> future1 = requestNewAllocatedSlot( slotPool, requestId1 ); final SlotRequest slotRequest = slotRequestFuture.get(timeout.toMilliseconds(), TimeUnit.MILLISECONDS); CompletableFuture<PhysicalSlot> future2 = requestNewAllocatedSlot( slotPool, new SlotRequestId() ); final SlotOffer slotOffer = new SlotOffer( slotRequest.getAllocationId(), 0, DEFAULT_TESTING_PROFILE); assertTrue(slotPool.offerSlot(taskManagerLocation, taskManagerGateway, slotOffer)); PhysicalSlot slot1 = future1.get(1, TimeUnit.SECONDS); assertTrue(future1.isDone()); assertFalse(future2.isDone()); final CompletableFuture<?> releaseFuture = new CompletableFuture<>(); SingleLogicalSlot logicalSlot = SingleLogicalSlot.allocateFromPhysicalSlot( requestId1, slot1, Locality.UNKNOWN, new DummySlotOwner(), true ); logicalSlot.tryAssignPayload(new DummyPayload(releaseFuture)); slotPool.releaseTaskManager(taskManagerLocation.getResourceID(), null); releaseFuture.get(1, TimeUnit.SECONDS); assertFalse(logicalSlot.isAlive()); Thread.sleep(10); assertFalse(future2.isDone()); } } /** * Tests that unused offered slots are directly used to fulfill pending slot * requests. 
* * <p>Moreover it tests that the old slot request is canceled * * <p>See FLINK-8089, FLINK-8934 */ @Test public void testFulfillingSlotRequestsWithUnusedOfferedSlots() throws Exception { try (SlotPoolImpl slotPool = createAndSetUpSlotPool()) { final ArrayBlockingQueue<AllocationID> allocationIds = new ArrayBlockingQueue<>(2); resourceManagerGateway.setRequestSlotConsumer( (SlotRequest slotRequest) -> allocationIds.offer(slotRequest.getAllocationId())); final ArrayBlockingQueue<AllocationID> canceledAllocations = new ArrayBlockingQueue<>(2); resourceManagerGateway.setCancelSlotConsumer(canceledAllocations::offer); final SlotRequestId slotRequestId1 = new SlotRequestId(); final SlotRequestId slotRequestId2 = new SlotRequestId(); CompletableFuture<PhysicalSlot> slotFuture1 = requestNewAllocatedSlot( slotPool, slotRequestId1 ); final AllocationID allocationId1 = allocationIds.take(); CompletableFuture<PhysicalSlot> slotFuture2 = requestNewAllocatedSlot( slotPool, slotRequestId2 ); final AllocationID allocationId2 = allocationIds.take(); slotPool.releaseSlot(slotRequestId1, null); try { slotFuture1.get(); fail("The first slot future should have failed because it was cancelled."); } catch (ExecutionException ee) { assertTrue(ExceptionUtils.stripExecutionException(ee) instanceof FlinkException); } assertEquals(allocationId1, canceledAllocations.take()); final SlotOffer slotOffer = new SlotOffer(allocationId1, 0, ResourceProfile.ANY); slotPool.registerTaskManager(taskManagerLocation.getResourceID()); assertTrue(slotPool.offerSlot(taskManagerLocation, taskManagerGateway, slotOffer)); assertEquals(allocationId1, slotFuture2.get().getAllocationId()); assertEquals(allocationId2, canceledAllocations.take()); } } /** * Tests that a SlotPoolImpl shutdown releases all registered slots. 
*/ @Test public void testShutdownReleasesAllSlots() throws Exception { try (SlotPoolImpl slotPool = createAndSetUpSlotPool()) { slotPool.registerTaskManager(taskManagerLocation.getResourceID()); final int numSlotOffers = 2; final Collection<SlotOffer> slotOffers = new ArrayList<>(numSlotOffers); for (int i = 0; i < numSlotOffers; i++) { slotOffers.add( new SlotOffer( new AllocationID(), i, ResourceProfile.ANY)); } final ArrayBlockingQueue<AllocationID> freedSlotQueue = new ArrayBlockingQueue<>(numSlotOffers); taskManagerGateway.setFreeSlotFunction( (AllocationID allocationID, Throwable cause) -> { try { freedSlotQueue.put(allocationID); return CompletableFuture.completedFuture(Acknowledge.get()); } catch (InterruptedException e) { return FutureUtils.completedExceptionally(e); } }); final Collection<SlotOffer> acceptedSlotOffers = slotPool.offerSlots(taskManagerLocation, taskManagerGateway, slotOffers); assertThat(acceptedSlotOffers, Matchers.equalTo(slotOffers)); slotPool.close(); ArrayList<AllocationID> freedSlots = new ArrayList<>(numSlotOffers); while (freedSlots.size() < numSlotOffers) { freedSlotQueue.drainTo(freedSlots); } assertThat(freedSlots, Matchers.containsInAnyOrder(slotOffers.stream().map(SlotOffer::getAllocationId).toArray())); } } @Test public void testCheckIdleSlot() throws Exception { final ManualClock clock = new ManualClock(); try (TestingSlotPoolImpl slotPool = createSlotPoolImpl(clock)) { final BlockingQueue<AllocationID> freedSlots = new ArrayBlockingQueue<>(1); taskManagerGateway.setFreeSlotFunction( (AllocationID allocationId, Throwable cause) -> { try { freedSlots.put(allocationId); return CompletableFuture.completedFuture(Acknowledge.get()); } catch (InterruptedException e) { return FutureUtils.completedExceptionally(e); } }); setupSlotPool(slotPool, resourceManagerGateway, mainThreadExecutor); final AllocationID expiredSlotID = new AllocationID(); final AllocationID freshSlotID = new AllocationID(); final SlotOffer slotToExpire = new 
SlotOffer(expiredSlotID, 0, ResourceProfile.ANY); final SlotOffer slotToNotExpire = new SlotOffer(freshSlotID, 1, ResourceProfile.ANY); assertThat(slotPool.registerTaskManager(taskManagerLocation.getResourceID()), Matchers.is(true)); assertThat( slotPool.offerSlot(taskManagerLocation, taskManagerGateway, slotToExpire), Matchers.is(true)); clock.advanceTime(timeout.toMilliseconds(), TimeUnit.MILLISECONDS); assertThat(slotPool.offerSlot(taskManagerLocation, taskManagerGateway, slotToNotExpire), Matchers.is(true)); clock.advanceTime(1L, TimeUnit.MILLISECONDS); slotPool.triggerCheckIdleSlot(); final AllocationID freedSlot = freedSlots.poll(timeout.toMilliseconds(), TimeUnit.MILLISECONDS); assertThat(freedSlot, Matchers.is(expiredSlotID)); assertThat(freedSlots.isEmpty(), Matchers.is(true)); } } @Nonnull private TestingSlotPoolImpl createSlotPoolImpl(ManualClock clock) { return new TestingSlotPoolImpl( jobId, clock, TestingUtils.infiniteTime(), timeout, TestingUtils.infiniteTime()); } /** * Tests that idle slots which cannot be released will be discarded. See FLINK-11059. 
*/ @Test public void testDiscardIdleSlotIfReleasingFailed() throws Exception { final ManualClock clock = new ManualClock(); try (TestingSlotPoolImpl slotPool = createSlotPoolImpl(clock)) { setupSlotPool(slotPool, resourceManagerGateway, mainThreadExecutor); final AllocationID expiredAllocationId = new AllocationID(); final SlotOffer slotToExpire = new SlotOffer(expiredAllocationId, 0, ResourceProfile.ANY); OneShotLatch freeSlotLatch = new OneShotLatch(); taskManagerGateway.setFreeSlotFunction((AllocationID allocationId, Throwable cause) -> { freeSlotLatch.trigger(); return FutureUtils.completedExceptionally(new TimeoutException("Test failure")); }); assertThat(slotPool.registerTaskManager(taskManagerLocation.getResourceID()), Matchers.is(true)); assertThat(slotPool.offerSlot(taskManagerLocation, taskManagerGateway, slotToExpire), Matchers.is(true)); clock.advanceTime(timeout.toMilliseconds() + 1, TimeUnit.MILLISECONDS); slotPool.triggerCheckIdleSlot(); freeSlotLatch.await(); CompletableFuture<PhysicalSlot> future = requestNewAllocatedSlot( slotPool, new SlotRequestId() ); try { future.get(10L, TimeUnit.MILLISECONDS); fail("Expected to fail with a timeout."); } catch (TimeoutException ignored) { assertEquals(0, slotPool.getAvailableSlots().size()); } } } /** * Tests that failed slots are freed on the {@link TaskExecutor}. 
*/ @Test public void testFreeFailedSlots() throws Exception { try (SlotPoolImpl slotPool = createAndSetUpSlotPool()) { final int parallelism = 5; final ArrayBlockingQueue<AllocationID> allocationIds = new ArrayBlockingQueue<>(parallelism); resourceManagerGateway.setRequestSlotConsumer( slotRequest -> allocationIds.offer(slotRequest.getAllocationId())); final Map<SlotRequestId, CompletableFuture<PhysicalSlot>> slotRequestFutures = new HashMap<>(parallelism); for (int i = 0; i < parallelism; i++) { final SlotRequestId slotRequestId = new SlotRequestId(); slotRequestFutures.put(slotRequestId, requestNewAllocatedSlot(slotPool, slotRequestId)); } final List<SlotOffer> slotOffers = new ArrayList<>(parallelism); for (int i = 0; i < parallelism; i++) { slotOffers.add(new SlotOffer(allocationIds.take(), i, ResourceProfile.ANY)); } slotPool.registerTaskManager(taskManagerLocation.getResourceID()); slotPool.offerSlots(taskManagerLocation, taskManagerGateway, slotOffers); FutureUtils.waitForAll(slotRequestFutures.values()).get(); final ArrayBlockingQueue<AllocationID> freedSlots = new ArrayBlockingQueue<>(1); taskManagerGateway.setFreeSlotFunction( (allocationID, throwable) -> { freedSlots.offer(allocationID); return CompletableFuture.completedFuture(Acknowledge.get()); }); final FlinkException failException = new FlinkException("Test fail exception"); for (int i = 0; i < parallelism - 1; i++) { final SlotOffer slotOffer = slotOffers.get(i); Optional<ResourceID> emptyTaskExecutorFuture = slotPool.failAllocation(slotOffer.getAllocationId(), failException); assertThat(emptyTaskExecutorFuture.isPresent(), is(false)); assertThat(freedSlots.take(), is(equalTo(slotOffer.getAllocationId()))); } final SlotOffer slotOffer = slotOffers.get(parallelism - 1); final Optional<ResourceID> emptyTaskExecutorFuture = slotPool.failAllocation( slotOffer.getAllocationId(), failException); assertTrue(emptyTaskExecutorFuture.isPresent()); assertThat(emptyTaskExecutorFuture.get(), 
is(equalTo(taskManagerLocation.getResourceID()))); assertThat(freedSlots.take(), is(equalTo(slotOffer.getAllocationId()))); } } /** * Tests that create report of allocated slots on a {@link TaskExecutor}. */ @Test public void testCreateAllocatedSlotReport() throws Exception { try (SlotPoolImpl slotPool = createAndSetUpSlotPool()) { final ArrayBlockingQueue<AllocationID> allocationIds = new ArrayBlockingQueue<>(1); resourceManagerGateway.setRequestSlotConsumer( slotRequest -> allocationIds.offer(slotRequest.getAllocationId())); final SlotRequestId slotRequestId = new SlotRequestId(); final CompletableFuture<PhysicalSlot> slotRequestFuture = requestNewAllocatedSlot( slotPool, slotRequestId ); final List<AllocatedSlotInfo> allocatedSlotInfos = new ArrayList<>(2); final List<SlotOffer> slotOffers = new ArrayList<>(2); final AllocationID allocatedId = allocationIds.take(); slotOffers.add(new SlotOffer(allocatedId, 0, ResourceProfile.ANY)); allocatedSlotInfos.add(new AllocatedSlotInfo(0, allocatedId)); final AllocationID availableId = new AllocationID(); slotOffers.add(new SlotOffer(availableId, 1, ResourceProfile.ANY)); allocatedSlotInfos.add(new AllocatedSlotInfo(1, availableId)); slotPool.registerTaskManager(taskManagerLocation.getResourceID()); slotPool.offerSlots(taskManagerLocation, taskManagerGateway, slotOffers); slotRequestFuture.get(1, TimeUnit.SECONDS); final AllocatedSlotReport slotReport = slotPool.createAllocatedSlotReport(taskManagerLocation.getResourceID()); assertThat(jobId, is(slotReport.getJobId())); assertThat(slotReport.getAllocatedSlotInfos(), containsInAnyOrder(isEachEqual(allocatedSlotInfos))); } } @Test public void testCalculationOfTaskExecutorUtilization() throws Exception { try (final SlotPoolImpl slotPool = createAndSetUpSlotPool()) { final TaskManagerLocation firstTaskManagerLocation = new LocalTaskManagerLocation(); final TaskManagerLocation secondTaskManagerLocation = new LocalTaskManagerLocation(); final List<AllocationID> 
firstTaskManagersSlots = registerAndOfferSlots(firstTaskManagerLocation, slotPool, 4); final List<AllocationID> secondTaskManagersSlots = registerAndOfferSlots(secondTaskManagerLocation, slotPool, 4); slotPool.allocateAvailableSlot(new SlotRequestId(), firstTaskManagersSlots.get(0)); slotPool.allocateAvailableSlot(new SlotRequestId(), firstTaskManagersSlots.get(1)); slotPool.allocateAvailableSlot(new SlotRequestId(), secondTaskManagersSlots.get(3)); final Collection<SlotInfoWithUtilization> availableSlotsInformation = slotPool.getAvailableSlotsInformation(); final Map<TaskManagerLocation, Double> utilizationPerTaskExecutor = ImmutableMap.of( firstTaskManagerLocation, 2.0 / 4, secondTaskManagerLocation, 1.0 / 4); for (SlotInfoWithUtilization slotInfoWithUtilization : availableSlotsInformation) { final double expectedTaskExecutorUtilization = utilizationPerTaskExecutor.get(slotInfoWithUtilization.getTaskManagerLocation()); assertThat(slotInfoWithUtilization.getTaskExecutorUtilization(), is(closeTo(expectedTaskExecutorUtilization, 0.1))); } } } @Test public void testOrphanedAllocationCanBeRemapped() throws Exception { final List<AllocationID> allocationIds = new ArrayList<>(); resourceManagerGateway.setRequestSlotConsumer( slotRequest -> allocationIds.add(slotRequest.getAllocationId())); final List<AllocationID> canceledAllocations = new ArrayList<>(); resourceManagerGateway.setCancelSlotConsumer(canceledAllocations::add); try (SlotPoolImpl slotPool = createAndSetUpSlotPool()) { final SlotRequestId slotRequestId1 = new SlotRequestId(); final SlotRequestId slotRequestId2 = new SlotRequestId(); requestNewAllocatedSlots(slotPool, slotRequestId1, slotRequestId2); final AllocationID allocationId1 = allocationIds.get(0); final AllocationID allocationId2 = allocationIds.get(1); offerSlot(slotPool, allocationId2); assertThat(slotPool.getPendingRequests().values(), hasSize(1)); assertThat(slotPool.getPendingRequests().containsKeyA(slotRequestId2), is(true)); 
assertThat(slotPool.getPendingRequests().containsKeyB(allocationId1), is(true)); assertThat(canceledAllocations, hasSize(0)); } } @Test public void testOrphanedAllocationIsCanceledIfNotRemapped() throws Exception { final List<AllocationID> allocationIds = new ArrayList<>(); resourceManagerGateway.setRequestSlotConsumer( slotRequest -> allocationIds.add(slotRequest.getAllocationId())); final List<AllocationID> canceledAllocations = new ArrayList<>(); resourceManagerGateway.setCancelSlotConsumer(canceledAllocations::add); try (SlotPoolImpl slotPool = createAndSetUpSlotPool()) { final SlotRequestId slotRequestId1 = new SlotRequestId(); final SlotRequestId slotRequestId2 = new SlotRequestId(); requestNewAllocatedSlots(slotPool, slotRequestId1, slotRequestId2); final AllocationID allocationId1 = allocationIds.get(0); final AllocationID allocationId2 = allocationIds.get(1); AllocationID randomAllocationId; do { randomAllocationId = new AllocationID(); } while (randomAllocationId.equals(allocationId1) || randomAllocationId.equals(allocationId2)); offerSlot(slotPool, randomAllocationId); assertThat(slotPool.getPendingRequests().values(), hasSize(1)); assertThat(canceledAllocations, contains(allocationId1)); } } /** * In this case a slot is offered to the SlotPoolImpl before the ResourceManager is connected. * It can happen in production if a TaskExecutor is reconnected to a restarted JobMaster. 
*/ @Test public void testSlotsOfferedWithoutResourceManagerConnected() throws Exception { try (SlotPoolImpl slotPool = createSlotPoolImpl()) { slotPool.start(JobMasterId.generate(), "mock-address", mainThreadExecutor); final SlotRequestId slotRequestId = new SlotRequestId(); final CompletableFuture<PhysicalSlot> slotFuture = requestNewAllocatedSlot(slotPool, slotRequestId); assertThat(slotPool.getWaitingForResourceManager().values(), hasSize(1)); final AllocationID allocationId = new AllocationID(); offerSlot(slotPool, allocationId); assertThat(slotPool.getWaitingForResourceManager().values(), hasSize(0)); assertThat(slotFuture.isDone(), is(true)); assertThat(slotFuture.isCompletedExceptionally(), is(false)); assertThat(slotFuture.getNow(null).getAllocationId(), is(allocationId)); } } private void requestNewAllocatedSlots(final SlotPool slotPool, final SlotRequestId... slotRequestIds) { for (SlotRequestId slotRequestId : slotRequestIds) { requestNewAllocatedSlot(slotPool, slotRequestId); } } private CompletableFuture<PhysicalSlot> requestNewAllocatedSlot( final SlotPool slotPool, final SlotRequestId slotRequestId) { return slotPool.requestNewAllocatedSlot(slotRequestId, ResourceProfile.UNKNOWN, timeout); } private void offerSlot(final SlotPoolImpl slotPool, final AllocationID allocationId) { final SlotOffer slotOffer = new SlotOffer(allocationId, 0, ResourceProfile.ANY); slotPool.registerTaskManager(taskManagerLocation.getResourceID()); slotPool.offerSlot(taskManagerLocation, taskManagerGateway, slotOffer); } private List<AllocationID> registerAndOfferSlots(TaskManagerLocation taskManagerLocation, SlotPoolImpl slotPool, int numberOfSlotsToRegister) { slotPool.registerTaskManager(taskManagerLocation.getResourceID()); final List<AllocationID> allocationIds = IntStream.range(0, numberOfSlotsToRegister) .mapToObj(ignored -> new AllocationID()) .collect(Collectors.toList()); Collection<SlotOffer> slotOffers = IntStream.range(0, numberOfSlotsToRegister) .mapToObj(index 
-> new SlotOffer(allocationIds.get(index), index, ResourceProfile.ANY)) .collect(Collectors.toList()); slotPool.offerSlots( taskManagerLocation, new SimpleAckingTaskManagerGateway(), slotOffers); return allocationIds; } private static Collection<Matcher<? super AllocatedSlotInfo>> isEachEqual(Collection<AllocatedSlotInfo> allocatedSlotInfos) { return allocatedSlotInfos .stream() .map(SlotPoolImplTest::isEqualAllocatedSlotInfo) .collect(Collectors.toList()); } private static Matcher<AllocatedSlotInfo> isEqualAllocatedSlotInfo(AllocatedSlotInfo expectedAllocatedSlotInfo) { return new TypeSafeDiagnosingMatcher<AllocatedSlotInfo>() { @Override public void describeTo(Description description) { description.appendText(describeAllocatedSlotInformation(expectedAllocatedSlotInfo)); } private String describeAllocatedSlotInformation(AllocatedSlotInfo expectedAllocatedSlotInformation) { return expectedAllocatedSlotInformation.toString(); } @Override protected boolean matchesSafely(AllocatedSlotInfo item, Description mismatchDescription) { final boolean matches = item.getAllocationId().equals(expectedAllocatedSlotInfo.getAllocationId()) && item.getSlotIndex() == expectedAllocatedSlotInfo.getSlotIndex(); if (!matches) { mismatchDescription .appendText("Actual value ") .appendText(describeAllocatedSlotInformation(item)) .appendText(" differs from expected value ") .appendText(describeAllocatedSlotInformation(expectedAllocatedSlotInfo)); } return matches; } }; } private SlotPoolImpl createAndSetUpSlotPool() throws Exception { final SlotPoolImpl slotPool = createSlotPoolImpl(); setupSlotPool(slotPool, resourceManagerGateway, mainThreadExecutor); return slotPool; } private static void setupSlotPool( SlotPoolImpl slotPool, ResourceManagerGateway resourceManagerGateway, ComponentMainThreadExecutor mainThreadExecutable) throws Exception { final String jobManagerAddress = "foobar"; slotPool.start(JobMasterId.generate(), jobManagerAddress, mainThreadExecutable); 
slotPool.connectToResourceManager(resourceManagerGateway); } }
class SlotPoolImplTest extends TestLogger { private static final Time TIMEOUT = SlotPoolUtils.TIMEOUT; private TaskManagerLocation taskManagerLocation; private SimpleAckingTaskManagerGateway taskManagerGateway; private TestingResourceManagerGateway resourceManagerGateway; private static final ComponentMainThreadExecutor mainThreadExecutor = ComponentMainThreadExecutorServiceAdapter.forMainThread(); @Before public void setUp() throws Exception { taskManagerLocation = new LocalTaskManagerLocation(); taskManagerGateway = new SimpleAckingTaskManagerGateway(); resourceManagerGateway = new TestingResourceManagerGateway(); } @Test public void testAllocateSimpleSlot() throws Exception { CompletableFuture<SlotRequest> slotRequestFuture = new CompletableFuture<>(); resourceManagerGateway.setRequestSlotConsumer(slotRequestFuture::complete); try (SlotPoolImpl slotPool = createAndSetUpSlotPool(resourceManagerGateway)) { final SlotRequestId requestId = new SlotRequestId(); final CompletableFuture<PhysicalSlot> future = requestNewAllocatedSlot(slotPool, requestId); assertFalse(future.isDone()); final SlotRequest slotRequest = slotRequestFuture.get(TIMEOUT.toMilliseconds(), TimeUnit.MILLISECONDS); assertTrue(registerAndOfferSlot(taskManagerLocation, slotPool, slotRequest.getAllocationId())); final PhysicalSlot physicalSlot = future.get(1, TimeUnit.SECONDS); assertTrue(future.isDone()); assertEquals(taskManagerLocation, physicalSlot.getTaskManagerLocation()); assertEquals(slotRequest.getAllocationId(), physicalSlot.getAllocationId()); } } @Test public void testAllocationFulfilledByReturnedSlot() throws Exception { final ArrayBlockingQueue<SlotRequest> slotRequestQueue = new ArrayBlockingQueue<>(2); resourceManagerGateway.setRequestSlotConsumer(slotRequest -> { while (!slotRequestQueue.offer(slotRequest)) { } }); try (SlotPoolImpl slotPool = createAndSetUpSlotPool(resourceManagerGateway)) { final SlotRequestId requestId1 = new SlotRequestId(); final CompletableFuture<PhysicalSlot> 
future1 = requestNewAllocatedSlot( slotPool, requestId1); final SlotRequestId requestId2 = new SlotRequestId(); final CompletableFuture<PhysicalSlot> future2 = requestNewAllocatedSlot( slotPool, requestId2); assertFalse(future1.isDone()); assertFalse(future2.isDone()); final List<SlotRequest> slotRequests = new ArrayList<>(2); for (int i = 0; i < 2; i++) { slotRequests.add(slotRequestQueue.poll(TIMEOUT.toMilliseconds(), TimeUnit.MILLISECONDS)); } assertTrue(registerAndOfferSlot(taskManagerLocation, slotPool, slotRequests.get(0).getAllocationId())); final PhysicalSlot slot1 = future1.get(1, TimeUnit.SECONDS); assertTrue(future1.isDone()); assertFalse(future2.isDone()); slotPool.releaseSlot(requestId1, null); final PhysicalSlot slot2 = future2.get(1, TimeUnit.SECONDS); assertTrue(future2.isDone()); assertEquals(slot1, slot2); } } @Test @Test public void testOfferSlot() throws Exception { final CompletableFuture<SlotRequest> slotRequestFuture = new CompletableFuture<>(); resourceManagerGateway.setRequestSlotConsumer(slotRequestFuture::complete); try (SlotPoolImpl slotPool = createAndSetUpSlotPool(resourceManagerGateway)) { slotPool.registerTaskManager(taskManagerLocation.getResourceID()); final SlotRequestId requestId = new SlotRequestId(); final CompletableFuture<PhysicalSlot> future = requestNewAllocatedSlot( slotPool, requestId); assertFalse(future.isDone()); final SlotRequest slotRequest = slotRequestFuture.get(TIMEOUT.toMilliseconds(), TimeUnit.MILLISECONDS); final SlotOffer slotOffer = new SlotOffer( slotRequest.getAllocationId(), 0, ResourceProfile.ANY); final TaskManagerLocation invalidTaskManagerLocation = new LocalTaskManagerLocation(); assertFalse(slotPool.offerSlot(invalidTaskManagerLocation, taskManagerGateway, slotOffer)); final SlotOffer nonRequestedSlotOffer = new SlotOffer( new AllocationID(), 0, ResourceProfile.ANY); assertTrue(slotPool.offerSlot(taskManagerLocation, taskManagerGateway, nonRequestedSlotOffer)); assertEquals(1, 
slotPool.getAllocatedSlots().size()); final PhysicalSlot slot = future.get(TIMEOUT.toMilliseconds(), TimeUnit.MILLISECONDS); assertEquals(taskManagerLocation, slot.getTaskManagerLocation()); assertEquals(nonRequestedSlotOffer.getAllocationId(), slot.getAllocationId()); assertTrue(slotPool.offerSlot(taskManagerLocation, taskManagerGateway, slotOffer)); assertEquals(1, slotPool.getAvailableSlots().size()); assertTrue(slotPool.offerSlot(taskManagerLocation, taskManagerGateway, slotOffer)); assertEquals(1, slotPool.getAvailableSlots().size()); assertEquals(1, slotPool.getAllocatedSlots().size()); final SlotOffer anotherSlotOfferWithSameAllocationId = new SlotOffer( slotRequest.getAllocationId(), 1, ResourceProfile.ANY); assertFalse(slotPool.offerSlot(taskManagerLocation, taskManagerGateway, anotherSlotOfferWithSameAllocationId)); TaskManagerLocation anotherTaskManagerLocation = new LocalTaskManagerLocation(); assertFalse(slotPool.offerSlot(anotherTaskManagerLocation, taskManagerGateway, slotOffer)); slotPool.releaseSlot(requestId, null); assertTrue(slotPool.offerSlot(taskManagerLocation, taskManagerGateway, slotOffer)); assertFalse(slotPool.offerSlot(taskManagerLocation, taskManagerGateway, anotherSlotOfferWithSameAllocationId)); assertFalse(slotPool.offerSlot(anotherTaskManagerLocation, taskManagerGateway, slotOffer)); } } @Test public void testReleaseResource() throws Exception { final CompletableFuture<SlotRequest> slotRequestFuture = new CompletableFuture<>(); resourceManagerGateway.setRequestSlotConsumer(slotRequestFuture::complete); try (SlotPoolImpl slotPool = createAndSetUpSlotPool(resourceManagerGateway)) { final SlotRequestId requestId1 = new SlotRequestId(); final CompletableFuture<PhysicalSlot> future1 = requestNewAllocatedSlot( slotPool, requestId1); final SlotRequest slotRequest = slotRequestFuture.get(TIMEOUT.toMilliseconds(), TimeUnit.MILLISECONDS); final CompletableFuture<PhysicalSlot> future2 = requestNewAllocatedSlot( slotPool, new SlotRequestId()); 
assertTrue(registerAndOfferSlot(taskManagerLocation, slotPool, slotRequest.getAllocationId())); final PhysicalSlot slot1 = future1.get(1, TimeUnit.SECONDS); assertTrue(future1.isDone()); assertFalse(future2.isDone()); final CompletableFuture<?> releaseFuture = new CompletableFuture<>(); final SingleLogicalSlot logicalSlot = SingleLogicalSlot.allocateFromPhysicalSlot( requestId1, slot1, Locality.UNKNOWN, new DummySlotOwner(), true); logicalSlot.tryAssignPayload(new DummyPayload(releaseFuture)); slotPool.releaseTaskManager(taskManagerLocation.getResourceID(), null); releaseFuture.get(); assertFalse(logicalSlot.isAlive()); Thread.sleep(10); assertFalse(future2.isDone()); } } /** * Tests that unused offered slots are directly used to fulfill pending slot * requests. * * <p>Moreover it tests that the old slot request is canceled * * <p>See FLINK-8089, FLINK-8934 */ @Test public void testFulfillingSlotRequestsWithUnusedOfferedSlots() throws Exception { try (SlotPoolImpl slotPool = createAndSetUpSlotPool(resourceManagerGateway)) { final ArrayBlockingQueue<AllocationID> allocationIds = new ArrayBlockingQueue<>(2); resourceManagerGateway.setRequestSlotConsumer( (SlotRequest slotRequest) -> allocationIds.offer(slotRequest.getAllocationId())); final ArrayBlockingQueue<AllocationID> canceledAllocations = new ArrayBlockingQueue<>(2); resourceManagerGateway.setCancelSlotConsumer(canceledAllocations::offer); final SlotRequestId slotRequestId1 = new SlotRequestId(); final SlotRequestId slotRequestId2 = new SlotRequestId(); final CompletableFuture<PhysicalSlot> slotFuture1 = requestNewAllocatedSlot( slotPool, slotRequestId1); final AllocationID allocationId1 = allocationIds.take(); final CompletableFuture<PhysicalSlot> slotFuture2 = requestNewAllocatedSlot( slotPool, slotRequestId2); final AllocationID allocationId2 = allocationIds.take(); slotPool.releaseSlot(slotRequestId1, null); try { slotFuture1.get(); fail("The first slot future should have failed because it was cancelled."); 
} catch (ExecutionException ee) { assertTrue(ExceptionUtils.stripExecutionException(ee) instanceof FlinkException); } assertEquals(allocationId1, canceledAllocations.take()); assertTrue(registerAndOfferSlot(taskManagerLocation, slotPool, allocationId1)); assertEquals(allocationId1, slotFuture2.get().getAllocationId()); assertEquals(allocationId2, canceledAllocations.take()); } } /** * Tests that a SlotPoolImpl shutdown releases all registered slots. */ @Test public void testShutdownReleasesAllSlots() throws Exception { try (SlotPoolImpl slotPool = createAndSetUpSlotPool(resourceManagerGateway)) { slotPool.registerTaskManager(taskManagerLocation.getResourceID()); final int numSlotOffers = 2; final Collection<SlotOffer> slotOffers = new ArrayList<>(numSlotOffers); for (int i = 0; i < numSlotOffers; i++) { slotOffers.add( new SlotOffer( new AllocationID(), i, ResourceProfile.ANY)); } final ArrayBlockingQueue<AllocationID> freedSlotQueue = new ArrayBlockingQueue<>(numSlotOffers); taskManagerGateway.setFreeSlotFunction( (AllocationID allocationID, Throwable cause) -> { try { freedSlotQueue.put(allocationID); return CompletableFuture.completedFuture(Acknowledge.get()); } catch (InterruptedException e) { return FutureUtils.completedExceptionally(e); } }); final Collection<SlotOffer> acceptedSlotOffers = slotPool.offerSlots(taskManagerLocation, taskManagerGateway, slotOffers); assertThat(acceptedSlotOffers, Matchers.equalTo(slotOffers)); slotPool.close(); ArrayList<AllocationID> freedSlots = new ArrayList<>(numSlotOffers); while (freedSlots.size() < numSlotOffers) { freedSlotQueue.drainTo(freedSlots); } assertThat(freedSlots, Matchers.containsInAnyOrder(slotOffers.stream().map(SlotOffer::getAllocationId).toArray())); } } @Test public void testCheckIdleSlot() throws Exception { final ManualClock clock = new ManualClock(); try (TestingSlotPoolImpl slotPool = createAndSetUpSlotPool(resourceManagerGateway, clock, TIMEOUT)) { final BlockingQueue<AllocationID> freedSlots = new 
ArrayBlockingQueue<>(1); taskManagerGateway.setFreeSlotFunction( (AllocationID allocationId, Throwable cause) -> { try { freedSlots.put(allocationId); return CompletableFuture.completedFuture(Acknowledge.get()); } catch (InterruptedException e) { return FutureUtils.completedExceptionally(e); } }); final AllocationID expiredSlotID = new AllocationID(); final AllocationID freshSlotID = new AllocationID(); final SlotOffer slotToExpire = new SlotOffer(expiredSlotID, 0, ResourceProfile.ANY); final SlotOffer slotToNotExpire = new SlotOffer(freshSlotID, 1, ResourceProfile.ANY); assertThat(slotPool.registerTaskManager(taskManagerLocation.getResourceID()), Matchers.is(true)); assertThat( slotPool.offerSlot(taskManagerLocation, taskManagerGateway, slotToExpire), Matchers.is(true)); clock.advanceTime(TIMEOUT.toMilliseconds(), TimeUnit.MILLISECONDS); assertThat(slotPool.offerSlot(taskManagerLocation, taskManagerGateway, slotToNotExpire), Matchers.is(true)); clock.advanceTime(1L, TimeUnit.MILLISECONDS); slotPool.triggerCheckIdleSlot(); final AllocationID freedSlot = freedSlots.poll(TIMEOUT.toMilliseconds(), TimeUnit.MILLISECONDS); assertThat(freedSlot, Matchers.is(expiredSlotID)); assertThat(freedSlots.isEmpty(), Matchers.is(true)); } } /** * Tests that idle slots which cannot be released will be discarded. See FLINK-11059. 
*/ @Test public void testDiscardIdleSlotIfReleasingFailed() throws Exception { final ManualClock clock = new ManualClock(); try (TestingSlotPoolImpl slotPool = createAndSetUpSlotPool(resourceManagerGateway, clock, TIMEOUT)) { final AllocationID expiredAllocationId = new AllocationID(); final SlotOffer slotToExpire = new SlotOffer(expiredAllocationId, 0, ResourceProfile.ANY); OneShotLatch freeSlotLatch = new OneShotLatch(); taskManagerGateway.setFreeSlotFunction((AllocationID allocationId, Throwable cause) -> { freeSlotLatch.trigger(); return FutureUtils.completedExceptionally(new TimeoutException("Test failure")); }); assertThat(slotPool.registerTaskManager(taskManagerLocation.getResourceID()), Matchers.is(true)); assertThat(slotPool.offerSlot(taskManagerLocation, taskManagerGateway, slotToExpire), Matchers.is(true)); clock.advanceTime(TIMEOUT.toMilliseconds() + 1, TimeUnit.MILLISECONDS); slotPool.triggerCheckIdleSlot(); freeSlotLatch.await(); final CompletableFuture<PhysicalSlot> allocatedSlotFuture = requestNewAllocatedSlot( slotPool, new SlotRequestId()); try { allocatedSlotFuture.get(10L, TimeUnit.MILLISECONDS); fail("Expected to fail with a timeout."); } catch (TimeoutException ignored) { assertEquals(0, slotPool.getAvailableSlots().size()); } } } /** * Tests that failed slots are freed on the {@link TaskExecutor}. 
*/ @Test public void testFreeFailedSlots() throws Exception { try (SlotPoolImpl slotPool = createAndSetUpSlotPool(resourceManagerGateway)) { final int parallelism = 5; final ArrayBlockingQueue<AllocationID> allocationIds = new ArrayBlockingQueue<>(parallelism); resourceManagerGateway.setRequestSlotConsumer( slotRequest -> allocationIds.offer(slotRequest.getAllocationId())); final Map<SlotRequestId, CompletableFuture<PhysicalSlot>> slotRequestFutures = new HashMap<>(parallelism); for (int i = 0; i < parallelism; i++) { final SlotRequestId slotRequestId = new SlotRequestId(); slotRequestFutures.put(slotRequestId, requestNewAllocatedSlot(slotPool, slotRequestId)); } final List<SlotOffer> slotOffers = new ArrayList<>(parallelism); for (int i = 0; i < parallelism; i++) { slotOffers.add(new SlotOffer(allocationIds.take(), i, ResourceProfile.ANY)); } slotPool.registerTaskManager(taskManagerLocation.getResourceID()); slotPool.offerSlots(taskManagerLocation, taskManagerGateway, slotOffers); FutureUtils.waitForAll(slotRequestFutures.values()).get(); final ArrayBlockingQueue<AllocationID> freedSlots = new ArrayBlockingQueue<>(1); taskManagerGateway.setFreeSlotFunction( (allocationID, throwable) -> { freedSlots.offer(allocationID); return CompletableFuture.completedFuture(Acknowledge.get()); }); final FlinkException failException = new FlinkException("Test fail exception"); for (int i = 0; i < parallelism - 1; i++) { final SlotOffer slotOffer = slotOffers.get(i); Optional<ResourceID> emptyTaskExecutorFuture = slotPool.failAllocation(slotOffer.getAllocationId(), failException); assertThat(emptyTaskExecutorFuture.isPresent(), is(false)); assertThat(freedSlots.take(), is(equalTo(slotOffer.getAllocationId()))); } final SlotOffer slotOffer = slotOffers.get(parallelism - 1); final Optional<ResourceID> emptyTaskExecutorFuture = slotPool.failAllocation( slotOffer.getAllocationId(), failException); assertThat(emptyTaskExecutorFuture.get(), 
is(equalTo(taskManagerLocation.getResourceID()))); assertThat(freedSlots.take(), is(equalTo(slotOffer.getAllocationId()))); } } /** * Tests that create report of allocated slots on a {@link TaskExecutor}. */ @Test public void testCreateAllocatedSlotReport() throws Exception { final JobID jobId = new JobID(); try (SlotPoolImpl slotPool = createAndSetUpSlotPool(resourceManagerGateway, jobId)) { final ArrayBlockingQueue<AllocationID> allocationIds = new ArrayBlockingQueue<>(1); resourceManagerGateway.setRequestSlotConsumer( slotRequest -> allocationIds.offer(slotRequest.getAllocationId())); final CompletableFuture<PhysicalSlot> slotRequestFuture = requestNewAllocatedSlot( slotPool, new SlotRequestId()); final List<AllocatedSlotInfo> allocatedSlotInfos = new ArrayList<>(2); final List<SlotOffer> slotOffers = new ArrayList<>(2); final AllocationID allocatedId = allocationIds.take(); slotOffers.add(new SlotOffer(allocatedId, 0, ResourceProfile.ANY)); allocatedSlotInfos.add(new AllocatedSlotInfo(0, allocatedId)); final AllocationID availableId = new AllocationID(); slotOffers.add(new SlotOffer(availableId, 1, ResourceProfile.ANY)); allocatedSlotInfos.add(new AllocatedSlotInfo(1, availableId)); slotPool.registerTaskManager(taskManagerLocation.getResourceID()); slotPool.offerSlots(taskManagerLocation, taskManagerGateway, slotOffers); slotRequestFuture.get(); final AllocatedSlotReport slotReport = slotPool.createAllocatedSlotReport(taskManagerLocation.getResourceID()); assertThat(jobId, is(slotReport.getJobId())); assertThat(slotReport.getAllocatedSlotInfos(), containsInAnyOrder(isEachEqual(allocatedSlotInfos))); } } @Test public void testCalculationOfTaskExecutorUtilization() throws Exception { try (final SlotPoolImpl slotPool = createAndSetUpSlotPool(resourceManagerGateway)) { final TaskManagerLocation firstTaskManagerLocation = new LocalTaskManagerLocation(); final TaskManagerLocation secondTaskManagerLocation = new LocalTaskManagerLocation(); final List<AllocationID> 
firstTaskManagersSlots = registerAndOfferSlots(firstTaskManagerLocation, slotPool, 4); final List<AllocationID> secondTaskManagersSlots = registerAndOfferSlots(secondTaskManagerLocation, slotPool, 4); slotPool.allocateAvailableSlot(new SlotRequestId(), firstTaskManagersSlots.get(0)); slotPool.allocateAvailableSlot(new SlotRequestId(), firstTaskManagersSlots.get(1)); slotPool.allocateAvailableSlot(new SlotRequestId(), secondTaskManagersSlots.get(3)); final Collection<SlotInfoWithUtilization> availableSlotsInformation = slotPool.getAvailableSlotsInformation(); final Map<TaskManagerLocation, Double> utilizationPerTaskExecutor = ImmutableMap.of( firstTaskManagerLocation, 2.0 / 4, secondTaskManagerLocation, 1.0 / 4); for (SlotInfoWithUtilization slotInfoWithUtilization : availableSlotsInformation) { final double expectedTaskExecutorUtilization = utilizationPerTaskExecutor.get(slotInfoWithUtilization.getTaskManagerLocation()); assertThat(slotInfoWithUtilization.getTaskExecutorUtilization(), is(closeTo(expectedTaskExecutorUtilization, 0.1))); } } } @Test public void testOrphanedAllocationCanBeRemapped() throws Exception { final List<AllocationID> allocationIds = new ArrayList<>(); resourceManagerGateway.setRequestSlotConsumer( slotRequest -> allocationIds.add(slotRequest.getAllocationId())); final List<AllocationID> canceledAllocations = new ArrayList<>(); resourceManagerGateway.setCancelSlotConsumer(canceledAllocations::add); try (SlotPoolImpl slotPool = createAndSetUpSlotPool(resourceManagerGateway)) { final SlotRequestId slotRequestId1 = new SlotRequestId(); final SlotRequestId slotRequestId2 = new SlotRequestId(); requestNewAllocatedSlots(slotPool, slotRequestId1, slotRequestId2); final AllocationID allocationId1 = allocationIds.get(0); final AllocationID allocationId2 = allocationIds.get(1); registerAndOfferSlot(taskManagerLocation, slotPool, allocationId2); assertThat(slotPool.getPendingRequests().values(), hasSize(1)); 
assertThat(slotPool.getPendingRequests().containsKeyA(slotRequestId2), is(true)); assertThat(slotPool.getPendingRequests().containsKeyB(allocationId1), is(true)); assertThat(canceledAllocations, hasSize(0)); } } @Test public void testOrphanedAllocationIsCanceledIfNotRemapped() throws Exception { final List<AllocationID> allocationIds = new ArrayList<>(); resourceManagerGateway.setRequestSlotConsumer( slotRequest -> allocationIds.add(slotRequest.getAllocationId())); final List<AllocationID> canceledAllocations = new ArrayList<>(); resourceManagerGateway.setCancelSlotConsumer(canceledAllocations::add); try (SlotPoolImpl slotPool = SlotPoolUtils.createAndSetUpSlotPool(resourceManagerGateway)) { final SlotRequestId slotRequestId1 = new SlotRequestId(); final SlotRequestId slotRequestId2 = new SlotRequestId(); requestNewAllocatedSlots(slotPool, slotRequestId1, slotRequestId2); final AllocationID allocationId1 = allocationIds.get(0); final AllocationID allocationId2 = allocationIds.get(1); AllocationID randomAllocationId; do { randomAllocationId = new AllocationID(); } while (randomAllocationId.equals(allocationId1) || randomAllocationId.equals(allocationId2)); registerAndOfferSlot(taskManagerLocation, slotPool, randomAllocationId); assertThat(slotPool.getPendingRequests().values(), hasSize(1)); assertThat(canceledAllocations, contains(allocationId1)); } } /** * In this case a slot is offered to the SlotPoolImpl before the ResourceManager is connected. * It can happen in production if a TaskExecutor is reconnected to a restarted JobMaster. 
*/ @Test public void testSlotsOfferedWithoutResourceManagerConnected() throws Exception { try (SlotPoolImpl slotPool = new TestingSlotPoolImpl(new JobID())) { slotPool.start(JobMasterId.generate(), "mock-address", mainThreadExecutor); final SlotRequestId slotRequestId = new SlotRequestId(); final CompletableFuture<PhysicalSlot> slotFuture = requestNewAllocatedSlot(slotPool, slotRequestId); assertThat(slotPool.getWaitingForResourceManager().values(), hasSize(1)); final AllocationID allocationId = new AllocationID(); registerAndOfferSlot(taskManagerLocation, slotPool, allocationId); assertThat(slotPool.getWaitingForResourceManager().values(), hasSize(0)); assertThat(slotFuture.isDone(), is(true)); assertThat(slotFuture.isCompletedExceptionally(), is(false)); assertThat(slotFuture.getNow(null).getAllocationId(), is(allocationId)); } } private static TestingSlotPoolImpl createAndSetUpSlotPool( final ResourceManagerGateway resourceManagerGateway) throws Exception { return new SlotPoolBuilder(mainThreadExecutor).setResourceManagerGateway(resourceManagerGateway).build(); } private static TestingSlotPoolImpl createAndSetUpSlotPool( final ResourceManagerGateway resourceManagerGateway, final JobID jobId) throws Exception { return new SlotPoolBuilder(mainThreadExecutor) .setResourceManagerGateway(resourceManagerGateway) .setJobId(jobId) .build(); } private static TestingSlotPoolImpl createAndSetUpSlotPool( final ResourceManagerGateway resourceManagerGateway, final Clock clock, final Time idleSlotTimeout) throws Exception { return new SlotPoolBuilder(mainThreadExecutor) .setResourceManagerGateway(resourceManagerGateway) .setClock(clock) .setIdleSlotTimeout(idleSlotTimeout) .build(); } private boolean registerAndOfferSlot( final TaskManagerLocation taskManagerLocation, final SlotPoolImpl slotPool, final AllocationID allocationId) { slotPool.registerTaskManager(taskManagerLocation.getResourceID()); final SlotOffer slotOffer = new SlotOffer(allocationId, 0, ResourceProfile.ANY); 
return slotPool.offerSlot(taskManagerLocation, taskManagerGateway, slotOffer); } private List<AllocationID> registerAndOfferSlots(TaskManagerLocation taskManagerLocation, SlotPoolImpl slotPool, int numberOfSlotsToRegister) { slotPool.registerTaskManager(taskManagerLocation.getResourceID()); final List<AllocationID> allocationIds = IntStream.range(0, numberOfSlotsToRegister) .mapToObj(ignored -> new AllocationID()) .collect(Collectors.toList()); Collection<SlotOffer> slotOffers = IntStream.range(0, numberOfSlotsToRegister) .mapToObj(index -> new SlotOffer(allocationIds.get(index), index, ResourceProfile.ANY)) .collect(Collectors.toList()); slotPool.offerSlots( taskManagerLocation, new SimpleAckingTaskManagerGateway(), slotOffers); return allocationIds; } private static Collection<Matcher<? super AllocatedSlotInfo>> isEachEqual(Collection<AllocatedSlotInfo> allocatedSlotInfos) { return allocatedSlotInfos .stream() .map(SlotPoolImplTest::isEqualAllocatedSlotInfo) .collect(Collectors.toList()); } private static Matcher<AllocatedSlotInfo> isEqualAllocatedSlotInfo(AllocatedSlotInfo expectedAllocatedSlotInfo) { return new TypeSafeDiagnosingMatcher<AllocatedSlotInfo>() { @Override public void describeTo(Description description) { description.appendText(describeAllocatedSlotInformation(expectedAllocatedSlotInfo)); } private String describeAllocatedSlotInformation(AllocatedSlotInfo expectedAllocatedSlotInformation) { return expectedAllocatedSlotInformation.toString(); } @Override protected boolean matchesSafely(AllocatedSlotInfo item, Description mismatchDescription) { final boolean matches = item.getAllocationId().equals(expectedAllocatedSlotInfo.getAllocationId()) && item.getSlotIndex() == expectedAllocatedSlotInfo.getSlotIndex(); if (!matches) { mismatchDescription .appendText("Actual value ") .appendText(describeAllocatedSlotInformation(item)) .appendText(" differs from expected value ") .appendText(describeAllocatedSlotInformation(expectedAllocatedSlotInfo)); } return 
matches; } }; } }
It's better to just let the `SecretNotFoundException` be thrown as it's more descriptive than `orElseThrow()`.
private static TlsCredentials latestValidCredentials(SecretStore secretStore, TlsConfig tlsConfig) { int version = latestVersionInSecretStore(secretStore, tlsConfig).orElseThrow(); return new TlsCredentials(certificates(secretStore, tlsConfig, version), privateKey(secretStore, tlsConfig, version)); }
int version = latestVersionInSecretStore(secretStore, tlsConfig).orElseThrow();
private static TlsCredentials latestValidCredentials(SecretStore secretStore, TlsConfig tlsConfig) { int version = latestVersionInSecretStore(secretStore, tlsConfig); return new TlsCredentials(certificates(secretStore, tlsConfig, version), privateKey(secretStore, tlsConfig, version)); }
class ControllerSslContextFactoryProvider extends TlsContextBasedProvider { private final KeyStore truststore; private final KeyStore keystore; private final Map<Integer, TlsContext> tlsContextMap = new ConcurrentHashMap<>(); @Inject public ControllerSslContextFactoryProvider(SecretStore secretStore, TlsConfig config) { if (!Files.isReadable(Paths.get(config.caTrustStore()))) { throw new IllegalArgumentException("CA trust store file is not readable: " + config.caTrustStore()); } this.truststore = KeyStoreBuilder.withType(KeyStoreType.JKS) .fromFile(Paths.get(config.caTrustStore())) .build(); TlsCredentials tlsCredentials = latestValidCredentials(secretStore, config); this.keystore = KeyStoreBuilder.withType(KeyStoreType.JKS) .withKeyEntry(getClass().getSimpleName(), tlsCredentials.privateKey, tlsCredentials.certificates) .build(); } @Override protected TlsContext getTlsContext(String containerId, int port) { return tlsContextMap.computeIfAbsent(port, this::createTlsContext); } private TlsContext createTlsContext(int port) { return new DefaultTlsContext( new SslContextBuilder() .withKeyStore(keystore, new char[0]) .withTrustStore(truststore) .build(), port != 443 ? 
PeerAuthentication.WANT : PeerAuthentication.DISABLED); } record TlsCredentials(List<X509Certificate> certificates, PrivateKey privateKey){} private static OptionalInt latestVersionInSecretStore(SecretStore secretStore, TlsConfig tlsConfig) { try { var certVersions = new HashSet<>(secretStore.listSecretVersions(tlsConfig.certificateSecret())); var keyVersions = new HashSet<>(secretStore.listSecretVersions(tlsConfig.privateKeySecret())); return Sets.intersection(certVersions, keyVersions).stream().mapToInt(Integer::intValue).max(); } catch (SecretNotFoundException s) { return OptionalInt.empty(); } } /** Get private key from secret store **/ private static PrivateKey privateKey(SecretStore secretStore, TlsConfig config, int version) { return KeyUtils.fromPemEncodedPrivateKey(secretStore.getSecret(config.privateKeySecret(), version)); } /** * Get certificate from secret store. If certificate secret contains multiple certificates, e.g. intermediate * certificates, the entire chain will be read */ private static List<X509Certificate> certificates(SecretStore secretStore, TlsConfig config, int version) { return X509CertificateUtils.certificateListFromPem(secretStore.getSecret(config.certificateSecret(), version)); } }
class ControllerSslContextFactoryProvider extends TlsContextBasedProvider { private final KeyStore truststore; private final KeyStore keystore; private final Map<Integer, TlsContext> tlsContextMap = new ConcurrentHashMap<>(); @Inject public ControllerSslContextFactoryProvider(SecretStore secretStore, TlsConfig config) { if (!Files.isReadable(Paths.get(config.caTrustStore()))) { throw new IllegalArgumentException("CA trust store file is not readable: " + config.caTrustStore()); } this.truststore = KeyStoreBuilder.withType(KeyStoreType.JKS) .fromFile(Paths.get(config.caTrustStore())) .build(); TlsCredentials tlsCredentials = latestValidCredentials(secretStore, config); this.keystore = KeyStoreBuilder.withType(KeyStoreType.JKS) .withKeyEntry(getClass().getSimpleName(), tlsCredentials.privateKey, tlsCredentials.certificates) .build(); } @Override protected TlsContext getTlsContext(String containerId, int port) { return tlsContextMap.computeIfAbsent(port, this::createTlsContext); } private TlsContext createTlsContext(int port) { return new DefaultTlsContext( new SslContextBuilder() .withKeyStore(keystore, new char[0]) .withTrustStore(truststore) .build(), port != 443 ? 
PeerAuthentication.WANT : PeerAuthentication.DISABLED); } record TlsCredentials(List<X509Certificate> certificates, PrivateKey privateKey){} private static int latestVersionInSecretStore(SecretStore secretStore, TlsConfig tlsConfig) { var certVersions = new HashSet<>(secretStore.listSecretVersions(tlsConfig.certificateSecret())); var keyVersions = new HashSet<>(secretStore.listSecretVersions(tlsConfig.privateKeySecret())); return Sets.intersection(certVersions, keyVersions).stream().mapToInt(Integer::intValue).max().orElseThrow( () -> new RuntimeException("No valid certificate versions found in secret store!") ); } /** Get private key from secret store **/ private static PrivateKey privateKey(SecretStore secretStore, TlsConfig config, int version) { return KeyUtils.fromPemEncodedPrivateKey(secretStore.getSecret(config.privateKeySecret(), version)); } /** * Get certificate from secret store. If certificate secret contains multiple certificates, e.g. intermediate * certificates, the entire chain will be read */ private static List<X509Certificate> certificates(SecretStore secretStore, TlsConfig config, int version) { return X509CertificateUtils.certificateListFromPem(secretStore.getSecret(config.certificateSecret(), version)); } }
Possibly double that as we do prepare, then prepareWithLocks.
public boolean canAllocateTenantNodeTo(Node host, boolean dynamicProvisioning) { if ( ! host.type().canRun(NodeType.tenant)) return false; if (host.status().wantToRetire()) return false; if (host.allocation().map(alloc -> alloc.membership().retired()).orElse(false)) return false; if (suspended(host)) return false; if (dynamicProvisioning) return EnumSet.of(Node.State.active, Node.State.ready, Node.State.provisioned).contains(host.state()); else return host.state() == Node.State.active; }
if (suspended(host)) return false;
public boolean canAllocateTenantNodeTo(Node host, boolean dynamicProvisioning) { if ( ! host.type().canRun(NodeType.tenant)) return false; if (host.status().wantToRetire()) return false; if (host.allocation().map(alloc -> alloc.membership().retired()).orElse(false)) return false; if (suspended(host)) return false; if (dynamicProvisioning) return EnumSet.of(Node.State.active, Node.State.ready, Node.State.provisioned).contains(host.state()); else return host.state() == Node.State.active; }
class Nodes { private static final Logger log = Logger.getLogger(Nodes.class.getName()); private final CuratorDatabaseClient db; private final Zone zone; private final Clock clock; private final Orchestrator orchestrator; public Nodes(CuratorDatabaseClient db, Zone zone, Clock clock, Orchestrator orchestrator) { this.zone = zone; this.clock = clock; this.db = db; this.orchestrator = orchestrator; } /** Read and write all nodes to make sure they are stored in the latest version of the serialized format */ public void rewrite() { Instant start = clock.instant(); int nodesWritten = 0; for (Node.State state : Node.State.values()) { List<Node> nodes = db.readNodes(state); db.writeTo(state, nodes, Agent.system, Optional.empty()); nodesWritten += nodes.size(); } Instant end = clock.instant(); log.log(Level.INFO, String.format("Rewrote %d nodes in %s", nodesWritten, Duration.between(start, end))); } /** * Finds and returns the node with the hostname in any of the given states, or empty if not found * * @param hostname the full host name of the node * @param inState the states the node may be in. If no states are given, it will be returned from any state * @return the node, or empty if it was not found in any of the given states */ public Optional<Node> node(String hostname, Node.State... inState) { return db.readNode(hostname, inState); } /** * Returns a list of nodes in this repository in any of the given states * * @param inState the states to return nodes from. If no states are given, all nodes of the given type are returned */ public NodeList list(Node.State... inState) { return NodeList.copyOf(db.readNodes(inState)); } /** Returns a locked list of all nodes in this repository */ public LockedNodeList list(Mutex lock) { return new LockedNodeList(list().asList(), lock); } /** * Returns whether the zone managed by this node repository seems to be working. 
* If too many nodes are not responding, there is probably some zone-wide issue * and we should probably refrain from making changes to it. */ public boolean isWorking() { NodeList activeNodes = list(Node.State.active); if (activeNodes.size() <= 5) return true; NodeList downNodes = activeNodes.down(); return ! ( (double)downNodes.size() / (double)activeNodes.size() > 0.2 ); } /** Adds a list of newly created reserved nodes to the node repository */ public List<Node> addReservedNodes(LockedNodeList nodes) { for (Node node : nodes) { if ( node.flavor().getType() != Flavor.Type.DOCKER_CONTAINER) illegal("Cannot add " + node + ": This is not a child node"); if (node.allocation().isEmpty()) illegal("Cannot add " + node + ": Child nodes need to be allocated"); Optional<Node> existing = node(node.hostname()); if (existing.isPresent()) illegal("Cannot add " + node + ": A node with this name already exists (" + existing.get() + ", " + existing.get().history() + "). Node to be added: " + node + ", " + node.history()); } return db.addNodesInState(nodes.asList(), Node.State.reserved, Agent.system); } /** * Adds a list of (newly created) nodes to the node repository as provisioned nodes. * If any of the nodes already exists in the deprovisioned state, the new node will be merged * with the history of that node. 
*/ public List<Node> addNodes(List<Node> nodes, Agent agent) { try (Mutex lock = lockUnallocated()) { List<Node> nodesToAdd = new ArrayList<>(); List<Node> nodesToRemove = new ArrayList<>(); for (int i = 0; i < nodes.size(); i++) { var node = nodes.get(i); for (int j = 0; j < i; j++) { if (node.equals(nodes.get(j))) illegal("Cannot add nodes: " + node + " is duplicated in the argument list"); } Optional<Node> existing = node(node.hostname()); if (existing.isPresent()) { if (existing.get().state() != Node.State.deprovisioned) illegal("Cannot add " + node + ": A node with this name already exists"); node = node.with(existing.get().history()); node = node.with(existing.get().reports()); node = node.with(node.status().withFailCount(existing.get().status().failCount())); if (existing.get().status().firmwareVerifiedAt().isPresent()) node = node.with(node.status().withFirmwareVerifiedAt(existing.get().status().firmwareVerifiedAt().get())); boolean rebuilding = existing.get().status().wantToRebuild(); if (rebuilding) { node = node.with(node.status().withWantToRetire(existing.get().status().wantToRetire(), false, rebuilding)); } nodesToRemove.add(existing.get()); } nodesToAdd.add(node); } NestedTransaction transaction = new NestedTransaction(); List<Node> resultingNodes = db.addNodesInState(IP.Config.verify(nodesToAdd, list(lock)), Node.State.provisioned, agent, transaction); db.removeNodes(nodesToRemove, transaction); transaction.commit(); return resultingNodes; } } /** Sets a list of nodes ready and returns the nodes in the ready state */ public List<Node> setReady(List<Node> nodes, Agent agent, String reason) { try (Mutex lock = lockUnallocated()) { List<Node> nodesWithResetFields = nodes.stream() .map(node -> { if (node.state() != Node.State.provisioned && node.state() != Node.State.dirty) illegal("Can not set " + node + " ready. 
It is not provisioned or dirty."); return node.withWantToRetire(false, false, false, Agent.system, clock.instant()); }) .collect(Collectors.toList()); return db.writeTo(Node.State.ready, nodesWithResetFields, agent, Optional.of(reason)); } } public Node setReady(String hostname, Agent agent, String reason) { Node nodeToReady = requireNode(hostname); if (nodeToReady.state() == Node.State.ready) return nodeToReady; return setReady(List.of(nodeToReady), agent, reason).get(0); } /** Reserve nodes. This method does <b>not</b> lock the node repository */ public List<Node> reserve(List<Node> nodes) { return db.writeTo(Node.State.reserved, nodes, Agent.application, Optional.empty()); } /** Activate nodes. This method does <b>not</b> lock the node repository */ public List<Node> activate(List<Node> nodes, NestedTransaction transaction) { return db.writeTo(Node.State.active, nodes, Agent.application, Optional.empty(), transaction); } /** * Sets a list of nodes to have their allocation removable (active to inactive) in the node repository. * * @param application the application the nodes belong to * @param nodes the nodes to make removable. These nodes MUST be in the active state. */ public void setRemovable(ApplicationId application, List<Node> nodes) { try (Mutex lock = lock(application)) { List<Node> removableNodes = nodes.stream() .map(node -> node.with(node.allocation().get().removable(true))) .collect(Collectors.toList()); write(removableNodes, lock); } } /** * Deactivates these nodes in a transaction and returns the nodes in the new state which will hold if the * transaction commits. */ public List<Node> deactivate(List<Node> nodes, ApplicationTransaction transaction) { if ( ! 
zone.environment().isProduction() || zone.system().isCd()) return deallocate(nodes, Agent.application, "Deactivated by application", transaction.nested()); var stateless = NodeList.copyOf(nodes).stateless(); var stateful = NodeList.copyOf(nodes).stateful(); List<Node> written = new ArrayList<>(); written.addAll(deallocate(stateless.asList(), Agent.application, "Deactivated by application", transaction.nested())); written.addAll(db.writeTo(Node.State.inactive, stateful.asList(), Agent.application, Optional.empty(), transaction.nested())); return written; } /** * Fails these nodes in a transaction and returns the nodes in the new state which will hold if the * transaction commits. */ public List<Node> fail(List<Node> nodes, ApplicationTransaction transaction) { return fail(nodes, Agent.application, "Failed by application", transaction.nested()); } public List<Node> fail(List<Node> nodes, Agent agent, String reason) { NestedTransaction transaction = new NestedTransaction(); nodes = fail(nodes, agent, reason, transaction); transaction.commit(); return nodes; } private List<Node> fail(List<Node> nodes, Agent agent, String reason, NestedTransaction transaction) { nodes = nodes.stream() .map(n -> n.withWantToFail(false, agent, clock.instant())) .collect(Collectors.toList()); return db.writeTo(Node.State.failed, nodes, agent, Optional.of(reason), transaction); } /** Move nodes to the dirty state */ public List<Node> deallocate(List<Node> nodes, Agent agent, String reason) { return performOn(NodeList.copyOf(nodes), (node, lock) -> deallocate(node, agent, reason)); } public List<Node> deallocateRecursively(String hostname, Agent agent, String reason) { Node nodeToDirty = node(hostname).orElseThrow(() -> new IllegalArgumentException("Could not deallocate " + hostname + ": Node not found")); List<Node> nodesToDirty = (nodeToDirty.type().isHost() ? 
Stream.concat(list().childrenOf(hostname).asList().stream(), Stream.of(nodeToDirty)) : Stream.of(nodeToDirty)) .filter(node -> node.state() != Node.State.dirty) .collect(Collectors.toList()); List<String> hostnamesNotAllowedToDirty = nodesToDirty.stream() .filter(node -> node.state() != Node.State.provisioned) .filter(node -> node.state() != Node.State.failed) .filter(node -> node.state() != Node.State.parked) .filter(node -> node.state() != Node.State.breakfixed) .map(Node::hostname) .collect(Collectors.toList()); if ( ! hostnamesNotAllowedToDirty.isEmpty()) illegal("Could not deallocate " + nodeToDirty + ": " + hostnamesNotAllowedToDirty + " are not in states [provisioned, failed, parked, breakfixed]"); return nodesToDirty.stream().map(node -> deallocate(node, agent, reason)).collect(Collectors.toList()); } /** * Set a node dirty or parked, allowed if it is in the provisioned, inactive, failed or parked state. * Use this to clean newly provisioned nodes or to recycle failed nodes which have been repaired or put on hold. */ public Node deallocate(Node node, Agent agent, String reason) { NestedTransaction transaction = new NestedTransaction(); Node deallocated = deallocate(node, agent, reason, transaction); transaction.commit(); return deallocated; } public List<Node> deallocate(List<Node> nodes, Agent agent, String reason, NestedTransaction transaction) { return nodes.stream().map(node -> deallocate(node, agent, reason, transaction)).collect(Collectors.toList()); } public Node deallocate(Node node, Agent agent, String reason, NestedTransaction transaction) { if (parkOnDeallocationOf(node, agent)) { return park(node.hostname(), false, agent, reason, transaction); } else { return db.writeTo(Node.State.dirty, List.of(node), agent, Optional.of(reason), transaction).get(0); } } /** * Fails this node and returns it in its new state. 
* * @return the node in its new state * @throws NoSuchNodeException if the node is not found */ public Node fail(String hostname, Agent agent, String reason) { return fail(hostname, true, agent, reason); } public Node fail(String hostname, boolean keepAllocation, Agent agent, String reason) { return move(hostname, Node.State.failed, agent, keepAllocation, Optional.of(reason)); } /** * Fails all the nodes that are children of hostname before finally failing the hostname itself. * Non-active nodes are failed immediately, while active nodes are marked as wantToFail. * The host is failed if it has no active nodes and marked wantToFail if it has. * * @return all the nodes that were changed by this request */ public List<Node> failOrMarkRecursively(String hostname, Agent agent, String reason) { NodeList children = list().childrenOf(hostname); List<Node> changed = performOn(children, (node, lock) -> failOrMark(node, agent, reason, lock)); if (children.state(Node.State.active).isEmpty()) changed.add(move(hostname, Node.State.failed, agent, true, Optional.of(reason))); else changed.addAll(performOn(NodeList.of(node(hostname).orElseThrow()), (node, lock) -> failOrMark(node, agent, reason, lock))); return changed; } private Node failOrMark(Node node, Agent agent, String reason, Mutex lock) { if (node.state() == Node.State.active) { node = node.withWantToFail(true, agent, clock.instant()); write(node, lock); return node; } else { return move(node.hostname(), Node.State.failed, agent, true, Optional.of(reason)); } } /** * Parks this node and returns it in its new state. 
* * @return the node in its new state * @throws NoSuchNodeException if the node is not found */ public Node park(String hostname, boolean keepAllocation, Agent agent, String reason) { NestedTransaction transaction = new NestedTransaction(); Node parked = park(hostname, keepAllocation, agent, reason, transaction); transaction.commit(); return parked; } private Node park(String hostname, boolean keepAllocation, Agent agent, String reason, NestedTransaction transaction) { return move(hostname, Node.State.parked, agent, keepAllocation, Optional.of(reason), transaction); } /** * Parks all the nodes that are children of hostname before finally parking the hostname itself. * * @return List of all the parked nodes in their new state */ public List<Node> parkRecursively(String hostname, Agent agent, String reason) { return moveRecursively(hostname, Node.State.parked, agent, Optional.of(reason)); } /** * Moves a previously failed or parked node back to the active state. * * @return the node in its new state * @throws NoSuchNodeException if the node is not found */ public Node reactivate(String hostname, Agent agent, String reason) { return move(hostname, Node.State.active, agent, true, Optional.of(reason)); } /** * Moves a host to breakfixed state, removing any children. 
*/ public List<Node> breakfixRecursively(String hostname, Agent agent, String reason) { Node node = requireNode(hostname); try (Mutex lock = lockUnallocated()) { requireBreakfixable(node); NestedTransaction transaction = new NestedTransaction(); List<Node> removed = removeChildren(node, false, transaction); removed.add(move(node.hostname(), Node.State.breakfixed, agent, true, Optional.of(reason), transaction)); transaction.commit(); return removed; } } private List<Node> moveRecursively(String hostname, Node.State toState, Agent agent, Optional<String> reason) { NestedTransaction transaction = new NestedTransaction(); List<Node> moved = list().childrenOf(hostname).asList().stream() .map(child -> move(child.hostname(), toState, agent, true, reason, transaction)) .collect(Collectors.toList()); moved.add(move(hostname, toState, agent, true, reason, transaction)); transaction.commit(); return moved; } /** Move a node to given state */ private Node move(String hostname, Node.State toState, Agent agent, boolean keepAllocation, Optional<String> reason) { NestedTransaction transaction = new NestedTransaction(); Node moved = move(hostname, toState, agent, keepAllocation, reason, transaction); transaction.commit(); return moved; } /** Move a node to given state as part of a transaction */ private Node move(String hostname, Node.State toState, Agent agent, boolean keepAllocation, Optional<String> reason, NestedTransaction transaction) { try (NodeMutex lock = lockAndGetRequired(hostname)) { Node node = lock.node(); if (toState == Node.State.active) { if (node.allocation().isEmpty()) illegal("Could not set " + node + " active: It has no allocation"); if (!keepAllocation) illegal("Could not set " + node + " active: Requested to discard allocation"); for (Node currentActive : list(Node.State.active).owner(node.allocation().get().owner())) { if (node.allocation().get().membership().cluster().equals(currentActive.allocation().get().membership().cluster()) && 
node.allocation().get().membership().index() == currentActive.allocation().get().membership().index()) illegal("Could not set " + node + " active: Same cluster and index as " + currentActive); } } if (!keepAllocation && node.allocation().isPresent()) { node = node.withoutAllocation(); } if (toState == Node.State.deprovisioned) { node = node.with(IP.Config.EMPTY); } return db.writeTo(toState, List.of(node), agent, reason, transaction).get(0); } } /* * This method is used by the REST API to handle readying nodes for new allocations. For Linux * containers this will remove the node from node repository, otherwise the node will be moved to state ready. */ public Node markNodeAvailableForNewAllocation(String hostname, Agent agent, String reason) { Node node = requireNode(hostname); if (node.flavor().getType() == Flavor.Type.DOCKER_CONTAINER && node.type() == NodeType.tenant) { if (node.state() != Node.State.dirty) illegal("Cannot make " + node + " available for new allocation as it is not in state [dirty]"); return removeRecursively(node, true).get(0); } if (node.state() == Node.State.ready) return node; Node parentHost = node.parentHostname().flatMap(this::node).orElse(node); List<String> failureReasons = NodeFailer.reasonsToFailHost(parentHost); if ( ! failureReasons.isEmpty()) illegal(node + " cannot be readied because it has hard failures: " + failureReasons); return setReady(List.of(node), agent, reason).get(0); } /** * Removes all the nodes that are children of hostname before finally removing the hostname itself. 
* * @return a List of all the nodes that have been removed or (for hosts) deprovisioned */ public List<Node> removeRecursively(String hostname) { Node node = requireNode(hostname); return removeRecursively(node, false); } public List<Node> removeRecursively(Node node, boolean force) { try (Mutex lock = lockUnallocated()) { requireRemovable(node, false, force); NestedTransaction transaction = new NestedTransaction(); final List<Node> removed; if (!node.type().isHost()) { removed = List.of(node); db.removeNodes(removed, transaction); } else { removed = removeChildren(node, force, transaction); if (zone.getCloud().dynamicProvisioning()) { db.removeNodes(List.of(node), transaction); } else { move(node.hostname(), Node.State.deprovisioned, Agent.system, false, Optional.empty(), transaction); } removed.add(node); } transaction.commit(); return removed; } } /** Forgets a deprovisioned node. This removes all traces of the node in the node repository. */ public void forget(Node node) { if (node.state() != Node.State.deprovisioned) throw new IllegalArgumentException(node + " must be deprovisioned before it can be forgotten"); if (node.status().wantToRebuild()) throw new IllegalArgumentException(node + " is rebuilding and cannot be forgotten"); NestedTransaction transaction = new NestedTransaction(); db.removeNodes(List.of(node), transaction); transaction.commit(); } private List<Node> removeChildren(Node node, boolean force, NestedTransaction transaction) { List<Node> children = list().childrenOf(node).asList(); children.forEach(child -> requireRemovable(child, true, force)); db.removeNodes(children, transaction); return new ArrayList<>(children); } /** * Throws if the given node cannot be removed. 
Removal is allowed if: * - Tenant node: * - non-recursively: node is unallocated * - recursively: node is unallocated or node is in failed|parked * - Host node: iff in state provisioned|failed|parked * - Child node: * - non-recursively: node in state ready * - recursively: child is in state provisioned|failed|parked|dirty|ready */ private void requireRemovable(Node node, boolean removingRecursively, boolean force) { if (force) return; if (node.type() == NodeType.tenant && node.allocation().isPresent()) { EnumSet<Node.State> removableStates = EnumSet.of(Node.State.failed, Node.State.parked); if (!removingRecursively || !removableStates.contains(node.state())) illegal(node + " is currently allocated and cannot be removed while in " + node.state()); } final Set<Node.State> removableStates; if (node.type().isHost()) { removableStates = EnumSet.of(Node.State.provisioned, Node.State.failed, Node.State.parked); } else { removableStates = removingRecursively ? EnumSet.of(Node.State.provisioned, Node.State.failed, Node.State.parked, Node.State.dirty, Node.State.ready) : EnumSet.of(Node.State.ready); } if (!removableStates.contains(node.state())) illegal(node + " can not be removed while in " + node.state()); } /** * Throws if given node cannot be breakfixed. * Breakfix is allowed if the following is true: * - Node is tenant host * - Node is in zone without dynamic provisioning * - Node is in parked or failed state */ private void requireBreakfixable(Node node) { if (zone.getCloud().dynamicProvisioning()) { illegal("Can not breakfix in zone: " + zone); } if (node.type() != NodeType.host) { illegal(node + " can not be breakfixed as it is not a tenant host"); } Set<Node.State> legalStates = EnumSet.of(Node.State.failed, Node.State.parked); if (! legalStates.contains(node.state())) { illegal(node + " can not be removed as it is not in the states " + legalStates); } } /** * Increases the restart generation of the active nodes matching given filter. 
* * @return the nodes in their new state */ public List<Node> restartActive(Predicate<Node> filter) { return restart(NodeFilter.in(Set.of(Node.State.active)).and(filter)); } /** * Increases the restart generation of the any nodes matching given filter. * * @return the nodes in their new state */ public List<Node> restart(Predicate<Node> filter) { return performOn(filter, (node, lock) -> write(node.withRestart(node.allocation().get().restartGeneration().withIncreasedWanted()), lock)); } /** * Increases the reboot generation of the nodes matching the filter. * * @return the nodes in their new state */ public List<Node> reboot(Predicate<Node> filter) { return performOn(filter, (node, lock) -> write(node.withReboot(node.status().reboot().withIncreasedWanted()), lock)); } /** * Set target OS version of all nodes matching given filter. * * @return the nodes in their new state */ public List<Node> upgradeOs(Predicate<Node> filter, Optional<Version> version) { return performOn(filter, (node, lock) -> { var newStatus = node.status().withOsVersion(node.status().osVersion().withWanted(version)); return write(node.with(newStatus), lock); }); } /** Retire nodes matching given filter */ public List<Node> retire(Predicate<Node> filter, Agent agent, Instant instant) { return performOn(filter, (node, lock) -> write(node.withWantToRetire(true, agent, instant), lock)); } /** Retire and deprovision given host and all of its children */ public List<Node> deprovision(String hostname, Agent agent, Instant instant) { return decommission(hostname, DecommissionOperation.deprovision, agent, instant); } /** Retire and rebuild given host and all of its children */ public List<Node> rebuild(String hostname, Agent agent, Instant instant) { return decommission(hostname, DecommissionOperation.rebuild, agent, instant); } private List<Node> decommission(String hostname, DecommissionOperation op, Agent agent, Instant instant) { Optional<NodeMutex> nodeMutex = lockAndGet(hostname); if 
(nodeMutex.isEmpty()) return List.of(); Node host = nodeMutex.get().node(); if (!host.type().isHost()) throw new IllegalArgumentException("Cannot " + op + " non-host " + host); List<Node> result; boolean wantToDeprovision = op == DecommissionOperation.deprovision; boolean wantToRebuild = op == DecommissionOperation.rebuild; try (NodeMutex lock = nodeMutex.get(); Mutex allocationLock = lockUnallocated()) { host = lock.node(); result = performOn(list(allocationLock).childrenOf(host), (node, nodeLock) -> { Node newNode = node.withWantToRetire(true, wantToDeprovision, wantToRebuild, agent, instant); return write(newNode, nodeLock); }); Node newHost = host.withWantToRetire(true, wantToDeprovision, wantToRebuild, agent, instant); result.add(write(newHost, lock)); } return result; } /** * Writes this node after it has changed some internal state but NOT changed its state field. * This does NOT lock the node repository implicitly, but callers are expected to already hold the lock. * * @param lock already acquired lock * @return the written node for convenience */ public Node write(Node node, Mutex lock) { return write(List.of(node), lock).get(0); } /** * Writes these nodes after they have changed some internal state but NOT changed their state field. * This does NOT lock the node repository implicitly, but callers are expected to already hold the lock. * * @param lock already acquired lock * @return the written nodes for convenience */ public List<Node> write(List<Node> nodes, @SuppressWarnings("unused") Mutex lock) { return db.writeTo(nodes, Agent.system, Optional.empty()); } private List<Node> performOn(Predicate<Node> filter, BiFunction<Node, Mutex, Node> action) { return performOn(list().matching(filter), action); } /** * Performs an operation requiring locking on all nodes matching some filter. 
* * @param action the action to perform * @return the set of nodes on which the action was performed, as they became as a result of the operation */ private List<Node> performOn(NodeList nodes, BiFunction<Node, Mutex, Node> action) { List<Node> unallocatedNodes = new ArrayList<>(); ListMap<ApplicationId, Node> allocatedNodes = new ListMap<>(); for (Node node : nodes) { if (node.allocation().isPresent()) allocatedNodes.put(node.allocation().get().owner(), node); else unallocatedNodes.add(node); } List<Node> resultingNodes = new ArrayList<>(); try (Mutex lock = lockUnallocated()) { for (Node node : unallocatedNodes) { Optional<Node> currentNode = db.readNode(node.hostname()); if (currentNode.isEmpty()) continue; resultingNodes.add(action.apply(currentNode.get(), lock)); } } for (Map.Entry<ApplicationId, List<Node>> applicationNodes : allocatedNodes.entrySet()) { try (Mutex lock = lock(applicationNodes.getKey())) { for (Node node : applicationNodes.getValue()) { Optional<Node> currentNode = db.readNode(node.hostname()); if (currentNode.isEmpty()) continue; resultingNodes.add(action.apply(currentNode.get(), lock)); } } } return resultingNodes; } public boolean canAllocateTenantNodeTo(Node host) { return canAllocateTenantNodeTo(host, zone.getCloud().dynamicProvisioning()); } public boolean suspended(Node node) { try { return orchestrator.getNodeStatus(new HostName(node.hostname())).isSuspended(); } catch (HostNameNotFoundException e) { return false; } } /** Create a lock which provides exclusive rights to making changes to the given application */ public Mutex lock(ApplicationId application) { return db.lock(application); } /** Create a lock with a timeout which provides exclusive rights to making changes to the given application */ public Mutex lock(ApplicationId application, Duration timeout) { return db.lock(application, timeout); } /** Create a lock which provides exclusive rights to modifying unallocated nodes */ public Mutex lockUnallocated() { return 
db.lockInactive(); } /** Returns the unallocated/application lock, and the node acquired under that lock. */ public Optional<NodeMutex> lockAndGet(Node node) { Node staleNode = node; final int maxRetries = 4; for (int i = 0; i < maxRetries; ++i) { Mutex lockToClose = lock(staleNode); try { Optional<Node> freshNode = node(staleNode.hostname(), staleNode.state()); if (freshNode.isEmpty()) { freshNode = node(staleNode.hostname()); if (freshNode.isEmpty()) { return Optional.empty(); } } if (Objects.equals(freshNode.get().allocation().map(Allocation::owner), staleNode.allocation().map(Allocation::owner))) { NodeMutex nodeMutex = new NodeMutex(freshNode.get(), lockToClose); lockToClose = null; return Optional.of(nodeMutex); } staleNode = freshNode.get(); } finally { if (lockToClose != null) lockToClose.close(); } } throw new IllegalStateException("Giving up (after " + maxRetries + " attempts) " + "fetching an up to date node under lock: " + node.hostname()); } /** Returns the unallocated/application lock, and the node acquired under that lock. */ public Optional<NodeMutex> lockAndGet(String hostname) { return node(hostname).flatMap(this::lockAndGet); } /** Returns the unallocated/application lock, and the node acquired under that lock. */ public NodeMutex lockAndGetRequired(Node node) { return lockAndGet(node).orElseThrow(() -> new NoSuchNodeException("No node with hostname '" + node.hostname() + "'")); } /** Returns the unallocated/application lock, and the node acquired under that lock. */ public NodeMutex lockAndGetRequired(String hostname) { return lockAndGet(hostname).orElseThrow(() -> new NoSuchNodeException("No node with hostname '" + hostname + "'")); } private Mutex lock(Node node) { return node.allocation().isPresent() ? 
lock(node.allocation().get().owner()) : lockUnallocated(); } private Node requireNode(String hostname) { return node(hostname).orElseThrow(() -> new NoSuchNodeException("No node with hostname '" + hostname + "'")); } private void illegal(String message) { throw new IllegalArgumentException(message); } /** Returns whether node should be parked when deallocated by given agent */ private static boolean parkOnDeallocationOf(Node node, Agent agent) { if (node.state() == Node.State.parked) return false; if (agent == Agent.operator) return false; if (!node.type().isHost() && node.status().wantToDeprovision()) return false; boolean retirementRequestedByOperator = node.status().wantToRetire() && node.history().event(History.Event.Type.wantToRetire) .map(History.Event::agent) .map(a -> a == Agent.operator) .orElse(false); return node.status().wantToDeprovision() || node.status().wantToRebuild() || retirementRequestedByOperator; } /** The different ways a host can be decommissioned */ private enum DecommissionOperation { deprovision, rebuild, } }
class Nodes { private static final Logger log = Logger.getLogger(Nodes.class.getName()); private final CuratorDatabaseClient db; private final Zone zone; private final Clock clock; private final Orchestrator orchestrator; public Nodes(CuratorDatabaseClient db, Zone zone, Clock clock, Orchestrator orchestrator) { this.zone = zone; this.clock = clock; this.db = db; this.orchestrator = orchestrator; } /** Read and write all nodes to make sure they are stored in the latest version of the serialized format */ public void rewrite() { Instant start = clock.instant(); int nodesWritten = 0; for (Node.State state : Node.State.values()) { List<Node> nodes = db.readNodes(state); db.writeTo(state, nodes, Agent.system, Optional.empty()); nodesWritten += nodes.size(); } Instant end = clock.instant(); log.log(Level.INFO, String.format("Rewrote %d nodes in %s", nodesWritten, Duration.between(start, end))); } /** * Finds and returns the node with the hostname in any of the given states, or empty if not found * * @param hostname the full host name of the node * @param inState the states the node may be in. If no states are given, it will be returned from any state * @return the node, or empty if it was not found in any of the given states */ public Optional<Node> node(String hostname, Node.State... inState) { return db.readNode(hostname, inState); } /** * Returns a list of nodes in this repository in any of the given states * * @param inState the states to return nodes from. If no states are given, all nodes of the given type are returned */ public NodeList list(Node.State... inState) { return NodeList.copyOf(db.readNodes(inState)); } /** Returns a locked list of all nodes in this repository */ public LockedNodeList list(Mutex lock) { return new LockedNodeList(list().asList(), lock); } /** * Returns whether the zone managed by this node repository seems to be working. 
* If too many nodes are not responding, there is probably some zone-wide issue * and we should probably refrain from making changes to it. */ public boolean isWorking() { NodeList activeNodes = list(Node.State.active); if (activeNodes.size() <= 5) return true; NodeList downNodes = activeNodes.down(); return ! ( (double)downNodes.size() / (double)activeNodes.size() > 0.2 ); } /** Adds a list of newly created reserved nodes to the node repository */ public List<Node> addReservedNodes(LockedNodeList nodes) { for (Node node : nodes) { if ( node.flavor().getType() != Flavor.Type.DOCKER_CONTAINER) illegal("Cannot add " + node + ": This is not a child node"); if (node.allocation().isEmpty()) illegal("Cannot add " + node + ": Child nodes need to be allocated"); Optional<Node> existing = node(node.hostname()); if (existing.isPresent()) illegal("Cannot add " + node + ": A node with this name already exists (" + existing.get() + ", " + existing.get().history() + "). Node to be added: " + node + ", " + node.history()); } return db.addNodesInState(nodes.asList(), Node.State.reserved, Agent.system); } /** * Adds a list of (newly created) nodes to the node repository as provisioned nodes. * If any of the nodes already exists in the deprovisioned state, the new node will be merged * with the history of that node. 
*/ public List<Node> addNodes(List<Node> nodes, Agent agent) { try (Mutex lock = lockUnallocated()) { List<Node> nodesToAdd = new ArrayList<>(); List<Node> nodesToRemove = new ArrayList<>(); for (int i = 0; i < nodes.size(); i++) { var node = nodes.get(i); for (int j = 0; j < i; j++) { if (node.equals(nodes.get(j))) illegal("Cannot add nodes: " + node + " is duplicated in the argument list"); } Optional<Node> existing = node(node.hostname()); if (existing.isPresent()) { if (existing.get().state() != Node.State.deprovisioned) illegal("Cannot add " + node + ": A node with this name already exists"); node = node.with(existing.get().history()); node = node.with(existing.get().reports()); node = node.with(node.status().withFailCount(existing.get().status().failCount())); if (existing.get().status().firmwareVerifiedAt().isPresent()) node = node.with(node.status().withFirmwareVerifiedAt(existing.get().status().firmwareVerifiedAt().get())); boolean rebuilding = existing.get().status().wantToRebuild(); if (rebuilding) { node = node.with(node.status().withWantToRetire(existing.get().status().wantToRetire(), false, rebuilding)); } nodesToRemove.add(existing.get()); } nodesToAdd.add(node); } NestedTransaction transaction = new NestedTransaction(); List<Node> resultingNodes = db.addNodesInState(IP.Config.verify(nodesToAdd, list(lock)), Node.State.provisioned, agent, transaction); db.removeNodes(nodesToRemove, transaction); transaction.commit(); return resultingNodes; } } /** Sets a list of nodes ready and returns the nodes in the ready state */ public List<Node> setReady(List<Node> nodes, Agent agent, String reason) { try (Mutex lock = lockUnallocated()) { List<Node> nodesWithResetFields = nodes.stream() .map(node -> { if (node.state() != Node.State.provisioned && node.state() != Node.State.dirty) illegal("Can not set " + node + " ready. 
It is not provisioned or dirty."); return node.withWantToRetire(false, false, false, Agent.system, clock.instant()); }) .collect(Collectors.toList()); return db.writeTo(Node.State.ready, nodesWithResetFields, agent, Optional.of(reason)); } } public Node setReady(String hostname, Agent agent, String reason) { Node nodeToReady = requireNode(hostname); if (nodeToReady.state() == Node.State.ready) return nodeToReady; return setReady(List.of(nodeToReady), agent, reason).get(0); } /** Reserve nodes. This method does <b>not</b> lock the node repository */ public List<Node> reserve(List<Node> nodes) { return db.writeTo(Node.State.reserved, nodes, Agent.application, Optional.empty()); } /** Activate nodes. This method does <b>not</b> lock the node repository */ public List<Node> activate(List<Node> nodes, NestedTransaction transaction) { return db.writeTo(Node.State.active, nodes, Agent.application, Optional.empty(), transaction); } /** * Sets a list of nodes to have their allocation removable (active to inactive) in the node repository. * * @param application the application the nodes belong to * @param nodes the nodes to make removable. These nodes MUST be in the active state. */ public void setRemovable(ApplicationId application, List<Node> nodes) { try (Mutex lock = lock(application)) { List<Node> removableNodes = nodes.stream() .map(node -> node.with(node.allocation().get().removable(true))) .collect(Collectors.toList()); write(removableNodes, lock); } } /** * Deactivates these nodes in a transaction and returns the nodes in the new state which will hold if the * transaction commits. */ public List<Node> deactivate(List<Node> nodes, ApplicationTransaction transaction) { if ( ! 
zone.environment().isProduction() || zone.system().isCd()) return deallocate(nodes, Agent.application, "Deactivated by application", transaction.nested()); var stateless = NodeList.copyOf(nodes).stateless(); var stateful = NodeList.copyOf(nodes).stateful(); List<Node> written = new ArrayList<>(); written.addAll(deallocate(stateless.asList(), Agent.application, "Deactivated by application", transaction.nested())); written.addAll(db.writeTo(Node.State.inactive, stateful.asList(), Agent.application, Optional.empty(), transaction.nested())); return written; } /** * Fails these nodes in a transaction and returns the nodes in the new state which will hold if the * transaction commits. */ public List<Node> fail(List<Node> nodes, ApplicationTransaction transaction) { return fail(nodes, Agent.application, "Failed by application", transaction.nested()); } public List<Node> fail(List<Node> nodes, Agent agent, String reason) { NestedTransaction transaction = new NestedTransaction(); nodes = fail(nodes, agent, reason, transaction); transaction.commit(); return nodes; } private List<Node> fail(List<Node> nodes, Agent agent, String reason, NestedTransaction transaction) { nodes = nodes.stream() .map(n -> n.withWantToFail(false, agent, clock.instant())) .collect(Collectors.toList()); return db.writeTo(Node.State.failed, nodes, agent, Optional.of(reason), transaction); } /** Move nodes to the dirty state */ public List<Node> deallocate(List<Node> nodes, Agent agent, String reason) { return performOn(NodeList.copyOf(nodes), (node, lock) -> deallocate(node, agent, reason)); } public List<Node> deallocateRecursively(String hostname, Agent agent, String reason) { Node nodeToDirty = node(hostname).orElseThrow(() -> new IllegalArgumentException("Could not deallocate " + hostname + ": Node not found")); List<Node> nodesToDirty = (nodeToDirty.type().isHost() ? 
Stream.concat(list().childrenOf(hostname).asList().stream(), Stream.of(nodeToDirty)) : Stream.of(nodeToDirty)) .filter(node -> node.state() != Node.State.dirty) .collect(Collectors.toList()); List<String> hostnamesNotAllowedToDirty = nodesToDirty.stream() .filter(node -> node.state() != Node.State.provisioned) .filter(node -> node.state() != Node.State.failed) .filter(node -> node.state() != Node.State.parked) .filter(node -> node.state() != Node.State.breakfixed) .map(Node::hostname) .collect(Collectors.toList()); if ( ! hostnamesNotAllowedToDirty.isEmpty()) illegal("Could not deallocate " + nodeToDirty + ": " + hostnamesNotAllowedToDirty + " are not in states [provisioned, failed, parked, breakfixed]"); return nodesToDirty.stream().map(node -> deallocate(node, agent, reason)).collect(Collectors.toList()); } /** * Set a node dirty or parked, allowed if it is in the provisioned, inactive, failed or parked state. * Use this to clean newly provisioned nodes or to recycle failed nodes which have been repaired or put on hold. */ public Node deallocate(Node node, Agent agent, String reason) { NestedTransaction transaction = new NestedTransaction(); Node deallocated = deallocate(node, agent, reason, transaction); transaction.commit(); return deallocated; } public List<Node> deallocate(List<Node> nodes, Agent agent, String reason, NestedTransaction transaction) { return nodes.stream().map(node -> deallocate(node, agent, reason, transaction)).collect(Collectors.toList()); } public Node deallocate(Node node, Agent agent, String reason, NestedTransaction transaction) { if (parkOnDeallocationOf(node, agent)) { return park(node.hostname(), false, agent, reason, transaction); } else { return db.writeTo(Node.State.dirty, List.of(node), agent, Optional.of(reason), transaction).get(0); } } /** * Fails this node and returns it in its new state. 
* * @return the node in its new state * @throws NoSuchNodeException if the node is not found */ public Node fail(String hostname, Agent agent, String reason) { return fail(hostname, true, agent, reason); } public Node fail(String hostname, boolean keepAllocation, Agent agent, String reason) { return move(hostname, Node.State.failed, agent, keepAllocation, Optional.of(reason)); } /** * Fails all the nodes that are children of hostname before finally failing the hostname itself. * Non-active nodes are failed immediately, while active nodes are marked as wantToFail. * The host is failed if it has no active nodes and marked wantToFail if it has. * * @return all the nodes that were changed by this request */ public List<Node> failOrMarkRecursively(String hostname, Agent agent, String reason) { NodeList children = list().childrenOf(hostname); List<Node> changed = performOn(children, (node, lock) -> failOrMark(node, agent, reason, lock)); if (children.state(Node.State.active).isEmpty()) changed.add(move(hostname, Node.State.failed, agent, true, Optional.of(reason))); else changed.addAll(performOn(NodeList.of(node(hostname).orElseThrow()), (node, lock) -> failOrMark(node, agent, reason, lock))); return changed; } private Node failOrMark(Node node, Agent agent, String reason, Mutex lock) { if (node.state() == Node.State.active) { node = node.withWantToFail(true, agent, clock.instant()); write(node, lock); return node; } else { return move(node.hostname(), Node.State.failed, agent, true, Optional.of(reason)); } } /** * Parks this node and returns it in its new state. 
* * @return the node in its new state * @throws NoSuchNodeException if the node is not found */ public Node park(String hostname, boolean keepAllocation, Agent agent, String reason) { NestedTransaction transaction = new NestedTransaction(); Node parked = park(hostname, keepAllocation, agent, reason, transaction); transaction.commit(); return parked; } private Node park(String hostname, boolean keepAllocation, Agent agent, String reason, NestedTransaction transaction) { return move(hostname, Node.State.parked, agent, keepAllocation, Optional.of(reason), transaction); } /** * Parks all the nodes that are children of hostname before finally parking the hostname itself. * * @return List of all the parked nodes in their new state */ public List<Node> parkRecursively(String hostname, Agent agent, String reason) { return moveRecursively(hostname, Node.State.parked, agent, Optional.of(reason)); } /** * Moves a previously failed or parked node back to the active state. * * @return the node in its new state * @throws NoSuchNodeException if the node is not found */ public Node reactivate(String hostname, Agent agent, String reason) { return move(hostname, Node.State.active, agent, true, Optional.of(reason)); } /** * Moves a host to breakfixed state, removing any children. 
*/ public List<Node> breakfixRecursively(String hostname, Agent agent, String reason) { Node node = requireNode(hostname); try (Mutex lock = lockUnallocated()) { requireBreakfixable(node); NestedTransaction transaction = new NestedTransaction(); List<Node> removed = removeChildren(node, false, transaction); removed.add(move(node.hostname(), Node.State.breakfixed, agent, true, Optional.of(reason), transaction)); transaction.commit(); return removed; } } private List<Node> moveRecursively(String hostname, Node.State toState, Agent agent, Optional<String> reason) { NestedTransaction transaction = new NestedTransaction(); List<Node> moved = list().childrenOf(hostname).asList().stream() .map(child -> move(child.hostname(), toState, agent, true, reason, transaction)) .collect(Collectors.toList()); moved.add(move(hostname, toState, agent, true, reason, transaction)); transaction.commit(); return moved; } /** Move a node to given state */ private Node move(String hostname, Node.State toState, Agent agent, boolean keepAllocation, Optional<String> reason) { NestedTransaction transaction = new NestedTransaction(); Node moved = move(hostname, toState, agent, keepAllocation, reason, transaction); transaction.commit(); return moved; } /** Move a node to given state as part of a transaction */ private Node move(String hostname, Node.State toState, Agent agent, boolean keepAllocation, Optional<String> reason, NestedTransaction transaction) { try (NodeMutex lock = lockAndGetRequired(hostname)) { Node node = lock.node(); if (toState == Node.State.active) { if (node.allocation().isEmpty()) illegal("Could not set " + node + " active: It has no allocation"); if (!keepAllocation) illegal("Could not set " + node + " active: Requested to discard allocation"); for (Node currentActive : list(Node.State.active).owner(node.allocation().get().owner())) { if (node.allocation().get().membership().cluster().equals(currentActive.allocation().get().membership().cluster()) && 
node.allocation().get().membership().index() == currentActive.allocation().get().membership().index()) illegal("Could not set " + node + " active: Same cluster and index as " + currentActive); } } if (!keepAllocation && node.allocation().isPresent()) { node = node.withoutAllocation(); } if (toState == Node.State.deprovisioned) { node = node.with(IP.Config.EMPTY); } return db.writeTo(toState, List.of(node), agent, reason, transaction).get(0); } } /* * This method is used by the REST API to handle readying nodes for new allocations. For Linux * containers this will remove the node from node repository, otherwise the node will be moved to state ready. */ public Node markNodeAvailableForNewAllocation(String hostname, Agent agent, String reason) { Node node = requireNode(hostname); if (node.flavor().getType() == Flavor.Type.DOCKER_CONTAINER && node.type() == NodeType.tenant) { if (node.state() != Node.State.dirty) illegal("Cannot make " + node + " available for new allocation as it is not in state [dirty]"); return removeRecursively(node, true).get(0); } if (node.state() == Node.State.ready) return node; Node parentHost = node.parentHostname().flatMap(this::node).orElse(node); List<String> failureReasons = NodeFailer.reasonsToFailHost(parentHost); if ( ! failureReasons.isEmpty()) illegal(node + " cannot be readied because it has hard failures: " + failureReasons); return setReady(List.of(node), agent, reason).get(0); } /** * Removes all the nodes that are children of hostname before finally removing the hostname itself. 
* * @return a List of all the nodes that have been removed or (for hosts) deprovisioned */ public List<Node> removeRecursively(String hostname) { Node node = requireNode(hostname); return removeRecursively(node, false); } public List<Node> removeRecursively(Node node, boolean force) { try (Mutex lock = lockUnallocated()) { requireRemovable(node, false, force); NestedTransaction transaction = new NestedTransaction(); final List<Node> removed; if (!node.type().isHost()) { removed = List.of(node); db.removeNodes(removed, transaction); } else { removed = removeChildren(node, force, transaction); if (zone.getCloud().dynamicProvisioning()) { db.removeNodes(List.of(node), transaction); } else { move(node.hostname(), Node.State.deprovisioned, Agent.system, false, Optional.empty(), transaction); } removed.add(node); } transaction.commit(); return removed; } } /** Forgets a deprovisioned node. This removes all traces of the node in the node repository. */ public void forget(Node node) { if (node.state() != Node.State.deprovisioned) throw new IllegalArgumentException(node + " must be deprovisioned before it can be forgotten"); if (node.status().wantToRebuild()) throw new IllegalArgumentException(node + " is rebuilding and cannot be forgotten"); NestedTransaction transaction = new NestedTransaction(); db.removeNodes(List.of(node), transaction); transaction.commit(); } private List<Node> removeChildren(Node node, boolean force, NestedTransaction transaction) { List<Node> children = list().childrenOf(node).asList(); children.forEach(child -> requireRemovable(child, true, force)); db.removeNodes(children, transaction); return new ArrayList<>(children); } /** * Throws if the given node cannot be removed. 
Removal is allowed if: * - Tenant node: * - non-recursively: node is unallocated * - recursively: node is unallocated or node is in failed|parked * - Host node: iff in state provisioned|failed|parked * - Child node: * - non-recursively: node in state ready * - recursively: child is in state provisioned|failed|parked|dirty|ready */ private void requireRemovable(Node node, boolean removingRecursively, boolean force) { if (force) return; if (node.type() == NodeType.tenant && node.allocation().isPresent()) { EnumSet<Node.State> removableStates = EnumSet.of(Node.State.failed, Node.State.parked); if (!removingRecursively || !removableStates.contains(node.state())) illegal(node + " is currently allocated and cannot be removed while in " + node.state()); } final Set<Node.State> removableStates; if (node.type().isHost()) { removableStates = EnumSet.of(Node.State.provisioned, Node.State.failed, Node.State.parked); } else { removableStates = removingRecursively ? EnumSet.of(Node.State.provisioned, Node.State.failed, Node.State.parked, Node.State.dirty, Node.State.ready) : EnumSet.of(Node.State.ready); } if (!removableStates.contains(node.state())) illegal(node + " can not be removed while in " + node.state()); } /** * Throws if given node cannot be breakfixed. * Breakfix is allowed if the following is true: * - Node is tenant host * - Node is in zone without dynamic provisioning * - Node is in parked or failed state */ private void requireBreakfixable(Node node) { if (zone.getCloud().dynamicProvisioning()) { illegal("Can not breakfix in zone: " + zone); } if (node.type() != NodeType.host) { illegal(node + " can not be breakfixed as it is not a tenant host"); } Set<Node.State> legalStates = EnumSet.of(Node.State.failed, Node.State.parked); if (! legalStates.contains(node.state())) { illegal(node + " can not be removed as it is not in the states " + legalStates); } } /** * Increases the restart generation of the active nodes matching given filter. 
* * @return the nodes in their new state */ public List<Node> restartActive(Predicate<Node> filter) { return restart(NodeFilter.in(Set.of(Node.State.active)).and(filter)); } /** * Increases the restart generation of the any nodes matching given filter. * * @return the nodes in their new state */ public List<Node> restart(Predicate<Node> filter) { return performOn(filter, (node, lock) -> write(node.withRestart(node.allocation().get().restartGeneration().withIncreasedWanted()), lock)); } /** * Increases the reboot generation of the nodes matching the filter. * * @return the nodes in their new state */ public List<Node> reboot(Predicate<Node> filter) { return performOn(filter, (node, lock) -> write(node.withReboot(node.status().reboot().withIncreasedWanted()), lock)); } /** * Set target OS version of all nodes matching given filter. * * @return the nodes in their new state */ public List<Node> upgradeOs(Predicate<Node> filter, Optional<Version> version) { return performOn(filter, (node, lock) -> { var newStatus = node.status().withOsVersion(node.status().osVersion().withWanted(version)); return write(node.with(newStatus), lock); }); } /** Retire nodes matching given filter */ public List<Node> retire(Predicate<Node> filter, Agent agent, Instant instant) { return performOn(filter, (node, lock) -> write(node.withWantToRetire(true, agent, instant), lock)); } /** Retire and deprovision given host and all of its children */ public List<Node> deprovision(String hostname, Agent agent, Instant instant) { return decommission(hostname, DecommissionOperation.deprovision, agent, instant); } /** Retire and rebuild given host and all of its children */ public List<Node> rebuild(String hostname, Agent agent, Instant instant) { return decommission(hostname, DecommissionOperation.rebuild, agent, instant); } private List<Node> decommission(String hostname, DecommissionOperation op, Agent agent, Instant instant) { Optional<NodeMutex> nodeMutex = lockAndGet(hostname); if 
(nodeMutex.isEmpty()) return List.of(); Node host = nodeMutex.get().node(); if (!host.type().isHost()) throw new IllegalArgumentException("Cannot " + op + " non-host " + host); List<Node> result; boolean wantToDeprovision = op == DecommissionOperation.deprovision; boolean wantToRebuild = op == DecommissionOperation.rebuild; try (NodeMutex lock = nodeMutex.get(); Mutex allocationLock = lockUnallocated()) { host = lock.node(); result = performOn(list(allocationLock).childrenOf(host), (node, nodeLock) -> { Node newNode = node.withWantToRetire(true, wantToDeprovision, wantToRebuild, agent, instant); return write(newNode, nodeLock); }); Node newHost = host.withWantToRetire(true, wantToDeprovision, wantToRebuild, agent, instant); result.add(write(newHost, lock)); } return result; } /** * Writes this node after it has changed some internal state but NOT changed its state field. * This does NOT lock the node repository implicitly, but callers are expected to already hold the lock. * * @param lock already acquired lock * @return the written node for convenience */ public Node write(Node node, Mutex lock) { return write(List.of(node), lock).get(0); } /** * Writes these nodes after they have changed some internal state but NOT changed their state field. * This does NOT lock the node repository implicitly, but callers are expected to already hold the lock. * * @param lock already acquired lock * @return the written nodes for convenience */ public List<Node> write(List<Node> nodes, @SuppressWarnings("unused") Mutex lock) { return db.writeTo(nodes, Agent.system, Optional.empty()); } private List<Node> performOn(Predicate<Node> filter, BiFunction<Node, Mutex, Node> action) { return performOn(list().matching(filter), action); } /** * Performs an operation requiring locking on all nodes matching some filter. 
* * @param action the action to perform * @return the set of nodes on which the action was performed, as they became as a result of the operation */ private List<Node> performOn(NodeList nodes, BiFunction<Node, Mutex, Node> action) { List<Node> unallocatedNodes = new ArrayList<>(); ListMap<ApplicationId, Node> allocatedNodes = new ListMap<>(); for (Node node : nodes) { if (node.allocation().isPresent()) allocatedNodes.put(node.allocation().get().owner(), node); else unallocatedNodes.add(node); } List<Node> resultingNodes = new ArrayList<>(); try (Mutex lock = lockUnallocated()) { for (Node node : unallocatedNodes) { Optional<Node> currentNode = db.readNode(node.hostname()); if (currentNode.isEmpty()) continue; resultingNodes.add(action.apply(currentNode.get(), lock)); } } for (Map.Entry<ApplicationId, List<Node>> applicationNodes : allocatedNodes.entrySet()) { try (Mutex lock = lock(applicationNodes.getKey())) { for (Node node : applicationNodes.getValue()) { Optional<Node> currentNode = db.readNode(node.hostname()); if (currentNode.isEmpty()) continue; resultingNodes.add(action.apply(currentNode.get(), lock)); } } } return resultingNodes; } public boolean canAllocateTenantNodeTo(Node host) { return canAllocateTenantNodeTo(host, zone.getCloud().dynamicProvisioning()); } public boolean suspended(Node node) { try { return orchestrator.getNodeStatus(new HostName(node.hostname())).isSuspended(); } catch (HostNameNotFoundException e) { return false; } } /** Create a lock which provides exclusive rights to making changes to the given application */ public Mutex lock(ApplicationId application) { return db.lock(application); } /** Create a lock with a timeout which provides exclusive rights to making changes to the given application */ public Mutex lock(ApplicationId application, Duration timeout) { return db.lock(application, timeout); } /** Create a lock which provides exclusive rights to modifying unallocated nodes */ public Mutex lockUnallocated() { return 
db.lockInactive(); } /** Returns the unallocated/application lock, and the node acquired under that lock. */ public Optional<NodeMutex> lockAndGet(Node node) { Node staleNode = node; final int maxRetries = 4; for (int i = 0; i < maxRetries; ++i) { Mutex lockToClose = lock(staleNode); try { Optional<Node> freshNode = node(staleNode.hostname(), staleNode.state()); if (freshNode.isEmpty()) { freshNode = node(staleNode.hostname()); if (freshNode.isEmpty()) { return Optional.empty(); } } if (Objects.equals(freshNode.get().allocation().map(Allocation::owner), staleNode.allocation().map(Allocation::owner))) { NodeMutex nodeMutex = new NodeMutex(freshNode.get(), lockToClose); lockToClose = null; return Optional.of(nodeMutex); } staleNode = freshNode.get(); } finally { if (lockToClose != null) lockToClose.close(); } } throw new IllegalStateException("Giving up (after " + maxRetries + " attempts) " + "fetching an up to date node under lock: " + node.hostname()); } /** Returns the unallocated/application lock, and the node acquired under that lock. */ public Optional<NodeMutex> lockAndGet(String hostname) { return node(hostname).flatMap(this::lockAndGet); } /** Returns the unallocated/application lock, and the node acquired under that lock. */ public NodeMutex lockAndGetRequired(Node node) { return lockAndGet(node).orElseThrow(() -> new NoSuchNodeException("No node with hostname '" + node.hostname() + "'")); } /** Returns the unallocated/application lock, and the node acquired under that lock. */ public NodeMutex lockAndGetRequired(String hostname) { return lockAndGet(hostname).orElseThrow(() -> new NoSuchNodeException("No node with hostname '" + hostname + "'")); } private Mutex lock(Node node) { return node.allocation().isPresent() ? 
lock(node.allocation().get().owner()) : lockUnallocated(); } private Node requireNode(String hostname) { return node(hostname).orElseThrow(() -> new NoSuchNodeException("No node with hostname '" + hostname + "'")); } private void illegal(String message) { throw new IllegalArgumentException(message); } /** Returns whether node should be parked when deallocated by given agent */ private static boolean parkOnDeallocationOf(Node node, Agent agent) { if (node.state() == Node.State.parked) return false; if (agent == Agent.operator) return false; if (!node.type().isHost() && node.status().wantToDeprovision()) return false; boolean retirementRequestedByOperator = node.status().wantToRetire() && node.history().event(History.Event.Type.wantToRetire) .map(History.Event::agent) .map(a -> a == Agent.operator) .orElse(false); return node.status().wantToDeprovision() || node.status().wantToRebuild() || retirementRequestedByOperator; } /** The different ways a host can be decommissioned */ private enum DecommissionOperation { deprovision, rebuild, } }
instead of `break`, can we return `value` here? I think it will make things more clear. We can add a `else` block at the end, and just return `value` from else condition.
static Object getValue(JsonNode value) { if (value.isValueNode()) { switch (value.getNodeType()) { case BOOLEAN: return value.asBoolean(); case NUMBER: if (value.isInt()) { return value.asInt(); } else if (value.isLong()) { return value.asLong(); } else if (value.isDouble()) { return value.asDouble(); } break; case STRING: return value.asText(); default: throw new IllegalStateException("Unexpected value: " + value.getNodeType()); } } return value; }
break;
static Object getValue(JsonNode value) { if (value.isValueNode()) { switch (value.getNodeType()) { case BOOLEAN: return value.asBoolean(); case NUMBER: if (value.isInt()) { return value.asInt(); } else if (value.isLong()) { return value.asLong(); } else if (value.isDouble()) { return value.asDouble(); } else{ return value; } case STRING: return value.asText(); default: throw new IllegalStateException("Unexpected value: " + value.getNodeType()); } } return value; }
class JsonSerializable { private static final ObjectMapper OBJECT_MAPPER = Utils.getSimpleObjectMapper(); private final static Logger logger = LoggerFactory.getLogger(JsonSerializable.class); transient ObjectNode propertyBag = null; private ObjectMapper om; protected JsonSerializable() { this.propertyBag = OBJECT_MAPPER.createObjectNode(); } /** * Constructor. * * @param jsonString the json string that represents the JsonSerializable. * @param objectMapper the custom object mapper */ JsonSerializable(String jsonString, ObjectMapper objectMapper) { this.propertyBag = fromJson(jsonString); this.om = objectMapper; } /** * Constructor. * * @param jsonString the json string that represents the JsonSerializable. */ protected JsonSerializable(String jsonString) { this.propertyBag = fromJson(jsonString); } /** * Constructor. * * @param objectNode the {@link ObjectNode} that represent the {@link JsonSerializable} */ JsonSerializable(ObjectNode objectNode) { this.propertyBag = objectNode; } private static void checkForValidPOJO(Class<?> c) { if (c.isAnonymousClass() || c.isLocalClass()) { throw new IllegalArgumentException( String.format("%s can't be an anonymous or local class.", c.getName())); } if (c.isMemberClass() && !Modifier.isStatic(c.getModifiers())) { throw new IllegalArgumentException( String.format("%s must be static if it's a member class.", c.getName())); } } private ObjectMapper getMapper() { if (this.om != null) { return this.om; } return OBJECT_MAPPER; } void setMapper(ObjectMapper om) { this.om = om; } @JsonIgnore protected Logger getLogger() { return logger; } void populatePropertyBag() { } /** * Returns the propertybag(JSONObject) in a hashMap * * @return the HashMap. */ public Map<String, Object> getMap() { return getMapper().convertValue(this.propertyBag, HashMap.class); } /** * Checks whether a property exists. * * @param propertyName the property to look up. * @return true if the property exists. 
*/ public boolean has(String propertyName) { return this.propertyBag.has(propertyName); } /** * Removes a value by propertyName. * * @param propertyName the property to remove. */ void remove(String propertyName) { this.propertyBag.remove(propertyName); } /** * Sets the value of a property. * * @param <T> the type of the object. * @param propertyName the property to set. * @param value the value of the property. */ @SuppressWarnings({"unchecked", "rawtypes"}) <T> void set(String propertyName, T value) { if (value == null) { this.propertyBag.putNull(propertyName); } else if (value instanceof Collection) { ArrayNode jsonArray = propertyBag.arrayNode(); this.internalSetCollection(propertyName, (Collection) value, jsonArray); this.propertyBag.set(propertyName, jsonArray); } else if (value instanceof JsonNode) { this.propertyBag.set(propertyName, (JsonNode) value); } else if (value instanceof JsonSerializable) { JsonSerializable castedValue = (JsonSerializable) value; if (castedValue != null) { castedValue.populatePropertyBag(); } this.propertyBag.set(propertyName, castedValue != null ? castedValue.propertyBag : null); } else { this.propertyBag.set(propertyName, getMapper().valueToTree(value)); } } @SuppressWarnings({"unchecked", "rawtypes"}) private <T> void internalSetCollection(String propertyName, Collection<T> collection, ArrayNode targetArray) { for (T childValue : collection) { if (childValue == null) { targetArray.addNull(); } else if (childValue instanceof Collection) { ArrayNode childArray = targetArray.addArray(); this.internalSetCollection(propertyName, (Collection) childValue, childArray); } else if (childValue instanceof JsonNode) { targetArray.add((JsonNode) childValue); } else if (childValue instanceof JsonSerializable) { JsonSerializable castedValue = (JsonSerializable) childValue; castedValue.populatePropertyBag(); targetArray.add(castedValue.propertyBag != null ? 
castedValue.propertyBag : this.getMapper().createObjectNode()); } else { targetArray.add(this.getMapper().valueToTree(childValue)); } } } /** * Gets a property value as Object. * * @param propertyName the property to get. * @return the value of the property. */ public Object get(String propertyName) { if (this.has(propertyName) && this.propertyBag.hasNonNull(propertyName)) { return getValue(this.propertyBag.get(propertyName)); } else { return null; } } /** * Gets a string value. * * @param propertyName the property to get. * @return the string value. */ public String getString(String propertyName) { if (this.has(propertyName) && this.propertyBag.hasNonNull(propertyName)) { return this.propertyBag.get(propertyName).asText(); } else { return null; } } /** * Gets a boolean value. * * @param propertyName the property to get. * @return the boolean value. */ public Boolean getBoolean(String propertyName) { if (this.has(propertyName) && this.propertyBag.hasNonNull(propertyName)) { return this.propertyBag.get(propertyName).asBoolean(); } else { return null; } } /** * Gets an integer value. * * @param propertyName the property to get. * @return the boolean value */ public Integer getInt(String propertyName) { if (this.has(propertyName) && this.propertyBag.hasNonNull(propertyName)) { return Integer.valueOf(this.propertyBag.get(propertyName).asInt()); } else { return null; } } /** * Gets a long value. * * @param propertyName the property to get. * @return the long value */ public Long getLong(String propertyName) { if (this.has(propertyName) && this.propertyBag.hasNonNull(propertyName)) { return Long.valueOf(this.propertyBag.get(propertyName).asLong()); } else { return null; } } /** * Gets a double value. * * @param propertyName the property to get. * @return the double value. 
*/ public Double getDouble(String propertyName) { if (this.has(propertyName) && this.propertyBag.hasNonNull(propertyName)) { return new Double(this.propertyBag.get(propertyName).asDouble()); } else { return null; } } /** * Gets an object value. * * @param <T> the type of the object. * @param propertyName the property to get. * @param c the class of the object. If c is a POJO class, it must be a member (and not an anonymous or local) * and a static one. * @param convertFromCamelCase boolean indicating if String should be converted from camel case to upper case * separated by underscore, * before converting to required class. * @return the object value. * @throws IllegalStateException thrown if an error occurs */ public <T> T getObject(String propertyName, Class<T> c, boolean... convertFromCamelCase) { if (this.propertyBag.has(propertyName) && this.propertyBag.hasNonNull(propertyName)) { JsonNode jsonObj = propertyBag.get(propertyName); if (Number.class.isAssignableFrom(c) || String.class.isAssignableFrom(c) || Boolean.class.isAssignableFrom(c) || Object.class == c) { return c.cast(getValue(jsonObj)); } else if (Enum.class.isAssignableFrom(c)) { try { String value = String.class.cast(getValue(jsonObj)); value = convertFromCamelCase.length > 0 && convertFromCamelCase[0] ? 
Strings.fromCamelCaseToUpperCase(value) : value; return c.cast(c.getMethod("valueOf", String.class).invoke(null, value)); } catch (IllegalAccessException | IllegalArgumentException | InvocationTargetException | NoSuchMethodException | SecurityException e) { throw new IllegalStateException("Failed to create enum.", e); } } else if (JsonSerializable.class.isAssignableFrom(c)) { try { Constructor<T> constructor = c.getDeclaredConstructor(String.class); if (Modifier.isPrivate(constructor.getModifiers())) { constructor.setAccessible(true); } return constructor.newInstance(toJson(jsonObj)); } catch (InstantiationException | IllegalAccessException | IllegalArgumentException | InvocationTargetException | NoSuchMethodException | SecurityException e) { throw new IllegalStateException( "Failed to instantiate class object.", e); } } else { JsonSerializable.checkForValidPOJO(c); try { return this.getMapper().treeToValue(jsonObj, c); } catch (IOException e) { throw new IllegalStateException("Failed to get POJO.", e); } } } return null; } /** * Gets an object List. * * @param <T> the type of the objects in the List. * @param propertyName the property to get * @param c the class of the object. If c is a POJO class, it must be a member (and not an anonymous or local) * and a static one. * @param convertFromCamelCase boolean indicating if String should be converted from camel case to upper case * separated by underscore, * before converting to required class. * @return the object collection. * @throws IllegalStateException thrown if an error occurs */ public <T> List<T> getList(String propertyName, Class<T> c, boolean... 
convertFromCamelCase) { if (this.propertyBag.has(propertyName) && this.propertyBag.hasNonNull(propertyName)) { ArrayNode jsonArray = (ArrayNode) this.propertyBag.get(propertyName); ArrayList<T> result = new ArrayList<T>(); boolean isBaseClass = false; boolean isEnumClass = false; boolean isJsonSerializable = false; if (Number.class.isAssignableFrom(c) || String.class.isAssignableFrom(c) || Boolean.class.isAssignableFrom(c) || Object.class == c) { isBaseClass = true; } else if (Enum.class.isAssignableFrom(c)) { isEnumClass = true; } else if (JsonSerializable.class.isAssignableFrom(c)) { isJsonSerializable = true; } else { JsonSerializable.checkForValidPOJO(c); } for (JsonNode n : jsonArray) { if (isBaseClass) { result.add(c.cast(getValue(n))); } else if (isEnumClass) { try { String value = String.class.cast(getValue(n)); value = convertFromCamelCase.length > 0 && convertFromCamelCase[0] ? Strings.fromCamelCaseToUpperCase(value) : value; result.add(c.cast(c.getMethod("valueOf", String.class).invoke(null, value))); } catch (IllegalAccessException | IllegalArgumentException | InvocationTargetException | NoSuchMethodException | SecurityException e) { throw new IllegalStateException("Failed to create enum.", e); } } else if (isJsonSerializable) { try { Constructor<T> constructor = c.getDeclaredConstructor(String.class); if (Modifier.isPrivate(constructor.getModifiers())) { constructor.setAccessible(true); } result.add(constructor.newInstance(toJson(n))); } catch (InstantiationException | IllegalAccessException | IllegalArgumentException | InvocationTargetException | NoSuchMethodException | SecurityException e) { throw new IllegalStateException( "Failed to instantiate class object.", e); } } else { try { result.add(this.getMapper().treeToValue(n, c)); } catch (IOException e) { throw new IllegalStateException("Failed to get POJO.", e); } } } return result; } return null; } /** * Gets an object collection. * * @param <T> the type of the objects in the collection. 
* @param propertyName the property to get * @param c the class of the object. If c is a POJO class, it must be a member (and not an anonymous or local) * and a static one. * @param convertFromCamelCase boolean indicating if String should be converted from camel case to upper case * separated by underscore, * before converting to required class. * @return the object collection. */ public <T> Collection<T> getCollection(String propertyName, Class<T> c, boolean... convertFromCamelCase) { return getList(propertyName, c, convertFromCamelCase); } /** * Gets a JSONObject. * * @param propertyName the property to get. * @return the JSONObject. */ ObjectNode getObject(String propertyName) { if (this.propertyBag.has(propertyName) && this.propertyBag.hasNonNull(propertyName)) { ObjectNode jsonObj = (ObjectNode) this.propertyBag.get(propertyName); return jsonObj; } return null; } /** * Gets a JSONObject collection. * * @param propertyName the property to get. * @return the JSONObject collection. */ Collection<ObjectNode> getCollection(String propertyName) { Collection<ObjectNode> result = null; if (this.propertyBag.has(propertyName) && this.propertyBag.hasNonNull(propertyName)) { result = new ArrayList<ObjectNode>(); for (JsonNode n : this.propertyBag.findValues(propertyName)) { result.add((ObjectNode) n); } } return result; } /** * Gets the value of a property identified by an array of property names that forms the path. * * @param propertyNames that form the path to the property to get. * @return the value of the property. 
*/ public Object getObjectByPath(List<String> propertyNames) { ObjectNode propBag = this.propertyBag; JsonNode value = null; String propertyName = null; Integer matchedProperties = 0; Iterator<String> iterator = propertyNames.iterator(); if (iterator.hasNext()) { do { propertyName = iterator.next(); if (propBag.has(propertyName)) { matchedProperties++; value = propBag.get(propertyName); if (!value.isObject()) { break; } propBag = (ObjectNode) value; } else { break; } } while (iterator.hasNext()); if (value != null && matchedProperties == propertyNames.size()) { return getValue(value); } } return null; } private ObjectNode fromJson(String json) { try { return (ObjectNode) getMapper().readTree(json); } catch (IOException e) { throw new IllegalArgumentException( String.format("Unable to parse JSON %s", json), e); } } private String toJson(Object object) { try { return getMapper().writeValueAsString(object); } catch (JsonProcessingException e) { throw new IllegalStateException("Unable to convert JSON to STRING", e); } } private String toPrettyJson(Object object) { try { return getMapper().writerWithDefaultPrettyPrinter().writeValueAsString(object); } catch (JsonProcessingException e) { throw new IllegalStateException("Unable to convert JSON to STRING", e); } } /** * Converts to an Object (only POJOs and JSONObject are supported). * * @param <T> the type of the object. * @param c the class of the object, either a POJO class or JSONObject. If c is a POJO class, it must be a member * (and not an anonymous or local) and a static one. * @return the POJO. 
* @throws IllegalArgumentException thrown if an error occurs */ public <T> T toObject(Class<T> c) { if (CosmosItemProperties.class.isAssignableFrom(c)) { return (T) new CosmosItemProperties(this.toJson()); } if (JsonSerializable.class.isAssignableFrom(c) || String.class.isAssignableFrom(c) || Number.class.isAssignableFrom(c) || Boolean.class.isAssignableFrom(c)) { return c.cast(this.get(Constants.Properties.VALUE)); } if (List.class.isAssignableFrom(c)) { Object o = this.get(Constants.Properties.VALUE); try { return this.getMapper().readValue(o.toString(), c); } catch (IOException e) { throw new IllegalStateException("Failed to convert to collection.", e); } } if (ObjectNode.class.isAssignableFrom(c)) { if (ObjectNode.class != c) { throw new IllegalArgumentException( "We support JSONObject but not its sub-classes."); } return c.cast(this.propertyBag); } else { JsonSerializable.checkForValidPOJO(c); try { return this.getMapper().readValue(this.toJson(), c); } catch (IOException e) { throw new IllegalStateException("Failed to get POJO.", e); } } } /** * Converts to a JSON string. * * @return the JSON string. */ public String toJson() { return this.toJson(SerializationFormattingPolicy.NONE); } /** * Converts to a JSON string. * * @param formattingPolicy the formatting policy to be used. * @return the JSON string. */ public String toJson(SerializationFormattingPolicy formattingPolicy) { this.populatePropertyBag(); if (SerializationFormattingPolicy.INDENTED.equals(formattingPolicy)) { return toPrettyJson(propertyBag); } else { return toJson(propertyBag); } } /** * Gets Simple STRING representation of property bag. * <p> * For proper conversion to json and inclusion of the default values * use {@link * * @return string representation of property bag. */ public String toString() { return toJson(propertyBag); } }
class JsonSerializable { private static final ObjectMapper OBJECT_MAPPER = Utils.getSimpleObjectMapper(); private final static Logger logger = LoggerFactory.getLogger(JsonSerializable.class); transient ObjectNode propertyBag = null; private ObjectMapper om; protected JsonSerializable() { this.propertyBag = OBJECT_MAPPER.createObjectNode(); } /** * Constructor. * * @param jsonString the json string that represents the JsonSerializable. * @param objectMapper the custom object mapper */ JsonSerializable(String jsonString, ObjectMapper objectMapper) { this.propertyBag = fromJson(jsonString); this.om = objectMapper; } /** * Constructor. * * @param jsonString the json string that represents the JsonSerializable. */ protected JsonSerializable(String jsonString) { this.propertyBag = fromJson(jsonString); } /** * Constructor. * * @param objectNode the {@link ObjectNode} that represent the {@link JsonSerializable} */ JsonSerializable(ObjectNode objectNode) { this.propertyBag = objectNode; } private static void checkForValidPOJO(Class<?> c) { if (c.isAnonymousClass() || c.isLocalClass()) { throw new IllegalArgumentException( String.format("%s can't be an anonymous or local class.", c.getName())); } if (c.isMemberClass() && !Modifier.isStatic(c.getModifiers())) { throw new IllegalArgumentException( String.format("%s must be static if it's a member class.", c.getName())); } } private ObjectMapper getMapper() { if (this.om != null) { return this.om; } return OBJECT_MAPPER; } void setMapper(ObjectMapper om) { this.om = om; } @JsonIgnore protected Logger getLogger() { return logger; } void populatePropertyBag() { } /** * Returns the propertybag(JSONObject) in a hashMap * * @return the HashMap. */ public Map<String, Object> getMap() { return getMapper().convertValue(this.propertyBag, HashMap.class); } /** * Checks whether a property exists. * * @param propertyName the property to look up. * @return true if the property exists. 
*/ public boolean has(String propertyName) { return this.propertyBag.has(propertyName); } /** * Removes a value by propertyName. * * @param propertyName the property to remove. */ void remove(String propertyName) { this.propertyBag.remove(propertyName); } /** * Sets the value of a property. * * @param <T> the type of the object. * @param propertyName the property to set. * @param value the value of the property. */ @SuppressWarnings({"unchecked", "rawtypes"}) <T> void set(String propertyName, T value) { if (value == null) { this.propertyBag.putNull(propertyName); } else if (value instanceof Collection) { ArrayNode jsonArray = propertyBag.arrayNode(); this.internalSetCollection(propertyName, (Collection) value, jsonArray); this.propertyBag.set(propertyName, jsonArray); } else if (value instanceof JsonNode) { this.propertyBag.set(propertyName, (JsonNode) value); } else if (value instanceof JsonSerializable) { JsonSerializable castedValue = (JsonSerializable) value; if (castedValue != null) { castedValue.populatePropertyBag(); } this.propertyBag.set(propertyName, castedValue != null ? castedValue.propertyBag : null); } else { this.propertyBag.set(propertyName, getMapper().valueToTree(value)); } } @SuppressWarnings({"unchecked", "rawtypes"}) private <T> void internalSetCollection(String propertyName, Collection<T> collection, ArrayNode targetArray) { for (T childValue : collection) { if (childValue == null) { targetArray.addNull(); } else if (childValue instanceof Collection) { ArrayNode childArray = targetArray.addArray(); this.internalSetCollection(propertyName, (Collection) childValue, childArray); } else if (childValue instanceof JsonNode) { targetArray.add((JsonNode) childValue); } else if (childValue instanceof JsonSerializable) { JsonSerializable castedValue = (JsonSerializable) childValue; castedValue.populatePropertyBag(); targetArray.add(castedValue.propertyBag != null ? 
castedValue.propertyBag : this.getMapper().createObjectNode()); } else { targetArray.add(this.getMapper().valueToTree(childValue)); } } } /** * Gets a property value as Object. * * @param propertyName the property to get. * @return the value of the property. */ public Object get(String propertyName) { if (this.has(propertyName) && this.propertyBag.hasNonNull(propertyName)) { return getValue(this.propertyBag.get(propertyName)); } else { return null; } } /** * Gets a string value. * * @param propertyName the property to get. * @return the string value. */ public String getString(String propertyName) { if (this.has(propertyName) && this.propertyBag.hasNonNull(propertyName)) { return this.propertyBag.get(propertyName).asText(); } else { return null; } } /** * Gets a boolean value. * * @param propertyName the property to get. * @return the boolean value. */ public Boolean getBoolean(String propertyName) { if (this.has(propertyName) && this.propertyBag.hasNonNull(propertyName)) { return this.propertyBag.get(propertyName).asBoolean(); } else { return null; } } /** * Gets an integer value. * * @param propertyName the property to get. * @return the boolean value */ public Integer getInt(String propertyName) { if (this.has(propertyName) && this.propertyBag.hasNonNull(propertyName)) { return Integer.valueOf(this.propertyBag.get(propertyName).asInt()); } else { return null; } } /** * Gets a long value. * * @param propertyName the property to get. * @return the long value */ public Long getLong(String propertyName) { if (this.has(propertyName) && this.propertyBag.hasNonNull(propertyName)) { return Long.valueOf(this.propertyBag.get(propertyName).asLong()); } else { return null; } } /** * Gets a double value. * * @param propertyName the property to get. * @return the double value. 
*/ public Double getDouble(String propertyName) { if (this.has(propertyName) && this.propertyBag.hasNonNull(propertyName)) { return new Double(this.propertyBag.get(propertyName).asDouble()); } else { return null; } } /** * Gets an object value. * * @param <T> the type of the object. * @param propertyName the property to get. * @param c the class of the object. If c is a POJO class, it must be a member (and not an anonymous or local) * and a static one. * @param convertFromCamelCase boolean indicating if String should be converted from camel case to upper case * separated by underscore, * before converting to required class. * @return the object value. * @throws IllegalStateException thrown if an error occurs */ public <T> T getObject(String propertyName, Class<T> c, boolean... convertFromCamelCase) { if (this.propertyBag.has(propertyName) && this.propertyBag.hasNonNull(propertyName)) { JsonNode jsonObj = propertyBag.get(propertyName); if (Number.class.isAssignableFrom(c) || String.class.isAssignableFrom(c) || Boolean.class.isAssignableFrom(c) || Object.class == c) { return c.cast(getValue(jsonObj)); } else if (Enum.class.isAssignableFrom(c)) { try { String value = String.class.cast(getValue(jsonObj)); value = convertFromCamelCase.length > 0 && convertFromCamelCase[0] ? 
Strings.fromCamelCaseToUpperCase(value) : value; return c.cast(c.getMethod("valueOf", String.class).invoke(null, value)); } catch (IllegalAccessException | IllegalArgumentException | InvocationTargetException | NoSuchMethodException | SecurityException e) { throw new IllegalStateException("Failed to create enum.", e); } } else if (JsonSerializable.class.isAssignableFrom(c)) { try { Constructor<T> constructor = c.getDeclaredConstructor(String.class); if (Modifier.isPrivate(constructor.getModifiers())) { constructor.setAccessible(true); } return constructor.newInstance(toJson(jsonObj)); } catch (InstantiationException | IllegalAccessException | IllegalArgumentException | InvocationTargetException | NoSuchMethodException | SecurityException e) { throw new IllegalStateException( "Failed to instantiate class object.", e); } } else { JsonSerializable.checkForValidPOJO(c); try { return this.getMapper().treeToValue(jsonObj, c); } catch (IOException e) { throw new IllegalStateException("Failed to get POJO.", e); } } } return null; } /** * Gets an object List. * * @param <T> the type of the objects in the List. * @param propertyName the property to get * @param c the class of the object. If c is a POJO class, it must be a member (and not an anonymous or local) * and a static one. * @param convertFromCamelCase boolean indicating if String should be converted from camel case to upper case * separated by underscore, * before converting to required class. * @return the object collection. * @throws IllegalStateException thrown if an error occurs */ public <T> List<T> getList(String propertyName, Class<T> c, boolean... 
convertFromCamelCase) { if (this.propertyBag.has(propertyName) && this.propertyBag.hasNonNull(propertyName)) { ArrayNode jsonArray = (ArrayNode) this.propertyBag.get(propertyName); ArrayList<T> result = new ArrayList<T>(); boolean isBaseClass = false; boolean isEnumClass = false; boolean isJsonSerializable = false; if (Number.class.isAssignableFrom(c) || String.class.isAssignableFrom(c) || Boolean.class.isAssignableFrom(c) || Object.class == c) { isBaseClass = true; } else if (Enum.class.isAssignableFrom(c)) { isEnumClass = true; } else if (JsonSerializable.class.isAssignableFrom(c)) { isJsonSerializable = true; } else { JsonSerializable.checkForValidPOJO(c); } for (JsonNode n : jsonArray) { if (isBaseClass) { result.add(c.cast(getValue(n))); } else if (isEnumClass) { try { String value = String.class.cast(getValue(n)); value = convertFromCamelCase.length > 0 && convertFromCamelCase[0] ? Strings.fromCamelCaseToUpperCase(value) : value; result.add(c.cast(c.getMethod("valueOf", String.class).invoke(null, value))); } catch (IllegalAccessException | IllegalArgumentException | InvocationTargetException | NoSuchMethodException | SecurityException e) { throw new IllegalStateException("Failed to create enum.", e); } } else if (isJsonSerializable) { try { Constructor<T> constructor = c.getDeclaredConstructor(String.class); if (Modifier.isPrivate(constructor.getModifiers())) { constructor.setAccessible(true); } result.add(constructor.newInstance(toJson(n))); } catch (InstantiationException | IllegalAccessException | IllegalArgumentException | InvocationTargetException | NoSuchMethodException | SecurityException e) { throw new IllegalStateException( "Failed to instantiate class object.", e); } } else { try { result.add(this.getMapper().treeToValue(n, c)); } catch (IOException e) { throw new IllegalStateException("Failed to get POJO.", e); } } } return result; } return null; } /** * Gets an object collection. * * @param <T> the type of the objects in the collection. 
* @param propertyName the property to get * @param c the class of the object. If c is a POJO class, it must be a member (and not an anonymous or local) * and a static one. * @param convertFromCamelCase boolean indicating if String should be converted from camel case to upper case * separated by underscore, * before converting to required class. * @return the object collection. */ public <T> Collection<T> getCollection(String propertyName, Class<T> c, boolean... convertFromCamelCase) { return getList(propertyName, c, convertFromCamelCase); } /** * Gets a JSONObject. * * @param propertyName the property to get. * @return the JSONObject. */ ObjectNode getObject(String propertyName) { if (this.propertyBag.has(propertyName) && this.propertyBag.hasNonNull(propertyName)) { ObjectNode jsonObj = (ObjectNode) this.propertyBag.get(propertyName); return jsonObj; } return null; } /** * Gets a JSONObject collection. * * @param propertyName the property to get. * @return the JSONObject collection. */ Collection<ObjectNode> getCollection(String propertyName) { Collection<ObjectNode> result = null; if (this.propertyBag.has(propertyName) && this.propertyBag.hasNonNull(propertyName)) { result = new ArrayList<ObjectNode>(); for (JsonNode n : this.propertyBag.findValues(propertyName)) { result.add((ObjectNode) n); } } return result; } /** * Gets the value of a property identified by an array of property names that forms the path. * * @param propertyNames that form the path to the property to get. * @return the value of the property. 
*/ public Object getObjectByPath(List<String> propertyNames) { ObjectNode propBag = this.propertyBag; JsonNode value = null; String propertyName = null; Integer matchedProperties = 0; Iterator<String> iterator = propertyNames.iterator(); if (iterator.hasNext()) { do { propertyName = iterator.next(); if (propBag.has(propertyName)) { matchedProperties++; value = propBag.get(propertyName); if (!value.isObject()) { break; } propBag = (ObjectNode) value; } else { break; } } while (iterator.hasNext()); if (value != null && matchedProperties == propertyNames.size()) { return getValue(value); } } return null; } private ObjectNode fromJson(String json) { try { return (ObjectNode) getMapper().readTree(json); } catch (IOException e) { throw new IllegalArgumentException( String.format("Unable to parse JSON %s", json), e); } } private String toJson(Object object) { try { return getMapper().writeValueAsString(object); } catch (JsonProcessingException e) { throw new IllegalStateException("Unable to convert JSON to STRING", e); } } private String toPrettyJson(Object object) { try { return getMapper().writerWithDefaultPrettyPrinter().writeValueAsString(object); } catch (JsonProcessingException e) { throw new IllegalStateException("Unable to convert JSON to STRING", e); } } /** * Converts to an Object (only POJOs and JSONObject are supported). * * @param <T> the type of the object. * @param c the class of the object, either a POJO class or JSONObject. If c is a POJO class, it must be a member * (and not an anonymous or local) and a static one. * @return the POJO. 
* @throws IllegalArgumentException thrown if an error occurs */ public <T> T toObject(Class<T> c) { if (CosmosItemProperties.class.isAssignableFrom(c)) { return (T) new CosmosItemProperties(this.toJson()); } if (JsonSerializable.class.isAssignableFrom(c) || String.class.isAssignableFrom(c) || Number.class.isAssignableFrom(c) || Boolean.class.isAssignableFrom(c)) { return c.cast(this.get(Constants.Properties.VALUE)); } if (List.class.isAssignableFrom(c)) { Object o = this.get(Constants.Properties.VALUE); try { return this.getMapper().readValue(o.toString(), c); } catch (IOException e) { throw new IllegalStateException("Failed to convert to collection.", e); } } if (ObjectNode.class.isAssignableFrom(c)) { if (ObjectNode.class != c) { throw new IllegalArgumentException( "We support JSONObject but not its sub-classes."); } return c.cast(this.propertyBag); } else { JsonSerializable.checkForValidPOJO(c); try { return this.getMapper().readValue(this.toJson(), c); } catch (IOException e) { throw new IllegalStateException("Failed to get POJO.", e); } } } /** * Converts to a JSON string. * * @return the JSON string. */ public String toJson() { return this.toJson(SerializationFormattingPolicy.NONE); } /** * Converts to a JSON string. * * @param formattingPolicy the formatting policy to be used. * @return the JSON string. */ public String toJson(SerializationFormattingPolicy formattingPolicy) { this.populatePropertyBag(); if (SerializationFormattingPolicy.INDENTED.equals(formattingPolicy)) { return toPrettyJson(propertyBag); } else { return toJson(propertyBag); } } /** * Gets Simple STRING representation of property bag. * <p> * For proper conversion to json and inclusion of the default values * use {@link * * @return string representation of property bag. */ public String toString() { return toJson(propertyBag); } }
Revert this part back to line 144. It will minimize changes.
public boolean start() throws IOException { final int defaultPartitionInitTimeout = 60 * 1000; final int kafkaRequestTimeoutMultiple = 2; Read<K, V> spec = source.getSpec(); consumer = spec.getConsumerFactoryFn().apply(spec.getConsumerConfig()); consumerSpEL.evaluateAssign(consumer, spec.getTopicPartitions()); try { keyDeserializerInstance = spec.getKeyDeserializer().getDeclaredConstructor().newInstance(); valueDeserializerInstance = spec.getValueDeserializer().getDeclaredConstructor().newInstance(); } catch (InstantiationException | IllegalAccessException | InvocationTargetException | NoSuchMethodException e) { throw new IOException("Could not instantiate deserializers", e); } keyDeserializerInstance.configure(spec.getConsumerConfig(), true); valueDeserializerInstance.configure(spec.getConsumerConfig(), false); for (final PartitionState pState : partitionStates) { Future<?> future = consumerPollThread.submit(() -> setupInitialOffset(pState)); try { Integer reqTimeout = (Integer) spec.getConsumerConfig().get(ConsumerConfig.REQUEST_TIMEOUT_MS_CONFIG); future.get( reqTimeout != null ? kafkaRequestTimeoutMultiple * reqTimeout : defaultPartitionInitTimeout, TimeUnit.MILLISECONDS); } catch (TimeoutException e) { consumer.wakeup(); String msg = String.format( "%s: Timeout while initializing partition '%s'. 
" + "Kafka client may not be able to connect to servers.", this, pState.topicPartition); LOG.error("{}", msg); throw new IOException(msg); } catch (Exception e) { throw new IOException(e); } LOG.info( "{}: reading from {} starting at offset {}", name, pState.topicPartition, pState.nextOffset); } consumerPollThread.submit(this::consumerPollLoop); Map<String, Object> offsetConsumerConfig = new HashMap<>(spec.getConsumerConfig()); offsetConsumerConfig.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false); offsetConsumerConfig.put(ConsumerConfig.ISOLATION_LEVEL_CONFIG, "read_uncommitted"); Object groupId = spec.getConsumerConfig().get(ConsumerConfig.GROUP_ID_CONFIG); String offsetGroupId = String.format( "%s_offset_consumer_%d_%s", name, (new Random()).nextInt(Integer.MAX_VALUE), (groupId == null ? "none" : groupId)); offsetConsumerConfig.put(ConsumerConfig.GROUP_ID_CONFIG, offsetGroupId); if (spec.getOffsetConsumerConfig() != null) { offsetConsumerConfig.putAll(spec.getOffsetConsumerConfig()); } offsetConsumer = spec.getConsumerFactoryFn().apply(offsetConsumerConfig); consumerSpEL.evaluateAssign(offsetConsumer, spec.getTopicPartitions()); updateLatestOffsets(); offsetFetcherThread.scheduleAtFixedRate( this::updateLatestOffsets, 0, OFFSET_UPDATE_INTERVAL_SECONDS, TimeUnit.SECONDS); return advance(); }
Object groupId = spec.getConsumerConfig().get(ConsumerConfig.GROUP_ID_CONFIG);
public boolean start() throws IOException { final int defaultPartitionInitTimeout = 60 * 1000; final int kafkaRequestTimeoutMultiple = 2; Read<K, V> spec = source.getSpec(); consumer = spec.getConsumerFactoryFn().apply(spec.getConsumerConfig()); consumerSpEL.evaluateAssign(consumer, spec.getTopicPartitions()); try { keyDeserializerInstance = spec.getKeyDeserializer().getDeclaredConstructor().newInstance(); valueDeserializerInstance = spec.getValueDeserializer().getDeclaredConstructor().newInstance(); } catch (InstantiationException | IllegalAccessException | InvocationTargetException | NoSuchMethodException e) { throw new IOException("Could not instantiate deserializers", e); } keyDeserializerInstance.configure(spec.getConsumerConfig(), true); valueDeserializerInstance.configure(spec.getConsumerConfig(), false); for (final PartitionState pState : partitionStates) { Future<?> future = consumerPollThread.submit(() -> setupInitialOffset(pState)); try { Integer reqTimeout = (Integer) spec.getConsumerConfig().get(ConsumerConfig.REQUEST_TIMEOUT_MS_CONFIG); future.get( reqTimeout != null ? kafkaRequestTimeoutMultiple * reqTimeout : defaultPartitionInitTimeout, TimeUnit.MILLISECONDS); } catch (TimeoutException e) { consumer.wakeup(); String msg = String.format( "%s: Timeout while initializing partition '%s'. 
" + "Kafka client may not be able to connect to servers.", this, pState.topicPartition); LOG.error("{}", msg); throw new IOException(msg); } catch (Exception e) { throw new IOException(e); } LOG.info( "{}: reading from {} starting at offset {}", name, pState.topicPartition, pState.nextOffset); } consumerPollThread.submit(this::consumerPollLoop); Map<String, Object> offsetConsumerConfig = getOffsetConsumerConfig(); offsetConsumer = spec.getConsumerFactoryFn().apply(offsetConsumerConfig); consumerSpEL.evaluateAssign(offsetConsumer, spec.getTopicPartitions()); updateLatestOffsets(); offsetFetcherThread.scheduleAtFixedRate( this::updateLatestOffsets, 0, OFFSET_UPDATE_INTERVAL_SECONDS, TimeUnit.SECONDS); return advance(); }
class KafkaUnboundedReader<K, V> extends UnboundedReader<KafkaRecord<K, V>> { @SuppressWarnings("FutureReturnValueIgnored") @Override @Override public boolean advance() throws IOException { /* Read first record (if any). we need to loop here because : * - (a) some records initially need to be skipped if they are before consumedOffset * - (b) if curBatch is empty, we want to fetch next batch and then advance. * - (c) curBatch is an iterator of iterators. we interleave the records from each. * curBatch.next() might return an empty iterator. */ while (true) { if (curBatch.hasNext()) { PartitionState<K, V> pState = curBatch.next(); if (!pState.recordIter.hasNext()) { pState.recordIter = Collections.emptyIterator(); curBatch.remove(); continue; } elementsRead.inc(); elementsReadBySplit.inc(); ConsumerRecord<byte[], byte[]> rawRecord = pState.recordIter.next(); long expected = pState.nextOffset; long offset = rawRecord.offset(); if (offset < expected) { LOG.warn( "{}: ignoring already consumed offset {} for {}", this, offset, pState.topicPartition); continue; } long offsetGap = offset - expected; if (curRecord == null) { LOG.info("{}: first record offset {}", name, offset); offsetGap = 0; } KafkaRecord<K, V> record = new KafkaRecord<>( rawRecord.topic(), rawRecord.partition(), rawRecord.offset(), consumerSpEL.getRecordTimestamp(rawRecord), consumerSpEL.getRecordTimestampType(rawRecord), ConsumerSpEL.hasHeaders ? rawRecord.headers() : null, keyDeserializerInstance.deserialize(rawRecord.topic(), rawRecord.key()), valueDeserializerInstance.deserialize(rawRecord.topic(), rawRecord.value())); curTimestamp = pState.timestampPolicy.getTimestampForRecord(pState.mkTimestampPolicyContext(), record); curRecord = record; int recordSize = (rawRecord.key() == null ? 0 : rawRecord.key().length) + (rawRecord.value() == null ? 
0 : rawRecord.value().length); pState.recordConsumed(offset, recordSize, offsetGap); bytesRead.inc(recordSize); bytesReadBySplit.inc(recordSize); return true; } else { nextBatch(); if (!curBatch.hasNext()) { return false; } } } } @Override public Instant getWatermark() { if (source.getSpec().getWatermarkFn() != null) { if (curRecord == null) { LOG.debug("{}: getWatermark() : no records have been read yet.", name); return initialWatermark; } return source.getSpec().getWatermarkFn().apply(curRecord); } return partitionStates.stream() .map(PartitionState::updateAndGetWatermark) .min(Comparator.naturalOrder()) .get(); } @Override public CheckpointMark getCheckpointMark() { reportBacklog(); return new KafkaCheckpointMark( partitionStates.stream() .map( p -> new PartitionMark( p.topicPartition.topic(), p.topicPartition.partition(), p.nextOffset, p.lastWatermark.getMillis())) .collect(Collectors.toList()), source.getSpec().isCommitOffsetsInFinalizeEnabled() ? Optional.of(this) : Optional.empty()); } @Override public UnboundedSource<KafkaRecord<K, V>, ?> getCurrentSource() { return source; } @Override public KafkaRecord<K, V> getCurrent() throws NoSuchElementException { return curRecord; } @Override public Instant getCurrentTimestamp() throws NoSuchElementException { return curTimestamp; } @Override public long getSplitBacklogBytes() { long backlogBytes = 0; for (PartitionState p : partitionStates) { long pBacklog = p.approxBacklogInBytes(); if (pBacklog == UnboundedReader.BACKLOG_UNKNOWN) { return UnboundedReader.BACKLOG_UNKNOWN; } backlogBytes += pBacklog; } return backlogBytes; } private static final Logger LOG = LoggerFactory.getLogger(KafkaUnboundedSource.class); @VisibleForTesting static final String METRIC_NAMESPACE = "KafkaIOReader"; @VisibleForTesting static final String CHECKPOINT_MARK_COMMITS_ENQUEUED_METRIC = "checkpointMarkCommitsEnqueued"; private static final String CHECKPOINT_MARK_COMMITS_SKIPPED_METRIC = "checkpointMarkCommitsSkipped"; private final 
KafkaUnboundedSource<K, V> source; private final String name; private Consumer<byte[], byte[]> consumer; private final List<PartitionState<K, V>> partitionStates; private KafkaRecord<K, V> curRecord; private Instant curTimestamp; private Iterator<PartitionState<K, V>> curBatch = Collections.emptyIterator(); private Deserializer<K> keyDeserializerInstance = null; private Deserializer<V> valueDeserializerInstance = null; private final Counter elementsRead = SourceMetrics.elementsRead(); private final Counter bytesRead = SourceMetrics.bytesRead(); private final Counter elementsReadBySplit; private final Counter bytesReadBySplit; private final Gauge backlogBytesOfSplit; private final Gauge backlogElementsOfSplit; private final Counter checkpointMarkCommitsEnqueued = Metrics.counter(METRIC_NAMESPACE, CHECKPOINT_MARK_COMMITS_ENQUEUED_METRIC); private final Counter checkpointMarkCommitsSkipped = Metrics.counter(METRIC_NAMESPACE, CHECKPOINT_MARK_COMMITS_SKIPPED_METRIC); /** * The poll timeout while reading records from Kafka. If option to commit reader offsets in to * Kafka in {@link KafkaCheckpointMark * this poll returns. It should be reasonably low as a result. At the same time it probably can't * be very low like 10 millis, I am not sure how it affects when the latency is high. Probably * good to experiment. Often multiple marks would be finalized in a batch, it it reduce * finalization overhead to wait a short while and finalize only the last checkpoint mark. 
*/ private static final Duration KAFKA_POLL_TIMEOUT = Duration.millis(1000); private static final Duration RECORDS_DEQUEUE_POLL_TIMEOUT = Duration.millis(10); private static final Duration RECORDS_ENQUEUE_POLL_TIMEOUT = Duration.millis(100); private final ExecutorService consumerPollThread = Executors.newSingleThreadExecutor(); private AtomicReference<Exception> consumerPollException = new AtomicReference<>(); private final SynchronousQueue<ConsumerRecords<byte[], byte[]>> availableRecordsQueue = new SynchronousQueue<>(); private AtomicReference<KafkaCheckpointMark> finalizedCheckpointMark = new AtomicReference<>(); private AtomicBoolean closed = new AtomicBoolean(false); private Consumer<byte[], byte[]> offsetConsumer; private final ScheduledExecutorService offsetFetcherThread = Executors.newSingleThreadScheduledExecutor(); private static final int OFFSET_UPDATE_INTERVAL_SECONDS = 1; private static final long UNINITIALIZED_OFFSET = -1; private transient ConsumerSpEL consumerSpEL; /** watermark before any records have been read. 
*/ private static Instant initialWatermark = BoundedWindow.TIMESTAMP_MIN_VALUE; @Override public String toString() { return name; } private static class MovingAvg { private static final int MOVING_AVG_WINDOW = 1000; private double avg = 0; private long numUpdates = 0; void update(double quantity) { numUpdates++; avg += (quantity - avg) / Math.min(MOVING_AVG_WINDOW, numUpdates); } double get() { return avg; } } private static class TimestampPolicyContext extends TimestampPolicy.PartitionContext { private final long messageBacklog; private final Instant backlogCheckTime; TimestampPolicyContext(long messageBacklog, Instant backlogCheckTime) { this.messageBacklog = messageBacklog; this.backlogCheckTime = backlogCheckTime; } @Override public long getMessageBacklog() { return messageBacklog; } @Override public Instant getBacklogCheckTime() { return backlogCheckTime; } } private static class PartitionState<K, V> { private final TopicPartition topicPartition; private long nextOffset; private long latestOffset; private Instant latestOffsetFetchTime; private Instant lastWatermark; private final TimestampPolicy<K, V> timestampPolicy; private Iterator<ConsumerRecord<byte[], byte[]>> recordIter = Collections.emptyIterator(); private MovingAvg avgRecordSize = new MovingAvg(); private MovingAvg avgOffsetGap = new MovingAvg(); PartitionState( TopicPartition partition, long nextOffset, TimestampPolicy<K, V> timestampPolicy) { this.topicPartition = partition; this.nextOffset = nextOffset; this.latestOffset = UNINITIALIZED_OFFSET; this.latestOffsetFetchTime = BoundedWindow.TIMESTAMP_MIN_VALUE; this.lastWatermark = BoundedWindow.TIMESTAMP_MIN_VALUE; this.timestampPolicy = timestampPolicy; } void recordConsumed(long offset, int size, long offsetGap) { nextOffset = offset + 1; avgRecordSize.update(size); avgOffsetGap.update(offsetGap); } synchronized void setLatestOffset(long latestOffset, Instant fetchTime) { this.latestOffset = latestOffset; this.latestOffsetFetchTime = fetchTime; 
LOG.debug( "{}: latest offset update for {} : {} (consumer offset {}, avg record size {})", this, topicPartition, latestOffset, nextOffset, avgRecordSize); } synchronized long approxBacklogInBytes() { long backlogMessageCount = backlogMessageCount(); if (backlogMessageCount == UnboundedReader.BACKLOG_UNKNOWN) { return UnboundedReader.BACKLOG_UNKNOWN; } return (long) (backlogMessageCount * avgRecordSize.get()); } synchronized long backlogMessageCount() { if (latestOffset < 0 || nextOffset < 0) { return UnboundedReader.BACKLOG_UNKNOWN; } double remaining = (latestOffset - nextOffset) / (1 + avgOffsetGap.get()); return Math.max(0, (long) Math.ceil(remaining)); } synchronized TimestampPolicyContext mkTimestampPolicyContext() { return new TimestampPolicyContext(backlogMessageCount(), latestOffsetFetchTime); } Instant updateAndGetWatermark() { lastWatermark = timestampPolicy.getWatermark(mkTimestampPolicyContext()); return lastWatermark; } } KafkaUnboundedReader( KafkaUnboundedSource<K, V> source, @Nullable KafkaCheckpointMark checkpointMark) { this.consumerSpEL = new ConsumerSpEL(); this.source = source; this.name = "Reader-" + source.getId(); List<TopicPartition> partitions = source.getSpec().getTopicPartitions(); List<PartitionState<K, V>> states = new ArrayList<>(partitions.size()); if (checkpointMark != null) { checkState( checkpointMark.getPartitions().size() == partitions.size(), "checkPointMark and assignedPartitions should match"); } for (int i = 0; i < partitions.size(); i++) { TopicPartition tp = partitions.get(i); long nextOffset = UNINITIALIZED_OFFSET; Optional<Instant> prevWatermark = Optional.empty(); if (checkpointMark != null) { PartitionMark ckptMark = checkpointMark.getPartitions().get(i); TopicPartition partition = new TopicPartition(ckptMark.getTopic(), ckptMark.getPartition()); checkState( partition.equals(tp), "checkpointed partition %s and assigned partition %s don't match", partition, tp); nextOffset = ckptMark.getNextOffset(); prevWatermark = 
Optional.of(new Instant(ckptMark.getWatermarkMillis())); } states.add( new PartitionState<>( tp, nextOffset, source .getSpec() .getTimestampPolicyFactory() .createTimestampPolicy(tp, prevWatermark))); } partitionStates = ImmutableList.copyOf(states); String splitId = String.valueOf(source.getId()); elementsReadBySplit = SourceMetrics.elementsReadBySplit(splitId); bytesReadBySplit = SourceMetrics.bytesReadBySplit(splitId); backlogBytesOfSplit = SourceMetrics.backlogBytesOfSplit(splitId); backlogElementsOfSplit = SourceMetrics.backlogElementsOfSplit(splitId); } private void consumerPollLoop() { try { ConsumerRecords<byte[], byte[]> records = ConsumerRecords.empty(); while (!closed.get()) { try { if (records.isEmpty()) { records = consumer.poll(KAFKA_POLL_TIMEOUT.getMillis()); } else if (availableRecordsQueue.offer( records, RECORDS_ENQUEUE_POLL_TIMEOUT.getMillis(), TimeUnit.MILLISECONDS)) { records = ConsumerRecords.empty(); } KafkaCheckpointMark checkpointMark = finalizedCheckpointMark.getAndSet(null); if (checkpointMark != null) { commitCheckpointMark(checkpointMark); } } catch (InterruptedException e) { LOG.warn("{}: consumer thread is interrupted", this, e); break; } catch (WakeupException e) { break; } } LOG.info("{}: Returning from consumer pool loop", this); } catch (Exception e) { LOG.error("{}: Exception while reading from Kafka", this, e); consumerPollException.set(e); throw e; } } private void commitCheckpointMark(KafkaCheckpointMark checkpointMark) { LOG.debug("{}: Committing finalized checkpoint {}", this, checkpointMark); consumer.commitSync( checkpointMark.getPartitions().stream() .filter(p -> p.getNextOffset() != UNINITIALIZED_OFFSET) .collect( Collectors.toMap( p -> new TopicPartition(p.getTopic(), p.getPartition()), p -> new OffsetAndMetadata(p.getNextOffset())))); } /** * Enqueue checkpoint mark to be committed to Kafka. This does not block until it is committed. * There could be a delay of up to KAFKA_POLL_TIMEOUT (1 second). 
Any checkpoint mark enqueued * earlier is dropped in favor of this checkpoint mark. Documentation for {@link * CheckpointMark * need to be committed. */ void finalizeCheckpointMarkAsync(KafkaCheckpointMark checkpointMark) { if (finalizedCheckpointMark.getAndSet(checkpointMark) != null) { checkpointMarkCommitsSkipped.inc(); } checkpointMarkCommitsEnqueued.inc(); } private void nextBatch() throws IOException { curBatch = Collections.emptyIterator(); ConsumerRecords<byte[], byte[]> records; try { records = availableRecordsQueue.poll( RECORDS_DEQUEUE_POLL_TIMEOUT.getMillis(), TimeUnit.MILLISECONDS); } catch (InterruptedException e) { Thread.currentThread().interrupt(); LOG.warn("{}: Unexpected", this, e); return; } if (records == null) { if (consumerPollException.get() != null) { throw new IOException("Exception while reading from Kafka", consumerPollException.get()); } return; } partitionStates.forEach(p -> p.recordIter = records.records(p.topicPartition).iterator()); curBatch = Iterators.cycle(new ArrayList<>(partitionStates)); } private void setupInitialOffset(PartitionState pState) { Read<K, V> spec = source.getSpec(); if (pState.nextOffset != UNINITIALIZED_OFFSET) { consumer.seek(pState.topicPartition, pState.nextOffset); } else { Instant startReadTime = spec.getStartReadTime(); if (startReadTime != null) { pState.nextOffset = consumerSpEL.offsetForTime(consumer, pState.topicPartition, spec.getStartReadTime()); consumer.seek(pState.topicPartition, pState.nextOffset); } else { pState.nextOffset = consumer.position(pState.topicPartition); } } } private void updateLatestOffsets() { for (PartitionState p : partitionStates) { try { Instant fetchTime = Instant.now(); consumerSpEL.evaluateSeek2End(offsetConsumer, p.topicPartition); long offset = offsetConsumer.position(p.topicPartition); p.setLatestOffset(offset, fetchTime); } catch (Exception e) { if (closed.get()) { break; } LOG.warn( "{}: exception while fetching latest offset for partition {}. 
will be retried.", this, p.topicPartition, e); } } LOG.debug("{}: backlog {}", this, getSplitBacklogBytes()); } private void reportBacklog() { long splitBacklogBytes = getSplitBacklogBytes(); if (splitBacklogBytes < 0) { splitBacklogBytes = UnboundedReader.BACKLOG_UNKNOWN; } backlogBytesOfSplit.set(splitBacklogBytes); long splitBacklogMessages = getSplitBacklogMessageCount(); if (splitBacklogMessages < 0) { splitBacklogMessages = UnboundedReader.BACKLOG_UNKNOWN; } backlogElementsOfSplit.set(splitBacklogMessages); } private long getSplitBacklogMessageCount() { long backlogCount = 0; for (PartitionState p : partitionStates) { long pBacklog = p.backlogMessageCount(); if (pBacklog == UnboundedReader.BACKLOG_UNKNOWN) { return UnboundedReader.BACKLOG_UNKNOWN; } backlogCount += pBacklog; } return backlogCount; } @Override public void close() throws IOException { closed.set(true); consumerPollThread.shutdown(); offsetFetcherThread.shutdown(); boolean isShutdown = false; while (!isShutdown) { if (consumer != null) { consumer.wakeup(); } if (offsetConsumer != null) { offsetConsumer.wakeup(); } availableRecordsQueue.poll(); try { isShutdown = consumerPollThread.awaitTermination(10, TimeUnit.SECONDS) && offsetFetcherThread.awaitTermination(10, TimeUnit.SECONDS); } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw new RuntimeException(e); } if (!isShutdown) { LOG.warn("An internal thread is taking a long time to shutdown. will retry."); } } Closeables.close(keyDeserializerInstance, true); Closeables.close(valueDeserializerInstance, true); Closeables.close(offsetConsumer, true); Closeables.close(consumer, true); } }
class KafkaUnboundedReader<K, V> extends UnboundedReader<KafkaRecord<K, V>> { @SuppressWarnings("FutureReturnValueIgnored") @Override @Override public boolean advance() throws IOException { /* Read first record (if any). we need to loop here because : * - (a) some records initially need to be skipped if they are before consumedOffset * - (b) if curBatch is empty, we want to fetch next batch and then advance. * - (c) curBatch is an iterator of iterators. we interleave the records from each. * curBatch.next() might return an empty iterator. */ while (true) { if (curBatch.hasNext()) { PartitionState<K, V> pState = curBatch.next(); if (!pState.recordIter.hasNext()) { pState.recordIter = Collections.emptyIterator(); curBatch.remove(); continue; } elementsRead.inc(); elementsReadBySplit.inc(); ConsumerRecord<byte[], byte[]> rawRecord = pState.recordIter.next(); long expected = pState.nextOffset; long offset = rawRecord.offset(); if (offset < expected) { LOG.warn( "{}: ignoring already consumed offset {} for {}", this, offset, pState.topicPartition); continue; } long offsetGap = offset - expected; if (curRecord == null) { LOG.info("{}: first record offset {}", name, offset); offsetGap = 0; } KafkaRecord<K, V> record = new KafkaRecord<>( rawRecord.topic(), rawRecord.partition(), rawRecord.offset(), consumerSpEL.getRecordTimestamp(rawRecord), consumerSpEL.getRecordTimestampType(rawRecord), ConsumerSpEL.hasHeaders ? rawRecord.headers() : null, keyDeserializerInstance.deserialize(rawRecord.topic(), rawRecord.key()), valueDeserializerInstance.deserialize(rawRecord.topic(), rawRecord.value())); curTimestamp = pState.timestampPolicy.getTimestampForRecord(pState.mkTimestampPolicyContext(), record); curRecord = record; int recordSize = (rawRecord.key() == null ? 0 : rawRecord.key().length) + (rawRecord.value() == null ? 
0 : rawRecord.value().length); pState.recordConsumed(offset, recordSize, offsetGap); bytesRead.inc(recordSize); bytesReadBySplit.inc(recordSize); return true; } else { nextBatch(); if (!curBatch.hasNext()) { return false; } } } } @Override public Instant getWatermark() { if (source.getSpec().getWatermarkFn() != null) { if (curRecord == null) { LOG.debug("{}: getWatermark() : no records have been read yet.", name); return initialWatermark; } return source.getSpec().getWatermarkFn().apply(curRecord); } return partitionStates.stream() .map(PartitionState::updateAndGetWatermark) .min(Comparator.naturalOrder()) .get(); } @Override public CheckpointMark getCheckpointMark() { reportBacklog(); return new KafkaCheckpointMark( partitionStates.stream() .map( p -> new PartitionMark( p.topicPartition.topic(), p.topicPartition.partition(), p.nextOffset, p.lastWatermark.getMillis())) .collect(Collectors.toList()), source.getSpec().isCommitOffsetsInFinalizeEnabled() ? Optional.of(this) : Optional.empty()); } @Override public UnboundedSource<KafkaRecord<K, V>, ?> getCurrentSource() { return source; } @Override public KafkaRecord<K, V> getCurrent() throws NoSuchElementException { return curRecord; } @Override public Instant getCurrentTimestamp() throws NoSuchElementException { return curTimestamp; } @Override public long getSplitBacklogBytes() { long backlogBytes = 0; for (PartitionState p : partitionStates) { long pBacklog = p.approxBacklogInBytes(); if (pBacklog == UnboundedReader.BACKLOG_UNKNOWN) { return UnboundedReader.BACKLOG_UNKNOWN; } backlogBytes += pBacklog; } return backlogBytes; } private static final Logger LOG = LoggerFactory.getLogger(KafkaUnboundedSource.class); @VisibleForTesting static final String METRIC_NAMESPACE = "KafkaIOReader"; @VisibleForTesting static final String CHECKPOINT_MARK_COMMITS_ENQUEUED_METRIC = "checkpointMarkCommitsEnqueued"; private static final String CHECKPOINT_MARK_COMMITS_SKIPPED_METRIC = "checkpointMarkCommitsSkipped"; private final 
KafkaUnboundedSource<K, V> source; private final String name; private Consumer<byte[], byte[]> consumer; private final List<PartitionState<K, V>> partitionStates; private KafkaRecord<K, V> curRecord; private Instant curTimestamp; private Iterator<PartitionState<K, V>> curBatch = Collections.emptyIterator(); private Deserializer<K> keyDeserializerInstance = null; private Deserializer<V> valueDeserializerInstance = null; private final Counter elementsRead = SourceMetrics.elementsRead(); private final Counter bytesRead = SourceMetrics.bytesRead(); private final Counter elementsReadBySplit; private final Counter bytesReadBySplit; private final Gauge backlogBytesOfSplit; private final Gauge backlogElementsOfSplit; private final Counter checkpointMarkCommitsEnqueued = Metrics.counter(METRIC_NAMESPACE, CHECKPOINT_MARK_COMMITS_ENQUEUED_METRIC); private final Counter checkpointMarkCommitsSkipped = Metrics.counter(METRIC_NAMESPACE, CHECKPOINT_MARK_COMMITS_SKIPPED_METRIC); /** * The poll timeout while reading records from Kafka. If option to commit reader offsets in to * Kafka in {@link KafkaCheckpointMark * this poll returns. It should be reasonably low as a result. At the same time it probably can't * be very low like 10 millis, I am not sure how it affects when the latency is high. Probably * good to experiment. Often multiple marks would be finalized in a batch, it it reduce * finalization overhead to wait a short while and finalize only the last checkpoint mark. 
*/ private static final Duration KAFKA_POLL_TIMEOUT = Duration.millis(1000); private static final Duration RECORDS_DEQUEUE_POLL_TIMEOUT = Duration.millis(10); private static final Duration RECORDS_ENQUEUE_POLL_TIMEOUT = Duration.millis(100); private final ExecutorService consumerPollThread = Executors.newSingleThreadExecutor(); private AtomicReference<Exception> consumerPollException = new AtomicReference<>(); private final SynchronousQueue<ConsumerRecords<byte[], byte[]>> availableRecordsQueue = new SynchronousQueue<>(); private AtomicReference<KafkaCheckpointMark> finalizedCheckpointMark = new AtomicReference<>(); private AtomicBoolean closed = new AtomicBoolean(false); private Consumer<byte[], byte[]> offsetConsumer; private final ScheduledExecutorService offsetFetcherThread = Executors.newSingleThreadScheduledExecutor(); private static final int OFFSET_UPDATE_INTERVAL_SECONDS = 1; private static final long UNINITIALIZED_OFFSET = -1; private transient ConsumerSpEL consumerSpEL; /** watermark before any records have been read. 
*/ private static Instant initialWatermark = BoundedWindow.TIMESTAMP_MIN_VALUE; @Override public String toString() { return name; } private static class MovingAvg { private static final int MOVING_AVG_WINDOW = 1000; private double avg = 0; private long numUpdates = 0; void update(double quantity) { numUpdates++; avg += (quantity - avg) / Math.min(MOVING_AVG_WINDOW, numUpdates); } double get() { return avg; } } private static class TimestampPolicyContext extends TimestampPolicy.PartitionContext { private final long messageBacklog; private final Instant backlogCheckTime; TimestampPolicyContext(long messageBacklog, Instant backlogCheckTime) { this.messageBacklog = messageBacklog; this.backlogCheckTime = backlogCheckTime; } @Override public long getMessageBacklog() { return messageBacklog; } @Override public Instant getBacklogCheckTime() { return backlogCheckTime; } } private static class PartitionState<K, V> { private final TopicPartition topicPartition; private long nextOffset; private long latestOffset; private Instant latestOffsetFetchTime; private Instant lastWatermark; private final TimestampPolicy<K, V> timestampPolicy; private Iterator<ConsumerRecord<byte[], byte[]>> recordIter = Collections.emptyIterator(); private MovingAvg avgRecordSize = new MovingAvg(); private MovingAvg avgOffsetGap = new MovingAvg(); PartitionState( TopicPartition partition, long nextOffset, TimestampPolicy<K, V> timestampPolicy) { this.topicPartition = partition; this.nextOffset = nextOffset; this.latestOffset = UNINITIALIZED_OFFSET; this.latestOffsetFetchTime = BoundedWindow.TIMESTAMP_MIN_VALUE; this.lastWatermark = BoundedWindow.TIMESTAMP_MIN_VALUE; this.timestampPolicy = timestampPolicy; } void recordConsumed(long offset, int size, long offsetGap) { nextOffset = offset + 1; avgRecordSize.update(size); avgOffsetGap.update(offsetGap); } synchronized void setLatestOffset(long latestOffset, Instant fetchTime) { this.latestOffset = latestOffset; this.latestOffsetFetchTime = fetchTime; 
LOG.debug( "{}: latest offset update for {} : {} (consumer offset {}, avg record size {})", this, topicPartition, latestOffset, nextOffset, avgRecordSize); } synchronized long approxBacklogInBytes() { long backlogMessageCount = backlogMessageCount(); if (backlogMessageCount == UnboundedReader.BACKLOG_UNKNOWN) { return UnboundedReader.BACKLOG_UNKNOWN; } return (long) (backlogMessageCount * avgRecordSize.get()); } synchronized long backlogMessageCount() { if (latestOffset < 0 || nextOffset < 0) { return UnboundedReader.BACKLOG_UNKNOWN; } double remaining = (latestOffset - nextOffset) / (1 + avgOffsetGap.get()); return Math.max(0, (long) Math.ceil(remaining)); } synchronized TimestampPolicyContext mkTimestampPolicyContext() { return new TimestampPolicyContext(backlogMessageCount(), latestOffsetFetchTime); } Instant updateAndGetWatermark() { lastWatermark = timestampPolicy.getWatermark(mkTimestampPolicyContext()); return lastWatermark; } } KafkaUnboundedReader( KafkaUnboundedSource<K, V> source, @Nullable KafkaCheckpointMark checkpointMark) { this.consumerSpEL = new ConsumerSpEL(); this.source = source; this.name = "Reader-" + source.getId(); List<TopicPartition> partitions = source.getSpec().getTopicPartitions(); List<PartitionState<K, V>> states = new ArrayList<>(partitions.size()); if (checkpointMark != null) { checkState( checkpointMark.getPartitions().size() == partitions.size(), "checkPointMark and assignedPartitions should match"); } for (int i = 0; i < partitions.size(); i++) { TopicPartition tp = partitions.get(i); long nextOffset = UNINITIALIZED_OFFSET; Optional<Instant> prevWatermark = Optional.empty(); if (checkpointMark != null) { PartitionMark ckptMark = checkpointMark.getPartitions().get(i); TopicPartition partition = new TopicPartition(ckptMark.getTopic(), ckptMark.getPartition()); checkState( partition.equals(tp), "checkpointed partition %s and assigned partition %s don't match", partition, tp); nextOffset = ckptMark.getNextOffset(); prevWatermark = 
Optional.of(new Instant(ckptMark.getWatermarkMillis())); } states.add( new PartitionState<>( tp, nextOffset, source .getSpec() .getTimestampPolicyFactory() .createTimestampPolicy(tp, prevWatermark))); } partitionStates = ImmutableList.copyOf(states); String splitId = String.valueOf(source.getId()); elementsReadBySplit = SourceMetrics.elementsReadBySplit(splitId); bytesReadBySplit = SourceMetrics.bytesReadBySplit(splitId); backlogBytesOfSplit = SourceMetrics.backlogBytesOfSplit(splitId); backlogElementsOfSplit = SourceMetrics.backlogElementsOfSplit(splitId); } private void consumerPollLoop() { try { ConsumerRecords<byte[], byte[]> records = ConsumerRecords.empty(); while (!closed.get()) { try { if (records.isEmpty()) { records = consumer.poll(KAFKA_POLL_TIMEOUT.getMillis()); } else if (availableRecordsQueue.offer( records, RECORDS_ENQUEUE_POLL_TIMEOUT.getMillis(), TimeUnit.MILLISECONDS)) { records = ConsumerRecords.empty(); } KafkaCheckpointMark checkpointMark = finalizedCheckpointMark.getAndSet(null); if (checkpointMark != null) { commitCheckpointMark(checkpointMark); } } catch (InterruptedException e) { LOG.warn("{}: consumer thread is interrupted", this, e); break; } catch (WakeupException e) { break; } } LOG.info("{}: Returning from consumer pool loop", this); } catch (Exception e) { LOG.error("{}: Exception while reading from Kafka", this, e); consumerPollException.set(e); throw e; } } private void commitCheckpointMark(KafkaCheckpointMark checkpointMark) { LOG.debug("{}: Committing finalized checkpoint {}", this, checkpointMark); consumer.commitSync( checkpointMark.getPartitions().stream() .filter(p -> p.getNextOffset() != UNINITIALIZED_OFFSET) .collect( Collectors.toMap( p -> new TopicPartition(p.getTopic(), p.getPartition()), p -> new OffsetAndMetadata(p.getNextOffset())))); } /** * Enqueue checkpoint mark to be committed to Kafka. This does not block until it is committed. * There could be a delay of up to KAFKA_POLL_TIMEOUT (1 second). 
Any checkpoint mark enqueued * earlier is dropped in favor of this checkpoint mark. Documentation for {@link * CheckpointMark * need to be committed. */ void finalizeCheckpointMarkAsync(KafkaCheckpointMark checkpointMark) { if (finalizedCheckpointMark.getAndSet(checkpointMark) != null) { checkpointMarkCommitsSkipped.inc(); } checkpointMarkCommitsEnqueued.inc(); } private void nextBatch() throws IOException { curBatch = Collections.emptyIterator(); ConsumerRecords<byte[], byte[]> records; try { records = availableRecordsQueue.poll( RECORDS_DEQUEUE_POLL_TIMEOUT.getMillis(), TimeUnit.MILLISECONDS); } catch (InterruptedException e) { Thread.currentThread().interrupt(); LOG.warn("{}: Unexpected", this, e); return; } if (records == null) { if (consumerPollException.get() != null) { throw new IOException("Exception while reading from Kafka", consumerPollException.get()); } return; } partitionStates.forEach(p -> p.recordIter = records.records(p.topicPartition).iterator()); curBatch = Iterators.cycle(new ArrayList<>(partitionStates)); } private void setupInitialOffset(PartitionState pState) { Read<K, V> spec = source.getSpec(); if (pState.nextOffset != UNINITIALIZED_OFFSET) { consumer.seek(pState.topicPartition, pState.nextOffset); } else { Instant startReadTime = spec.getStartReadTime(); if (startReadTime != null) { pState.nextOffset = consumerSpEL.offsetForTime(consumer, pState.topicPartition, spec.getStartReadTime()); consumer.seek(pState.topicPartition, pState.nextOffset); } else { pState.nextOffset = consumer.position(pState.topicPartition); } } } private void updateLatestOffsets() { for (PartitionState p : partitionStates) { try { Instant fetchTime = Instant.now(); consumerSpEL.evaluateSeek2End(offsetConsumer, p.topicPartition); long offset = offsetConsumer.position(p.topicPartition); p.setLatestOffset(offset, fetchTime); } catch (Exception e) { if (closed.get()) { break; } LOG.warn( "{}: exception while fetching latest offset for partition {}. 
will be retried.", this, p.topicPartition, e); } } LOG.debug("{}: backlog {}", this, getSplitBacklogBytes()); } private void reportBacklog() { long splitBacklogBytes = getSplitBacklogBytes(); if (splitBacklogBytes < 0) { splitBacklogBytes = UnboundedReader.BACKLOG_UNKNOWN; } backlogBytesOfSplit.set(splitBacklogBytes); long splitBacklogMessages = getSplitBacklogMessageCount(); if (splitBacklogMessages < 0) { splitBacklogMessages = UnboundedReader.BACKLOG_UNKNOWN; } backlogElementsOfSplit.set(splitBacklogMessages); } private long getSplitBacklogMessageCount() { long backlogCount = 0; for (PartitionState p : partitionStates) { long pBacklog = p.backlogMessageCount(); if (pBacklog == UnboundedReader.BACKLOG_UNKNOWN) { return UnboundedReader.BACKLOG_UNKNOWN; } backlogCount += pBacklog; } return backlogCount; } @VisibleForTesting Map<String, Object> getOffsetConsumerConfig() { Map<String, Object> offsetConsumerConfig = new HashMap<>(source.getSpec().getConsumerConfig()); offsetConsumerConfig.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false); Object groupId = source.getSpec().getConsumerConfig().get(ConsumerConfig.GROUP_ID_CONFIG); String offsetGroupId = String.format( "%s_offset_consumer_%d_%s", name, (new Random()).nextInt(Integer.MAX_VALUE), (groupId == null ? 
"none" : groupId)); offsetConsumerConfig.put(ConsumerConfig.GROUP_ID_CONFIG, offsetGroupId); if (source.getSpec().getOffsetConsumerConfig() != null) { offsetConsumerConfig.putAll(source.getSpec().getOffsetConsumerConfig()); } offsetConsumerConfig.put(ConsumerConfig.ISOLATION_LEVEL_CONFIG, "read_uncommitted"); return offsetConsumerConfig; } @Override public void close() throws IOException { closed.set(true); consumerPollThread.shutdown(); offsetFetcherThread.shutdown(); boolean isShutdown = false; while (!isShutdown) { if (consumer != null) { consumer.wakeup(); } if (offsetConsumer != null) { offsetConsumer.wakeup(); } availableRecordsQueue.poll(); try { isShutdown = consumerPollThread.awaitTermination(10, TimeUnit.SECONDS) && offsetFetcherThread.awaitTermination(10, TimeUnit.SECONDS); } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw new RuntimeException(e); } if (!isShutdown) { LOG.warn("An internal thread is taking a long time to shutdown. will retry."); } } Closeables.close(keyDeserializerInstance, true); Closeables.close(valueDeserializerInstance, true); Closeables.close(offsetConsumer, true); Closeables.close(consumer, true); } }
Unrelated but useful: always move away "unlikely" paths ie cold ones out of the "main" method logic: it would increase chances to inline it (see http://normanmaurer.me/blog_in_progress/2013/11/07/Inline-all-the-Things/ that's still very actual and these things hasn't changed across JDK versions, really). Most inlining decisions on OpenJDK happens based on the bytecode size (although not always; sometime the native method size is important too), meaning that using any bytecode plugin viewer (I use https://plugins.jetbrains.com/plugin/9248-jclasslib-bytecode-viewer) can help checking how much the bytecode size change while grouping differently code into methods: eg ```java if (LOG.isTraceEnabled()) { traceActivate(...); } ``` this is going to reduce a bit the bytecode size of the original method, increasing its chance to be inlined in the common use case
public void activate(ContextState initialState) { if (LOG.isTraceEnabled()) { String stack = Arrays.stream(Thread.currentThread().getStackTrace()) .skip(2) .limit(7) .map(se -> "\n\t" + se.toString()) .collect(Collectors.joining()); LOG.tracef("Activate %s %s\n\t...", initialState != null ? Integer.toHexString(initialState.hashCode()) : "new", stack); } if (initialState == null) { currentContext.set(new RequestContextState(new ConcurrentHashMap<>())); fireIfNotEmpty(initializedNotifier); } else { if (initialState instanceof RequestContextState) { currentContext.set((RequestContextState) initialState); } else { throw new IllegalArgumentException("Invalid initial state: " + initialState.getClass().getName()); } } }
String stack = Arrays.stream(Thread.currentThread().getStackTrace())
public void activate(ContextState initialState) { if (LOG.isTraceEnabled()) { String stack = Arrays.stream(Thread.currentThread().getStackTrace()) .skip(2) .limit(7) .map(se -> "\n\t" + se.toString()) .collect(Collectors.joining()); LOG.tracef("Activate %s %s\n\t...", initialState != null ? Integer.toHexString(initialState.hashCode()) : "new", stack); } if (initialState == null) { currentContext.set(new RequestContextState(new ConcurrentHashMap<>())); fireIfNotEmpty(initializedNotifier); } else { if (initialState instanceof RequestContextState) { currentContext.set((RequestContextState) initialState); } else { throw new IllegalArgumentException("Invalid initial state: " + initialState.getClass().getName()); } } }
class RequestContext implements ManagedContext { private static final Logger LOG = Logger.getLogger("io.quarkus.arc.requestContext"); private final CurrentContext<RequestContextState> currentContext; private final LazyValue<Notifier<Object>> initializedNotifier; private final LazyValue<Notifier<Object>> beforeDestroyedNotifier; private final LazyValue<Notifier<Object>> destroyedNotifier; public RequestContext(CurrentContext<RequestContextState> currentContext) { this.currentContext = currentContext; this.initializedNotifier = new LazyValue<>(RequestContext::createInitializedNotifier); this.beforeDestroyedNotifier = new LazyValue<>(RequestContext::createBeforeDestroyedNotifier); this.destroyedNotifier = new LazyValue<>(RequestContext::createDestroyedNotifier); } @Override public Class<? extends Annotation> getScope() { return RequestScoped.class; } @SuppressWarnings("unchecked") @Override public <T> T getIfActive(Contextual<T> contextual, Function<Contextual<T>, CreationalContext<T>> creationalContextFun) { Objects.requireNonNull(contextual, "Contextual must not be null"); Objects.requireNonNull(creationalContextFun, "CreationalContext supplier must not be null"); InjectableBean<T> bean = (InjectableBean<T>) contextual; if (!Scopes.scopeMatches(this, bean)) { throw Scopes.scopeDoesNotMatchException(this, bean); } RequestContextState ctxState = currentContext.get(); if (ctxState == null) { return null; } ContextInstanceHandle<T> instance = (ContextInstanceHandle<T>) ctxState.map.get(contextual); if (instance == null) { CreationalContext<T> creationalContext = creationalContextFun.apply(contextual); instance = new ContextInstanceHandleImpl<T>((InjectableBean<T>) contextual, contextual.create(creationalContext), creationalContext); ctxState.map.put(contextual, instance); } return instance.get(); } @Override public <T> T get(Contextual<T> contextual, CreationalContext<T> creationalContext) { T result = getIfActive(contextual, 
CreationalContextImpl.unwrap(Objects.requireNonNull(creationalContext, "CreationalContext must not be null"))); if (result == null) { throw notActive(); } return result; } @SuppressWarnings("unchecked") @Override public <T> T get(Contextual<T> contextual) { Objects.requireNonNull(contextual, "Contextual must not be null"); InjectableBean<T> bean = (InjectableBean<T>) contextual; if (!Scopes.scopeMatches(this, bean)) { throw Scopes.scopeDoesNotMatchException(this, bean); } RequestContextState state = currentContext.get(); if (state == null) { throw notActive(); } ContextInstanceHandle<T> instance = (ContextInstanceHandle<T>) state.map.get(contextual); return instance == null ? null : instance.get(); } @Override public boolean isActive() { return currentContext.get() != null; } @Override public void destroy(Contextual<?> contextual) { RequestContextState state = currentContext.get(); if (state == null) { throw notActive(); } ContextInstanceHandle<?> instance = state.map.remove(contextual); if (instance != null) { instance.destroy(); } } @Override @Override public ContextState getState() { RequestContextState state = currentContext.get(); if (state == null) { throw notActive(); } return state; } public ContextState getStateIfActive() { return currentContext.get(); } @Override public void deactivate() { if (LOG.isTraceEnabled()) { String stack = Arrays.stream(Thread.currentThread().getStackTrace()) .skip(2) .limit(7) .map(se -> "\n\t" + se.toString()) .collect(Collectors.joining()); LOG.tracef("Deactivate%s\n\t...", stack); } currentContext.remove(); } @Override public void destroy() { destroy(currentContext.get()); } @Override public void destroy(ContextState state) { if (LOG.isTraceEnabled()) { String stack = Arrays.stream(Thread.currentThread().getStackTrace()) .skip(2) .limit(7) .map(se -> "\n\t" + se.toString()) .collect(Collectors.joining()); LOG.tracef("Destroy %s%s\n\t...", state != null ? 
Integer.toHexString(state.hashCode()) : "", stack); } if (state == null) { return; } if (state instanceof RequestContextState) { RequestContextState reqState = ((RequestContextState) state); reqState.isValid = false; synchronized (state) { Map<Contextual<?>, ContextInstanceHandle<?>> map = ((RequestContextState) state).map; try { fireIfNotEmpty(beforeDestroyedNotifier); } catch (Exception e) { LOG.warn("An error occurred during delivery of the @BeforeDestroyed(RequestScoped.class) event", e); } map.forEach(this::destroyContextElement); try { fireIfNotEmpty(destroyedNotifier); } catch (Exception e) { LOG.warn("An error occurred during delivery of the @Destroyed(RequestScoped.class) event", e); } map.clear(); } } else { throw new IllegalArgumentException("Invalid state implementation: " + state.getClass().getName()); } } private void destroyContextElement(Contextual<?> contextual, ContextInstanceHandle<?> contextInstanceHandle) { try { contextInstanceHandle.destroy(); } catch (Exception e) { throw new IllegalStateException("Unable to destroy instance" + contextInstanceHandle.get(), e); } } private void fireIfNotEmpty(LazyValue<Notifier<Object>> value) { Notifier<Object> notifier = value.get(); if (!notifier.isEmpty()) { notifier.notify(toString()); } } private ContextNotActiveException notActive() { String msg = "Request context is not active - you can activate the request context for a specific method using the @ActivateRequestContext interceptor binding"; return new ContextNotActiveException(msg); } private static Notifier<Object> createInitializedNotifier() { return EventImpl.createNotifier(Object.class, Object.class, new HashSet<>(Arrays.asList(Initialized.Literal.REQUEST, Any.Literal.INSTANCE)), ArcContainerImpl.instance(), false); } private static Notifier<Object> createBeforeDestroyedNotifier() { return EventImpl.createNotifier(Object.class, Object.class, new HashSet<>(Arrays.asList(BeforeDestroyed.Literal.REQUEST, Any.Literal.INSTANCE)), 
ArcContainerImpl.instance(), false); } private static Notifier<Object> createDestroyedNotifier() { return EventImpl.createNotifier(Object.class, Object.class, new HashSet<>(Arrays.asList(Destroyed.Literal.REQUEST, Any.Literal.INSTANCE)), ArcContainerImpl.instance(), false); } static class RequestContextState implements ContextState { private final Map<Contextual<?>, ContextInstanceHandle<?>> map; private volatile boolean isValid; RequestContextState(ConcurrentMap<Contextual<?>, ContextInstanceHandle<?>> value) { this.map = Objects.requireNonNull(value); this.isValid = true; } @Override public Map<InjectableBean<?>, Object> getContextualInstances() { return map.values().stream() .collect(Collectors.toUnmodifiableMap(ContextInstanceHandle::getBean, ContextInstanceHandle::get)); } @Override public boolean isValid() { return isValid; } } }
class RequestContext implements ManagedContext { private static final Logger LOG = Logger.getLogger("io.quarkus.arc.requestContext"); private final CurrentContext<RequestContextState> currentContext; private final LazyValue<Notifier<Object>> initializedNotifier; private final LazyValue<Notifier<Object>> beforeDestroyedNotifier; private final LazyValue<Notifier<Object>> destroyedNotifier; public RequestContext(CurrentContext<RequestContextState> currentContext) { this.currentContext = currentContext; this.initializedNotifier = new LazyValue<>(RequestContext::createInitializedNotifier); this.beforeDestroyedNotifier = new LazyValue<>(RequestContext::createBeforeDestroyedNotifier); this.destroyedNotifier = new LazyValue<>(RequestContext::createDestroyedNotifier); } @Override public Class<? extends Annotation> getScope() { return RequestScoped.class; } @SuppressWarnings("unchecked") @Override public <T> T getIfActive(Contextual<T> contextual, Function<Contextual<T>, CreationalContext<T>> creationalContextFun) { Objects.requireNonNull(contextual, "Contextual must not be null"); Objects.requireNonNull(creationalContextFun, "CreationalContext supplier must not be null"); InjectableBean<T> bean = (InjectableBean<T>) contextual; if (!Scopes.scopeMatches(this, bean)) { throw Scopes.scopeDoesNotMatchException(this, bean); } RequestContextState ctxState = currentContext.get(); if (ctxState == null) { return null; } ContextInstanceHandle<T> instance = (ContextInstanceHandle<T>) ctxState.map.get(contextual); if (instance == null) { CreationalContext<T> creationalContext = creationalContextFun.apply(contextual); instance = new ContextInstanceHandleImpl<T>((InjectableBean<T>) contextual, contextual.create(creationalContext), creationalContext); ctxState.map.put(contextual, instance); } return instance.get(); } @Override public <T> T get(Contextual<T> contextual, CreationalContext<T> creationalContext) { T result = getIfActive(contextual, 
CreationalContextImpl.unwrap(Objects.requireNonNull(creationalContext, "CreationalContext must not be null"))); if (result == null) { throw notActive(); } return result; } @SuppressWarnings("unchecked") @Override public <T> T get(Contextual<T> contextual) { Objects.requireNonNull(contextual, "Contextual must not be null"); InjectableBean<T> bean = (InjectableBean<T>) contextual; if (!Scopes.scopeMatches(this, bean)) { throw Scopes.scopeDoesNotMatchException(this, bean); } RequestContextState state = currentContext.get(); if (state == null) { throw notActive(); } ContextInstanceHandle<T> instance = (ContextInstanceHandle<T>) state.map.get(contextual); return instance == null ? null : instance.get(); } @Override public boolean isActive() { return currentContext.get() != null; } @Override public void destroy(Contextual<?> contextual) { RequestContextState state = currentContext.get(); if (state == null) { throw notActive(); } ContextInstanceHandle<?> instance = state.map.remove(contextual); if (instance != null) { instance.destroy(); } } @Override @Override public ContextState getState() { RequestContextState state = currentContext.get(); if (state == null) { throw notActive(); } return state; } public ContextState getStateIfActive() { return currentContext.get(); } @Override public void deactivate() { if (LOG.isTraceEnabled()) { String stack = Arrays.stream(Thread.currentThread().getStackTrace()) .skip(2) .limit(7) .map(se -> "\n\t" + se.toString()) .collect(Collectors.joining()); LOG.tracef("Deactivate%s\n\t...", stack); } currentContext.remove(); } @Override public void destroy() { destroy(currentContext.get()); } @Override public void destroy(ContextState state) { if (LOG.isTraceEnabled()) { String stack = Arrays.stream(Thread.currentThread().getStackTrace()) .skip(2) .limit(7) .map(se -> "\n\t" + se.toString()) .collect(Collectors.joining()); LOG.tracef("Destroy %s%s\n\t...", state != null ? 
Integer.toHexString(state.hashCode()) : "", stack); } if (state == null) { return; } if (state instanceof RequestContextState) { RequestContextState reqState = ((RequestContextState) state); reqState.isValid = false; synchronized (state) { Map<Contextual<?>, ContextInstanceHandle<?>> map = ((RequestContextState) state).map; try { fireIfNotEmpty(beforeDestroyedNotifier); } catch (Exception e) { LOG.warn("An error occurred during delivery of the @BeforeDestroyed(RequestScoped.class) event", e); } map.forEach(this::destroyContextElement); try { fireIfNotEmpty(destroyedNotifier); } catch (Exception e) { LOG.warn("An error occurred during delivery of the @Destroyed(RequestScoped.class) event", e); } map.clear(); } } else { throw new IllegalArgumentException("Invalid state implementation: " + state.getClass().getName()); } } private void destroyContextElement(Contextual<?> contextual, ContextInstanceHandle<?> contextInstanceHandle) { try { contextInstanceHandle.destroy(); } catch (Exception e) { throw new IllegalStateException("Unable to destroy instance" + contextInstanceHandle.get(), e); } } private void fireIfNotEmpty(LazyValue<Notifier<Object>> value) { Notifier<Object> notifier = value.get(); if (!notifier.isEmpty()) { notifier.notify(toString()); } } private ContextNotActiveException notActive() { String msg = "Request context is not active - you can activate the request context for a specific method using the @ActivateRequestContext interceptor binding"; return new ContextNotActiveException(msg); } private static Notifier<Object> createInitializedNotifier() { return EventImpl.createNotifier(Object.class, Object.class, new HashSet<>(Arrays.asList(Initialized.Literal.REQUEST, Any.Literal.INSTANCE)), ArcContainerImpl.instance(), false); } private static Notifier<Object> createBeforeDestroyedNotifier() { return EventImpl.createNotifier(Object.class, Object.class, new HashSet<>(Arrays.asList(BeforeDestroyed.Literal.REQUEST, Any.Literal.INSTANCE)), 
ArcContainerImpl.instance(), false); } private static Notifier<Object> createDestroyedNotifier() { return EventImpl.createNotifier(Object.class, Object.class, new HashSet<>(Arrays.asList(Destroyed.Literal.REQUEST, Any.Literal.INSTANCE)), ArcContainerImpl.instance(), false); } static class RequestContextState implements ContextState { private final Map<Contextual<?>, ContextInstanceHandle<?>> map; private volatile boolean isValid; RequestContextState(ConcurrentMap<Contextual<?>, ContextInstanceHandle<?>> value) { this.map = Objects.requireNonNull(value); this.isValid = true; } @Override public Map<InjectableBean<?>, Object> getContextualInstances() { return map.values().stream() .collect(Collectors.toUnmodifiableMap(ContextInstanceHandle::getBean, ContextInstanceHandle::get)); } @Override public boolean isValid() { return isValid; } } }
Isn't it better if we move this logic to the function definition node's overridden method itself? As the return type of a function represents the particular function, I feel that this logic does not belong here.
public void visit(ReturnStatementNode returnStatementNode) { this.semanticModel.typeOf(returnStatementNode).ifPresent(this::checkAndSetTypeResult); if (resultFound) { return; } returnStatementNode.parent().accept(this); if (resultFound && returnTypeSymbol.typeKind() == TypeDescKind.FUNCTION) { FunctionTypeSymbol functionTypeSymbol = (FunctionTypeSymbol) returnTypeSymbol; functionTypeSymbol.returnTypeDescriptor().ifPresentOrElse(this::checkAndSetTypeResult, this::resetResult); } else { resetResult(); } }
functionTypeSymbol.returnTypeDescriptor().ifPresentOrElse(this::checkAndSetTypeResult, this::resetResult);
public void visit(ReturnStatementNode returnStatementNode) { this.semanticModel.typeOf(returnStatementNode).ifPresent(this::checkAndSetTypeResult); if (resultFound) { return; } returnStatementNode.parent().accept(this); if (resultFound && returnTypeSymbol.typeKind() == TypeDescKind.FUNCTION) { FunctionTypeSymbol functionTypeSymbol = (FunctionTypeSymbol) returnTypeSymbol; functionTypeSymbol.returnTypeDescriptor().ifPresentOrElse(this::checkAndSetTypeResult, this::resetResult); } else { resetResult(); } }
class FunctionCallExpressionTypeFinder extends NodeVisitor { private final SemanticModel semanticModel; private TypeSymbol returnTypeSymbol; private TypeDescKind returnTypeDescKind; private boolean resultFound = false; public FunctionCallExpressionTypeFinder(SemanticModel semanticModel) { this.semanticModel = semanticModel; } public void findTypeOf(FunctionCallExpressionNode functionCallExpressionNode) { functionCallExpressionNode.accept(this); } @Override public void visit(ModuleVariableDeclarationNode moduleVariableDeclarationNode) { Symbol symbol = semanticModel.symbol(moduleVariableDeclarationNode).orElse(null); TypeSymbol typeDescriptor = SymbolUtil.getTypeDescriptor(symbol).orElse(null); checkAndSetTypeResult(typeDescriptor); } @Override public void visit(AssignmentStatementNode assignmentStatementNode) { Symbol symbol = semanticModel.symbol(assignmentStatementNode).orElse(null); TypeSymbol typeDescriptor = SymbolUtil.getTypeDescriptor(symbol).orElse(null); checkAndSetTypeResult(typeDescriptor); if (resultFound) { return; } assignmentStatementNode.varRef().accept(this); } @Override public void visit(VariableDeclarationNode variableDeclarationNode) { Symbol symbol = semanticModel.symbol(variableDeclarationNode).orElse(null); TypeSymbol typeDescriptor = SymbolUtil.getTypeDescriptor(symbol).orElse(null); checkAndSetTypeResult(typeDescriptor); } @Override public void visit(SpecificFieldNode specificFieldNode) { semanticModel.symbol(specificFieldNode) .map(symbol -> (RecordFieldSymbol) symbol) .ifPresent(recordFieldSymbol -> checkAndSetTypeResult(recordFieldSymbol.typeDescriptor())); } @Override public void visit(BinaryExpressionNode binaryExpressionNode) { TypeSymbol typeSymbol = semanticModel.typeOf(binaryExpressionNode.lhsExpr()).orElse(null); checkAndSetTypeResult(typeSymbol); if (resultFound) { return; } typeSymbol = semanticModel.typeOf(binaryExpressionNode.rhsExpr()).orElse(null); checkAndSetTypeResult(typeSymbol); } @Override public void 
visit(LetExpressionNode letExpressionNode) { TypeSymbol typeSymbol = semanticModel.typeOf(letExpressionNode).orElse(null); checkAndSetTypeResult(typeSymbol); if (resultFound) { return; } letExpressionNode.parent().accept(this); } @Override public void visit(LetVariableDeclarationNode letVariableDeclarationNode) { Optional<Symbol> symbol1 = semanticModel.symbol(letVariableDeclarationNode); symbol1.map(symbol -> (VariableSymbol) symbol) .map(VariableSymbol::typeDescriptor) .ifPresent(this::checkAndSetTypeResult); } @Override public void visit(StartActionNode startActionNode) { startActionNode.parent().accept(this); if (resultFound && returnTypeSymbol.typeKind() == TypeDescKind.FUTURE) { FutureTypeSymbol futureTypeSymbol = (FutureTypeSymbol) returnTypeSymbol; TypeSymbol typeSymbol = futureTypeSymbol.typeParameter().orElse(null); checkAndSetTypeResult(typeSymbol); } } @Override public void visit(FunctionCallExpressionNode fnCallExprNode) { fnCallExprNode.functionName().accept(this); if (resultFound) { return; } TypeSymbol typeSymbol = semanticModel.typeOf(fnCallExprNode).orElse(null); checkAndSetTypeResult(typeSymbol); if (resultFound) { return; } fnCallExprNode.parent().accept(this); } @Override public void visit(MethodCallExpressionNode methodCallExpressionNode) { methodCallExpressionNode.methodName().accept(this); if (resultFound) { return; } TypeSymbol typeSymbol = semanticModel.typeOf(methodCallExpressionNode).orElse(null); checkAndSetTypeResult(typeSymbol); } @Override public void visit(RemoteMethodCallActionNode remoteMethodCallActionNode) { remoteMethodCallActionNode.methodName().accept(this); if (resultFound) { return; } TypeSymbol typeSymbol = semanticModel.typeOf(remoteMethodCallActionNode).orElse(null); checkAndSetTypeResult(typeSymbol); } @Override public void visit(SimpleNameReferenceNode simpleNameReferenceNode) { semanticModel.symbol(simpleNameReferenceNode) .flatMap(SymbolUtil::getTypeDescriptor) .ifPresent(this::checkAndSetTypeResult); } @Override 
public void visit(ErrorConstructorExpressionNode errorConstructorExpressionNode) { semanticModel.typeOf(errorConstructorExpressionNode) .map(CommonUtil::getRawType) .ifPresent(this::checkAndSetTypeResult); } @Override public void visit(PositionalArgumentNode positionalArgumentNode) { positionalArgumentNode.parent().accept(this); if (!resultFound) { return; } if (returnTypeSymbol.typeKind() == TypeDescKind.ERROR) { checkAndSetTypeDescResult(TypeDescKind.STRING); return; } Optional<List<ParameterSymbol>> params = getParameterSymbols(); if (params.isEmpty() || params.get().isEmpty()) { return; } SeparatedNodeList<FunctionArgumentNode> arguments; switch (positionalArgumentNode.parent().kind()) { case METHOD_CALL: MethodCallExpressionNode methodCallExpressionNode = (MethodCallExpressionNode) positionalArgumentNode.parent(); arguments = methodCallExpressionNode.arguments(); break; case FUNCTION_CALL: FunctionCallExpressionNode functionCallExpressionNode = (FunctionCallExpressionNode) positionalArgumentNode.parent(); arguments = functionCallExpressionNode.arguments(); break; case REMOTE_METHOD_CALL_ACTION: RemoteMethodCallActionNode remoteMethodCallActionNode = (RemoteMethodCallActionNode) positionalArgumentNode.parent(); arguments = remoteMethodCallActionNode.arguments(); break; case PARENTHESIZED_ARG_LIST: ParenthesizedArgList parenthesizedArgList = (ParenthesizedArgList) positionalArgumentNode.parent(); arguments = parenthesizedArgList.arguments(); break; default: return; } if (arguments != null) { int argIndex = -1; for (int i = 0; i < arguments.size(); i++) { if (arguments.get(i).equals(positionalArgumentNode)) { argIndex = i; break; } } if (argIndex < 0 || params.get().size() < argIndex + 1) { return; } ParameterSymbol parameterSymbol = params.get().get(argIndex); checkAndSetTypeResult(parameterSymbol.typeDescriptor()); } } @Override public void visit(NamedArgumentNode namedArgumentNode) { namedArgumentNode.parent().accept(this); if (!resultFound) { return; } if 
(returnTypeSymbol.typeKind() == TypeDescKind.ERROR) { ErrorTypeSymbol errorTypeSymbol = (ErrorTypeSymbol) returnTypeSymbol; TypeSymbol detailType = CommonUtil.getRawType(errorTypeSymbol.detailTypeDescriptor()); if (detailType.typeKind() != TypeDescKind.RECORD) { checkAndSetTypeDescResult(TypeDescKind.ANYDATA); return; } RecordTypeSymbol recordTypeSymbol = (RecordTypeSymbol) detailType; RecordFieldSymbol fieldSymbol = recordTypeSymbol.fieldDescriptors() .get(namedArgumentNode.argumentName().name().text()); if (fieldSymbol == null) { resetResult(); return; } checkAndSetTypeResult(fieldSymbol.typeDescriptor()); return; } Optional<List<ParameterSymbol>> params = getParameterSymbols(); if (params.isEmpty()) { return; } params.get().stream().filter(param -> param.getName().isPresent() && param.getName().get().equals(namedArgumentNode.argumentName().name().text())).findFirst() .ifPresent(parameterSymbol -> this.checkAndSetTypeResult(parameterSymbol.typeDescriptor())); } /** * Returns the parameter symbols once the {@link * * @return Optional parameter symbol list */ private Optional<List<ParameterSymbol>> getParameterSymbols() { FunctionTypeSymbol functionTypeSymbol; if (returnTypeSymbol.typeKind() == TypeDescKind.FUNCTION) { functionTypeSymbol = (FunctionTypeSymbol) returnTypeSymbol; } else if (returnTypeSymbol.kind() == SymbolKind.CLASS) { Optional<MethodSymbol> methodSymbol = ((ClassSymbol) returnTypeSymbol).initMethod(); if (methodSymbol.isEmpty()) { return Optional.empty(); } functionTypeSymbol = methodSymbol.get().typeDescriptor(); } else { return Optional.empty(); } return functionTypeSymbol.params(); } @Override public void visit(ParenthesizedArgList parenthesizedArgList) { parenthesizedArgList.parent().accept(this); } @Override public void visit(ExplicitNewExpressionNode explicitNewExpressionNode) { semanticModel.typeOf(explicitNewExpressionNode) .flatMap(typeSymbol -> Optional.of(CommonUtil.getRawType(typeSymbol))) 
.stream().findFirst().ifPresent(this::checkAndSetTypeResult); } @Override public void visit(ImplicitNewExpressionNode implicitNewExpressionNode) { semanticModel.typeOf(implicitNewExpressionNode) .flatMap(typeSymbol -> Optional.of(CommonUtil.getRawType(typeSymbol))) .stream().findFirst().ifPresent(this::checkAndSetTypeResult); } @Override public void visit(FunctionDefinitionNode node) { semanticModel.symbol(node) .flatMap(SymbolUtil::getTypeDescriptor) .ifPresent(this::checkAndSetTypeResult); } @Override public void visit(FunctionBodyBlockNode node) { node.parent().accept(this); } @Override @Override public void visit(UnaryExpressionNode unaryExpressionNode) { semanticModel.typeOf(unaryExpressionNode).ifPresent(this::checkAndSetTypeResult); if (!resultFound) { checkAndSetTypeDescResult(TypeDescKind.BOOLEAN); } } @Override public void visit(IfElseStatementNode ifElseStatementNode) { checkAndSetTypeDescResult(TypeDescKind.BOOLEAN); } @Override public void visit(FailStatementNode failStatementNode) { checkAndSetTypeDescResult(TypeDescKind.ERROR); } @Override public void visit(WhileStatementNode whileStatementNode) { checkAndSetTypeDescResult(TypeDescKind.BOOLEAN); } @Override protected void visitSyntaxNode(Node node) { } private void checkAndSetTypeResult(TypeSymbol typeSymbol) { if (typeSymbol == null) { return; } this.returnTypeSymbol = typeSymbol; if (typeSymbol.typeKind() != TypeDescKind.COMPILATION_ERROR) { resultFound = true; } } private void checkAndSetTypeDescResult(TypeDescKind typeDescKind) { if (typeDescKind == null) { return; } this.returnTypeSymbol = null; this.returnTypeDescKind = typeDescKind; this.resultFound = true; } private void resetResult() { this.returnTypeDescKind = null; this.returnTypeSymbol = null; this.resultFound = false; } /** * Get the type symbol of the return type of the function call expression provided to this instance. 
Should be * invoked after invoking {@link * * @return Optional type symbol of the return type of function call expression */ public Optional<TypeSymbol> getReturnTypeSymbol() { return Optional.ofNullable(returnTypeSymbol); } /** * Get the type descriptor kind of the return type of the function call expression. Should be used when * {@link * * @return Return type descriptor kind */ public Optional<TypeDescKind> getReturnTypeDescKind() { return Optional.ofNullable(returnTypeDescKind); } }
/**
 * A {@link NodeVisitor} that, starting from a function call expression, walks the surrounding
 * syntax tree to determine the type expected at the call's position (declared variable type,
 * parameter type of an enclosing invocation, record field type, etc.).
 *
 * <p>Usage: call {@code findTypeOf(node)} first, then read the result via
 * {@code getReturnTypeSymbol()} or {@code getReturnTypeDescKind()}.
 */
class FunctionCallExpressionTypeFinder extends NodeVisitor {

    private final SemanticModel semanticModel;
    // Result slots: at most one of these two is populated once resultFound is true.
    private TypeSymbol returnTypeSymbol;
    private TypeDescKind returnTypeDescKind;
    private boolean resultFound = false;

    public FunctionCallExpressionTypeFinder(SemanticModel semanticModel) {
        this.semanticModel = semanticModel;
    }

    /**
     * Starts the search for the expected type of the given function call expression.
     *
     * @param functionCallExpressionNode the call expression whose contextual type is sought
     */
    public void findTypeOf(FunctionCallExpressionNode functionCallExpressionNode) {
        functionCallExpressionNode.accept(this);
    }

    @Override
    public void visit(ModuleVariableDeclarationNode moduleVariableDeclarationNode) {
        Symbol symbol = semanticModel.symbol(moduleVariableDeclarationNode).orElse(null);
        TypeSymbol typeDescriptor = SymbolUtil.getTypeDescriptor(symbol).orElse(null);
        checkAndSetTypeResult(typeDescriptor);
    }

    @Override
    public void visit(AssignmentStatementNode assignmentStatementNode) {
        Symbol symbol = semanticModel.symbol(assignmentStatementNode).orElse(null);
        TypeSymbol typeDescriptor = SymbolUtil.getTypeDescriptor(symbol).orElse(null);
        checkAndSetTypeResult(typeDescriptor);
        if (resultFound) {
            return;
        }
        // Fall back to the type of the variable reference on the left-hand side.
        assignmentStatementNode.varRef().accept(this);
    }

    @Override
    public void visit(VariableDeclarationNode variableDeclarationNode) {
        Symbol symbol = semanticModel.symbol(variableDeclarationNode).orElse(null);
        TypeSymbol typeDescriptor = SymbolUtil.getTypeDescriptor(symbol).orElse(null);
        checkAndSetTypeResult(typeDescriptor);
    }

    @Override
    public void visit(SpecificFieldNode specificFieldNode) {
        semanticModel.symbol(specificFieldNode)
                .map(symbol -> (RecordFieldSymbol) symbol)
                .ifPresent(recordFieldSymbol -> checkAndSetTypeResult(recordFieldSymbol.typeDescriptor()));
    }

    @Override
    public void visit(BinaryExpressionNode binaryExpressionNode) {
        // Prefer the LHS operand's type; fall back to the RHS.
        TypeSymbol typeSymbol = semanticModel.typeOf(binaryExpressionNode.lhsExpr()).orElse(null);
        checkAndSetTypeResult(typeSymbol);
        if (resultFound) {
            return;
        }
        typeSymbol = semanticModel.typeOf(binaryExpressionNode.rhsExpr()).orElse(null);
        checkAndSetTypeResult(typeSymbol);
    }

    @Override
    public void visit(LetExpressionNode letExpressionNode) {
        TypeSymbol typeSymbol = semanticModel.typeOf(letExpressionNode).orElse(null);
        checkAndSetTypeResult(typeSymbol);
        if (resultFound) {
            return;
        }
        letExpressionNode.parent().accept(this);
    }

    @Override
    public void visit(LetVariableDeclarationNode letVariableDeclarationNode) {
        Optional<Symbol> symbol1 = semanticModel.symbol(letVariableDeclarationNode);
        symbol1.map(symbol -> (VariableSymbol) symbol)
                .map(VariableSymbol::typeDescriptor)
                .ifPresent(this::checkAndSetTypeResult);
    }

    @Override
    public void visit(StartActionNode startActionNode) {
        startActionNode.parent().accept(this);
        // `start` wraps the result in a future; unwrap to the future's type parameter.
        if (resultFound && returnTypeSymbol.typeKind() == TypeDescKind.FUTURE) {
            FutureTypeSymbol futureTypeSymbol = (FutureTypeSymbol) returnTypeSymbol;
            TypeSymbol typeSymbol = futureTypeSymbol.typeParameter().orElse(null);
            checkAndSetTypeResult(typeSymbol);
        }
    }

    @Override
    public void visit(FunctionCallExpressionNode fnCallExprNode) {
        fnCallExprNode.functionName().accept(this);
        if (resultFound) {
            return;
        }
        TypeSymbol typeSymbol = semanticModel.typeOf(fnCallExprNode).orElse(null);
        checkAndSetTypeResult(typeSymbol);
        if (resultFound) {
            return;
        }
        fnCallExprNode.parent().accept(this);
    }

    @Override
    public void visit(MethodCallExpressionNode methodCallExpressionNode) {
        methodCallExpressionNode.methodName().accept(this);
        if (resultFound) {
            return;
        }
        TypeSymbol typeSymbol = semanticModel.typeOf(methodCallExpressionNode).orElse(null);
        checkAndSetTypeResult(typeSymbol);
    }

    @Override
    public void visit(RemoteMethodCallActionNode remoteMethodCallActionNode) {
        remoteMethodCallActionNode.methodName().accept(this);
        if (resultFound) {
            return;
        }
        TypeSymbol typeSymbol = semanticModel.typeOf(remoteMethodCallActionNode).orElse(null);
        checkAndSetTypeResult(typeSymbol);
    }

    @Override
    public void visit(SimpleNameReferenceNode simpleNameReferenceNode) {
        semanticModel.symbol(simpleNameReferenceNode)
                .flatMap(SymbolUtil::getTypeDescriptor)
                .ifPresent(this::checkAndSetTypeResult);
    }

    @Override
    public void visit(ErrorConstructorExpressionNode errorConstructorExpressionNode) {
        semanticModel.typeOf(errorConstructorExpressionNode)
                .map(CommonUtil::getRawType)
                .ifPresent(this::checkAndSetTypeResult);
    }

    @Override
    public void visit(PositionalArgumentNode positionalArgumentNode) {
        positionalArgumentNode.parent().accept(this);
        if (!resultFound) {
            return;
        }
        // Positional args of error constructors are the message (string).
        if (returnTypeSymbol.typeKind() == TypeDescKind.ERROR) {
            checkAndSetTypeDescResult(TypeDescKind.STRING);
            return;
        }
        Optional<List<ParameterSymbol>> params = getParameterSymbols();
        if (params.isEmpty() || params.get().isEmpty()) {
            return;
        }
        // Locate this argument's position in the enclosing invocation's argument list.
        SeparatedNodeList<FunctionArgumentNode> arguments;
        switch (positionalArgumentNode.parent().kind()) {
            case METHOD_CALL:
                MethodCallExpressionNode methodCallExpressionNode =
                        (MethodCallExpressionNode) positionalArgumentNode.parent();
                arguments = methodCallExpressionNode.arguments();
                break;
            case FUNCTION_CALL:
                FunctionCallExpressionNode functionCallExpressionNode =
                        (FunctionCallExpressionNode) positionalArgumentNode.parent();
                arguments = functionCallExpressionNode.arguments();
                break;
            case REMOTE_METHOD_CALL_ACTION:
                RemoteMethodCallActionNode remoteMethodCallActionNode =
                        (RemoteMethodCallActionNode) positionalArgumentNode.parent();
                arguments = remoteMethodCallActionNode.arguments();
                break;
            case PARENTHESIZED_ARG_LIST:
                ParenthesizedArgList parenthesizedArgList =
                        (ParenthesizedArgList) positionalArgumentNode.parent();
                arguments = parenthesizedArgList.arguments();
                break;
            default:
                return;
        }
        if (arguments != null) {
            int argIndex = -1;
            for (int i = 0; i < arguments.size(); i++) {
                if (arguments.get(i).equals(positionalArgumentNode)) {
                    argIndex = i;
                    break;
                }
            }
            if (argIndex < 0 || params.get().size() < argIndex + 1) {
                return;
            }
            ParameterSymbol parameterSymbol = params.get().get(argIndex);
            checkAndSetTypeResult(parameterSymbol.typeDescriptor());
        }
    }

    @Override
    public void visit(NamedArgumentNode namedArgumentNode) {
        namedArgumentNode.parent().accept(this);
        if (!resultFound) {
            return;
        }
        // Named args of error constructors map to the error's detail record fields.
        if (returnTypeSymbol.typeKind() == TypeDescKind.ERROR) {
            ErrorTypeSymbol errorTypeSymbol = (ErrorTypeSymbol) returnTypeSymbol;
            TypeSymbol detailType = CommonUtil.getRawType(errorTypeSymbol.detailTypeDescriptor());
            if (detailType.typeKind() != TypeDescKind.RECORD) {
                checkAndSetTypeDescResult(TypeDescKind.ANYDATA);
                return;
            }
            RecordTypeSymbol recordTypeSymbol = (RecordTypeSymbol) detailType;
            RecordFieldSymbol fieldSymbol = recordTypeSymbol.fieldDescriptors()
                    .get(namedArgumentNode.argumentName().name().text());
            if (fieldSymbol == null) {
                resetResult();
                return;
            }
            checkAndSetTypeResult(fieldSymbol.typeDescriptor());
            return;
        }
        Optional<List<ParameterSymbol>> params = getParameterSymbols();
        if (params.isEmpty()) {
            return;
        }
        params.get().stream().filter(param -> param.getName().isPresent()
                        && param.getName().get().equals(namedArgumentNode.argumentName().name().text()))
                .findFirst()
                .ifPresent(parameterSymbol -> this.checkAndSetTypeResult(parameterSymbol.typeDescriptor()));
    }

    /**
     * Returns the parameter symbols of the invocation whose type was captured in
     * {@code returnTypeSymbol} (a function type, or a class' init method).
     *
     * @return Optional parameter symbol list
     */
    private Optional<List<ParameterSymbol>> getParameterSymbols() {
        FunctionTypeSymbol functionTypeSymbol;
        if (returnTypeSymbol.typeKind() == TypeDescKind.FUNCTION) {
            functionTypeSymbol = (FunctionTypeSymbol) returnTypeSymbol;
        } else if (returnTypeSymbol.kind() == SymbolKind.CLASS) {
            Optional<MethodSymbol> methodSymbol = ((ClassSymbol) returnTypeSymbol).initMethod();
            if (methodSymbol.isEmpty()) {
                return Optional.empty();
            }
            functionTypeSymbol = methodSymbol.get().typeDescriptor();
        } else {
            return Optional.empty();
        }
        return functionTypeSymbol.params();
    }

    @Override
    public void visit(ParenthesizedArgList parenthesizedArgList) {
        parenthesizedArgList.parent().accept(this);
    }

    @Override
    public void visit(ExplicitNewExpressionNode explicitNewExpressionNode) {
        // Simplified from flatMap(..).stream().findFirst() (an Optional identity round-trip),
        // matching the style used in visit(ErrorConstructorExpressionNode).
        semanticModel.typeOf(explicitNewExpressionNode)
                .map(CommonUtil::getRawType)
                .ifPresent(this::checkAndSetTypeResult);
    }

    @Override
    public void visit(ImplicitNewExpressionNode implicitNewExpressionNode) {
        semanticModel.typeOf(implicitNewExpressionNode)
                .map(CommonUtil::getRawType)
                .ifPresent(this::checkAndSetTypeResult);
    }

    @Override
    public void visit(FunctionDefinitionNode node) {
        semanticModel.symbol(node)
                .flatMap(SymbolUtil::getTypeDescriptor)
                .ifPresent(this::checkAndSetTypeResult);
    }

    @Override
    public void visit(FunctionBodyBlockNode node) {
        node.parent().accept(this);
    }

    // Fixed: the annotation was duplicated (`@Override @Override`), which does not compile
    // since @Override is not a repeatable annotation.
    @Override
    public void visit(UnaryExpressionNode unaryExpressionNode) {
        semanticModel.typeOf(unaryExpressionNode).ifPresent(this::checkAndSetTypeResult);
        if (!resultFound) {
            checkAndSetTypeDescResult(TypeDescKind.BOOLEAN);
        }
    }

    @Override
    public void visit(IfElseStatementNode ifElseStatementNode) {
        checkAndSetTypeDescResult(TypeDescKind.BOOLEAN);
    }

    @Override
    public void visit(FailStatementNode failStatementNode) {
        checkAndSetTypeDescResult(TypeDescKind.ERROR);
    }

    @Override
    public void visit(WhileStatementNode whileStatementNode) {
        checkAndSetTypeDescResult(TypeDescKind.BOOLEAN);
    }

    @Override
    protected void visitSyntaxNode(Node node) {
        // Intentionally empty: unhandled nodes terminate the search without a result.
    }

    /**
     * Records a type-symbol result. Compilation-error types are stored but do not mark the
     * search as complete, allowing the walk to continue looking for a better answer.
     */
    private void checkAndSetTypeResult(TypeSymbol typeSymbol) {
        if (typeSymbol == null) {
            return;
        }
        this.returnTypeSymbol = typeSymbol;
        if (typeSymbol.typeKind() != TypeDescKind.COMPILATION_ERROR) {
            resultFound = true;
        }
    }

    /**
     * Records a type-descriptor-kind result (used when only the kind, not a full symbol,
     * is known) and marks the search as complete.
     */
    private void checkAndSetTypeDescResult(TypeDescKind typeDescKind) {
        if (typeDescKind == null) {
            return;
        }
        this.returnTypeSymbol = null;
        this.returnTypeDescKind = typeDescKind;
        this.resultFound = true;
    }

    private void resetResult() {
        this.returnTypeDescKind = null;
        this.returnTypeSymbol = null;
        this.resultFound = false;
    }

    /**
     * Get the type symbol of the return type of the function call expression provided to this
     * instance. Should be invoked after invoking {@code findTypeOf(...)}.
     *
     * @return Optional type symbol of the return type of function call expression
     */
    public Optional<TypeSymbol> getReturnTypeSymbol() {
        return Optional.ofNullable(returnTypeSymbol);
    }

    /**
     * Get the type descriptor kind of the return type of the function call expression. Should
     * be used when {@code getReturnTypeSymbol()} is empty.
     *
     * @return Return type descriptor kind
     */
    public Optional<TypeDescKind> getReturnTypeDescKind() {
        return Optional.ofNullable(returnTypeDescKind);
    }
}
I think this cleanup block should be moved into the `finally` clause of a `try { test.run(); } finally { ... }` construct, so the application cleanup is guaranteed to run and the original test failure is still propagated.
protected void runTest(RunnableWithException test) { Throwable testFailure = null; try { test.run(); } catch (Throwable t) { testFailure = t; } try { Deadline deadline = Deadline.now().plus(Duration.ofSeconds(10)); boolean isAnyJobRunning = yarnClient.getApplications().stream() .anyMatch(YarnTestBase::isApplicationRunning); while (deadline.hasTimeLeft() && isAnyJobRunning) { try { Thread.sleep(500); } catch (InterruptedException e) { Assert.fail("Should not happen"); } isAnyJobRunning = yarnClient.getApplications().stream() .anyMatch(YarnTestBase::isApplicationRunning); } if (isAnyJobRunning) { final List<String> runningApps = yarnClient.getApplications().stream() .filter(YarnTestBase::isApplicationRunning) .map(app -> "App " + app.getApplicationId() + " is in state " + app.getYarnApplicationState() + '.') .collect(Collectors.toList()); if (!runningApps.isEmpty()) { Assert.fail("There is at least one application on the cluster that is not finished." + runningApps); } } } catch (Throwable t) { throw new AssertionError(ExceptionUtils.firstOrSuppressed(t, testFailure)); } }
try {
protected void runTest(RunnableWithException test) throws Exception { try (final CleanupYarnApplication ignored = new CleanupYarnApplication()) { test.run(); } }
class YarnTestBase extends TestLogger { private static final Logger LOG = LoggerFactory.getLogger(YarnTestBase.class); protected static final PrintStream ORIGINAL_STDOUT = System.out; protected static final PrintStream ORIGINAL_STDERR = System.err; private static final InputStream ORIGINAL_STDIN = System.in; protected static final String TEST_CLUSTER_NAME_KEY = "flink-yarn-minicluster-name"; protected static final int NUM_NODEMANAGERS = 2; /** The tests are scanning for these strings in the final output. */ protected static final String[] PROHIBITED_STRINGS = { "Exception", "Started SelectChannelConnector@0.0.0.0:8081" }; /** These strings are white-listed, overriding the prohibited strings. */ protected static final String[] WHITELISTED_STRINGS = { "akka.remote.RemoteTransportExceptionNoStackTrace", "java.lang.InterruptedException", "Remote connection to [null] failed with java.net.ConnectException: Connection refused", "Remote connection to [null] failed with java.nio.channels.NotYetConnectedException", "java.io.IOException: Connection reset by peer", "java.util.concurrent.RejectedExecutionException: Worker has already been shutdown", "org.apache.flink.util.FlinkException: Stopping JobMaster", "org.apache.flink.util.FlinkException: JobManager is shutting down.", "lost the leadership." }; @ClassRule public static TemporaryFolder tmp = new TemporaryFolder(); protected static MiniYARNCluster yarnCluster = null; /** * Uberjar (fat jar) file of Flink. */ protected static File flinkUberjar; protected static final YarnConfiguration YARN_CONFIGURATION; /** * lib/ folder of the flink distribution. */ protected static File flinkLibFolder; /** * Temporary folder where Flink configurations will be kept for secure run. 
*/ protected static File tempConfPathForSecureRun = null; protected static File flinkShadedHadoopDir; protected static File yarnSiteXML = null; private YarnClient yarnClient = null; private static org.apache.flink.configuration.Configuration globalConfiguration; protected org.apache.flink.configuration.Configuration flinkConfiguration; static { YARN_CONFIGURATION = new YarnConfiguration(); YARN_CONFIGURATION.setInt(YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_MB, 32); YARN_CONFIGURATION.setInt(YarnConfiguration.RM_SCHEDULER_MAXIMUM_ALLOCATION_MB, 4096); YARN_CONFIGURATION.setBoolean(YarnConfiguration.RM_SCHEDULER_INCLUDE_PORT_IN_NODE_NAME, true); YARN_CONFIGURATION.setInt(YarnConfiguration.RM_AM_MAX_ATTEMPTS, 2); YARN_CONFIGURATION.setInt(YarnConfiguration.RM_MAX_COMPLETED_APPLICATIONS, 2); YARN_CONFIGURATION.setInt(YarnConfiguration.RM_SCHEDULER_MAXIMUM_ALLOCATION_VCORES, 4); YARN_CONFIGURATION.setInt(YarnConfiguration.DEBUG_NM_DELETE_DELAY_SEC, 3600); YARN_CONFIGURATION.setBoolean(YarnConfiguration.LOG_AGGREGATION_ENABLED, false); YARN_CONFIGURATION.setInt(YarnConfiguration.NM_VCORES, 666); YARN_CONFIGURATION.setInt(YarnConfiguration.RM_AM_EXPIRY_INTERVAL_MS, 20000); } public static void populateYarnSecureConfigurations(Configuration conf, String principal, String keytab) { conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos"); conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHORIZATION, "true"); conf.set(YarnConfiguration.RM_KEYTAB, keytab); conf.set(YarnConfiguration.RM_PRINCIPAL, principal); conf.set(YarnConfiguration.NM_KEYTAB, keytab); conf.set(YarnConfiguration.NM_PRINCIPAL, principal); conf.set(YarnConfiguration.RM_WEBAPP_SPNEGO_USER_NAME_KEY, principal); conf.set(YarnConfiguration.RM_WEBAPP_SPNEGO_KEYTAB_FILE_KEY, keytab); conf.set(YarnConfiguration.NM_WEBAPP_SPNEGO_USER_NAME_KEY, principal); conf.set(YarnConfiguration.NM_WEBAPP_SPNEGO_KEYTAB_FILE_KEY, keytab); conf.set("hadoop.security.auth_to_local", 
"RULE:[1:$1] RULE:[2:$1]"); } @Before public void setupYarnClient() { if (yarnClient == null) { yarnClient = YarnClient.createYarnClient(); yarnClient.init(getYarnConfiguration()); yarnClient.start(); } flinkConfiguration = new org.apache.flink.configuration.Configuration(globalConfiguration); } /** * Sleep a bit between the tests (we are re-using the YARN cluster for the tests). */ @After public void shutdownYarnClient() { yarnClient.stop(); } private static boolean isApplicationRunning(ApplicationReport app) { final YarnApplicationState yarnApplicationState = app.getYarnApplicationState(); return yarnApplicationState != YarnApplicationState.FINISHED && app.getYarnApplicationState() != YarnApplicationState.KILLED && app.getYarnApplicationState() != YarnApplicationState.FAILED; } @Nullable protected YarnClient getYarnClient() { return yarnClient; } protected static YarnConfiguration getYarnConfiguration() { return YARN_CONFIGURATION; } /** * Locate a file or directory. */ public static File findFile(String startAt, FilenameFilter fnf) { File root = new File(startAt); String[] files = root.list(); if (files == null) { return null; } for (String file : files) { File f = new File(startAt + File.separator + file); if (f.isDirectory()) { File r = findFile(f.getAbsolutePath(), fnf); if (r != null) { return r; } } else if (fnf.accept(f.getParentFile(), f.getName())) { return f; } } return null; } @Nonnull YarnClusterDescriptor createYarnClusterDescriptor(org.apache.flink.configuration.Configuration flinkConfiguration) { final YarnClusterDescriptor yarnClusterDescriptor = new YarnClusterDescriptor( flinkConfiguration, YARN_CONFIGURATION, CliFrontend.getConfigurationDirectoryFromEnv(), yarnClient, true); yarnClusterDescriptor.setLocalJarPath(new Path(flinkUberjar.toURI())); yarnClusterDescriptor.addShipFiles(Collections.singletonList(flinkLibFolder)); return yarnClusterDescriptor; } /** * Filter to find root dir of the flink-yarn dist. 
*/ public static class RootDirFilenameFilter implements FilenameFilter { @Override public boolean accept(File dir, String name) { return name.startsWith("flink-dist") && name.endsWith(".jar") && dir.toString().contains("/lib"); } } /** * A simple {@link FilenameFilter} that only accepts files if their name contains every string in the array passed * to the constructor. */ public static class ContainsName implements FilenameFilter { private String[] names; private String excludeInPath = null; /** * @param names which have to be included in the filename. */ public ContainsName(String[] names) { this.names = names; } public ContainsName(String[] names, String excludeInPath) { this.names = names; this.excludeInPath = excludeInPath; } @Override public boolean accept(File dir, String name) { if (excludeInPath == null) { for (String n: names) { if (!name.contains(n)) { return false; } } return true; } else { for (String n: names) { if (!name.contains(n)) { return false; } } return !dir.toString().contains(excludeInPath); } } } public static void writeYarnSiteConfigXML(Configuration yarnConf, File targetFolder) throws IOException { yarnSiteXML = new File(targetFolder, "/yarn-site.xml"); try (FileWriter writer = new FileWriter(yarnSiteXML)) { yarnConf.writeXml(writer); writer.flush(); } } /** * This method checks the written TaskManager and JobManager log files * for exceptions. * * <p>WARN: Please make sure the tool doesn't find old logfiles from previous test runs. * So always run "mvn clean" before running the tests here. 
* */ public static void ensureNoProhibitedStringInLogFiles(final String[] prohibited, final String[] whitelisted) { File cwd = new File("target/" + YARN_CONFIGURATION.get(TEST_CLUSTER_NAME_KEY)); Assert.assertTrue("Expecting directory " + cwd.getAbsolutePath() + " to exist", cwd.exists()); Assert.assertTrue("Expecting directory " + cwd.getAbsolutePath() + " to be a directory", cwd.isDirectory()); List<String> prohibitedExcerpts = new ArrayList<>(); File foundFile = findFile(cwd.getAbsolutePath(), new FilenameFilter() { @Override public boolean accept(File dir, String name) { File f = new File(dir.getAbsolutePath() + "/" + name); try { BufferingScanner scanner = new BufferingScanner(new Scanner(f), 10); while (scanner.hasNextLine()) { final String lineFromFile = scanner.nextLine(); for (String aProhibited : prohibited) { if (lineFromFile.contains(aProhibited)) { boolean whitelistedFound = false; for (String white : whitelisted) { if (lineFromFile.contains(white)) { whitelistedFound = true; break; } } if (!whitelistedFound) { Marker fatal = MarkerFactory.getMarker("FATAL"); LOG.error(fatal, "Prohibited String '{}' in line '{}'", aProhibited, lineFromFile); StringBuilder logExcerpt = new StringBuilder(); logExcerpt.append(System.lineSeparator()); for (String previousLine : scanner.getPreviousLines()) { logExcerpt.append(previousLine); logExcerpt.append(System.lineSeparator()); } logExcerpt.append(lineFromFile); logExcerpt.append(System.lineSeparator()); while (scanner.hasNextLine()) { String line = scanner.nextLine(); logExcerpt.append(line); logExcerpt.append(System.lineSeparator()); if (line.isEmpty() || (!Character.isWhitespace(line.charAt(0)) && !line.startsWith("Caused by"))) { for (int x = 0; x < 10 && scanner.hasNextLine(); x++) { logExcerpt.append(scanner.nextLine()); logExcerpt.append(System.lineSeparator()); } break; } } prohibitedExcerpts.add(logExcerpt.toString()); return true; } } } } } catch (FileNotFoundException e) { LOG.warn("Unable to locate file: " 
+ e.getMessage() + " file: " + f.getAbsolutePath()); } return false; } }); if (foundFile != null) { Scanner scanner = null; try { scanner = new Scanner(foundFile); } catch (FileNotFoundException e) { Assert.fail("Unable to locate file: " + e.getMessage() + " file: " + foundFile.getAbsolutePath()); } LOG.warn("Found a file with a prohibited string. Printing contents:"); while (scanner.hasNextLine()) { LOG.warn("LINE: " + scanner.nextLine()); } Assert.fail( "Found a file " + foundFile + " with a prohibited string (one of " + Arrays.toString(prohibited) + "). " + "Excerpts:" + System.lineSeparator() + prohibitedExcerpts); } } public static boolean verifyStringsInNamedLogFiles( final String[] mustHave, final String fileName) { List<String> mustHaveList = Arrays.asList(mustHave); File cwd = new File("target/" + YARN_CONFIGURATION.get(TEST_CLUSTER_NAME_KEY)); if (!cwd.exists() || !cwd.isDirectory()) { return false; } File foundFile = findFile(cwd.getAbsolutePath(), new FilenameFilter() { @Override public boolean accept(File dir, String name) { if (fileName != null && !name.equals(fileName)) { return false; } File f = new File(dir.getAbsolutePath() + "/" + name); LOG.info("Searching in {}", f.getAbsolutePath()); try { Set<String> foundSet = new HashSet<>(mustHave.length); Scanner scanner = new Scanner(f); while (scanner.hasNextLine()) { final String lineFromFile = scanner.nextLine(); for (String str : mustHave) { if (lineFromFile.contains(str)) { foundSet.add(str); } } if (foundSet.containsAll(mustHaveList)) { return true; } } } catch (FileNotFoundException e) { LOG.warn("Unable to locate file: " + e.getMessage() + " file: " + f.getAbsolutePath()); } return false; } }); if (foundFile != null) { LOG.info("Found string {} in {}.", Arrays.toString(mustHave), foundFile.getAbsolutePath()); return true; } else { return false; } } public static void sleep(int time) { try { Thread.sleep(time); } catch (InterruptedException e) { LOG.warn("Interruped", e); } } public static int 
getRunningContainers() { int count = 0; for (int nmId = 0; nmId < NUM_NODEMANAGERS; nmId++) { NodeManager nm = yarnCluster.getNodeManager(nmId); ConcurrentMap<ContainerId, Container> containers = nm.getNMContext().getContainers(); count += containers.size(); } return count; } public static void startYARNSecureMode(YarnConfiguration conf, String principal, String keytab) { start(conf, principal, keytab); } public static void startYARNWithConfig(YarnConfiguration conf) { start(conf, null, null); } private static void start(YarnConfiguration conf, String principal, String keytab) { File homeDir = null; try { homeDir = tmp.newFolder(); } catch (IOException e) { e.printStackTrace(); Assert.fail(e.getMessage()); } System.setProperty("user.home", homeDir.getAbsolutePath()); String uberjarStartLoc = ".."; LOG.info("Trying to locate uberjar in {}", new File(uberjarStartLoc).getAbsolutePath()); flinkUberjar = findFile(uberjarStartLoc, new RootDirFilenameFilter()); Assert.assertNotNull("Flink uberjar not found", flinkUberjar); String flinkDistRootDir = flinkUberjar.getParentFile().getParent(); flinkLibFolder = flinkUberjar.getParentFile(); flinkShadedHadoopDir = Paths.get("target/shaded-hadoop").toFile(); Assert.assertNotNull("Flink flinkLibFolder not found", flinkLibFolder); Assert.assertTrue("lib folder not found", flinkLibFolder.exists()); Assert.assertTrue("lib folder not found", flinkLibFolder.isDirectory()); if (!flinkUberjar.exists()) { Assert.fail("Unable to locate yarn-uberjar.jar"); } try { LOG.info("Starting up MiniYARNCluster"); if (yarnCluster == null) { final String testName = conf.get(YarnTestBase.TEST_CLUSTER_NAME_KEY); yarnCluster = new MiniYARNCluster( testName == null ? 
"YarnTest_" + UUID.randomUUID() : testName, NUM_NODEMANAGERS, 1, 1); yarnCluster.init(conf); yarnCluster.start(); } Map<String, String> map = new HashMap<String, String>(System.getenv()); File flinkConfDirPath = findFile(flinkDistRootDir, new ContainsName(new String[]{"flink-conf.yaml"})); Assert.assertNotNull(flinkConfDirPath); final String confDirPath = flinkConfDirPath.getParentFile().getAbsolutePath(); globalConfiguration = GlobalConfiguration.loadConfiguration(confDirPath); tempConfPathForSecureRun = tmp.newFolder("conf"); FileUtils.copyDirectory(new File(confDirPath), tempConfPathForSecureRun); BootstrapTools.writeConfiguration( globalConfiguration, new File(tempConfPathForSecureRun, "flink-conf.yaml")); String configDir = tempConfPathForSecureRun.getAbsolutePath(); LOG.info("Temporary Flink configuration directory to be used for secure test: {}", configDir); Assert.assertNotNull(configDir); map.put(ConfigConstants.ENV_FLINK_CONF_DIR, configDir); File targetTestClassesFolder = new File("target/test-classes"); writeYarnSiteConfigXML(conf, targetTestClassesFolder); map.put("IN_TESTS", "yes we are in tests"); map.put("YARN_CONF_DIR", targetTestClassesFolder.getAbsolutePath()); TestBaseUtils.setEnv(map); Assert.assertTrue(yarnCluster.getServiceState() == Service.STATE.STARTED); while (!yarnCluster.waitForNodeManagersToConnect(500)) { LOG.info("Waiting for Nodemanagers to connect"); } } catch (Exception ex) { ex.printStackTrace(); LOG.error("setup failure", ex); Assert.fail(); } } /** * Default @BeforeClass impl. Overwrite this for passing a different configuration */ @BeforeClass public static void setup() throws Exception { startYARNWithConfig(YARN_CONFIGURATION); } protected static ByteArrayOutputStream outContent; protected static ByteArrayOutputStream errContent; enum RunTypes { YARN_SESSION, CLI_FRONTEND } /** * This method returns once the "startedAfterString" has been seen. 
*/ protected Runner startWithArgs(String[] args, String startedAfterString, RunTypes type) throws IOException { LOG.info("Running with args {}", Arrays.toString(args)); outContent = new ByteArrayOutputStream(); errContent = new ByteArrayOutputStream(); PipedOutputStream out = new PipedOutputStream(); PipedInputStream in = new PipedInputStream(out); PrintStream stdinPrintStream = new PrintStream(out); System.setOut(new PrintStream(outContent)); System.setErr(new PrintStream(errContent)); System.setIn(in); final int startTimeoutSeconds = 60; Runner runner = new Runner( args, flinkConfiguration, CliFrontend.getConfigurationDirectoryFromEnv(), type, 0, stdinPrintStream); runner.setName("Frontend (CLI/YARN Client) runner thread (startWithArgs())."); runner.start(); for (int second = 0; second < startTimeoutSeconds; second++) { sleep(1000); if (outContent.toString().contains(startedAfterString) || errContent.toString().contains(startedAfterString)) { LOG.info("Found expected output in redirected streams"); return runner; } if (!runner.isAlive()) { resetStreamsAndSendOutput(); if (runner.getRunnerError() != null) { throw new RuntimeException("Runner failed with exception.", runner.getRunnerError()); } Assert.fail("Runner thread died before the test was finished."); } } resetStreamsAndSendOutput(); Assert.fail("During the timeout period of " + startTimeoutSeconds + " seconds the " + "expected string did not show up"); return null; } protected void runWithArgs(String[] args, String terminateAfterString, String[] failOnStrings, RunTypes type, int returnCode) throws IOException { runWithArgs(args, terminateAfterString, failOnStrings, type, returnCode, false); } /** * The test has been passed once the "terminateAfterString" has been seen. * @param args Command line arguments for the runner * @param terminateAfterString the runner is searching the stdout and stderr for this string. 
as soon as it appears, the test has passed * @param failOnPatterns The runner is searching stdout and stderr for the pattern (regexp) specified here. If one appears, the test has failed * @param type Set the type of the runner * @param expectedReturnValue Expected return code from the runner. * @param checkLogForTerminateString If true, the runner checks also the log4j logger for the terminate string */ protected void runWithArgs(String[] args, String terminateAfterString, String[] failOnPatterns, RunTypes type, int expectedReturnValue, boolean checkLogForTerminateString) throws IOException { LOG.info("Running with args {}", Arrays.toString(args)); outContent = new ByteArrayOutputStream(); errContent = new ByteArrayOutputStream(); PipedOutputStream out = new PipedOutputStream(); PipedInputStream in = new PipedInputStream(out); PrintStream stdinPrintStream = new PrintStream(out); System.setOut(new PrintStream(outContent)); System.setErr(new PrintStream(errContent)); System.setIn(in); final int startTimeoutSeconds = 180; final long deadline = System.currentTimeMillis() + (startTimeoutSeconds * 1000); Runner runner = new Runner( args, flinkConfiguration, CliFrontend.getConfigurationDirectoryFromEnv(), type, expectedReturnValue, stdinPrintStream); runner.start(); boolean expectedStringSeen = false; boolean testPassedFromLog4j = false; long shutdownTimeout = 30000L; do { sleep(1000); String outContentString = outContent.toString(); String errContentString = errContent.toString(); if (failOnPatterns != null) { for (String failOnString : failOnPatterns) { Pattern pattern = Pattern.compile(failOnString); if (pattern.matcher(outContentString).find() || pattern.matcher(errContentString).find()) { LOG.warn("Failing test. 
Output contained illegal string '" + failOnString + "'"); resetStreamsAndSendOutput(); runner.sendStop(); try { runner.join(shutdownTimeout); } catch (InterruptedException e) { LOG.warn("Interrupted while stopping runner", e); } Assert.fail("Output contained illegal string '" + failOnString + "'"); } } } if (checkLogForTerminateString) { LoggingEvent matchedEvent = UtilsTest.getEventContainingString(terminateAfterString); if (matchedEvent != null) { testPassedFromLog4j = true; LOG.info("Found expected output in logging event {}", matchedEvent); } } if (outContentString.contains(terminateAfterString) || errContentString.contains(terminateAfterString) || testPassedFromLog4j) { expectedStringSeen = true; LOG.info("Found expected output in redirected streams"); LOG.info("RunWithArgs: request runner to stop"); runner.sendStop(); try { runner.join(shutdownTimeout); } catch (InterruptedException e) { LOG.warn("Interrupted while stopping runner", e); } LOG.warn("RunWithArgs runner stopped."); } else { if (!runner.isAlive()) { break; } } } while (runner.getRunnerError() == null && !expectedStringSeen && System.currentTimeMillis() < deadline); resetStreamsAndSendOutput(); if (runner.getRunnerError() != null) { throw new RuntimeException("Runner failed", runner.getRunnerError()); } Assert.assertTrue("During the timeout period of " + startTimeoutSeconds + " seconds the " + "expected string \"" + terminateAfterString + "\" did not show up.", expectedStringSeen); LOG.info("Test was successful"); } protected static void resetStreamsAndSendOutput() { System.setOut(ORIGINAL_STDOUT); System.setErr(ORIGINAL_STDERR); System.setIn(ORIGINAL_STDIN); LOG.info("Sending stdout content through logger: \n\n{}\n\n", outContent.toString()); LOG.info("Sending stderr content through logger: \n\n{}\n\n", errContent.toString()); } /** * Utility class to run yarn jobs. 
*/ protected static class Runner extends Thread { private final String[] args; private final org.apache.flink.configuration.Configuration configuration; private final String configurationDirectory; private final int expectedReturnValue; private final PrintStream stdinPrintStream; private RunTypes type; private FlinkYarnSessionCli yCli; private Throwable runnerError; public Runner( String[] args, org.apache.flink.configuration.Configuration configuration, String configurationDirectory, RunTypes type, int expectedReturnValue, PrintStream stdinPrintStream) { this.args = args; this.configuration = Preconditions.checkNotNull(configuration); this.configurationDirectory = Preconditions.checkNotNull(configurationDirectory); this.type = type; this.expectedReturnValue = expectedReturnValue; this.stdinPrintStream = Preconditions.checkNotNull(stdinPrintStream); } @Override public void run() { try { int returnValue; switch (type) { case YARN_SESSION: yCli = new FlinkYarnSessionCli( configuration, configurationDirectory, "", "", true); returnValue = yCli.run(args); break; case CLI_FRONTEND: try { CliFrontend cli = new CliFrontend( configuration, CliFrontend.loadCustomCommandLines(configuration, configurationDirectory)); returnValue = cli.parseParameters(args); } catch (Exception e) { throw new RuntimeException("Failed to execute the following args with CliFrontend: " + Arrays.toString(args), e); } break; default: throw new RuntimeException("Unknown type " + type); } if (returnValue != this.expectedReturnValue) { Assert.fail("The YARN session returned with unexpected value=" + returnValue + " expected=" + expectedReturnValue); } } catch (Throwable t) { LOG.info("Runner stopped with exception", t); this.runnerError = t; } } /** Stops the Yarn session. 
*/ public void sendStop() { stdinPrintStream.println("stop"); } public Throwable getRunnerError() { return runnerError; } } @AfterClass public static void teardown() throws Exception { LOG.info("Stopping MiniYarn Cluster"); yarnCluster.stop(); Map<String, String> map = new HashMap<>(System.getenv()); map.remove(ConfigConstants.ENV_FLINK_CONF_DIR); map.remove("YARN_CONF_DIR"); map.remove("IN_TESTS"); TestBaseUtils.setEnv(map); if (tempConfPathForSecureRun != null) { FileUtil.fullyDelete(tempConfPathForSecureRun); tempConfPathForSecureRun = null; } if (yarnSiteXML != null) { yarnSiteXML.delete(); } if (isOnTravis()) { File target = new File("../target" + YARN_CONFIGURATION.get(TEST_CLUSTER_NAME_KEY)); if (!target.mkdirs()) { LOG.warn("Error creating dirs to {}", target); } File src = tmp.getRoot(); LOG.info("copying the final files from {} to {}", src.getAbsolutePath(), target.getAbsolutePath()); try { FileUtils.copyDirectoryToDirectory(src, target); } catch (IOException e) { LOG.warn("Error copying the final files from {} to {}: msg: {}", src.getAbsolutePath(), target.getAbsolutePath(), e.getMessage(), e); } } } public static boolean isOnTravis() { return System.getenv("TRAVIS") != null && System.getenv("TRAVIS").equals("true"); } /** * Wrapper around a {@link Scanner} that buffers the last N lines read. */ private static class BufferingScanner { private final Scanner scanner; private final int numLinesBuffered; private final List<String> bufferedLines; BufferingScanner(Scanner scanner, int numLinesBuffered) { this.scanner = scanner; this.numLinesBuffered = numLinesBuffered; this.bufferedLines = new ArrayList<>(numLinesBuffered); } public boolean hasNextLine() { return scanner.hasNextLine(); } public String nextLine() { if (bufferedLines.size() == numLinesBuffered) { bufferedLines.remove(0); } String line = scanner.nextLine(); bufferedLines.add(line); return line; } public List<String> getPreviousLines() { return new ArrayList<>(bufferedLines); } } }
/**
 * Base class for YARN integration tests. Starts a {@link MiniYARNCluster} once per test class,
 * provides helpers to launch Flink runners (YARN session CLI or the generic CLI frontend)
 * against it with redirected std streams, and scans the produced log files for prohibited
 * strings after the tests have run.
 */
class YarnTestBase extends TestLogger {
    private static final Logger LOG = LoggerFactory.getLogger(YarnTestBase.class);
    // Streams as they were before a test redirected System.out/err/in;
    // restored by resetStreamsAndSendOutput().
    protected static final PrintStream ORIGINAL_STDOUT = System.out;
    protected static final PrintStream ORIGINAL_STDERR = System.err;
    private static final InputStream ORIGINAL_STDIN = System.in;
    // Key under which the mini cluster's name is stored in the YARN configuration;
    // also used to locate the cluster's working directory under target/.
    protected static final String TEST_CLUSTER_NAME_KEY = "flink-yarn-minicluster-name";
    protected static final int NUM_NODEMANAGERS = 2;
    /** The tests are scanning for these strings in the final output. */
    protected static final String[] PROHIBITED_STRINGS = {
        "Exception",
        "Started SelectChannelConnector@0.0.0.0:8081"
    };
    /** These strings are white-listed, overriding the prohibited strings. */
    protected static final String[] WHITELISTED_STRINGS = {
        "akka.remote.RemoteTransportExceptionNoStackTrace",
        "java.lang.InterruptedException",
        "Remote connection to [null] failed with java.net.ConnectException: Connection refused",
        "Remote connection to [null] failed with java.nio.channels.NotYetConnectedException",
        "java.io.IOException: Connection reset by peer",
        "java.util.concurrent.RejectedExecutionException: Worker has already been shutdown",
        "org.apache.flink.util.FlinkException: Stopping JobMaster",
        "org.apache.flink.util.FlinkException: JobManager is shutting down.",
        "lost the leadership."
    };
    @ClassRule
    public static TemporaryFolder tmp = new TemporaryFolder();
    // Lazily created in start(); shared across all tests of the class.
    protected static MiniYARNCluster yarnCluster = null;
    /** Uberjar (fat jar) file of Flink. */
    protected static File flinkUberjar;
    protected static final YarnConfiguration YARN_CONFIGURATION;
    /** lib/ folder of the flink distribution. */
    protected static File flinkLibFolder;
    /** Temporary folder where Flink configurations will be kept for secure run. */
    protected static File tempConfPathForSecureRun = null;
    protected static File flinkShadedHadoopDir;
    // yarn-site.xml written into target/test-classes; deleted in teardown().
    protected static File yarnSiteXML = null;
    // Per-test-instance client, created in setupYarnClient(), stopped in shutdownYarnClient().
    private YarnClient yarnClient = null;
    // Configuration loaded from the distribution's flink-conf.yaml in start();
    // each test gets a mutable copy in flinkConfiguration.
    private static org.apache.flink.configuration.Configuration globalConfiguration;
    protected org.apache.flink.configuration.Configuration flinkConfiguration;

    static {
        // Shared configuration for the mini cluster used by all subclasses.
        YARN_CONFIGURATION = new YarnConfiguration();
        YARN_CONFIGURATION.setInt(YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_MB, 32);
        YARN_CONFIGURATION.setInt(YarnConfiguration.RM_SCHEDULER_MAXIMUM_ALLOCATION_MB, 4096);
        YARN_CONFIGURATION.setBoolean(YarnConfiguration.RM_SCHEDULER_INCLUDE_PORT_IN_NODE_NAME, true);
        YARN_CONFIGURATION.setInt(YarnConfiguration.RM_AM_MAX_ATTEMPTS, 2);
        YARN_CONFIGURATION.setInt(YarnConfiguration.RM_MAX_COMPLETED_APPLICATIONS, 2);
        YARN_CONFIGURATION.setInt(YarnConfiguration.RM_SCHEDULER_MAXIMUM_ALLOCATION_VCORES, 4);
        // Keep container directories around for an hour so logs survive for inspection.
        YARN_CONFIGURATION.setInt(YarnConfiguration.DEBUG_NM_DELETE_DELAY_SEC, 3600);
        YARN_CONFIGURATION.setBoolean(YarnConfiguration.LOG_AGGREGATION_ENABLED, false);
        // NOTE(review): 666 vcores per NM is deliberately unrealistic — presumably so vcore
        // scheduling never constrains the tests; confirm intent before changing.
        YARN_CONFIGURATION.setInt(YarnConfiguration.NM_VCORES, 666);
        YARN_CONFIGURATION.setInt(YarnConfiguration.RM_AM_EXPIRY_INTERVAL_MS, 20000);
    }

    /**
     * Enables Kerberos on the given Hadoop configuration, pointing RM and NM
     * (including their SPNEGO web endpoints) at the given principal and keytab.
     */
    public static void populateYarnSecureConfigurations(Configuration conf, String principal, String keytab) {
        conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
        conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHORIZATION, "true");
        conf.set(YarnConfiguration.RM_KEYTAB, keytab);
        conf.set(YarnConfiguration.RM_PRINCIPAL, principal);
        conf.set(YarnConfiguration.NM_KEYTAB, keytab);
        conf.set(YarnConfiguration.NM_PRINCIPAL, principal);
        conf.set(YarnConfiguration.RM_WEBAPP_SPNEGO_USER_NAME_KEY, principal);
        conf.set(YarnConfiguration.RM_WEBAPP_SPNEGO_KEYTAB_FILE_KEY, keytab);
        conf.set(YarnConfiguration.NM_WEBAPP_SPNEGO_USER_NAME_KEY, principal);
        conf.set(YarnConfiguration.NM_WEBAPP_SPNEGO_KEYTAB_FILE_KEY, keytab);
        conf.set("hadoop.security.auth_to_local", "RULE:[1:$1] RULE:[2:$1]");
    }

    /** Creates/starts the per-test YarnClient and snapshots the global Flink configuration. */
    @Before
    public void setupYarnClient() {
        if (yarnClient == null) {
            yarnClient = YarnClient.createYarnClient();
            yarnClient.init(getYarnConfiguration());
            yarnClient.start();
        }
        flinkConfiguration = new org.apache.flink.configuration.Configuration(globalConfiguration);
    }

    /**
     * Sleep a bit between the tests (we are re-using the YARN cluster for the tests).
     * NOTE(review): the doc sentence predates the body — this only stops the YarnClient;
     * confirm whether an inter-test sleep was ever intended here.
     */
    @After
    public void shutdownYarnClient() {
        yarnClient.stop();
    }

    /**
     * AutoCloseable guard that, on close, waits up to ~10s for all YARN applications to
     * leave running state and fails the test if any are still running.
     */
    private class CleanupYarnApplication implements AutoCloseable {
        @Override
        public void close() throws Exception {
            Deadline deadline = Deadline.now().plus(Duration.ofSeconds(10));
            boolean isAnyJobRunning = yarnClient.getApplications().stream()
                .anyMatch(YarnTestBase::isApplicationRunning);
            while (deadline.hasTimeLeft() && isAnyJobRunning) {
                try {
                    Thread.sleep(500);
                } catch (InterruptedException e) {
                    Assert.fail("Should not happen");
                }
                isAnyJobRunning = yarnClient.getApplications().stream()
                    .anyMatch(YarnTestBase::isApplicationRunning);
            }
            if (isAnyJobRunning) {
                final List<String> runningApps = yarnClient.getApplications().stream()
                    .filter(YarnTestBase::isApplicationRunning)
                    .map(app -> "App " + app.getApplicationId() + " is in state " + app.getYarnApplicationState() + '.')
                    .collect(Collectors.toList());
                if (!runningApps.isEmpty()) {
                    Assert.fail("There is at least one application on the cluster that is not finished." + runningApps);
                }
            }
        }
    }

    /** True unless the application reached a terminal state (FINISHED, KILLED or FAILED). */
    private static boolean isApplicationRunning(ApplicationReport app) {
        final YarnApplicationState yarnApplicationState = app.getYarnApplicationState();
        return yarnApplicationState != YarnApplicationState.FINISHED
            && app.getYarnApplicationState() != YarnApplicationState.KILLED
            && app.getYarnApplicationState() != YarnApplicationState.FAILED;
    }

    @Nullable
    protected YarnClient getYarnClient() {
        return yarnClient;
    }

    protected static YarnConfiguration getYarnConfiguration() {
        return YARN_CONFIGURATION;
    }

    /**
     * Locate a file or directory.
     * Recursively walks {@code startAt} depth-first and returns the first file accepted by
     * {@code fnf}, or {@code null} if none is found (or the start path is not listable).
     */
    public static File findFile(String startAt, FilenameFilter fnf) {
        File root = new File(startAt);
        String[] files = root.list();
        if (files == null) {
            return null;
        }
        for (String file : files) {
            File f = new File(startAt + File.separator + file);
            if (f.isDirectory()) {
                File r = findFile(f.getAbsolutePath(), fnf);
                if (r != null) {
                    return r;
                }
            } else if (fnf.accept(f.getParentFile(), f.getName())) {
                return f;
            }
        }
        return null;
    }

    /**
     * Builds a cluster descriptor wired to this test's YarnClient, shipping the Flink
     * uberjar and the lib/ folder of the distribution.
     */
    @Nonnull
    YarnClusterDescriptor createYarnClusterDescriptor(org.apache.flink.configuration.Configuration flinkConfiguration) {
        final YarnClusterDescriptor yarnClusterDescriptor = new YarnClusterDescriptor(
            flinkConfiguration,
            YARN_CONFIGURATION,
            CliFrontend.getConfigurationDirectoryFromEnv(),
            yarnClient,
            true);
        yarnClusterDescriptor.setLocalJarPath(new Path(flinkUberjar.toURI()));
        yarnClusterDescriptor.addShipFiles(Collections.singletonList(flinkLibFolder));
        return yarnClusterDescriptor;
    }

    /** Filter to find root dir of the flink-yarn dist. */
    public static class RootDirFilenameFilter implements FilenameFilter {
        @Override
        public boolean accept(File dir, String name) {
            return name.startsWith("flink-dist") && name.endsWith(".jar") && dir.toString().contains("/lib");
        }
    }

    /**
     * A simple {@link FilenameFilter} that only accepts files if their name contains every string in the array passed
     * to the constructor.
     */
    public static class ContainsName implements FilenameFilter {
        private String[] names;
        // When non-null, any file whose directory path contains this string is rejected.
        private String excludeInPath = null;

        /**
         * @param names which have to be included in the filename.
         */
        public ContainsName(String[] names) {
            this.names = names;
        }

        public ContainsName(String[] names, String excludeInPath) {
            this.names = names;
            this.excludeInPath = excludeInPath;
        }

        @Override
        public boolean accept(File dir, String name) {
            if (excludeInPath == null) {
                for (String n : names) {
                    if (!name.contains(n)) {
                        return false;
                    }
                }
                return true;
            } else {
                for (String n : names) {
                    if (!name.contains(n)) {
                        return false;
                    }
                }
                return !dir.toString().contains(excludeInPath);
            }
        }
    }

    /** Serializes the given YARN configuration to {@code targetFolder/yarn-site.xml}. */
    public static void writeYarnSiteConfigXML(Configuration yarnConf, File targetFolder) throws IOException {
        yarnSiteXML = new File(targetFolder, "/yarn-site.xml");
        try (FileWriter writer = new FileWriter(yarnSiteXML)) {
            yarnConf.writeXml(writer);
            writer.flush();
        }
    }

    /**
     * This method checks the written TaskManager and JobManager log files
     * for exceptions.
     *
     * <p>WARN: Please make sure the tool doesn't find old logfiles from previous test runs.
     * So always run "mvn clean" before running the tests here.
     */
    public static void ensureNoProhibitedStringInLogFiles(final String[] prohibited, final String[] whitelisted) {
        File cwd = new File("target/" + YARN_CONFIGURATION.get(TEST_CLUSTER_NAME_KEY));
        Assert.assertTrue("Expecting directory " + cwd.getAbsolutePath() + " to exist", cwd.exists());
        Assert.assertTrue("Expecting directory " + cwd.getAbsolutePath() + " to be a directory", cwd.isDirectory());
        List<String> prohibitedExcerpts = new ArrayList<>();
        File foundFile = findFile(cwd.getAbsolutePath(), new FilenameFilter() {
            @Override
            public boolean accept(File dir, String name) {
                File f = new File(dir.getAbsolutePath() + "/" + name);
                try {
                    // Buffer the last 10 lines so the excerpt can show context before the hit.
                    BufferingScanner scanner = new BufferingScanner(new Scanner(f), 10);
                    while (scanner.hasNextLine()) {
                        final String lineFromFile = scanner.nextLine();
                        for (String aProhibited : prohibited) {
                            if (lineFromFile.contains(aProhibited)) {
                                // A whitelisted string on the same line overrides the hit.
                                boolean whitelistedFound = false;
                                for (String white : whitelisted) {
                                    if (lineFromFile.contains(white)) {
                                        whitelistedFound = true;
                                        break;
                                    }
                                }
                                if (!whitelistedFound) {
                                    Marker fatal = MarkerFactory.getMarker("FATAL");
                                    LOG.error(fatal, "Prohibited String '{}' in line '{}'", aProhibited, lineFromFile);
                                    // Build an excerpt: buffered context, the hit, and the rest
                                    // of the (multi-line) stack trace plus up to 10 extra lines.
                                    StringBuilder logExcerpt = new StringBuilder();
                                    logExcerpt.append(System.lineSeparator());
                                    for (String previousLine : scanner.getPreviousLines()) {
                                        logExcerpt.append(previousLine);
                                        logExcerpt.append(System.lineSeparator());
                                    }
                                    logExcerpt.append(lineFromFile);
                                    logExcerpt.append(System.lineSeparator());
                                    while (scanner.hasNextLine()) {
                                        String line = scanner.nextLine();
                                        logExcerpt.append(line);
                                        logExcerpt.append(System.lineSeparator());
                                        // Stop once we leave the indented/"Caused by" trace body.
                                        if (line.isEmpty()
                                                || (!Character.isWhitespace(line.charAt(0))
                                                    && !line.startsWith("Caused by"))) {
                                            for (int x = 0; x < 10 && scanner.hasNextLine(); x++) {
                                                logExcerpt.append(scanner.nextLine());
                                                logExcerpt.append(System.lineSeparator());
                                            }
                                            break;
                                        }
                                    }
                                    prohibitedExcerpts.add(logExcerpt.toString());
                                    return true;
                                }
                            }
                        }
                    }
                } catch (FileNotFoundException e) {
                    LOG.warn("Unable to locate file: " + e.getMessage() + " file: " + f.getAbsolutePath());
                }
                // NOTE(review): the BufferingScanner's underlying Scanner is never closed here;
                // likely tolerable in a test scan, but worth fixing in a code change.
                return false;
            }
        });
        if (foundFile != null) {
            Scanner scanner = null;
            try {
                scanner = new Scanner(foundFile);
            } catch (FileNotFoundException e) {
                Assert.fail("Unable to locate file: " + e.getMessage() + " file: " + foundFile.getAbsolutePath());
            }
            LOG.warn("Found a file with a prohibited string. Printing contents:");
            while (scanner.hasNextLine()) {
                LOG.warn("LINE: " + scanner.nextLine());
            }
            // NOTE(review): this Scanner is also never closed before the fail below.
            Assert.fail(
                "Found a file " + foundFile + " with a prohibited string (one of " + Arrays.toString(prohibited) + "). "
                    + "Excerpts:" + System.lineSeparator() + prohibitedExcerpts);
        }
    }

    /**
     * Returns true iff a single log file (optionally restricted to {@code fileName})
     * under the mini cluster's target directory contains ALL of the given strings.
     */
    public static boolean verifyStringsInNamedLogFiles(
            final String[] mustHave,
            final String fileName) {
        List<String> mustHaveList = Arrays.asList(mustHave);
        File cwd = new File("target/" + YARN_CONFIGURATION.get(TEST_CLUSTER_NAME_KEY));
        if (!cwd.exists() || !cwd.isDirectory()) {
            return false;
        }
        File foundFile = findFile(cwd.getAbsolutePath(), new FilenameFilter() {
            @Override
            public boolean accept(File dir, String name) {
                if (fileName != null && !name.equals(fileName)) {
                    return false;
                }
                File f = new File(dir.getAbsolutePath() + "/" + name);
                LOG.info("Searching in {}", f.getAbsolutePath());
                try (Scanner scanner = new Scanner(f)) {
                    Set<String> foundSet = new HashSet<>(mustHave.length);
                    while (scanner.hasNextLine()) {
                        final String lineFromFile = scanner.nextLine();
                        for (String str : mustHave) {
                            if (lineFromFile.contains(str)) {
                                foundSet.add(str);
                            }
                        }
                        if (foundSet.containsAll(mustHaveList)) {
                            return true;
                        }
                    }
                } catch (FileNotFoundException e) {
                    LOG.warn("Unable to locate file: " + e.getMessage() + " file: " + f.getAbsolutePath());
                }
                return false;
            }
        });
        if (foundFile != null) {
            LOG.info("Found string {} in {}.", Arrays.toString(mustHave), foundFile.getAbsolutePath());
            return true;
        } else {
            return false;
        }
    }

    /**
     * Returns true iff the {@code <containerId>.tokens} credentials file exists and
     * contains all of the given token kinds.
     */
    public static boolean verifyTokenKindInContainerCredentials(final Collection<String> tokens, final String containerId)
            throws IOException {
        File cwd = new File("target/" + YARN_CONFIGURATION.get(TEST_CLUSTER_NAME_KEY));
        if (!cwd.exists() || !cwd.isDirectory()) {
            return false;
        }
        File containerTokens = findFile(cwd.getAbsolutePath(), new FilenameFilter() {
            @Override
            public boolean accept(File dir, String name) {
                return name.equals(containerId + ".tokens");
            }
        });
        if (containerTokens != null) {
            LOG.info("Verifying tokens in {}", containerTokens.getAbsolutePath());
            Credentials tmCredentials = Credentials.readTokenStorageFile(containerTokens, new Configuration());
            Collection<Token<? extends TokenIdentifier>> userTokens = tmCredentials.getAllTokens();
            Set<String> tokenKinds = new HashSet<>(4);
            for (Token<? extends TokenIdentifier> token : userTokens) {
                tokenKinds.add(token.getKind().toString());
            }
            return tokenKinds.containsAll(tokens);
        } else {
            LOG.warn("Unable to find credential file for container {}", containerId);
            return false;
        }
    }

    /**
     * Returns the container id (the name of the parent directory) of the log file with the
     * given name; throws {@link IllegalStateException} if no such log exists.
     */
    public static String getContainerIdByLogName(String logName) {
        File cwd = new File("target/" + YARN_CONFIGURATION.get(TEST_CLUSTER_NAME_KEY));
        File containerLog = findFile(cwd.getAbsolutePath(), new FilenameFilter() {
            @Override
            public boolean accept(File dir, String name) {
                return name.equals(logName);
            }
        });
        if (containerLog != null) {
            return containerLog.getParentFile().getName();
        } else {
            throw new IllegalStateException("No container has log named " + logName);
        }
    }

    /** Sleeps for the given number of milliseconds, swallowing interrupts with a warning. */
    public static void sleep(int time) {
        try {
            Thread.sleep(time);
        } catch (InterruptedException e) {
            // NOTE(review): log message has a typo ("Interruped") and the interrupt flag is
            // not restored — consider Thread.currentThread().interrupt() in a code change.
            LOG.warn("Interruped", e);
        }
    }

    /** Sums the containers currently known to all node managers of the mini cluster. */
    public static int getRunningContainers() {
        int count = 0;
        for (int nmId = 0; nmId < NUM_NODEMANAGERS; nmId++) {
            NodeManager nm = yarnCluster.getNodeManager(nmId);
            ConcurrentMap<ContainerId, Container> containers = nm.getNMContext().getContainers();
            count += containers.size();
        }
        return count;
    }

    public static void startYARNSecureMode(YarnConfiguration conf, String principal, String keytab) {
        start(conf, principal, keytab);
    }

    public static void startYARNWithConfig(YarnConfiguration conf) {
        start(conf, null, null);
    }

    /**
     * Locates the Flink distribution (uberjar + lib/), starts the MiniYARNCluster if needed,
     * copies the flink-conf into a temp dir, writes yarn-site.xml, and exports the
     * FLINK_CONF_DIR / YARN_CONF_DIR / IN_TESTS environment for the tests.
     * NOTE(review): principal/keytab are currently unused in this body — presumably consumed
     * via the conf prepared by populateYarnSecureConfigurations; confirm.
     */
    private static void start(YarnConfiguration conf, String principal, String keytab) {
        File homeDir = null;
        try {
            homeDir = tmp.newFolder();
        } catch (IOException e) {
            e.printStackTrace();
            Assert.fail(e.getMessage());
        }
        System.setProperty("user.home", homeDir.getAbsolutePath());
        String uberjarStartLoc = "..";
        LOG.info("Trying to locate uberjar in {}", new File(uberjarStartLoc).getAbsolutePath());
        flinkUberjar = findFile(uberjarStartLoc, new RootDirFilenameFilter());
        Assert.assertNotNull("Flink uberjar not found", flinkUberjar);
        String flinkDistRootDir = flinkUberjar.getParentFile().getParent();
        flinkLibFolder = flinkUberjar.getParentFile();
        flinkShadedHadoopDir = Paths.get("target/shaded-hadoop").toFile();
        Assert.assertNotNull("Flink flinkLibFolder not found", flinkLibFolder);
        Assert.assertTrue("lib folder not found", flinkLibFolder.exists());
        Assert.assertTrue("lib folder not found", flinkLibFolder.isDirectory());
        if (!flinkUberjar.exists()) {
            Assert.fail("Unable to locate yarn-uberjar.jar");
        }
        try {
            LOG.info("Starting up MiniYARNCluster");
            if (yarnCluster == null) {
                final String testName = conf.get(YarnTestBase.TEST_CLUSTER_NAME_KEY);
                yarnCluster = new MiniYARNCluster(
                    testName == null ? "YarnTest_" + UUID.randomUUID() : testName,
                    NUM_NODEMANAGERS,
                    1,
                    1);
                yarnCluster.init(conf);
                yarnCluster.start();
            }
            Map<String, String> map = new HashMap<String, String>(System.getenv());
            File flinkConfDirPath = findFile(flinkDistRootDir, new ContainsName(new String[]{"flink-conf.yaml"}));
            Assert.assertNotNull(flinkConfDirPath);
            final String confDirPath = flinkConfDirPath.getParentFile().getAbsolutePath();
            globalConfiguration = GlobalConfiguration.loadConfiguration(confDirPath);
            // Work on a private copy of the conf directory so tests can modify it safely.
            tempConfPathForSecureRun = tmp.newFolder("conf");
            FileUtils.copyDirectory(new File(confDirPath), tempConfPathForSecureRun);
            BootstrapTools.writeConfiguration(
                globalConfiguration,
                new File(tempConfPathForSecureRun, "flink-conf.yaml"));
            String configDir = tempConfPathForSecureRun.getAbsolutePath();
            LOG.info("Temporary Flink configuration directory to be used for secure test: {}", configDir);
            Assert.assertNotNull(configDir);
            map.put(ConfigConstants.ENV_FLINK_CONF_DIR, configDir);
            File targetTestClassesFolder = new File("target/test-classes");
            writeYarnSiteConfigXML(conf, targetTestClassesFolder);
            map.put("IN_TESTS", "yes we are in tests");
            map.put("YARN_CONF_DIR", targetTestClassesFolder.getAbsolutePath());
            TestBaseUtils.setEnv(map);
            Assert.assertTrue(yarnCluster.getServiceState() == Service.STATE.STARTED);
            // wait for the nodeManagers to connect
            while (!yarnCluster.waitForNodeManagersToConnect(500)) {
                LOG.info("Waiting for Nodemanagers to connect");
            }
        } catch (Exception ex) {
            ex.printStackTrace();
            LOG.error("setup failure", ex);
            Assert.fail();
        }
    }

    /**
     * Default @BeforeClass impl. Overwrite this for passing a different configuration
     */
    @BeforeClass
    public static void setup() throws Exception {
        startYARNWithConfig(YARN_CONFIGURATION);
    }

    // Capture buffers for the redirected System.out/System.err during a runner execution.
    protected static ByteArrayOutputStream outContent;
    protected static ByteArrayOutputStream errContent;

    /** How a runner drives Flink: via the YARN session CLI or the generic CLI frontend. */
    enum RunTypes {
        YARN_SESSION, CLI_FRONTEND
    }

    /**
     * This method returns once the "startedAfterString" has been seen.
     * Starts a detached runner thread with redirected std streams; fails the test if the
     * string does not appear within 60 seconds or the runner dies first.
     */
    protected Runner startWithArgs(String[] args, String startedAfterString, RunTypes type) throws IOException {
        LOG.info("Running with args {}", Arrays.toString(args));
        outContent = new ByteArrayOutputStream();
        errContent = new ByteArrayOutputStream();
        // Pipe so the test can later feed "stop" to the runner's stdin (see Runner.sendStop()).
        PipedOutputStream out = new PipedOutputStream();
        PipedInputStream in = new PipedInputStream(out);
        PrintStream stdinPrintStream = new PrintStream(out);
        System.setOut(new PrintStream(outContent));
        System.setErr(new PrintStream(errContent));
        System.setIn(in);
        final int startTimeoutSeconds = 60;
        Runner runner = new Runner(
            args,
            flinkConfiguration,
            CliFrontend.getConfigurationDirectoryFromEnv(),
            type,
            0,
            stdinPrintStream);
        runner.setName("Frontend (CLI/YARN Client) runner thread (startWithArgs()).");
        runner.start();
        for (int second = 0; second < startTimeoutSeconds; second++) {
            sleep(1000);
            // check output for correct TaskManager startup.
            if (outContent.toString().contains(startedAfterString)
                    || errContent.toString().contains(startedAfterString)) {
                LOG.info("Found expected output in redirected streams");
                return runner;
            }
            // check if thread died
            if (!runner.isAlive()) {
                resetStreamsAndSendOutput();
                if (runner.getRunnerError() != null) {
                    throw new RuntimeException("Runner failed with exception.", runner.getRunnerError());
                }
                Assert.fail("Runner thread died before the test was finished.");
            }
        }
        resetStreamsAndSendOutput();
        Assert.fail("During the timeout period of " + startTimeoutSeconds + " seconds the "
            + "expected string did not show up");
        return null;
    }

    protected void runWithArgs(String[] args, String terminateAfterString, String[] failOnStrings, RunTypes type, int returnCode) throws IOException {
        runWithArgs(args, terminateAfterString, failOnStrings, type, returnCode, false);
    }

    /**
     * The test has been passed once the "terminateAfterString" has been seen.
     * @param args Command line arguments for the runner
     * @param terminateAfterString the runner is searching the stdout and stderr for this string. as soon as it appears, the test has passed
     * @param failOnPatterns The runner is searching stdout and stderr for the pattern (regexp) specified here. If one appears, the test has failed
     * @param type Set the type of the runner
     * @param expectedReturnValue Expected return code from the runner.
     * @param checkLogForTerminateString If true, the runner checks also the log4j logger for the terminate string
     */
    protected void runWithArgs(String[] args, String terminateAfterString, String[] failOnPatterns, RunTypes type, int expectedReturnValue, boolean checkLogForTerminateString) throws IOException {
        LOG.info("Running with args {}", Arrays.toString(args));
        outContent = new ByteArrayOutputStream();
        errContent = new ByteArrayOutputStream();
        PipedOutputStream out = new PipedOutputStream();
        PipedInputStream in = new PipedInputStream(out);
        PrintStream stdinPrintStream = new PrintStream(out);
        System.setOut(new PrintStream(outContent));
        System.setErr(new PrintStream(errContent));
        System.setIn(in);
        // we wait for at most three minutes
        final int startTimeoutSeconds = 180;
        final long deadline = System.currentTimeMillis() + (startTimeoutSeconds * 1000);
        Runner runner = new Runner(
            args,
            flinkConfiguration,
            CliFrontend.getConfigurationDirectoryFromEnv(),
            type,
            expectedReturnValue,
            stdinPrintStream);
        runner.start();
        boolean expectedStringSeen = false;
        boolean testPassedFromLog4j = false;
        long shutdownTimeout = 30000L;
        do {
            sleep(1000);
            String outContentString = outContent.toString();
            String errContentString = errContent.toString();
            if (failOnPatterns != null) {
                for (String failOnString : failOnPatterns) {
                    Pattern pattern = Pattern.compile(failOnString);
                    if (pattern.matcher(outContentString).find()
                            || pattern.matcher(errContentString).find()) {
                        LOG.warn("Failing test. Output contained illegal string '" + failOnString + "'");
                        resetStreamsAndSendOutput();
                        // stopping runner.
                        runner.sendStop();
                        try {
                            runner.join(shutdownTimeout);
                        } catch (InterruptedException e) {
                            LOG.warn("Interrupted while stopping runner", e);
                        }
                        Assert.fail("Output contained illegal string '" + failOnString + "'");
                    }
                }
            }
            // check output for the expected terminateAfterString.
            if (checkLogForTerminateString) {
                LoggingEvent matchedEvent = UtilsTest.getEventContainingString(terminateAfterString);
                if (matchedEvent != null) {
                    testPassedFromLog4j = true;
                    LOG.info("Found expected output in logging event {}", matchedEvent);
                }
            }
            if (outContentString.contains(terminateAfterString)
                    || errContentString.contains(terminateAfterString)
                    || testPassedFromLog4j) {
                expectedStringSeen = true;
                LOG.info("Found expected output in redirected streams");
                // send "stop" command to command line interface
                LOG.info("RunWithArgs: request runner to stop");
                runner.sendStop();
                // wait for the thread to stop
                try {
                    runner.join(shutdownTimeout);
                } catch (InterruptedException e) {
                    LOG.warn("Interrupted while stopping runner", e);
                }
                LOG.warn("RunWithArgs runner stopped.");
            } else {
                // check if thread died
                if (!runner.isAlive()) {
                    // leave loop: the runner died, so we can not expect new strings to show up.
                    break;
                }
            }
        } while (runner.getRunnerError() == null && !expectedStringSeen && System.currentTimeMillis() < deadline);
        resetStreamsAndSendOutput();
        if (runner.getRunnerError() != null) {
            // this lets the test fail.
            throw new RuntimeException("Runner failed", runner.getRunnerError());
        }
        Assert.assertTrue("During the timeout period of " + startTimeoutSeconds + " seconds the "
            + "expected string \"" + terminateAfterString + "\" did not show up.", expectedStringSeen);
        LOG.info("Test was successful");
    }

    /** Restores the original std streams and replays everything captured through the logger. */
    protected static void resetStreamsAndSendOutput() {
        System.setOut(ORIGINAL_STDOUT);
        System.setErr(ORIGINAL_STDERR);
        System.setIn(ORIGINAL_STDIN);
        LOG.info("Sending stdout content through logger: \n\n{}\n\n", outContent.toString());
        LOG.info("Sending stderr content through logger: \n\n{}\n\n", errContent.toString());
    }

    /**
     * Utility class to run yarn jobs.
     * Executes the given args on a FlinkYarnSessionCli or CliFrontend in a separate thread,
     * records any throwable in runnerError, and fails if the return code differs from the
     * expected one.
     */
    protected static class Runner extends Thread {
        private final String[] args;
        private final org.apache.flink.configuration.Configuration configuration;
        private final String configurationDirectory;
        private final int expectedReturnValue;
        // Writing "stop" to this stream (sendStop) reaches the CLI's redirected stdin.
        private final PrintStream stdinPrintStream;
        private RunTypes type;
        private FlinkYarnSessionCli yCli;
        private Throwable runnerError;

        public Runner(
                String[] args,
                org.apache.flink.configuration.Configuration configuration,
                String configurationDirectory,
                RunTypes type,
                int expectedReturnValue,
                PrintStream stdinPrintStream) {
            this.args = args;
            this.configuration = Preconditions.checkNotNull(configuration);
            this.configurationDirectory = Preconditions.checkNotNull(configurationDirectory);
            this.type = type;
            this.expectedReturnValue = expectedReturnValue;
            this.stdinPrintStream = Preconditions.checkNotNull(stdinPrintStream);
        }

        @Override
        public void run() {
            try {
                int returnValue;
                switch (type) {
                    case YARN_SESSION:
                        yCli = new FlinkYarnSessionCli(
                            configuration,
                            configurationDirectory,
                            "",
                            "",
                            true);
                        returnValue = yCli.run(args);
                        break;
                    case CLI_FRONTEND:
                        try {
                            CliFrontend cli = new CliFrontend(
                                configuration,
                                CliFrontend.loadCustomCommandLines(configuration, configurationDirectory));
                            returnValue = cli.parseParameters(args);
                        } catch (Exception e) {
                            throw new RuntimeException("Failed to execute the following args with CliFrontend: "
                                + Arrays.toString(args), e);
                        }
                        break;
                    default:
                        throw new RuntimeException("Unknown type " + type);
                }
                if (returnValue != this.expectedReturnValue) {
                    Assert.fail("The YARN session returned with unexpected value=" + returnValue
                        + " expected=" + expectedReturnValue);
                }
            } catch (Throwable t) {
                // Remember the failure for the driving test; the thread itself must not throw.
                LOG.info("Runner stopped with exception", t);
                this.runnerError = t;
            }
        }

        /** Stops the Yarn session. */
        public void sendStop() {
            stdinPrintStream.println("stop");
        }

        public Throwable getRunnerError() {
            return runnerError;
        }
    }

    // -------------------------- Tear down -------------------------- //

    @AfterClass
    public static void teardown() throws Exception {
        LOG.info("Stopping MiniYarn Cluster");
        yarnCluster.stop();
        // Unset FLINK_CONF_DIR, as it might change the behavior of other tests
        Map<String, String> map = new HashMap<>(System.getenv());
        map.remove(ConfigConstants.ENV_FLINK_CONF_DIR);
        map.remove("YARN_CONF_DIR");
        map.remove("IN_TESTS");
        TestBaseUtils.setEnv(map);
        if (tempConfPathForSecureRun != null) {
            FileUtil.fullyDelete(tempConfPathForSecureRun);
            tempConfPathForSecureRun = null;
        }
        if (yarnSiteXML != null) {
            yarnSiteXML.delete();
        }
        // When we are on travis, we copy the temp files of JUnit (containing the MiniYARNCluster log files)
        // to <flinkRoot>/target/flink-yarn-tests-*. This is easier than JUnit's rules to copy the
        // relevant files on test failure.
        if (isOnTravis()) {
            File target = new File("../target" + YARN_CONFIGURATION.get(TEST_CLUSTER_NAME_KEY));
            if (!target.mkdirs()) {
                LOG.warn("Error creating dirs to {}", target);
            }
            File src = tmp.getRoot();
            LOG.info("copying the final files from {} to {}", src.getAbsolutePath(), target.getAbsolutePath());
            try {
                FileUtils.copyDirectoryToDirectory(src, target);
            } catch (IOException e) {
                LOG.warn("Error copying the final files from {} to {}: msg: {}", src.getAbsolutePath(), target.getAbsolutePath(), e.getMessage(), e);
            }
        }
    }

    public static boolean isOnTravis() {
        return System.getenv("TRAVIS") != null && System.getenv("TRAVIS").equals("true");
    }

    /**
     * Wrapper around a {@link Scanner} that buffers the last N lines read.
     */
    private static class BufferingScanner {

        private final Scanner scanner;
        private final int numLinesBuffered;
        private final List<String> bufferedLines;

        BufferingScanner(Scanner scanner, int numLinesBuffered) {
            this.scanner = scanner;
            this.numLinesBuffered = numLinesBuffered;
            this.bufferedLines = new ArrayList<>(numLinesBuffered);
        }

        public boolean hasNextLine() {
            return scanner.hasNextLine();
        }

        public String nextLine() {
            // Drop the oldest line once the buffer is full, keeping the last N lines.
            if (bufferedLines.size() == numLinesBuffered) {
                bufferedLines.remove(0);
            }
            String line = scanner.nextLine();
            bufferedLines.add(line);
            return line;
        }

        public List<String> getPreviousLines() {
            return new ArrayList<>(bufferedLines);
        }
    }
}
Can we add some documentation explaining how a type descriptor is handled when it appears in expression position?
/**
 * Check whether the upcoming tokens indicate the start of a service declaration.
 *
 * @param currentContext Current parser context (kept for call-site compatibility)
 * @param lookahead Offset from which to start looking ahead
 * @return <code>true</code> if this looks like a service declaration, <code>false</code> otherwise
 */
private boolean isServiceDeclStart(ParserRuleContext currentContext, int lookahead) {
    SyntaxKind nextTokenKind = peek(lookahead + 1).kind;
    if (nextTokenKind == SyntaxKind.ON_KEYWORD) {
        return true;
    }
    if (nextTokenKind != SyntaxKind.IDENTIFIER_TOKEN) {
        return false;
    }
    // After `service <identifier>`, only `on` or an open brace continues a service decl.
    SyntaxKind tokenAfterIdentifier = peek(lookahead + 2).kind;
    return tokenAfterIdentifier == SyntaxKind.ON_KEYWORD || tokenAfterIdentifier == SyntaxKind.OPEN_BRACE_TOKEN;
}

/**
 * Parse listener declaration, given the qualifier.
 * <p>
 * <code>listener-decl := metadata [public] listener [type-descriptor] variable-name = expression ;</code>
 *
 * @param metadata Metadata
 * @param qualifier Qualifier that precedes the listener declaration
 * @return Parsed node
 */
private STNode parseListenerDeclaration(STNode metadata, STNode qualifier) {
    startContext(ParserRuleContext.LISTENER_DECL);
    STNode listenerKeyword = parseListenerKeyword();

    // An identifier right after `listener` may be either the type name or the variable name.
    if (peek().kind == SyntaxKind.IDENTIFIER_TOKEN) {
        STNode listenerDecl =
                parseConstantOrListenerDeclWithOptionalType(metadata, qualifier, listenerKeyword, true);
        endContext();
        return listenerDecl;
    }

    STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER);
    STNode variableName = parseVariableName();
    STNode equalsToken = parseAssignOp();
    STNode initializer = parseExpression();
    STNode semicolonToken = parseSemicolon();
    endContext();
    return STNodeFactory.createListenerDeclarationNode(metadata, qualifier, listenerKeyword, typeDesc,
            variableName, equalsToken, initializer, semicolonToken);
}

/**
 * Parse listener keyword.
 *
 * @return Parsed node
 */
private STNode parseListenerKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.LISTENER_KEYWORD) {
        recover(nextToken, ParserRuleContext.LISTENER_KEYWORD);
        return parseListenerKeyword();
    }
    return consume();
}

/**
 * Parse constant declaration, given the qualifier.
* <p> * <code>module-const-decl := metadata [public] const [type-descriptor] identifier = const-expr ;</code> * * @param metadata Metadata * @param qualifier Qualifier that precedes the listener declaration * @return Parsed node */ private STNode parseConstantDeclaration(STNode metadata, STNode qualifier) { startContext(ParserRuleContext.CONSTANT_DECL); STNode constKeyword = parseConstantKeyword(); return parseConstDecl(metadata, qualifier, constKeyword); } /** * Parse the components that follows after the const keyword of a constant declaration. * * @param metadata Metadata * @param qualifier Qualifier that precedes the constant decl * @param constKeyword Const keyword * @return Parsed node */ private STNode parseConstDecl(STNode metadata, STNode qualifier, STNode constKeyword) { STToken nextToken = peek(); switch (nextToken.kind) { case ANNOTATION_KEYWORD: endContext(); return parseAnnotationDeclaration(metadata, qualifier, constKeyword); case IDENTIFIER_TOKEN: STNode constantDecl = parseConstantOrListenerDeclWithOptionalType(metadata, qualifier, constKeyword, false); endContext(); return constantDecl; default: if (isTypeStartingToken(nextToken.kind)) { break; } recover(peek(), ParserRuleContext.CONST_DECL_TYPE, metadata, qualifier, constKeyword); return parseConstDecl(metadata, qualifier, constKeyword); } STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER); STNode variableName = parseVariableName(); STNode equalsToken = parseAssignOp(); STNode initializer = parseExpression(); STNode semicolonToken = parseSemicolon(); endContext(); return STNodeFactory.createConstantDeclarationNode(metadata, qualifier, constKeyword, typeDesc, variableName, equalsToken, initializer, semicolonToken); } private STNode parseConstantOrListenerDeclWithOptionalType(STNode metadata, STNode qualifier, STNode constKeyword, boolean isListener) { STNode varNameOrTypeName = parseStatementStartIdentifier(); return parseConstantOrListenerDeclRhs(metadata, 
qualifier, constKeyword, varNameOrTypeName, isListener); } /** * Parse the component that follows the first identifier in a const decl. The identifier * can be either the type-name (a user defined type) or the var-name there the type-name * is not present. * * @param qualifier Qualifier that precedes the constant decl * @param keyword Keyword * @param typeOrVarName Identifier that follows the const-keywoord * @return Parsed node */ private STNode parseConstantOrListenerDeclRhs(STNode metadata, STNode qualifier, STNode keyword, STNode typeOrVarName, boolean isListener) { if (typeOrVarName.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) { STNode type = typeOrVarName; STNode variableName = parseVariableName(); return parseListenerOrConstRhs(metadata, qualifier, keyword, isListener, type, variableName); } STNode type; STNode variableName; switch (peek().kind) { case IDENTIFIER_TOKEN: type = typeOrVarName; variableName = parseVariableName(); break; case EQUAL_TOKEN: variableName = ((STSimpleNameReferenceNode) typeOrVarName).name; type = STNodeFactory.createEmptyNode(); break; default: recover(peek(), ParserRuleContext.CONST_DECL_RHS, metadata, qualifier, keyword, typeOrVarName, isListener); return parseConstantOrListenerDeclRhs(metadata, qualifier, keyword, typeOrVarName, isListener); } return parseListenerOrConstRhs(metadata, qualifier, keyword, isListener, type, variableName); } private STNode parseListenerOrConstRhs(STNode metadata, STNode qualifier, STNode keyword, boolean isListener, STNode type, STNode variableName) { STNode equalsToken = parseAssignOp(); STNode initializer = parseExpression(); STNode semicolonToken = parseSemicolon(); if (isListener) { return STNodeFactory.createListenerDeclarationNode(metadata, qualifier, keyword, type, variableName, equalsToken, initializer, semicolonToken); } return STNodeFactory.createConstantDeclarationNode(metadata, qualifier, keyword, type, variableName, equalsToken, initializer, semicolonToken); } /** * Parse const keyword. 
* * @return Parsed node */ private STNode parseConstantKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.CONST_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.CONST_KEYWORD); return parseConstantKeyword(); } } /** * Parse typeof expression. * <p> * <code> * typeof-expr := typeof expression * </code> * * @param isRhsExpr * @return Typeof expression node */ private STNode parseTypeofExpression(boolean isRhsExpr, boolean isInConditionalExpr) { STNode typeofKeyword = parseTypeofKeyword(); STNode expr = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false, isInConditionalExpr); return STNodeFactory.createTypeofExpressionNode(typeofKeyword, expr); } /** * Parse typeof-keyword. * * @return Typeof-keyword node */ private STNode parseTypeofKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.TYPEOF_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.TYPEOF_KEYWORD); return parseTypeofKeyword(); } } /** * Parse optional type descriptor given the type. * <p> * <code>optional-type-descriptor := type-descriptor `?`</code> * </p> * * @param typeDescriptorNode Preceding type descriptor * @return Parsed node */ private STNode parseOptionalTypeDescriptor(STNode typeDescriptorNode) { startContext(ParserRuleContext.OPTIONAL_TYPE_DESCRIPTOR); STNode questionMarkToken = parseQuestionMark(); endContext(); typeDescriptorNode = validateForUsageOfVar(typeDescriptorNode); return STNodeFactory.createOptionalTypeDescriptorNode(typeDescriptorNode, questionMarkToken); } /** * Parse unary expression. * <p> * <code> * unary-expr := + expression | - expression | ~ expression | ! 
expression * </code> * * @param isRhsExpr * @return Unary expression node */ private STNode parseUnaryExpression(boolean isRhsExpr, boolean isInConditionalExpr) { STNode unaryOperator = parseUnaryOperator(); STNode expr = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false, isInConditionalExpr); return STNodeFactory.createUnaryExpressionNode(unaryOperator, expr); } /** * Parse unary operator. * <code>UnaryOperator := + | - | ~ | !</code> * * @return Parsed node */ private STNode parseUnaryOperator() { STToken token = peek(); if (isUnaryOperator(token.kind)) { return consume(); } else { recover(token, ParserRuleContext.UNARY_OPERATOR); return parseUnaryOperator(); } } /** * Check whether the given token kind is a unary operator. * * @param kind STToken kind * @return <code>true</code> if the token kind refers to a unary operator. <code>false</code> otherwise */ private boolean isUnaryOperator(SyntaxKind kind) { switch (kind) { case PLUS_TOKEN: case MINUS_TOKEN: case NEGATION_TOKEN: case EXCLAMATION_MARK_TOKEN: return true; default: return false; } } /** * Parse array type descriptor. 
* <p> * <code> * array-type-descriptor := member-type-descriptor [ [ array-length ] ] * member-type-descriptor := type-descriptor * array-length := * int-literal * | constant-reference-expr * | inferred-array-length * inferred-array-length := * * </code> * </p> * * @param memberTypeDesc * @return Parsed Node */ private STNode parseArrayTypeDescriptor(STNode memberTypeDesc) { startContext(ParserRuleContext.ARRAY_TYPE_DESCRIPTOR); STNode openBracketToken = parseOpenBracket(); STNode arrayLengthNode = parseArrayLength(); STNode closeBracketToken = parseCloseBracket(); endContext(); return createArrayTypeDesc(memberTypeDesc, openBracketToken, arrayLengthNode, closeBracketToken); } private STNode createArrayTypeDesc(STNode memberTypeDesc, STNode openBracketToken, STNode arrayLengthNode, STNode closeBracketToken) { memberTypeDesc = validateForUsageOfVar(memberTypeDesc); if (arrayLengthNode != null) { switch (arrayLengthNode.kind) { case ASTERISK_LITERAL: case SIMPLE_NAME_REFERENCE: case QUALIFIED_NAME_REFERENCE: break; case NUMERIC_LITERAL: SyntaxKind numericLiteralKind = arrayLengthNode.childInBucket(0).kind; if (numericLiteralKind == SyntaxKind.DECIMAL_INTEGER_LITERAL_TOKEN || numericLiteralKind == SyntaxKind.HEX_INTEGER_LITERAL_TOKEN) { break; } default: openBracketToken = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(openBracketToken, arrayLengthNode, DiagnosticErrorCode.ERROR_INVALID_ARRAY_LENGTH); arrayLengthNode = STNodeFactory.createEmptyNode(); } } return STNodeFactory.createArrayTypeDescriptorNode(memberTypeDesc, openBracketToken, arrayLengthNode, closeBracketToken); } /** * Parse array length. 
* <p> * <code> * array-length := * int-literal * | constant-reference-expr * | inferred-array-length * constant-reference-expr := variable-reference-expr * </code> * </p> * * @return Parsed array length */ private STNode parseArrayLength() { STToken token = peek(); switch (token.kind) { case DECIMAL_INTEGER_LITERAL_TOKEN: case HEX_INTEGER_LITERAL_TOKEN: case ASTERISK_TOKEN: return parseBasicLiteral(); case CLOSE_BRACKET_TOKEN: return STNodeFactory.createEmptyNode(); case IDENTIFIER_TOKEN: return parseQualifiedIdentifier(ParserRuleContext.ARRAY_LENGTH); default: recover(token, ParserRuleContext.ARRAY_LENGTH); return parseArrayLength(); } } /** * Parse annotations. * <p> * <i>Note: In the <a href="https: * annotations-list is specified as one-or-more annotations. And the usage is marked as * optional annotations-list. However, for the consistency of the tree, here we make the * annotation-list as zero-or-more annotations, and the usage is not-optional.</i> * <p> * <code>annots := annotation*</code> * * @return Parsed node */ private STNode parseOptionalAnnotations() { startContext(ParserRuleContext.ANNOTATIONS); List<STNode> annotList = new ArrayList<>(); STToken nextToken = peek(); while (nextToken.kind == SyntaxKind.AT_TOKEN) { annotList.add(parseAnnotation()); nextToken = peek(); } endContext(); return STNodeFactory.createNodeList(annotList); } /** * Parse annotation list with at least one annotation. * * @return Annotation list */ private STNode parseAnnotations() { startContext(ParserRuleContext.ANNOTATIONS); List<STNode> annotList = new ArrayList<>(); annotList.add(parseAnnotation()); while (peek().kind == SyntaxKind.AT_TOKEN) { annotList.add(parseAnnotation()); } endContext(); return STNodeFactory.createNodeList(annotList); } /** * Parse annotation attachment. 
* <p> * <code>annotation := @ annot-tag-reference annot-value</code> * * @return Parsed node */ private STNode parseAnnotation() { STNode atToken = parseAtToken(); STNode annotReference; if (isPredeclaredIdentifier(peek().kind)) { annotReference = parseQualifiedIdentifier(ParserRuleContext.ANNOT_REFERENCE); } else { annotReference = STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN); } STNode annotValue; if (peek().kind == SyntaxKind.OPEN_BRACE_TOKEN) { annotValue = parseMappingConstructorExpr(); } else { annotValue = STNodeFactory.createEmptyNode(); } return STNodeFactory.createAnnotationNode(atToken, annotReference, annotValue); } /** * Parse '@' token. * * @return Parsed node */ private STNode parseAtToken() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.AT_TOKEN) { return consume(); } else { recover(nextToken, ParserRuleContext.AT); return parseAtToken(); } } /** * Parse metadata. Meta data consist of optional doc string and * an annotations list. * <p> * <code>metadata := [DocumentationString] annots</code> * * @return Parse node */ private STNode parseMetaData() { STNode docString; STNode annotations; switch (peek().kind) { case DOCUMENTATION_STRING: docString = parseMarkdownDocumentation(); annotations = parseOptionalAnnotations(); break; case AT_TOKEN: docString = STNodeFactory.createEmptyNode(); annotations = parseOptionalAnnotations(); break; default: return STNodeFactory.createEmptyNode(); } return createMetadata(docString, annotations); } /** * Create metadata node. * * @return A metadata node */ private STNode createMetadata(STNode docString, STNode annotations) { if (annotations == null && docString == null) { return STNodeFactory.createEmptyNode(); } else { return STNodeFactory.createMetadataNode(docString, annotations); } } /** * Parse is expression. 
* <code> * is-expr := expression is type-descriptor * </code> * * @param lhsExpr Preceding expression of the is expression * @return Is expression node */ private STNode parseTypeTestExpression(STNode lhsExpr, boolean isInConditionalExpr) { STNode isKeyword = parseIsKeyword(); STNode typeDescriptor = parseTypeDescriptorInExpression(isInConditionalExpr); return STNodeFactory.createTypeTestExpressionNode(lhsExpr, isKeyword, typeDescriptor); } /** * Parse is-keyword. * * @return Is-keyword node */ private STNode parseIsKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.IS_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.IS_KEYWORD); return parseIsKeyword(); } } /** * Parse local type definition statement statement. * <code>ocal-type-defn-stmt := [annots] type identifier type-descriptor ;</code> * * @return local type definition statement statement */ private STNode parseLocalTypeDefinitionStatement(STNode annots) { startContext(ParserRuleContext.LOCAL_TYPE_DEFINITION_STMT); STNode typeKeyword = parseTypeKeyword(); STNode typeName = parseTypeName(); STNode typeDescriptor = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_DEF); STNode semicolon = parseSemicolon(); endContext(); return STNodeFactory.createLocalTypeDefinitionStatementNode(annots, typeKeyword, typeName, typeDescriptor, semicolon); } /** * Parse statement which is only consists of an action or expression. * * @param annots Annotations * @return Statement node */ private STNode parseExpressionStatement(STNode annots) { startContext(ParserRuleContext.EXPRESSION_STATEMENT); STNode expression = parseActionOrExpressionInLhs(annots); return getExpressionAsStatement(expression); } /** * Parse statements that starts with an expression. 
* * @return Statement node */ private STNode parseStatementStartWithExpr(STNode annots) { startContext(ParserRuleContext.AMBIGUOUS_STMT); STNode expr = parseActionOrExpressionInLhs(annots); return parseStatementStartWithExprRhs(expr); } /** * Parse the component followed by the expression, at the beginning of a statement. * * @param expression Action or expression in LHS * @return Statement node */ private STNode parseStatementStartWithExprRhs(STNode expression) { SyntaxKind nextTokenKind = peek().kind; if (isAction(expression) || nextTokenKind == SyntaxKind.SEMICOLON_TOKEN) { return getExpressionAsStatement(expression); } switch (nextTokenKind) { case EQUAL_TOKEN: switchContext(ParserRuleContext.ASSIGNMENT_STMT); return parseAssignmentStmtRhs(expression); case IDENTIFIER_TOKEN: default: if (isCompoundBinaryOperator(nextTokenKind)) { return parseCompoundAssignmentStmtRhs(expression); } ParserRuleContext context; if (isPossibleExpressionStatement(expression)) { context = ParserRuleContext.EXPR_STMT_RHS; } else { context = ParserRuleContext.STMT_START_WITH_EXPR_RHS; } recover(peek(), context, expression); return parseStatementStartWithExprRhs(expression); } } private boolean isPossibleExpressionStatement(STNode expression) { switch (expression.kind) { case METHOD_CALL: case FUNCTION_CALL: case CHECK_EXPRESSION: case REMOTE_METHOD_CALL_ACTION: case CHECK_ACTION: case BRACED_ACTION: case START_ACTION: case TRAP_ACTION: case FLUSH_ACTION: case ASYNC_SEND_ACTION: case SYNC_SEND_ACTION: case RECEIVE_ACTION: case WAIT_ACTION: case QUERY_ACTION: case COMMIT_ACTION: return true; default: return false; } } private STNode getExpressionAsStatement(STNode expression) { switch (expression.kind) { case METHOD_CALL: case FUNCTION_CALL: case CHECK_EXPRESSION: return parseCallStatement(expression); case REMOTE_METHOD_CALL_ACTION: case CHECK_ACTION: case BRACED_ACTION: case START_ACTION: case TRAP_ACTION: case FLUSH_ACTION: case ASYNC_SEND_ACTION: case SYNC_SEND_ACTION: case 
RECEIVE_ACTION: case WAIT_ACTION: case QUERY_ACTION: case COMMIT_ACTION: return parseActionStatement(expression); default: STNode semicolon = parseSemicolon(); endContext(); STNode exprStmt = STNodeFactory.createExpressionStatementNode(SyntaxKind.INVALID_EXPRESSION_STATEMENT, expression, semicolon); exprStmt = SyntaxErrors.addDiagnostic(exprStmt, DiagnosticErrorCode.ERROR_INVALID_EXPRESSION_STATEMENT); return exprStmt; } } private STNode parseArrayTypeDescriptorNode(STIndexedExpressionNode indexedExpr) { STNode memberTypeDesc = getTypeDescFromExpr(indexedExpr.containerExpression); STNodeList lengthExprs = (STNodeList) indexedExpr.keyExpression; if (lengthExprs.isEmpty()) { return createArrayTypeDesc(memberTypeDesc, indexedExpr.openBracket, STNodeFactory.createEmptyNode(), indexedExpr.closeBracket); } STNode lengthExpr = lengthExprs.get(0); switch (lengthExpr.kind) { case ASTERISK_LITERAL: case SIMPLE_NAME_REFERENCE: case QUALIFIED_NAME_REFERENCE: break; case NUMERIC_LITERAL: SyntaxKind innerChildKind = lengthExpr.childInBucket(0).kind; if (innerChildKind == SyntaxKind.DECIMAL_INTEGER_LITERAL_TOKEN || innerChildKind == SyntaxKind.HEX_INTEGER_LITERAL_TOKEN) { break; } default: STNode newOpenBracketWithDiagnostics = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae( indexedExpr.openBracket, lengthExpr, DiagnosticErrorCode.ERROR_INVALID_ARRAY_LENGTH); indexedExpr = indexedExpr.replace(indexedExpr.openBracket, newOpenBracketWithDiagnostics); lengthExpr = STNodeFactory.createEmptyNode(); } return createArrayTypeDesc(memberTypeDesc, indexedExpr.openBracket, lengthExpr, indexedExpr.closeBracket); } /** * <p> * Parse call statement, given the call expression. 
* </p> * <code> * call-stmt := call-expr ; * <br/> * call-expr := function-call-expr | method-call-expr | checking-keyword call-expr * </code> * * @param expression Call expression associated with the call statement * @return Call statement node */ private STNode parseCallStatement(STNode expression) { STNode semicolon = parseSemicolon(); endContext(); if (expression.kind == SyntaxKind.CHECK_EXPRESSION) { expression = validateCallExpression(expression); } return STNodeFactory.createExpressionStatementNode(SyntaxKind.CALL_STATEMENT, expression, semicolon); } private STNode validateCallExpression(STNode callExpr) { STCheckExpressionNode checkExpr = (STCheckExpressionNode) callExpr; STNode expr = checkExpr.expression; if (expr.kind == SyntaxKind.FUNCTION_CALL || expr.kind == SyntaxKind.METHOD_CALL) { return callExpr; } STNode checkKeyword = checkExpr.checkKeyword; if (expr.kind == SyntaxKind.CHECK_EXPRESSION) { expr = validateCallExpression(expr); return STNodeFactory.createCheckExpressionNode(SyntaxKind.CHECK_EXPRESSION, checkKeyword, expr); } STNode checkingKeyword = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(checkKeyword, expr, DiagnosticErrorCode.ERROR_INVALID_EXPRESSION_EXPECTED_CALL_EXPRESSION); STNode funcName = SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN); funcName = STNodeFactory.createSimpleNameReferenceNode(funcName); STNode openParenToken = SyntaxErrors.createMissingToken(SyntaxKind.OPEN_PAREN_TOKEN); STNode arguments = STNodeFactory.createEmptyNodeList(); STNode closeParenToken = SyntaxErrors.createMissingToken(SyntaxKind.CLOSE_PAREN_TOKEN); STNode funcCallExpr = STNodeFactory.createFunctionCallExpressionNode(funcName, openParenToken, arguments, closeParenToken); return STNodeFactory.createCheckExpressionNode(SyntaxKind.CHECK_EXPRESSION, checkingKeyword, funcCallExpr); } private STNode parseActionStatement(STNode action) { STNode semicolon = parseSemicolon(); endContext(); return 
STNodeFactory.createExpressionStatementNode(SyntaxKind.ACTION_STATEMENT, action, semicolon); } /** * Parse remote method call action, given the starting expression. * <p> * <code> * remote-method-call-action := expression -> method-name ( arg-list ) * <br/> * async-send-action := expression -> peer-worker ; * </code> * * @param isRhsExpr Is this an RHS action * @param expression LHS expression * @return */ private STNode parseRemoteMethodCallOrAsyncSendAction(STNode expression, boolean isRhsExpr) { STNode rightArrow = parseRightArrow(); return parseRemoteCallOrAsyncSendActionRhs(expression, isRhsExpr, rightArrow); } private STNode parseRemoteCallOrAsyncSendActionRhs(STNode expression, boolean isRhsExpr, STNode rightArrow) { STNode name; STToken nextToken = peek(); switch (nextToken.kind) { case FUNCTION_KEYWORD: STNode functionKeyword = consume(); name = STNodeFactory.createSimpleNameReferenceNode(functionKeyword); return parseAsyncSendAction(expression, rightArrow, name); case IDENTIFIER_TOKEN: name = STNodeFactory.createSimpleNameReferenceNode(parseFunctionName()); break; case CONTINUE_KEYWORD: case COMMIT_KEYWORD: name = getKeywordAsSimpleNameRef(); break; default: STToken token = peek(); recover(token, ParserRuleContext.REMOTE_CALL_OR_ASYNC_SEND_RHS, expression, isRhsExpr, rightArrow); return parseRemoteCallOrAsyncSendActionRhs(expression, isRhsExpr, rightArrow); } return parseRemoteCallOrAsyncSendEnd(expression, rightArrow, name); } private STNode parseRemoteCallOrAsyncSendEnd(STNode expression, STNode rightArrow, STNode name) { STToken nextToken = peek(); switch (nextToken.kind) { case OPEN_PAREN_TOKEN: return parseRemoteMethodCallAction(expression, rightArrow, name); case SEMICOLON_TOKEN: return parseAsyncSendAction(expression, rightArrow, name); default: recover(peek(), ParserRuleContext.REMOTE_CALL_OR_ASYNC_SEND_END, expression, rightArrow, name); return parseRemoteCallOrAsyncSendEnd(expression, rightArrow, name); } } private STNode 
parseAsyncSendAction(STNode expression, STNode rightArrow, STNode peerWorker) { return STNodeFactory.createAsyncSendActionNode(expression, rightArrow, peerWorker); } private STNode parseRemoteMethodCallAction(STNode expression, STNode rightArrow, STNode name) { STNode openParenToken = parseArgListOpenParenthesis(); STNode arguments = parseArgsList(); STNode closeParenToken = parseArgListCloseParenthesis(); return STNodeFactory.createRemoteMethodCallActionNode(expression, rightArrow, name, openParenToken, arguments, closeParenToken); } /** * Parse right arrow (<code>-></code>) token. * * @return Parsed node */ private STNode parseRightArrow() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.RIGHT_ARROW_TOKEN) { return consume(); } else { recover(nextToken, ParserRuleContext.RIGHT_ARROW); return parseRightArrow(); } } /** * Parse parameterized type descriptor. * parameterized-type-descriptor := map type-parameter | future type-parameter | typedesc type-parameter * * @return Parsed node */ private STNode parseParameterizedTypeDescriptor(STNode parameterizedTypeKeyword) { STNode typeParameter = parseTypeParameter(); return STNodeFactory.createParameterizedTypeDescriptorNode(parameterizedTypeKeyword, typeParameter); } /** * Parse <code> < </code> token. * * @return Parsed node */ private STNode parseGTToken() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.GT_TOKEN) { return consume(); } else { recover(nextToken, ParserRuleContext.GT); return parseGTToken(); } } /** * Parse <code> > </code> token. * * @return Parsed node */ private STNode parseLTToken() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.LT_TOKEN) { return consume(); } else { recover(nextToken, ParserRuleContext.LT); return parseLTToken(); } } /** * Parse nil literal. Here nil literal is only referred to ( ). 
* * @return Parsed node */ private STNode parseNilLiteral() { startContext(ParserRuleContext.NIL_LITERAL); STNode openParenthesisToken = parseOpenParenthesis(); STNode closeParenthesisToken = parseCloseParenthesis(); endContext(); return STNodeFactory.createNilLiteralNode(openParenthesisToken, closeParenthesisToken); } /** * Parse annotation declaration, given the qualifier. * * @param metadata Metadata * @param qualifier Qualifier that precedes the listener declaration * @param constKeyword Const keyword * @return Parsed node */ private STNode parseAnnotationDeclaration(STNode metadata, STNode qualifier, STNode constKeyword) { startContext(ParserRuleContext.ANNOTATION_DECL); STNode annotationKeyword = parseAnnotationKeyword(); STNode annotDecl = parseAnnotationDeclFromType(metadata, qualifier, constKeyword, annotationKeyword); endContext(); return annotDecl; } /** * Parse annotation keyword. * * @return Parsed node */ private STNode parseAnnotationKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.ANNOTATION_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.ANNOTATION_KEYWORD); return parseAnnotationKeyword(); } } /** * Parse the components that follows after the annotation keyword of a annotation declaration. 
* * @param metadata Metadata * @param qualifier Qualifier that precedes the constant decl * @param constKeyword Const keyword * @param annotationKeyword * @return Parsed node */ private STNode parseAnnotationDeclFromType(STNode metadata, STNode qualifier, STNode constKeyword, STNode annotationKeyword) { STToken nextToken = peek(); switch (nextToken.kind) { case IDENTIFIER_TOKEN: return parseAnnotationDeclWithOptionalType(metadata, qualifier, constKeyword, annotationKeyword); default: if (isTypeStartingToken(nextToken.kind)) { break; } recover(peek(), ParserRuleContext.ANNOT_DECL_OPTIONAL_TYPE, metadata, qualifier, constKeyword, annotationKeyword); return parseAnnotationDeclFromType(metadata, qualifier, constKeyword, annotationKeyword); } STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANNOTATION_DECL); STNode annotTag = parseAnnotationTag(); return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc, annotTag); } /** * Parse annotation tag. 
* <p> * <code>annot-tag := identifier</code> * * @return */ private STNode parseAnnotationTag() { STToken token = peek(); if (token.kind == SyntaxKind.IDENTIFIER_TOKEN) { return consume(); } else { recover(peek(), ParserRuleContext.ANNOTATION_TAG); return parseAnnotationTag(); } } private STNode parseAnnotationDeclWithOptionalType(STNode metadata, STNode qualifier, STNode constKeyword, STNode annotationKeyword) { STNode typeDescOrAnnotTag = parseQualifiedIdentifier(ParserRuleContext.ANNOT_DECL_OPTIONAL_TYPE); if (typeDescOrAnnotTag.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) { STNode annotTag = parseAnnotationTag(); return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDescOrAnnotTag, annotTag); } STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN || isValidTypeContinuationToken(nextToken)) { STNode typeDesc = parseComplexTypeDescriptor(typeDescOrAnnotTag, ParserRuleContext.TYPE_DESC_IN_ANNOTATION_DECL, false); STNode annotTag = parseAnnotationTag(); return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc, annotTag); } STNode annotTag = ((STSimpleNameReferenceNode) typeDescOrAnnotTag).name; return parseAnnotationDeclRhs(metadata, qualifier, constKeyword, annotationKeyword, annotTag); } /** * Parse the component that follows the first identifier in an annotation decl. The identifier * can be either the type-name (a user defined type) or the annot-tag, where the type-name * is not present. 
* * @param metadata Metadata * @param qualifier Qualifier that precedes the annotation decl * @param constKeyword Const keyword * @param annotationKeyword Annotation keyword * @param typeDescOrAnnotTag Identifier that follows the annotation-keyword * @return Parsed node */ private STNode parseAnnotationDeclRhs(STNode metadata, STNode qualifier, STNode constKeyword, STNode annotationKeyword, STNode typeDescOrAnnotTag) { STToken nextToken = peek(); STNode typeDesc; STNode annotTag; switch (nextToken.kind) { case IDENTIFIER_TOKEN: typeDesc = typeDescOrAnnotTag; annotTag = parseAnnotationTag(); break; case SEMICOLON_TOKEN: case ON_KEYWORD: typeDesc = STNodeFactory.createEmptyNode(); annotTag = typeDescOrAnnotTag; break; default: recover(peek(), ParserRuleContext.ANNOT_DECL_RHS, metadata, qualifier, constKeyword, annotationKeyword, typeDescOrAnnotTag); return parseAnnotationDeclRhs(metadata, qualifier, constKeyword, annotationKeyword, typeDescOrAnnotTag); } return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc, annotTag); } private STNode parseAnnotationDeclAttachPoints(STNode metadata, STNode qualifier, STNode constKeyword, STNode annotationKeyword, STNode typeDesc, STNode annotTag) { STNode onKeyword; STNode attachPoints; STToken nextToken = peek(); switch (nextToken.kind) { case SEMICOLON_TOKEN: onKeyword = STNodeFactory.createEmptyNode(); attachPoints = STNodeFactory.createEmptyNodeList(); break; case ON_KEYWORD: onKeyword = parseOnKeyword(); attachPoints = parseAnnotationAttachPoints(); onKeyword = cloneWithDiagnosticIfListEmpty(attachPoints, onKeyword, DiagnosticErrorCode.ERROR_MISSING_ANNOTATION_ATTACH_POINT); break; default: recover(peek(), ParserRuleContext.ANNOT_OPTIONAL_ATTACH_POINTS, metadata, qualifier, constKeyword, annotationKeyword, typeDesc, annotTag); return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc, annotTag); } STNode semicolonToken = 
parseSemicolon(); return STNodeFactory.createAnnotationDeclarationNode(metadata, qualifier, constKeyword, annotationKeyword, typeDesc, annotTag, onKeyword, attachPoints, semicolonToken); } /** * Parse annotation attach points. * <p> * <code> * annot-attach-points := annot-attach-point (, annot-attach-point)* * <br/><br/> * annot-attach-point := dual-attach-point | source-only-attach-point * <br/><br/> * dual-attach-point := [source] dual-attach-point-ident * <br/><br/> * dual-attach-point-ident := * type * | class * | [object|service remote] function * | parameter * | return * | service * | [object|record] field * <br/><br/> * source-only-attach-point := source source-only-attach-point-ident * <br/><br/> * source-only-attach-point-ident := * annotation * | external * | var * | const * | listener * | worker * </code> * * @return Parsed node */ private STNode parseAnnotationAttachPoints() { startContext(ParserRuleContext.ANNOT_ATTACH_POINTS_LIST); List<STNode> attachPoints = new ArrayList<>(); STToken nextToken = peek(); if (isEndAnnotAttachPointList(nextToken.kind)) { endContext(); return STNodeFactory.createEmptyNodeList(); } STNode attachPoint = parseAnnotationAttachPoint(); attachPoints.add(attachPoint); nextToken = peek(); STNode leadingComma; while (!isEndAnnotAttachPointList(nextToken.kind)) { leadingComma = parseAttachPointEnd(); if (leadingComma == null) { break; } attachPoints.add(leadingComma); attachPoint = parseAnnotationAttachPoint(); if (attachPoint == null) { attachPoint = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN, DiagnosticErrorCode.ERROR_MISSING_ANNOTATION_ATTACH_POINT); attachPoints.add(attachPoint); break; } attachPoints.add(attachPoint); nextToken = peek(); } endContext(); return STNodeFactory.createNodeList(attachPoints); } /** * Parse annotation attach point end. 
* * @return Parsed node */ private STNode parseAttachPointEnd() { switch (peek().kind) { case SEMICOLON_TOKEN: return null; case COMMA_TOKEN: return consume(); default: recover(peek(), ParserRuleContext.ATTACH_POINT_END); return parseAttachPointEnd(); } } private boolean isEndAnnotAttachPointList(SyntaxKind tokenKind) { switch (tokenKind) { case EOF_TOKEN: case SEMICOLON_TOKEN: return true; default: return false; } } /** * Parse annotation attach point. * * @return Parsed node */ private STNode parseAnnotationAttachPoint() { switch (peek().kind) { case EOF_TOKEN: return null; case ANNOTATION_KEYWORD: case EXTERNAL_KEYWORD: case VAR_KEYWORD: case CONST_KEYWORD: case LISTENER_KEYWORD: case WORKER_KEYWORD: case SOURCE_KEYWORD: STNode sourceKeyword = parseSourceKeyword(); return parseAttachPointIdent(sourceKeyword); case OBJECT_KEYWORD: case TYPE_KEYWORD: case FUNCTION_KEYWORD: case PARAMETER_KEYWORD: case RETURN_KEYWORD: case SERVICE_KEYWORD: case FIELD_KEYWORD: case RECORD_KEYWORD: case CLASS_KEYWORD: sourceKeyword = STNodeFactory.createEmptyNode(); STNode firstIdent = consume(); return parseDualAttachPointIdent(sourceKeyword, firstIdent); default: recover(peek(), ParserRuleContext.ATTACH_POINT); return parseAnnotationAttachPoint(); } } /** * Parse source keyword. * * @return Parsed node */ private STNode parseSourceKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.SOURCE_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.SOURCE_KEYWORD); return parseSourceKeyword(); } } /** * Parse attach point ident given. 
* <p> * <code> * source-only-attach-point-ident := annotation | external | var | const | listener | worker * <br/><br/> * dual-attach-point-ident := type | class | [object|service remote] function | parameter * | return | service | [object|record] field * </code> * * @param sourceKeyword Source keyword * @return Parsed node */ private STNode parseAttachPointIdent(STNode sourceKeyword) { switch (peek().kind) { case ANNOTATION_KEYWORD: case EXTERNAL_KEYWORD: case VAR_KEYWORD: case CONST_KEYWORD: case LISTENER_KEYWORD: case WORKER_KEYWORD: STNode firstIdent = consume(); STNode identList = STNodeFactory.createNodeList(firstIdent); return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, identList); case OBJECT_KEYWORD: case RESOURCE_KEYWORD: case RECORD_KEYWORD: case TYPE_KEYWORD: case FUNCTION_KEYWORD: case PARAMETER_KEYWORD: case RETURN_KEYWORD: case SERVICE_KEYWORD: case FIELD_KEYWORD: case CLASS_KEYWORD: firstIdent = consume(); return parseDualAttachPointIdent(sourceKeyword, firstIdent); default: recover(peek(), ParserRuleContext.ATTACH_POINT_IDENT, sourceKeyword); return parseAttachPointIdent(sourceKeyword); } } /** * Parse dual-attach-point ident. 
* * @param sourceKeyword Source keyword * @param firstIdent first part of the dual attach-point * @return Parsed node */ private STNode parseDualAttachPointIdent(STNode sourceKeyword, STNode firstIdent) { STNode secondIdent; switch (firstIdent.kind) { case OBJECT_KEYWORD: secondIdent = parseIdentAfterObjectIdent(); break; case RESOURCE_KEYWORD: secondIdent = parseFunctionIdent(); break; case RECORD_KEYWORD: secondIdent = parseFieldIdent(); break; case SERVICE_KEYWORD: return parseServiceAttachPoint(sourceKeyword, firstIdent); case TYPE_KEYWORD: case FUNCTION_KEYWORD: case PARAMETER_KEYWORD: case RETURN_KEYWORD: case FIELD_KEYWORD: case CLASS_KEYWORD: default: STNode identList = STNodeFactory.createNodeList(firstIdent); return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, identList); } STNode identList = STNodeFactory.createNodeList(firstIdent, secondIdent); return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, identList); } /** * Parse remote ident. * * @return Parsed node */ private STNode parseRemoteIdent() { STToken token = peek(); if (token.kind == SyntaxKind.REMOTE_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.REMOTE_IDENT); return parseRemoteIdent(); } } /** * Parse service attach point. 
* <code>service-attach-point := service | service remote function</code> * * @return Parsed node */ private STNode parseServiceAttachPoint(STNode sourceKeyword, STNode firstIdent) { STNode identList; STToken token = peek(); switch (token.kind) { case REMOTE_KEYWORD: STNode secondIdent = parseRemoteIdent(); STNode thirdIdent = parseFunctionIdent(); identList = STNodeFactory.createNodeList(firstIdent, secondIdent, thirdIdent); return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, identList); case COMMA_TOKEN: case SEMICOLON_TOKEN: identList = STNodeFactory.createNodeList(firstIdent); return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, identList); default: recover(token, ParserRuleContext.SERVICE_IDENT_RHS); return parseServiceAttachPoint(sourceKeyword, firstIdent); } } /** * Parse the idents that are supported after object-ident. * * @return Parsed node */ private STNode parseIdentAfterObjectIdent() { STToken token = peek(); switch (token.kind) { case FUNCTION_KEYWORD: case FIELD_KEYWORD: return consume(); default: recover(token, ParserRuleContext.IDENT_AFTER_OBJECT_IDENT); return parseIdentAfterObjectIdent(); } } /** * Parse function ident. * * @return Parsed node */ private STNode parseFunctionIdent() { STToken token = peek(); if (token.kind == SyntaxKind.FUNCTION_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.FUNCTION_IDENT); return parseFunctionIdent(); } } /** * Parse field ident. * * @return Parsed node */ private STNode parseFieldIdent() { STToken token = peek(); if (token.kind == SyntaxKind.FIELD_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.FIELD_IDENT); return parseFieldIdent(); } } /** * Parse XML namespace declaration. 
* <p> * <code>xmlns-decl := xmlns xml-namespace-uri [ as xml-namespace-prefix ] ; * <br/> * xml-namespace-uri := simple-const-expr * <br/> * xml-namespace-prefix := identifier * </code> * * @param isModuleVar Whether this is a module-level XML namespace declaration * @return Parsed node */ private STNode parseXMLNamespaceDeclaration(boolean isModuleVar) { startContext(ParserRuleContext.XML_NAMESPACE_DECLARATION); STNode xmlnsKeyword = parseXMLNSKeyword(); STNode namespaceUri = parseSimpleConstExpr(); while (!isValidXMLNameSpaceURI(namespaceUri)) { xmlnsKeyword = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(xmlnsKeyword, namespaceUri, DiagnosticErrorCode.ERROR_INVALID_XML_NAMESPACE_URI); namespaceUri = parseSimpleConstExpr(); } STNode xmlnsDecl = parseXMLDeclRhs(xmlnsKeyword, namespaceUri, isModuleVar); endContext(); return xmlnsDecl; } /** * Parse xmlns keyword. * * @return Parsed node */ private STNode parseXMLNSKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.XMLNS_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.XMLNS_KEYWORD); return parseXMLNSKeyword(); } } private boolean isValidXMLNameSpaceURI(STNode expr) { switch (expr.kind) { case STRING_LITERAL: case QUALIFIED_NAME_REFERENCE: case SIMPLE_NAME_REFERENCE: return true; case IDENTIFIER_TOKEN: default: return false; } } private STNode parseSimpleConstExpr() { startContext(ParserRuleContext.CONSTANT_EXPRESSION); STNode expr = parseSimpleConstExprInternal(); endContext(); return expr; } /** * Parse simple constant expr. 
* * @return Parsed node */ private STNode parseSimpleConstExprInternal() { switch (peek().kind) { case STRING_LITERAL_TOKEN: case DECIMAL_INTEGER_LITERAL_TOKEN: case HEX_INTEGER_LITERAL_TOKEN: case DECIMAL_FLOATING_POINT_LITERAL_TOKEN: case HEX_FLOATING_POINT_LITERAL_TOKEN: case TRUE_KEYWORD: case FALSE_KEYWORD: case NULL_KEYWORD: return parseBasicLiteral(); case IDENTIFIER_TOKEN: return parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF); case PLUS_TOKEN: case MINUS_TOKEN: return parseSignedIntOrFloat(); case OPEN_PAREN_TOKEN: return parseNilLiteral(); default: STToken token = peek(); recover(token, ParserRuleContext.CONSTANT_EXPRESSION_START); return parseSimpleConstExprInternal(); } } /** * Parse the portion after the namespace-uri of an XML declaration. * * @param xmlnsKeyword XMLNS keyword * @param namespaceUri Namespace URI * @return Parsed node */ private STNode parseXMLDeclRhs(STNode xmlnsKeyword, STNode namespaceUri, boolean isModuleVar) { STNode asKeyword = STNodeFactory.createEmptyNode(); STNode namespacePrefix = STNodeFactory.createEmptyNode(); switch (peek().kind) { case AS_KEYWORD: asKeyword = parseAsKeyword(); namespacePrefix = parseNamespacePrefix(); break; case SEMICOLON_TOKEN: break; default: recover(peek(), ParserRuleContext.XML_NAMESPACE_PREFIX_DECL, xmlnsKeyword, namespaceUri, isModuleVar); return parseXMLDeclRhs(xmlnsKeyword, namespaceUri, isModuleVar); } STNode semicolon = parseSemicolon(); if (isModuleVar) { return STNodeFactory.createModuleXMLNamespaceDeclarationNode(xmlnsKeyword, namespaceUri, asKeyword, namespacePrefix, semicolon); } return STNodeFactory.createXMLNamespaceDeclarationNode(xmlnsKeyword, namespaceUri, asKeyword, namespacePrefix, semicolon); } /** * Parse namespace prefix. 
* * @return Parsed node */ private STNode parseNamespacePrefix() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) { return consume(); } else { recover(peek(), ParserRuleContext.NAMESPACE_PREFIX); return parseNamespacePrefix(); } } /** * Parse named worker declaration. * <p> * <code>named-worker-decl := [annots] [transactional] worker worker-name return-type-descriptor { sequence-stmt } * </code> * * @param annots Annotations attached to the worker decl * @param qualifiers Preceding transactional keyword in a list * @return Parsed node */ private STNode parseNamedWorkerDeclaration(STNode annots, List<STNode> qualifiers) { startContext(ParserRuleContext.NAMED_WORKER_DECL); STNode transactionalKeyword = getTransactionalKeyword(qualifiers); STNode workerKeyword = parseWorkerKeyword(); STNode workerName = parseWorkerName(); STNode returnTypeDesc = parseReturnTypeDescriptor(); STNode workerBody = parseBlockNode(); endContext(); return STNodeFactory.createNamedWorkerDeclarationNode(annots, transactionalKeyword, workerKeyword, workerName, returnTypeDesc, workerBody); } private STNode getTransactionalKeyword(List<STNode> qualifierList) { List<STNode> validatedList = new ArrayList<>(); for (int i = 0; i < qualifierList.size(); i++) { STNode qualifier = qualifierList.get(i); int nextIndex = i + 1; if (isSyntaxKindInList(validatedList, qualifier.kind)) { updateLastNodeInListWithInvalidNode(validatedList, qualifier, DiagnosticErrorCode.ERROR_DUPLICATE_QUALIFIER, ((STToken) qualifier).text()); } else if (qualifier.kind == SyntaxKind.TRANSACTIONAL_KEYWORD) { validatedList.add(qualifier); } else if (qualifierList.size() == nextIndex) { addInvalidNodeToNextToken(qualifier, DiagnosticErrorCode.ERROR_QUALIFIER_NOT_ALLOWED, ((STToken) qualifier).text()); } else { updateANodeInListWithLeadingInvalidNode(qualifierList, nextIndex, qualifier, DiagnosticErrorCode.ERROR_QUALIFIER_NOT_ALLOWED, ((STToken) qualifier).text()); } } STNode transactionalKeyword; if 
(validatedList.isEmpty()) { transactionalKeyword = STNodeFactory.createEmptyNode(); } else { transactionalKeyword = validatedList.get(0); } return transactionalKeyword; } private STNode parseReturnTypeDescriptor() { STToken token = peek(); if (token.kind != SyntaxKind.RETURNS_KEYWORD) { return STNodeFactory.createEmptyNode(); } STNode returnsKeyword = consume(); STNode annot = parseOptionalAnnotations(); STNode type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_RETURN_TYPE_DESC); return STNodeFactory.createReturnTypeDescriptorNode(returnsKeyword, annot, type); } /** * Parse worker keyword. * * @return Parsed node */ private STNode parseWorkerKeyword() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.WORKER_KEYWORD) { return consume(); } else { recover(peek(), ParserRuleContext.WORKER_KEYWORD); return parseWorkerKeyword(); } } /** * Parse worker name. * <p> * <code>worker-name := identifier</code> * * @return Parsed node */ private STNode parseWorkerName() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) { return consume(); } else { recover(peek(), ParserRuleContext.WORKER_NAME); return parseWorkerName(); } } /** * Parse lock statement. * <code>lock-stmt := lock block-stmt [on-fail-clause]</code> * * @return Lock statement */ private STNode parseLockStatement() { startContext(ParserRuleContext.LOCK_STMT); STNode lockKeyword = parseLockKeyword(); STNode blockStatement = parseBlockNode(); endContext(); STNode onFailClause = parseOptionalOnFailClause(); return STNodeFactory.createLockStatementNode(lockKeyword, blockStatement, onFailClause); } /** * Parse lock-keyword. * * @return lock-keyword node */ private STNode parseLockKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.LOCK_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.LOCK_KEYWORD); return parseLockKeyword(); } } /** * Parse union type descriptor. 
* union-type-descriptor := type-descriptor | type-descriptor * * @param leftTypeDesc Type desc in the LHS of the union type desc. * @param context Current context. * @return parsed union type desc node */ private STNode parseUnionTypeDescriptor(STNode leftTypeDesc, ParserRuleContext context, boolean isTypedBindingPattern) { STNode pipeToken = consume(); STNode rightTypeDesc = parseTypeDescriptorInternal(new ArrayList<>(), context, isTypedBindingPattern, false); return createUnionTypeDesc(leftTypeDesc, pipeToken, rightTypeDesc); } private STNode createUnionTypeDesc(STNode leftTypeDesc, STNode pipeToken, STNode rightTypeDesc) { leftTypeDesc = validateForUsageOfVar(leftTypeDesc); rightTypeDesc = validateForUsageOfVar(rightTypeDesc); return STNodeFactory.createUnionTypeDescriptorNode(leftTypeDesc, pipeToken, rightTypeDesc); } /** * Parse pipe token. * * @return parsed pipe token node */ private STNode parsePipeToken() { STToken token = peek(); if (token.kind == SyntaxKind.PIPE_TOKEN) { return consume(); } else { recover(token, ParserRuleContext.PIPE); return parsePipeToken(); } } private boolean isTypeStartingToken(SyntaxKind nodeKind) { switch (nodeKind) { case IDENTIFIER_TOKEN: case SERVICE_KEYWORD: case RECORD_KEYWORD: case OBJECT_KEYWORD: case ABSTRACT_KEYWORD: case CLIENT_KEYWORD: case OPEN_PAREN_TOKEN: case MAP_KEYWORD: case FUTURE_KEYWORD: case TYPEDESC_KEYWORD: case ERROR_KEYWORD: case XML_KEYWORD: case STREAM_KEYWORD: case TABLE_KEYWORD: case FUNCTION_KEYWORD: case OPEN_BRACKET_TOKEN: case DISTINCT_KEYWORD: case ISOLATED_KEYWORD: case TRANSACTIONAL_KEYWORD: case TRANSACTION_KEYWORD: return true; default: if (isSingletonTypeDescStart(nodeKind, true)) { return true; } return isSimpleType(nodeKind); } } private boolean isSimpleTypeInExpression(SyntaxKind nodeKind) { switch (nodeKind) { case VAR_KEYWORD: case READONLY_KEYWORD: return false; default: return isSimpleType(nodeKind); } } static boolean isSimpleType(SyntaxKind nodeKind) { switch (nodeKind) { case 
INT_KEYWORD: case FLOAT_KEYWORD: case DECIMAL_KEYWORD: case BOOLEAN_KEYWORD: case STRING_KEYWORD: case BYTE_KEYWORD: case JSON_KEYWORD: case HANDLE_KEYWORD: case ANY_KEYWORD: case ANYDATA_KEYWORD: case NEVER_KEYWORD: case VAR_KEYWORD: case READONLY_KEYWORD: return true; default: return false; } } static boolean isPredeclaredPrefix(SyntaxKind nodeKind) { switch (nodeKind) { case BOOLEAN_KEYWORD: case DECIMAL_KEYWORD: case ERROR_KEYWORD: case FLOAT_KEYWORD: case FUTURE_KEYWORD: case INT_KEYWORD: case MAP_KEYWORD: case OBJECT_KEYWORD: case STREAM_KEYWORD: case STRING_KEYWORD: case TABLE_KEYWORD: case TRANSACTION_KEYWORD: case TYPEDESC_KEYWORD: case XML_KEYWORD: return true; default: return false; } } private boolean isQualifiedIdentifierPredeclaredPrefix(SyntaxKind nodeKind) { return isPredeclaredPrefix(nodeKind) && getNextNextToken().kind == SyntaxKind.COLON_TOKEN; } private SyntaxKind getBuiltinTypeSyntaxKind(SyntaxKind typeKeyword) { switch (typeKeyword) { case INT_KEYWORD: return SyntaxKind.INT_TYPE_DESC; case FLOAT_KEYWORD: return SyntaxKind.FLOAT_TYPE_DESC; case DECIMAL_KEYWORD: return SyntaxKind.DECIMAL_TYPE_DESC; case BOOLEAN_KEYWORD: return SyntaxKind.BOOLEAN_TYPE_DESC; case STRING_KEYWORD: return SyntaxKind.STRING_TYPE_DESC; case BYTE_KEYWORD: return SyntaxKind.BYTE_TYPE_DESC; case JSON_KEYWORD: return SyntaxKind.JSON_TYPE_DESC; case HANDLE_KEYWORD: return SyntaxKind.HANDLE_TYPE_DESC; case ANY_KEYWORD: return SyntaxKind.ANY_TYPE_DESC; case ANYDATA_KEYWORD: return SyntaxKind.ANYDATA_TYPE_DESC; case NEVER_KEYWORD: return SyntaxKind.NEVER_TYPE_DESC; case VAR_KEYWORD: return SyntaxKind.VAR_TYPE_DESC; case READONLY_KEYWORD: return SyntaxKind.READONLY_TYPE_DESC; default: assert false : typeKeyword + " is not a built-in type"; return SyntaxKind.TYPE_REFERENCE; } } /** * Parse fork-keyword. 
* * @return Fork-keyword node */ private STNode parseForkKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.FORK_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.FORK_KEYWORD); return parseForkKeyword(); } } /** * Parse fork statement. * <code>fork-stmt := fork { named-worker-decl+ }</code> * * @return Fork statement */ private STNode parseForkStatement() { startContext(ParserRuleContext.FORK_STMT); STNode forkKeyword = parseForkKeyword(); STNode openBrace = parseOpenBrace(); ArrayList<STNode> workers = new ArrayList<>(); while (!isEndOfStatements()) { STNode stmt = parseStatement(); if (stmt == null) { break; } if (stmt.kind == SyntaxKind.LOCAL_TYPE_DEFINITION_STATEMENT) { addInvalidNodeToNextToken(stmt, DiagnosticErrorCode.ERROR_LOCAL_TYPE_DEFINITION_NOT_ALLOWED); continue; } switch (stmt.kind) { case NAMED_WORKER_DECLARATION: workers.add(stmt); break; default: if (workers.isEmpty()) { openBrace = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(openBrace, stmt, DiagnosticErrorCode.ERROR_ONLY_NAMED_WORKERS_ALLOWED_HERE); } else { updateLastNodeInListWithInvalidNode(workers, stmt, DiagnosticErrorCode.ERROR_ONLY_NAMED_WORKERS_ALLOWED_HERE); } } } STNode namedWorkerDeclarations = STNodeFactory.createNodeList(workers); STNode closeBrace = parseCloseBrace(); endContext(); STNode forkStmt = STNodeFactory.createForkStatementNode(forkKeyword, openBrace, namedWorkerDeclarations, closeBrace); if (isNodeListEmpty(namedWorkerDeclarations)) { return SyntaxErrors.addDiagnostic(forkStmt, DiagnosticErrorCode.ERROR_MISSING_NAMED_WORKER_DECLARATION_IN_FORK_STMT); } return forkStmt; } /** * Parse trap expression. 
* <p> * <code> * trap-expr := trap expression * </code> * * @param allowActions Allow actions * @param isRhsExpr Whether this is a RHS expression or not * @return Trap expression node */ private STNode parseTrapExpression(boolean isRhsExpr, boolean allowActions, boolean isInConditionalExpr) { STNode trapKeyword = parseTrapKeyword(); STNode expr = parseExpression(OperatorPrecedence.TRAP, isRhsExpr, allowActions, isInConditionalExpr); if (isAction(expr)) { return STNodeFactory.createTrapExpressionNode(SyntaxKind.TRAP_ACTION, trapKeyword, expr); } return STNodeFactory.createTrapExpressionNode(SyntaxKind.TRAP_EXPRESSION, trapKeyword, expr); } /** * Parse trap-keyword. * * @return Trap-keyword node */ private STNode parseTrapKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.TRAP_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.TRAP_KEYWORD); return parseTrapKeyword(); } } /** * Parse list constructor expression. * <p> * <code> * list-constructor-expr := [ [ expr-list ] ] * <br/> * expr-list := expression (, expression)* * </code> * * @return Parsed node */ private STNode parseListConstructorExpr() { startContext(ParserRuleContext.LIST_CONSTRUCTOR); STNode openBracket = parseOpenBracket(); STNode expressions = parseOptionalExpressionsList(); STNode closeBracket = parseCloseBracket(); endContext(); return STNodeFactory.createListConstructorExpressionNode(openBracket, expressions, closeBracket); } /** * Parse optional expression list. 
* * @return Parsed node */ private STNode parseOptionalExpressionsList() { List<STNode> expressions = new ArrayList<>(); if (isEndOfListConstructor(peek().kind)) { return STNodeFactory.createEmptyNodeList(); } STNode expr = parseExpression(); expressions.add(expr); return parseOptionalExpressionsList(expressions); } private STNode parseOptionalExpressionsList(List<STNode> expressions) { STNode listConstructorMemberEnd; while (!isEndOfListConstructor(peek().kind)) { listConstructorMemberEnd = parseListConstructorMemberEnd(); if (listConstructorMemberEnd == null) { break; } expressions.add(listConstructorMemberEnd); STNode expr = parseExpression(); expressions.add(expr); } return STNodeFactory.createNodeList(expressions); } private boolean isEndOfListConstructor(SyntaxKind tokenKind) { switch (tokenKind) { case EOF_TOKEN: case CLOSE_BRACKET_TOKEN: return true; default: return false; } } private STNode parseListConstructorMemberEnd() { STToken nextToken = peek(); switch (nextToken.kind) { case COMMA_TOKEN: return consume(); case CLOSE_BRACKET_TOKEN: return null; default: recover(nextToken, ParserRuleContext.LIST_CONSTRUCTOR_MEMBER_END); return parseListConstructorMemberEnd(); } } /** * Parse foreach statement. * <code>foreach-stmt := foreach typed-binding-pattern in action-or-expr block-stmt [on-fail-clause]</code> * * @return foreach statement */ private STNode parseForEachStatement() { startContext(ParserRuleContext.FOREACH_STMT); STNode forEachKeyword = parseForEachKeyword(); STNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.FOREACH_STMT); STNode inKeyword = parseInKeyword(); STNode actionOrExpr = parseActionOrExpression(); STNode blockStatement = parseBlockNode(); endContext(); STNode onFailClause = parseOptionalOnFailClause(); return STNodeFactory.createForEachStatementNode(forEachKeyword, typedBindingPattern, inKeyword, actionOrExpr, blockStatement, onFailClause); } /** * Parse foreach-keyword. 
* * @return ForEach-keyword node */ private STNode parseForEachKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.FOREACH_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.FOREACH_KEYWORD); return parseForEachKeyword(); } } /** * Parse in-keyword. * * @return In-keyword node */ private STNode parseInKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.IN_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.IN_KEYWORD); return parseInKeyword(); } } /** * Parse type cast expression. * <p> * <code> * type-cast-expr := < type-cast-param > expression * <br/> * type-cast-param := [annots] type-descriptor | annots * </code> * * @return Parsed node */ private STNode parseTypeCastExpr(boolean isRhsExpr, boolean allowActions, boolean isInConditionalExpr) { startContext(ParserRuleContext.TYPE_CAST); STNode ltToken = parseLTToken(); return parseTypeCastExpr(ltToken, isRhsExpr, allowActions, isInConditionalExpr); } private STNode parseTypeCastExpr(STNode ltToken, boolean isRhsExpr, boolean allowActions, boolean isInConditionalExpr) { STNode typeCastParam = parseTypeCastParam(); STNode gtToken = parseGTToken(); endContext(); STNode expression = parseExpression(OperatorPrecedence.EXPRESSION_ACTION, isRhsExpr, allowActions, isInConditionalExpr); return STNodeFactory.createTypeCastExpressionNode(ltToken, typeCastParam, gtToken, expression); } private STNode parseTypeCastParam() { STNode annot; STNode type; STToken token = peek(); switch (token.kind) { case AT_TOKEN: annot = parseOptionalAnnotations(); token = peek(); if (isTypeStartingToken(token.kind)) { type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS); } else { type = STNodeFactory.createEmptyNode(); } break; default: annot = STNodeFactory.createEmptyNode(); type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS); break; } return STNodeFactory.createTypeCastParamNode(getAnnotations(annot), type); } /** * Parse table 
constructor expression. * <p> * <code> * table-constructor-expr-rhs := [ [row-list] ] * </code> * * @param tableKeyword tableKeyword that precedes this rhs * @param keySpecifier keySpecifier that precedes this rhs * @return Parsed node */ private STNode parseTableConstructorExprRhs(STNode tableKeyword, STNode keySpecifier) { switchContext(ParserRuleContext.TABLE_CONSTRUCTOR); STNode openBracket = parseOpenBracket(); STNode rowList = parseRowList(); STNode closeBracket = parseCloseBracket(); return STNodeFactory.createTableConstructorExpressionNode(tableKeyword, keySpecifier, openBracket, rowList, closeBracket); } /** * Parse table-keyword. * * @return Table-keyword node */ private STNode parseTableKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.TABLE_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.TABLE_KEYWORD); return parseTableKeyword(); } } /** * Parse table rows. * <p> * <code>row-list := [ mapping-constructor-expr (, mapping-constructor-expr)* ]</code> * * @return Parsed node */ private STNode parseRowList() { STToken nextToken = peek(); if (isEndOfTableRowList(nextToken.kind)) { return STNodeFactory.createEmptyNodeList(); } List<STNode> mappings = new ArrayList<>(); STNode mapExpr = parseMappingConstructorExpr(); mappings.add(mapExpr); nextToken = peek(); STNode rowEnd; while (!isEndOfTableRowList(nextToken.kind)) { rowEnd = parseTableRowEnd(); if (rowEnd == null) { break; } mappings.add(rowEnd); mapExpr = parseMappingConstructorExpr(); mappings.add(mapExpr); nextToken = peek(); } return STNodeFactory.createNodeList(mappings); } private boolean isEndOfTableRowList(SyntaxKind tokenKind) { switch (tokenKind) { case EOF_TOKEN: case CLOSE_BRACKET_TOKEN: return true; case COMMA_TOKEN: case OPEN_BRACE_TOKEN: return false; default: return isEndOfMappingConstructor(tokenKind); } } private STNode parseTableRowEnd() { switch (peek().kind) { case COMMA_TOKEN: return parseComma(); case CLOSE_BRACKET_TOKEN: case EOF_TOKEN: return 
null; default: recover(peek(), ParserRuleContext.TABLE_ROW_END); return parseTableRowEnd(); } } /** * Parse key specifier. * <p> * <code>key-specifier := key ( [ field-name (, field-name)* ] )</code> * * @return Parsed node */ private STNode parseKeySpecifier() { startContext(ParserRuleContext.KEY_SPECIFIER); STNode keyKeyword = parseKeyKeyword(); STNode openParen = parseOpenParenthesis(); STNode fieldNames = parseFieldNames(); STNode closeParen = parseCloseParenthesis(); endContext(); return STNodeFactory.createKeySpecifierNode(keyKeyword, openParen, fieldNames, closeParen); } /** * Parse key-keyword. * * @return Key-keyword node */ private STNode parseKeyKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.KEY_KEYWORD) { return consume(); } if (isKeyKeyword(token)) { return getKeyKeyword(consume()); } recover(token, ParserRuleContext.KEY_KEYWORD); return parseKeyKeyword(); } static boolean isKeyKeyword(STToken token) { return token.kind == SyntaxKind.IDENTIFIER_TOKEN && LexerTerminals.KEY.equals(token.text()); } private STNode getKeyKeyword(STToken token) { return STNodeFactory.createToken(SyntaxKind.KEY_KEYWORD, token.leadingMinutiae(), token.trailingMinutiae(), token.diagnostics()); } /** * Parse field names. 
* <p> * <code>field-name-list := [ field-name (, field-name)* ]</code> * * @return Parsed node */ private STNode parseFieldNames() { STToken nextToken = peek(); if (isEndOfFieldNamesList(nextToken.kind)) { return STNodeFactory.createEmptyNodeList(); } List<STNode> fieldNames = new ArrayList<>(); STNode fieldName = parseVariableName(); fieldNames.add(fieldName); nextToken = peek(); STNode leadingComma; while (!isEndOfFieldNamesList(nextToken.kind)) { leadingComma = parseComma(); fieldNames.add(leadingComma); fieldName = parseVariableName(); fieldNames.add(fieldName); nextToken = peek(); } return STNodeFactory.createNodeList(fieldNames); } private boolean isEndOfFieldNamesList(SyntaxKind tokenKind) { switch (tokenKind) { case COMMA_TOKEN: case IDENTIFIER_TOKEN: return false; default: return true; } } /** * Parse error type descriptor. * <p> * error-type-descriptor := error [type-parameter] * type-parameter := < type-descriptor > * </p> * * @return Parsed node */ private STNode parseErrorTypeDescriptor() { STNode errorKeywordToken = parseErrorKeyword(); return parseErrorTypeDescriptor(errorKeywordToken); } private STNode parseErrorTypeDescriptor(STNode errorKeywordToken) { STNode errorTypeParamsNode; STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.LT_TOKEN) { errorTypeParamsNode = parseTypeParameter(); } else { errorTypeParamsNode = STNodeFactory.createEmptyNode(); } return STNodeFactory.createErrorTypeDescriptorNode(errorKeywordToken, errorTypeParamsNode); } /** * Parse error-keyword. * * @return Parsed error-keyword node */ private STNode parseErrorKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.ERROR_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.ERROR_KEYWORD); return parseErrorKeyword(); } } /** * Parse typedesc type descriptor. 
* typedesc-type-descriptor := typedesc type-parameter * * @return Parsed typedesc type node */ private STNode parseTypedescTypeDescriptor(STNode typedescKeywordToken) { STNode typedescTypeParamsNode; STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.LT_TOKEN) { typedescTypeParamsNode = parseTypeParameter(); } else { typedescTypeParamsNode = STNodeFactory.createEmptyNode(); } return STNodeFactory.createTypedescTypeDescriptorNode(typedescKeywordToken, typedescTypeParamsNode); } /** * Parse stream type descriptor. * <p> * stream-type-descriptor := stream [stream-type-parameters] * stream-type-parameters := < type-descriptor [, type-descriptor]> * </p> * * @return Parsed stream type descriptor node */ private STNode parseStreamTypeDescriptor(STNode streamKeywordToken) { STNode streamTypeParamsNode; STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.LT_TOKEN) { streamTypeParamsNode = parseStreamTypeParamsNode(); } else { streamTypeParamsNode = STNodeFactory.createEmptyNode(); } return STNodeFactory.createStreamTypeDescriptorNode(streamKeywordToken, streamTypeParamsNode); } /** * Parse xml type descriptor. * xml-type-descriptor := xml type-parameter * * @return Parsed typedesc type node */ private STNode parseXmlTypeDescriptor(STNode xmlKeywordToken) { STNode typedescTypeParamsNode; STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.LT_TOKEN) { typedescTypeParamsNode = parseTypeParameter(); } else { typedescTypeParamsNode = STNodeFactory.createEmptyNode(); } return STNodeFactory.createXmlTypeDescriptorNode(xmlKeywordToken, typedescTypeParamsNode); } /** * Parse stream type params node. 
* <p> * stream-type-parameters := < type-descriptor [, type-descriptor]> * </p> * * @return Parsed stream type params node */ private STNode parseStreamTypeParamsNode() { STNode ltToken = parseLTToken(); startContext(ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC); STNode leftTypeDescNode = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC); STNode streamTypedesc = parseStreamTypeParamsNode(ltToken, leftTypeDescNode); endContext(); return streamTypedesc; } private STNode parseStreamTypeParamsNode(STNode ltToken, STNode leftTypeDescNode) { STNode commaToken, rightTypeDescNode, gtToken; switch (peek().kind) { case COMMA_TOKEN: commaToken = parseComma(); rightTypeDescNode = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC); break; case GT_TOKEN: commaToken = STNodeFactory.createEmptyNode(); rightTypeDescNode = STNodeFactory.createEmptyNode(); break; default: recover(peek(), ParserRuleContext.STREAM_TYPE_FIRST_PARAM_RHS, ltToken, leftTypeDescNode); return parseStreamTypeParamsNode(ltToken, leftTypeDescNode); } gtToken = parseGTToken(); return STNodeFactory.createStreamTypeParamsNode(ltToken, leftTypeDescNode, commaToken, rightTypeDescNode, gtToken); } /** * Parse stream-keyword. * * @return Parsed stream-keyword node */ private STNode parseStreamKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.STREAM_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.STREAM_KEYWORD); return parseStreamKeyword(); } } /** * Parse let expression. 
     * <p>
     * <code>
     * let-expr := let let-var-decl [, let-var-decl]* in expression
     * </code>
     *
     * @return Parsed node
     */
    private STNode parseLetExpression(boolean isRhsExpr) {
        STNode letKeyword = parseLetKeyword();
        STNode letVarDeclarations = parseLetVarDeclarations(ParserRuleContext.LET_EXPR_LET_VAR_DECL, isRhsExpr);
        STNode inKeyword = parseInKeyword();
        // At least one let-var-decl is required; attach a diagnostic to the keyword if the list is empty.
        letKeyword = cloneWithDiagnosticIfListEmpty(letVarDeclarations, letKeyword,
                DiagnosticErrorCode.ERROR_MISSING_LET_VARIABLE_DECLARATION);
        STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
        return STNodeFactory.createLetExpressionNode(letKeyword, letVarDeclarations, inKeyword, expression);
    }

    /**
     * Parse let-keyword.
     *
     * @return Let-keyword node
     */
    private STNode parseLetKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.LET_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.LET_KEYWORD);
            return parseLetKeyword();
        }
    }

    /**
     * Parse let variable declarations.
     * <p>
     * <code>let-var-decl-list := let-var-decl [, let-var-decl]*</code>
     *
     * @return Parsed node
     */
    private STNode parseLetVarDeclarations(ParserRuleContext context, boolean isRhsExpr) {
        startContext(context);
        List<STNode> varDecls = new ArrayList<>();
        STToken nextToken = peek();
        if (isEndOfLetVarDeclarations(nextToken.kind)) {
            endContext();
            return STNodeFactory.createEmptyNodeList();
        }
        STNode varDec = parseLetVarDecl(isRhsExpr);
        varDecls.add(varDec);
        nextToken = peek();
        STNode leadingComma;
        // Separators are kept in the list alongside the declarations.
        while (!isEndOfLetVarDeclarations(nextToken.kind)) {
            leadingComma = parseComma();
            varDecls.add(leadingComma);
            varDec = parseLetVarDecl(isRhsExpr);
            varDecls.add(varDec);
            nextToken = peek();
        }
        endContext();
        return STNodeFactory.createNodeList(varDecls);
    }

    private boolean isEndOfLetVarDeclarations(SyntaxKind tokenKind) {
        switch (tokenKind) {
            case COMMA_TOKEN:
            case AT_TOKEN:
                return false;
            case IN_KEYWORD:
                return true;
            default:
                // Anything that can start a type descriptor can start another let-var-decl.
                return !isTypeStartingToken(tokenKind);
        }
    }

    /**
     * Parse let variable declaration.
     * <p>
     * <code>let-var-decl := [annots] typed-binding-pattern = expression</code>
     *
     * @return Parsed node
     */
    private STNode parseLetVarDecl(boolean isRhsExpr) {
        STNode annot = parseOptionalAnnotations();
        STNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.LET_EXPR_LET_VAR_DECL);
        STNode assign = parseAssignOp();
        STNode expression = parseExpression(OperatorPrecedence.ANON_FUNC_OR_LET, isRhsExpr, false);
        return STNodeFactory.createLetVariableDeclarationNode(annot, typedBindingPattern, assign, expression);
    }

    /**
     * Parse raw backtick string template expression.
     * <p>
     * <code>BacktickString := `expression`</code>
     *
     * @return Template expression node
     */
    private STNode parseTemplateExpression() {
        // Raw templates have no leading keyword (`string`/`xml`), so the type slot is empty.
        STNode type = STNodeFactory.createEmptyNode();
        STNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);
        STNode content = parseTemplateContent();
        STNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);
        return STNodeFactory.createTemplateExpressionNode(SyntaxKind.RAW_TEMPLATE_EXPRESSION, type, startingBackTick,
                content, endingBackTick);
    }

    private STNode parseTemplateContent() {
        List<STNode> items = new ArrayList<>();
        STToken nextToken = peek();
        while (!isEndOfBacktickContent(nextToken.kind)) {
            STNode contentItem = parseTemplateItem();
            items.add(contentItem);
            nextToken = peek();
        }
        return STNodeFactory.createNodeList(items);
    }

    private boolean isEndOfBacktickContent(SyntaxKind kind) {
        switch (kind) {
            case EOF_TOKEN:
            case BACKTICK_TOKEN:
                return true;
            default:
                return false;
        }
    }

    // A template item is either an `${...}` interpolation or a literal template-string token.
    private STNode parseTemplateItem() {
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.INTERPOLATION_START_TOKEN) {
            return parseInterpolation();
        }
        return consume();
    }

    /**
     * Parse string template expression.
     * <p>
     * <code>string-template-expr := string ` expression `</code>
     *
     * @return String template expression node
     */
    private STNode parseStringTemplateExpression() {
        STNode type = parseStringKeyword();
        STNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);
        STNode content = parseTemplateContent();
        STNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_END);
        return STNodeFactory.createTemplateExpressionNode(SyntaxKind.STRING_TEMPLATE_EXPRESSION, type,
                startingBackTick, content, endingBackTick);
    }

    /**
     * Parse <code>string</code> keyword.
     *
     * @return string keyword node
     */
    private STNode parseStringKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.STRING_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.STRING_KEYWORD);
            return parseStringKeyword();
        }
    }

    /**
     * Parse XML template expression.
     * <p>
     * <code>xml-template-expr := xml BacktickString</code>
     *
     * @return XML template expression
     */
    private STNode parseXMLTemplateExpression() {
        STNode xmlKeyword = parseXMLKeyword();
        STNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);
        // XML content requires a dedicated parse: see parseTemplateContentAsXML.
        STNode content = parseTemplateContentAsXML();
        STNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_END);
        return STNodeFactory.createTemplateExpressionNode(SyntaxKind.XML_TEMPLATE_EXPRESSION, xmlKeyword,
                startingBackTick, content, endingBackTick);
    }

    /**
     * Parse <code>xml</code> keyword.
     *
     * @return xml keyword node
     */
    private STNode parseXMLKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.XML_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.XML_KEYWORD);
            return parseXMLKeyword();
        }
    }

    /**
     * Parse the content of the template string as XML. This method first read the
     * input in the same way as the raw-backtick-template (BacktickString). Then
     * it parses the content as XML.
     *
     * @return XML node
     */
    private STNode parseTemplateContentAsXML() {
        // Collect interpolated expressions separately; in the text handed to the XML
        // lexer each interpolation is replaced by the placeholder "${}".
        ArrayDeque<STNode> expressions = new ArrayDeque<>();
        StringBuilder xmlStringBuilder = new StringBuilder();
        STToken nextToken = peek();
        while (!isEndOfBacktickContent(nextToken.kind)) {
            STNode contentItem = parseTemplateItem();
            if (contentItem.kind == SyntaxKind.TEMPLATE_STRING) {
                xmlStringBuilder.append(((STToken) contentItem).text());
            } else {
                xmlStringBuilder.append("${}");
                expressions.add(contentItem);
            }
            nextToken = peek();
        }
        CharReader charReader = CharReader.from(xmlStringBuilder.toString());
        AbstractTokenReader tokenReader = new TokenReader(new XMLLexer(charReader));
        XMLParser xmlParser = new XMLParser(tokenReader, expressions);
        return xmlParser.parse();
    }

    /**
     * Parse interpolation of a back-tick string.
     * <p>
     * <code>
     * interpolation := ${ expression }
     * </code>
     *
     * @return Interpolation node
     */
    private STNode parseInterpolation() {
        startContext(ParserRuleContext.INTERPOLATION);
        STNode interpolStart = parseInterpolationStart();
        STNode expr = parseExpression();
        // Attach any stray tokens before the closing brace to the expression as invalid minutiae.
        while (!isEndOfInterpolation()) {
            STToken nextToken = consume();
            expr = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(expr, nextToken,
                    DiagnosticErrorCode.ERROR_INVALID_TOKEN, nextToken.text());
        }
        STNode closeBrace = parseCloseBrace();
        endContext();
        return STNodeFactory.createInterpolationNode(interpolStart, expr, closeBrace);
    }

    private boolean isEndOfInterpolation() {
        SyntaxKind nextTokenKind = peek().kind;
        switch (nextTokenKind) {
            case EOF_TOKEN:
            case BACKTICK_TOKEN:
                return true;
            default:
                // A close-brace only terminates the interpolation once the lexer has left
                // interpolation mode (i.e. it is not a brace inside the interpolated expression).
                ParserMode currentLexerMode = this.tokenReader.getCurrentMode();
                return nextTokenKind == SyntaxKind.CLOSE_BRACE_TOKEN && currentLexerMode != ParserMode.INTERPOLATION &&
                        currentLexerMode != ParserMode.INTERPOLATION_BRACED_CONTENT;
        }
    }

    /**
     * Parse interpolation start token.
     * <p>
     * <code>interpolation-start := ${</code>
     *
     * @return Interpolation start token
     */
    private STNode parseInterpolationStart() {
        STToken token = peek();
        if (token.kind == SyntaxKind.INTERPOLATION_START_TOKEN) {
            return consume();
        } else {
            recover(token, ParserRuleContext.INTERPOLATION_START_TOKEN);
            return parseInterpolationStart();
        }
    }

    /**
     * Parse back-tick token.
     *
     * @return Back-tick token
     */
    private STNode parseBacktickToken(ParserRuleContext ctx) {
        STToken token = peek();
        if (token.kind == SyntaxKind.BACKTICK_TOKEN) {
            return consume();
        } else {
            recover(token, ctx);
            return parseBacktickToken(ctx);
        }
    }

    /**
     * Parse table type descriptor.
     * <p>
     * table-type-descriptor := table row-type-parameter [key-constraint]
     * row-type-parameter := type-parameter
     * key-constraint := key-specifier | key-type-constraint
     * key-specifier := key ( [ field-name (, field-name)* ] )
     * key-type-constraint := key type-parameter
     * </p>
     *
     * @return Parsed table type desc node.
     */
    private STNode parseTableTypeDescriptor(STNode tableKeywordToken) {
        STNode rowTypeParameterNode = parseRowTypeParameter();
        STNode keyConstraintNode;
        STToken nextToken = peek();
        // `key` is a contextual keyword, so check the token text rather than its kind.
        if (isKeyKeyword(nextToken)) {
            STNode keyKeywordToken = getKeyKeyword(consume());
            keyConstraintNode = parseKeyConstraint(keyKeywordToken);
        } else {
            keyConstraintNode = STNodeFactory.createEmptyNode();
        }
        return STNodeFactory.createTableTypeDescriptorNode(tableKeywordToken, rowTypeParameterNode, keyConstraintNode);
    }

    /**
     * Parse row type parameter node.
     * <p>
     * row-type-parameter := type-parameter
     * </p>
     *
     * @return Parsed node.
     */
    private STNode parseRowTypeParameter() {
        startContext(ParserRuleContext.ROW_TYPE_PARAM);
        STNode rowTypeParameterNode = parseTypeParameter();
        endContext();
        return rowTypeParameterNode;
    }

    /**
     * Parse type parameter node.
     * <p>
     * type-parameter := < type-descriptor >
     * </p>
     *
     * @return Parsed node
     */
    private STNode parseTypeParameter() {
        STNode ltToken = parseLTToken();
        STNode typeNode = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);
        STNode gtToken = parseGTToken();
        return STNodeFactory.createTypeParameterNode(ltToken, typeNode, gtToken);
    }

    /**
     * Parse key constraint.
     * <p>
     * key-constraint := key-specifier | key-type-constraint
     * </p>
     *
     * @return Parsed node.
     */
    private STNode parseKeyConstraint(STNode keyKeywordToken) {
        // `key(` begins a key-specifier; `key<` begins a key-type-constraint.
        switch (peek().kind) {
            case OPEN_PAREN_TOKEN:
                return parseKeySpecifier(keyKeywordToken);
            case LT_TOKEN:
                return parseKeyTypeConstraint(keyKeywordToken);
            default:
                recover(peek(), ParserRuleContext.KEY_CONSTRAINTS_RHS, keyKeywordToken);
                return parseKeyConstraint(keyKeywordToken);
        }
    }

    /**
     * Parse key specifier given parsed key keyword token.
     * <p>
     * <code>key-specifier := key ( [ field-name (, field-name)* ] )</code>
     *
     * @return Parsed node
     */
    private STNode parseKeySpecifier(STNode keyKeywordToken) {
        startContext(ParserRuleContext.KEY_SPECIFIER);
        STNode openParenToken = parseOpenParenthesis();
        STNode fieldNamesNode = parseFieldNames();
        STNode closeParenToken = parseCloseParenthesis();
        endContext();
        return STNodeFactory.createKeySpecifierNode(keyKeywordToken, openParenToken, fieldNamesNode, closeParenToken);
    }

    /**
     * Parse key type constraint.
     * <p>
     * key-type-constraint := key type-parameter
     * </p>
     *
     * @return Parsed node
     */
    private STNode parseKeyTypeConstraint(STNode keyKeywordToken) {
        STNode typeParameterNode = parseTypeParameter();
        return STNodeFactory.createKeyTypeConstraintNode(keyKeywordToken, typeParameterNode);
    }

    /**
     * Parse function type descriptor.
     * <p>
     * <code>function-type-descriptor := [isolated] function function-signature</code>
     *
     * @param qualifiers Preceding type descriptor qualifiers
     * @return Function type descriptor node
     */
    private STNode parseFunctionTypeDesc(List<STNode> qualifiers) {
        startContext(ParserRuleContext.FUNC_TYPE_DESC);
        STNode qualifierList;
        STNode functionKeyword = parseFunctionKeyword();
        STNode signature;
        switch (peek().kind) {
            case OPEN_PAREN_TOKEN:
                signature = parseFuncSignature(true);
                qualifierList = createFuncTypeQualNodeList(qualifiers, true);
                break;
            default:
                // `function` without a signature: the `any function` type form.
                signature = STNodeFactory.createEmptyNode();
                qualifierList = createFuncTypeQualNodeList(qualifiers, false);
                break;
        }
        endContext();
        return STNodeFactory.createFunctionTypeDescriptorNode(qualifierList, functionKeyword, signature);
    }

    // Validates the qualifier list for a function type: drops duplicates and qualifiers
    // not allowed in the current form, attaching diagnostics to neighboring nodes.
    private STNode createFuncTypeQualNodeList(List<STNode> qualifierList, boolean hasFuncSignature) {
        List<STNode> validatedList = new ArrayList<>();
        for (int i = 0; i < qualifierList.size(); i++) {
            STNode qualifier = qualifierList.get(i);
            int nextIndex = i + 1;
            if (isSyntaxKindInList(validatedList, qualifier.kind)) {
                updateLastNodeInListWithInvalidNode(validatedList, qualifier,
                        DiagnosticErrorCode.ERROR_DUPLICATE_QUALIFIER, ((STToken) qualifier).text());
            } else if (hasFuncSignature && isRegularFuncQual(qualifier.kind)) {
                validatedList.add(qualifier);
            } else if (qualifier.kind == SyntaxKind.ISOLATED_KEYWORD) {
                validatedList.add(qualifier);
            } else if (qualifierList.size() == nextIndex) {
                // Last qualifier in the list: attach the diagnostic to the next token instead.
                addInvalidNodeToNextToken(qualifier, DiagnosticErrorCode.ERROR_QUALIFIER_NOT_ALLOWED,
                        ((STToken) qualifier).text());
            } else {
                updateANodeInListWithLeadingInvalidNode(qualifierList, nextIndex, qualifier,
                        DiagnosticErrorCode.ERROR_QUALIFIER_NOT_ALLOWED, ((STToken) qualifier).text());
            }
        }
        return STNodeFactory.createNodeList(validatedList);
    }

    private boolean isRegularFuncQual(SyntaxKind tokenKind) {
        switch (tokenKind) {
            case ISOLATED_KEYWORD:
            case TRANSACTIONAL_KEYWORD:
                return true;
            default:
                return false;
        }
    }

    /**
     * Parse
     explicit anonymous function expression.
     * <p>
     * <code>explicit-anonymous-function-expr :=
     * [annots] (isolated| transactional) function function-signature anon-func-body</code>
     *
     * @param annots Annotations.
     * @param qualifiers Function qualifiers
     * @param isRhsExpr Is expression in rhs context
     * @return Anonymous function expression node
     */
    private STNode parseExplicitFunctionExpression(STNode annots, List<STNode> qualifiers, boolean isRhsExpr) {
        startContext(ParserRuleContext.ANON_FUNC_EXPRESSION);
        STNode qualifierList = createFuncTypeQualNodeList(qualifiers, true);
        STNode funcKeyword = parseFunctionKeyword();
        STNode funcSignature = parseFuncSignature(false);
        // Context ended inside parseAnonFuncBody.
        STNode funcBody = parseAnonFuncBody(isRhsExpr);
        return STNodeFactory.createExplicitAnonymousFunctionExpressionNode(annots, qualifierList, funcKeyword,
                funcSignature, funcBody);
    }

    /**
     * Parse anonymous function body.
     * <p>
     * <code>anon-func-body := block-function-body | expr-function-body</code>
     *
     * @param isRhsExpr Is expression in rhs context
     * @return Anon function body node
     */
    private STNode parseAnonFuncBody(boolean isRhsExpr) {
        switch (peek().kind) {
            case OPEN_BRACE_TOKEN:
            case EOF_TOKEN:
                STNode body = parseFunctionBodyBlock(true);
                endContext();
                return body;
            case RIGHT_DOUBLE_ARROW_TOKEN:
                endContext();
                return parseExpressionFuncBody(true, isRhsExpr);
            default:
                recover(peek(), ParserRuleContext.ANON_FUNC_BODY, isRhsExpr);
                return parseAnonFuncBody(isRhsExpr);
        }
    }

    /**
     * Parse expression function body.
     * <p>
     * <code>expr-function-body := => expression</code>
     *
     * @param isAnon Is anonymous function.
     * @param isRhsExpr Is expression in rhs context
     * @return Expression function body node
     */
    private STNode parseExpressionFuncBody(boolean isAnon, boolean isRhsExpr) {
        STNode rightDoubleArrow = parseDoubleRightArrow();
        STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
        STNode semiColon;
        if (isAnon) {
            // Anonymous function bodies are expressions and carry no terminating semicolon.
            semiColon = STNodeFactory.createEmptyNode();
        } else {
            semiColon = parseSemicolon();
        }
        return STNodeFactory.createExpressionFunctionBodyNode(rightDoubleArrow, expression, semiColon);
    }

    /**
     * Parse '=>' token.
     *
     * @return Double right arrow token
     */
    private STNode parseDoubleRightArrow() {
        STToken token = peek();
        if (token.kind == SyntaxKind.RIGHT_DOUBLE_ARROW_TOKEN) {
            return consume();
        } else {
            recover(token, ParserRuleContext.EXPR_FUNC_BODY_START);
            return parseDoubleRightArrow();
        }
    }

    private STNode parseImplicitAnonFunc(STNode params, boolean isRhsExpr) {
        switch (params.kind) {
            case SIMPLE_NAME_REFERENCE:
            case INFER_PARAM_LIST:
                break;
            case BRACED_EXPRESSION:
                // `(x)` was parsed as a braced expression; reinterpret it as a param list.
                params = getAnonFuncParam((STBracedExpressionNode) params);
                break;
            default:
                // Anything else is not a valid param list: replace it with a synthetic
                // missing identifier and keep the original as invalid minutiae.
                STToken syntheticParam = STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
                syntheticParam = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(syntheticParam, params,
                        DiagnosticErrorCode.ERROR_INVALID_PARAM_LIST_IN_INFER_ANONYMOUS_FUNCTION_EXPR);
                params = STNodeFactory.createSimpleNameReferenceNode(syntheticParam);
        }
        STNode rightDoubleArrow = parseDoubleRightArrow();
        STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
        return STNodeFactory.createImplicitAnonymousFunctionExpressionNode(params, rightDoubleArrow, expression);
    }

    /**
     * Create a new anon-func-param node from a braced expression.
     *
     * @param bracedExpression Braced expression
     * @return Anon-func param node
     */
    private STNode getAnonFuncParam(STBracedExpressionNode bracedExpression) {
        List<STNode> paramList = new ArrayList<>();
        STNode innerExpression = bracedExpression.expression;
        STNode openParen = bracedExpression.openParen;
        if (innerExpression.kind == SyntaxKind.SIMPLE_NAME_REFERENCE) {
            paramList.add(innerExpression);
        } else {
            // Not a simple name: keep it as invalid minutiae on the open paren, leaving
            // the param list empty.
            openParen = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(openParen, innerExpression,
                    DiagnosticErrorCode.ERROR_INVALID_PARAM_LIST_IN_INFER_ANONYMOUS_FUNCTION_EXPR);
        }
        return STNodeFactory.createImplicitAnonymousFunctionParameters(openParen,
                STNodeFactory.createNodeList(paramList), bracedExpression.closeParen);
    }

    /**
     * Parse implicit anon function expression.
     *
     * @param openParen Open parenthesis token
     * @param firstParam First parameter
     * @param isRhsExpr Is expression in rhs context
     * @return Implicit anon function expression node
     */
    private STNode parseImplicitAnonFunc(STNode openParen, STNode firstParam, boolean isRhsExpr) {
        List<STNode> paramList = new ArrayList<>();
        paramList.add(firstParam);
        STToken nextToken = peek();
        STNode paramEnd;
        STNode param;
        while (!isEndOfAnonFuncParametersList(nextToken.kind)) {
            paramEnd = parseImplicitAnonFuncParamEnd();
            if (paramEnd == null) {
                break;
            }
            paramList.add(paramEnd);
            param = parseIdentifier(ParserRuleContext.IMPLICIT_ANON_FUNC_PARAM);
            param = STNodeFactory.createSimpleNameReferenceNode(param);
            paramList.add(param);
            nextToken = peek();
        }
        STNode params = STNodeFactory.createNodeList(paramList);
        STNode closeParen = parseCloseParenthesis();
        endContext();
        STNode inferedParams = STNodeFactory.createImplicitAnonymousFunctionParameters(openParen, params, closeParen);
        return parseImplicitAnonFunc(inferedParams, isRhsExpr);
    }

    // Returns the separator between inferred params, or null at the end of the list.
    private STNode parseImplicitAnonFuncParamEnd() {
        switch (peek().kind) {
            case COMMA_TOKEN:
                return parseComma();
            case CLOSE_PAREN_TOKEN:
                return null;
            default:
                recover(peek(),
                        ParserRuleContext.ANON_FUNC_PARAM_RHS);
                return parseImplicitAnonFuncParamEnd();
        }
    }

    private boolean isEndOfAnonFuncParametersList(SyntaxKind tokenKind) {
        switch (tokenKind) {
            case EOF_TOKEN:
            case CLOSE_BRACE_TOKEN:
            case CLOSE_PAREN_TOKEN:
            case CLOSE_BRACKET_TOKEN:
            case SEMICOLON_TOKEN:
            case RETURNS_KEYWORD:
            case TYPE_KEYWORD:
            case LISTENER_KEYWORD:
            case IF_KEYWORD:
            case WHILE_KEYWORD:
            case DO_KEYWORD:
            case OPEN_BRACE_TOKEN:
            case RIGHT_DOUBLE_ARROW_TOKEN:
                return true;
            default:
                return false;
        }
    }

    /**
     * Parse tuple type descriptor.
     * <p>
     * <code>tuple-type-descriptor := [ tuple-member-type-descriptors ]
     * <br/><br/>
     * tuple-member-type-descriptors := member-type-descriptor (, member-type-descriptor)* [, tuple-rest-descriptor]
     *                                     | [ tuple-rest-descriptor ]
     * <br/><br/>
     * tuple-rest-descriptor := type-descriptor ...
     * </code>
     *
     * @return
     */
    private STNode parseTupleTypeDesc() {
        STNode openBracket = parseOpenBracket();
        startContext(ParserRuleContext.TYPE_DESC_IN_TUPLE);
        STNode memberTypeDesc = parseTupleMemberTypeDescList();
        STNode closeBracket = parseCloseBracket();
        endContext();
        // An empty tuple `[]` is invalid; flag the missing member type desc on the bracket.
        openBracket = cloneWithDiagnosticIfListEmpty(memberTypeDesc, openBracket,
                DiagnosticErrorCode.ERROR_MISSING_TYPE_DESC);
        return STNodeFactory.createTupleTypeDescriptorNode(openBracket, memberTypeDesc, closeBracket);
    }

    /**
     * Parse tuple member type descriptors.
     *
     * @return Parsed node
     */
    private STNode parseTupleMemberTypeDescList() {
        List<STNode> typeDescList = new ArrayList<>();
        STToken nextToken = peek();
        if (isEndOfTypeList(nextToken.kind)) {
            return STNodeFactory.createEmptyNodeList();
        }
        STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
        return parseTupleTypeMembers(typeDesc, typeDescList);
    }

    private STNode parseTupleTypeMembers(STNode typeDesc, List<STNode> typeDescList) {
        STToken nextToken;
        nextToken = peek();
        STNode tupleMemberRhs;
        while (!isEndOfTypeList(nextToken.kind)) {
            tupleMemberRhs = parseTupleMemberRhs();
            if (tupleMemberRhs == null) {
                break;
            }
            if (tupleMemberRhs.kind == SyntaxKind.ELLIPSIS_TOKEN) {
                // `T...` ends the tuple: wrap the last type desc as the rest descriptor.
                typeDesc = STNodeFactory.createRestDescriptorNode(typeDesc, tupleMemberRhs);
                break;
            }
            typeDescList.add(typeDesc);
            typeDescList.add(tupleMemberRhs);
            typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
            nextToken = peek();
        }
        typeDescList.add(typeDesc);
        return STNodeFactory.createNodeList(typeDescList);
    }

    private STNode parseTupleMemberRhs() {
        switch (peek().kind) {
            case COMMA_TOKEN:
                return parseComma();
            case CLOSE_BRACKET_TOKEN:
                return null;
            case ELLIPSIS_TOKEN:
                return parseEllipsis();
            default:
                recover(peek(), ParserRuleContext.TYPE_DESC_IN_TUPLE_RHS);
                return parseTupleMemberRhs();
        }
    }

    private boolean isEndOfTypeList(SyntaxKind nextTokenKind) {
        switch (nextTokenKind) {
            case CLOSE_BRACKET_TOKEN:
            case CLOSE_BRACE_TOKEN:
            case CLOSE_PAREN_TOKEN:
            case EOF_TOKEN:
            case EQUAL_TOKEN:
            case SEMICOLON_TOKEN:
                return true;
            default:
                return false;
        }
    }

    /**
     * Parse table constructor or query expression.
     * <p>
     * <code>
     * table-constructor-or-query-expr := table-constructor-expr | query-expr
     * <br/>
     * table-constructor-expr := table [key-specifier] [ [row-list] ]
     * <br/>
     * query-expr := [query-construct-type] query-pipeline select-clause
     *               [query-construct-type] query-pipeline select-clause on-conflict-clause?
     * <br/>
     * query-construct-type := table key-specifier | stream
     * </code>
     *
     * @return Parsed node
     */
    private STNode parseTableConstructorOrQuery(boolean isRhsExpr) {
        startContext(ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_EXPRESSION);
        STNode tableOrQueryExpr = parseTableConstructorOrQueryInternal(isRhsExpr);
        endContext();
        return tableOrQueryExpr;
    }

    private STNode parseTableConstructorOrQueryInternal(boolean isRhsExpr) {
        STNode queryConstructType;
        switch (peek().kind) {
            case FROM_KEYWORD:
                // Plain query expression without a construct type.
                queryConstructType = STNodeFactory.createEmptyNode();
                return parseQueryExprRhs(queryConstructType, isRhsExpr);
            case STREAM_KEYWORD:
                queryConstructType = parseQueryConstructType(parseStreamKeyword(), null);
                return parseQueryExprRhs(queryConstructType, isRhsExpr);
            case TABLE_KEYWORD:
                // `table` is ambiguous: could be a table constructor or a query construct type.
                STNode tableKeyword = parseTableKeyword();
                return parseTableConstructorOrQuery(tableKeyword, isRhsExpr);
            default:
                recover(peek(), ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_START, isRhsExpr);
                return parseTableConstructorOrQueryInternal(isRhsExpr);
        }
    }

    private STNode parseTableConstructorOrQuery(STNode tableKeyword, boolean isRhsExpr) {
        STNode keySpecifier;
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case OPEN_BRACKET_TOKEN:
                // `table [` must be a table constructor.
                keySpecifier = STNodeFactory.createEmptyNode();
                return parseTableConstructorExprRhs(tableKeyword, keySpecifier);
            case KEY_KEYWORD:
                keySpecifier = parseKeySpecifier();
                return parseTableConstructorOrQueryRhs(tableKeyword, keySpecifier, isRhsExpr);
            case IDENTIFIER_TOKEN:
                if (isKeyKeyword(nextToken)) {
                    keySpecifier = parseKeySpecifier();
                    return parseTableConstructorOrQueryRhs(tableKeyword, keySpecifier, isRhsExpr);
                }
                break;
            default:
                break;
        }
        recover(peek(), ParserRuleContext.TABLE_KEYWORD_RHS, tableKeyword, isRhsExpr);
        return parseTableConstructorOrQuery(tableKeyword, isRhsExpr);
    }

    private STNode parseTableConstructorOrQueryRhs(STNode tableKeyword, STNode keySpecifier, boolean isRhsExpr) {
        switch (peek().kind) {
            case FROM_KEYWORD:
                return
                        parseQueryExprRhs(parseQueryConstructType(tableKeyword, keySpecifier), isRhsExpr);
            case OPEN_BRACKET_TOKEN:
                return parseTableConstructorExprRhs(tableKeyword, keySpecifier);
            default:
                recover(peek(), ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_RHS, tableKeyword, keySpecifier,
                        isRhsExpr);
                return parseTableConstructorOrQueryRhs(tableKeyword, keySpecifier, isRhsExpr);
        }
    }

    /**
     * Parse query construct type.
     * <p>
     * <code>query-construct-type := table key-specifier | stream</code>
     *
     * @return Parsed node
     */
    private STNode parseQueryConstructType(STNode keyword, STNode keySpecifier) {
        return STNodeFactory.createQueryConstructTypeNode(keyword, keySpecifier);
    }

    /**
     * Parse query action or expression.
     * <p>
     * <code>
     * query-expr-rhs := query-pipeline select-clause
     *                   query-pipeline select-clause on-conflict-clause?
     * <br/>
     * query-pipeline := from-clause intermediate-clause*
     * </code>
     *
     * @param queryConstructType queryConstructType that precedes this rhs
     * @return Parsed node
     */
    private STNode parseQueryExprRhs(STNode queryConstructType, boolean isRhsExpr) {
        switchContext(ParserRuleContext.QUERY_EXPRESSION);
        STNode fromClause = parseFromClause(isRhsExpr);
        List<STNode> clauses = new ArrayList<>();
        STNode intermediateClause;
        STNode selectClause = null;
        while (!isEndOfIntermediateClause(peek().kind)) {
            intermediateClause = parseIntermediateClause(isRhsExpr);
            if (intermediateClause == null) {
                break;
            }
            // Anything after a select clause is invalid; attach it to the select as minutiae.
            if (selectClause != null) {
                selectClause = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(selectClause, intermediateClause,
                        DiagnosticErrorCode.ERROR_MORE_CLAUSES_AFTER_SELECT_CLAUSE);
                continue;
            }
            if (intermediateClause.kind == SyntaxKind.SELECT_CLAUSE) {
                selectClause = intermediateClause;
            } else {
                clauses.add(intermediateClause);
            }
        }
        if (peek().kind == SyntaxKind.DO_KEYWORD) {
            STNode intermediateClauses = STNodeFactory.createNodeList(clauses);
            STNode queryPipeline = STNodeFactory.createQueryPipelineNode(fromClause, intermediateClauses);
            return
                    parseQueryAction(queryConstructType, queryPipeline, selectClause, isRhsExpr);
        }
        if (selectClause == null) {
            // Query expressions require a select clause: synthesize a missing one and
            // attach the diagnostic to the last clause (or the from-clause if none).
            STNode selectKeyword = SyntaxErrors.createMissingToken(SyntaxKind.SELECT_KEYWORD);
            STNode expr = STNodeFactory
                    .createSimpleNameReferenceNode(SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN));
            selectClause = STNodeFactory.createSelectClauseNode(selectKeyword, expr);
            if (clauses.isEmpty()) {
                fromClause = SyntaxErrors.addDiagnostic(fromClause, DiagnosticErrorCode.ERROR_MISSING_SELECT_CLAUSE);
            } else {
                int lastIndex = clauses.size() - 1;
                STNode intClauseWithDiagnostic = SyntaxErrors.addDiagnostic(clauses.get(lastIndex),
                        DiagnosticErrorCode.ERROR_MISSING_SELECT_CLAUSE);
                clauses.set(lastIndex, intClauseWithDiagnostic);
            }
        }
        STNode intermediateClauses = STNodeFactory.createNodeList(clauses);
        STNode queryPipeline = STNodeFactory.createQueryPipelineNode(fromClause, intermediateClauses);
        STNode onConflictClause = parseOnConflictClause(isRhsExpr);
        return STNodeFactory.createQueryExpressionNode(queryConstructType, queryPipeline, selectClause,
                onConflictClause);
    }

    /**
     * Parse an intermediate clause.
     * <p>
     * <code>
     * intermediate-clause := from-clause | where-clause | let-clause | join-clause | limit-clause | order-by-clause
     * </code>
     *
     * @return Parsed node
     */
    private STNode parseIntermediateClause(boolean isRhsExpr) {
        switch (peek().kind) {
            case FROM_KEYWORD:
                return parseFromClause(isRhsExpr);
            case WHERE_KEYWORD:
                return parseWhereClause(isRhsExpr);
            case LET_KEYWORD:
                return parseLetClause(isRhsExpr);
            case SELECT_KEYWORD:
                return parseSelectClause(isRhsExpr);
            case JOIN_KEYWORD:
            case OUTER_KEYWORD:
                return parseJoinClause(isRhsExpr);
            case ORDER_KEYWORD:
            case BY_KEYWORD:
            case ASCENDING_KEYWORD:
            case DESCENDING_KEYWORD:
                return parseOrderByClause(isRhsExpr);
            case LIMIT_KEYWORD:
                return parseLimitClause(isRhsExpr);
            case DO_KEYWORD:
            case SEMICOLON_TOKEN:
            case ON_KEYWORD:
            case CONFLICT_KEYWORD:
                // Not an intermediate clause: signals the end of the query pipeline.
                return null;
            default:
                recover(peek(), ParserRuleContext.QUERY_PIPELINE_RHS, isRhsExpr);
                return parseIntermediateClause(isRhsExpr);
        }
    }

    /**
     * Parse join-keyword.
     *
     * @return Join-keyword node
     */
    private STNode parseJoinKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.JOIN_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.JOIN_KEYWORD);
            return parseJoinKeyword();
        }
    }

    /**
     * Parse equals keyword.
     *
     * @return Parsed node
     */
    private STNode parseEqualsKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.EQUALS_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.EQUALS_KEYWORD);
            return parseEqualsKeyword();
        }
    }

    private boolean isEndOfIntermediateClause(SyntaxKind tokenKind) {
        switch (tokenKind) {
            case CLOSE_BRACE_TOKEN:
            case CLOSE_PAREN_TOKEN:
            case CLOSE_BRACKET_TOKEN:
            case OPEN_BRACE_TOKEN:
            case SEMICOLON_TOKEN:
            case PUBLIC_KEYWORD:
            case FUNCTION_KEYWORD:
            case EOF_TOKEN:
            case RESOURCE_KEYWORD:
            case LISTENER_KEYWORD:
            case DOCUMENTATION_STRING:
            case PRIVATE_KEYWORD:
            case RETURNS_KEYWORD:
            case SERVICE_KEYWORD:
            case TYPE_KEYWORD:
            case CONST_KEYWORD:
            case FINAL_KEYWORD:
            case DO_KEYWORD:
                return true;
            default:
                // A binary operator etc. also terminates the clause list.
                return isValidExprRhsStart(tokenKind, SyntaxKind.NONE);
        }
    }

    /**
     * Parse from clause.
     * <p>
     * <code>from-clause := from typed-binding-pattern in expression</code>
     *
     * @return Parsed node
     */
    private STNode parseFromClause(boolean isRhsExpr) {
        STNode fromKeyword = parseFromKeyword();
        STNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.FROM_CLAUSE);
        STNode inKeyword = parseInKeyword();
        STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
        return STNodeFactory.createFromClauseNode(fromKeyword, typedBindingPattern, inKeyword, expression);
    }

    /**
     * Parse from-keyword.
     *
     * @return From-keyword node
     */
    private STNode parseFromKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.FROM_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.FROM_KEYWORD);
            return parseFromKeyword();
        }
    }

    /**
     * Parse where clause.
     * <p>
     * <code>where-clause := where expression</code>
     *
     * @return Parsed node
     */
    private STNode parseWhereClause(boolean isRhsExpr) {
        STNode whereKeyword = parseWhereKeyword();
        STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
        return STNodeFactory.createWhereClauseNode(whereKeyword, expression);
    }

    /**
     * Parse where-keyword.
     *
     * @return Where-keyword node
     */
    private STNode parseWhereKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.WHERE_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.WHERE_KEYWORD);
            return parseWhereKeyword();
        }
    }

    /**
     * Parse limit-keyword.
     *
     * @return limit-keyword node
     */
    private STNode parseLimitKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.LIMIT_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.LIMIT_KEYWORD);
            return parseLimitKeyword();
        }
    }

    /**
     * Parse let clause.
     * <p>
     * <code>let-clause := let let-var-decl [, let-var-decl]* </code>
     *
     * @return Parsed node
     */
    private STNode parseLetClause(boolean isRhsExpr) {
        STNode letKeyword = parseLetKeyword();
        STNode letVarDeclarations = parseLetVarDeclarations(ParserRuleContext.LET_CLAUSE_LET_VAR_DECL, isRhsExpr);
        // At least one let-var-decl is required; flag on the keyword if the list is empty.
        letKeyword = cloneWithDiagnosticIfListEmpty(letVarDeclarations, letKeyword,
                DiagnosticErrorCode.ERROR_MISSING_LET_VARIABLE_DECLARATION);
        return STNodeFactory.createLetClauseNode(letKeyword, letVarDeclarations);
    }

    /**
     * Parse order-keyword.
     *
     * @return Order-keyword node
     */
    private STNode parseOrderKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.ORDER_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.ORDER_KEYWORD);
            return parseOrderKeyword();
        }
    }

    /**
     * Parse by-keyword.
     *
     * @return By-keyword node
     */
    private STNode parseByKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.BY_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.BY_KEYWORD);
            return parseByKeyword();
        }
    }

    /**
     * Parse order by clause.
     * <p>
     * <code>order-by-clause := order by order-key-list
     * </code>
     *
     * @return Parsed node
     */
    private STNode parseOrderByClause(boolean isRhsExpr) {
        STNode orderKeyword = parseOrderKeyword();
        STNode byKeyword = parseByKeyword();
        STNode orderKeys = parseOrderKeyList(isRhsExpr);
        // At least one order key is required; flag on the `by` keyword if the list is empty.
        byKeyword = cloneWithDiagnosticIfListEmpty(orderKeys, byKeyword, DiagnosticErrorCode.ERROR_MISSING_ORDER_KEY);
        return STNodeFactory.createOrderByClauseNode(orderKeyword, byKeyword, orderKeys);
    }

    /**
     * Parse order key.
     * <p>
     * <code>order-key-list := order-key [, order-key]*</code>
     *
     * @return Parsed node
     */
    private STNode parseOrderKeyList(boolean isRhsExpr) {
        startContext(ParserRuleContext.ORDER_KEY_LIST);
        List<STNode> orderKeys = new ArrayList<>();
        STToken nextToken = peek();
        if (isEndOfOrderKeys(nextToken.kind)) {
            endContext();
            return STNodeFactory.createEmptyNodeList();
        }
        STNode orderKey = parseOrderKey(isRhsExpr);
        orderKeys.add(orderKey);
        nextToken = peek();
        STNode orderKeyListMemberEnd;
        while (!isEndOfOrderKeys(nextToken.kind)) {
            orderKeyListMemberEnd = parseOrderKeyListMemberEnd();
            if (orderKeyListMemberEnd == null) {
                break;
            }
            orderKeys.add(orderKeyListMemberEnd);
            orderKey = parseOrderKey(isRhsExpr);
            orderKeys.add(orderKey);
            nextToken = peek();
        }
        endContext();
        return STNodeFactory.createNodeList(orderKeys);
    }

    private boolean isEndOfOrderKeys(SyntaxKind tokenKind) {
        switch (tokenKind) {
            case COMMA_TOKEN:
            case ASCENDING_KEYWORD:
            case DESCENDING_KEYWORD:
                return false;
            case SEMICOLON_TOKEN:
            case EOF_TOKEN:
                return true;
            default:
                // Any token that starts the next query clause also ends the order-key list.
                return isQueryClauseStartToken(tokenKind);
        }
    }

    private boolean isQueryClauseStartToken(SyntaxKind tokenKind) {
        switch (tokenKind) {
            case SELECT_KEYWORD:
            case LET_KEYWORD:
            case WHERE_KEYWORD:
            case OUTER_KEYWORD:
            case JOIN_KEYWORD:
            case ORDER_KEYWORD:
            case DO_KEYWORD:
            case FROM_KEYWORD:
            case LIMIT_KEYWORD:
                return true;
            default:
                return false;
        }
    }

    private STNode parseOrderKeyListMemberEnd() {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case COMMA_TOKEN:
                return
parseComma(); case EOF_TOKEN: return null; default: if (isQueryClauseStartToken(nextToken.kind)) { return null; } recover(peek(), ParserRuleContext.ORDER_KEY_LIST_END); return parseOrderKeyListMemberEnd(); } } /** * Parse order key. * <p> * <code>order-key := expression (ascending | descending)?</code> * * @return Parsed node */ private STNode parseOrderKey(boolean isRhsExpr) { STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false); STNode orderDirection; STToken nextToken = peek(); switch (nextToken.kind) { case ASCENDING_KEYWORD: case DESCENDING_KEYWORD: orderDirection = consume(); break; default: orderDirection = STNodeFactory.createEmptyNode(); } return STNodeFactory.createOrderKeyNode(expression, orderDirection); } /** * Parse select clause. * <p> * <code>select-clause := select expression</code> * * @return Parsed node */ private STNode parseSelectClause(boolean isRhsExpr) { startContext(ParserRuleContext.SELECT_CLAUSE); STNode selectKeyword = parseSelectKeyword(); STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false); endContext(); return STNodeFactory.createSelectClauseNode(selectKeyword, expression); } /** * Parse select-keyword. * * @return Select-keyword node */ private STNode parseSelectKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.SELECT_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.SELECT_KEYWORD); return parseSelectKeyword(); } } /** * Parse on-conflict clause. 
* <p> * <code> * onConflictClause := on conflict expression * </code> * * @return On conflict clause node */ private STNode parseOnConflictClause(boolean isRhsExpr) { STToken nextToken = peek(); if (nextToken.kind != SyntaxKind.ON_KEYWORD && nextToken.kind != SyntaxKind.CONFLICT_KEYWORD) { return STNodeFactory.createEmptyNode(); } startContext(ParserRuleContext.ON_CONFLICT_CLAUSE); STNode onKeyword = parseOnKeyword(); STNode conflictKeyword = parseConflictKeyword(); endContext(); STNode expr = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false); return STNodeFactory.createOnConflictClauseNode(onKeyword, conflictKeyword, expr); } /** * Parse conflict keyword. * * @return Conflict keyword node */ private STNode parseConflictKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.CONFLICT_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.CONFLICT_KEYWORD); return parseConflictKeyword(); } } /** * Parse limit clause. * <p> * <code>limitClause := limit expression</code> * * @return Limit expression node */ private STNode parseLimitClause(boolean isRhsExpr) { STNode limitKeyword = parseLimitKeyword(); STNode expr = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false); return STNodeFactory.createLimitClauseNode(limitKeyword, expr); } /** * Parse join clause. 
     * <p>
     * <code>
     * join-clause := (join-var-decl | outer-join-var-decl) in expression on-clause
     * <br/>
     * join-var-decl := join (typeName | var) bindingPattern
     * <br/>
     * outer-join-var-decl := outer join var binding-pattern
     * </code>
     *
     * @return Join clause
     */
    private STNode parseJoinClause(boolean isRhsExpr) {
        startContext(ParserRuleContext.JOIN_CLAUSE);

        // Optional `outer` keyword; empty node when absent.
        STNode outerKeyword;
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.OUTER_KEYWORD) {
            outerKeyword = consume();
        } else {
            outerKeyword = STNodeFactory.createEmptyNode();
        }

        STNode joinKeyword = parseJoinKeyword();
        STNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.JOIN_CLAUSE);
        STNode inKeyword = parseInKeyword();
        STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
        endContext();
        // The on-clause is parsed outside the JOIN_CLAUSE context.
        STNode onCondition = parseOnClause(isRhsExpr);
        return STNodeFactory.createJoinClauseNode(outerKeyword, joinKeyword, typedBindingPattern, inKeyword, expression, onCondition);
    }

    /**
     * Parse on clause.
     * <p>
     * <code>on clause := `on` expression `equals` expression</code>
     *
     * @return On clause node
     */
    private STNode parseOnClause(boolean isRhsExpr) {
        STToken nextToken = peek();
        if (isQueryClauseStartToken(nextToken.kind)) {
            // A join-clause without an on-clause: synthesize a fully-missing on-clause node.
            return createMissingOnClauseNode();
        }

        startContext(ParserRuleContext.ON_CLAUSE);
        STNode onKeyword = parseOnKeyword();
        STNode lhsExpression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
        STNode equalsKeyword = parseEqualsKeyword();
        endContext();
        STNode rhsExpression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
        return STNodeFactory.createOnClauseNode(onKeyword, lhsExpression, equalsKeyword, rhsExpression);
    }

    /**
     * Create an on-clause node consisting entirely of missing tokens, each carrying
     * the relevant missing-token diagnostic. Used when a join clause lacks its on-clause.
     *
     * @return On clause node with missing tokens
     */
    private STNode createMissingOnClauseNode() {
        STNode onKeyword = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.ON_KEYWORD,
                DiagnosticErrorCode.ERROR_MISSING_ON_KEYWORD);
        STNode identifier = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,
                DiagnosticErrorCode.ERROR_MISSING_IDENTIFIER);
        STNode equalsKeyword = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.EQUALS_KEYWORD,
                DiagnosticErrorCode.ERROR_MISSING_EQUALS_KEYWORD);

        STNode lhsExpression = STNodeFactory.createSimpleNameReferenceNode(identifier);
        STNode rhsExpression = STNodeFactory.createSimpleNameReferenceNode(identifier);
        return STNodeFactory.createOnClauseNode(onKeyword, lhsExpression, equalsKeyword, rhsExpression);
    }

    /**
     * Parse start action.
     * <p>
     * <code>start-action := [annots] start (function-call-expr|method-call-expr|remote-method-call-action)</code>
     *
     * @return Start action node
     */
    private STNode parseStartAction(STNode annots) {
        STNode startKeyword = parseStartKeyword();
        STNode expr = parseActionOrExpression();

        // Validate that the parsed expression is a valid target for `start`.
        switch (expr.kind) {
            case FUNCTION_CALL:
            case METHOD_CALL:
            case REMOTE_METHOD_CALL_ACTION:
                break;
            case SIMPLE_NAME_REFERENCE:
            case QUALIFIED_NAME_REFERENCE:
                // A bare name reference: treat it as a call with missing parentheses.
                STNode openParenToken = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.OPEN_PAREN_TOKEN,
                        DiagnosticErrorCode.ERROR_MISSING_OPEN_PAREN_TOKEN);
                STNode arguments = STNodeFactory.createEmptyNodeList();
                STNode closeParenToken = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.CLOSE_PAREN_TOKEN,
                        DiagnosticErrorCode.ERROR_MISSING_CLOSE_PAREN_TOKEN);
                expr = STNodeFactory.createFunctionCallExpressionNode(expr, openParenToken, arguments, closeParenToken);
                break;
            default:
                // Invalid expression: attach it to the keyword as invalid minutiae and
                // substitute a fully-missing function call.
                startKeyword = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startKeyword, expr,
                        DiagnosticErrorCode.ERROR_INVALID_EXPRESSION_IN_START_ACTION);
                STNode funcName = SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
                funcName = STNodeFactory.createSimpleNameReferenceNode(funcName);
                openParenToken = SyntaxErrors.createMissingToken(SyntaxKind.OPEN_PAREN_TOKEN);
                arguments = STNodeFactory.createEmptyNodeList();
                closeParenToken = SyntaxErrors.createMissingToken(SyntaxKind.CLOSE_PAREN_TOKEN);
                expr = STNodeFactory.createFunctionCallExpressionNode(funcName, openParenToken, arguments, closeParenToken);
                break;
        }

        return STNodeFactory.createStartActionNode(getAnnotations(annots), startKeyword, expr);
    }

    /**
     * Parse start keyword.
     *
     * @return Start keyword node
     */
    private STNode parseStartKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.START_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.START_KEYWORD);
            return parseStartKeyword();
        }
    }

    /**
     * Parse flush action.
     * <p>
     * <code>flush-action := flush [peer-worker]</code>
     *
     * @return flush action node
     */
    private STNode parseFlushAction() {
        STNode flushKeyword = parseFlushKeyword();
        STNode peerWorker = parseOptionalPeerWorkerName();
        return STNodeFactory.createFlushActionNode(flushKeyword, peerWorker);
    }

    /**
     * Parse flush keyword.
     *
     * @return flush keyword node
     */
    private STNode parseFlushKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.FLUSH_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.FLUSH_KEYWORD);
            return parseFlushKeyword();
        }
    }

    /**
     * Parse peer worker.
     * <p>
     * <code>peer-worker := worker-name | function</code>
     *
     * @return peer worker name node, or an empty node when absent
     */
    private STNode parseOptionalPeerWorkerName() {
        STToken token = peek();
        switch (token.kind) {
            case IDENTIFIER_TOKEN:
            case FUNCTION_KEYWORD:
                return STNodeFactory.createSimpleNameReferenceNode(consume());
            default:
                return STNodeFactory.createEmptyNode();
        }
    }

    /**
     * Parse intersection type descriptor.
     * <p>
     * intersection-type-descriptor := type-descriptor & type-descriptor
     * </p>
     *
     * @return Parsed node
     */
    private STNode parseIntersectionTypeDescriptor(STNode leftTypeDesc, ParserRuleContext context,
                                                   boolean isTypedBindingPattern) {
        // The `&` token has already been validated by the caller.
        STNode bitwiseAndToken = consume();
        STNode rightTypeDesc = parseTypeDescriptorInternal(new ArrayList<>(), context, isTypedBindingPattern, false);
        return createIntersectionTypeDesc(leftTypeDesc, bitwiseAndToken, rightTypeDesc);
    }

    /**
     * Create an intersection type descriptor node, validating that neither
     * side is the `var` keyword.
     *
     * @return Intersection type descriptor node
     */
    private STNode createIntersectionTypeDesc(STNode leftTypeDesc, STNode bitwiseAndToken, STNode rightTypeDesc) {
        leftTypeDesc = validateForUsageOfVar(leftTypeDesc);
        rightTypeDesc = validateForUsageOfVar(rightTypeDesc);
        return STNodeFactory.createIntersectionTypeDescriptorNode(leftTypeDesc, bitwiseAndToken, rightTypeDesc);
    }

    /**
     * Parse singleton type descriptor.
     * <p>
     * singleton-type-descriptor := simple-const-expr
     * simple-const-expr :=
     * nil-literal
     * | boolean-literal
     * | [Sign] int-literal
     * | [Sign] floating-point-literal
     * | string-literal
     * | constant-reference-expr
     * </p>
     */
    private STNode parseSingletonTypeDesc() {
        STNode simpleContExpr = parseSimpleConstExpr();
        return STNodeFactory.createSingletonTypeDescriptorNode(simpleContExpr);
    }

    /**
     * Parse a signed (+/-) integer or floating point literal as a unary expression.
     *
     * @return Unary expression node wrapping the numeric literal
     */
    private STNode parseSignedIntOrFloat() {
        STNode operator = parseUnaryOperator();
        STNode literal;
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case HEX_INTEGER_LITERAL_TOKEN:
            case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
            case HEX_FLOATING_POINT_LITERAL_TOKEN:
                literal = parseBasicLiteral();
                break;
            default: // decimal integer literal
                literal = parseDecimalIntLiteral(ParserRuleContext.DECIMAL_INTEGER_LITERAL_TOKEN);
                literal = STNodeFactory.createBasicLiteralNode(SyntaxKind.NUMERIC_LITERAL, literal);
        }
        return STNodeFactory.createUnaryExpressionNode(operator, literal);
    }

    /**
     * Check whether the next tokens can start a singleton type descriptor.
     *
     * @param tokenKind     Kind of the next token
     * @param inTypeDescCtx Whether the parser is already inside a type-descriptor context
     * @return <code>true</code> if this is the start of a singleton type descriptor
     */
    private boolean isSingletonTypeDescStart(SyntaxKind tokenKind, boolean inTypeDescCtx) {
        STToken nextNextToken = getNextNextToken();
        switch (tokenKind) {
            case STRING_LITERAL_TOKEN:
            case DECIMAL_INTEGER_LITERAL_TOKEN:
            case HEX_INTEGER_LITERAL_TOKEN:
            case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
            case HEX_FLOATING_POINT_LITERAL_TOKEN:
            case TRUE_KEYWORD:
            case FALSE_KEYWORD:
            case NULL_KEYWORD:
                // Outside a type-desc context the literal must be followed by a valid
                // type-desc RHS to be read as a singleton type.
                if (inTypeDescCtx || isValidTypeDescRHSOutSideTypeDescCtx(nextNextToken)) {
                    return true;
                }
                return false;
            case PLUS_TOKEN:
            case MINUS_TOKEN:
                return isIntOrFloat(nextNextToken);
            default:
                return false;
        }
    }

    /**
     * Check whether the given token is an integer or floating point literal.
     *
     * @param token Token to check
     * @return <code>true</code> if the token is a numeric literal
     */
    static boolean isIntOrFloat(STToken token) {
        switch (token.kind) {
            case DECIMAL_INTEGER_LITERAL_TOKEN:
            case HEX_INTEGER_LITERAL_TOKEN:
            case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
            case HEX_FLOATING_POINT_LITERAL_TOKEN:
                return true;
            default:
                return false;
        }
    }

    /**
     * Check whether the given token can follow a type descriptor when we are
     * not already inside a type-descriptor context.
     *
     * @param token Token to check
     * @return <code>true</code> if the token is a valid type-desc RHS
     */
    private boolean isValidTypeDescRHSOutSideTypeDescCtx(STToken token) {
        switch (token.kind) {
            case IDENTIFIER_TOKEN:
            case QUESTION_MARK_TOKEN:
            case OPEN_PAREN_TOKEN:
            case OPEN_BRACKET_TOKEN:
            case PIPE_TOKEN:
            case BITWISE_AND_TOKEN:
            case OPEN_BRACE_TOKEN:
            case ERROR_KEYWORD:
                return true;
            default:
                return false;
        }
    }

    /**
     * Check whether the parser reached to a valid expression start.
     *
     * @param nextTokenKind Kind of the next immediate token.
     * @param nextTokenIndex Index to the next token.
     * @return <code>true</code> if this is a start of a valid expression. <code>false</code> otherwise
     */
    private boolean isValidExpressionStart(SyntaxKind nextTokenKind, int nextTokenIndex) {
        nextTokenIndex++;
        switch (nextTokenKind) {
            case DECIMAL_INTEGER_LITERAL_TOKEN:
            case HEX_INTEGER_LITERAL_TOKEN:
            case STRING_LITERAL_TOKEN:
            case NULL_KEYWORD:
            case TRUE_KEYWORD:
            case FALSE_KEYWORD:
            case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
            case HEX_FLOATING_POINT_LITERAL_TOKEN:
                // A literal qualifies only if what follows it can legally follow an expression.
                SyntaxKind nextNextTokenKind = peek(nextTokenIndex).kind;
                return nextNextTokenKind == SyntaxKind.SEMICOLON_TOKEN || nextNextTokenKind == SyntaxKind.COMMA_TOKEN ||
                        nextNextTokenKind == SyntaxKind.CLOSE_BRACKET_TOKEN ||
                        isValidExprRhsStart(nextNextTokenKind, SyntaxKind.SIMPLE_NAME_REFERENCE);
            case IDENTIFIER_TOKEN:
                return isValidExprRhsStart(peek(nextTokenIndex).kind, SyntaxKind.SIMPLE_NAME_REFERENCE);
            case OPEN_PAREN_TOKEN:
            case CHECK_KEYWORD:
            case CHECKPANIC_KEYWORD:
            case OPEN_BRACE_TOKEN:
            case TYPEOF_KEYWORD:
            case NEGATION_TOKEN:
            case EXCLAMATION_MARK_TOKEN:
            case TRAP_KEYWORD:
            case OPEN_BRACKET_TOKEN:
            case LT_TOKEN:
            case FROM_KEYWORD:
            case LET_KEYWORD:
            case BACKTICK_TOKEN:
            case NEW_KEYWORD:
            case LEFT_ARROW_TOKEN:
            case FUNCTION_KEYWORD:
            case TRANSACTIONAL_KEYWORD:
            case ISOLATED_KEYWORD:
                return true;
            case PLUS_TOKEN:
            case MINUS_TOKEN:
                // Signed literal: recurse on the token after the sign.
                return isValidExpressionStart(peek(nextTokenIndex).kind, nextTokenIndex);
            case TABLE_KEYWORD:
                return peek(nextTokenIndex).kind == SyntaxKind.FROM_KEYWORD;
            case STREAM_KEYWORD:
                STToken nextNextToken = peek(nextTokenIndex);
                return nextNextToken.kind == SyntaxKind.KEY_KEYWORD ||
                        nextNextToken.kind == SyntaxKind.OPEN_BRACKET_TOKEN ||
                        nextNextToken.kind == SyntaxKind.FROM_KEYWORD;
            case ERROR_KEYWORD:
                return peek(nextTokenIndex).kind == SyntaxKind.OPEN_PAREN_TOKEN;
            case XML_KEYWORD:
            case STRING_KEYWORD:
                return peek(nextTokenIndex).kind == SyntaxKind.BACKTICK_TOKEN;
            // These keywords begin actions, not expressions.
            case START_KEYWORD:
            case FLUSH_KEYWORD:
            case WAIT_KEYWORD:
            default:
                return false;
        }
    }

    /**
     * Parse sync send action.
     * <p>
     * <code>sync-send-action := expression ->> peer-worker</code>
     *
     * @param expression LHS expression of the sync send action
     * @return Sync send action node
     */
    private STNode parseSyncSendAction(STNode expression) {
        STNode syncSendToken = parseSyncSendToken();
        STNode peerWorker = parsePeerWorkerName();
        return STNodeFactory.createSyncSendActionNode(expression, syncSendToken, peerWorker);
    }

    /**
     * Parse peer worker.
     * <p>
     * <code>peer-worker := worker-name | function</code>
     *
     * @return peer worker name node
     */
    private STNode parsePeerWorkerName() {
        STToken token = peek();
        switch (token.kind) {
            case IDENTIFIER_TOKEN:
            case FUNCTION_KEYWORD:
                return STNodeFactory.createSimpleNameReferenceNode(consume());
            default:
                recover(token, ParserRuleContext.PEER_WORKER_NAME);
                return parsePeerWorkerName();
        }
    }

    /**
     * Parse sync send token.
     * <p>
     * <code>sync-send-token := ->> </code>
     *
     * @return sync send token
     */
    private STNode parseSyncSendToken() {
        STToken token = peek();
        if (token.kind == SyntaxKind.SYNC_SEND_TOKEN) {
            return consume();
        } else {
            recover(token, ParserRuleContext.SYNC_SEND_TOKEN);
            return parseSyncSendToken();
        }
    }

    /**
     * Parse receive action.
     * <p>
     * <code>receive-action := single-receive-action | multiple-receive-action</code>
     *
     * @return Receive action
     */
    private STNode parseReceiveAction() {
        STNode leftArrow = parseLeftArrowToken();
        STNode receiveWorkers = parseReceiveWorkers();
        return STNodeFactory.createReceiveActionNode(leftArrow, receiveWorkers);
    }

    /**
     * Parse the worker(s) of a receive action: either a single peer worker
     * name or a brace-delimited list of receive fields.
     *
     * @return Receive workers node
     */
    private STNode parseReceiveWorkers() {
        switch (peek().kind) {
            case FUNCTION_KEYWORD:
            case IDENTIFIER_TOKEN:
                return parsePeerWorkerName();
            case OPEN_BRACE_TOKEN:
                return parseMultipleReceiveWorkers();
            default:
                recover(peek(), ParserRuleContext.RECEIVE_WORKERS);
                return parseReceiveWorkers();
        }
    }

    /**
     * Parse multiple worker receivers.
     * <p>
     * <code>{ receive-field (, receive-field)* }</code>
     *
     * @return Multiple worker receiver node
     */
    private STNode parseMultipleReceiveWorkers() {
        startContext(ParserRuleContext.MULTI_RECEIVE_WORKERS);
        STNode openBrace = parseOpenBrace();
        STNode receiveFields = parseReceiveFields();
        STNode closeBrace = parseCloseBrace();
        endContext();

        // An empty field list is an error; attach the diagnostic to the open brace.
        openBrace = cloneWithDiagnosticIfListEmpty(receiveFields, openBrace,
                DiagnosticErrorCode.ERROR_MISSING_RECEIVE_FIELD_IN_RECEIVE_ACTION);
        return STNodeFactory.createReceiveFieldsNode(openBrace, receiveFields, closeBrace);
    }

    /**
     * Parse the comma-separated list of receive fields. Separators are kept
     * in the node list, interleaved with the fields.
     *
     * @return Receive field list node
     */
    private STNode parseReceiveFields() {
        List<STNode> receiveFields = new ArrayList<>();
        STToken nextToken = peek();
        if (isEndOfReceiveFields(nextToken.kind)) {
            return STNodeFactory.createEmptyNodeList();
        }

        STNode receiveField = parseReceiveField();
        receiveFields.add(receiveField);
        nextToken = peek();

        STNode recieveFieldEnd;
        while (!isEndOfReceiveFields(nextToken.kind)) {
            recieveFieldEnd = parseReceiveFieldEnd();
            if (recieveFieldEnd == null) {
                break;
            }
            receiveFields.add(recieveFieldEnd);
            receiveField = parseReceiveField();
            receiveFields.add(receiveField);
            nextToken = peek();
        }

        return STNodeFactory.createNodeList(receiveFields);
    }

    /**
     * Check whether the given token terminates the receive-field list.
     *
     * @param nextTokenKind Kind of the next token
     * @return <code>true</code> if the list ends at this token
     */
    private boolean isEndOfReceiveFields(SyntaxKind nextTokenKind) {
        switch (nextTokenKind) {
            case EOF_TOKEN:
            case CLOSE_BRACE_TOKEN:
                return true;
            default:
                return false;
        }
    }

    /**
     * Parse the separator after a receive field, or detect the end of the list.
     *
     * @return Comma token, or <code>null</code> if the list has ended
     */
    private STNode parseReceiveFieldEnd() {
        switch (peek().kind) {
            case COMMA_TOKEN:
                return parseComma();
            case CLOSE_BRACE_TOKEN:
                return null;
            default:
                recover(peek(), ParserRuleContext.RECEIVE_FIELD_END);
                return parseReceiveFieldEnd();
        }
    }

    /**
     * Parse receive field.
     * <p>
     * <code>receive-field := peer-worker | field-name : peer-worker</code>
     *
     * @return Receiver field node
     */
    private STNode parseReceiveField() {
        switch (peek().kind) {
            case FUNCTION_KEYWORD:
                STNode functionKeyword = consume();
                return STNodeFactory.createSimpleNameReferenceNode(functionKeyword);
            case IDENTIFIER_TOKEN:
                STNode identifier = parseIdentifier(ParserRuleContext.RECEIVE_FIELD_NAME);
                return createQualifiedReceiveField(identifier);
            default:
                recover(peek(), ParserRuleContext.RECEIVE_FIELD);
                return parseReceiveField();
        }
    }

    /**
     * Turn a parsed field-name into a qualified receive field if a colon follows;
     * otherwise return the bare identifier.
     *
     * @param identifier Parsed field-name identifier
     * @return Receive field node
     */
    private STNode createQualifiedReceiveField(STNode identifier) {
        if (peek().kind != SyntaxKind.COLON_TOKEN) {
            return identifier;
        }

        STNode colon = parseColon();
        STNode peerWorker = parsePeerWorkerName();
        return STNodeFactory.createQualifiedNameReferenceNode(identifier, colon, peerWorker);
    }

    /**
     * Parse left arrow (<-) token.
     *
     * @return left arrow token
     */
    private STNode parseLeftArrowToken() {
        STToken token = peek();
        if (token.kind == SyntaxKind.LEFT_ARROW_TOKEN) {
            return consume();
        } else {
            recover(token, ParserRuleContext.LEFT_ARROW_TOKEN);
            return parseLeftArrowToken();
        }
    }

    /**
     * Parse signed right shift token (>>).
     * <p>
     * The lexer produces two separate GT tokens; this merges them into a single
     * DOUBLE_GT token, flagging any whitespace between them as an error.
     *
     * @return Parsed node
     */
    private STNode parseSignedRightShiftToken() {
        STNode openGTToken = consume();
        STToken endLGToken = consume();
        STNode doubleGTToken = STNodeFactory.createToken(SyntaxKind.DOUBLE_GT_TOKEN, openGTToken.leadingMinutiae(),
                endLGToken.trailingMinutiae());

        if (hasTrailingMinutiae(openGTToken)) {
            doubleGTToken = SyntaxErrors.addDiagnostic(doubleGTToken,
                    DiagnosticErrorCode.ERROR_NO_WHITESPACES_ALLOWED_IN_RIGHT_SHIFT_OP);
        }
        return doubleGTToken;
    }

    /**
     * Parse unsigned right shift token (>>>).
     *
     * @return Parsed node
     */
    private STNode parseUnsignedRightShiftToken() {
        // The lexer produces three separate GT tokens; merge them into one TRIPPLE_GT token.
        STNode openGTToken = consume();
        STNode middleGTToken = consume();
        STNode endLGToken = consume();
        STNode unsignedRightShiftToken = STNodeFactory.createToken(SyntaxKind.TRIPPLE_GT_TOKEN,
                openGTToken.leadingMinutiae(), endLGToken.trailingMinutiae());

        // Whitespace between any of the three GT tokens is an error.
        boolean validOpenGTToken = !hasTrailingMinutiae(openGTToken);
        boolean validMiddleGTToken = !hasTrailingMinutiae(middleGTToken);
        if (validOpenGTToken && validMiddleGTToken) {
            return unsignedRightShiftToken;
        }

        unsignedRightShiftToken = SyntaxErrors.addDiagnostic(unsignedRightShiftToken,
                DiagnosticErrorCode.ERROR_NO_WHITESPACES_ALLOWED_IN_UNSIGNED_RIGHT_SHIFT_OP);
        return unsignedRightShiftToken;
    }

    /**
     * Parse wait action.
     * <p>
     * <code>wait-action := single-wait-action | multiple-wait-action | alternate-wait-action </code>
     *
     * @return Wait action node
     */
    private STNode parseWaitAction() {
        STNode waitKeyword = parseWaitKeyword();
        if (peek().kind == SyntaxKind.OPEN_BRACE_TOKEN) {
            return parseMultiWaitAction(waitKeyword);
        }

        return parseSingleOrAlternateWaitAction(waitKeyword);
    }

    /**
     * Parse wait keyword.
     *
     * @return wait keyword
     */
    private STNode parseWaitKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.WAIT_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.WAIT_KEYWORD);
            return parseWaitKeyword();
        }
    }

    /**
     * Parse single or alternate wait actions.
     * <p>
     * <code>
     * alternate-or-single-wait-action := wait wait-future-expr (| wait-future-expr)+
     * <br/>
     * wait-future-expr := expression but not mapping-constructor-expr
     * </code>
     *
     * @param waitKeyword wait keyword
     * @return Single or alternate wait action node
     */
    private STNode parseSingleOrAlternateWaitAction(STNode waitKeyword) {
        startContext(ParserRuleContext.ALTERNATE_WAIT_EXPRS);
        STToken nextToken = peek();

        // A wait keyword with no future expression: synthesize a missing identifier with a diagnostic.
        if (isEndOfWaitFutureExprList(nextToken.kind)) {
            endContext();
            STNode waitFutureExprs = STNodeFactory
                    .createSimpleNameReferenceNode(STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN));
            waitFutureExprs = SyntaxErrors.addDiagnostic(waitFutureExprs,
                    DiagnosticErrorCode.ERROR_MISSING_WAIT_FUTURE_EXPRESSION);
            return STNodeFactory.createWaitActionNode(waitKeyword, waitFutureExprs);
        }

        List<STNode> waitFutureExprList = new ArrayList<>();
        STNode waitField = parseWaitFutureExpr();
        waitFutureExprList.add(waitField);
        nextToken = peek();

        STNode waitFutureExprEnd;
        while (!isEndOfWaitFutureExprList(nextToken.kind)) {
            waitFutureExprEnd = parseWaitFutureExprEnd();
            if (waitFutureExprEnd == null) {
                break;
            }
            waitFutureExprList.add(waitFutureExprEnd);
            waitField = parseWaitFutureExpr();
            waitFutureExprList.add(waitField);
            nextToken = peek();
        }

        endContext();
        // NOTE(review): only the first wait-future-expr is attached to the action node;
        // subsequent alternates are parsed (consuming tokens) but not represented in the
        // tree here — confirm this is the intended handling of alternate wait actions.
        return STNodeFactory.createWaitActionNode(waitKeyword, waitFutureExprList.get(0));
    }

    /**
     * Check whether the given token terminates the wait-future-expr list.
     *
     * @param nextTokenKind Kind of the next token
     * @return <code>true</code> if the list ends at this token
     */
    private boolean isEndOfWaitFutureExprList(SyntaxKind nextTokenKind) {
        switch (nextTokenKind) {
            case EOF_TOKEN:
            case CLOSE_BRACE_TOKEN:
            case SEMICOLON_TOKEN:
            case OPEN_BRACE_TOKEN:
                return true;
            case PIPE_TOKEN:
            default:
                return false;
        }
    }

    /**
     * Parse a single wait-future expression, flagging forms that are not
     * allowed as wait futures (mapping constructors and actions).
     *
     * @return Wait future expression node
     */
    private STNode parseWaitFutureExpr() {
        STNode waitFutureExpr = parseActionOrExpression();
        if (waitFutureExpr.kind == SyntaxKind.MAPPING_CONSTRUCTOR) {
            waitFutureExpr = SyntaxErrors.addDiagnostic(waitFutureExpr,
                    DiagnosticErrorCode.ERROR_MAPPING_CONSTRUCTOR_EXPR_AS_A_WAIT_EXPR);
        } else if (isAction(waitFutureExpr)) {
            waitFutureExpr = SyntaxErrors.addDiagnostic(waitFutureExpr,
                    DiagnosticErrorCode.ERROR_ACTION_AS_A_WAIT_EXPR);
        }
        return waitFutureExpr;
    }

    /**
     * Parse the pipe separator between alternate wait futures, or detect the
     * end of the list.
     *
     * @return Pipe token, or <code>null</code> if the list has ended
     */
    private STNode parseWaitFutureExprEnd() {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case PIPE_TOKEN:
                return parsePipeToken();
            default:
                if (isEndOfWaitFutureExprList(nextToken.kind) ||
                        !isValidExpressionStart(nextToken.kind, 1)) {
                    return null;
                }
                recover(peek(), ParserRuleContext.WAIT_FUTURE_EXPR_END);
                return parseWaitFutureExprEnd();
        }
    }

    /**
     * Parse multiple wait action.
     * <p>
     * <code>multiple-wait-action := wait { wait-field (, wait-field)* }</code>
     *
     * @param waitKeyword Wait keyword
     * @return Multiple wait action node
     */
    private STNode parseMultiWaitAction(STNode waitKeyword) {
        startContext(ParserRuleContext.MULTI_WAIT_FIELDS);
        STNode openBrace = parseOpenBrace();
        STNode waitFields = parseWaitFields();
        STNode closeBrace = parseCloseBrace();
        endContext();

        // An empty field list is an error; attach the diagnostic to the open brace.
        openBrace = cloneWithDiagnosticIfListEmpty(waitFields, openBrace,
                DiagnosticErrorCode.ERROR_MISSING_WAIT_FIELD_IN_WAIT_ACTION);
        STNode waitFieldsNode = STNodeFactory.createWaitFieldsListNode(openBrace, waitFields, closeBrace);
        return STNodeFactory.createWaitActionNode(waitKeyword, waitFieldsNode);
    }

    /**
     * Parse the comma-separated list of wait fields. Separators are kept in
     * the node list, interleaved with the fields.
     *
     * @return Wait field list node
     */
    private STNode parseWaitFields() {
        List<STNode> waitFields = new ArrayList<>();
        STToken nextToken = peek();
        if (isEndOfWaitFields(nextToken.kind)) {
            return STNodeFactory.createEmptyNodeList();
        }

        STNode waitField = parseWaitField();
        waitFields.add(waitField);
        nextToken = peek();

        STNode waitFieldEnd;
        while (!isEndOfWaitFields(nextToken.kind)) {
            waitFieldEnd = parseWaitFieldEnd();
            if (waitFieldEnd == null) {
                break;
            }
            waitFields.add(waitFieldEnd);
            waitField = parseWaitField();
            waitFields.add(waitField);
            nextToken = peek();
        }

        return STNodeFactory.createNodeList(waitFields);
    }

    /**
     * Check whether the given token terminates the wait-field list.
     *
     * @param nextTokenKind Kind of the next token
     * @return <code>true</code> if the list ends at this token
     */
    private boolean isEndOfWaitFields(SyntaxKind nextTokenKind) {
        switch (nextTokenKind) {
            case EOF_TOKEN:
            case CLOSE_BRACE_TOKEN:
                return true;
            default:
                return false;
        }
    }

    /**
     * Parse the separator after a wait field, or detect the end of the list.
     *
     * @return Comma token, or <code>null</code> if the list has ended
     */
    private STNode parseWaitFieldEnd() {
        switch (peek().kind) {
            case COMMA_TOKEN:
                return parseComma();
            case CLOSE_BRACE_TOKEN:
                return null;
            default:
                recover(peek(), ParserRuleContext.WAIT_FIELD_END);
                return parseWaitFieldEnd();
        }
    }

    /**
     * Parse wait field.
     * <p>
     * <code>wait-field := variable-name | field-name : wait-future-expr</code>
     *
     * @return Receiver field node
     */
    private STNode parseWaitField() {
        switch (peek().kind) {
            case IDENTIFIER_TOKEN:
                STNode identifier = parseIdentifier(ParserRuleContext.WAIT_FIELD_NAME);
                identifier = STNodeFactory.createSimpleNameReferenceNode(identifier);
                return createQualifiedWaitField(identifier);
            default:
                recover(peek(), ParserRuleContext.WAIT_FIELD_NAME);
                return parseWaitField();
        }
    }

    /**
     * Turn a parsed field-name into a qualified wait field if a colon follows;
     * otherwise return the bare identifier.
     *
     * @param identifier Parsed field-name reference
     * @return Wait field node
     */
    private STNode createQualifiedWaitField(STNode identifier) {
        if (peek().kind != SyntaxKind.COLON_TOKEN) {
            return identifier;
        }

        STNode colon = parseColon();
        STNode waitFutureExpr = parseWaitFutureExpr();
        return STNodeFactory.createWaitFieldNode(identifier, colon, waitFutureExpr);
    }

    /**
     * Parse annot access expression.
     * <p>
     * <code>
     * annot-access-expr := expression .@ annot-tag-reference
     * <br/>
     * annot-tag-reference := qualified-identifier | identifier
     * </code>
     *
     * @param lhsExpr Preceding expression of the annot access access
     * @return Parsed node
     */
    private STNode parseAnnotAccessExpression(STNode lhsExpr, boolean isInConditionalExpr) {
        STNode annotAccessToken = parseAnnotChainingToken();
        STNode annotTagReference = parseFieldAccessIdentifier(isInConditionalExpr);
        return STNodeFactory.createAnnotAccessExpressionNode(lhsExpr, annotAccessToken, annotTagReference);
    }

    /**
     * Parse annot-chaining-token.
     *
     * @return Parsed node
     */
    private STNode parseAnnotChainingToken() {
        STToken token = peek();
        if (token.kind == SyntaxKind.ANNOT_CHAINING_TOKEN) {
            return consume();
        } else {
            recover(token, ParserRuleContext.ANNOT_CHAINING_TOKEN);
            return parseAnnotChainingToken();
        }
    }

    /**
     * Parse field access identifier.
     * <p>
     * <code>field-access-identifier := qualified-identifier | identifier</code>
     *
     * @return Parsed node
     */
    private STNode parseFieldAccessIdentifier(boolean isInConditionalExpr) {
        return parseQualifiedIdentifier(ParserRuleContext.FIELD_ACCESS_IDENTIFIER, isInConditionalExpr);
    }

    /**
     * Parse query action.
     * <p>
     * <code>query-action := query-pipeline do-clause
     * <br/>
     * do-clause := do block-stmt
     * </code>
     *
     * @param queryConstructType Query construct type. This is only for validation
     * @param queryPipeline Query pipeline
     * @param selectClause Select clause if any This is only for validation.
     * @return Query action node
     */
    private STNode parseQueryAction(STNode queryConstructType, STNode queryPipeline, STNode selectClause,
                                    boolean isRhsExpr) {
        // A construct type (table/stream) or a select clause is invalid in a query action;
        // attach them to the pipeline as invalid minutiae with diagnostics.
        if (queryConstructType != null) {
            queryPipeline = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(queryPipeline, queryConstructType,
                    DiagnosticErrorCode.ERROR_QUERY_CONSTRUCT_TYPE_IN_QUERY_ACTION);
        }
        if (selectClause != null) {
            queryPipeline = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(queryPipeline, selectClause,
                    DiagnosticErrorCode.ERROR_SELECT_CLAUSE_IN_QUERY_ACTION);
        }

        startContext(ParserRuleContext.DO_CLAUSE);
        STNode doKeyword = parseDoKeyword();
        STNode blockStmt = parseBlockNode();
        endContext();
        return STNodeFactory.createQueryActionNode(queryPipeline, doKeyword, blockStmt);
    }

    /**
     * Parse 'do' keyword.
     *
     * @return do keyword node
     */
    private STNode parseDoKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.DO_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.DO_KEYWORD);
            return parseDoKeyword();
        }
    }

    /**
     * Parse optional field access or xml optional attribute access expression.
     * <p>
     * <code>
     * optional-field-access-expr := expression ?. field-name
     * <br/>
     * xml-optional-attribute-access-expr := expression ?. xml-attribute-name
     * <br/>
     * xml-attribute-name := xml-qualified-name | qualified-identifier | identifier
     * <br/>
     * xml-qualified-name := xml-namespace-prefix : identifier
     * <br/>
     * xml-namespace-prefix := identifier
     * </code>
     *
     * @param lhsExpr Preceding expression of the optional access
     * @return Parsed node
     */
    private STNode parseOptionalFieldAccessExpression(STNode lhsExpr, boolean isInConditionalExpr) {
        STNode optionalFieldAccessToken = parseOptionalChainingToken();
        STNode fieldName = parseFieldAccessIdentifier(isInConditionalExpr);
        return STNodeFactory.createOptionalFieldAccessExpressionNode(lhsExpr, optionalFieldAccessToken, fieldName);
    }

    /**
     * Parse optional chaining token.
     *
     * @return parsed node
     */
    private STNode parseOptionalChainingToken() {
        STToken token = peek();
        if (token.kind == SyntaxKind.OPTIONAL_CHAINING_TOKEN) {
            return consume();
        } else {
            recover(token, ParserRuleContext.OPTIONAL_CHAINING_TOKEN);
            return parseOptionalChainingToken();
        }
    }

    /**
     * Parse conditional expression.
     * <p>
     * <code>conditional-expr := expression ? expression : expression</code>
     *
     * @param lhsExpr Preceding expression of the question mark
     * @return Parsed node
     */
    private STNode parseConditionalExpression(STNode lhsExpr) {
        startContext(ParserRuleContext.CONDITIONAL_EXPRESSION);
        STNode questionMark = parseQuestionMark();
        // Parse the middle expression in conditional-expr mode (last arg `true`) so that a
        // `a ? b : c`-style qualified name can be disambiguated below.
        STNode middleExpr = parseExpression(OperatorPrecedence.ANON_FUNC_OR_LET, true, false, true);

        STNode nextToken = peek();
        STNode endExpr;
        STNode colon;
        if (nextToken.kind != SyntaxKind.COLON_TOKEN && middleExpr.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {
            // No explicit colon follows: the parsed qualified name `x:y` is actually
            // `middle : end`. Split it into the middle expression, the colon, and the end.
            STQualifiedNameReferenceNode qualifiedNameRef = (STQualifiedNameReferenceNode) middleExpr;
            STNode modulePrefix = qualifiedNameRef.modulePrefix;
            if (modulePrefix.kind == SyntaxKind.IDENTIFIER_TOKEN) {
                middleExpr = STNodeFactory.createSimpleNameReferenceNode(modulePrefix);
            } else {
                // built-in type name prefix
                middleExpr = modulePrefix;
            }
            colon = qualifiedNameRef.colon;
            endContext();
            endExpr = STNodeFactory.createSimpleNameReferenceNode(qualifiedNameRef.identifier);
        } else {
            if (middleExpr.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {
                // Normalize a qualified name whose prefix is a predeclared (built-in) name.
                middleExpr = generateQualifiedNameRef(middleExpr);
            }
            colon = parseColon();
            endContext();
            endExpr = parseExpression(OperatorPrecedence.ANON_FUNC_OR_LET, true, false);
        }

        return STNodeFactory.createConditionalExpressionNode(lhsExpr, questionMark, middleExpr, colon, endExpr);
    }

    /**
     * Re-create a qualified name reference whose module prefix is a built-in type
     * name, replacing the built-in-name node with a plain identifier token so the
     * prefix is treated as a module prefix.
     *
     * @param qualifiedName Qualified name reference to normalize
     * @return Qualified name reference node
     */
    private STNode generateQualifiedNameRef(STNode qualifiedName) {
        STQualifiedNameReferenceNode qualifiedNameRef = (STQualifiedNameReferenceNode) qualifiedName;
        STNode modulePrefix = qualifiedNameRef.modulePrefix;
        if (modulePrefix.kind != SyntaxKind.IDENTIFIER_TOKEN) {
            // The prefix is a predeclared prefix (built-in type name): convert it to an
            // identifier token, preserving its text and minutiae.
            STBuiltinSimpleNameReferenceNode builtInType = (STBuiltinSimpleNameReferenceNode) modulePrefix;
            STToken nameToken = (STToken) builtInType.name;
            STNode preDeclaredPrefix = STNodeFactory.createIdentifierToken(nameToken.text(),
                    nameToken.leadingMinutiae(), nameToken.trailingMinutiae());
            return STNodeFactory.createQualifiedNameReferenceNode(preDeclaredPrefix, qualifiedNameRef.colon,
                    qualifiedNameRef.identifier);
        } else {
            return qualifiedName;
        }
    }

    /**
     * Parse enum declaration.
     * <p>
     * module-enum-decl :=
     * metadata
     * [public] enum identifier { enum-member (, enum-member)* }
     * enum-member := metadata identifier [= const-expr]
     * </p>
     *
     * @param metadata Metadata of the enum declaration
     * @param qualifier Visibility qualifier of the enum declaration
     * @return Parsed enum node.
     */
    private STNode parseEnumDeclaration(STNode metadata, STNode qualifier) {
        startContext(ParserRuleContext.MODULE_ENUM_DECLARATION);
        STNode enumKeywordToken = parseEnumKeyword();
        STNode identifier = parseIdentifier(ParserRuleContext.MODULE_ENUM_NAME);
        STNode openBraceToken = parseOpenBrace();
        STNode enumMemberList = parseEnumMemberList();
        STNode closeBraceToken = parseCloseBrace();
        endContext();

        // An empty member list is an error; attach the diagnostic to the open brace.
        openBraceToken = cloneWithDiagnosticIfListEmpty(enumMemberList, openBraceToken,
                DiagnosticErrorCode.ERROR_MISSING_ENUM_MEMBER);
        return STNodeFactory.createEnumDeclarationNode(metadata, qualifier, enumKeywordToken, identifier,
                openBraceToken, enumMemberList, closeBraceToken);
    }

    /**
     * Parse 'enum' keyword.
     *
     * @return enum keyword node
     */
    private STNode parseEnumKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.ENUM_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.ENUM_KEYWORD);
            return parseEnumKeyword();
        }
    }

    /**
     * Parse enum member list.
     * <p>
     * enum-member := metadata identifier [= const-expr]
     * </p>
     *
     * @return enum member list node.
*/
private STNode parseEnumMemberList() {
    startContext(ParserRuleContext.ENUM_MEMBER_LIST);

    // Allow an empty member list here; the missing-member diagnostic is added by the caller.
    if (peek().kind == SyntaxKind.CLOSE_BRACE_TOKEN) {
        return STNodeFactory.createEmptyNodeList();
    }

    // Members and their separating commas are interleaved in a single list.
    List<STNode> enumMemberList = new ArrayList<>();
    STNode enumMember = parseEnumMember();

    STNode enumMemberRhs;
    while (peek().kind != SyntaxKind.CLOSE_BRACE_TOKEN) {
        enumMemberRhs = parseEnumMemberEnd();
        if (enumMemberRhs == null) {
            break;
        }
        enumMemberList.add(enumMember);
        enumMemberList.add(enumMemberRhs);
        enumMember = parseEnumMember();
    }

    enumMemberList.add(enumMember);
    endContext();
    return STNodeFactory.createNodeList(enumMemberList);
}

/**
 * Parse enum member.
 * <p>
 * enum-member := metadata identifier [= const-expr]
 * </p>
 *
 * @return Parsed enum member node.
 */
private STNode parseEnumMember() {
    STNode metadata;
    switch (peek().kind) {
        case DOCUMENTATION_STRING:
        case AT_TOKEN:
            metadata = parseMetaData();
            break;
        default:
            metadata = STNodeFactory.createEmptyNode();
    }

    STNode identifierNode = parseIdentifier(ParserRuleContext.ENUM_MEMBER_NAME);
    return parseEnumMemberRhs(metadata, identifierNode);
}

private STNode parseEnumMemberRhs(STNode metadata, STNode identifierNode) {
    STNode equalToken, constExprNode;
    switch (peek().kind) {
        case EQUAL_TOKEN:
            // Member with an explicit constant value: `identifier = const-expr`.
            equalToken = parseAssignOp();
            constExprNode = parseExpression();
            break;
        case COMMA_TOKEN:
        case CLOSE_BRACE_TOKEN:
            equalToken = STNodeFactory.createEmptyNode();
            constExprNode = STNodeFactory.createEmptyNode();
            break;
        default:
            recover(peek(), ParserRuleContext.ENUM_MEMBER_RHS, metadata, identifierNode);
            return parseEnumMemberRhs(metadata, identifierNode);
    }

    return STNodeFactory.createEnumMemberNode(metadata, identifierNode, equalToken, constExprNode);
}

private STNode parseEnumMemberEnd() {
    switch (peek().kind) {
        case COMMA_TOKEN:
            return parseComma();
        case CLOSE_BRACE_TOKEN:
            // null signals the end of the member list to the caller.
            return null;
        default:
            recover(peek(), ParserRuleContext.ENUM_MEMBER_END);
            return parseEnumMemberEnd();
    }
}

/**
 * Disambiguate between a transaction statement and a variable declaration whose type
 * descriptor starts with the (pre-declared) `transaction` prefix.
 *
 * @param annots             Annotations attached to the statement
 * @param qualifiers         Qualifiers preceding the statement
 * @param transactionKeyword Already-consumed `transaction` keyword token
 * @return Transaction statement or variable declaration node
 */
private STNode parseTransactionStmtOrVarDecl(STNode annots, List<STNode> qualifiers, STToken transactionKeyword) {
    switch (peek().kind) {
        case OPEN_BRACE_TOKEN:
            // `transaction {` — a transaction statement. Annots/qualifiers are not allowed on it.
            reportInvalidStatementAnnots(annots, qualifiers);
            reportInvalidQualifierList(qualifiers);
            return parseTransactionStatement(transactionKeyword);
        case COLON_TOKEN:
            if (getNextNextToken().kind == SyntaxKind.IDENTIFIER_TOKEN) {
                // `transaction:Foo ...` — a qualified type reference, hence a var-decl.
                STNode typeDesc = parseQualifiedIdentifierWithPredeclPrefix(transactionKeyword, false);
                return parseVarDeclTypeDescRhs(typeDesc, annots, qualifiers, true, false);
            }
            // Else fall through to recovery.
        default:
            Solution solution = recover(peek(), ParserRuleContext.TRANSACTION_STMT_RHS_OR_TYPE_REF);
            if (solution.action == Action.KEEP ||
                    (solution.action == Action.INSERT && solution.tokenKind == SyntaxKind.COLON_TOKEN)) {
                STNode typeDesc = parseQualifiedIdentifierWithPredeclPrefix(transactionKeyword, false);
                return parseVarDeclTypeDescRhs(typeDesc, annots, qualifiers, true, false);
            }
            return parseTransactionStmtOrVarDecl(annots, qualifiers, transactionKeyword);
    }
}

/**
 * Parse transaction statement.
 * <p>
 * <code>transaction-stmt := `transaction` block-stmt [on-fail-clause]</code>
 *
 * @return Transaction statement node
 */
private STNode parseTransactionStatement(STNode transactionKeyword) {
    startContext(ParserRuleContext.TRANSACTION_STMT);
    STNode blockStmt = parseBlockNode();
    endContext();
    STNode onFailClause = parseOptionalOnFailClause();
    return STNodeFactory.createTransactionStatementNode(transactionKeyword, blockStmt, onFailClause);
}

/**
 * Parse commit action.
 * <p>
 * <code>commit-action := "commit"</code>
 *
 * @return Commit action node
 */
private STNode parseCommitAction() {
    STNode commitKeyword = parseCommitKeyword();
    return STNodeFactory.createCommitActionNode(commitKeyword);
}

/**
 * Parse commit keyword.
 *
 * @return parsed node
 */
private STNode parseCommitKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.COMMIT_KEYWORD) {
        return consume();
    } else {
        recover(token, ParserRuleContext.COMMIT_KEYWORD);
        return parseCommitKeyword();
    }
}

/**
 * Parse retry statement.
* <p>
 * <code>
 * retry-stmt := `retry` retry-spec block-stmt [on-fail-clause]
 * <br/>
 * retry-spec := [type-parameter] [ `(` arg-list `)` ]
 * </code>
 *
 * @return Retry statement node
 */
private STNode parseRetryStatement() {
    startContext(ParserRuleContext.RETRY_STMT);
    STNode retryKeyword = parseRetryKeyword();
    // NOTE: the RETRY_STMT context is closed inside parseRetryTypeParamRhs(), once the body is parsed.
    STNode retryStmt = parseRetryKeywordRhs(retryKeyword);
    return retryStmt;
}

private STNode parseRetryKeywordRhs(STNode retryKeyword) {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case LT_TOKEN:
            // `retry<Policy> ...` — explicit retry-manager type parameter.
            STNode typeParam = parseTypeParameter();
            return parseRetryTypeParamRhs(retryKeyword, typeParam);
        case OPEN_PAREN_TOKEN:
        case OPEN_BRACE_TOKEN:
        case TRANSACTION_KEYWORD:
            typeParam = STNodeFactory.createEmptyNode();
            return parseRetryTypeParamRhs(retryKeyword, typeParam);
        default:
            recover(peek(), ParserRuleContext.RETRY_KEYWORD_RHS, retryKeyword);
            return parseRetryKeywordRhs(retryKeyword);
    }
}

private STNode parseRetryTypeParamRhs(STNode retryKeyword, STNode typeParam) {
    STNode args;
    switch (peek().kind) {
        case OPEN_PAREN_TOKEN:
            args = parseParenthesizedArgList();
            break;
        case OPEN_BRACE_TOKEN:
        case TRANSACTION_KEYWORD:
            args = STNodeFactory.createEmptyNode();
            break;
        default:
            recover(peek(), ParserRuleContext.RETRY_TYPE_PARAM_RHS, retryKeyword, typeParam);
            return parseRetryTypeParamRhs(retryKeyword, typeParam);
    }

    STNode blockStmt = parseRetryBody();
    endContext(); // closes the RETRY_STMT context opened in parseRetryStatement()
    STNode onFailClause = parseOptionalOnFailClause();
    return STNodeFactory.createRetryStatementNode(retryKeyword, typeParam, args, blockStmt, onFailClause);
}

private STNode parseRetryBody() {
    switch (peek().kind) {
        case OPEN_BRACE_TOKEN:
            return parseBlockNode();
        case TRANSACTION_KEYWORD:
            // `retry transaction { ... }` form.
            return parseTransactionStatement(consume());
        default:
            recover(peek(), ParserRuleContext.RETRY_BODY);
            return parseRetryBody();
    }
}

/**
 * Parse optional on fail clause.
*
 * @return Parsed node (empty node when no on-fail clause is present)
 */
private STNode parseOptionalOnFailClause() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.ON_KEYWORD) {
        return parseOnFailClause();
    }

    // Anything that can legally follow a compound statement means the clause was omitted.
    if (isEndOfRegularCompoundStmt(nextToken.kind)) {
        return STNodeFactory.createEmptyNode();
    }

    recover(nextToken, ParserRuleContext.REGULAR_COMPOUND_STMT_RHS);
    return parseOptionalOnFailClause();
}

private boolean isEndOfRegularCompoundStmt(SyntaxKind nodeKind) {
    switch (nodeKind) {
        case CLOSE_BRACE_TOKEN:
        case SEMICOLON_TOKEN:
        case AT_TOKEN:
        case EOF_TOKEN:
            return true;
        default:
            // A token that starts a new statement also ends the current compound statement.
            return isStatementStartingToken(nodeKind);
    }
}

private boolean isStatementStartingToken(SyntaxKind nodeKind) {
    switch (nodeKind) {
        case FINAL_KEYWORD:
        case IF_KEYWORD:
        case WHILE_KEYWORD:
        case DO_KEYWORD:
        case PANIC_KEYWORD:
        case CONTINUE_KEYWORD:
        case BREAK_KEYWORD:
        case RETURN_KEYWORD:
        case TYPE_KEYWORD:
        case LOCK_KEYWORD:
        case OPEN_BRACE_TOKEN:
        case FORK_KEYWORD:
        case FOREACH_KEYWORD:
        case XMLNS_KEYWORD:
        case TRANSACTION_KEYWORD:
        case RETRY_KEYWORD:
        case ROLLBACK_KEYWORD:
        case MATCH_KEYWORD:
        case FAIL_KEYWORD:
        case CHECK_KEYWORD:
        case CHECKPANIC_KEYWORD:
        case TRAP_KEYWORD:
        case START_KEYWORD:
        case FLUSH_KEYWORD:
        case LEFT_ARROW_TOKEN:
        case WAIT_KEYWORD:
        case COMMIT_KEYWORD:
        case WORKER_KEYWORD:
            return true;
        default:
            // Variable declarations and expression statements start with a type or an expression.
            if (isTypeStartingToken(nodeKind)) {
                return true;
            }

            if (isValidExpressionStart(nodeKind, 1)) {
                return true;
            }

            return false;
    }
}

/**
 * Parse on fail clause.
* <p>
 * <code>
 * on-fail-clause := on fail typed-binding-pattern statement-block
 * </code>
 *
 * @return On fail clause node
 */
private STNode parseOnFailClause() {
    startContext(ParserRuleContext.ON_FAIL_CLAUSE);
    STNode onKeyword = parseOnKeyword();
    STNode failKeyword = parseFailKeyword();
    STNode typeDescriptor = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true, false);
    STNode identifier = parseIdentifier(ParserRuleContext.VARIABLE_REF);
    STNode blockStatement = parseBlockNode();
    endContext();
    return STNodeFactory.createOnFailClauseNode(onKeyword, failKeyword, typeDescriptor, identifier,
            blockStatement);
}

/**
 * Parse retry keyword.
 *
 * @return parsed node
 */
private STNode parseRetryKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.RETRY_KEYWORD) {
        return consume();
    } else {
        recover(token, ParserRuleContext.RETRY_KEYWORD);
        return parseRetryKeyword();
    }
}

/**
 * Parse rollback statement.
 * <p>
 * <code>rollback-stmt := "rollback" [expression] ";"</code>
 *
 * @return Rollback statement node
 */
private STNode parseRollbackStatement() {
    startContext(ParserRuleContext.ROLLBACK_STMT);
    STNode rollbackKeyword = parseRollbackKeyword();
    STNode expression;
    if (peek().kind == SyntaxKind.SEMICOLON_TOKEN) {
        // The rollback expression is optional.
        expression = STNodeFactory.createEmptyNode();
    } else {
        expression = parseExpression();
    }

    STNode semicolon = parseSemicolon();
    endContext();
    return STNodeFactory.createRollbackStatementNode(rollbackKeyword, expression, semicolon);
}

/**
 * Parse rollback keyword.
 *
 * @return Rollback keyword node
 */
private STNode parseRollbackKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.ROLLBACK_KEYWORD) {
        return consume();
    } else {
        recover(token, ParserRuleContext.ROLLBACK_KEYWORD);
        return parseRollbackKeyword();
    }
}

/**
 * Parse transactional expression.
* <p>
 * <code>transactional-expr := "transactional"</code>
 *
 * @return Transactional expression node
 */
private STNode parseTransactionalExpression() {
    STNode transactionalKeyword = parseTransactionalKeyword();
    return STNodeFactory.createTransactionalExpressionNode(transactionalKeyword);
}

/**
 * Parse transactional keyword.
 *
 * @return Transactional keyword node
 */
private STNode parseTransactionalKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.TRANSACTIONAL_KEYWORD) {
        return consume();
    } else {
        recover(token, ParserRuleContext.TRANSACTIONAL_KEYWORD);
        return parseTransactionalKeyword();
    }
}

/**
 * Parse byte array literal (base16 or base64).
 * <p>
 * <code>
 * byte-array-literal := Base16Literal | Base64Literal
 * <br/>
 * Base16Literal := base16 WS ` HexGroup* WS `
 * <br/>
 * Base64Literal := base64 WS ` Base64Group* [PaddedBase64Group] WS `
 * </code>
 *
 * @return parsed node
 */
private STNode parseByteArrayLiteral() {
    STNode type;
    if (peek().kind == SyntaxKind.BASE16_KEYWORD) {
        type = parseBase16Keyword();
    } else {
        type = parseBase64Keyword();
    }

    STNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);
    if (startingBackTick.isMissing()) {
        // No opening backtick at all: synthesize an empty literal with a missing-content diagnostic.
        startingBackTick = SyntaxErrors.createMissingToken(SyntaxKind.BACKTICK_TOKEN);
        STNode endingBackTick = SyntaxErrors.createMissingToken(SyntaxKind.BACKTICK_TOKEN);
        STNode content = STNodeFactory.createEmptyNode();
        STNode byteArrayLiteral =
                STNodeFactory.createByteArrayLiteralNode(type, startingBackTick, content, endingBackTick);
        byteArrayLiteral =
                SyntaxErrors.addDiagnostic(byteArrayLiteral, DiagnosticErrorCode.ERROR_MISSING_BYTE_ARRAY_CONTENT);
        return byteArrayLiteral;
    }

    STNode content = parseByteArrayContent();
    return parseByteArrayLiteral(type, startingBackTick, content);
}

/**
 * Parse byte array literal.
*
 * @param typeKeyword keyword token, possible values are `base16` and `base64`
 * @param startingBackTick starting backtick token
 * @param byteArrayContent byte array literal content to be validated
 * @return parsed byte array literal node
 */
private STNode parseByteArrayLiteral(STNode typeKeyword, STNode startingBackTick, STNode byteArrayContent) {
    STNode content = STNodeFactory.createEmptyNode();
    STNode newStartingBackTick = startingBackTick;
    STNodeList items = (STNodeList) byteArrayContent;
    if (items.size() == 1) {
        STNode item = items.get(0);
        if (typeKeyword.kind == SyntaxKind.BASE16_KEYWORD && !isValidBase16LiteralContent(item.toString())) {
            // Invalid hex content: drop it into the open backtick's trailing minutiae with a diagnostic.
            newStartingBackTick = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startingBackTick, item,
                    DiagnosticErrorCode.ERROR_INVALID_BASE16_CONTENT_IN_BYTE_ARRAY_LITERAL);
        } else if (typeKeyword.kind == SyntaxKind.BASE64_KEYWORD && !isValidBase64LiteralContent(item.toString())) {
            newStartingBackTick = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startingBackTick, item,
                    DiagnosticErrorCode.ERROR_INVALID_BASE64_CONTENT_IN_BYTE_ARRAY_LITERAL);
        } else if (item.kind != SyntaxKind.TEMPLATE_STRING) {
            // e.g. an interpolation — not allowed inside a byte array literal.
            newStartingBackTick = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startingBackTick, item,
                    DiagnosticErrorCode.ERROR_INVALID_CONTENT_IN_BYTE_ARRAY_LITERAL);
        } else {
            content = item;
        }
    } else if (items.size() > 1) {
        // More than one item (template string mixed with interpolations) is never valid content.
        STNode clonedStartingBackTick = startingBackTick;
        for (int index = 0; index < items.size(); index++) {
            STNode item = items.get(index);
            clonedStartingBackTick =
                    SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(clonedStartingBackTick, item);
        }
        newStartingBackTick = SyntaxErrors.addDiagnostic(clonedStartingBackTick,
                DiagnosticErrorCode.ERROR_INVALID_CONTENT_IN_BYTE_ARRAY_LITERAL);
    }

    STNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_END);
    return STNodeFactory.createByteArrayLiteralNode(typeKeyword, newStartingBackTick, content, endingBackTick);
}

/**
 * Parse <code>base16</code> keyword.
*
 * @return base16 keyword node
 */
private STNode parseBase16Keyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.BASE16_KEYWORD) {
        return consume();
    } else {
        recover(token, ParserRuleContext.BASE16_KEYWORD);
        return parseBase16Keyword();
    }
}

/**
 * Parse <code>base64</code> keyword.
 *
 * @return base64 keyword node
 */
private STNode parseBase64Keyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.BASE64_KEYWORD) {
        return consume();
    } else {
        recover(token, ParserRuleContext.BASE64_KEYWORD);
        return parseBase64Keyword();
    }
}

/**
 * Validate and parse byte array literal content.
 * An error is reported, if the content is invalid.
 *
 * @return parsed node
 */
private STNode parseByteArrayContent() {
    STToken nextToken = peek();
    List<STNode> items = new ArrayList<>();
    while (!isEndOfBacktickContent(nextToken.kind)) {
        STNode content = parseTemplateItem();
        items.add(content);
        nextToken = peek();
    }
    return STNodeFactory.createNodeList(items);
}

/**
 * Validate base16 literal content.
 * <p>
 * <code>
 * Base16Literal := base16 WS ` HexGroup* WS `
 * <br/>
 * HexGroup := WS HexDigit WS HexDigit
 * <br/>
 * WS := WhiteSpaceChar*
 * <br/>
 * WhiteSpaceChar := 0x9 | 0xA | 0xD | 0x20
 * </code>
 *
 * @param content the string surrounded by the backticks
 * @return <code>true</code>, if the string content is valid. <code>false</code> otherwise.
 */
static boolean isValidBase16LiteralContent(String content) {
    char[] charArray = content.toCharArray();
    int hexDigitCount = 0;

    for (char c : charArray) {
        switch (c) {
            case LexerTerminals.TAB:
            case LexerTerminals.NEWLINE:
            case LexerTerminals.CARRIAGE_RETURN:
            case LexerTerminals.SPACE:
                // Whitespace may appear anywhere between hex digits.
                break;
            default:
                if (isHexDigit(c)) {
                    hexDigitCount++;
                } else {
                    return false;
                }
                break;
        }
    }
    // Hex digits must pair up into whole bytes.
    return hexDigitCount % 2 == 0;
}

/**
 * Validate base64 literal content.
* <p> * <code> * Base64Literal := base64 WS ` Base64Group* [PaddedBase64Group] WS ` * <br/> * Base64Group := WS Base64Char WS Base64Char WS Base64Char WS Base64Char * <br/> * PaddedBase64Group := * WS Base64Char WS Base64Char WS Base64Char WS PaddingChar * | WS Base64Char WS Base64Char WS PaddingChar WS PaddingChar * <br/> * Base64Char := A .. Z | a .. z | 0 .. 9 | + | / * <br/> * PaddingChar := = * <br/> * WS := WhiteSpaceChar* * <br/> * WhiteSpaceChar := 0x9 | 0xA | 0xD | 0x20 * </code> * * @param content the string surrounded by the backticks * @return <code>true</code>, if the string content is valid. <code>false</code> otherwise. */ static boolean isValidBase64LiteralContent(String content) { char[] charArray = content.toCharArray(); int base64CharCount = 0; int paddingCharCount = 0; for (char c : charArray) { switch (c) { case LexerTerminals.TAB: case LexerTerminals.NEWLINE: case LexerTerminals.CARRIAGE_RETURN: case LexerTerminals.SPACE: break; case LexerTerminals.EQUAL: paddingCharCount++; break; default: if (isBase64Char(c)) { if (paddingCharCount == 0) { base64CharCount++; } else { return false; } } else { return false; } break; } } if (paddingCharCount > 2) { return false; } else if (paddingCharCount == 0) { return base64CharCount % 4 == 0; } else { return base64CharCount % 4 == 4 - paddingCharCount; } } /** * <p> * Check whether a given char is a base64 char. * </p> * <code>Base64Char := A .. Z | a .. z | 0 .. 9 | + | /</code> * * @param c character to check * @return <code>true</code>, if the character represents a base64 char. <code>false</code> otherwise. 
*/
static boolean isBase64Char(int c) {
    if ('a' <= c && c <= 'z') {
        return true;
    }

    if ('A' <= c && c <= 'Z') {
        return true;
    }

    if (c == '+' || c == '/') {
        return true;
    }

    return isDigit(c);
}

static boolean isHexDigit(int c) {
    if ('a' <= c && c <= 'f') {
        return true;
    }

    if ('A' <= c && c <= 'F') {
        return true;
    }

    return isDigit(c);
}

static boolean isDigit(int c) {
    return ('0' <= c && c <= '9');
}

/**
 * Parse xml filter expression.
 * <p>
 * <code>xml-filter-expr := expression .< xml-name-pattern ></code>
 *
 * @param lhsExpr Preceding expression of .< token
 * @return Parsed node
 */
private STNode parseXMLFilterExpression(STNode lhsExpr) {
    STNode xmlNamePatternChain = parseXMLFilterExpressionRhs();
    return STNodeFactory.createXMLFilterExpressionNode(lhsExpr, xmlNamePatternChain);
}

/**
 * Parse xml filter expression rhs.
 * <p>
 * <code>filter-expression-rhs := .< xml-name-pattern ></code>
 *
 * @return Parsed node
 */
private STNode parseXMLFilterExpressionRhs() {
    STNode dotLTToken = parseDotLTToken();
    return parseXMLNamePatternChain(dotLTToken);
}

/**
 * Parse xml name pattern chain.
 * <p>
 * <code>
 * xml-name-pattern-chain := filter-expression-rhs | xml-element-children-step | xml-element-descendants-step
 * <br/>
 * filter-expression-rhs := .< xml-name-pattern >
 * <br/>
 * xml-element-children-step := /< xml-name-pattern >
 * <br/>
 * xml-element-descendants-step := /**\/<xml-name-pattern >
 * </code>
 *
 * @param startToken Preceding token of xml name pattern
 * @return Parsed node
 */
private STNode parseXMLNamePatternChain(STNode startToken) {
    startContext(ParserRuleContext.XML_NAME_PATTERN);
    STNode xmlNamePattern = parseXMLNamePattern();
    STNode gtToken = parseGTToken();
    endContext();

    // At least one atomic name pattern is required between the delimiters.
    startToken = cloneWithDiagnosticIfListEmpty(xmlNamePattern, startToken,
            DiagnosticErrorCode.ERROR_MISSING_XML_ATOMIC_NAME_PATTERN);
    return STNodeFactory.createXMLNamePatternChainingNode(startToken, xmlNamePattern, gtToken);
}

/**
 * Parse <code> .< </code> token.
*
 * @return Parsed node
 */
private STNode parseDotLTToken() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.DOT_LT_TOKEN) {
        return consume();
    } else {
        recover(nextToken, ParserRuleContext.DOT_LT_TOKEN);
        return parseDotLTToken();
    }
}

/**
 * Parse xml name pattern.
 * <p>
 * <code>xml-name-pattern := xml-atomic-name-pattern [| xml-atomic-name-pattern]*</code>
 *
 * @return Parsed node
 */
private STNode parseXMLNamePattern() {
    // Patterns and their '|' separators are interleaved in a single list.
    List<STNode> xmlAtomicNamePatternList = new ArrayList<>();
    STToken nextToken = peek();

    // Return an empty list if the name pattern is empty.
    if (isEndOfXMLNamePattern(nextToken.kind)) {
        return STNodeFactory.createNodeList(xmlAtomicNamePatternList);
    }

    STNode xmlAtomicNamePattern = parseXMLAtomicNamePattern();
    xmlAtomicNamePatternList.add(xmlAtomicNamePattern);

    STNode separator;
    while (!isEndOfXMLNamePattern(peek().kind)) {
        separator = parseXMLNamePatternSeparator();
        if (separator == null) {
            break;
        }
        xmlAtomicNamePatternList.add(separator);

        xmlAtomicNamePattern = parseXMLAtomicNamePattern();
        xmlAtomicNamePatternList.add(xmlAtomicNamePattern);
    }

    return STNodeFactory.createNodeList(xmlAtomicNamePatternList);
}

private boolean isEndOfXMLNamePattern(SyntaxKind tokenKind) {
    switch (tokenKind) {
        case GT_TOKEN:
        case EOF_TOKEN:
            return true;
        case IDENTIFIER_TOKEN:
        case ASTERISK_TOKEN:
        case COLON_TOKEN:
        default:
            return false;
    }
}

private STNode parseXMLNamePatternSeparator() {
    STToken token = peek();
    switch (token.kind) {
        case PIPE_TOKEN:
            return consume();
        case GT_TOKEN:
        case EOF_TOKEN:
            // null signals the end of the pattern list to the caller.
            return null;
        default:
            recover(token, ParserRuleContext.XML_NAME_PATTERN_RHS);
            return parseXMLNamePatternSeparator();
    }
}

/**
 * Parse xml atomic name pattern.
* <p>
 * <code>
 * xml-atomic-name-pattern :=
 *   *
 *   | identifier
 *   | xml-namespace-prefix : identifier
 *   | xml-namespace-prefix : *
 * </code>
 *
 * @return Parsed node
 */
private STNode parseXMLAtomicNamePattern() {
    startContext(ParserRuleContext.XML_ATOMIC_NAME_PATTERN);
    STNode atomicNamePattern = parseXMLAtomicNamePatternBody();
    endContext();
    return atomicNamePattern;
}

private STNode parseXMLAtomicNamePatternBody() {
    STToken token = peek();
    STNode identifier;
    switch (token.kind) {
        case ASTERISK_TOKEN:
            // Bare `*` matches any name; no prefix can follow.
            return consume();
        case IDENTIFIER_TOKEN:
            identifier = consume();
            break;
        default:
            recover(token, ParserRuleContext.XML_ATOMIC_NAME_PATTERN_START);
            return parseXMLAtomicNamePatternBody();
    }

    return parseXMLAtomicNameIdentifier(identifier);
}

private STNode parseXMLAtomicNameIdentifier(STNode identifier) {
    STToken token = peek();
    if (token.kind == SyntaxKind.COLON_TOKEN) {
        STNode colon = consume();
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN || nextToken.kind == SyntaxKind.ASTERISK_TOKEN) {
            // `prefix:name` or `prefix:*` form.
            STToken endToken = consume();
            return STNodeFactory.createXMLAtomicNamePatternNode(identifier, colon, endToken);
        }
    }
    // Plain identifier with no namespace prefix.
    return STNodeFactory.createSimpleNameReferenceNode(identifier);
}

/**
 * Parse xml step expression.
 * <p>
 * <code>xml-step-expr := expression xml-step-start</code>
 *
 * @param lhsExpr Preceding expression of /*, /<, or /**\/< token
 * @return Parsed node
 */
private STNode parseXMLStepExpression(STNode lhsExpr) {
    STNode xmlStepStart = parseXMLStepStart();
    return STNodeFactory.createXMLStepExpressionNode(lhsExpr, xmlStepStart);
}

/**
 * Parse xml step start.
* <p>
 * <code>
 * xml-step-start :=
 *   xml-all-children-step
 *   | xml-element-children-step
 *   | xml-element-descendants-step
 * <br/>
 * xml-all-children-step := /*
 * </code>
 *
 * @return Parsed node
 */
private STNode parseXMLStepStart() {
    STToken token = peek();
    STNode startToken;

    switch (token.kind) {
        case SLASH_ASTERISK_TOKEN:
            // `/*` takes no name pattern; consume and return directly.
            return consume();
        case DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN:
            startToken = parseDoubleSlashDoubleAsteriskLTToken();
            break;
        case SLASH_LT_TOKEN:
        default:
            startToken = parseSlashLTToken();
            break;
    }

    return parseXMLNamePatternChain(startToken);
}

/**
 * Parse <code> /< </code> token.
 *
 * @return Parsed node
 */
private STNode parseSlashLTToken() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.SLASH_LT_TOKEN) {
        return consume();
    } else {
        recover(nextToken, ParserRuleContext.SLASH_LT_TOKEN);
        return parseSlashLTToken();
    }
}

/**
 * Parse <code> /**\/< </code> token.
 *
 * @return Parsed node
 */
private STNode parseDoubleSlashDoubleAsteriskLTToken() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN) {
        return consume();
    } else {
        recover(nextToken, ParserRuleContext.DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN);
        return parseDoubleSlashDoubleAsteriskLTToken();
    }
}

/**
 * Parse match statement.
* <p>
 * <code>match-stmt := match action-or-expr { match-clause+ } [on-fail-clause]</code>
 *
 * @return Match statement
 */
private STNode parseMatchStatement() {
    startContext(ParserRuleContext.MATCH_STMT);
    STNode matchKeyword = parseMatchKeyword();
    STNode actionOrExpr = parseActionOrExpression();
    startContext(ParserRuleContext.MATCH_BODY);
    STNode openBrace = parseOpenBrace();

    List<STNode> matchClausesList = new ArrayList<>();
    while (!isEndOfMatchClauses(peek().kind)) {
        STNode clause = parseMatchClause();
        matchClausesList.add(clause);
    }
    STNode matchClauses = STNodeFactory.createNodeList(matchClausesList);
    if (isNodeListEmpty(matchClauses)) {
        // The grammar requires at least one clause; attach the diagnostic to the open brace.
        openBrace = SyntaxErrors.addDiagnostic(openBrace,
                DiagnosticErrorCode.ERROR_MATCH_STATEMENT_SHOULD_HAVE_ONE_OR_MORE_MATCH_CLAUSES);
    }

    STNode closeBrace = parseCloseBrace();
    endContext(); // MATCH_BODY
    endContext(); // MATCH_STMT
    STNode onFailClause = parseOptionalOnFailClause();
    return STNodeFactory.createMatchStatementNode(matchKeyword, actionOrExpr, openBrace, matchClauses,
            closeBrace, onFailClause);
}

/**
 * Parse match keyword.
 *
 * @return Match keyword node
 */
private STNode parseMatchKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.MATCH_KEYWORD) {
        return consume();
    } else {
        recover(nextToken, ParserRuleContext.MATCH_KEYWORD);
        return parseMatchKeyword();
    }
}

private boolean isEndOfMatchClauses(SyntaxKind nextTokenKind) {
    switch (nextTokenKind) {
        case EOF_TOKEN:
        case CLOSE_BRACE_TOKEN:
            return true;
        default:
            return false;
    }
}

/**
 * Parse a single match clause.
* <p>
 * <code>
 * match-clause := match-pattern-list [match-guard] => block-stmt
 * <br/>
 * match-guard := if expression
 * </code>
 *
 * @return A match clause
 */
private STNode parseMatchClause() {
    STNode matchPatterns = parseMatchPatternList();
    STNode matchGuard = parseMatchGuard();
    STNode rightDoubleArrow = parseDoubleRightArrow();
    STNode blockStmt = parseBlockNode();
    return STNodeFactory.createMatchClauseNode(matchPatterns, matchGuard, rightDoubleArrow, blockStmt);
}

/**
 * Parse match guard.
 * <p>
 * <code>match-guard := if expression</code>
 *
 * @return Match guard (empty node when absent)
 */
private STNode parseMatchGuard() {
    switch (peek().kind) {
        case IF_KEYWORD:
            STNode ifKeyword = parseIfKeyword();
            STNode expr = parseExpression(DEFAULT_OP_PRECEDENCE, true, false, true, false);
            return STNodeFactory.createMatchGuardNode(ifKeyword, expr);
        case RIGHT_DOUBLE_ARROW_TOKEN:
            // No guard present.
            return STNodeFactory.createEmptyNode();
        default:
            recover(peek(), ParserRuleContext.OPTIONAL_MATCH_GUARD);
            return parseMatchGuard();
    }
}

/**
 * Parse match patterns list.
 * <p>
 * <code>match-pattern-list := match-pattern (| match-pattern)*</code>
 *
 * @return Match patterns list
 */
private STNode parseMatchPatternList() {
    startContext(ParserRuleContext.MATCH_PATTERN);
    // Patterns and their '|' separators are interleaved in a single list.
    List<STNode> matchClauses = new ArrayList<>();
    while (!isEndOfMatchPattern(peek().kind)) {
        STNode clause = parseMatchPattern();
        if (clause == null) {
            break;
        }
        matchClauses.add(clause);

        STNode seperator = parseMatchPatternListMemberRhs();
        if (seperator == null) {
            break;
        }
        matchClauses.add(seperator);
    }

    endContext();
    return STNodeFactory.createNodeList(matchClauses);
}

private boolean isEndOfMatchPattern(SyntaxKind nextTokenKind) {
    switch (nextTokenKind) {
        case PIPE_TOKEN:
        case IF_KEYWORD:
        case RIGHT_ARROW_TOKEN:
            return true;
        default:
            return false;
    }
}

/**
 * Parse match pattern.
* <p>
 * <code>
 * match-pattern := var binding-pattern
 *                | wildcard-match-pattern
 *                | const-pattern
 *                | list-match-pattern
 *                | mapping-match-pattern
 *                | error-match-pattern
 * </code>
 *
 * @return Match pattern
 */
private STNode parseMatchPattern() {
    switch (peek().kind) {
        case OPEN_PAREN_TOKEN:
        case NULL_KEYWORD:
        case TRUE_KEYWORD:
        case FALSE_KEYWORD:
        case PLUS_TOKEN:
        case MINUS_TOKEN:
        case DECIMAL_INTEGER_LITERAL_TOKEN:
        case HEX_INTEGER_LITERAL_TOKEN:
        case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
        case HEX_FLOATING_POINT_LITERAL_TOKEN:
        case STRING_LITERAL_TOKEN:
            return parseSimpleConstExpr();
        case IDENTIFIER_TOKEN:
            // Could be a const-reference pattern or the type-ref of an error match pattern;
            // disambiguate after reading the (possibly qualified) identifier.
            STNode typeRefOrConstExpr = parseQualifiedIdentifier(ParserRuleContext.MATCH_PATTERN);
            return parseErrorMatchPatternOrConsPattern(typeRefOrConstExpr);
        case VAR_KEYWORD:
            return parseVarTypedBindingPattern();
        case OPEN_BRACKET_TOKEN:
            return parseListMatchPattern();
        case OPEN_BRACE_TOKEN:
            return parseMappingMatchPattern();
        case ERROR_KEYWORD:
            return parseErrorMatchPattern();
        default:
            recover(peek(), ParserRuleContext.MATCH_PATTERN_START);
            return parseMatchPattern();
    }
}

private STNode parseMatchPatternListMemberRhs() {
    switch (peek().kind) {
        case PIPE_TOKEN:
            return parsePipeToken();
        case IF_KEYWORD:
        case RIGHT_DOUBLE_ARROW_TOKEN:
            // null signals the end of the pattern list to the caller.
            return null;
        default:
            recover(peek(), ParserRuleContext.MATCH_PATTERN_LIST_MEMBER_RHS);
            return parseMatchPatternListMemberRhs();
    }
}

/**
 * Parse var typed binding pattern.
 * <p>
 * <code>var binding-pattern</code>
 * </p>
 *
 * @return Parsed typed binding pattern node
 */
private STNode parseVarTypedBindingPattern() {
    STNode varKeyword = parseVarKeyword();
    STNode varTypeDesc = createBuiltinSimpleNameReference(varKeyword);
    STNode bindingPattern = parseBindingPattern();
    return STNodeFactory.createTypedBindingPatternNode(varTypeDesc, bindingPattern);
}

/**
 * Parse var keyword.
*
 * @return Var keyword node
 */
private STNode parseVarKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.VAR_KEYWORD) {
        return consume();
    } else {
        recover(nextToken, ParserRuleContext.VAR_KEYWORD);
        return parseVarKeyword();
    }
}

/**
 * Parse list match pattern.
 * <p>
 * <code>
 * list-match-pattern := [ list-member-match-patterns ]
 * list-member-match-patterns :=
 *    match-pattern (, match-pattern)* [, rest-match-pattern]
 *    | [ rest-match-pattern ]
 * </code>
 * </p>
 *
 * @return Parsed list match pattern node
 */
private STNode parseListMatchPattern() {
    startContext(ParserRuleContext.LIST_MATCH_PATTERN);
    STNode openBracketToken = parseOpenBracket();
    List<STNode> matchPatternList = new ArrayList<>();
    STNode listMatchPatternMemberRhs = null;
    boolean isEndOfFields = false;

    while (!isEndOfListMatchPattern()) {
        STNode listMatchPatternMember = parseListMatchPatternMember();
        matchPatternList.add(listMatchPatternMember);
        listMatchPatternMemberRhs = parseListMatchPatternMemberRhs();

        if (listMatchPatternMember.kind == SyntaxKind.REST_MATCH_PATTERN) {
            // A rest pattern must be the last member; anything after it is invalid.
            isEndOfFields = true;
            break;
        }

        if (listMatchPatternMemberRhs != null) {
            matchPatternList.add(listMatchPatternMemberRhs);
        } else {
            break;
        }
    }

    // Consume (and flag as invalid) any members that illegally follow the rest pattern.
    while (isEndOfFields && listMatchPatternMemberRhs != null) {
        updateLastNodeInListWithInvalidNode(matchPatternList, listMatchPatternMemberRhs, null);
        if (peek().kind == SyntaxKind.CLOSE_BRACKET_TOKEN) {
            break;
        }
        STNode invalidField = parseListMatchPatternMember();
        updateLastNodeInListWithInvalidNode(matchPatternList, invalidField,
                DiagnosticErrorCode.ERROR_MATCH_PATTERN_AFTER_REST_MATCH_PATTERN);
        listMatchPatternMemberRhs = parseListMatchPatternMemberRhs();
    }

    STNode matchPatternListNode = STNodeFactory.createNodeList(matchPatternList);
    STNode closeBracketToken = parseCloseBracket();
    endContext();

    return STNodeFactory.createListMatchPatternNode(openBracketToken, matchPatternListNode, closeBracketToken);
}

public boolean isEndOfListMatchPattern() {
    switch (peek().kind) {
        case CLOSE_BRACKET_TOKEN:
        case EOF_TOKEN:
            return true;
        default:
            return false;
    }
}

private STNode parseListMatchPatternMember() {
    STNode nextToken = peek();
    switch (nextToken.kind) {
        case ELLIPSIS_TOKEN:
            return parseRestMatchPattern();
        default:
            // Any other member is an ordinary match pattern.
            return parseMatchPattern();
    }
}

/**
 * Parse rest match pattern.
 * <p>
 * <code>
 * rest-match-pattern := ... var variable-name
 * </code>
 * </p>
 *
 * @return Parsed rest match pattern node
 */
private STNode parseRestMatchPattern() {
    startContext(ParserRuleContext.REST_MATCH_PATTERN);
    STNode ellipsisToken = parseEllipsis();
    STNode varKeywordToken = parseVarKeyword();
    STNode variableName = parseVariableName();
    endContext();

    STSimpleNameReferenceNode simpleNameReferenceNode =
            (STSimpleNameReferenceNode) STNodeFactory.createSimpleNameReferenceNode(variableName);
    return STNodeFactory.createRestMatchPatternNode(ellipsisToken, varKeywordToken, simpleNameReferenceNode);
}

private STNode parseListMatchPatternMemberRhs() {
    switch (peek().kind) {
        case COMMA_TOKEN:
            return parseComma();
        case CLOSE_BRACKET_TOKEN:
        case EOF_TOKEN:
            // null signals the end of the member list to the caller.
            return null;
        default:
            recover(peek(), ParserRuleContext.LIST_MATCH_PATTERN_MEMBER_RHS);
            return parseListMatchPatternMemberRhs();
    }
}

/**
 * Parse mapping match pattern.
 * <p>
 * mapping-match-pattern := { field-match-patterns }
 * <br/>
 * field-match-patterns := field-match-pattern (, field-match-pattern)* [, rest-match-pattern]
 *                       | [ rest-match-pattern ]
 * <br/>
 * field-match-pattern := field-name : match-pattern
 * <br/>
 * rest-match-pattern := ... var variable-name
 * </p>
 *
 * @return Parsed Node.
*/
private STNode parseMappingMatchPattern() {
    startContext(ParserRuleContext.MAPPING_MATCH_PATTERN);
    STNode openBraceToken = parseOpenBrace();
    List<STNode> fieldMatchPatternList = new ArrayList<>();
    STNode fieldMatchPatternRhs = null;
    boolean isEndOfFields = false;

    while (!isEndOfMappingMatchPattern()) {
        STNode fieldMatchPatternMember = parseFieldMatchPatternMember();
        fieldMatchPatternList.add(fieldMatchPatternMember);
        fieldMatchPatternRhs = parseFieldMatchPatternRhs();

        if (fieldMatchPatternMember.kind == SyntaxKind.REST_MATCH_PATTERN) {
            // A rest pattern must be the last field; anything after it is invalid.
            isEndOfFields = true;
            break;
        }

        if (fieldMatchPatternRhs != null) {
            fieldMatchPatternList.add(fieldMatchPatternRhs);
        } else {
            break;
        }
    }

    // Consume (and flag as invalid) any fields that illegally follow the rest pattern.
    while (isEndOfFields && fieldMatchPatternRhs != null) {
        updateLastNodeInListWithInvalidNode(fieldMatchPatternList, fieldMatchPatternRhs, null);
        if (peek().kind == SyntaxKind.CLOSE_BRACE_TOKEN) {
            break;
        }
        STNode invalidField = parseFieldMatchPatternMember();
        updateLastNodeInListWithInvalidNode(fieldMatchPatternList, invalidField,
                DiagnosticErrorCode.ERROR_MATCH_PATTERN_AFTER_REST_MATCH_PATTERN);
        fieldMatchPatternRhs = parseFieldMatchPatternRhs();
    }

    STNode fieldMatchPatterns = STNodeFactory.createNodeList(fieldMatchPatternList);
    STNode closeBraceToken = parseCloseBrace();
    endContext();

    return STNodeFactory.createMappingMatchPatternNode(openBraceToken, fieldMatchPatterns, closeBraceToken);
}

private STNode parseFieldMatchPatternMember() {
    switch (peek().kind) {
        case IDENTIFIER_TOKEN:
            return parseFieldMatchPattern();
        case ELLIPSIS_TOKEN:
            return parseRestMatchPattern();
        default:
            recover(peek(), ParserRuleContext.FIELD_MATCH_PATTERN_MEMBER);
            return parseFieldMatchPatternMember();
    }
}

/**
 * Parse field match pattern.
* <p>
 * field-match-pattern := field-name : match-pattern
 * </p>
 *
 * @return Parsed field match pattern node
 */
public STNode parseFieldMatchPattern() {
    STNode fieldNameNode = parseVariableName();
    STNode colonToken = parseColon();
    STNode matchPattern = parseMatchPattern();
    return STNodeFactory.createFieldMatchPatternNode(fieldNameNode, colonToken, matchPattern);
}

public boolean isEndOfMappingMatchPattern() {
    switch (peek().kind) {
        case CLOSE_BRACE_TOKEN:
        case EOF_TOKEN:
            return true;
        default:
            return false;
    }
}

private STNode parseFieldMatchPatternRhs() {
    switch (peek().kind) {
        case COMMA_TOKEN:
            return parseComma();
        case CLOSE_BRACE_TOKEN:
        case EOF_TOKEN:
            // null signals the end of the field list to the caller.
            return null;
        default:
            recover(peek(), ParserRuleContext.FIELD_MATCH_PATTERN_MEMBER_RHS);
            return parseFieldMatchPatternRhs();
    }
}

private STNode parseErrorMatchPatternOrConsPattern(STNode typeRefOrConstExpr) {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case OPEN_PAREN_TOKEN:
            // `T ( ... )` without the `error` keyword: treat as an error match pattern
            // with a missing `error` keyword (a diagnostic is attached to the synthesized token).
            STNode errorKeyword = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.ERROR_KEYWORD,
                    ParserRuleContext.ERROR_KEYWORD);
            startContext(ParserRuleContext.ERROR_MATCH_PATTERN);
            return parseErrorMatchPattern(errorKeyword, typeRefOrConstExpr);
        default:
            if (isMatchPatternEnd(peek().kind)) {
                // Nothing follows the identifier: it is a const-reference pattern.
                return typeRefOrConstExpr;
            }

            recover(peek(), ParserRuleContext.ERROR_MATCH_PATTERN_OR_CONST_PATTERN, typeRefOrConstExpr);
            return parseErrorMatchPatternOrConsPattern(typeRefOrConstExpr);
    }
}

private boolean isMatchPatternEnd(SyntaxKind tokenKind) {
    switch (tokenKind) {
        case RIGHT_DOUBLE_ARROW_TOKEN:
        case COMMA_TOKEN:
        case CLOSE_BRACE_TOKEN:
        case CLOSE_BRACKET_TOKEN:
        case CLOSE_PAREN_TOKEN:
        case PIPE_TOKEN:
        case IF_KEYWORD:
        case EOF_TOKEN:
            return true;
        default:
            return false;
    }
}

/**
 * Parse error match pattern.
 * <p>
 * error-match-pattern := error [error-type-reference] ( error-arg-list-match-pattern )
 * error-arg-list-match-pattern :=
 *     error-message-match-pattern [, error-cause-match-pattern] [, error-field-match-patterns]
 *     | [error-field-match-patterns]
 * error-message-match-pattern := simple-match-pattern
 * error-cause-match-pattern := simple-match-pattern | error-match-pattern
 * simple-match-pattern :=
 *     wildcard-match-pattern
 *     | const-pattern
 *     | var variable-name
 * error-field-match-patterns :=
 *     named-arg-match-pattern (, named-arg-match-pattern)* [, rest-match-pattern]
 *     | rest-match-pattern
 * named-arg-match-pattern := arg-name = match-pattern
 * </p>
 *
 * @return Parsed functional match pattern node.
 */
private STNode parseErrorMatchPattern() {
    startContext(ParserRuleContext.ERROR_MATCH_PATTERN);
    // Caller has already established that the next token is the `error` keyword.
    STNode errorKeyword = consume();
    return parseErrorMatchPattern(errorKeyword);
}

// Parse the optional error-type-reference that may follow the `error` keyword.
private STNode parseErrorMatchPattern(STNode errorKeyword) {
    STToken nextToken = peek();
    STNode typeRef;
    switch (nextToken.kind) {
        case OPEN_PAREN_TOKEN:
            // No type reference: `error(...)`.
            typeRef = STNodeFactory.createEmptyNode();
            break;
        default:
            if (isPredeclaredIdentifier(nextToken.kind)) {
                typeRef = parseTypeReference();
                break;
            }
            recover(peek(), ParserRuleContext.ERROR_MATCH_PATTERN_ERROR_KEYWORD_RHS);
            return parseErrorMatchPattern(errorKeyword);
    }
    return parseErrorMatchPattern(errorKeyword, typeRef);
}

// Parse the parenthesized error-arg-list-match-pattern portion of an error match pattern.
private STNode parseErrorMatchPattern(STNode errorKeyword, STNode typeRef) {
    STNode openParenthesisToken = parseOpenParenthesis();
    STNode argListMatchPatternNode = parseErrorArgListMatchPatterns();
    STNode closeParenthesisToken = parseCloseParenthesis();
    endContext();
    return STNodeFactory.createErrorMatchPatternNode(errorKeyword, typeRef, openParenthesisToken,
            argListMatchPatternNode, closeParenthesisToken);
}

// Parse the members of an error-arg-list-match-pattern, validating their order.
private STNode parseErrorArgListMatchPatterns() {
    List<STNode> argListMatchPatterns = new ArrayList<>();
    if (isEndOfErrorFieldMatchPatterns()) {
        return STNodeFactory.createNodeList(argListMatchPatterns);
    }

    startContext(ParserRuleContext.ERROR_ARG_LIST_MATCH_PATTERN_FIRST_ARG);
    STNode firstArg = parseErrorArgListMatchPattern(ParserRuleContext.ERROR_ARG_LIST_MATCH_PATTERN_START);
    endContext();
    if (firstArg == null) {
        return STNodeFactory.createNodeList(argListMatchPatterns);
    }

    if (isSimpleMatchPattern(firstArg.kind)) {
        // First arg is the error-message-match-pattern; an optional second arg
        // (error-cause, named-arg, or rest pattern) may follow.
        argListMatchPatterns.add(firstArg);
        STNode argEnd = parseErrorArgListMatchPatternEnd(ParserRuleContext.ERROR_MESSAGE_MATCH_PATTERN_END);
        if (argEnd != null) {
            STNode secondArg = parseErrorArgListMatchPattern(ParserRuleContext.ERROR_MESSAGE_MATCH_PATTERN_RHS);
            if (isValidSecondArgMatchPattern(secondArg.kind)) {
                argListMatchPatterns.add(argEnd);
                argListMatchPatterns.add(secondArg);
            } else {
                // Invalid second arg: attach separator and arg to the previous node.
                updateLastNodeInListWithInvalidNode(argListMatchPatterns, argEnd, null);
                updateLastNodeInListWithInvalidNode(argListMatchPatterns, secondArg,
                        DiagnosticErrorCode.ERROR_MATCH_PATTERN_NOT_ALLOWED);
            }
        }
    } else {
        // Without a message pattern, only named-arg or rest match patterns are valid here.
        if (firstArg.kind != SyntaxKind.NAMED_ARG_MATCH_PATTERN &&
                firstArg.kind != SyntaxKind.REST_MATCH_PATTERN) {
            addInvalidNodeToNextToken(firstArg, DiagnosticErrorCode.ERROR_MATCH_PATTERN_NOT_ALLOWED);
        } else {
            argListMatchPatterns.add(firstArg);
        }
    }

    parseErrorFieldMatchPatterns(argListMatchPatterns);
    return STNodeFactory.createNodeList(argListMatchPatterns);
}

// Returns true for kinds that can act as a simple-match-pattern.
private boolean isSimpleMatchPattern(SyntaxKind matchPatternKind) {
    switch (matchPatternKind) {
        case IDENTIFIER_TOKEN:
        case SIMPLE_NAME_REFERENCE:
        case NUMERIC_LITERAL:
        case STRING_LITERAL:
        case NULL_LITERAL:
        case NIL_LITERAL:
        case BOOLEAN_LITERAL:
        case TYPED_BINDING_PATTERN:
        case UNARY_EXPRESSION:
            return true;
        default:
            return false;
    }
}

// Returns true for kinds allowed as the second arg: an error-cause match pattern,
// a named-arg, a rest pattern, or any simple match pattern.
private boolean isValidSecondArgMatchPattern(SyntaxKind syntaxKind) {
    switch (syntaxKind) {
        case ERROR_MATCH_PATTERN:
        case NAMED_ARG_MATCH_PATTERN:
        case REST_MATCH_PATTERN:
            return true;
        default:
            if (isSimpleMatchPattern(syntaxKind)) {
                return true;
            }
            return false;
    }
}

/**
 * Parse error field match patterns.
 * error-field-match-patterns :=
 *     named-arg-match-pattern (, named-arg-match-pattern)* [, rest-match-pattern]
 *     | rest-match-pattern
 * named-arg-match-pattern := arg-name = match-pattern
 *
 * @param argListMatchPatterns the list to which parsed (or invalid) members are attached
 */
private void parseErrorFieldMatchPatterns(List<STNode> argListMatchPatterns) {
    SyntaxKind lastValidArgKind = SyntaxKind.NAMED_ARG_MATCH_PATTERN;
    while (!isEndOfErrorFieldMatchPatterns()) {
        STNode argEnd = parseErrorArgListMatchPatternEnd(ParserRuleContext.ERROR_FIELD_MATCH_PATTERN_RHS);
        if (argEnd == null) {
            // null separator marks the end of the arg list.
            break;
        }
        STNode currentArg = parseErrorArgListMatchPattern(ParserRuleContext.ERROR_FIELD_MATCH_PATTERN);
        DiagnosticErrorCode errorCode = validateErrorFieldMatchPatternOrder(lastValidArgKind, currentArg.kind);
        if (errorCode == null) {
            argListMatchPatterns.add(argEnd);
            argListMatchPatterns.add(currentArg);
            lastValidArgKind = currentArg.kind;
        } else if (argListMatchPatterns.size() == 0) {
            // No valid member yet to attach the invalid nodes to: attach to the next token.
            addInvalidNodeToNextToken(argEnd, null);
            addInvalidNodeToNextToken(currentArg, errorCode);
        } else {
            updateLastNodeInListWithInvalidNode(argListMatchPatterns, argEnd, null);
            updateLastNodeInListWithInvalidNode(argListMatchPatterns, currentArg, errorCode);
        }
    }
}

// Same terminators as error field binding patterns.
private boolean isEndOfErrorFieldMatchPatterns() {
    return isEndOfErrorFieldBindingPatterns();
}

// Parse the separator after an error arg-list member, or return null at the close paren.
private STNode parseErrorArgListMatchPatternEnd(ParserRuleContext currentCtx) {
    switch (peek().kind) {
        case COMMA_TOKEN:
            return consume();
        case CLOSE_PAREN_TOKEN:
            return null;
        default:
            recover(peek(), currentCtx);
            return parseErrorArgListMatchPatternEnd(currentCtx);
    }
}

// Parse a single member of an error arg-list match pattern.
private STNode parseErrorArgListMatchPattern(ParserRuleContext context) {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case ELLIPSIS_TOKEN:
            return parseRestMatchPattern();
        case IDENTIFIER_TOKEN:
            // Can be either a named-arg match pattern or a simple match pattern.
            return parseNamedOrSimpleMatchPattern();
        case OPEN_PAREN_TOKEN:
        case NULL_KEYWORD:
        case TRUE_KEYWORD:
        case FALSE_KEYWORD:
        case PLUS_TOKEN:
        case MINUS_TOKEN:
        case DECIMAL_INTEGER_LITERAL_TOKEN:
        case HEX_INTEGER_LITERAL_TOKEN:
        case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
        case HEX_FLOATING_POINT_LITERAL_TOKEN:
        case STRING_LITERAL_TOKEN:
        case OPEN_BRACKET_TOKEN:
        case OPEN_BRACE_TOKEN:
        case ERROR_KEYWORD:
            return parseMatchPattern();
        case VAR_KEYWORD:
            // `var x`: a typed binding pattern with an inferred type.
            STNode varType = createBuiltinSimpleNameReference(consume());
            STNode variableName = createCaptureOrWildcardBP(parseVariableName());
            return STNodeFactory.createTypedBindingPatternNode(varType, variableName);
        case CLOSE_PAREN_TOKEN:
            return null;
        default:
            recover(nextToken, context);
            return parseErrorArgListMatchPattern(context);
    }
}

// Parse either a named-arg match pattern (`name = pattern`) or return the bare identifier.
private STNode parseNamedOrSimpleMatchPattern() {
    STNode identifier = consume();
    STToken secondToken = peek();
    switch (secondToken.kind) {
        case EQUAL_TOKEN:
            return parseNamedArgMatchPattern(identifier);
        case COMMA_TOKEN:
        case CLOSE_PAREN_TOKEN:
        default:
            return identifier;
    }
}

/**
 * Parses the next named arg match pattern.
 * <br/>
 * <code>named-arg-match-pattern := arg-name = match-pattern</code>
 * <br/>
 * <br/>
 *
 * @return arg match pattern list node added the new arg match pattern
 */
private STNode parseNamedArgMatchPattern(STNode identifier) {
    startContext(ParserRuleContext.NAMED_ARG_MATCH_PATTERN);
    STNode equalToken = parseAssignOp();
    STNode matchPattern = parseMatchPattern();
    endContext();
    return STNodeFactory.createNamedArgMatchPatternNode(identifier, equalToken, matchPattern);
}

// Validate error-field-match-pattern ordering: only named-arg and rest patterns are
// allowed, and nothing may follow a rest pattern.
// Returns null when the order is valid, otherwise the diagnostic to attach.
private DiagnosticErrorCode validateErrorFieldMatchPatternOrder(SyntaxKind prevArgKind, SyntaxKind currentArgKind) {
    switch (currentArgKind) {
        case NAMED_ARG_MATCH_PATTERN:
        case REST_MATCH_PATTERN:
            if (prevArgKind == SyntaxKind.REST_MATCH_PATTERN) {
                return DiagnosticErrorCode.ERROR_REST_ARG_FOLLOWED_BY_ANOTHER_ARG;
            }
            return null;
        default:
            return DiagnosticErrorCode.ERROR_MATCH_PATTERN_NOT_ALLOWED;
    }
}

/**
 * Parse markdown documentation.
 *
 * @return markdown documentation node
 */
private STNode parseMarkdownDocumentation() {
    List<STNode> markdownDocLineList = new ArrayList<>();

    // Consume consecutive documentation-string tokens and split each into doc lines.
    STToken nextToken = peek();
    while (nextToken.kind == SyntaxKind.DOCUMENTATION_STRING) {
        STToken documentationString = consume();
        STNode parsedDocLines = parseDocumentationString(documentationString);
        appendParsedDocumentationLines(markdownDocLineList, parsedDocLines);
        nextToken = peek();
    }

    STNode markdownDocLines = STNodeFactory.createNodeList(markdownDocLineList);
    return STNodeFactory.createMarkdownDocumentationNode(markdownDocLines);
}

/**
 * Parse documentation string.
 *
 * @return markdown documentation line list node
 */
private STNode parseDocumentationString(STToken documentationStringToken) {
    List<STNode> leadingTriviaList = getLeadingTriviaList(documentationStringToken.leadingMinutiae());
    Collection<STNodeDiagnostic> diagnostics = new ArrayList<>((documentationStringToken.diagnostics()));

    // Re-lex the raw documentation text with the dedicated documentation lexer/parser,
    // carrying over the token's leading trivia and existing diagnostics.
    CharReader charReader = CharReader.from(documentationStringToken.text());
    DocumentationLexer documentationLexer = new DocumentationLexer(charReader, leadingTriviaList, diagnostics);
    AbstractTokenReader tokenReader = new TokenReader(documentationLexer);
    DocumentationParser documentationParser = new DocumentationParser(tokenReader);
    return documentationParser.parse();
}

// Flatten the leading minutiae node into a list of its child trivia nodes.
private List<STNode> getLeadingTriviaList(STNode leadingMinutiaeNode) {
    List<STNode> leadingTriviaList = new ArrayList<>();
    int bucketCount = leadingMinutiaeNode.bucketCount();
    for (int i = 0; i < bucketCount; i++) {
        leadingTriviaList.add(leadingMinutiaeNode.childInBucket(i));
    }
    return leadingTriviaList;
}

// Append each parsed documentation line to the accumulating list.
private void appendParsedDocumentationLines(List<STNode> markdownDocLineList, STNode parsedDocLines) {
    int bucketCount = parsedDocLines.bucketCount();
    for (int i = 0; i < bucketCount; i++) {
        STNode markdownDocLine = parsedDocLines.childInBucket(i);
        markdownDocLineList.add(markdownDocLine);
    }
}

/**
 * Parse any statement that starts with a token that has ambiguity
 * between being a type-desc or an expression.
 *
 * @param annots Annotations
 * @param qualifiers Preceding qualifiers
 * @return Statement node
 */
private STNode parseStmtStartsWithTypeOrExpr(STNode annots, List<STNode> qualifiers) {
    startContext(ParserRuleContext.AMBIGUOUS_STMT);
    STNode typeOrExpr = parseTypedBindingPatternOrExpr(qualifiers, true);
    return parseStmtStartsWithTypedBPOrExprRhs(annots, typeOrExpr);
}

// Continue the statement once the ambiguous prefix has been resolved into either a
// typed-binding-pattern (var-decl statement) or an expression (expression statement).
private STNode parseStmtStartsWithTypedBPOrExprRhs(STNode annots, STNode typedBindingPatternOrExpr) {
    if (typedBindingPatternOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) {
        List<STNode> varDeclQualifiers = new ArrayList<>();
        switchContext(ParserRuleContext.VAR_DECL_STMT);
        return parseVarDeclRhs(annots, varDeclQualifiers, typedBindingPatternOrExpr, false);
    }

    STNode expr = getExpression(typedBindingPatternOrExpr);
    expr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, expr, false, true);
    return parseStatementStartWithExprRhs(expr);
}

private STNode parseTypedBindingPatternOrExpr(boolean allowAssignment) {
    List<STNode> typeDescQualifiers = new ArrayList<>();
    return parseTypedBindingPatternOrExpr(typeDescQualifiers, allowAssignment);
}

// Parse a construct that may turn out to be a typed-binding-pattern or an expression.
private STNode parseTypedBindingPatternOrExpr(List<STNode> qualifiers, boolean allowAssignment) {
    parseTypeDescQualifiers(qualifiers);
    STToken nextToken = peek();
    STNode typeOrExpr;
    if (isPredeclaredIdentifier(nextToken.kind)) {
        reportInvalidQualifierList(qualifiers);
        typeOrExpr = parseQualifiedIdentifier(ParserRuleContext.TYPE_NAME_OR_VAR_NAME);
        return parseTypedBindingPatternOrExprRhs(typeOrExpr, allowAssignment);
    }
    switch (nextToken.kind) {
        case OPEN_PAREN_TOKEN:
            reportInvalidQualifierList(qualifiers);
            return parseTypedBPOrExprStartsWithOpenParenthesis();
        case FUNCTION_KEYWORD:
            return parseAnonFuncExprOrTypedBPWithFuncType(qualifiers);
        case OPEN_BRACKET_TOKEN:
            reportInvalidQualifierList(qualifiers);
            typeOrExpr = parseTupleTypeDescOrExprStartsWithOpenBracket();
            return parseTypedBindingPatternOrExprRhs(typeOrExpr, allowAssignment);
        // Basic literals are ambiguous: they can start a singleton type or an expression.
        case DECIMAL_INTEGER_LITERAL_TOKEN:
        case HEX_INTEGER_LITERAL_TOKEN:
        case STRING_LITERAL_TOKEN:
        case NULL_KEYWORD:
        case TRUE_KEYWORD:
        case FALSE_KEYWORD:
        case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
        case HEX_FLOATING_POINT_LITERAL_TOKEN:
            reportInvalidQualifierList(qualifiers);
            STNode basicLiteral = parseBasicLiteral();
            return parseTypedBindingPatternOrExprRhs(basicLiteral, allowAssignment);
        default:
            if (isValidExpressionStart(nextToken.kind, 1)) {
                reportInvalidQualifierList(qualifiers);
                return parseActionOrExpressionInLhs(STNodeFactory.createEmptyNodeList());
            }
            return parseTypedBindingPattern(qualifiers, ParserRuleContext.VAR_DECL_STMT);
    }
}

/**
 * Parse the component after the ambiguous starting node. Ambiguous node could be either an expr
 * or a type-desc. The component followed by this ambiguous node could be the binding-pattern or
 * the expression-rhs.
 *
 * @param typeOrExpr Type desc or the expression
 * @param allowAssignment Flag indicating whether to allow assignment. i.e.: whether this is a
 *            valid lvalue expression
 * @return Typed-binding-pattern node or an expression node
 */
private STNode parseTypedBindingPatternOrExprRhs(STNode typeOrExpr, boolean allowAssignment) {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case PIPE_TOKEN:
            STToken nextNextToken = peek(2);
            if (nextNextToken.kind == SyntaxKind.EQUAL_TOKEN) {
                // `x | = ...`: keep the prefix as-is; the `|` belongs to recovery.
                return typeOrExpr;
            }

            STNode pipe = parsePipeToken();
            STNode rhsTypedBPOrExpr = parseTypedBindingPatternOrExpr(allowAssignment);
            if (rhsTypedBPOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) {
                // `T1 | T2 bp`: fold the lhs into a union type-desc.
                STTypedBindingPatternNode typedBP = (STTypedBindingPatternNode) rhsTypedBPOrExpr;
                typeOrExpr = getTypeDescFromExpr(typeOrExpr);
                STNode newTypeDesc = createUnionTypeDesc(typeOrExpr, pipe, typedBP.typeDescriptor);
                return STNodeFactory.createTypedBindingPatternNode(newTypeDesc, typedBP.bindingPattern);
            }

            if (peek().kind == SyntaxKind.EQUAL_TOKEN) {
                // `T1 | T2 = ...`: var-decl with a missing variable name.
                return createCaptureBPWithMissingVarName(typeOrExpr, pipe, rhsTypedBPOrExpr);
            }
            return STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, typeOrExpr, pipe,
                    rhsTypedBPOrExpr);
        case BITWISE_AND_TOKEN:
            nextNextToken = peek(2);
            if (nextNextToken.kind == SyntaxKind.EQUAL_TOKEN) {
                return typeOrExpr;
            }

            STNode ampersand = parseBinaryOperator();
            rhsTypedBPOrExpr = parseTypedBindingPatternOrExpr(allowAssignment);
            if (rhsTypedBPOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) {
                // `T1 & T2 bp`: fold the lhs into an intersection type-desc.
                STTypedBindingPatternNode typedBP = (STTypedBindingPatternNode) rhsTypedBPOrExpr;
                typeOrExpr = getTypeDescFromExpr(typeOrExpr);
                STNode newTypeDesc = createIntersectionTypeDesc(typeOrExpr, ampersand, typedBP.typeDescriptor);
                return STNodeFactory.createTypedBindingPatternNode(newTypeDesc, typedBP.bindingPattern);
            }

            if (peek().kind == SyntaxKind.EQUAL_TOKEN) {
                return createCaptureBPWithMissingVarName(typeOrExpr, ampersand, rhsTypedBPOrExpr);
            }
            return STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, typeOrExpr, ampersand,
                    rhsTypedBPOrExpr);
        case SEMICOLON_TOKEN:
            if (isDefiniteExpr(typeOrExpr.kind)) {
                return typeOrExpr;
            }

            if (isDefiniteTypeDesc(typeOrExpr.kind) || !isAllBasicLiterals(typeOrExpr)) {
                // Treat as a var-decl whose variable name is missing.
                STNode typeDesc = getTypeDescFromExpr(typeOrExpr);
                return parseTypeBindingPatternStartsWithAmbiguousNode(typeDesc);
            }
            return typeOrExpr;
        case IDENTIFIER_TOKEN:
        case QUESTION_MARK_TOKEN:
            if (isAmbiguous(typeOrExpr) || isDefiniteTypeDesc(typeOrExpr.kind)) {
                // An identifier or `?` follows: the prefix was a type-desc.
                STNode typeDesc = getTypeDescFromExpr(typeOrExpr);
                return parseTypeBindingPatternStartsWithAmbiguousNode(typeDesc);
            }
            return typeOrExpr;
        case EQUAL_TOKEN:
            // Assignment statement: the prefix is an lvalue expression.
            return typeOrExpr;
        case OPEN_BRACKET_TOKEN:
            return parseTypedBindingPatternOrMemberAccess(typeOrExpr, false, allowAssignment,
                    ParserRuleContext.AMBIGUOUS_STMT);
        case OPEN_BRACE_TOKEN: // mapping binding pattern
        case ERROR_KEYWORD: // error binding pattern
            STNode typeDesc = getTypeDescFromExpr(typeOrExpr);
            return parseTypeBindingPatternStartsWithAmbiguousNode(typeDesc);
        default:
            // A compound binary operator implies a compound assignment statement.
            if (isCompoundBinaryOperator(nextToken.kind)) {
                return typeOrExpr;
            }

            if (isValidExprRhsStart(nextToken.kind, typeOrExpr.kind)) {
                return typeOrExpr;
            }

            STToken token = peek();
            recover(token, ParserRuleContext.BINDING_PATTERN_OR_EXPR_RHS, typeOrExpr, allowAssignment);
            return parseTypedBindingPatternOrExprRhs(typeOrExpr, allowAssignment);
    }
}

// Build a typed binding pattern `T1 | T2` (or `T1 & T2`) with a missing variable name,
// used for recovery when `=` directly follows the type.
private STNode createCaptureBPWithMissingVarName(STNode lhsType, STNode separatorToken, STNode rhsType) {
    lhsType = getTypeDescFromExpr(lhsType);
    rhsType = getTypeDescFromExpr(rhsType);

    STNode newTypeDesc;
    if (separatorToken.kind == SyntaxKind.PIPE_TOKEN) {
        newTypeDesc = createUnionTypeDesc(lhsType, separatorToken, rhsType);
    } else {
        newTypeDesc = createIntersectionTypeDesc(lhsType, separatorToken, rhsType);
    }

    STNode identifier = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,
            ParserRuleContext.VARIABLE_NAME);
    STNode captureBP = STNodeFactory.createCaptureBindingPatternNode(identifier);
    return STNodeFactory.createTypedBindingPatternNode(newTypeDesc, captureBP);
}

// Continue a var-decl once the ambiguous prefix has been decided to be a type-desc.
private STNode parseTypeBindingPatternStartsWithAmbiguousNode(STNode typeDesc) {
    typeDesc = parseComplexTypeDescriptor(typeDesc, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, false);
    return parseTypedBindingPatternTypeRhs(typeDesc, ParserRuleContext.VAR_DECL_STMT);
}

private STNode parseTypedBPOrExprStartsWithOpenParenthesis() {
    STNode exprOrTypeDesc = parseTypedDescOrExprStartsWithOpenParenthesis();
    if (isDefiniteTypeDesc(exprOrTypeDesc.kind)) {
        return parseTypeBindingPatternStartsWithAmbiguousNode(exprOrTypeDesc);
    }

    return parseTypedBindingPatternOrExprRhs(exprOrTypeDesc, false);
}

// Relies on SyntaxKind declaration order: type-desc kinds lie in the range
// [RECORD_TYPE_DESC, SINGLETON_TYPE_DESC].
private boolean isDefiniteTypeDesc(SyntaxKind kind) {
    return kind.compareTo(SyntaxKind.RECORD_TYPE_DESC) >= 0 && kind.compareTo(SyntaxKind.SINGLETON_TYPE_DESC) <= 0;
}

// Relies on SyntaxKind declaration order: expression kinds lie in the range
// [BINARY_EXPRESSION, XML_ATOMIC_NAME_PATTERN]. Name references stay ambiguous.
private boolean isDefiniteExpr(SyntaxKind kind) {
    if (kind == SyntaxKind.QUALIFIED_NAME_REFERENCE || kind == SyntaxKind.SIMPLE_NAME_REFERENCE) {
        return false;
    }
    return kind.compareTo(SyntaxKind.BINARY_EXPRESSION) >= 0 &&
            kind.compareTo(SyntaxKind.XML_ATOMIC_NAME_PATTERN) <= 0;
}

/**
 * Parse type or expression that
 * starts with open parenthesis. Possible options are:
 * 1) () - nil type-desc or nil-literal
 * 2) (T) - Parenthesized type-desc
 * 3) (expr) - Parenthesized expression
 * 4) (param, param, ..) - Anon function params
 *
 * @return Type-desc or expression node
 */
private STNode parseTypedDescOrExprStartsWithOpenParenthesis() {
    STNode openParen = parseOpenParenthesis();
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.CLOSE_PAREN_TOKEN) {
        // `()`: nil literal, nil type-desc, or empty anon-func params.
        STNode closeParen = parseCloseParenthesis();
        return parseTypeOrExprStartWithEmptyParenthesis(openParen, closeParen);
    }

    STNode typeOrExpr = parseTypeDescOrExpr();
    if (isAction(typeOrExpr)) {
        STNode closeParen = parseCloseParenthesis();
        return STNodeFactory.createBracedExpressionNode(SyntaxKind.BRACED_ACTION, openParen, typeOrExpr,
                closeParen);
    }

    if (isExpression(typeOrExpr.kind)) {
        // Could still be a braced expression or the single param of an anon-func.
        startContext(ParserRuleContext.BRACED_EXPR_OR_ANON_FUNC_PARAMS);
        return parseBracedExprOrAnonFuncParamRhs(openParen, typeOrExpr, false);
    }

    STNode closeParen = parseCloseParenthesis();
    STNode typeDescNode = getTypeDescFromExpr(typeOrExpr);
    return STNodeFactory.createParenthesisedTypeDescriptorNode(openParen, typeDescNode, closeParen);
}

/**
 * Parse type-desc or expression. This method does not handle binding patterns.
 *
 * @return Type-desc node or expression node
 */
private STNode parseTypeDescOrExpr() {
    List<STNode> typeDescQualifiers = new ArrayList<>();
    return parseTypeDescOrExpr(typeDescQualifiers);
}

// Parse a construct that may be a type-desc or an expression (binding patterns excluded).
private STNode parseTypeDescOrExpr(List<STNode> qualifiers) {
    parseTypeDescQualifiers(qualifiers);
    STToken nextToken = peek();
    STNode typeOrExpr;
    switch (nextToken.kind) {
        case OPEN_PAREN_TOKEN:
            reportInvalidQualifierList(qualifiers);
            typeOrExpr = parseTypedDescOrExprStartsWithOpenParenthesis();
            break;
        case FUNCTION_KEYWORD:
            typeOrExpr = parseAnonFuncExprOrFuncTypeDesc(qualifiers);
            break;
        case IDENTIFIER_TOKEN:
            reportInvalidQualifierList(qualifiers);
            typeOrExpr = parseQualifiedIdentifier(ParserRuleContext.TYPE_NAME_OR_VAR_NAME);
            return parseTypeDescOrExprRhs(typeOrExpr);
        case OPEN_BRACKET_TOKEN:
            reportInvalidQualifierList(qualifiers);
            typeOrExpr = parseTupleTypeDescOrExprStartsWithOpenBracket();
            break;
        // Basic literals are ambiguous: singleton type-desc or expression.
        case DECIMAL_INTEGER_LITERAL_TOKEN:
        case HEX_INTEGER_LITERAL_TOKEN:
        case STRING_LITERAL_TOKEN:
        case NULL_KEYWORD:
        case TRUE_KEYWORD:
        case FALSE_KEYWORD:
        case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
        case HEX_FLOATING_POINT_LITERAL_TOKEN:
            reportInvalidQualifierList(qualifiers);
            STNode basicLiteral = parseBasicLiteral();
            return parseTypeDescOrExprRhs(basicLiteral);
        default:
            if (isValidExpressionStart(nextToken.kind, 1)) {
                reportInvalidQualifierList(qualifiers);
                return parseActionOrExpressionInLhs(STNodeFactory.createEmptyNodeList());
            }
            return parseTypeDescriptor(qualifiers, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN);
    }

    if (isDefiniteTypeDesc(typeOrExpr.kind)) {
        return parseComplexTypeDescriptor(typeOrExpr, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);
    }
    return parseTypeDescOrExprRhs(typeOrExpr);
}

// Returns true when the given kind is definitely an expression: literal kinds, or
// kinds in the expression range of SyntaxKind (declaration-order dependent).
private boolean isExpression(SyntaxKind kind) {
    switch (kind) {
        case NUMERIC_LITERAL:
        case STRING_LITERAL_TOKEN:
        case NIL_LITERAL:
        case NULL_LITERAL:
        case BOOLEAN_LITERAL:
            return true;
        default:
            return kind.compareTo(SyntaxKind.BINARY_EXPRESSION) >= 0 &&
                    kind.compareTo(SyntaxKind.XML_ATOMIC_NAME_PATTERN) <= 0;
    }
}

/**
 * Parse statement that starts with an empty parenthesis. Empty parenthesis can be
 * 1) Nil literal
 * 2) Nil type-desc
 * 3) Anon-function params
 *
 * @param openParen Open parenthesis
 * @param closeParen Close parenthesis
 * @return Parsed node
 */
private STNode parseTypeOrExprStartWithEmptyParenthesis(STNode openParen, STNode closeParen) {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case RIGHT_DOUBLE_ARROW_TOKEN:
            // `() => ...`: implicit anon-func with zero parameters.
            STNode params = STNodeFactory.createEmptyNodeList();
            STNode anonFuncParam =
                    STNodeFactory.createImplicitAnonymousFunctionParameters(openParen, params, closeParen);
            endContext();
            return anonFuncParam;
        default:
            return STNodeFactory.createNilLiteralNode(openParen, closeParen);
    }
}

// Resolve `function ...` into either an anon-func expression or a typed binding
// pattern whose type is a function-type-desc.
private STNode parseAnonFuncExprOrTypedBPWithFuncType(List<STNode> qualifiers) {
    STNode exprOrTypeDesc = parseAnonFuncExprOrFuncTypeDesc(qualifiers);
    if (isAction(exprOrTypeDesc) || isExpression(exprOrTypeDesc.kind)) {
        return exprOrTypeDesc;
    }

    return parseTypedBindingPatternTypeRhs(exprOrTypeDesc, ParserRuleContext.VAR_DECL_STMT);
}

/**
 * Parse anon-func-expr or function-type-desc, by resolving the ambiguity.
 *
 * @param qualifiers Preceding qualifiers
 * @return Anon-func-expr or function-type-desc
 */
private STNode parseAnonFuncExprOrFuncTypeDesc(List<STNode> qualifiers) {
    startContext(ParserRuleContext.FUNC_TYPE_DESC_OR_ANON_FUNC);
    STNode qualifierList;
    STNode functionKeyword = parseFunctionKeyword();
    STNode funcSignature;
    if (peek().kind == SyntaxKind.OPEN_PAREN_TOKEN) {
        funcSignature = parseFuncSignature(true);
        qualifierList = createFuncTypeQualNodeList(qualifiers, true);
        endContext();
        return parseAnonFuncExprOrFuncTypeDesc(qualifierList, functionKeyword, funcSignature);
    }

    // No signature follows: this must be a function-type-desc.
    funcSignature = STNodeFactory.createEmptyNode();
    qualifierList = createFuncTypeQualNodeList(qualifiers, false);
    STNode funcTypeDesc = STNodeFactory.createFunctionTypeDescriptorNode(qualifierList, functionKeyword,
            funcSignature);
    if (getCurrentContext() != ParserRuleContext.STMT_START_BRACKETED_LIST) {
        switchContext(ParserRuleContext.VAR_DECL_STMT);
        return parseComplexTypeDescriptor(funcTypeDesc, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN,
                true);
    }
    return parseComplexTypeDescriptor(funcTypeDesc, ParserRuleContext.TYPE_DESC_IN_TUPLE, false);
}

// Resolve the ambiguity by inspecting what follows the parsed function signature.
private STNode parseAnonFuncExprOrFuncTypeDesc(STNode qualifierList, STNode functionKeyword, STNode funcSignature) {
    ParserRuleContext currentCtx = getCurrentContext();
    switch (peek().kind) {
        case OPEN_BRACE_TOKEN:
        case RIGHT_DOUBLE_ARROW_TOKEN:
            // A function body follows: explicit anonymous function expression.
            if (currentCtx != ParserRuleContext.STMT_START_BRACKETED_LIST) {
                switchContext(ParserRuleContext.EXPRESSION_STATEMENT);
            }
            startContext(ParserRuleContext.ANON_FUNC_EXPRESSION);
            funcSignature = validateAndGetFuncParams((STFunctionSignatureNode) funcSignature);
            STNode funcBody = parseAnonFuncBody(false);
            STNode annots = STNodeFactory.createEmptyNodeList();
            STNode anonFunc = STNodeFactory.createExplicitAnonymousFunctionExpressionNode(annots, qualifierList,
                    functionKeyword, funcSignature, funcBody);
            return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, anonFunc, false, true);
        case IDENTIFIER_TOKEN:
        default:
            // Otherwise treat it as a function-type-desc (e.g. the type of a var-decl).
            STNode funcTypeDesc = STNodeFactory.createFunctionTypeDescriptorNode(qualifierList, functionKeyword,
                    funcSignature);
            if (currentCtx != ParserRuleContext.STMT_START_BRACKETED_LIST) {
                switchContext(ParserRuleContext.VAR_DECL_STMT);
                return parseComplexTypeDescriptor(funcTypeDesc,
                        ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);
            }
            return parseComplexTypeDescriptor(funcTypeDesc, ParserRuleContext.TYPE_DESC_IN_TUPLE, false);
    }
}

// Continue parsing after an ambiguous type-desc-or-expression prefix.
private STNode parseTypeDescOrExprRhs(STNode typeOrExpr) {
    STToken nextToken = peek();
    STNode typeDesc;
    switch (nextToken.kind) {
        case PIPE_TOKEN:
            STToken nextNextToken = peek(2);
            if (nextNextToken.kind == SyntaxKind.EQUAL_TOKEN) {
                return typeOrExpr;
            }

            STNode pipe = parsePipeToken();
            STNode rhsTypeDescOrExpr = parseTypeDescOrExpr();
            if (isExpression(rhsTypeDescOrExpr.kind)) {
                return STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, typeOrExpr, pipe,
                        rhsTypeDescOrExpr);
            }

            // Both sides are type-descs: build a union type-desc.
            typeDesc = getTypeDescFromExpr(typeOrExpr);
            rhsTypeDescOrExpr = getTypeDescFromExpr(rhsTypeDescOrExpr);
            return createUnionTypeDesc(typeDesc, pipe, rhsTypeDescOrExpr);
        case BITWISE_AND_TOKEN:
            nextNextToken = peek(2);
            if (nextNextToken.kind == SyntaxKind.EQUAL_TOKEN) {
                return typeOrExpr;
            }

            STNode ampersand = parseBinaryOperator();
            rhsTypeDescOrExpr = parseTypeDescOrExpr();
            if (isExpression(rhsTypeDescOrExpr.kind)) {
                return STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, typeOrExpr,
                        ampersand, rhsTypeDescOrExpr);
            }

            // Both sides are type-descs: build an intersection type-desc.
            typeDesc = getTypeDescFromExpr(typeOrExpr);
            rhsTypeDescOrExpr = getTypeDescFromExpr(rhsTypeDescOrExpr);
            return createIntersectionTypeDesc(typeDesc, ampersand, rhsTypeDescOrExpr);
        case IDENTIFIER_TOKEN:
        case QUESTION_MARK_TOKEN:
            // A name or `?` follows: the prefix was a type-desc.
            typeDesc = parseComplexTypeDescriptor(typeOrExpr,
                    ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, false);
            return typeDesc;
        case SEMICOLON_TOKEN:
            return getTypeDescFromExpr(typeOrExpr);
        case EQUAL_TOKEN:
        case CLOSE_PAREN_TOKEN:
        case CLOSE_BRACE_TOKEN:
        case CLOSE_BRACKET_TOKEN:
        case EOF_TOKEN:
        case COMMA_TOKEN:
            return typeOrExpr;
        case OPEN_BRACKET_TOKEN:
            return parseTypedBindingPatternOrMemberAccess(typeOrExpr, false, true,
                    ParserRuleContext.AMBIGUOUS_STMT);
        case ELLIPSIS_TOKEN:
            // `T...`: a rest descriptor (e.g. inside a tuple type).
            STNode ellipsis = parseEllipsis();
            typeOrExpr = getTypeDescFromExpr(typeOrExpr);
            return STNodeFactory.createRestDescriptorNode(typeOrExpr, ellipsis);
        default:
            // A compound binary operator implies a compound assignment statement.
            if (isCompoundBinaryOperator(nextToken.kind)) {
                return typeOrExpr;
            }

            if (isValidExprRhsStart(nextToken.kind, typeOrExpr.kind)) {
                return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, typeOrExpr, false, false, false, false);
            }

            recover(peek(), ParserRuleContext.TYPE_DESC_OR_EXPR_RHS, typeOrExpr);
            return parseTypeDescOrExprRhs(typeOrExpr);
    }
}

// Returns true when the node could still be read either as a type-desc or as an expression.
private boolean isAmbiguous(STNode node) {
    switch (node.kind) {
        case SIMPLE_NAME_REFERENCE:
        case QUALIFIED_NAME_REFERENCE:
        case NIL_LITERAL:
        case NULL_LITERAL:
        case NUMERIC_LITERAL:
        case STRING_LITERAL:
        case BOOLEAN_LITERAL:
        case BRACKETED_LIST:
            return true;
        case BINARY_EXPRESSION:
            STBinaryExpressionNode binaryExpr = (STBinaryExpressionNode) node;
            // NOTE(review): the second operand of this `||` is unreachable (a
            // BITWISE_AND_TOKEN is by definition != PIPE_TOKEN), so only `|` expressions
            // remain ambiguous. Possibly `&&` was intended so `T1 & T2` also stays
            // ambiguous - confirm before changing.
            if (binaryExpr.operator.kind != SyntaxKind.PIPE_TOKEN ||
                    binaryExpr.operator.kind == SyntaxKind.BITWISE_AND_TOKEN) {
                return false;
            }
            return isAmbiguous(binaryExpr.lhsExpr) && isAmbiguous(binaryExpr.rhsExpr);
        case BRACED_EXPRESSION:
            return isAmbiguous(((STBracedExpressionNode) node).expression);
        case INDEXED_EXPRESSION:
            STIndexedExpressionNode indexExpr = (STIndexedExpressionNode) node;
            if (!isAmbiguous(indexExpr.containerExpression)) {
                return false;
            }

            // All key expressions (separators skipped) must themselves be ambiguous.
            STNode keys = indexExpr.keyExpression;
            for (int i = 0; i < keys.bucketCount(); i++) {
                STNode item = keys.childInBucket(i);
                if (item.kind == SyntaxKind.COMMA_TOKEN) {
                    continue;
                }
                if (!isAmbiguous(item)) {
                    return false;
                }
            }
            return true;
        default:
            return false;
    }
}

// Returns true when the node consists solely of basic literals (possibly grouped or
// combined), i.e. it could still denote a singleton-type union.
private boolean isAllBasicLiterals(STNode node) {
    switch (node.kind) {
        case NIL_LITERAL:
        case NULL_LITERAL:
        case NUMERIC_LITERAL:
        case STRING_LITERAL:
        case BOOLEAN_LITERAL:
            return true;
        case BINARY_EXPRESSION:
            STBinaryExpressionNode binaryExpr = (STBinaryExpressionNode) node;
            // NOTE(review): same unreachable `||` operand as in isAmbiguous(), and the
            // recursion delegates to isAmbiguous() rather than isAllBasicLiterals() -
            // verify whether that is intentional.
            if (binaryExpr.operator.kind != SyntaxKind.PIPE_TOKEN ||
                    binaryExpr.operator.kind == SyntaxKind.BITWISE_AND_TOKEN) {
                return false;
            }
            return isAmbiguous(binaryExpr.lhsExpr) && isAmbiguous(binaryExpr.rhsExpr);
        case BRACED_EXPRESSION:
            return isAmbiguous(((STBracedExpressionNode) node).expression);
        case BRACKETED_LIST:
            STAmbiguousCollectionNode list = (STAmbiguousCollectionNode) node;
            for (STNode member : list.members) {
                if (member.kind == SyntaxKind.COMMA_TOKEN) {
                    continue;
                }
                if (!isAllBasicLiterals(member)) {
                    return false;
                }
            }
            return true;
        case UNARY_EXPRESSION:
            // Only signed numeric literals (`+n`, `-n`) count as basic literals.
            STUnaryExpressionNode unaryExpr = (STUnaryExpressionNode) node;
            if (unaryExpr.unaryOperator.kind != SyntaxKind.PLUS_TOKEN &&
                    unaryExpr.unaryOperator.kind != SyntaxKind.MINUS_TOKEN) {
                return false;
            }
            return isNumericLiteral(unaryExpr.expression);
        default:
            return false;
    }
}

private boolean isNumericLiteral(STNode node) {
    switch (node.kind) {
        case NUMERIC_LITERAL:
            return true;
        default:
            return false;
    }
}

// Parse `[ ... ]` that may be a tuple type-desc or a list constructor; members are
// parsed ambiguously and the whole construct is kept as a tuple type-desc for now.
private STNode parseTupleTypeDescOrExprStartsWithOpenBracket() {
    startContext(ParserRuleContext.BRACKETED_LIST);
    STNode openBracket = parseOpenBracket();
    List<STNode> members = new ArrayList<>();
    STNode memberEnd;
    while (!isEndOfListConstructor(peek().kind)) {
        STNode expr = parseTypeDescOrExpr();
        if (peek().kind == SyntaxKind.ELLIPSIS_TOKEN && isDefiniteTypeDesc(expr.kind)) {
            // `T...`: a rest descriptor inside the tuple type.
            STNode ellipsis = consume();
            expr = STNodeFactory.createRestDescriptorNode(expr, ellipsis);
        }
        members.add(expr);

        memberEnd = parseBracketedListMemberEnd();
        if (memberEnd == null) {
            break;
        }
        members.add(memberEnd);
    }

    STNode memberNodes = STNodeFactory.createNodeList(members);
    STNode closeBracket = parseCloseBracket();
    endContext();
    return STNodeFactory.createTupleTypeDescriptorNode(openBracket, memberNodes, closeBracket);
}

/**
 * Parse binding-patterns.
 * <p>
 * <code>
 * binding-pattern := capture-binding-pattern
 *                  | wildcard-binding-pattern
 *                  | list-binding-pattern
 *                  | mapping-binding-pattern
 *                  | functional-binding-pattern
 * <br/><br/>
 * capture-binding-pattern := variable-name
 * variable-name := identifier
 * <br/><br/>
 * wildcard-binding-pattern := _
 * list-binding-pattern := [ list-member-binding-patterns ]
 * <br/>
 * list-member-binding-patterns := binding-pattern (, binding-pattern)* [, rest-binding-pattern]
 *                               | [ rest-binding-pattern ]
 * <br/><br/>
 * mapping-binding-pattern := { field-binding-patterns }
 * field-binding-patterns := field-binding-pattern (, field-binding-pattern)* [, rest-binding-pattern]
 *                         | [ rest-binding-pattern ]
 * <br/>
 * field-binding-pattern := field-name : binding-pattern | variable-name
 * <br/>
 * rest-binding-pattern := ... variable-name
 * <br/><br/>
 * functional-binding-pattern := functionally-constructible-type-reference ( arg-list-binding-pattern )
 * <br/>
 * arg-list-binding-pattern := positional-arg-binding-patterns [, other-arg-binding-patterns]
 *                           | other-arg-binding-patterns
 * <br/>
 * positional-arg-binding-patterns := positional-arg-binding-pattern (, positional-arg-binding-pattern)*
 * <br/>
 * positional-arg-binding-pattern := binding-pattern
 * <br/>
 * other-arg-binding-patterns := named-arg-binding-patterns [, rest-binding-pattern]
 *                             | [rest-binding-pattern]
 * <br/>
 * named-arg-binding-patterns := named-arg-binding-pattern (, named-arg-binding-pattern)*
 * <br/>
 * named-arg-binding-pattern := arg-name = binding-pattern
 * </code>
 *
 * @return binding-pattern node
 */
private STNode parseBindingPattern() {
    switch (peek().kind) {
        case OPEN_BRACKET_TOKEN:
            return parseListBindingPattern();
        case IDENTIFIER_TOKEN:
            return parseBindingPatternStartsWithIdentifier();
        case OPEN_BRACE_TOKEN:
            return parseMappingBindingPattern();
        case ERROR_KEYWORD:
            return parseErrorBindingPattern();
        default:
            recover(peek(), ParserRuleContext.BINDING_PATTERN);
            return parseBindingPattern();
    }
}

// An identifier can start a capture/wildcard binding pattern, or - when followed by
// `(` - an error binding pattern with a missing `error` keyword.
private STNode parseBindingPatternStartsWithIdentifier() {
    STNode argNameOrBindingPattern =
            parseQualifiedIdentifier(ParserRuleContext.BINDING_PATTERN_STARTING_IDENTIFIER);
    STToken secondToken = peek();
    if (secondToken.kind == SyntaxKind.OPEN_PAREN_TOKEN) {
        startContext(ParserRuleContext.ERROR_BINDING_PATTERN);
        STNode errorKeyword = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.ERROR_KEYWORD,
                ParserRuleContext.ERROR_KEYWORD);
        return parseErrorBindingPattern(errorKeyword, argNameOrBindingPattern);
    }

    if (argNameOrBindingPattern.kind != SyntaxKind.SIMPLE_NAME_REFERENCE) {
        // A qualified name cannot be a capture binding pattern: substitute a missing
        // identifier and attach the invalid node to it.
        STNode identifier = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,
                ParserRuleContext.BINDING_PATTERN_STARTING_IDENTIFIER);
        identifier = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(identifier, argNameOrBindingPattern);
        return createCaptureOrWildcardBP(identifier);
    }

    return createCaptureOrWildcardBP(((STSimpleNameReferenceNode) argNameOrBindingPattern).name);
}

// `_` becomes a wildcard binding pattern; any other name a capture binding pattern.
private STNode createCaptureOrWildcardBP(STNode varName) {
    STNode bindingPattern;
    if (isWildcardBP(varName)) {
        bindingPattern = getWildcardBindingPattern(varName);
    } else {
        bindingPattern = STNodeFactory.createCaptureBindingPatternNode(varName);
    }
    return bindingPattern;
}

/**
 * Parse list-binding-patterns.
 * <p>
 * <code>
 * list-binding-pattern := [ list-member-binding-patterns ]
 * <br/>
 * list-member-binding-patterns := binding-pattern (, binding-pattern)* [, rest-binding-pattern]
 *                               | [ rest-binding-pattern ]
 * </code>
 *
 * @return list-binding-pattern node
 */
private STNode parseListBindingPattern() {
    startContext(ParserRuleContext.LIST_BINDING_PATTERN);
    STNode openBracket = parseOpenBracket();
    List<STNode> bindingPatternsList = new ArrayList<>();
    STNode listBindingPattern = parseListBindingPattern(openBracket, bindingPatternsList);
    endContext();
    return listBindingPattern;
}

private STNode parseListBindingPattern(STNode openBracket, List<STNode> bindingPatternsList) {
    if (isEndOfListBindingPattern(peek().kind) && bindingPatternsList.size() == 0) {
        // Empty list binding pattern: `[]`.
        STNode closeBracket = parseCloseBracket();
        STNode bindingPatternsNode = STNodeFactory.createNodeList(bindingPatternsList);
        return STNodeFactory.createListBindingPatternNode(openBracket, bindingPatternsNode, closeBracket);
    }
    STNode listBindingPatternMember = parseListBindingPatternMember();
    bindingPatternsList.add(listBindingPatternMember);
    STNode listBindingPattern = parseListBindingPattern(openBracket, listBindingPatternMember, bindingPatternsList);
    return listBindingPattern;
}

// Parse the remaining members after the first; a rest-binding-pattern terminates the list.
private STNode parseListBindingPattern(STNode openBracket, STNode firstMember, List<STNode> bindingPatterns) {
    STNode member = firstMember;
    STToken token = peek();
    STNode listBindingPatternRhs = null;
    while (!isEndOfListBindingPattern(token.kind) && member.kind != SyntaxKind.REST_BINDING_PATTERN) {
        listBindingPatternRhs = parseListBindingPatternMemberRhs();
        if (listBindingPatternRhs == null) {
            break;
        }

        bindingPatterns.add(listBindingPatternRhs);
        member = parseListBindingPatternMember();
        bindingPatterns.add(member);
        token = peek();
    }

    STNode closeBracket = parseCloseBracket();
    STNode bindingPatternsNode = STNodeFactory.createNodeList(bindingPatterns);
    return STNodeFactory.createListBindingPatternNode(openBracket, bindingPatternsNode, closeBracket);
}

// Parse the separator after a list binding pattern member, or return null at the end.
private STNode parseListBindingPatternMemberRhs() {
    switch (peek().kind) {
        case COMMA_TOKEN:
            return parseComma();
        case CLOSE_BRACKET_TOKEN:
            return null;
        default:
            recover(peek(), ParserRuleContext.LIST_BINDING_PATTERN_MEMBER_END);
            return parseListBindingPatternMemberRhs();
    }
}

// Returns true when the next token terminates a list binding pattern.
private boolean isEndOfListBindingPattern(SyntaxKind nextTokenKind) {
    switch (nextTokenKind) {
        case CLOSE_BRACKET_TOKEN:
        case EOF_TOKEN:
            return true;
        default:
            return false;
    }
}

/**
 * Parse list-binding-pattern member.
 * <p>
 * <code>
 * list-binding-pattern := [ list-member-binding-patterns ]
 * <br/>
 * list-member-binding-patterns := binding-pattern (, binding-pattern)* [, rest-binding-pattern]
 *                               | [ rest-binding-pattern ]
 * </code>
 *
 * @return List binding pattern member
 */
private STNode parseListBindingPatternMember() {
    switch (peek().kind) {
        case ELLIPSIS_TOKEN:
            return parseRestBindingPattern();
        case OPEN_BRACKET_TOKEN:
        case IDENTIFIER_TOKEN:
        case OPEN_BRACE_TOKEN:
        case ERROR_KEYWORD:
            return parseBindingPattern();
        default:
            recover(peek(), ParserRuleContext.LIST_BINDING_PATTERN_MEMBER);
            return parseListBindingPatternMember();
    }
}

/**
 * Parse rest binding pattern.
 * <p>
 * <code>
 * rest-binding-pattern := ... variable-name
 * </code>
 *
 * @return Rest binding pattern node
 */
private STNode parseRestBindingPattern() {
    startContext(ParserRuleContext.REST_BINDING_PATTERN);
    STNode ellipsis = parseEllipsis();
    STNode varName = parseVariableName();
    endContext();

    STSimpleNameReferenceNode simpleNameReferenceNode =
            (STSimpleNameReferenceNode) STNodeFactory.createSimpleNameReferenceNode(varName);
    return STNodeFactory.createRestBindingPatternNode(ellipsis, simpleNameReferenceNode);
}

/**
 * Parse Typed-binding-pattern.
 * <p>
 * <code>
 * typed-binding-pattern := inferable-type-descriptor binding-pattern
 * <br/><br/>
 * inferable-type-descriptor := type-descriptor | var
 * </code>
 *
 * @return Typed binding pattern node
 */
private STNode parseTypedBindingPattern(ParserRuleContext context) {
    List<STNode> typeDescQualifiers = new ArrayList<>();
    return parseTypedBindingPattern(typeDescQualifiers, context);
}

/**
 * Parse a typed-binding-pattern whose type-descriptor may carry qualifiers.
 *
 * @param qualifiers Type-descriptor qualifiers collected so far
 * @param context    Enclosing parser-rule context
 * @return Typed binding pattern node
 */
private STNode parseTypedBindingPattern(List<STNode> qualifiers, ParserRuleContext context) {
    STNode typeDesc = parseTypeDescriptor(qualifiers,
            ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true, false);
    STNode typeBindingPattern = parseTypedBindingPatternTypeRhs(typeDesc, context);
    return typeBindingPattern;
}

/**
 * Parse mapping-binding-patterns.
 * <p>
 * <code>
 * mapping-binding-pattern := { field-binding-patterns }
 * <br/><br/>
 * field-binding-patterns := field-binding-pattern (, field-binding-pattern)* [, rest-binding-pattern]
 *                         | [ rest-binding-pattern ]
 * <br/><br/>
 * field-binding-pattern := field-name : binding-pattern | variable-name
 * </code>
 *
 * @return mapping-binding-pattern node
 */
private STNode parseMappingBindingPattern() {
    startContext(ParserRuleContext.MAPPING_BINDING_PATTERN);
    STNode openBrace = parseOpenBrace();
    STToken token = peek();
    if (isEndOfMappingBindingPattern(token.kind)) {
        // Empty mapping-binding-pattern: {}
        STNode closeBrace = parseCloseBrace();
        STNode bindingPatternsNode = STNodeFactory.createEmptyNodeList();
        endContext();
        return STNodeFactory.createMappingBindingPatternNode(openBrace, bindingPatternsNode, closeBrace);
    }
    List<STNode> bindingPatterns = new ArrayList<>();
    STNode prevMember = parseMappingBindingPatternMember();
    if (prevMember.kind != SyntaxKind.REST_BINDING_PATTERN) {
        // A rest-binding-pattern is appended later, by the loop overload.
        bindingPatterns.add(prevMember);
    }
    return parseMappingBindingPattern(openBrace, bindingPatterns, prevMember);
}

/**
 * Parse the remaining members of a mapping-binding-pattern, given the first member.
 * A rest-binding-pattern terminates the loop (it must be last) and is appended
 * after the loop exits.
 *
 * @param openBrace       Already-consumed open brace
 * @param bindingPatterns Accumulator for parsed members
 * @param prevMember      Most recently parsed member
 * @return mapping-binding-pattern node
 */
private STNode parseMappingBindingPattern(STNode openBrace, List<STNode> bindingPatterns, STNode prevMember) {
    STToken token = peek();
    STNode mappingBindingPatternRhs = null;
    while (!isEndOfMappingBindingPattern(token.kind) && prevMember.kind != SyntaxKind.REST_BINDING_PATTERN) {
        mappingBindingPatternRhs = parseMappingBindingPatternEnd();
        if (mappingBindingPatternRhs == null) {
            // Reached the close brace: no more members.
            break;
        }
        bindingPatterns.add(mappingBindingPatternRhs);
        prevMember = parseMappingBindingPatternMember();
        if (prevMember.kind == SyntaxKind.REST_BINDING_PATTERN) {
            break;
        }
        bindingPatterns.add(prevMember);
        token = peek();
    }
    if (prevMember.kind == SyntaxKind.REST_BINDING_PATTERN) {
        // Rest-binding-pattern is added exactly once, after the loop.
        bindingPatterns.add(prevMember);
    }
    STNode closeBrace = parseCloseBrace();
    STNode bindingPatternsNode = STNodeFactory.createNodeList(bindingPatterns);
    endContext();
    return STNodeFactory.createMappingBindingPatternNode(openBrace, bindingPatternsNode, closeBrace);
}

/**
 * Parse mapping-binding-pattern entry.
 * <p>
 * <code>
 * mapping-binding-pattern := { field-binding-patterns }
 * <br/><br/>
 * field-binding-patterns := field-binding-pattern (, field-binding-pattern)* [, rest-binding-pattern]
 *                         | [ rest-binding-pattern ]
 * <br/><br/>
 * field-binding-pattern := field-name : binding-pattern
 *                        | variable-name
 * </code>
 *
 * @return mapping-binding-pattern node
 */
private STNode parseMappingBindingPatternMember() {
    STToken token = peek();
    switch (token.kind) {
        case ELLIPSIS_TOKEN:
            return parseRestBindingPattern();
        default:
            return parseFieldBindingPattern();
    }
}

/**
 * Parse the token that follows a mapping-binding-pattern member: either a comma
 * (more members follow) or the end of the mapping.
 *
 * @return Comma token, or {@code null} when the close brace is next
 */
private STNode parseMappingBindingPatternEnd() {
    switch (peek().kind) {
        case COMMA_TOKEN:
            return parseComma();
        case CLOSE_BRACE_TOKEN:
            return null;
        default:
            // Unexpected token: run error recovery, then retry.
            recover(peek(), ParserRuleContext.MAPPING_BINDING_PATTERN_END);
            return parseMappingBindingPatternEnd();
    }
}

/**
 * Parse field-binding-pattern.
 * <code>field-binding-pattern := field-name : binding-pattern | varname</code>
 *
 * @return field-binding-pattern node
 */
private STNode parseFieldBindingPattern() {
    switch (peek().kind) {
        case IDENTIFIER_TOKEN:
            STNode identifier = parseIdentifier(ParserRuleContext.FIELD_BINDING_PATTERN_NAME);
            STNode fieldBindingPattern = parseFieldBindingPattern(identifier);
            return fieldBindingPattern;
        default:
            // Unexpected token: run error recovery, then retry.
            recover(peek(), ParserRuleContext.FIELD_BINDING_PATTERN_NAME);
            return parseFieldBindingPattern();
    }
}

/**
 * Parse the remainder of a field-binding-pattern, after the leading identifier.
 * With a following colon this is the full form (field-name : binding-pattern);
 * otherwise the identifier alone is the varname form.
 *
 * @param identifier Already-parsed field name / variable name
 * @return field-binding-pattern node
 */
private STNode parseFieldBindingPattern(STNode identifier) {
    STNode simpleNameReference = STNodeFactory.createSimpleNameReferenceNode(identifier);
    if (peek().kind != SyntaxKind.COLON_TOKEN) {
        return STNodeFactory.createFieldBindingPatternVarnameNode(simpleNameReference);
    }
    STNode colon = parseColon();
    STNode bindingPattern = parseBindingPattern();
    return STNodeFactory.createFieldBindingPatternFullNode(simpleNameReference, colon, bindingPattern);
}

/**
 * Check whether the given token kind terminates a mapping-binding-pattern.
 *
 * @param nextTokenKind Kind of the next token
 * @return {@code true} for close brace, or at the end of a module-level node
 */
private boolean isEndOfMappingBindingPattern(SyntaxKind nextTokenKind) {
    return nextTokenKind == SyntaxKind.CLOSE_BRACE_TOKEN || endOfModuleLevelNode(1);
}

/**
 * Disambiguate a statement starting with the <code>error</code> keyword: it may be
 * an error-binding-pattern (assignment LHS) or an error type-descriptor (var-decl).
 * Decided by limited lookahead: <code>error(</code> or <code>error id:</code> /
 * <code>error id(</code> start a binding pattern; <code>error&lt;</code> starts a type-desc.
 *
 * @param annots Annotations attached to the statement
 * @return Parsed statement node
 */
private STNode parseErrorTypeDescOrErrorBP(STNode annots) {
    STToken nextNextToken = peek(2);
    switch (nextNextToken.kind) {
        case OPEN_PAREN_TOKEN:
            return parseAsErrorBindingPattern();
        case LT_TOKEN:
            return parseAsErrorTypeDesc(annots);
        case IDENTIFIER_TOKEN:
            SyntaxKind nextNextNextTokenKind = peek(3).kind;
            if (nextNextNextTokenKind == SyntaxKind.COLON_TOKEN ||
                    nextNextNextTokenKind == SyntaxKind.OPEN_PAREN_TOKEN) {
                return parseAsErrorBindingPattern();
            }
            // else fall through: treat as a type-desc.
        default:
            return parseAsErrorTypeDesc(annots);
    }
}

/** Parse as an assignment statement whose LHS is an error-binding-pattern. */
private STNode parseAsErrorBindingPattern() {
    startContext(ParserRuleContext.ASSIGNMENT_STMT);
    return parseAssignmentStmtRhs(parseErrorBindingPattern());
}

/** Parse as a variable declaration whose type-desc starts with the error keyword. */
private STNode parseAsErrorTypeDesc(STNode annots) {
    STNode finalKeyword = STNodeFactory.createEmptyNode();
    return parseVariableDecl(getAnnotations(annots), finalKeyword);
}

/**
 * Parse error binding pattern node.
 * <p>
 * <code>error-binding-pattern := error [error-type-reference] ( error-arg-list-binding-pattern )</code>
 * <br/><br/>
 * error-arg-list-binding-pattern :=
 *     error-message-binding-pattern [, error-cause-binding-pattern] [, error-field-binding-patterns]
 *     | [error-field-binding-patterns]
 * <br/><br/>
 * error-message-binding-pattern := simple-binding-pattern
 * <br/><br/>
 * error-cause-binding-pattern := simple-binding-pattern | error-binding-pattern
 * <br/><br/>
 * simple-binding-pattern := capture-binding-pattern | wildcard-binding-pattern
 * <br/><br/>
 * error-field-binding-patterns :=
 *     named-arg-binding-pattern (, named-arg-binding-pattern)* [, rest-binding-pattern]
 *     | rest-binding-pattern
 * <br/><br/>
 * named-arg-binding-pattern := arg-name = binding-pattern
 *
 * @return Error binding pattern node.
 */
private STNode parseErrorBindingPattern() {
    startContext(ParserRuleContext.ERROR_BINDING_PATTERN);
    STNode errorKeyword = parseErrorKeyword();
    return parseErrorBindingPattern(errorKeyword);
}

/**
 * Parse the optional error-type-reference of an error-binding-pattern.
 *
 * @param errorKeyword Already-consumed <code>error</code> keyword
 * @return Error binding pattern node
 */
private STNode parseErrorBindingPattern(STNode errorKeyword) {
    STToken nextToken = peek();
    STNode typeRef;
    switch (nextToken.kind) {
        case OPEN_PAREN_TOKEN:
            // No type reference: error(...)
            typeRef = STNodeFactory.createEmptyNode();
            break;
        default:
            if (isPredeclaredIdentifier(nextToken.kind)) {
                typeRef = parseTypeReference();
                break;
            }
            // Unexpected token: run error recovery, then retry.
            recover(peek(), ParserRuleContext.ERROR_BINDING_PATTERN_ERROR_KEYWORD_RHS);
            return parseErrorBindingPattern(errorKeyword);
    }
    return parseErrorBindingPattern(errorKeyword, typeRef);
}

/**
 * Parse the parenthesized arg-list of an error-binding-pattern.
 *
 * @param errorKeyword Already-consumed <code>error</code> keyword
 * @param typeRef      Optional error-type-reference (may be an empty node)
 * @return Error binding pattern node
 */
private STNode parseErrorBindingPattern(STNode errorKeyword, STNode typeRef) {
    STNode openParenthesis = parseOpenParenthesis();
    STNode argListBindingPatterns = parseErrorArgListBindingPatterns();
    STNode closeParenthesis = parseCloseParenthesis();
    endContext();
    return STNodeFactory.createErrorBindingPatternNode(errorKeyword, typeRef, openParenthesis,
            argListBindingPatterns, closeParenthesis);
}

/**
 * Parse error arg list binding pattern.
 * <p>
 * <code>
 * error-arg-list-binding-pattern :=
 *     error-message-binding-pattern [, error-cause-binding-pattern] [, error-field-binding-patterns]
 *     | [error-field-binding-patterns]
 * <br/><br/>
 * error-message-binding-pattern := simple-binding-pattern
 * <br/><br/>
 * error-cause-binding-pattern := simple-binding-pattern | error-binding-pattern
 * <br/><br/>
 * simple-binding-pattern := capture-binding-pattern | wildcard-binding-pattern
 * <br/><br/>
 * error-field-binding-patterns :=
 *     named-arg-binding-pattern (, named-arg-binding-pattern)* [, rest-binding-pattern]
 *     | rest-binding-pattern
 * <br/><br/>
 * named-arg-binding-pattern := arg-name = binding-pattern
 * </code>
 *
 * @return Error arg list binding patterns.
 */
private STNode parseErrorArgListBindingPatterns() {
    List<STNode> argListBindingPatterns = new ArrayList<>();
    if (isEndOfErrorFieldBindingPatterns()) {
        // error(): empty arg-list.
        return STNodeFactory.createNodeList(argListBindingPatterns);
    }
    return parseErrorArgListBindingPatterns(argListBindingPatterns);
}

/**
 * Parse the first error-arg-list member and dispatch to the appropriate
 * continuation based on its kind: a simple binding pattern is the message, an
 * error-binding-pattern is a cause with a missing message (diagnostics added),
 * and named-arg/rest patterns skip straight to the field section.
 *
 * @param argListBindingPatterns Accumulator for parsed args (empty on entry)
 * @return Error arg list binding patterns node list
 */
private STNode parseErrorArgListBindingPatterns(List<STNode> argListBindingPatterns) {
    STNode firstArg =
            parseErrorArgListBindingPattern(ParserRuleContext.ERROR_ARG_LIST_BINDING_PATTERN_START, true);
    if (firstArg == null) {
        return STNodeFactory.createNodeList(argListBindingPatterns);
    }
    switch (firstArg.kind) {
        case CAPTURE_BINDING_PATTERN:
        case WILDCARD_BINDING_PATTERN:
            // First arg is the error-message-binding-pattern.
            argListBindingPatterns.add(firstArg);
            return parseErrorArgListBPWithoutErrorMsg(argListBindingPatterns);
        case ERROR_BINDING_PATTERN:
            // First arg is a cause without a message: synthesize the missing
            // message binding pattern and comma, with diagnostics.
            STNode missingIdentifier = SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
            STNode missingErrorMsgBP = STNodeFactory.createCaptureBindingPatternNode(missingIdentifier);
            missingErrorMsgBP = SyntaxErrors.addDiagnostic(missingErrorMsgBP,
                    DiagnosticErrorCode.ERROR_MISSING_ERROR_MESSAGE_BINDING_PATTERN);
            STNode missingComma = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.COMMA_TOKEN,
                    DiagnosticErrorCode.ERROR_MISSING_COMMA_TOKEN);
            argListBindingPatterns.add(missingErrorMsgBP);
            argListBindingPatterns.add(missingComma);
            argListBindingPatterns.add(firstArg);
            return parseErrorArgListBPWithoutErrorMsgAndCause(argListBindingPatterns, firstArg.kind);
        case REST_BINDING_PATTERN:
        case NAMED_ARG_BINDING_PATTERN:
            // No message or cause; straight into the field section.
            argListBindingPatterns.add(firstArg);
            return parseErrorArgListBPWithoutErrorMsgAndCause(argListBindingPatterns, firstArg.kind);
        default:
            // Not allowed as the first arg: attach as invalid minutiae and restart.
            addInvalidNodeToNextToken(firstArg, DiagnosticErrorCode.ERROR_BINDING_PATTERN_NOT_ALLOWED);
            return parseErrorArgListBindingPatterns(argListBindingPatterns);
    }
}

/**
 * Parse the error-arg-list continuation after the message binding pattern:
 * the optional cause, then the field section.
 *
 * @param argListBindingPatterns Accumulator holding the message arg
 * @return Error arg list binding patterns node list
 */
private STNode parseErrorArgListBPWithoutErrorMsg(List<STNode> argListBindingPatterns) {
    STNode argEnd = parseErrorArgsBindingPatternEnd(ParserRuleContext.ERROR_MESSAGE_BINDING_PATTERN_END);
    if (argEnd == null) {
        // Close paren next: arg list is complete.
        return STNodeFactory.createNodeList(argListBindingPatterns);
    }
    STNode secondArg = parseErrorArgListBindingPattern(ParserRuleContext.ERROR_MESSAGE_BINDING_PATTERN_RHS, false);
    assert secondArg != null;
    switch (secondArg.kind) {
        case CAPTURE_BINDING_PATTERN:
        case WILDCARD_BINDING_PATTERN:
        case ERROR_BINDING_PATTERN:
        case REST_BINDING_PATTERN:
        case NAMED_ARG_BINDING_PATTERN:
            argListBindingPatterns.add(argEnd);
            argListBindingPatterns.add(secondArg);
            return parseErrorArgListBPWithoutErrorMsgAndCause(argListBindingPatterns, secondArg.kind);
        default:
            // Invalid second arg: attach both separator and arg as invalid minutiae.
            updateLastNodeInListWithInvalidNode(argListBindingPatterns, argEnd, null);
            updateLastNodeInListWithInvalidNode(argListBindingPatterns, secondArg,
                    DiagnosticErrorCode.ERROR_BINDING_PATTERN_NOT_ALLOWED);
            return parseErrorArgListBPWithoutErrorMsg(argListBindingPatterns);
    }
}

/**
 * Parse the error-field-binding-patterns section (named args, then an optional
 * rest-binding-pattern). Invalid or out-of-order args are attached as invalid
 * minutiae with diagnostics rather than added to the list.
 *
 * @param argListBindingPatterns Accumulator for parsed args
 * @param lastValidArgKind       Kind of the last successfully added arg
 * @return Error arg list binding patterns node list
 */
private STNode parseErrorArgListBPWithoutErrorMsgAndCause(List<STNode> argListBindingPatterns,
                                                          SyntaxKind lastValidArgKind) {
    while (!isEndOfErrorFieldBindingPatterns()) {
        STNode argEnd = parseErrorArgsBindingPatternEnd(ParserRuleContext.ERROR_FIELD_BINDING_PATTERN_END);
        if (argEnd == null) {
            // Reached the close paren.
            break;
        }
        STNode currentArg = parseErrorArgListBindingPattern(ParserRuleContext.ERROR_FIELD_BINDING_PATTERN, false);
        assert currentArg != null;
        DiagnosticErrorCode errorCode = validateErrorFieldBindingPatternOrder(lastValidArgKind, currentArg.kind);
        if (errorCode == null) {
            argListBindingPatterns.add(argEnd);
            argListBindingPatterns.add(currentArg);
            lastValidArgKind = currentArg.kind;
        } else if (argListBindingPatterns.size() == 0) {
            // Nothing valid collected yet: attach to the next token instead.
            addInvalidNodeToNextToken(argEnd, null);
            addInvalidNodeToNextToken(currentArg, errorCode);
        } else {
            updateLastNodeInListWithInvalidNode(argListBindingPatterns, argEnd, null);
            updateLastNodeInListWithInvalidNode(argListBindingPatterns, currentArg, errorCode);
        }
    }
    return STNodeFactory.createNodeList(argListBindingPatterns);
}

/**
 * Check whether the error-arg-list has ended.
 *
 * @return {@code true} for close paren or EOF
 */
private boolean isEndOfErrorFieldBindingPatterns() {
    SyntaxKind nextTokenKind = peek().kind;
    switch (nextTokenKind) {
        case CLOSE_PAREN_TOKEN:
        case EOF_TOKEN:
            return true;
        default:
            return false;
    }
}

/**
 * Parse the separator after an error-arg-list member.
 *
 * @param currentCtx Context used for error recovery
 * @return Comma token, or {@code null} when the close paren is next
 */
private STNode parseErrorArgsBindingPatternEnd(ParserRuleContext currentCtx) {
    switch (peek().kind) {
        case COMMA_TOKEN:
            return consume();
        case CLOSE_PAREN_TOKEN:
            return null;
        default:
            // Unexpected token: run error recovery, then retry.
            recover(peek(), currentCtx);
            return parseErrorArgsBindingPatternEnd(currentCtx);
    }
}

/**
 * Parse a single error-arg-list member: a rest-binding-pattern, a named-arg or
 * simple binding pattern (identifier-led), or any other binding pattern.
 *
 * @param context    Context used for error recovery
 * @param isFirstArg Whether this is the first arg (an empty arg-list is then legal)
 * @return Parsed member, or {@code null} for an empty first arg
 */
private STNode parseErrorArgListBindingPattern(ParserRuleContext context, boolean isFirstArg) {
    switch (peek().kind) {
        case ELLIPSIS_TOKEN:
            return parseRestBindingPattern();
        case IDENTIFIER_TOKEN:
            // Identifier can start either a named arg (name = bp) or a simple BP.
            STNode argNameOrSimpleBindingPattern = consume();
            return parseNamedOrSimpleArgBindingPattern(argNameOrSimpleBindingPattern);
        case OPEN_BRACKET_TOKEN:
        case OPEN_BRACE_TOKEN:
        case ERROR_KEYWORD:
            return parseBindingPattern();
        case CLOSE_PAREN_TOKEN:
            if (isFirstArg) {
                // Empty arg list.
                return null;
            }
            // else fall through to recovery.
        default:
            recover(peek(), context);
            return parseErrorArgListBindingPattern(context, isFirstArg);
    }
}

/**
 * Disambiguate an identifier-led error-arg-list member: an equals token makes it
 * a named-arg-binding-pattern; otherwise it is a capture or wildcard BP.
 *
 * @param argNameOrSimpleBindingPattern Already-consumed identifier
 * @return Named-arg binding pattern, or capture/wildcard binding pattern
 */
private STNode parseNamedOrSimpleArgBindingPattern(STNode argNameOrSimpleBindingPattern) {
    STToken secondToken = peek();
    switch (secondToken.kind) {
        case EQUAL_TOKEN:
            STNode equal = consume();
            STNode bindingPattern = parseBindingPattern();
            return STNodeFactory.createNamedArgBindingPatternNode(argNameOrSimpleBindingPattern, equal,
                    bindingPattern);
        case COMMA_TOKEN:
        case CLOSE_PAREN_TOKEN:
        default:
            return createCaptureOrWildcardBP(argNameOrSimpleBindingPattern);
    }
}

/**
 * Validate ordering inside the error-field-binding-patterns section: only
 * named-arg and rest patterns are allowed, and nothing may follow a rest pattern.
 *
 * @param prevArgKind    Kind of the previous valid arg
 * @param currentArgKind Kind of the current arg
 * @return {@code null} when valid, otherwise the diagnostic to attach
 */
private DiagnosticErrorCode validateErrorFieldBindingPatternOrder(SyntaxKind prevArgKind,
                                                                  SyntaxKind currentArgKind) {
    switch (currentArgKind) {
        case NAMED_ARG_BINDING_PATTERN:
        case REST_BINDING_PATTERN:
            if (prevArgKind == SyntaxKind.REST_BINDING_PATTERN) {
                return DiagnosticErrorCode.ERROR_REST_ARG_FOLLOWED_BY_ANOTHER_ARG;
            }
            return null;
        case CAPTURE_BINDING_PATTERN:
        case WILDCARD_BINDING_PATTERN:
        case ERROR_BINDING_PATTERN:
        case LIST_BINDING_PATTERN:
        case MAPPING_BINDING_PATTERN:
        default:
            return DiagnosticErrorCode.ERROR_BINDING_PATTERN_NOT_ALLOWED;
    }
}

/*
 * This parses Typed binding patterns and deals with ambiguity between types,
 * and binding patterns. An example is 'T[a]'.
 * The ambiguity lies in between:
 * 1) Array Type
 * 2) List binding pattern
 * 3) Member access expression.
 */

/**
 * Parse the component after the type-desc, of a typed-binding-pattern.
 *
 * @param typeDesc Starting type-desc of the typed-binding-pattern
 * @return Typed-binding pattern
 */
private STNode parseTypedBindingPatternTypeRhs(STNode typeDesc, ParserRuleContext context) {
    return parseTypedBindingPatternTypeRhs(typeDesc, context, true);
}

/**
 * Parse the binding pattern that follows a type-desc. When {@code isRoot} is
 * {@code false} and a list/paren/brace terminator follows, the type-desc alone is
 * returned so an enclosing production can continue.
 *
 * @param typeDesc Starting type-desc
 * @param context  Enclosing parser-rule context
 * @param isRoot   Whether this is the root of the typed-binding-pattern
 * @return Typed-binding pattern, or the bare type-desc when not at the root
 */
private STNode parseTypedBindingPatternTypeRhs(STNode typeDesc, ParserRuleContext context, boolean isRoot) {
    switch (peek().kind) {
        case IDENTIFIER_TOKEN:
        case OPEN_BRACE_TOKEN:
        case ERROR_KEYWORD:
            STNode bindingPattern = parseBindingPattern();
            return STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);
        case OPEN_BRACKET_TOKEN:
            // T[...] - ambiguous between array type and list-binding-pattern.
            STNode typedBindingPattern = parseTypedBindingPatternOrMemberAccess(typeDesc, true, true, context);
            assert typedBindingPattern.kind == SyntaxKind.TYPED_BINDING_PATTERN;
            return typedBindingPattern;
        case CLOSE_PAREN_TOKEN:
        case COMMA_TOKEN:
        case CLOSE_BRACKET_TOKEN:
        case CLOSE_BRACE_TOKEN:
            if (!isRoot) {
                return typeDesc;
            }
            // else fall through to recovery.
        default:
            recover(peek(), ParserRuleContext.TYPED_BINDING_PATTERN_TYPE_RHS, typeDesc, context, isRoot);
            return parseTypedBindingPatternTypeRhs(typeDesc, context, isRoot);
    }
}

/**
 * Parse typed-binding pattern with list, array-type-desc, or member-access-expr.
 *
 * @param typeDescOrExpr        Type desc or the expression at the start
 * @param isTypedBindingPattern Is this is a typed-binding-pattern. If this is
 *                              {@code false}, then it's still ambiguous
 * @param allowAssignment       Whether an assignment RHS may follow
 * @param context               Enclosing parser-rule context
 * @return Parsed node
 */
private STNode parseTypedBindingPatternOrMemberAccess(STNode typeDescOrExpr, boolean isTypedBindingPattern,
                                                      boolean allowAssignment, ParserRuleContext context) {
    startContext(ParserRuleContext.BRACKETED_LIST);
    STNode openBracket = parseOpenBracket();
    if (isBracketedListEnd(peek().kind)) {
        // "T[]" - must be an array type descriptor.
        return parseAsArrayTypeDesc(typeDescOrExpr, openBracket, STNodeFactory.createEmptyNode(), context);
    }
    STNode member = parseBracketedListMember(isTypedBindingPattern);
    SyntaxKind currentNodeType = getBracketedListNodeType(member, isTypedBindingPattern);
    switch (currentNodeType) {
        case ARRAY_TYPE_DESC:
            STNode typedBindingPattern = parseAsArrayTypeDesc(typeDescOrExpr, openBracket, member, context);
            return typedBindingPattern;
        case LIST_BINDING_PATTERN:
            STNode bindingPattern = parseAsListBindingPattern(openBracket, new ArrayList<>(), member, false);
            STNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);
            return STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);
        case INDEXED_EXPRESSION:
            return parseAsMemberAccessExpr(typeDescOrExpr, openBracket, member);
        case ARRAY_TYPE_DESC_OR_MEMBER_ACCESS:
            // Still ambiguous; decide after the close bracket.
            break;
        case NONE:
        default:
            STNode memberEnd = parseBracketedListMemberEnd();
            if (memberEnd != null) {
                // A comma means more than one member: must be a list-binding-pattern.
                List<STNode> memberList = new ArrayList<>();
                memberList.add(getBindingPattern(member));
                memberList.add(memberEnd);
                bindingPattern = parseAsListBindingPattern(openBracket, memberList);
                typeDesc = getTypeDescFromExpr(typeDescOrExpr);
                return STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);
            }
    }
    STNode closeBracket = parseCloseBracket();
    endContext();
    return parseTypedBindingPatternOrMemberAccessRhs(typeDescOrExpr, openBracket, member, closeBracket,
            isTypedBindingPattern, allowAssignment, context);
}

/**
 * Treat the ambiguous bracketed construct as a member-access expression and
 * continue parsing the surrounding expression.
 *
 * @param typeNameOrExpr Container expression before the open bracket
 * @param openBracket    Already-consumed open bracket
 * @param member         Key expression parsed so far
 * @return Parsed expression node
 */
private STNode parseAsMemberAccessExpr(STNode typeNameOrExpr, STNode openBracket, STNode member) {
    member = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, member, false, true);
    STNode closeBracket = parseCloseBracket();
    endContext();
    STNode keyExpr = STNodeFactory.createNodeList(member);
    STNode memberAccessExpr =
            STNodeFactory.createIndexedExpressionNode(typeNameOrExpr, openBracket, keyExpr, closeBracket);
    return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, memberAccessExpr, false, false);
}

/**
 * Check whether the given token kind terminates a bracketed list.
 *
 * @param nextTokenKind Kind of the next token
 * @return {@code true} for EOF or close bracket
 */
private boolean isBracketedListEnd(SyntaxKind nextTokenKind) {
    switch (nextTokenKind) {
        case EOF_TOKEN:
        case CLOSE_BRACKET_TOKEN:
            return true;
        default:
            return false;
    }
}

/**
 * Parse a member of an ambiguous bracketed list. This member could be:
 * 1) Array length
 * 2) Key expression of a member-access-expr
 * 3) A member-binding pattern of a list-binding-pattern.
 *
 * @param isTypedBindingPattern Is this in a definite typed-binding pattern
 * @return Parsed member node
 */
private STNode parseBracketedListMember(boolean isTypedBindingPattern) {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case DECIMAL_INTEGER_LITERAL_TOKEN:
        case HEX_INTEGER_LITERAL_TOKEN:
        case ASTERISK_TOKEN:
        case STRING_LITERAL_TOKEN:
            return parseBasicLiteral();
        case CLOSE_BRACKET_TOKEN:
            return STNodeFactory.createEmptyNode();
        case OPEN_BRACE_TOKEN:
        case ERROR_KEYWORD:
        case ELLIPSIS_TOKEN:
        case OPEN_BRACKET_TOKEN:
            return parseStatementStartBracketedListMember();
        case IDENTIFIER_TOKEN:
            if (isTypedBindingPattern) {
                return parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);
            }
            break;
        default:
            if ((!isTypedBindingPattern && isValidExpressionStart(nextToken.kind, 1)) ||
                    isQualifiedIdentifierPredeclaredPrefix(nextToken.kind)) {
                break;
            }
            ParserRuleContext recoverContext =
                    isTypedBindingPattern ? ParserRuleContext.LIST_BINDING_MEMBER_OR_ARRAY_LENGTH
                            : ParserRuleContext.BRACKETED_LIST_MEMBER;
            recover(peek(), recoverContext, isTypedBindingPattern);
            return parseBracketedListMember(isTypedBindingPattern);
    }
    STNode expr = parseExpression();
    if (isWildcardBP(expr)) {
        return getWildcardBindingPattern(expr);
    }
    return expr;
}

/**
 * Treat the current node as an array, and parse the remainder of the binding pattern.
 *
 * @param typeDesc    Type-desc
 * @param openBracket Open bracket
 * @param member      Member
 * @param context     Enclosing parser-rule context
 * @return Parsed node
 */
private STNode parseAsArrayTypeDesc(STNode typeDesc, STNode openBracket, STNode member,
                                    ParserRuleContext context) {
    typeDesc = getTypeDescFromExpr(typeDesc);
    // Re-enter type-desc parsing: the bracketed construct is an array dimension.
    switchContext(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN);
    startContext(ParserRuleContext.ARRAY_TYPE_DESCRIPTOR);
    STNode closeBracket = parseCloseBracket();
    endContext();
    endContext();
    return parseTypedBindingPatternOrMemberAccessRhs(typeDesc, openBracket, member, closeBracket, true, true,
            context);
}

/**
 * Parse the separator after an ambiguous bracketed-list member.
 *
 * @return Comma token, or {@code null} when the close bracket is next
 */
private STNode parseBracketedListMemberEnd() {
    switch (peek().kind) {
        case COMMA_TOKEN:
            return parseComma();
        case CLOSE_BRACKET_TOKEN:
            return null;
        default:
            // Unexpected token: run error recovery, then retry.
            recover(peek(), ParserRuleContext.BRACKETED_LIST_MEMBER_END);
            return parseBracketedListMemberEnd();
    }
}

/**
 * We reach here to break ambiguity of T[a]. This could be:
 * 1) Array Type Desc
 * 2) Member access on LHS
 * 3) Typed-binding-pattern
 *
 * @param typeDescOrExpr        Type name or the expr that precede the open-bracket.
 * @param openBracket           Open bracket
 * @param member                Member
 * @param closeBracket          Open bracket
 * @param isTypedBindingPattern Is this is a typed-binding-pattern.
 * @param allowAssignment       Whether an assignment RHS may follow
 * @param context               Enclosing parser-rule context
 * @return Specific node that matches to T[a], after solving ambiguity.
 */
private STNode parseTypedBindingPatternOrMemberAccessRhs(STNode typeDescOrExpr, STNode openBracket, STNode member,
                                                         STNode closeBracket, boolean isTypedBindingPattern,
                                                         boolean allowAssignment, ParserRuleContext context) {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case IDENTIFIER_TOKEN:
        case OPEN_BRACE_TOKEN:
        case ERROR_KEYWORD:
            // A binding pattern follows: T[a] was an array type-desc.
            STNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);
            STNode arrayTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, typeDesc);
            return parseTypedBindingPatternTypeRhs(arrayTypeDesc, context);
        case OPEN_BRACKET_TOKEN:
            // T[a][b]...
            if (isTypedBindingPattern) {
                typeDesc = getTypeDescFromExpr(typeDescOrExpr);
                arrayTypeDesc = createArrayTypeDesc(typeDesc, openBracket, member, closeBracket);
                return parseTypedBindingPatternTypeRhs(arrayTypeDesc, context);
            }
            // Still ambiguous: treat as member access so far and recurse.
            STNode keyExpr = getKeyExpr(member);
            STNode expr = STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr,
                    closeBracket);
            return parseTypedBindingPatternOrMemberAccess(expr, false, allowAssignment, context);
        case QUESTION_MARK_TOKEN:
            // T[a]? --> optional array type-desc.
            typeDesc = getTypeDescFromExpr(typeDescOrExpr);
            arrayTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, typeDesc);
            typeDesc = parseComplexTypeDescriptor(arrayTypeDesc,
                    ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);
            return parseTypedBindingPatternTypeRhs(typeDesc, context);
        case PIPE_TOKEN:
        case BITWISE_AND_TOKEN:
            // T[a] | ... or T[a] & ... - union/intersection ambiguity.
            return parseComplexTypeDescInTypedBPOrExprRhs(typeDescOrExpr, openBracket, member, closeBracket,
                    isTypedBindingPattern);
        case IN_KEYWORD:
            // "in" is only valid in foreach/from-clause typed-binding-patterns.
            if (context != ParserRuleContext.FOREACH_STMT && context != ParserRuleContext.FROM_CLAUSE) {
                break;
            }
            return createTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket);
        case EQUAL_TOKEN:
            if (context == ParserRuleContext.FOREACH_STMT || context == ParserRuleContext.FROM_CLAUSE) {
                // "in" keyword expected instead of "=" here.
                break;
            }
            if (isTypedBindingPattern || !allowAssignment || !isValidLVExpr(typeDescOrExpr)) {
                return createTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket);
            }
            // Otherwise it is a member-access LHS of an assignment.
            keyExpr = getKeyExpr(member);
            typeDescOrExpr = getExpression(typeDescOrExpr);
            return STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr, closeBracket);
        case SEMICOLON_TOKEN:
            if (context == ParserRuleContext.FOREACH_STMT || context == ParserRuleContext.FROM_CLAUSE) {
                break;
            }
            return createTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket);
        case CLOSE_BRACE_TOKEN:
        case COMMA_TOKEN:
            if (context == ParserRuleContext.AMBIGUOUS_STMT) {
                keyExpr = getKeyExpr(member);
                return STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr,
                        closeBracket);
            }
            // else fall through.
        default:
            if (!isTypedBindingPattern && isValidExprRhsStart(nextToken.kind, closeBracket.kind)) {
                keyExpr = getKeyExpr(member);
                typeDescOrExpr = getExpression(typeDescOrExpr);
                return STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr,
                        closeBracket);
            }
            break;
    }
    ParserRuleContext recoveryCtx = ParserRuleContext.BRACKETED_LIST_RHS;
    if (isTypedBindingPattern) {
        recoveryCtx = ParserRuleContext.TYPE_DESC_RHS_OR_BP_RHS;
    }
    recover(peek(), recoveryCtx, typeDescOrExpr, openBracket, member, closeBracket, isTypedBindingPattern,
            allowAssignment, context);
    return parseTypedBindingPatternOrMemberAccessRhs(typeDescOrExpr, openBracket, member, closeBracket,
            isTypedBindingPattern, allowAssignment, context);
}

/**
 * Wrap the given member as a member-access key-expression list. A {@code null}
 * member becomes a missing identifier with a diagnostic.
 *
 * @param member Key expression, possibly {@code null}
 * @return Node list containing the key expression
 */
private STNode getKeyExpr(STNode member) {
    if (member == null) {
        STToken keyIdentifier = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,
                DiagnosticErrorCode.ERROR_MISSING_KEY_EXPR_IN_MEMBER_ACCESS_EXPR);
        STNode missingVarRef = STNodeFactory.createSimpleNameReferenceNode(keyIdentifier);
        return STNodeFactory.createNodeList(missingVarRef);
    }
    return STNodeFactory.createNodeList(member);
}

/**
 * Materialize the resolved ambiguity as a typed-binding-pattern: the bracketed
 * member becomes either an array length (numeric/asterisk literal, with a missing
 * variable name diagnostic) or a list-binding-pattern member.
 *
 * @param typeDescOrExpr Type-desc or expression before the open bracket
 * @param openBracket    Open bracket
 * @param member         Bracketed member, possibly empty
 * @param closeBracket   Close bracket
 * @return Typed-binding-pattern node
 */
private STNode createTypedBindingPattern(STNode typeDescOrExpr, STNode openBracket, STNode member,
                                         STNode closeBracket) {
    STNode bindingPatterns = STNodeFactory.createEmptyNodeList();
    if (!isEmpty(member)) {
        SyntaxKind memberKind = member.kind;
        if (memberKind == SyntaxKind.NUMERIC_LITERAL || memberKind == SyntaxKind.ASTERISK_LITERAL) {
            // Member was an array length: T[5] / T[*] - variable name is missing.
            STNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);
            STNode arrayTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, typeDesc);
            STToken identifierToken = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,
                    DiagnosticErrorCode.ERROR_MISSING_VARIABLE_NAME);
            STNode variableName = STNodeFactory.createCaptureBindingPatternNode(identifierToken);
            return STNodeFactory.createTypedBindingPatternNode(arrayTypeDesc, variableName);
        }
        if (member.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {
            // Qualified refs are not valid list-BP members: attach as invalid minutiae.
            openBracket = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(openBracket, member,
                    DiagnosticErrorCode.ERROR_FIELD_BP_INSIDE_LIST_BP);
        } else {
            STNode bindingPattern = getBindingPattern(member);
            bindingPatterns = STNodeFactory.createNodeList(bindingPattern);
        }
    }
    STNode bindingPattern = STNodeFactory.createListBindingPatternNode(openBracket, bindingPatterns, closeBracket);
    STNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);
    return STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);
}

/**
 * Parse a union or intersection type-desc/binary-expression that involves ambiguous
 * bracketed list in lhs.
 * <p>
 * e.g: <code>(T[a] &amp; R..)</code> or <code>(T[a] | R.. )</code>
 * <p>
 * Complexity occurs in scenarios such as <code>T[a] |/&amp; R[b]</code>. If the token after this
 * is another binding-pattern, then <code>(T[a] |/&amp; R[b])</code> becomes the type-desc. However,
 * if the token follows this is an equal or semicolon, then <code>(T[a] |/&amp; R)</code> becomes
 * the type-desc, and <code>[b]</code> becomes the binding pattern.
 *
 * @param typeDescOrExpr Type desc or the expression
 * @param openBracket    Open bracket
 * @param member         Member
 * @param closeBracket   Close bracket
 * @return Parsed node
 */
private STNode parseComplexTypeDescInTypedBPOrExprRhs(STNode typeDescOrExpr, STNode openBracket, STNode member,
                                                      STNode closeBracket, boolean isTypedBindingPattern) {
    STNode pipeOrAndToken = parseUnionOrIntersectionToken();
    STNode typedBindingPatternOrExpr = parseTypedBindingPatternOrExpr(false);
    if (typedBindingPatternOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) {
        // RHS resolved to a typed-BP: fold the LHS into the union/intersection type.
        STNode lhsTypeDesc = getTypeDescFromExpr(typeDescOrExpr);
        lhsTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, lhsTypeDesc);
        STTypedBindingPatternNode rhsTypedBindingPattern = (STTypedBindingPatternNode) typedBindingPatternOrExpr;
        STNode newTypeDesc;
        if (pipeOrAndToken.kind == SyntaxKind.PIPE_TOKEN) {
            newTypeDesc = createUnionTypeDesc(lhsTypeDesc, pipeOrAndToken, rhsTypedBindingPattern.typeDescriptor);
        } else {
            newTypeDesc =
                    createIntersectionTypeDesc(lhsTypeDesc, pipeOrAndToken, rhsTypedBindingPattern.typeDescriptor);
        }
        return STNodeFactory.createTypedBindingPatternNode(newTypeDesc, rhsTypedBindingPattern.bindingPattern);
    }
    if (isTypedBindingPattern) {
        // Definitely a typed-BP but no binding pattern found: report missing var name.
        STNode lhsTypeDesc = getTypeDescFromExpr(typeDescOrExpr);
        lhsTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, lhsTypeDesc);
        return createCaptureBPWithMissingVarName(lhsTypeDesc, pipeOrAndToken, typedBindingPatternOrExpr);
    }
    // Otherwise: LHS is a member-access expr and |/& is a binary operator.
    STNode keyExpr = getExpression(member);
    STNode containerExpr = getExpression(typeDescOrExpr);
    STNode lhsExpr = STNodeFactory.createIndexedExpressionNode(containerExpr, openBracket, keyExpr, closeBracket);
    return STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, lhsExpr, pipeOrAndToken,
            typedBindingPatternOrExpr);
}

/**
 * Attach the bracketed construct as an array dimension to the right-most component
 * of the given type-desc, recursing through union/intersection nodes so the array
 * dimension binds tighter than | and &amp;.
 *
 * @param openBracket  Open bracket
 * @param member       Array length member
 * @param closeBracket Close bracket
 * @param lhsTypeDesc  Type-desc to attach the dimension to
 * @return Updated type-desc
 */
private STNode getArrayTypeDesc(STNode openBracket, STNode member, STNode closeBracket, STNode lhsTypeDesc) {
    if (lhsTypeDesc.kind == SyntaxKind.UNION_TYPE_DESC) {
        STUnionTypeDescriptorNode unionTypeDesc = (STUnionTypeDescriptorNode) lhsTypeDesc;
        STNode middleTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, unionTypeDesc.rightTypeDesc);
        lhsTypeDesc = createUnionTypeDesc(unionTypeDesc.leftTypeDesc, unionTypeDesc.pipeToken, middleTypeDesc);
    } else if (lhsTypeDesc.kind == SyntaxKind.INTERSECTION_TYPE_DESC) {
        STIntersectionTypeDescriptorNode intersectionTypeDesc = (STIntersectionTypeDescriptorNode) lhsTypeDesc;
        STNode middleTypeDesc =
                getArrayTypeDesc(openBracket, member, closeBracket, intersectionTypeDesc.rightTypeDesc);
        lhsTypeDesc = createIntersectionTypeDesc(intersectionTypeDesc.leftTypeDesc,
                intersectionTypeDesc.bitwiseAndToken, middleTypeDesc);
    } else {
        lhsTypeDesc = createArrayTypeDesc(lhsTypeDesc, openBracket, member, closeBracket);
    }
    return lhsTypeDesc;
}

/**
 * Parse union (|) or intersection (&amp;) type operator.
 *
 * @return pipe or bitwise and token
 */
private STNode parseUnionOrIntersectionToken() {
    STToken token = peek();
    if (token.kind == SyntaxKind.PIPE_TOKEN || token.kind == SyntaxKind.BITWISE_AND_TOKEN) {
        return consume();
    } else {
        // Unexpected token: run error recovery, then retry.
        recover(token, ParserRuleContext.UNION_OR_INTERSECTION_TOKEN);
        return parseUnionOrIntersectionToken();
    }
}

/**
 * Infer the type of the ambiguous bracketed list, based on the type of the member.
 *
 * @param memberNode Member node
 * @return Inferred type of the bracketed list
 */
private SyntaxKind getBracketedListNodeType(STNode memberNode, boolean isTypedBindingPattern) {
    if (isEmpty(memberNode)) {
        // Could be anything: still ambiguous.
        return SyntaxKind.NONE;
    }
    if (isDefiniteTypeDesc(memberNode.kind)) {
        return SyntaxKind.TUPLE_TYPE_DESC;
    }
    switch (memberNode.kind) {
        case ASTERISK_LITERAL:
            // T[*] - inferred-length array.
            return SyntaxKind.ARRAY_TYPE_DESC;
        case CAPTURE_BINDING_PATTERN:
        case LIST_BINDING_PATTERN:
        case REST_BINDING_PATTERN:
        case MAPPING_BINDING_PATTERN:
        case WILDCARD_BINDING_PATTERN:
            return SyntaxKind.LIST_BINDING_PATTERN;
        case QUALIFIED_NAME_REFERENCE:
        case REST_TYPE:
            return SyntaxKind.TUPLE_TYPE_DESC;
        case NUMERIC_LITERAL:
            // T[10] - array length, or member access by index.
            if (isTypedBindingPattern) {
                return SyntaxKind.ARRAY_TYPE_DESC;
            }
            return SyntaxKind.ARRAY_TYPE_DESC_OR_MEMBER_ACCESS;
        case SIMPLE_NAME_REFERENCE:
        case BRACKETED_LIST:
        case MAPPING_BP_OR_MAPPING_CONSTRUCTOR:
            return SyntaxKind.NONE;
        case ERROR_CONSTRUCTOR:
            if (isPossibleErrorBindingPattern((STErrorConstructorExpressionNode) memberNode)) {
                return SyntaxKind.NONE;
            }
            return SyntaxKind.INDEXED_EXPRESSION;
        default:
            if (isTypedBindingPattern) {
                return SyntaxKind.NONE;
            }
            return SyntaxKind.INDEXED_EXPRESSION;
    }
}

/*
 * This section tries to break the ambiguity in parsing a statement that starts with a open-bracket.
 * The ambiguity lies in between:
 * 1) Assignment that starts with list binding pattern
 * 2) Var-decl statement that starts with tuple type
 * 3) Statement that starts with list constructor, such as sync-send, etc.
 */

/**
 * Parse any statement that starts with an open-bracket.
 *
 * @param annots Annotations attached to the statement.
* @return Parsed node */ private STNode parseStatementStartsWithOpenBracket(STNode annots, boolean possibleMappingField) { startContext(ParserRuleContext.ASSIGNMENT_OR_VAR_DECL_STMT); return parseStatementStartsWithOpenBracket(annots, true, possibleMappingField); } private STNode parseMemberBracketedList(boolean possibleMappingField) { STNode annots = STNodeFactory.createEmptyNodeList(); return parseStatementStartsWithOpenBracket(annots, false, possibleMappingField); } /** * The bracketed list at the start of a statement can be one of the following. * 1) List binding pattern * 2) Tuple type * 3) List constructor * * @param isRoot Is this the root of the list * @return Parsed node */ private STNode parseStatementStartsWithOpenBracket(STNode annots, boolean isRoot, boolean possibleMappingField) { startContext(ParserRuleContext.STMT_START_BRACKETED_LIST); STNode openBracket = parseOpenBracket(); List<STNode> memberList = new ArrayList<>(); while (!isBracketedListEnd(peek().kind)) { STNode member = parseStatementStartBracketedListMember(); SyntaxKind currentNodeType = getStmtStartBracketedListType(member); switch (currentNodeType) { case TUPLE_TYPE_DESC: return parseAsTupleTypeDesc(annots, openBracket, memberList, member, isRoot); case LIST_BINDING_PATTERN: return parseAsListBindingPattern(openBracket, memberList, member, isRoot); case LIST_CONSTRUCTOR: return parseAsListConstructor(openBracket, memberList, member, isRoot); case LIST_BP_OR_LIST_CONSTRUCTOR: return parseAsListBindingPatternOrListConstructor(openBracket, memberList, member, isRoot); case TUPLE_TYPE_DESC_OR_LIST_CONST: return parseAsTupleTypeDescOrListConstructor(annots, openBracket, memberList, member, isRoot); case NONE: default: memberList.add(member); break; } STNode memberEnd = parseBracketedListMemberEnd(); if (memberEnd == null) { break; } memberList.add(memberEnd); } STNode closeBracket = parseCloseBracket(); STNode bracketedList = parseStatementStartBracketedListRhs(annots, openBracket, 
memberList, closeBracket, isRoot, possibleMappingField); return bracketedList; } /** * Parse a member of a list-binding-pattern, tuple-type-desc, or * list-constructor-expr, when the parent is ambiguous. * * @return Parsed node */ private STNode parseStatementStartBracketedListMember() { List<STNode> typeDescQualifiers = new ArrayList<>(); return parseStatementStartBracketedListMember(typeDescQualifiers); } private STNode parseStatementStartBracketedListMember(List<STNode> qualifiers) { parseTypeDescQualifiers(qualifiers); STToken nextToken = peek(); switch (nextToken.kind) { case OPEN_BRACKET_TOKEN: reportInvalidQualifierList(qualifiers); return parseMemberBracketedList(false); case IDENTIFIER_TOKEN: reportInvalidQualifierList(qualifiers); STNode identifier = parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF); if (isWildcardBP(identifier)) { STNode varName = ((STSimpleNameReferenceNode) identifier).name; return getWildcardBindingPattern(varName); } nextToken = peek(); if (nextToken.kind == SyntaxKind.ELLIPSIS_TOKEN) { STNode ellipsis = parseEllipsis(); return STNodeFactory.createRestDescriptorNode(identifier, ellipsis); } if (nextToken.kind != SyntaxKind.OPEN_BRACKET_TOKEN && isValidTypeContinuationToken(nextToken)) { return parseComplexTypeDescriptor(identifier, ParserRuleContext.TYPE_DESC_IN_TUPLE, false); } return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, identifier, false, true); case OPEN_BRACE_TOKEN: reportInvalidQualifierList(qualifiers); return parseMappingBindingPatterOrMappingConstructor(); case ERROR_KEYWORD: reportInvalidQualifierList(qualifiers); STToken nextNextToken = getNextNextToken(); if (nextNextToken.kind == SyntaxKind.OPEN_PAREN_TOKEN || nextNextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) { return parseErrorBindingPatternOrErrorConstructor(); } return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE); case ELLIPSIS_TOKEN: reportInvalidQualifierList(qualifiers); return parseListBindingPatternMember(); case XML_KEYWORD: case 
STRING_KEYWORD: reportInvalidQualifierList(qualifiers); if (getNextNextToken().kind == SyntaxKind.BACKTICK_TOKEN) { return parseExpression(false); } return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE); case TABLE_KEYWORD: case STREAM_KEYWORD: reportInvalidQualifierList(qualifiers); if (getNextNextToken().kind == SyntaxKind.LT_TOKEN) { return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE); } return parseExpression(false); case OPEN_PAREN_TOKEN: return parseTypeDescOrExpr(qualifiers); case FUNCTION_KEYWORD: return parseAnonFuncExprOrFuncTypeDesc(qualifiers); default: if (isValidExpressionStart(nextToken.kind, 1)) { reportInvalidQualifierList(qualifiers); return parseExpression(false); } if (isTypeStartingToken(nextToken.kind)) { return parseTypeDescriptor(qualifiers, ParserRuleContext.TYPE_DESC_IN_TUPLE); } recover(peek(), ParserRuleContext.STMT_START_BRACKETED_LIST_MEMBER, qualifiers); return parseStatementStartBracketedListMember(qualifiers); } } private STNode parseAsTupleTypeDescOrListConstructor(STNode annots, STNode openBracket, List<STNode> memberList, STNode member, boolean isRoot) { memberList.add(member); STNode memberEnd = parseBracketedListMemberEnd(); STNode tupleTypeDescOrListCons; if (memberEnd == null) { STNode closeBracket = parseCloseBracket(); tupleTypeDescOrListCons = parseTupleTypeDescOrListConstructorRhs(openBracket, memberList, closeBracket, isRoot); } else { memberList.add(memberEnd); tupleTypeDescOrListCons = parseTupleTypeDescOrListConstructor(annots, openBracket, memberList, isRoot); } return tupleTypeDescOrListCons; } /** * Parse tuple type desc or list constructor. 
* * @return Parsed node */ private STNode parseTupleTypeDescOrListConstructor(STNode annots) { startContext(ParserRuleContext.BRACKETED_LIST); STNode openBracket = parseOpenBracket(); List<STNode> memberList = new ArrayList<>(); return parseTupleTypeDescOrListConstructor(annots, openBracket, memberList, false); } private STNode parseTupleTypeDescOrListConstructor(STNode annots, STNode openBracket, List<STNode> memberList, boolean isRoot) { STToken nextToken = peek(); while (!isBracketedListEnd(nextToken.kind)) { STNode member = parseTupleTypeDescOrListConstructorMember(annots); SyntaxKind currentNodeType = getParsingNodeTypeOfTupleTypeOrListCons(member); switch (currentNodeType) { case LIST_CONSTRUCTOR: return parseAsListConstructor(openBracket, memberList, member, isRoot); case TUPLE_TYPE_DESC: return parseAsTupleTypeDesc(annots, openBracket, memberList, member, isRoot); case TUPLE_TYPE_DESC_OR_LIST_CONST: default: memberList.add(member); break; } STNode memberEnd = parseBracketedListMemberEnd(); if (memberEnd == null) { break; } memberList.add(memberEnd); nextToken = peek(); } STNode closeBracket = parseCloseBracket(); return parseTupleTypeDescOrListConstructorRhs(openBracket, memberList, closeBracket, isRoot); } private STNode parseTupleTypeDescOrListConstructorMember(STNode annots) { STToken nextToken = peek(); switch (nextToken.kind) { case OPEN_BRACKET_TOKEN: return parseTupleTypeDescOrListConstructor(annots); case IDENTIFIER_TOKEN: STNode identifier = parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF); if (peek().kind == SyntaxKind.ELLIPSIS_TOKEN) { STNode ellipsis = parseEllipsis(); return STNodeFactory.createRestDescriptorNode(identifier, ellipsis); } return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, identifier, false, false); case OPEN_BRACE_TOKEN: return parseMappingConstructorExpr(); case ERROR_KEYWORD: STToken nextNextToken = getNextNextToken(); if (nextNextToken.kind == SyntaxKind.OPEN_PAREN_TOKEN || nextNextToken.kind == 
SyntaxKind.IDENTIFIER_TOKEN) { return parseErrorConstructorExpr(false); } return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE); case XML_KEYWORD: case STRING_KEYWORD: if (getNextNextToken().kind == SyntaxKind.BACKTICK_TOKEN) { return parseExpression(false); } return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE); case TABLE_KEYWORD: case STREAM_KEYWORD: if (getNextNextToken().kind == SyntaxKind.LT_TOKEN) { return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE); } return parseExpression(false); case OPEN_PAREN_TOKEN: return parseTypeDescOrExpr(); default: if (isValidExpressionStart(nextToken.kind, 1)) { return parseExpression(false); } if (isTypeStartingToken(nextToken.kind)) { return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE); } recover(peek(), ParserRuleContext.TUPLE_TYPE_DESC_OR_LIST_CONST_MEMBER, annots); return parseTupleTypeDescOrListConstructorMember(annots); } } private SyntaxKind getParsingNodeTypeOfTupleTypeOrListCons(STNode memberNode) { return getStmtStartBracketedListType(memberNode); } private STNode parseTupleTypeDescOrListConstructorRhs(STNode openBracket, List<STNode> members, STNode closeBracket, boolean isRoot) { STNode tupleTypeOrListConst; switch (peek().kind) { case COMMA_TOKEN: case CLOSE_BRACE_TOKEN: case CLOSE_BRACKET_TOKEN: if (!isRoot) { endContext(); return new STAmbiguousCollectionNode(SyntaxKind.TUPLE_TYPE_DESC_OR_LIST_CONST, openBracket, members, closeBracket); } default: if (isValidExprRhsStart(peek().kind, closeBracket.kind) || (isRoot && peek().kind == SyntaxKind.EQUAL_TOKEN)) { members = getExpressionList(members); STNode memberExpressions = STNodeFactory.createNodeList(members); tupleTypeOrListConst = STNodeFactory.createListConstructorExpressionNode(openBracket, memberExpressions, closeBracket); break; } STNode memberTypeDescs = STNodeFactory.createNodeList(getTypeDescList(members)); STNode tupleTypeDesc = STNodeFactory.createTupleTypeDescriptorNode(openBracket, memberTypeDescs, 
closeBracket); tupleTypeOrListConst = parseComplexTypeDescriptor(tupleTypeDesc, ParserRuleContext.TYPE_DESC_IN_TUPLE, false); } endContext(); if (!isRoot) { return tupleTypeOrListConst; } STNode annots = STNodeFactory.createEmptyNodeList(); return parseStmtStartsWithTupleTypeOrExprRhs(annots, tupleTypeOrListConst, isRoot); }
private boolean isSimpleTypeInExpression(SyntaxKind nodeKind) {
private boolean isServiceDeclStart(ParserRuleContext currentContext, int lookahead) { switch (peek(lookahead + 1).kind) { case IDENTIFIER_TOKEN: SyntaxKind tokenAfterIdentifier = peek(lookahead + 2).kind; switch (tokenAfterIdentifier) { case ON_KEYWORD: case OPEN_BRACE_TOKEN: return true; case EQUAL_TOKEN: case SEMICOLON_TOKEN: case QUESTION_MARK_TOKEN: return false; default: return false; } case ON_KEYWORD: return true; default: return false; } } /** * Parse listener declaration, given the qualifier. * <p> * <code> * listener-decl := metadata [public] listener [type-descriptor] variable-name = expression ; * </code> * * @param metadata Metadata * @param qualifier Qualifier that precedes the listener declaration * @return Parsed node */ private STNode parseListenerDeclaration(STNode metadata, STNode qualifier) { startContext(ParserRuleContext.LISTENER_DECL); STNode listenerKeyword = parseListenerKeyword(); if (peek().kind == SyntaxKind.IDENTIFIER_TOKEN) { STNode listenerDecl = parseConstantOrListenerDeclWithOptionalType(metadata, qualifier, listenerKeyword, true); endContext(); return listenerDecl; } STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER); STNode variableName = parseVariableName(); STNode equalsToken = parseAssignOp(); STNode initializer = parseExpression(); STNode semicolonToken = parseSemicolon(); endContext(); return STNodeFactory.createListenerDeclarationNode(metadata, qualifier, listenerKeyword, typeDesc, variableName, equalsToken, initializer, semicolonToken); } /** * Parse listener keyword. * * @return Parsed node */ private STNode parseListenerKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.LISTENER_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.LISTENER_KEYWORD); return parseListenerKeyword(); } } /** * Parse constant declaration, given the qualifier. 
* <p> * <code>module-const-decl := metadata [public] const [type-descriptor] identifier = const-expr ;</code> * * @param metadata Metadata * @param qualifier Qualifier that precedes the listener declaration * @return Parsed node */ private STNode parseConstantDeclaration(STNode metadata, STNode qualifier) { startContext(ParserRuleContext.CONSTANT_DECL); STNode constKeyword = parseConstantKeyword(); return parseConstDecl(metadata, qualifier, constKeyword); } /** * Parse the components that follows after the const keyword of a constant declaration. * * @param metadata Metadata * @param qualifier Qualifier that precedes the constant decl * @param constKeyword Const keyword * @return Parsed node */ private STNode parseConstDecl(STNode metadata, STNode qualifier, STNode constKeyword) { STToken nextToken = peek(); switch (nextToken.kind) { case ANNOTATION_KEYWORD: endContext(); return parseAnnotationDeclaration(metadata, qualifier, constKeyword); case IDENTIFIER_TOKEN: STNode constantDecl = parseConstantOrListenerDeclWithOptionalType(metadata, qualifier, constKeyword, false); endContext(); return constantDecl; default: if (isTypeStartingToken(nextToken.kind)) { break; } recover(peek(), ParserRuleContext.CONST_DECL_TYPE, metadata, qualifier, constKeyword); return parseConstDecl(metadata, qualifier, constKeyword); } STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER); STNode variableName = parseVariableName(); STNode equalsToken = parseAssignOp(); STNode initializer = parseExpression(); STNode semicolonToken = parseSemicolon(); endContext(); return STNodeFactory.createConstantDeclarationNode(metadata, qualifier, constKeyword, typeDesc, variableName, equalsToken, initializer, semicolonToken); } private STNode parseConstantOrListenerDeclWithOptionalType(STNode metadata, STNode qualifier, STNode constKeyword, boolean isListener) { STNode varNameOrTypeName = parseStatementStartIdentifier(); return parseConstantOrListenerDeclRhs(metadata, 
qualifier, constKeyword, varNameOrTypeName, isListener); } /** * Parse the component that follows the first identifier in a const decl. The identifier * can be either the type-name (a user defined type) or the var-name where the type-name * is not present. * * @param metadata Metadata * @param qualifier Qualifier that precedes the constant decl * @param keyword Keyword * @param typeOrVarName Identifier that follows the const-keyword * @param isListener Whether this is a listener declaration rather than a constant declaration * @return Parsed node */ private STNode parseConstantOrListenerDeclRhs(STNode metadata, STNode qualifier, STNode keyword, STNode typeOrVarName, boolean isListener) { if (typeOrVarName.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) { STNode type = typeOrVarName; STNode variableName = parseVariableName(); return parseListenerOrConstRhs(metadata, qualifier, keyword, isListener, type, variableName); } STNode type; STNode variableName; switch (peek().kind) { case IDENTIFIER_TOKEN: type = typeOrVarName; variableName = parseVariableName(); break; case EQUAL_TOKEN: variableName = ((STSimpleNameReferenceNode) typeOrVarName).name; type = STNodeFactory.createEmptyNode(); break; default: recover(peek(), ParserRuleContext.CONST_DECL_RHS, metadata, qualifier, keyword, typeOrVarName, isListener); return parseConstantOrListenerDeclRhs(metadata, qualifier, keyword, typeOrVarName, isListener); } return parseListenerOrConstRhs(metadata, qualifier, keyword, isListener, type, variableName); } private STNode parseListenerOrConstRhs(STNode metadata, STNode qualifier, STNode keyword, boolean isListener, STNode type, STNode variableName) { STNode equalsToken = parseAssignOp(); STNode initializer = parseExpression(); STNode semicolonToken = parseSemicolon(); if (isListener) { return STNodeFactory.createListenerDeclarationNode(metadata, qualifier, keyword, type, variableName, equalsToken, initializer, semicolonToken); } return STNodeFactory.createConstantDeclarationNode(metadata, qualifier, keyword, type, variableName, equalsToken, initializer, semicolonToken); } /** * Parse const keyword. 
* * @return Parsed node */ private STNode parseConstantKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.CONST_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.CONST_KEYWORD); return parseConstantKeyword(); } } /** * Parse typeof expression. * <p> * <code> * typeof-expr := typeof expression * </code> * * @param isRhsExpr * @return Typeof expression node */ private STNode parseTypeofExpression(boolean isRhsExpr, boolean isInConditionalExpr) { STNode typeofKeyword = parseTypeofKeyword(); STNode expr = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false, isInConditionalExpr); return STNodeFactory.createTypeofExpressionNode(typeofKeyword, expr); } /** * Parse typeof-keyword. * * @return Typeof-keyword node */ private STNode parseTypeofKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.TYPEOF_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.TYPEOF_KEYWORD); return parseTypeofKeyword(); } } /** * Parse optional type descriptor given the type. * <p> * <code>optional-type-descriptor := type-descriptor `?`</code> * </p> * * @param typeDescriptorNode Preceding type descriptor * @return Parsed node */ private STNode parseOptionalTypeDescriptor(STNode typeDescriptorNode) { startContext(ParserRuleContext.OPTIONAL_TYPE_DESCRIPTOR); STNode questionMarkToken = parseQuestionMark(); endContext(); typeDescriptorNode = validateForUsageOfVar(typeDescriptorNode); return STNodeFactory.createOptionalTypeDescriptorNode(typeDescriptorNode, questionMarkToken); } /** * Parse unary expression. * <p> * <code> * unary-expr := + expression | - expression | ~ expression | ! 
expression * </code> * * @param isRhsExpr * @return Unary expression node */ private STNode parseUnaryExpression(boolean isRhsExpr, boolean isInConditionalExpr) { STNode unaryOperator = parseUnaryOperator(); STNode expr = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false, isInConditionalExpr); return STNodeFactory.createUnaryExpressionNode(unaryOperator, expr); } /** * Parse unary operator. * <code>UnaryOperator := + | - | ~ | !</code> * * @return Parsed node */ private STNode parseUnaryOperator() { STToken token = peek(); if (isUnaryOperator(token.kind)) { return consume(); } else { recover(token, ParserRuleContext.UNARY_OPERATOR); return parseUnaryOperator(); } } /** * Check whether the given token kind is a unary operator. * * @param kind STToken kind * @return <code>true</code> if the token kind refers to a unary operator. <code>false</code> otherwise */ private boolean isUnaryOperator(SyntaxKind kind) { switch (kind) { case PLUS_TOKEN: case MINUS_TOKEN: case NEGATION_TOKEN: case EXCLAMATION_MARK_TOKEN: return true; default: return false; } } /** * Parse array type descriptor. 
* <p> * <code> * array-type-descriptor := member-type-descriptor [ [ array-length ] ] * member-type-descriptor := type-descriptor * array-length := * int-literal * | constant-reference-expr * | inferred-array-length * inferred-array-length := * * </code> * </p> * * @param memberTypeDesc * @return Parsed Node */ private STNode parseArrayTypeDescriptor(STNode memberTypeDesc) { startContext(ParserRuleContext.ARRAY_TYPE_DESCRIPTOR); STNode openBracketToken = parseOpenBracket(); STNode arrayLengthNode = parseArrayLength(); STNode closeBracketToken = parseCloseBracket(); endContext(); return createArrayTypeDesc(memberTypeDesc, openBracketToken, arrayLengthNode, closeBracketToken); } private STNode createArrayTypeDesc(STNode memberTypeDesc, STNode openBracketToken, STNode arrayLengthNode, STNode closeBracketToken) { memberTypeDesc = validateForUsageOfVar(memberTypeDesc); if (arrayLengthNode != null) { switch (arrayLengthNode.kind) { case ASTERISK_LITERAL: case SIMPLE_NAME_REFERENCE: case QUALIFIED_NAME_REFERENCE: break; case NUMERIC_LITERAL: SyntaxKind numericLiteralKind = arrayLengthNode.childInBucket(0).kind; if (numericLiteralKind == SyntaxKind.DECIMAL_INTEGER_LITERAL_TOKEN || numericLiteralKind == SyntaxKind.HEX_INTEGER_LITERAL_TOKEN) { break; } default: openBracketToken = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(openBracketToken, arrayLengthNode, DiagnosticErrorCode.ERROR_INVALID_ARRAY_LENGTH); arrayLengthNode = STNodeFactory.createEmptyNode(); } } return STNodeFactory.createArrayTypeDescriptorNode(memberTypeDesc, openBracketToken, arrayLengthNode, closeBracketToken); } /** * Parse array length. 
* <p> * <code> * array-length := * int-literal * | constant-reference-expr * | inferred-array-length * constant-reference-expr := variable-reference-expr * </code> * </p> * * @return Parsed array length */ private STNode parseArrayLength() { STToken token = peek(); switch (token.kind) { case DECIMAL_INTEGER_LITERAL_TOKEN: case HEX_INTEGER_LITERAL_TOKEN: case ASTERISK_TOKEN: return parseBasicLiteral(); case CLOSE_BRACKET_TOKEN: return STNodeFactory.createEmptyNode(); case IDENTIFIER_TOKEN: return parseQualifiedIdentifier(ParserRuleContext.ARRAY_LENGTH); default: recover(token, ParserRuleContext.ARRAY_LENGTH); return parseArrayLength(); } } /** * Parse annotations. * <p> * <i>Note: In the <a href="https: * annotations-list is specified as one-or-more annotations. And the usage is marked as * optional annotations-list. However, for the consistency of the tree, here we make the * annotation-list as zero-or-more annotations, and the usage is not-optional.</i> * <p> * <code>annots := annotation*</code> * * @return Parsed node */ private STNode parseOptionalAnnotations() { startContext(ParserRuleContext.ANNOTATIONS); List<STNode> annotList = new ArrayList<>(); STToken nextToken = peek(); while (nextToken.kind == SyntaxKind.AT_TOKEN) { annotList.add(parseAnnotation()); nextToken = peek(); } endContext(); return STNodeFactory.createNodeList(annotList); } /** * Parse annotation list with at least one annotation. * * @return Annotation list */ private STNode parseAnnotations() { startContext(ParserRuleContext.ANNOTATIONS); List<STNode> annotList = new ArrayList<>(); annotList.add(parseAnnotation()); while (peek().kind == SyntaxKind.AT_TOKEN) { annotList.add(parseAnnotation()); } endContext(); return STNodeFactory.createNodeList(annotList); } /** * Parse annotation attachment. 
* <p> * <code>annotation := @ annot-tag-reference annot-value</code> * * @return Parsed node */ private STNode parseAnnotation() { STNode atToken = parseAtToken(); STNode annotReference; if (isPredeclaredIdentifier(peek().kind)) { annotReference = parseQualifiedIdentifier(ParserRuleContext.ANNOT_REFERENCE); } else { annotReference = STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN); } STNode annotValue; if (peek().kind == SyntaxKind.OPEN_BRACE_TOKEN) { annotValue = parseMappingConstructorExpr(); } else { annotValue = STNodeFactory.createEmptyNode(); } return STNodeFactory.createAnnotationNode(atToken, annotReference, annotValue); } /** * Parse '@' token. * * @return Parsed node */ private STNode parseAtToken() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.AT_TOKEN) { return consume(); } else { recover(nextToken, ParserRuleContext.AT); return parseAtToken(); } } /** * Parse metadata. Meta data consist of optional doc string and * an annotations list. * <p> * <code>metadata := [DocumentationString] annots</code> * * @return Parse node */ private STNode parseMetaData() { STNode docString; STNode annotations; switch (peek().kind) { case DOCUMENTATION_STRING: docString = parseMarkdownDocumentation(); annotations = parseOptionalAnnotations(); break; case AT_TOKEN: docString = STNodeFactory.createEmptyNode(); annotations = parseOptionalAnnotations(); break; default: return STNodeFactory.createEmptyNode(); } return createMetadata(docString, annotations); } /** * Create metadata node. * * @return A metadata node */ private STNode createMetadata(STNode docString, STNode annotations) { if (annotations == null && docString == null) { return STNodeFactory.createEmptyNode(); } else { return STNodeFactory.createMetadataNode(docString, annotations); } } /** * Parse is expression. 
* <code> * is-expr := expression is type-descriptor * </code> * * @param lhsExpr Preceding expression of the is expression * @return Is expression node */ private STNode parseTypeTestExpression(STNode lhsExpr, boolean isInConditionalExpr) { STNode isKeyword = parseIsKeyword(); STNode typeDescriptor = parseTypeDescriptorInExpression(isInConditionalExpr); return STNodeFactory.createTypeTestExpressionNode(lhsExpr, isKeyword, typeDescriptor); } /** * Parse is-keyword. * * @return Is-keyword node */ private STNode parseIsKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.IS_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.IS_KEYWORD); return parseIsKeyword(); } } /** * Parse local type definition statement. * <code>local-type-defn-stmt := [annots] type identifier type-descriptor ;</code> * * @return Local type definition statement */ private STNode parseLocalTypeDefinitionStatement(STNode annots) { startContext(ParserRuleContext.LOCAL_TYPE_DEFINITION_STMT); STNode typeKeyword = parseTypeKeyword(); STNode typeName = parseTypeName(); STNode typeDescriptor = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_DEF); STNode semicolon = parseSemicolon(); endContext(); return STNodeFactory.createLocalTypeDefinitionStatementNode(annots, typeKeyword, typeName, typeDescriptor, semicolon); } /** * Parse statement which only consists of an action or expression. * * @param annots Annotations * @return Statement node */ private STNode parseExpressionStatement(STNode annots) { startContext(ParserRuleContext.EXPRESSION_STATEMENT); STNode expression = parseActionOrExpressionInLhs(annots); return getExpressionAsStatement(expression); } /** * Parse statements that start with an expression. 
* * @return Statement node */ private STNode parseStatementStartWithExpr(STNode annots) { startContext(ParserRuleContext.AMBIGUOUS_STMT); STNode expr = parseActionOrExpressionInLhs(annots); return parseStatementStartWithExprRhs(expr); } /** * Parse the component followed by the expression, at the beginning of a statement. * * @param expression Action or expression in LHS * @return Statement node */ private STNode parseStatementStartWithExprRhs(STNode expression) { SyntaxKind nextTokenKind = peek().kind; if (isAction(expression) || nextTokenKind == SyntaxKind.SEMICOLON_TOKEN) { return getExpressionAsStatement(expression); } switch (nextTokenKind) { case EQUAL_TOKEN: switchContext(ParserRuleContext.ASSIGNMENT_STMT); return parseAssignmentStmtRhs(expression); case IDENTIFIER_TOKEN: default: if (isCompoundBinaryOperator(nextTokenKind)) { return parseCompoundAssignmentStmtRhs(expression); } ParserRuleContext context; if (isPossibleExpressionStatement(expression)) { context = ParserRuleContext.EXPR_STMT_RHS; } else { context = ParserRuleContext.STMT_START_WITH_EXPR_RHS; } recover(peek(), context, expression); return parseStatementStartWithExprRhs(expression); } } private boolean isPossibleExpressionStatement(STNode expression) { switch (expression.kind) { case METHOD_CALL: case FUNCTION_CALL: case CHECK_EXPRESSION: case REMOTE_METHOD_CALL_ACTION: case CHECK_ACTION: case BRACED_ACTION: case START_ACTION: case TRAP_ACTION: case FLUSH_ACTION: case ASYNC_SEND_ACTION: case SYNC_SEND_ACTION: case RECEIVE_ACTION: case WAIT_ACTION: case QUERY_ACTION: case COMMIT_ACTION: return true; default: return false; } } private STNode getExpressionAsStatement(STNode expression) { switch (expression.kind) { case METHOD_CALL: case FUNCTION_CALL: case CHECK_EXPRESSION: return parseCallStatement(expression); case REMOTE_METHOD_CALL_ACTION: case CHECK_ACTION: case BRACED_ACTION: case START_ACTION: case TRAP_ACTION: case FLUSH_ACTION: case ASYNC_SEND_ACTION: case SYNC_SEND_ACTION: case 
RECEIVE_ACTION: case WAIT_ACTION: case QUERY_ACTION: case COMMIT_ACTION: return parseActionStatement(expression); default: STNode semicolon = parseSemicolon(); endContext(); STNode exprStmt = STNodeFactory.createExpressionStatementNode(SyntaxKind.INVALID_EXPRESSION_STATEMENT, expression, semicolon); exprStmt = SyntaxErrors.addDiagnostic(exprStmt, DiagnosticErrorCode.ERROR_INVALID_EXPRESSION_STATEMENT); return exprStmt; } } private STNode parseArrayTypeDescriptorNode(STIndexedExpressionNode indexedExpr) { STNode memberTypeDesc = getTypeDescFromExpr(indexedExpr.containerExpression); STNodeList lengthExprs = (STNodeList) indexedExpr.keyExpression; if (lengthExprs.isEmpty()) { return createArrayTypeDesc(memberTypeDesc, indexedExpr.openBracket, STNodeFactory.createEmptyNode(), indexedExpr.closeBracket); } STNode lengthExpr = lengthExprs.get(0); switch (lengthExpr.kind) { case ASTERISK_LITERAL: case SIMPLE_NAME_REFERENCE: case QUALIFIED_NAME_REFERENCE: break; case NUMERIC_LITERAL: SyntaxKind innerChildKind = lengthExpr.childInBucket(0).kind; if (innerChildKind == SyntaxKind.DECIMAL_INTEGER_LITERAL_TOKEN || innerChildKind == SyntaxKind.HEX_INTEGER_LITERAL_TOKEN) { break; } default: STNode newOpenBracketWithDiagnostics = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae( indexedExpr.openBracket, lengthExpr, DiagnosticErrorCode.ERROR_INVALID_ARRAY_LENGTH); indexedExpr = indexedExpr.replace(indexedExpr.openBracket, newOpenBracketWithDiagnostics); lengthExpr = STNodeFactory.createEmptyNode(); } return createArrayTypeDesc(memberTypeDesc, indexedExpr.openBracket, lengthExpr, indexedExpr.closeBracket); } /** * <p> * Parse call statement, given the call expression. 
     * </p>
     * <code>
     * call-stmt := call-expr ;
     * <br/>
     * call-expr := function-call-expr | method-call-expr | checking-keyword call-expr
     * </code>
     *
     * @param expression Call expression associated with the call statement
     * @return Call statement node
     */
    private STNode parseCallStatement(STNode expression) {
        STNode semicolon = parseSemicolon();
        endContext();
        // A check expression is only a valid statement when it ultimately wraps a call;
        // validate (and recover) before assembling the statement node.
        if (expression.kind == SyntaxKind.CHECK_EXPRESSION) {
            expression = validateCallExpression(expression);
        }
        return STNodeFactory.createExpressionStatementNode(SyntaxKind.CALL_STATEMENT, expression, semicolon);
    }

    /**
     * Validate that a check expression used in statement position wraps a function or method
     * call. If it does not, the invalid inner expression is attached to the checking keyword
     * as invalid-node minutiae and a missing function-call expression is synthesized in its
     * place, so the tree still has the expected shape.
     *
     * @param callExpr Check expression to validate
     * @return Validated (possibly repaired) check expression
     */
    private STNode validateCallExpression(STNode callExpr) {
        STCheckExpressionNode checkExpr = (STCheckExpressionNode) callExpr;
        STNode expr = checkExpr.expression;
        if (expr.kind == SyntaxKind.FUNCTION_CALL || expr.kind == SyntaxKind.METHOD_CALL) {
            return callExpr;
        }
        STNode checkKeyword = checkExpr.checkKeyword;
        if (expr.kind == SyntaxKind.CHECK_EXPRESSION) {
            // Nested checking keywords, e.g. `check check foo();` — validate recursively.
            expr = validateCallExpression(expr);
            return STNodeFactory.createCheckExpressionNode(SyntaxKind.CHECK_EXPRESSION, checkKeyword, expr);
        }
        // Not a call: report the error on the check keyword and fabricate a missing call.
        STNode checkingKeyword = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(checkKeyword, expr,
                DiagnosticErrorCode.ERROR_INVALID_EXPRESSION_EXPECTED_CALL_EXPRESSION);
        STNode funcName = SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
        funcName = STNodeFactory.createSimpleNameReferenceNode(funcName);
        STNode openParenToken = SyntaxErrors.createMissingToken(SyntaxKind.OPEN_PAREN_TOKEN);
        STNode arguments = STNodeFactory.createEmptyNodeList();
        STNode closeParenToken = SyntaxErrors.createMissingToken(SyntaxKind.CLOSE_PAREN_TOKEN);
        STNode funcCallExpr = STNodeFactory.createFunctionCallExpressionNode(funcName, openParenToken, arguments,
                closeParenToken);
        return STNodeFactory.createCheckExpressionNode(SyntaxKind.CHECK_EXPRESSION, checkingKeyword, funcCallExpr);
    }

    /**
     * Parse an action statement, given the already-parsed action.
     *
     * @param action Action that starts the statement
     * @return Action statement node
     */
    private STNode parseActionStatement(STNode action) {
        STNode semicolon = parseSemicolon();
        endContext();
        return STNodeFactory.createExpressionStatementNode(SyntaxKind.ACTION_STATEMENT, action, semicolon);
    }

    /**
     * Parse remote method call action, given the starting expression.
     * <p>
     * <code>
     * remote-method-call-action := expression -> method-name ( arg-list )
     * <br/>
     * async-send-action := expression -> peer-worker ;
     * </code>
     *
     * @param isRhsExpr Is this an RHS action
     * @param expression LHS expression
     * @return Remote method call action or async send action
     */
    private STNode parseRemoteMethodCallOrAsyncSendAction(STNode expression, boolean isRhsExpr) {
        STNode rightArrow = parseRightArrow();
        return parseRemoteCallOrAsyncSendActionRhs(expression, isRhsExpr, rightArrow);
    }

    /**
     * Parse the portion after the right-arrow of a remote call / async send action. The next
     * token decides between the two forms; recovery re-enters this method.
     *
     * @param expression LHS expression
     * @param isRhsExpr Is this an RHS action
     * @param rightArrow Right arrow token
     * @return Parsed node
     */
    private STNode parseRemoteCallOrAsyncSendActionRhs(STNode expression, boolean isRhsExpr, STNode rightArrow) {
        STNode name;
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case FUNCTION_KEYWORD:
                // `-> function` sends to the default worker: this can only be an async send.
                STNode functionKeyword = consume();
                name = STNodeFactory.createSimpleNameReferenceNode(functionKeyword);
                return parseAsyncSendAction(expression, rightArrow, name);
            case IDENTIFIER_TOKEN:
                name = STNodeFactory.createSimpleNameReferenceNode(parseFunctionName());
                break;
            case CONTINUE_KEYWORD:
            case COMMIT_KEYWORD:
                name = getKeywordAsSimpleNameRef();
                break;
            default:
                STToken token = peek();
                recover(token, ParserRuleContext.REMOTE_CALL_OR_ASYNC_SEND_RHS, expression, isRhsExpr, rightArrow);
                return parseRemoteCallOrAsyncSendActionRhs(expression, isRhsExpr, rightArrow);
        }
        return parseRemoteCallOrAsyncSendEnd(expression, rightArrow, name);
    }

    /**
     * Disambiguate between a remote method call and an async send, based on the token that
     * follows the name: an open-paren starts an arg list, a semicolon ends an async send.
     *
     * @param expression LHS expression
     * @param rightArrow Right arrow token
     * @param name Method name or peer-worker name
     * @return Parsed node
     */
    private STNode parseRemoteCallOrAsyncSendEnd(STNode expression, STNode rightArrow, STNode name) {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case OPEN_PAREN_TOKEN:
                return parseRemoteMethodCallAction(expression, rightArrow, name);
            case SEMICOLON_TOKEN:
                return parseAsyncSendAction(expression, rightArrow, name);
            default:
                recover(peek(), ParserRuleContext.REMOTE_CALL_OR_ASYNC_SEND_END, expression, rightArrow, name);
                return parseRemoteCallOrAsyncSendEnd(expression, rightArrow, name);
        }
    }

    /**
     * Assemble an async send action node from its already-parsed parts.
     *
     * @param expression LHS expression
     * @param rightArrow Right arrow token
     * @param peerWorker Peer worker name
     * @return Async send action node
     */
    private STNode parseAsyncSendAction(STNode expression, STNode rightArrow, STNode peerWorker) {
        return STNodeFactory.createAsyncSendActionNode(expression, rightArrow, peerWorker);
    }

    /**
     * Parse the argument list of a remote method call action and assemble the node.
     *
     * @param expression LHS expression
     * @param rightArrow Right arrow token
     * @param name Method name
     * @return Remote method call action node
     */
    private STNode parseRemoteMethodCallAction(STNode expression, STNode rightArrow, STNode name) {
        STNode openParenToken = parseArgListOpenParenthesis();
        STNode arguments = parseArgsList();
        STNode closeParenToken = parseArgListCloseParenthesis();
        return STNodeFactory.createRemoteMethodCallActionNode(expression, rightArrow, name, openParenToken,
                arguments, closeParenToken);
    }

    /**
     * Parse right arrow (<code>-></code>) token.
     *
     * @return Parsed node
     */
    private STNode parseRightArrow() {
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.RIGHT_ARROW_TOKEN) {
            return consume();
        } else {
            recover(nextToken, ParserRuleContext.RIGHT_ARROW);
            return parseRightArrow();
        }
    }

    /**
     * Parse parameterized type descriptor.
     * parameterized-type-descriptor := map type-parameter | future type-parameter | typedesc type-parameter
     *
     * @param parameterizedTypeKeyword Keyword (map/future/typedesc) that starts the type desc
     * @return Parsed node
     */
    private STNode parseParameterizedTypeDescriptor(STNode parameterizedTypeKeyword) {
        STNode typeParameter = parseTypeParameter();
        return STNodeFactory.createParameterizedTypeDescriptorNode(parameterizedTypeKeyword, typeParameter);
    }

    /**
     * Parse <code> > </code> token.
     *
     * @return Parsed node
     */
    private STNode parseGTToken() {
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.GT_TOKEN) {
            return consume();
        } else {
            recover(nextToken, ParserRuleContext.GT);
            return parseGTToken();
        }
    }

    /**
     * Parse <code> < </code> token.
     *
     * @return Parsed node
     */
    private STNode parseLTToken() {
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.LT_TOKEN) {
            return consume();
        } else {
            recover(nextToken, ParserRuleContext.LT);
            return parseLTToken();
        }
    }

    /**
     * Parse nil literal. Here nil literal is only referred to ( ).
* * @return Parsed node */ private STNode parseNilLiteral() { startContext(ParserRuleContext.NIL_LITERAL); STNode openParenthesisToken = parseOpenParenthesis(); STNode closeParenthesisToken = parseCloseParenthesis(); endContext(); return STNodeFactory.createNilLiteralNode(openParenthesisToken, closeParenthesisToken); } /** * Parse annotation declaration, given the qualifier. * * @param metadata Metadata * @param qualifier Qualifier that precedes the listener declaration * @param constKeyword Const keyword * @return Parsed node */ private STNode parseAnnotationDeclaration(STNode metadata, STNode qualifier, STNode constKeyword) { startContext(ParserRuleContext.ANNOTATION_DECL); STNode annotationKeyword = parseAnnotationKeyword(); STNode annotDecl = parseAnnotationDeclFromType(metadata, qualifier, constKeyword, annotationKeyword); endContext(); return annotDecl; } /** * Parse annotation keyword. * * @return Parsed node */ private STNode parseAnnotationKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.ANNOTATION_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.ANNOTATION_KEYWORD); return parseAnnotationKeyword(); } } /** * Parse the components that follows after the annotation keyword of a annotation declaration. 
* * @param metadata Metadata * @param qualifier Qualifier that precedes the constant decl * @param constKeyword Const keyword * @param annotationKeyword * @return Parsed node */ private STNode parseAnnotationDeclFromType(STNode metadata, STNode qualifier, STNode constKeyword, STNode annotationKeyword) { STToken nextToken = peek(); switch (nextToken.kind) { case IDENTIFIER_TOKEN: return parseAnnotationDeclWithOptionalType(metadata, qualifier, constKeyword, annotationKeyword); default: if (isTypeStartingToken(nextToken.kind)) { break; } recover(peek(), ParserRuleContext.ANNOT_DECL_OPTIONAL_TYPE, metadata, qualifier, constKeyword, annotationKeyword); return parseAnnotationDeclFromType(metadata, qualifier, constKeyword, annotationKeyword); } STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANNOTATION_DECL); STNode annotTag = parseAnnotationTag(); return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc, annotTag); } /** * Parse annotation tag. 
* <p> * <code>annot-tag := identifier</code> * * @return */ private STNode parseAnnotationTag() { STToken token = peek(); if (token.kind == SyntaxKind.IDENTIFIER_TOKEN) { return consume(); } else { recover(peek(), ParserRuleContext.ANNOTATION_TAG); return parseAnnotationTag(); } } private STNode parseAnnotationDeclWithOptionalType(STNode metadata, STNode qualifier, STNode constKeyword, STNode annotationKeyword) { STNode typeDescOrAnnotTag = parseQualifiedIdentifier(ParserRuleContext.ANNOT_DECL_OPTIONAL_TYPE); if (typeDescOrAnnotTag.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) { STNode annotTag = parseAnnotationTag(); return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDescOrAnnotTag, annotTag); } STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN || isValidTypeContinuationToken(nextToken)) { STNode typeDesc = parseComplexTypeDescriptor(typeDescOrAnnotTag, ParserRuleContext.TYPE_DESC_IN_ANNOTATION_DECL, false); STNode annotTag = parseAnnotationTag(); return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc, annotTag); } STNode annotTag = ((STSimpleNameReferenceNode) typeDescOrAnnotTag).name; return parseAnnotationDeclRhs(metadata, qualifier, constKeyword, annotationKeyword, annotTag); } /** * Parse the component that follows the first identifier in an annotation decl. The identifier * can be either the type-name (a user defined type) or the annot-tag, where the type-name * is not present. 
     *
     * @param metadata Metadata
     * @param qualifier Qualifier that precedes the annotation decl
     * @param constKeyword Const keyword
     * @param annotationKeyword Annotation keyword
     * @param typeDescOrAnnotTag Identifier that follows the annotation-keyword
     * @return Parsed node
     */
    private STNode parseAnnotationDeclRhs(STNode metadata, STNode qualifier, STNode constKeyword,
                                          STNode annotationKeyword, STNode typeDescOrAnnotTag) {
        STToken nextToken = peek();
        STNode typeDesc;
        STNode annotTag;
        switch (nextToken.kind) {
            case IDENTIFIER_TOKEN:
                // A second identifier: the first one was the type, this one is the tag.
                typeDesc = typeDescOrAnnotTag;
                annotTag = parseAnnotationTag();
                break;
            case SEMICOLON_TOKEN:
            case ON_KEYWORD:
                // Declaration ends (or attach points start): there was no type descriptor.
                typeDesc = STNodeFactory.createEmptyNode();
                annotTag = typeDescOrAnnotTag;
                break;
            default:
                recover(peek(), ParserRuleContext.ANNOT_DECL_RHS, metadata, qualifier, constKeyword,
                        annotationKeyword, typeDescOrAnnotTag);
                return parseAnnotationDeclRhs(metadata, qualifier, constKeyword, annotationKeyword,
                        typeDescOrAnnotTag);
        }
        return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc,
                annotTag);
    }

    /**
     * Parse the optional <code>on attach-point (, attach-point)*</code> portion and assemble
     * the annotation declaration node.
     *
     * @param metadata Metadata
     * @param qualifier Qualifier that precedes the annotation decl
     * @param constKeyword Const keyword
     * @param annotationKeyword Annotation keyword
     * @param typeDesc Type descriptor (may be an empty node)
     * @param annotTag Annotation tag
     * @return Annotation declaration node
     */
    private STNode parseAnnotationDeclAttachPoints(STNode metadata, STNode qualifier, STNode constKeyword,
                                                   STNode annotationKeyword, STNode typeDesc, STNode annotTag) {
        STNode onKeyword;
        STNode attachPoints;
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case SEMICOLON_TOKEN:
                onKeyword = STNodeFactory.createEmptyNode();
                attachPoints = STNodeFactory.createEmptyNodeList();
                break;
            case ON_KEYWORD:
                onKeyword = parseOnKeyword();
                attachPoints = parseAnnotationAttachPoints();
                // `on` followed by nothing: move the missing-attach-point diagnostic onto
                // the keyword itself.
                onKeyword = cloneWithDiagnosticIfListEmpty(attachPoints, onKeyword,
                        DiagnosticErrorCode.ERROR_MISSING_ANNOTATION_ATTACH_POINT);
                break;
            default:
                recover(peek(), ParserRuleContext.ANNOT_OPTIONAL_ATTACH_POINTS, metadata, qualifier, constKeyword,
                        annotationKeyword, typeDesc, annotTag);
                return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword,
                        typeDesc, annotTag);
        }
        STNode semicolonToken = parseSemicolon();
        return STNodeFactory.createAnnotationDeclarationNode(metadata, qualifier, constKeyword, annotationKeyword,
                typeDesc, annotTag, onKeyword, attachPoints, semicolonToken);
    }

    /**
     * Parse annotation attach points.
     * <p>
     * <code>
     * annot-attach-points := annot-attach-point (, annot-attach-point)*
     * <br/><br/>
     * annot-attach-point := dual-attach-point | source-only-attach-point
     * <br/><br/>
     * dual-attach-point := [source] dual-attach-point-ident
     * <br/><br/>
     * dual-attach-point-ident :=
     * type
     * | class
     * | [object|service remote] function
     * | parameter
     * | return
     * | service
     * | [object|record] field
     * <br/><br/>
     * source-only-attach-point := source source-only-attach-point-ident
     * <br/><br/>
     * source-only-attach-point-ident :=
     * annotation
     * | external
     * | var
     * | const
     * | listener
     * | worker
     * </code>
     *
     * @return Parsed node
     */
    private STNode parseAnnotationAttachPoints() {
        startContext(ParserRuleContext.ANNOT_ATTACH_POINTS_LIST);
        List<STNode> attachPoints = new ArrayList<>();
        STToken nextToken = peek();
        if (isEndAnnotAttachPointList(nextToken.kind)) {
            endContext();
            return STNodeFactory.createEmptyNodeList();
        }
        // First attach point, then (comma, attach-point) pairs until the list ends.
        STNode attachPoint = parseAnnotationAttachPoint();
        attachPoints.add(attachPoint);
        nextToken = peek();
        STNode leadingComma;
        while (!isEndAnnotAttachPointList(nextToken.kind)) {
            leadingComma = parseAttachPointEnd();
            if (leadingComma == null) {
                break;
            }
            attachPoints.add(leadingComma);
            attachPoint = parseAnnotationAttachPoint();
            if (attachPoint == null) {
                // EOF after a trailing comma: synthesize a missing attach point.
                attachPoint = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,
                        DiagnosticErrorCode.ERROR_MISSING_ANNOTATION_ATTACH_POINT);
                attachPoints.add(attachPoint);
                break;
            }
            attachPoints.add(attachPoint);
            nextToken = peek();
        }
        endContext();
        return STNodeFactory.createNodeList(attachPoints);
    }

    /**
     * Parse annotation attach point end.
* * @return Parsed node */ private STNode parseAttachPointEnd() { switch (peek().kind) { case SEMICOLON_TOKEN: return null; case COMMA_TOKEN: return consume(); default: recover(peek(), ParserRuleContext.ATTACH_POINT_END); return parseAttachPointEnd(); } } private boolean isEndAnnotAttachPointList(SyntaxKind tokenKind) { switch (tokenKind) { case EOF_TOKEN: case SEMICOLON_TOKEN: return true; default: return false; } } /** * Parse annotation attach point. * * @return Parsed node */ private STNode parseAnnotationAttachPoint() { switch (peek().kind) { case EOF_TOKEN: return null; case ANNOTATION_KEYWORD: case EXTERNAL_KEYWORD: case VAR_KEYWORD: case CONST_KEYWORD: case LISTENER_KEYWORD: case WORKER_KEYWORD: case SOURCE_KEYWORD: STNode sourceKeyword = parseSourceKeyword(); return parseAttachPointIdent(sourceKeyword); case OBJECT_KEYWORD: case TYPE_KEYWORD: case FUNCTION_KEYWORD: case PARAMETER_KEYWORD: case RETURN_KEYWORD: case SERVICE_KEYWORD: case FIELD_KEYWORD: case RECORD_KEYWORD: case CLASS_KEYWORD: sourceKeyword = STNodeFactory.createEmptyNode(); STNode firstIdent = consume(); return parseDualAttachPointIdent(sourceKeyword, firstIdent); default: recover(peek(), ParserRuleContext.ATTACH_POINT); return parseAnnotationAttachPoint(); } } /** * Parse source keyword. * * @return Parsed node */ private STNode parseSourceKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.SOURCE_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.SOURCE_KEYWORD); return parseSourceKeyword(); } } /** * Parse attach point ident gievn. 
* <p> * <code> * source-only-attach-point-ident := annotation | external | var | const | listener | worker * <br/><br/> * dual-attach-point-ident := type | class | [object|service remote] function | parameter * | return | service | [object|record] field * </code> * * @param sourceKeyword Source keyword * @return Parsed node */ private STNode parseAttachPointIdent(STNode sourceKeyword) { switch (peek().kind) { case ANNOTATION_KEYWORD: case EXTERNAL_KEYWORD: case VAR_KEYWORD: case CONST_KEYWORD: case LISTENER_KEYWORD: case WORKER_KEYWORD: STNode firstIdent = consume(); STNode identList = STNodeFactory.createNodeList(firstIdent); return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, identList); case OBJECT_KEYWORD: case RESOURCE_KEYWORD: case RECORD_KEYWORD: case TYPE_KEYWORD: case FUNCTION_KEYWORD: case PARAMETER_KEYWORD: case RETURN_KEYWORD: case SERVICE_KEYWORD: case FIELD_KEYWORD: case CLASS_KEYWORD: firstIdent = consume(); return parseDualAttachPointIdent(sourceKeyword, firstIdent); default: recover(peek(), ParserRuleContext.ATTACH_POINT_IDENT, sourceKeyword); return parseAttachPointIdent(sourceKeyword); } } /** * Parse dual-attach-point ident. 
* * @param sourceKeyword Source keyword * @param firstIdent first part of the dual attach-point * @return Parsed node */ private STNode parseDualAttachPointIdent(STNode sourceKeyword, STNode firstIdent) { STNode secondIdent; switch (firstIdent.kind) { case OBJECT_KEYWORD: secondIdent = parseIdentAfterObjectIdent(); break; case RESOURCE_KEYWORD: secondIdent = parseFunctionIdent(); break; case RECORD_KEYWORD: secondIdent = parseFieldIdent(); break; case SERVICE_KEYWORD: return parseServiceAttachPoint(sourceKeyword, firstIdent); case TYPE_KEYWORD: case FUNCTION_KEYWORD: case PARAMETER_KEYWORD: case RETURN_KEYWORD: case FIELD_KEYWORD: case CLASS_KEYWORD: default: STNode identList = STNodeFactory.createNodeList(firstIdent); return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, identList); } STNode identList = STNodeFactory.createNodeList(firstIdent, secondIdent); return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, identList); } /** * Parse remote ident. * * @return Parsed node */ private STNode parseRemoteIdent() { STToken token = peek(); if (token.kind == SyntaxKind.REMOTE_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.REMOTE_IDENT); return parseRemoteIdent(); } } /** * Parse service attach point. 
* <code>service-attach-point := service | service remote function</code> * * @return Parsed node */ private STNode parseServiceAttachPoint(STNode sourceKeyword, STNode firstIdent) { STNode identList; STToken token = peek(); switch (token.kind) { case REMOTE_KEYWORD: STNode secondIdent = parseRemoteIdent(); STNode thirdIdent = parseFunctionIdent(); identList = STNodeFactory.createNodeList(firstIdent, secondIdent, thirdIdent); return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, identList); case COMMA_TOKEN: case SEMICOLON_TOKEN: identList = STNodeFactory.createNodeList(firstIdent); return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, identList); default: recover(token, ParserRuleContext.SERVICE_IDENT_RHS); return parseServiceAttachPoint(sourceKeyword, firstIdent); } } /** * Parse the idents that are supported after object-ident. * * @return Parsed node */ private STNode parseIdentAfterObjectIdent() { STToken token = peek(); switch (token.kind) { case FUNCTION_KEYWORD: case FIELD_KEYWORD: return consume(); default: recover(token, ParserRuleContext.IDENT_AFTER_OBJECT_IDENT); return parseIdentAfterObjectIdent(); } } /** * Parse function ident. * * @return Parsed node */ private STNode parseFunctionIdent() { STToken token = peek(); if (token.kind == SyntaxKind.FUNCTION_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.FUNCTION_IDENT); return parseFunctionIdent(); } } /** * Parse field ident. * * @return Parsed node */ private STNode parseFieldIdent() { STToken token = peek(); if (token.kind == SyntaxKind.FIELD_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.FIELD_IDENT); return parseFieldIdent(); } } /** * Parse XML namespace declaration. 
* <p> * <code>xmlns-decl := xmlns xml-namespace-uri [ as xml-namespace-prefix ] ; * <br/> * xml-namespace-uri := simple-const-expr * <br/> * xml-namespace-prefix := identifier * </code> * * @return */ private STNode parseXMLNamespaceDeclaration(boolean isModuleVar) { startContext(ParserRuleContext.XML_NAMESPACE_DECLARATION); STNode xmlnsKeyword = parseXMLNSKeyword(); STNode namespaceUri = parseSimpleConstExpr(); while (!isValidXMLNameSpaceURI(namespaceUri)) { xmlnsKeyword = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(xmlnsKeyword, namespaceUri, DiagnosticErrorCode.ERROR_INVALID_XML_NAMESPACE_URI); namespaceUri = parseSimpleConstExpr(); } STNode xmlnsDecl = parseXMLDeclRhs(xmlnsKeyword, namespaceUri, isModuleVar); endContext(); return xmlnsDecl; } /** * Parse xmlns keyword. * * @return Parsed node */ private STNode parseXMLNSKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.XMLNS_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.XMLNS_KEYWORD); return parseXMLNSKeyword(); } } private boolean isValidXMLNameSpaceURI(STNode expr) { switch (expr.kind) { case STRING_LITERAL: case QUALIFIED_NAME_REFERENCE: case SIMPLE_NAME_REFERENCE: return true; case IDENTIFIER_TOKEN: default: return false; } } private STNode parseSimpleConstExpr() { startContext(ParserRuleContext.CONSTANT_EXPRESSION); STNode expr = parseSimpleConstExprInternal(); endContext(); return expr; } /** * Parse simple constants expr. 
* * @return Parsed node */ private STNode parseSimpleConstExprInternal() { switch (peek().kind) { case STRING_LITERAL_TOKEN: case DECIMAL_INTEGER_LITERAL_TOKEN: case HEX_INTEGER_LITERAL_TOKEN: case DECIMAL_FLOATING_POINT_LITERAL_TOKEN: case HEX_FLOATING_POINT_LITERAL_TOKEN: case TRUE_KEYWORD: case FALSE_KEYWORD: case NULL_KEYWORD: return parseBasicLiteral(); case IDENTIFIER_TOKEN: return parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF); case PLUS_TOKEN: case MINUS_TOKEN: return parseSignedIntOrFloat(); case OPEN_PAREN_TOKEN: return parseNilLiteral(); default: STToken token = peek(); recover(token, ParserRuleContext.CONSTANT_EXPRESSION_START); return parseSimpleConstExprInternal(); } } /** * Parse the portion after the namsepsace-uri of an XML declaration. * * @param xmlnsKeyword XMLNS keyword * @param namespaceUri Namespace URI * @return Parsed node */ private STNode parseXMLDeclRhs(STNode xmlnsKeyword, STNode namespaceUri, boolean isModuleVar) { STNode asKeyword = STNodeFactory.createEmptyNode(); STNode namespacePrefix = STNodeFactory.createEmptyNode(); switch (peek().kind) { case AS_KEYWORD: asKeyword = parseAsKeyword(); namespacePrefix = parseNamespacePrefix(); break; case SEMICOLON_TOKEN: break; default: recover(peek(), ParserRuleContext.XML_NAMESPACE_PREFIX_DECL, xmlnsKeyword, namespaceUri, isModuleVar); return parseXMLDeclRhs(xmlnsKeyword, namespaceUri, isModuleVar); } STNode semicolon = parseSemicolon(); if (isModuleVar) { return STNodeFactory.createModuleXMLNamespaceDeclarationNode(xmlnsKeyword, namespaceUri, asKeyword, namespacePrefix, semicolon); } return STNodeFactory.createXMLNamespaceDeclarationNode(xmlnsKeyword, namespaceUri, asKeyword, namespacePrefix, semicolon); } /** * Parse import prefix. 
     *
     * @return Parsed node
     */
    private STNode parseNamespacePrefix() {
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {
            return consume();
        } else {
            recover(peek(), ParserRuleContext.NAMESPACE_PREFIX);
            return parseNamespacePrefix();
        }
    }

    /**
     * Parse named worker declaration.
     * <p>
     * <code>named-worker-decl := [annots] [transactional] worker worker-name return-type-descriptor { sequence-stmt }
     * </code>
     *
     * @param annots Annotations attached to the worker decl
     * @param qualifiers Preceding transactional keyword in a list
     * @return Parsed node
     */
    private STNode parseNamedWorkerDeclaration(STNode annots, List<STNode> qualifiers) {
        startContext(ParserRuleContext.NAMED_WORKER_DECL);
        STNode transactionalKeyword = getTransactionalKeyword(qualifiers);
        STNode workerKeyword = parseWorkerKeyword();
        STNode workerName = parseWorkerName();
        STNode returnTypeDesc = parseReturnTypeDescriptor();
        STNode workerBody = parseBlockNode();
        endContext();
        return STNodeFactory.createNamedWorkerDeclarationNode(annots, transactionalKeyword, workerKeyword,
                workerName, returnTypeDesc, workerBody);
    }

    /**
     * Validate the qualifier list of a named worker decl: only a single
     * <code>transactional</code> qualifier is allowed. Duplicates and disallowed qualifiers
     * are attached to a neighboring node as invalid-node minutiae with a diagnostic.
     *
     * @param qualifierList Qualifiers preceding the worker keyword
     * @return The transactional keyword, or an empty node when not present
     */
    private STNode getTransactionalKeyword(List<STNode> qualifierList) {
        List<STNode> validatedList = new ArrayList<>();
        for (int i = 0; i < qualifierList.size(); i++) {
            STNode qualifier = qualifierList.get(i);
            int nextIndex = i + 1;
            if (isSyntaxKindInList(validatedList, qualifier.kind)) {
                // Repeated `transactional` qualifier.
                updateLastNodeInListWithInvalidNode(validatedList, qualifier,
                        DiagnosticErrorCode.ERROR_DUPLICATE_QUALIFIER, ((STToken) qualifier).text());
            } else if (qualifier.kind == SyntaxKind.TRANSACTIONAL_KEYWORD) {
                validatedList.add(qualifier);
            } else if (qualifierList.size() == nextIndex) {
                // Last qualifier in the list: attach the invalid node to the next token.
                addInvalidNodeToNextToken(qualifier, DiagnosticErrorCode.ERROR_QUALIFIER_NOT_ALLOWED,
                        ((STToken) qualifier).text());
            } else {
                updateANodeInListWithLeadingInvalidNode(qualifierList, nextIndex, qualifier,
                        DiagnosticErrorCode.ERROR_QUALIFIER_NOT_ALLOWED, ((STToken) qualifier).text());
            }
        }
        STNode transactionalKeyword;
        if (validatedList.isEmpty()) {
            transactionalKeyword = STNodeFactory.createEmptyNode();
        } else {
            transactionalKeyword = validatedList.get(0);
        }
        return transactionalKeyword;
    }

    /**
     * Parse the optional return type descriptor (<code>returns [annots] type-desc</code>).
     * Absence is represented by an empty node.
     *
     * @return Return type descriptor node, or an empty node
     */
    private STNode parseReturnTypeDescriptor() {
        STToken token = peek();
        if (token.kind != SyntaxKind.RETURNS_KEYWORD) {
            return STNodeFactory.createEmptyNode();
        }
        STNode returnsKeyword = consume();
        STNode annot = parseOptionalAnnotations();
        STNode type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_RETURN_TYPE_DESC);
        return STNodeFactory.createReturnTypeDescriptorNode(returnsKeyword, annot, type);
    }

    /**
     * Parse worker keyword.
     *
     * @return Parsed node
     */
    private STNode parseWorkerKeyword() {
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.WORKER_KEYWORD) {
            return consume();
        } else {
            recover(peek(), ParserRuleContext.WORKER_KEYWORD);
            return parseWorkerKeyword();
        }
    }

    /**
     * Parse worker name.
     * <p>
     * <code>worker-name := identifier</code>
     *
     * @return Parsed node
     */
    private STNode parseWorkerName() {
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {
            return consume();
        } else {
            recover(peek(), ParserRuleContext.WORKER_NAME);
            return parseWorkerName();
        }
    }

    /**
     * Parse lock statement.
     * <code>lock-stmt := lock block-stmt [on-fail-clause]</code>
     *
     * @return Lock statement
     */
    private STNode parseLockStatement() {
        startContext(ParserRuleContext.LOCK_STMT);
        STNode lockKeyword = parseLockKeyword();
        STNode blockStatement = parseBlockNode();
        endContext();
        STNode onFailClause = parseOptionalOnFailClause();
        return STNodeFactory.createLockStatementNode(lockKeyword, blockStatement, onFailClause);
    }

    /**
     * Parse lock-keyword.
     *
     * @return lock-keyword node
     */
    private STNode parseLockKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.LOCK_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.LOCK_KEYWORD);
            return parseLockKeyword();
        }
    }

    /**
     * Parse union type descriptor.
* union-type-descriptor := type-descriptor | type-descriptor * * @param leftTypeDesc Type desc in the LHS os the union type desc. * @param context Current context. * @return parsed union type desc node */ private STNode parseUnionTypeDescriptor(STNode leftTypeDesc, ParserRuleContext context, boolean isTypedBindingPattern) { STNode pipeToken = consume(); STNode rightTypeDesc = parseTypeDescriptorInternal(new ArrayList<>(), context, isTypedBindingPattern, false); return createUnionTypeDesc(leftTypeDesc, pipeToken, rightTypeDesc); } private STNode createUnionTypeDesc(STNode leftTypeDesc, STNode pipeToken, STNode rightTypeDesc) { leftTypeDesc = validateForUsageOfVar(leftTypeDesc); rightTypeDesc = validateForUsageOfVar(rightTypeDesc); return STNodeFactory.createUnionTypeDescriptorNode(leftTypeDesc, pipeToken, rightTypeDesc); } /** * Parse pipe token. * * @return parsed pipe token node */ private STNode parsePipeToken() { STToken token = peek(); if (token.kind == SyntaxKind.PIPE_TOKEN) { return consume(); } else { recover(token, ParserRuleContext.PIPE); return parsePipeToken(); } } private boolean isTypeStartingToken(SyntaxKind nodeKind) { switch (nodeKind) { case IDENTIFIER_TOKEN: case SERVICE_KEYWORD: case RECORD_KEYWORD: case OBJECT_KEYWORD: case ABSTRACT_KEYWORD: case CLIENT_KEYWORD: case OPEN_PAREN_TOKEN: case MAP_KEYWORD: case FUTURE_KEYWORD: case TYPEDESC_KEYWORD: case ERROR_KEYWORD: case XML_KEYWORD: case STREAM_KEYWORD: case TABLE_KEYWORD: case FUNCTION_KEYWORD: case OPEN_BRACKET_TOKEN: case DISTINCT_KEYWORD: case ISOLATED_KEYWORD: case TRANSACTIONAL_KEYWORD: case TRANSACTION_KEYWORD: return true; default: if (isSingletonTypeDescStart(nodeKind, true)) { return true; } return isSimpleType(nodeKind); } } /** * Check if the token kind is a type descriptor in terminal expression. 
     * <p>
     * simple-type-in-expr :=
     * boolean | int | byte | float | decimal | string | handle | json | anydata | any | never
     *
     * @param nodeKind token kind to check
     * @return <code>true</code> for simple type token in expression. <code>false</code> otherwise.
     */
    private boolean isSimpleTypeInExpression(SyntaxKind nodeKind) {
        switch (nodeKind) {
            // `var` and `readonly` are simple types but not valid in expression position.
            case VAR_KEYWORD:
            case READONLY_KEYWORD:
                return false;
            default:
                return isSimpleType(nodeKind);
        }
    }

    // Returns true for keywords naming a simple (built-in, non-parameterized) type.
    static boolean isSimpleType(SyntaxKind nodeKind) {
        switch (nodeKind) {
            case INT_KEYWORD:
            case FLOAT_KEYWORD:
            case DECIMAL_KEYWORD:
            case BOOLEAN_KEYWORD:
            case STRING_KEYWORD:
            case BYTE_KEYWORD:
            case JSON_KEYWORD:
            case HANDLE_KEYWORD:
            case ANY_KEYWORD:
            case ANYDATA_KEYWORD:
            case NEVER_KEYWORD:
            case VAR_KEYWORD:
            case READONLY_KEYWORD:
                return true;
            default:
                return false;
        }
    }

    // Returns true for keywords that can act as a predeclared module prefix (e.g. `int:...`).
    static boolean isPredeclaredPrefix(SyntaxKind nodeKind) {
        switch (nodeKind) {
            case BOOLEAN_KEYWORD:
            case DECIMAL_KEYWORD:
            case ERROR_KEYWORD:
            case FLOAT_KEYWORD:
            case FUTURE_KEYWORD:
            case INT_KEYWORD:
            case MAP_KEYWORD:
            case OBJECT_KEYWORD:
            case STREAM_KEYWORD:
            case STRING_KEYWORD:
            case TABLE_KEYWORD:
            case TRANSACTION_KEYWORD:
            case TYPEDESC_KEYWORD:
            case XML_KEYWORD:
                return true;
            default:
                return false;
        }
    }

    // A predeclared prefix only forms a qualified identifier when followed by a colon.
    private boolean isQualifiedIdentifierPredeclaredPrefix(SyntaxKind nodeKind) {
        return isPredeclaredPrefix(nodeKind) && getNextNextToken().kind == SyntaxKind.COLON_TOKEN;
    }

    // Map a built-in type keyword to its type-descriptor syntax kind.
    private SyntaxKind getBuiltinTypeSyntaxKind(SyntaxKind typeKeyword) {
        switch (typeKeyword) {
            case INT_KEYWORD:
                return SyntaxKind.INT_TYPE_DESC;
            case FLOAT_KEYWORD:
                return SyntaxKind.FLOAT_TYPE_DESC;
            case DECIMAL_KEYWORD:
                return SyntaxKind.DECIMAL_TYPE_DESC;
            case BOOLEAN_KEYWORD:
                return SyntaxKind.BOOLEAN_TYPE_DESC;
            case STRING_KEYWORD:
                return SyntaxKind.STRING_TYPE_DESC;
            case BYTE_KEYWORD:
                return SyntaxKind.BYTE_TYPE_DESC;
            case JSON_KEYWORD:
                return SyntaxKind.JSON_TYPE_DESC;
            case HANDLE_KEYWORD:
                return SyntaxKind.HANDLE_TYPE_DESC;
            case ANY_KEYWORD:
                return SyntaxKind.ANY_TYPE_DESC;
            case ANYDATA_KEYWORD:
                return SyntaxKind.ANYDATA_TYPE_DESC;
            case NEVER_KEYWORD:
                return SyntaxKind.NEVER_TYPE_DESC;
            case VAR_KEYWORD:
                return SyntaxKind.VAR_TYPE_DESC;
            case READONLY_KEYWORD:
                return SyntaxKind.READONLY_TYPE_DESC;
            default:
                assert false : typeKeyword + " is not a built-in type";
                return SyntaxKind.TYPE_REFERENCE;
        }
    }

    /**
     * Parse fork-keyword.
     *
     * @return Fork-keyword node
     */
    private STNode parseForkKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.FORK_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.FORK_KEYWORD);
            return parseForkKeyword();
        }
    }

    /**
     * Parse fork statement.
     * <code>fork-stmt := fork { named-worker-decl+ }</code>
     *
     * @return Fork statement
     */
    private STNode parseForkStatement() {
        startContext(ParserRuleContext.FORK_STMT);
        STNode forkKeyword = parseForkKeyword();
        STNode openBrace = parseOpenBrace();
        ArrayList<STNode> workers = new ArrayList<>();
        while (!isEndOfStatements()) {
            STNode stmt = parseStatement();
            if (stmt == null) {
                break;
            }
            if (stmt.kind == SyntaxKind.LOCAL_TYPE_DEFINITION_STATEMENT) {
                addInvalidNodeToNextToken(stmt, DiagnosticErrorCode.ERROR_LOCAL_TYPE_DEFINITION_NOT_ALLOWED);
                continue;
            }
            switch (stmt.kind) {
                case NAMED_WORKER_DECLARATION:
                    workers.add(stmt);
                    break;
                default:
                    // Only named workers may appear directly in a fork body; anything else is
                    // attached to the preceding node as invalid-node minutiae.
                    if (workers.isEmpty()) {
                        openBrace = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(openBrace, stmt,
                                DiagnosticErrorCode.ERROR_ONLY_NAMED_WORKERS_ALLOWED_HERE);
                    } else {
                        updateLastNodeInListWithInvalidNode(workers, stmt,
                                DiagnosticErrorCode.ERROR_ONLY_NAMED_WORKERS_ALLOWED_HERE);
                    }
            }
        }
        STNode namedWorkerDeclarations = STNodeFactory.createNodeList(workers);
        STNode closeBrace = parseCloseBrace();
        endContext();
        STNode forkStmt =
                STNodeFactory.createForkStatementNode(forkKeyword, openBrace, namedWorkerDeclarations, closeBrace);
        if (isNodeListEmpty(namedWorkerDeclarations)) {
            // The grammar requires at least one named worker decl.
            return SyntaxErrors.addDiagnostic(forkStmt,
                    DiagnosticErrorCode.ERROR_MISSING_NAMED_WORKER_DECLARATION_IN_FORK_STMT);
        }
        return forkStmt;
    }

    /**
     * Parse trap
expression. * <p> * <code> * trap-expr := trap expression * </code> * * @param allowActions Allow actions * @param isRhsExpr Whether this is a RHS expression or not * @return Trap expression node */ private STNode parseTrapExpression(boolean isRhsExpr, boolean allowActions, boolean isInConditionalExpr) { STNode trapKeyword = parseTrapKeyword(); STNode expr = parseExpression(OperatorPrecedence.TRAP, isRhsExpr, allowActions, isInConditionalExpr); if (isAction(expr)) { return STNodeFactory.createTrapExpressionNode(SyntaxKind.TRAP_ACTION, trapKeyword, expr); } return STNodeFactory.createTrapExpressionNode(SyntaxKind.TRAP_EXPRESSION, trapKeyword, expr); } /** * Parse trap-keyword. * * @return Trap-keyword node */ private STNode parseTrapKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.TRAP_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.TRAP_KEYWORD); return parseTrapKeyword(); } } /** * Parse list constructor expression. * <p> * <code> * list-constructor-expr := [ [ expr-list ] ] * <br/> * expr-list := expression (, expression)* * </code> * * @return Parsed node */ private STNode parseListConstructorExpr() { startContext(ParserRuleContext.LIST_CONSTRUCTOR); STNode openBracket = parseOpenBracket(); STNode expressions = parseOptionalExpressionsList(); STNode closeBracket = parseCloseBracket(); endContext(); return STNodeFactory.createListConstructorExpressionNode(openBracket, expressions, closeBracket); } /** * Parse optional expression list. 
* * @return Parsed node */ private STNode parseOptionalExpressionsList() { List<STNode> expressions = new ArrayList<>(); if (isEndOfListConstructor(peek().kind)) { return STNodeFactory.createEmptyNodeList(); } STNode expr = parseExpression(); expressions.add(expr); return parseOptionalExpressionsList(expressions); } private STNode parseOptionalExpressionsList(List<STNode> expressions) { STNode listConstructorMemberEnd; while (!isEndOfListConstructor(peek().kind)) { listConstructorMemberEnd = parseListConstructorMemberEnd(); if (listConstructorMemberEnd == null) { break; } expressions.add(listConstructorMemberEnd); STNode expr = parseExpression(); expressions.add(expr); } return STNodeFactory.createNodeList(expressions); } private boolean isEndOfListConstructor(SyntaxKind tokenKind) { switch (tokenKind) { case EOF_TOKEN: case CLOSE_BRACKET_TOKEN: return true; default: return false; } } private STNode parseListConstructorMemberEnd() { STToken nextToken = peek(); switch (nextToken.kind) { case COMMA_TOKEN: return consume(); case CLOSE_BRACKET_TOKEN: return null; default: recover(nextToken, ParserRuleContext.LIST_CONSTRUCTOR_MEMBER_END); return parseListConstructorMemberEnd(); } } /** * Parse foreach statement. * <code>foreach-stmt := foreach typed-binding-pattern in action-or-expr block-stmt [on-fail-clause]</code> * * @return foreach statement */ private STNode parseForEachStatement() { startContext(ParserRuleContext.FOREACH_STMT); STNode forEachKeyword = parseForEachKeyword(); STNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.FOREACH_STMT); STNode inKeyword = parseInKeyword(); STNode actionOrExpr = parseActionOrExpression(); STNode blockStatement = parseBlockNode(); endContext(); STNode onFailClause = parseOptionalOnFailClause(); return STNodeFactory.createForEachStatementNode(forEachKeyword, typedBindingPattern, inKeyword, actionOrExpr, blockStatement, onFailClause); } /** * Parse foreach-keyword. 
* * @return ForEach-keyword node */ private STNode parseForEachKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.FOREACH_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.FOREACH_KEYWORD); return parseForEachKeyword(); } } /** * Parse in-keyword. * * @return In-keyword node */ private STNode parseInKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.IN_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.IN_KEYWORD); return parseInKeyword(); } } /** * Parse type cast expression. * <p> * <code> * type-cast-expr := < type-cast-param > expression * <br/> * type-cast-param := [annots] type-descriptor | annots * </code> * * @return Parsed node */ private STNode parseTypeCastExpr(boolean isRhsExpr, boolean allowActions, boolean isInConditionalExpr) { startContext(ParserRuleContext.TYPE_CAST); STNode ltToken = parseLTToken(); return parseTypeCastExpr(ltToken, isRhsExpr, allowActions, isInConditionalExpr); } private STNode parseTypeCastExpr(STNode ltToken, boolean isRhsExpr, boolean allowActions, boolean isInConditionalExpr) { STNode typeCastParam = parseTypeCastParam(); STNode gtToken = parseGTToken(); endContext(); STNode expression = parseExpression(OperatorPrecedence.EXPRESSION_ACTION, isRhsExpr, allowActions, isInConditionalExpr); return STNodeFactory.createTypeCastExpressionNode(ltToken, typeCastParam, gtToken, expression); } private STNode parseTypeCastParam() { STNode annot; STNode type; STToken token = peek(); switch (token.kind) { case AT_TOKEN: annot = parseOptionalAnnotations(); token = peek(); if (isTypeStartingToken(token.kind)) { type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS); } else { type = STNodeFactory.createEmptyNode(); } break; default: annot = STNodeFactory.createEmptyNode(); type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS); break; } return STNodeFactory.createTypeCastParamNode(getAnnotations(annot), type); } /** * Parse table 
 * constructor expression.
 * <p>
 * <code>
 * table-constructor-expr-rhs := [ [row-list] ]
 * </code>
 *
 * @param tableKeyword tableKeyword that precedes this rhs
 * @param keySpecifier keySpecifier that precedes this rhs
 * @return Parsed node
 */
private STNode parseTableConstructorExprRhs(STNode tableKeyword, STNode keySpecifier) {
    switchContext(ParserRuleContext.TABLE_CONSTRUCTOR);
    STNode openBracket = parseOpenBracket();
    STNode rowList = parseRowList();
    STNode closeBracket = parseCloseBracket();
    return STNodeFactory.createTableConstructorExpressionNode(tableKeyword, keySpecifier, openBracket, rowList,
            closeBracket);
}

/**
 * Parse table-keyword.
 *
 * @return Table-keyword node
 */
private STNode parseTableKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.TABLE_KEYWORD) {
        return consume();
    } else {
        recover(token, ParserRuleContext.TABLE_KEYWORD);
        return parseTableKeyword();
    }
}

/**
 * Parse table rows.
 * <p>
 * <code>row-list := [ mapping-constructor-expr (, mapping-constructor-expr)* ]</code>
 *
 * @return Parsed node
 */
private STNode parseRowList() {
    STToken nextToken = peek();
    // Return an empty list for `[]`.
    if (isEndOfTableRowList(nextToken.kind)) {
        return STNodeFactory.createEmptyNodeList();
    }

    // First mapping constructor has no leading comma.
    List<STNode> mappings = new ArrayList<>();
    STNode mapExpr = parseMappingConstructorExpr();
    mappings.add(mapExpr);

    // Parse the remaining mapping constructors.
    nextToken = peek();
    STNode rowEnd;
    while (!isEndOfTableRowList(nextToken.kind)) {
        rowEnd = parseTableRowEnd();
        if (rowEnd == null) {
            break;
        }

        mappings.add(rowEnd);
        mapExpr = parseMappingConstructorExpr();
        mappings.add(mapExpr);
        nextToken = peek();
    }

    return STNodeFactory.createNodeList(mappings);
}

private boolean isEndOfTableRowList(SyntaxKind tokenKind) {
    switch (tokenKind) {
        case EOF_TOKEN:
        case CLOSE_BRACKET_TOKEN:
            return true;
        case COMMA_TOKEN:
        case OPEN_BRACE_TOKEN:
            return false;
        default:
            return isEndOfMappingConstructor(tokenKind);
    }
}

// Parses the separator after a table row: a comma, or null when the row list is done.
private STNode parseTableRowEnd() {
    switch (peek().kind) {
        case COMMA_TOKEN:
            return parseComma();
        case CLOSE_BRACKET_TOKEN:
        case EOF_TOKEN:
            return null;
        default:
            recover(peek(), ParserRuleContext.TABLE_ROW_END);
            return parseTableRowEnd();
    }
}

/**
 * Parse key specifier.
 * <p>
 * <code>key-specifier := key ( [ field-name (, field-name)* ] )</code>
 *
 * @return Parsed node
 */
private STNode parseKeySpecifier() {
    startContext(ParserRuleContext.KEY_SPECIFIER);
    STNode keyKeyword = parseKeyKeyword();
    STNode openParen = parseOpenParenthesis();
    STNode fieldNames = parseFieldNames();
    STNode closeParen = parseCloseParenthesis();
    endContext();
    return STNodeFactory.createKeySpecifierNode(keyKeyword, openParen, fieldNames, closeParen);
}

/**
 * Parse key-keyword.
 *
 * @return Key-keyword node
 */
private STNode parseKeyKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.KEY_KEYWORD) {
        return consume();
    }

    if (isKeyKeyword(token)) {
        // `key` is a soft keyword that may be lexed as an identifier; convert the token.
        return getKeyKeyword(consume());
    }

    recover(token, ParserRuleContext.KEY_KEYWORD);
    return parseKeyKeyword();
}

// True if the token is the identifier `key` (the soft keyword form).
static boolean isKeyKeyword(STToken token) {
    return token.kind == SyntaxKind.IDENTIFIER_TOKEN && LexerTerminals.KEY.equals(token.text());
}

// Re-creates an identifier token as a KEY_KEYWORD token, preserving minutiae and diagnostics.
private STNode getKeyKeyword(STToken token) {
    return STNodeFactory.createToken(SyntaxKind.KEY_KEYWORD, token.leadingMinutiae(), token.trailingMinutiae(),
            token.diagnostics());
}

/**
 * Parse field names.
 * <p>
 * <code>field-name-list := [ field-name (, field-name)* ]</code>
 *
 * @return Parsed node
 */
private STNode parseFieldNames() {
    STToken nextToken = peek();
    // Return an empty list when there are no field names.
    if (isEndOfFieldNamesList(nextToken.kind)) {
        return STNodeFactory.createEmptyNodeList();
    }

    // First field name has no leading comma.
    List<STNode> fieldNames = new ArrayList<>();
    STNode fieldName = parseVariableName();
    fieldNames.add(fieldName);

    // Parse the remaining field names.
    nextToken = peek();
    STNode leadingComma;
    while (!isEndOfFieldNamesList(nextToken.kind)) {
        leadingComma = parseComma();
        fieldNames.add(leadingComma);
        fieldName = parseVariableName();
        fieldNames.add(fieldName);
        nextToken = peek();
    }

    return STNodeFactory.createNodeList(fieldNames);
}

private boolean isEndOfFieldNamesList(SyntaxKind tokenKind) {
    switch (tokenKind) {
        case COMMA_TOKEN:
        case IDENTIFIER_TOKEN:
            return false;
        default:
            return true;
    }
}

/**
 * Parse error type descriptor.
 * <p>
 * error-type-descriptor := error [type-parameter]
 * type-parameter := < type-descriptor >
 * </p>
 *
 * @return Parsed node
 */
private STNode parseErrorTypeDescriptor() {
    STNode errorKeywordToken = parseErrorKeyword();
    return parseErrorTypeDescriptor(errorKeywordToken);
}

private STNode parseErrorTypeDescriptor(STNode errorKeywordToken) {
    STNode errorTypeParamsNode;
    STToken nextToken = peek();
    // The type parameter is optional.
    if (nextToken.kind == SyntaxKind.LT_TOKEN) {
        errorTypeParamsNode = parseTypeParameter();
    } else {
        errorTypeParamsNode = STNodeFactory.createEmptyNode();
    }
    return STNodeFactory.createErrorTypeDescriptorNode(errorKeywordToken, errorTypeParamsNode);
}

/**
 * Parse error-keyword.
 *
 * @return Parsed error-keyword node
 */
private STNode parseErrorKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.ERROR_KEYWORD) {
        return consume();
    } else {
        recover(token, ParserRuleContext.ERROR_KEYWORD);
        return parseErrorKeyword();
    }
}

/**
 * Parse typedesc type descriptor.
 * typedesc-type-descriptor := typedesc type-parameter
 *
 * @return Parsed typedesc type node
 */
private STNode parseTypedescTypeDescriptor(STNode typedescKeywordToken) {
    STNode typedescTypeParamsNode;
    STToken nextToken = peek();
    // The type parameter is optional.
    if (nextToken.kind == SyntaxKind.LT_TOKEN) {
        typedescTypeParamsNode = parseTypeParameter();
    } else {
        typedescTypeParamsNode = STNodeFactory.createEmptyNode();
    }
    return STNodeFactory.createTypedescTypeDescriptorNode(typedescKeywordToken, typedescTypeParamsNode);
}

/**
 * Parse stream type descriptor.
 * <p>
 * stream-type-descriptor := stream [stream-type-parameters]
 * stream-type-parameters := < type-descriptor [, type-descriptor]>
 * </p>
 *
 * @return Parsed stream type descriptor node
 */
private STNode parseStreamTypeDescriptor(STNode streamKeywordToken) {
    STNode streamTypeParamsNode;
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.LT_TOKEN) {
        streamTypeParamsNode = parseStreamTypeParamsNode();
    } else {
        streamTypeParamsNode = STNodeFactory.createEmptyNode();
    }
    return STNodeFactory.createStreamTypeDescriptorNode(streamKeywordToken, streamTypeParamsNode);
}

/**
 * Parse xml type descriptor.
 * xml-type-descriptor := xml type-parameter
 *
 * @return Parsed xml type descriptor node
 */
private STNode parseXmlTypeDescriptor(STNode xmlKeywordToken) {
    STNode typedescTypeParamsNode;
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.LT_TOKEN) {
        typedescTypeParamsNode = parseTypeParameter();
    } else {
        typedescTypeParamsNode = STNodeFactory.createEmptyNode();
    }
    return STNodeFactory.createXmlTypeDescriptorNode(xmlKeywordToken, typedescTypeParamsNode);
}

/**
 * Parse stream type params node.
 * <p>
 * stream-type-parameters := < type-descriptor [, type-descriptor]>
 * </p>
 *
 * @return Parsed stream type params node
 */
private STNode parseStreamTypeParamsNode() {
    STNode ltToken = parseLTToken();
    startContext(ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC);
    STNode leftTypeDescNode = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC);
    STNode streamTypedesc = parseStreamTypeParamsNode(ltToken, leftTypeDescNode);
    endContext();
    return streamTypedesc;
}

// Parses the optional second type parameter (`, type-descriptor`) and the closing `>`.
private STNode parseStreamTypeParamsNode(STNode ltToken, STNode leftTypeDescNode) {
    STNode commaToken, rightTypeDescNode, gtToken;
    switch (peek().kind) {
        case COMMA_TOKEN:
            commaToken = parseComma();
            rightTypeDescNode = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC);
            break;
        case GT_TOKEN:
            // Single type parameter: no comma, no right type desc.
            commaToken = STNodeFactory.createEmptyNode();
            rightTypeDescNode = STNodeFactory.createEmptyNode();
            break;
        default:
            recover(peek(), ParserRuleContext.STREAM_TYPE_FIRST_PARAM_RHS, ltToken, leftTypeDescNode);
            return parseStreamTypeParamsNode(ltToken, leftTypeDescNode);
    }

    gtToken = parseGTToken();
    return STNodeFactory.createStreamTypeParamsNode(ltToken, leftTypeDescNode, commaToken, rightTypeDescNode,
            gtToken);
}

/**
 * Parse stream-keyword.
 *
 * @return Parsed stream-keyword node
 */
private STNode parseStreamKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.STREAM_KEYWORD) {
        return consume();
    } else {
        recover(token, ParserRuleContext.STREAM_KEYWORD);
        return parseStreamKeyword();
    }
}

/**
 * Parse let expression.
 * <p>
 * <code>
 * let-expr := let let-var-decl [, let-var-decl]* in expression
 * </code>
 *
 * @return Parsed node
 */
private STNode parseLetExpression(boolean isRhsExpr) {
    STNode letKeyword = parseLetKeyword();
    STNode letVarDeclarations = parseLetVarDeclarations(ParserRuleContext.LET_EXPR_LET_VAR_DECL, isRhsExpr);
    STNode inKeyword = parseInKeyword();

    // If the variable declaration list is empty, attach a missing-let-var-decl diagnostic
    // to the `let` keyword.
    letKeyword = cloneWithDiagnosticIfListEmpty(letVarDeclarations, letKeyword,
            DiagnosticErrorCode.ERROR_MISSING_LET_VARIABLE_DECLARATION);

    // Body expression is parsed with QUERY precedence; actions are not allowed (allowActions = false).
    STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
    return STNodeFactory.createLetExpressionNode(letKeyword, letVarDeclarations, inKeyword, expression);
}

/**
 * Parse let-keyword.
 *
 * @return Let-keyword node
 */
private STNode parseLetKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.LET_KEYWORD) {
        return consume();
    } else {
        recover(token, ParserRuleContext.LET_KEYWORD);
        return parseLetKeyword();
    }
}

/**
 * Parse let variable declarations.
 * <p>
 * <code>let-var-decl-list := let-var-decl [, let-var-decl]*</code>
 *
 * @return Parsed node
 */
private STNode parseLetVarDeclarations(ParserRuleContext context, boolean isRhsExpr) {
    startContext(context);
    List<STNode> varDecls = new ArrayList<>();
    STToken nextToken = peek();

    // An empty node list is returned when no declarations are present; the caller
    // attaches the missing-declaration diagnostic.
    if (isEndOfLetVarDeclarations(nextToken.kind)) {
        endContext();
        return STNodeFactory.createEmptyNodeList();
    }

    // First declaration has no leading comma.
    STNode varDec = parseLetVarDecl(isRhsExpr);
    varDecls.add(varDec);

    // Parse the remaining declarations.
    nextToken = peek();
    STNode leadingComma;
    while (!isEndOfLetVarDeclarations(nextToken.kind)) {
        leadingComma = parseComma();
        varDecls.add(leadingComma);
        varDec = parseLetVarDecl(isRhsExpr);
        varDecls.add(varDec);
        nextToken = peek();
    }

    endContext();
    return STNodeFactory.createNodeList(varDecls);
}

private boolean isEndOfLetVarDeclarations(SyntaxKind tokenKind) {
    switch (tokenKind) {
        case COMMA_TOKEN:
        case AT_TOKEN:
            return false;
        case IN_KEYWORD:
            return true;
        default:
            // Anything that can start a type descriptor can start another declaration.
            return !isTypeStartingToken(tokenKind);
    }
}

/**
 * Parse let variable declaration.
* <p> * <code>let-var-decl := [annots] typed-binding-pattern = expression</code> * * @return Parsed node */ private STNode parseLetVarDecl(boolean isRhsExpr) { STNode annot = parseOptionalAnnotations(); STNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.LET_EXPR_LET_VAR_DECL); STNode assign = parseAssignOp(); STNode expression = parseExpression(OperatorPrecedence.ANON_FUNC_OR_LET, isRhsExpr, false); return STNodeFactory.createLetVariableDeclarationNode(annot, typedBindingPattern, assign, expression); } /** * Parse raw backtick string template expression. * <p> * <code>BacktickString := `expression`</code> * * @return Template expression node */ private STNode parseTemplateExpression() { STNode type = STNodeFactory.createEmptyNode(); STNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START); STNode content = parseTemplateContent(); STNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START); return STNodeFactory.createTemplateExpressionNode(SyntaxKind.RAW_TEMPLATE_EXPRESSION, type, startingBackTick, content, endingBackTick); } private STNode parseTemplateContent() { List<STNode> items = new ArrayList<>(); STToken nextToken = peek(); while (!isEndOfBacktickContent(nextToken.kind)) { STNode contentItem = parseTemplateItem(); items.add(contentItem); nextToken = peek(); } return STNodeFactory.createNodeList(items); } private boolean isEndOfBacktickContent(SyntaxKind kind) { switch (kind) { case EOF_TOKEN: case BACKTICK_TOKEN: return true; default: return false; } } private STNode parseTemplateItem() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.INTERPOLATION_START_TOKEN) { return parseInterpolation(); } return consume(); } /** * Parse string template expression. 
 * <p>
 * <code>string-template-expr := string ` expression `</code>
 *
 * @return String template expression node
 */
private STNode parseStringTemplateExpression() {
    STNode type = parseStringKeyword();
    STNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);
    STNode content = parseTemplateContent();
    STNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_END);
    return STNodeFactory.createTemplateExpressionNode(SyntaxKind.STRING_TEMPLATE_EXPRESSION, type, startingBackTick,
            content, endingBackTick);
}

/**
 * Parse <code>string</code> keyword.
 *
 * @return string keyword node
 */
private STNode parseStringKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.STRING_KEYWORD) {
        return consume();
    } else {
        recover(token, ParserRuleContext.STRING_KEYWORD);
        return parseStringKeyword();
    }
}

/**
 * Parse XML template expression.
 * <p>
 * <code>xml-template-expr := xml BacktickString</code>
 *
 * @return XML template expression
 */
private STNode parseXMLTemplateExpression() {
    STNode xmlKeyword = parseXMLKeyword();
    STNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);
    STNode content = parseTemplateContentAsXML();
    STNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_END);
    return STNodeFactory.createTemplateExpressionNode(SyntaxKind.XML_TEMPLATE_EXPRESSION, xmlKeyword,
            startingBackTick, content, endingBackTick);
}

/**
 * Parse <code>xml</code> keyword.
 *
 * @return xml keyword node
 */
private STNode parseXMLKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.XML_KEYWORD) {
        return consume();
    } else {
        recover(token, ParserRuleContext.XML_KEYWORD);
        return parseXMLKeyword();
    }
}

/**
 * Parse the content of the template string as XML. This method first read the
 * input in the same way as the raw-backtick-template (BacktickString). Then
 * it parses the content as XML.
 *
 * @return XML node
 */
private STNode parseTemplateContentAsXML() {
    // Collect the string pieces into a single buffer, replacing each interpolation with
    // a "${}" placeholder; the interpolated expression nodes are queued and handed to
    // the XMLParser together with the reconstructed string.
    ArrayDeque<STNode> expressions = new ArrayDeque<>();
    StringBuilder xmlStringBuilder = new StringBuilder();
    STToken nextToken = peek();
    while (!isEndOfBacktickContent(nextToken.kind)) {
        STNode contentItem = parseTemplateItem();
        if (contentItem.kind == SyntaxKind.TEMPLATE_STRING) {
            xmlStringBuilder.append(((STToken) contentItem).text());
        } else {
            xmlStringBuilder.append("${}");
            expressions.add(contentItem);
        }
        nextToken = peek();
    }

    CharReader charReader = CharReader.from(xmlStringBuilder.toString());
    AbstractTokenReader tokenReader = new TokenReader(new XMLLexer(charReader));
    XMLParser xmlParser = new XMLParser(tokenReader, expressions);
    return xmlParser.parse();
}

/**
 * Parse interpolation of a back-tick string.
 * <p>
 * <code>
 * interpolation := ${ expression }
 * </code>
 *
 * @return Interpolation node
 */
private STNode parseInterpolation() {
    startContext(ParserRuleContext.INTERPOLATION);
    STNode interpolStart = parseInterpolationStart();
    STNode expr = parseExpression();

    // Anything between the expression and the closing brace is attached to the
    // expression as invalid-token minutiae.
    while (!isEndOfInterpolation()) {
        STToken nextToken = consume();
        expr = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(expr, nextToken,
                DiagnosticErrorCode.ERROR_INVALID_TOKEN, nextToken.text());
    }

    STNode closeBrace = parseCloseBrace();
    endContext();
    return STNodeFactory.createInterpolationNode(interpolStart, expr, closeBrace);
}

private boolean isEndOfInterpolation() {
    SyntaxKind nextTokenKind = peek().kind;
    switch (nextTokenKind) {
        case EOF_TOKEN:
        case BACKTICK_TOKEN:
            return true;
        default:
            // A close-brace only terminates the interpolation once the lexer has left
            // the interpolation modes.
            ParserMode currentLexerMode = this.tokenReader.getCurrentMode();
            return nextTokenKind == SyntaxKind.CLOSE_BRACE_TOKEN && currentLexerMode != ParserMode.INTERPOLATION &&
                    currentLexerMode != ParserMode.INTERPOLATION_BRACED_CONTENT;
    }
}

/**
 * Parse interpolation start token.
 * <p>
 * <code>interpolation-start := ${</code>
 *
 * @return Interpolation start token
 */
private STNode parseInterpolationStart() {
    STToken token = peek();
    if (token.kind == SyntaxKind.INTERPOLATION_START_TOKEN) {
        return consume();
    } else {
        recover(token, ParserRuleContext.INTERPOLATION_START_TOKEN);
        return parseInterpolationStart();
    }
}

/**
 * Parse back-tick token.
 *
 * @return Back-tick token
 */
private STNode parseBacktickToken(ParserRuleContext ctx) {
    STToken token = peek();
    if (token.kind == SyntaxKind.BACKTICK_TOKEN) {
        return consume();
    } else {
        recover(token, ctx);
        return parseBacktickToken(ctx);
    }
}

/**
 * Parse table type descriptor.
 * <p>
 * table-type-descriptor := table row-type-parameter [key-constraint]
 * row-type-parameter := type-parameter
 * key-constraint := key-specifier | key-type-constraint
 * key-specifier := key ( [ field-name (, field-name)* ] )
 * key-type-constraint := key type-parameter
 * </p>
 *
 * @return Parsed table type desc node.
 */
private STNode parseTableTypeDescriptor(STNode tableKeywordToken) {
    STNode rowTypeParameterNode = parseRowTypeParameter();
    STNode keyConstraintNode;
    STToken nextToken = peek();
    // Key constraint is optional; `key` is a soft keyword lexed as an identifier.
    if (isKeyKeyword(nextToken)) {
        STNode keyKeywordToken = getKeyKeyword(consume());
        keyConstraintNode = parseKeyConstraint(keyKeywordToken);
    } else {
        keyConstraintNode = STNodeFactory.createEmptyNode();
    }
    return STNodeFactory.createTableTypeDescriptorNode(tableKeywordToken, rowTypeParameterNode, keyConstraintNode);
}

/**
 * Parse row type parameter node.
 * <p>
 * row-type-parameter := type-parameter
 * </p>
 *
 * @return Parsed node.
 */
private STNode parseRowTypeParameter() {
    startContext(ParserRuleContext.ROW_TYPE_PARAM);
    STNode rowTypeParameterNode = parseTypeParameter();
    endContext();
    return rowTypeParameterNode;
}

/**
 * Parse type parameter node.
 * <p>
 * type-parameter := < type-descriptor >
 * </p>
 *
 * @return Parsed node
 */
private STNode parseTypeParameter() {
    STNode ltToken = parseLTToken();
    STNode typeNode = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);
    STNode gtToken = parseGTToken();
    return STNodeFactory.createTypeParameterNode(ltToken, typeNode, gtToken);
}

/**
 * Parse key constraint.
 * <p>
 * key-constraint := key-specifier | key-type-constraint
 * </p>
 *
 * @return Parsed node.
 */
private STNode parseKeyConstraint(STNode keyKeywordToken) {
    switch (peek().kind) {
        case OPEN_PAREN_TOKEN:
            return parseKeySpecifier(keyKeywordToken);
        case LT_TOKEN:
            return parseKeyTypeConstraint(keyKeywordToken);
        default:
            recover(peek(), ParserRuleContext.KEY_CONSTRAINTS_RHS, keyKeywordToken);
            return parseKeyConstraint(keyKeywordToken);
    }
}

/**
 * Parse key specifier given parsed key keyword token.
 * <p>
 * <code>key-specifier := key ( [ field-name (, field-name)* ] )</code>
 *
 * @return Parsed node
 */
private STNode parseKeySpecifier(STNode keyKeywordToken) {
    startContext(ParserRuleContext.KEY_SPECIFIER);
    STNode openParenToken = parseOpenParenthesis();
    STNode fieldNamesNode = parseFieldNames();
    STNode closeParenToken = parseCloseParenthesis();
    endContext();
    return STNodeFactory.createKeySpecifierNode(keyKeywordToken, openParenToken, fieldNamesNode, closeParenToken);
}

/**
 * Parse key type constraint.
 * <p>
 * key-type-constraint := key type-parameter
 * </p>
 *
 * @return Parsed node
 */
private STNode parseKeyTypeConstraint(STNode keyKeywordToken) {
    STNode typeParameterNode = parseTypeParameter();
    return STNodeFactory.createKeyTypeConstraintNode(keyKeywordToken, typeParameterNode);
}

/**
 * Parse function type descriptor.
 * <p>
 * <code>function-type-descriptor := [isolated] function function-signature</code>
 *
 * @param qualifiers Preceding type descriptor qualifiers
 * @return Function type descriptor node
 */
private STNode parseFunctionTypeDesc(List<STNode> qualifiers) {
    startContext(ParserRuleContext.FUNC_TYPE_DESC);
    STNode qualifierList;
    STNode functionKeyword = parseFunctionKeyword();
    STNode signature;
    switch (peek().kind) {
        case OPEN_PAREN_TOKEN:
            signature = parseFuncSignature(true);
            qualifierList = createFuncTypeQualNodeList(qualifiers, true);
            break;
        default:
            // Function type without a signature: qualifiers are validated more strictly.
            signature = STNodeFactory.createEmptyNode();
            qualifierList = createFuncTypeQualNodeList(qualifiers, false);
            break;
    }

    endContext();
    return STNodeFactory.createFunctionTypeDescriptorNode(qualifierList, functionKeyword, signature);
}

/**
 * Validate function type qualifiers: drop duplicates and disallowed qualifiers,
 * attaching them as invalid minutiae with the appropriate diagnostic.
 *
 * @param qualifierList    Qualifiers to validate
 * @param hasFuncSignature Whether the function type has a signature
 * @return Validated qualifier node list
 */
private STNode createFuncTypeQualNodeList(List<STNode> qualifierList, boolean hasFuncSignature) {
    List<STNode> validatedList = new ArrayList<>();

    for (int i = 0; i < qualifierList.size(); i++) {
        STNode qualifier = qualifierList.get(i);
        int nextIndex = i + 1;

        if (isSyntaxKindInList(validatedList, qualifier.kind)) {
            // Duplicate qualifier: attach to the previously accepted one.
            updateLastNodeInListWithInvalidNode(validatedList, qualifier,
                    DiagnosticErrorCode.ERROR_DUPLICATE_QUALIFIER, ((STToken) qualifier).text());
        } else if (hasFuncSignature && isRegularFuncQual(qualifier.kind)) {
            validatedList.add(qualifier);
        } else if (qualifier.kind == SyntaxKind.ISOLATED_KEYWORD) {
            // `isolated` is always allowed, even without a signature.
            validatedList.add(qualifier);
        } else if (qualifierList.size() == nextIndex) {
            // Disallowed qualifier in the last position: attach to the next token.
            addInvalidNodeToNextToken(qualifier, DiagnosticErrorCode.ERROR_QUALIFIER_NOT_ALLOWED,
                    ((STToken) qualifier).text());
        } else {
            // Disallowed qualifier with a successor: attach to the following qualifier.
            updateANodeInListWithLeadingInvalidNode(qualifierList, nextIndex, qualifier,
                    DiagnosticErrorCode.ERROR_QUALIFIER_NOT_ALLOWED, ((STToken) qualifier).text());
        }
    }

    return STNodeFactory.createNodeList(validatedList);
}

// Qualifiers accepted on a function type that has a signature.
private boolean isRegularFuncQual(SyntaxKind tokenKind) {
    switch (tokenKind) {
        case ISOLATED_KEYWORD:
        case TRANSACTIONAL_KEYWORD:
            return true;
        default:
            return false;
    }
}

/**
 * Parse explicit anonymous function expression.
 * <p>
 * <code>explicit-anonymous-function-expr :=
 * [annots] (isolated| transactional) function function-signature anon-func-body</code>
 *
 * @param annots     Annotations.
 * @param qualifiers Function qualifiers
 * @param isRhsExpr  Is expression in rhs context
 * @return Anonymous function expression node
 */
private STNode parseExplicitFunctionExpression(STNode annots, List<STNode> qualifiers, boolean isRhsExpr) {
    startContext(ParserRuleContext.ANON_FUNC_EXPRESSION);
    STNode qualifierList = createFuncTypeQualNodeList(qualifiers, true);
    STNode funcKeyword = parseFunctionKeyword();
    STNode funcSignature = parseFuncSignature(false);
    // The ANON_FUNC_EXPRESSION context is ended inside parseAnonFuncBody().
    STNode funcBody = parseAnonFuncBody(isRhsExpr);
    return STNodeFactory.createExplicitAnonymousFunctionExpressionNode(annots, qualifierList, funcKeyword,
            funcSignature, funcBody);
}

/**
 * Parse anonymous function body.
 * <p>
 * <code>anon-func-body := block-function-body | expr-function-body</code>
 *
 * @param isRhsExpr Is expression in rhs context
 * @return Anon function body node
 */
private STNode parseAnonFuncBody(boolean isRhsExpr) {
    switch (peek().kind) {
        case OPEN_BRACE_TOKEN:
        case EOF_TOKEN:
            STNode body = parseFunctionBodyBlock(true);
            endContext();
            return body;
        case RIGHT_DOUBLE_ARROW_TOKEN:
            // End the anon-func context before parsing the body expression.
            endContext();
            return parseExpressionFuncBody(true, isRhsExpr);
        default:
            recover(peek(), ParserRuleContext.ANON_FUNC_BODY, isRhsExpr);
            return parseAnonFuncBody(isRhsExpr);
    }
}

/**
 * Parse expression function body.
 * <p>
 * <code>expr-function-body := => expression</code>
 *
 * @param isAnon Is anonymous function.
 * @param isRhsExpr Is expression in rhs context
 * @return Expression function body node
 */
private STNode parseExpressionFuncBody(boolean isAnon, boolean isRhsExpr) {
    STNode rightDoubleArrow = parseDoubleRightArrow();

    // Body is parsed with QUERY precedence; actions are not allowed (allowActions = false).
    STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);

    STNode semiColon;
    if (isAnon) {
        // Anonymous function bodies are not terminated by a semicolon.
        semiColon = STNodeFactory.createEmptyNode();
    } else {
        semiColon = parseSemicolon();
    }
    return STNodeFactory.createExpressionFunctionBodyNode(rightDoubleArrow, expression, semiColon);
}

/**
 * Parse '=>' token.
 *
 * @return Double right arrow token
 */
private STNode parseDoubleRightArrow() {
    STToken token = peek();
    if (token.kind == SyntaxKind.RIGHT_DOUBLE_ARROW_TOKEN) {
        return consume();
    } else {
        recover(token, ParserRuleContext.EXPR_FUNC_BODY_START);
        return parseDoubleRightArrow();
    }
}

// Parses the `=> expression` part of an implicit anon function, validating the
// already-parsed parameter node.
private STNode parseImplicitAnonFunc(STNode params, boolean isRhsExpr) {
    switch (params.kind) {
        case SIMPLE_NAME_REFERENCE:
        case INFER_PARAM_LIST:
            break;
        case BRACED_EXPRESSION:
            params = getAnonFuncParam((STBracedExpressionNode) params);
            break;
        default:
            // Any other node is not a valid parameter list: replace it with a synthetic
            // missing identifier, attaching the original node as invalid minutiae.
            STToken syntheticParam = STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
            syntheticParam = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(syntheticParam, params,
                    DiagnosticErrorCode.ERROR_INVALID_PARAM_LIST_IN_INFER_ANONYMOUS_FUNCTION_EXPR);
            params = STNodeFactory.createSimpleNameReferenceNode(syntheticParam);
    }

    STNode rightDoubleArrow = parseDoubleRightArrow();
    // Body is parsed with QUERY precedence; actions are not allowed (allowActions = false).
    STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
    return STNodeFactory.createImplicitAnonymousFunctionExpressionNode(params, rightDoubleArrow, expression);
}

/**
 * Create a new anon-func-param node from a braced expression.
 *
 * @param bracedExpression Braced expression
 * @return Anon-func param node
 */
private STNode getAnonFuncParam(STBracedExpressionNode bracedExpression) {
    List<STNode> paramList = new ArrayList<>();
    STNode innerExpression = bracedExpression.expression;
    STNode openParen = bracedExpression.openParen;
    if (innerExpression.kind == SyntaxKind.SIMPLE_NAME_REFERENCE) {
        paramList.add(innerExpression);
    } else {
        // Only a simple name reference is a valid parameter; otherwise attach the
        // inner expression to the open-paren as invalid minutiae.
        openParen = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(openParen, innerExpression,
                DiagnosticErrorCode.ERROR_INVALID_PARAM_LIST_IN_INFER_ANONYMOUS_FUNCTION_EXPR);
    }
    return STNodeFactory.createImplicitAnonymousFunctionParameters(openParen,
            STNodeFactory.createNodeList(paramList), bracedExpression.closeParen);
}

/**
 * Parse implicit anon function expression.
 *
 * @param openParen  Open parenthesis token
 * @param firstParam First parameter
 * @param isRhsExpr  Is expression in rhs context
 * @return Implicit anon function expression node
 */
private STNode parseImplicitAnonFunc(STNode openParen, STNode firstParam, boolean isRhsExpr) {
    List<STNode> paramList = new ArrayList<>();
    paramList.add(firstParam);

    // Parse the remaining params.
    STToken nextToken = peek();
    STNode paramEnd;
    STNode param;
    while (!isEndOfAnonFuncParametersList(nextToken.kind)) {
        paramEnd = parseImplicitAnonFuncParamEnd();
        if (paramEnd == null) {
            break;
        }

        paramList.add(paramEnd);
        param = parseIdentifier(ParserRuleContext.IMPLICIT_ANON_FUNC_PARAM);
        param = STNodeFactory.createSimpleNameReferenceNode(param);
        paramList.add(param);
        nextToken = peek();
    }

    STNode params = STNodeFactory.createNodeList(paramList);
    STNode closeParen = parseCloseParenthesis();
    // NOTE(review): ends a context started by the caller before the param list — confirm pairing.
    endContext();

    STNode inferedParams = STNodeFactory.createImplicitAnonymousFunctionParameters(openParen, params, closeParen);
    return parseImplicitAnonFunc(inferedParams, isRhsExpr);
}

// Parses the separator after an implicit anon-func param: a comma, or null at `)`.
private STNode parseImplicitAnonFuncParamEnd() {
    switch (peek().kind) {
        case COMMA_TOKEN:
            return parseComma();
        case CLOSE_PAREN_TOKEN:
            return null;
        default:
            recover(peek(), ParserRuleContext.ANON_FUNC_PARAM_RHS);
            return parseImplicitAnonFuncParamEnd();
    }
}

private boolean isEndOfAnonFuncParametersList(SyntaxKind tokenKind) {
    switch (tokenKind) {
        case EOF_TOKEN:
        case CLOSE_BRACE_TOKEN:
        case CLOSE_PAREN_TOKEN:
        case CLOSE_BRACKET_TOKEN:
        case SEMICOLON_TOKEN:
        case RETURNS_KEYWORD:
        case TYPE_KEYWORD:
        case LISTENER_KEYWORD:
        case IF_KEYWORD:
        case WHILE_KEYWORD:
        case DO_KEYWORD:
        case OPEN_BRACE_TOKEN:
        case RIGHT_DOUBLE_ARROW_TOKEN:
            return true;
        default:
            return false;
    }
}

/**
 * Parse tuple type descriptor.
 * <p>
 * <code>tuple-type-descriptor := [ tuple-member-type-descriptors ]
 * <br/><br/>
 * tuple-member-type-descriptors := member-type-descriptor (, member-type-descriptor)* [, tuple-rest-descriptor]
 *                                | [ tuple-rest-descriptor ]
 * <br/><br/>
 * tuple-rest-descriptor := type-descriptor ...
 * </code>
 *
 * @return Parsed tuple type descriptor node
 */
private STNode parseTupleTypeDesc() {
    STNode openBracket = parseOpenBracket();
    startContext(ParserRuleContext.TYPE_DESC_IN_TUPLE);
    STNode memberTypeDesc = parseTupleMemberTypeDescList();
    STNode closeBracket = parseCloseBracket();
    endContext();

    // If the member list is empty, attach a missing-type diagnostic to the open bracket.
    openBracket = cloneWithDiagnosticIfListEmpty(memberTypeDesc, openBracket,
            DiagnosticErrorCode.ERROR_MISSING_TYPE_DESC);

    return STNodeFactory.createTupleTypeDescriptorNode(openBracket, memberTypeDesc, closeBracket);
}

/**
 * Parse tuple member type descriptors.
 *
 * @return Parsed node
 */
private STNode parseTupleMemberTypeDescList() {
    List<STNode> typeDescList = new ArrayList<>();
    STToken nextToken = peek();

    // Return an empty list for `[]`.
    if (isEndOfTypeList(nextToken.kind)) {
        return STNodeFactory.createEmptyNodeList();
    }

    // First type descriptor has no leading comma.
    STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
    return parseTupleTypeMembers(typeDesc, typeDescList);
}

// Parses the remaining comma-separated tuple members, given the already-parsed first member.
private STNode parseTupleTypeMembers(STNode typeDesc, List<STNode> typeDescList) {
    STToken nextToken;
    nextToken = peek();
    STNode tupleMemberRhs;
    while (!isEndOfTypeList(nextToken.kind)) {
        tupleMemberRhs = parseTupleMemberRhs();
        if (tupleMemberRhs == null) {
            break;
        }
        if (tupleMemberRhs.kind == SyntaxKind.ELLIPSIS_TOKEN) {
            // A trailing `...` turns the last member into a rest descriptor and ends the list.
            typeDesc = STNodeFactory.createRestDescriptorNode(typeDesc, tupleMemberRhs);
            break;
        }
        typeDescList.add(typeDesc);
        typeDescList.add(tupleMemberRhs);
        typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
        nextToken = peek();
    }

    typeDescList.add(typeDesc);
    return STNodeFactory.createNodeList(typeDescList);
}

// Parses what follows a tuple member: a comma, an ellipsis (`...`), or null at `]`.
private STNode parseTupleMemberRhs() {
    switch (peek().kind) {
        case COMMA_TOKEN:
            return parseComma();
        case CLOSE_BRACKET_TOKEN:
            return null;
        case ELLIPSIS_TOKEN:
            return parseEllipsis();
        default:
            recover(peek(), ParserRuleContext.TYPE_DESC_IN_TUPLE_RHS);
            return parseTupleMemberRhs();
    }
}

private boolean isEndOfTypeList(SyntaxKind nextTokenKind) {
    switch (nextTokenKind) {
        case CLOSE_BRACKET_TOKEN:
        case CLOSE_BRACE_TOKEN:
        case CLOSE_PAREN_TOKEN:
        case EOF_TOKEN:
        case EQUAL_TOKEN:
        case SEMICOLON_TOKEN:
            return true;
        default:
            return false;
    }
}

/**
 * Parse table constructor or query expression.
 * <p>
 * <code>
 * table-constructor-or-query-expr := table-constructor-expr | query-expr
 * <br/>
 * table-constructor-expr := table [key-specifier] [ [row-list] ]
 * <br/>
 * query-expr := [query-construct-type] query-pipeline select-clause
 *               [query-construct-type] query-pipeline select-clause on-conflict-clause?
 * <br/>
 * query-construct-type := table key-specifier | stream
 * </code>
 *
 * @return Parsed node
 */
private STNode parseTableConstructorOrQuery(boolean isRhsExpr) {
    startContext(ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_EXPRESSION);
    STNode tableOrQueryExpr = parseTableConstructorOrQueryInternal(isRhsExpr);
    endContext();
    return tableOrQueryExpr;
}

// Dispatches on the leading keyword: `from` (plain query), `stream` (stream query),
// or `table` (table constructor or table query).
private STNode parseTableConstructorOrQueryInternal(boolean isRhsExpr) {
    STNode queryConstructType;
    switch (peek().kind) {
        case FROM_KEYWORD:
            // Query with no construct type.
            queryConstructType = STNodeFactory.createEmptyNode();
            return parseQueryExprRhs(queryConstructType, isRhsExpr);
        case STREAM_KEYWORD:
            queryConstructType = parseQueryConstructType(parseStreamKeyword(), null);
            return parseQueryExprRhs(queryConstructType, isRhsExpr);
        case TABLE_KEYWORD:
            STNode tableKeyword = parseTableKeyword();
            return parseTableConstructorOrQuery(tableKeyword, isRhsExpr);
        default:
            recover(peek(), ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_START, isRhsExpr);
            return parseTableConstructorOrQueryInternal(isRhsExpr);
    }
}

private STNode parseTableConstructorOrQuery(STNode tableKeyword, boolean isRhsExpr) {
    STNode keySpecifier;
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case OPEN_BRACKET_TOKEN:
            // Table constructor with no key specifier.
            keySpecifier = STNodeFactory.createEmptyNode();
            return parseTableConstructorExprRhs(tableKeyword, keySpecifier);
        case KEY_KEYWORD:
            keySpecifier = parseKeySpecifier();
            return parseTableConstructorOrQueryRhs(tableKeyword, keySpecifier, isRhsExpr);
        case IDENTIFIER_TOKEN:
            // `key` may be lexed as a plain identifier; check its text.
            if (isKeyKeyword(nextToken)) {
                keySpecifier = parseKeySpecifier();
                return parseTableConstructorOrQueryRhs(tableKeyword, keySpecifier, isRhsExpr);
            }
            break;
        default:
            break;
    }

    recover(peek(), ParserRuleContext.TABLE_KEYWORD_RHS, tableKeyword, isRhsExpr);
    return parseTableConstructorOrQuery(tableKeyword, isRhsExpr);
}

// After `table key(...)`: a `from` begins a table query, an open bracket begins a constructor.
private STNode parseTableConstructorOrQueryRhs(STNode tableKeyword, STNode keySpecifier, boolean isRhsExpr) {
    switch (peek().kind) {
        case FROM_KEYWORD:
            return parseQueryExprRhs(parseQueryConstructType(tableKeyword, keySpecifier), isRhsExpr);
        case OPEN_BRACKET_TOKEN:
            return parseTableConstructorExprRhs(tableKeyword, keySpecifier);
        default:
            recover(peek(), ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_RHS, tableKeyword, keySpecifier,
                    isRhsExpr);
            return parseTableConstructorOrQueryRhs(tableKeyword, keySpecifier, isRhsExpr);
    }
}

/**
 * Parse query construct type.
 * <p>
 * <code>query-construct-type := table key-specifier | stream</code>
 *
 * @return Parsed node
 */
private STNode parseQueryConstructType(STNode keyword, STNode keySpecifier) {
    return STNodeFactory.createQueryConstructTypeNode(keyword, keySpecifier);
}

/**
 * Parse query action or expression.
 * <p>
 * <code>
 * query-expr-rhs := query-pipeline select-clause
 *                   query-pipeline select-clause on-conflict-clause?
 * <br/>
 * query-pipeline := from-clause intermediate-clause*
 * </code>
 *
 * @param queryConstructType queryConstructType that precedes this rhs
 * @return Parsed node
 */
private STNode parseQueryExprRhs(STNode queryConstructType, boolean isRhsExpr) {
    switchContext(ParserRuleContext.QUERY_EXPRESSION);
    STNode fromClause = parseFromClause(isRhsExpr);

    List<STNode> clauses = new ArrayList<>();
    STNode intermediateClause;
    STNode selectClause = null;
    while (!isEndOfIntermediateClause(peek().kind)) {
        intermediateClause = parseIntermediateClause(isRhsExpr);
        if (intermediateClause == null) {
            break;
        }

        // Clauses after the select clause are attached to it as invalid minutiae.
        if (selectClause != null) {
            selectClause = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(selectClause, intermediateClause,
                    DiagnosticErrorCode.ERROR_MORE_CLAUSES_AFTER_SELECT_CLAUSE);
            continue;
        }

        if (intermediateClause.kind == SyntaxKind.SELECT_CLAUSE) {
            selectClause = intermediateClause;
        } else {
            clauses.add(intermediateClause);
        }
    }

    // A `do` keyword means this is a query action, not a query expression.
    if (peek().kind == SyntaxKind.DO_KEYWORD) {
        STNode intermediateClauses = STNodeFactory.createNodeList(clauses);
        STNode queryPipeline = STNodeFactory.createQueryPipelineNode(fromClause, intermediateClauses);
        return parseQueryAction(queryConstructType, queryPipeline, selectClause, isRhsExpr);
    }

    if (selectClause == null) {
        // Missing select clause: synthesize one from missing tokens, and attach the
        // diagnostic to the last intermediate clause (or to the from-clause if none).
        STNode selectKeyword = SyntaxErrors.createMissingToken(SyntaxKind.SELECT_KEYWORD);
        STNode expr = STNodeFactory
                .createSimpleNameReferenceNode(SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN));
        selectClause = STNodeFactory.createSelectClauseNode(selectKeyword, expr);

        if (clauses.isEmpty()) {
            fromClause = SyntaxErrors.addDiagnostic(fromClause, DiagnosticErrorCode.ERROR_MISSING_SELECT_CLAUSE);
        } else {
            int lastIndex = clauses.size() - 1;
            STNode intClauseWithDiagnostic = SyntaxErrors.addDiagnostic(clauses.get(lastIndex),
                    DiagnosticErrorCode.ERROR_MISSING_SELECT_CLAUSE);
            clauses.set(lastIndex, intClauseWithDiagnostic);
        }
    }

    STNode intermediateClauses = STNodeFactory.createNodeList(clauses);
    STNode queryPipeline = STNodeFactory.createQueryPipelineNode(fromClause, intermediateClauses);
    STNode onConflictClause = parseOnConflictClause(isRhsExpr);
    return STNodeFactory.createQueryExpressionNode(queryConstructType, queryPipeline, selectClause,
            onConflictClause);
}

/**
 * Parse an intermediate clause.
* <p> * <code> * intermediate-clause := from-clause | where-clause | let-clause | join-clause | limit-clause | order-by-clause * </code> * * @return Parsed node */ private STNode parseIntermediateClause(boolean isRhsExpr) { switch (peek().kind) { case FROM_KEYWORD: return parseFromClause(isRhsExpr); case WHERE_KEYWORD: return parseWhereClause(isRhsExpr); case LET_KEYWORD: return parseLetClause(isRhsExpr); case SELECT_KEYWORD: return parseSelectClause(isRhsExpr); case JOIN_KEYWORD: case OUTER_KEYWORD: return parseJoinClause(isRhsExpr); case ORDER_KEYWORD: case BY_KEYWORD: case ASCENDING_KEYWORD: case DESCENDING_KEYWORD: return parseOrderByClause(isRhsExpr); case LIMIT_KEYWORD: return parseLimitClause(isRhsExpr); case DO_KEYWORD: case SEMICOLON_TOKEN: case ON_KEYWORD: case CONFLICT_KEYWORD: return null; default: recover(peek(), ParserRuleContext.QUERY_PIPELINE_RHS, isRhsExpr); return parseIntermediateClause(isRhsExpr); } } /** * Parse join-keyword. * * @return Join-keyword node */ private STNode parseJoinKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.JOIN_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.JOIN_KEYWORD); return parseJoinKeyword(); } } /** * Parse equals keyword. 
* * @return Parsed node */ private STNode parseEqualsKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.EQUALS_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.EQUALS_KEYWORD); return parseEqualsKeyword(); } } private boolean isEndOfIntermediateClause(SyntaxKind tokenKind) { switch (tokenKind) { case CLOSE_BRACE_TOKEN: case CLOSE_PAREN_TOKEN: case CLOSE_BRACKET_TOKEN: case OPEN_BRACE_TOKEN: case SEMICOLON_TOKEN: case PUBLIC_KEYWORD: case FUNCTION_KEYWORD: case EOF_TOKEN: case RESOURCE_KEYWORD: case LISTENER_KEYWORD: case DOCUMENTATION_STRING: case PRIVATE_KEYWORD: case RETURNS_KEYWORD: case SERVICE_KEYWORD: case TYPE_KEYWORD: case CONST_KEYWORD: case FINAL_KEYWORD: case DO_KEYWORD: return true; default: return isValidExprRhsStart(tokenKind, SyntaxKind.NONE); } } /** * Parse from clause. * <p> * <code>from-clause := from typed-binding-pattern in expression</code> * * @return Parsed node */ private STNode parseFromClause(boolean isRhsExpr) { STNode fromKeyword = parseFromKeyword(); STNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.FROM_CLAUSE); STNode inKeyword = parseInKeyword(); STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false); return STNodeFactory.createFromClauseNode(fromKeyword, typedBindingPattern, inKeyword, expression); } /** * Parse from-keyword. * * @return From-keyword node */ private STNode parseFromKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.FROM_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.FROM_KEYWORD); return parseFromKeyword(); } } /** * Parse where clause. * <p> * <code>where-clause := where expression</code> * * @return Parsed node */ private STNode parseWhereClause(boolean isRhsExpr) { STNode whereKeyword = parseWhereKeyword(); STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false); return STNodeFactory.createWhereClauseNode(whereKeyword, expression); } /** * Parse where-keyword. 
* * @return Where-keyword node */ private STNode parseWhereKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.WHERE_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.WHERE_KEYWORD); return parseWhereKeyword(); } } /** * Parse limit-keyword. * * @return limit-keyword node */ private STNode parseLimitKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.LIMIT_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.LIMIT_KEYWORD); return parseLimitKeyword(); } } /** * Parse let clause. * <p> * <code>let-clause := let let-var-decl [, let-var-decl]* </code> * * @return Parsed node */ private STNode parseLetClause(boolean isRhsExpr) { STNode letKeyword = parseLetKeyword(); STNode letVarDeclarations = parseLetVarDeclarations(ParserRuleContext.LET_CLAUSE_LET_VAR_DECL, isRhsExpr); letKeyword = cloneWithDiagnosticIfListEmpty(letVarDeclarations, letKeyword, DiagnosticErrorCode.ERROR_MISSING_LET_VARIABLE_DECLARATION); return STNodeFactory.createLetClauseNode(letKeyword, letVarDeclarations); } /** * Parse order-keyword. * * @return Order-keyword node */ private STNode parseOrderKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.ORDER_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.ORDER_KEYWORD); return parseOrderKeyword(); } } /** * Parse by-keyword. * * @return By-keyword node */ private STNode parseByKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.BY_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.BY_KEYWORD); return parseByKeyword(); } } /** * Parse order by clause. 
* <p> * <code>order-by-clause := order by order-key-list * </code> * * @return Parsed node */ private STNode parseOrderByClause(boolean isRhsExpr) { STNode orderKeyword = parseOrderKeyword(); STNode byKeyword = parseByKeyword(); STNode orderKeys = parseOrderKeyList(isRhsExpr); byKeyword = cloneWithDiagnosticIfListEmpty(orderKeys, byKeyword, DiagnosticErrorCode.ERROR_MISSING_ORDER_KEY); return STNodeFactory.createOrderByClauseNode(orderKeyword, byKeyword, orderKeys); } /** * Parse order key. * <p> * <code>order-key-list := order-key [, order-key]*</code> * * @return Parsed node */ private STNode parseOrderKeyList(boolean isRhsExpr) { startContext(ParserRuleContext.ORDER_KEY_LIST); List<STNode> orderKeys = new ArrayList<>(); STToken nextToken = peek(); if (isEndOfOrderKeys(nextToken.kind)) { endContext(); return STNodeFactory.createEmptyNodeList(); } STNode orderKey = parseOrderKey(isRhsExpr); orderKeys.add(orderKey); nextToken = peek(); STNode orderKeyListMemberEnd; while (!isEndOfOrderKeys(nextToken.kind)) { orderKeyListMemberEnd = parseOrderKeyListMemberEnd(); if (orderKeyListMemberEnd == null) { break; } orderKeys.add(orderKeyListMemberEnd); orderKey = parseOrderKey(isRhsExpr); orderKeys.add(orderKey); nextToken = peek(); } endContext(); return STNodeFactory.createNodeList(orderKeys); } private boolean isEndOfOrderKeys(SyntaxKind tokenKind) { switch (tokenKind) { case COMMA_TOKEN: case ASCENDING_KEYWORD: case DESCENDING_KEYWORD: return false; case SEMICOLON_TOKEN: case EOF_TOKEN: return true; default: return isQueryClauseStartToken(tokenKind); } } private boolean isQueryClauseStartToken(SyntaxKind tokenKind) { switch (tokenKind) { case SELECT_KEYWORD: case LET_KEYWORD: case WHERE_KEYWORD: case OUTER_KEYWORD: case JOIN_KEYWORD: case ORDER_KEYWORD: case DO_KEYWORD: case FROM_KEYWORD: case LIMIT_KEYWORD: return true; default: return false; } } private STNode parseOrderKeyListMemberEnd() { STToken nextToken = peek(); switch (nextToken.kind) { case COMMA_TOKEN: return 
parseComma(); case EOF_TOKEN: return null; default: if (isQueryClauseStartToken(nextToken.kind)) { return null; } recover(peek(), ParserRuleContext.ORDER_KEY_LIST_END); return parseOrderKeyListMemberEnd(); } } /** * Parse order key. * <p> * <code>order-key := expression (ascending | descending)?</code> * * @return Parsed node */ private STNode parseOrderKey(boolean isRhsExpr) { STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false); STNode orderDirection; STToken nextToken = peek(); switch (nextToken.kind) { case ASCENDING_KEYWORD: case DESCENDING_KEYWORD: orderDirection = consume(); break; default: orderDirection = STNodeFactory.createEmptyNode(); } return STNodeFactory.createOrderKeyNode(expression, orderDirection); } /** * Parse select clause. * <p> * <code>select-clause := select expression</code> * * @return Parsed node */ private STNode parseSelectClause(boolean isRhsExpr) { startContext(ParserRuleContext.SELECT_CLAUSE); STNode selectKeyword = parseSelectKeyword(); STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false); endContext(); return STNodeFactory.createSelectClauseNode(selectKeyword, expression); } /** * Parse select-keyword. * * @return Select-keyword node */ private STNode parseSelectKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.SELECT_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.SELECT_KEYWORD); return parseSelectKeyword(); } } /** * Parse on-conflict clause. 
* <p> * <code> * onConflictClause := on conflict expression * </code> * * @return On conflict clause node */ private STNode parseOnConflictClause(boolean isRhsExpr) { STToken nextToken = peek(); if (nextToken.kind != SyntaxKind.ON_KEYWORD && nextToken.kind != SyntaxKind.CONFLICT_KEYWORD) { return STNodeFactory.createEmptyNode(); } startContext(ParserRuleContext.ON_CONFLICT_CLAUSE); STNode onKeyword = parseOnKeyword(); STNode conflictKeyword = parseConflictKeyword(); endContext(); STNode expr = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false); return STNodeFactory.createOnConflictClauseNode(onKeyword, conflictKeyword, expr); } /** * Parse conflict keyword. * * @return Conflict keyword node */ private STNode parseConflictKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.CONFLICT_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.CONFLICT_KEYWORD); return parseConflictKeyword(); } } /** * Parse limit clause. * <p> * <code>limitClause := limit expression</code> * * @return Limit expression node */ private STNode parseLimitClause(boolean isRhsExpr) { STNode limitKeyword = parseLimitKeyword(); STNode expr = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false); return STNodeFactory.createLimitClauseNode(limitKeyword, expr); } /** * Parse join clause. 
* <p> * <code> * join-clause := (join-var-decl | outer-join-var-decl) in expression on-clause * <br/> * join-var-decl := join (typeName | var) bindingPattern * <br/> * outer-join-var-decl := outer join var binding-pattern * </code> * * @return Join clause */ private STNode parseJoinClause(boolean isRhsExpr) { startContext(ParserRuleContext.JOIN_CLAUSE); STNode outerKeyword; STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.OUTER_KEYWORD) { outerKeyword = consume(); } else { outerKeyword = STNodeFactory.createEmptyNode(); } STNode joinKeyword = parseJoinKeyword(); STNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.JOIN_CLAUSE); STNode inKeyword = parseInKeyword(); STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false); endContext(); STNode onCondition = parseOnClause(isRhsExpr); return STNodeFactory.createJoinClauseNode(outerKeyword, joinKeyword, typedBindingPattern, inKeyword, expression, onCondition); } /** * Parse on clause. * <p> * <code>on clause := `on` expression `equals` expression</code> * * @return On clause node */ private STNode parseOnClause(boolean isRhsExpr) { STToken nextToken = peek(); if (isQueryClauseStartToken(nextToken.kind)) { return createMissingOnClauseNode(); } startContext(ParserRuleContext.ON_CLAUSE); STNode onKeyword = parseOnKeyword(); STNode lhsExpression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false); STNode equalsKeyword = parseEqualsKeyword(); endContext(); STNode rhsExpression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false); return STNodeFactory.createOnClauseNode(onKeyword, lhsExpression, equalsKeyword, rhsExpression); } private STNode createMissingOnClauseNode() { STNode onKeyword = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.ON_KEYWORD, DiagnosticErrorCode.ERROR_MISSING_ON_KEYWORD); STNode identifier = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN, DiagnosticErrorCode.ERROR_MISSING_IDENTIFIER); STNode 
equalsKeyword = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.EQUALS_KEYWORD, DiagnosticErrorCode.ERROR_MISSING_EQUALS_KEYWORD); STNode lhsExpression = STNodeFactory.createSimpleNameReferenceNode(identifier); STNode rhsExpression = STNodeFactory.createSimpleNameReferenceNode(identifier); return STNodeFactory.createOnClauseNode(onKeyword, lhsExpression, equalsKeyword, rhsExpression); } /** * Parse start action. * <p> * <code>start-action := [annots] start (function-call-expr|method-call-expr|remote-method-call-action)</code> * * @return Start action node */ private STNode parseStartAction(STNode annots) { STNode startKeyword = parseStartKeyword(); STNode expr = parseActionOrExpression(); switch (expr.kind) { case FUNCTION_CALL: case METHOD_CALL: case REMOTE_METHOD_CALL_ACTION: break; case SIMPLE_NAME_REFERENCE: case QUALIFIED_NAME_REFERENCE: STNode openParenToken = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.OPEN_PAREN_TOKEN, DiagnosticErrorCode.ERROR_MISSING_OPEN_PAREN_TOKEN); STNode arguments = STNodeFactory.createEmptyNodeList(); STNode closeParenToken = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.CLOSE_PAREN_TOKEN, DiagnosticErrorCode.ERROR_MISSING_CLOSE_PAREN_TOKEN); expr = STNodeFactory.createFunctionCallExpressionNode(expr, openParenToken, arguments, closeParenToken); break; default: startKeyword = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startKeyword, expr, DiagnosticErrorCode.ERROR_INVALID_EXPRESSION_IN_START_ACTION); STNode funcName = SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN); funcName = STNodeFactory.createSimpleNameReferenceNode(funcName); openParenToken = SyntaxErrors.createMissingToken(SyntaxKind.OPEN_PAREN_TOKEN); arguments = STNodeFactory.createEmptyNodeList(); closeParenToken = SyntaxErrors.createMissingToken(SyntaxKind.CLOSE_PAREN_TOKEN); expr = STNodeFactory.createFunctionCallExpressionNode(funcName, openParenToken, arguments, closeParenToken); break; } return 
STNodeFactory.createStartActionNode(getAnnotations(annots), startKeyword, expr); } /** * Parse start keyword. * * @return Start keyword node */ private STNode parseStartKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.START_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.START_KEYWORD); return parseStartKeyword(); } } /** * Parse flush action. * <p> * <code>flush-action := flush [peer-worker]</code> * * @return flush action node */ private STNode parseFlushAction() { STNode flushKeyword = parseFlushKeyword(); STNode peerWorker = parseOptionalPeerWorkerName(); return STNodeFactory.createFlushActionNode(flushKeyword, peerWorker); } /** * Parse flush keyword. * * @return flush keyword node */ private STNode parseFlushKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.FLUSH_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.FLUSH_KEYWORD); return parseFlushKeyword(); } } /** * Parse peer worker. * <p> * <code>peer-worker := worker-name | function</code> * * @return peer worker name node */ private STNode parseOptionalPeerWorkerName() { STToken token = peek(); switch (token.kind) { case IDENTIFIER_TOKEN: case FUNCTION_KEYWORD: return STNodeFactory.createSimpleNameReferenceNode(consume()); default: return STNodeFactory.createEmptyNode(); } } /** * Parse intersection type descriptor. 
* <p> * intersection-type-descriptor := type-descriptor & type-descriptor * </p> * * @return Parsed node */ private STNode parseIntersectionTypeDescriptor(STNode leftTypeDesc, ParserRuleContext context, boolean isTypedBindingPattern) { STNode bitwiseAndToken = consume(); STNode rightTypeDesc = parseTypeDescriptorInternal(new ArrayList<>(), context, isTypedBindingPattern, false); return createIntersectionTypeDesc(leftTypeDesc, bitwiseAndToken, rightTypeDesc); } private STNode createIntersectionTypeDesc(STNode leftTypeDesc, STNode bitwiseAndToken, STNode rightTypeDesc) { leftTypeDesc = validateForUsageOfVar(leftTypeDesc); rightTypeDesc = validateForUsageOfVar(rightTypeDesc); return STNodeFactory.createIntersectionTypeDescriptorNode(leftTypeDesc, bitwiseAndToken, rightTypeDesc); } /** * Parse singleton type descriptor. * <p> * singleton-type-descriptor := simple-const-expr * simple-const-expr := * nil-literal * | boolean-literal * | [Sign] int-literal * | [Sign] floating-point-literal * | string-literal * | constant-reference-expr * </p> */ private STNode parseSingletonTypeDesc() { STNode simpleContExpr = parseSimpleConstExpr(); return STNodeFactory.createSingletonTypeDescriptorNode(simpleContExpr); } private STNode parseSignedIntOrFloat() { STNode operator = parseUnaryOperator(); STNode literal; STToken nextToken = peek(); switch (nextToken.kind) { case HEX_INTEGER_LITERAL_TOKEN: case DECIMAL_FLOATING_POINT_LITERAL_TOKEN: case HEX_FLOATING_POINT_LITERAL_TOKEN: literal = parseBasicLiteral(); break; default: literal = parseDecimalIntLiteral(ParserRuleContext.DECIMAL_INTEGER_LITERAL_TOKEN); literal = STNodeFactory.createBasicLiteralNode(SyntaxKind.NUMERIC_LITERAL, literal); } return STNodeFactory.createUnaryExpressionNode(operator, literal); } private boolean isSingletonTypeDescStart(SyntaxKind tokenKind, boolean inTypeDescCtx) { STToken nextNextToken = getNextNextToken(); switch (tokenKind) { case STRING_LITERAL_TOKEN: case DECIMAL_INTEGER_LITERAL_TOKEN: case 
HEX_INTEGER_LITERAL_TOKEN: case DECIMAL_FLOATING_POINT_LITERAL_TOKEN: case HEX_FLOATING_POINT_LITERAL_TOKEN: case TRUE_KEYWORD: case FALSE_KEYWORD: case NULL_KEYWORD: if (inTypeDescCtx || isValidTypeDescRHSOutSideTypeDescCtx(nextNextToken)) { return true; } return false; case PLUS_TOKEN: case MINUS_TOKEN: return isIntOrFloat(nextNextToken); default: return false; } } static boolean isIntOrFloat(STToken token) { switch (token.kind) { case DECIMAL_INTEGER_LITERAL_TOKEN: case HEX_INTEGER_LITERAL_TOKEN: case DECIMAL_FLOATING_POINT_LITERAL_TOKEN: case HEX_FLOATING_POINT_LITERAL_TOKEN: return true; default: return false; } } private boolean isValidTypeDescRHSOutSideTypeDescCtx(STToken token) { switch (token.kind) { case IDENTIFIER_TOKEN: case QUESTION_MARK_TOKEN: case OPEN_PAREN_TOKEN: case OPEN_BRACKET_TOKEN: case PIPE_TOKEN: case BITWISE_AND_TOKEN: case OPEN_BRACE_TOKEN: case ERROR_KEYWORD: return true; default: return false; } } /** * Check whether the parser reached to a valid expression start. * * @param nextTokenKind Kind of the next immediate token. * @param nextTokenIndex Index to the next token. * @return <code>true</code> if this is a start of a valid expression. 
<code>false</code> otherwise */ private boolean isValidExpressionStart(SyntaxKind nextTokenKind, int nextTokenIndex) { nextTokenIndex++; switch (nextTokenKind) { case DECIMAL_INTEGER_LITERAL_TOKEN: case HEX_INTEGER_LITERAL_TOKEN: case STRING_LITERAL_TOKEN: case NULL_KEYWORD: case TRUE_KEYWORD: case FALSE_KEYWORD: case DECIMAL_FLOATING_POINT_LITERAL_TOKEN: case HEX_FLOATING_POINT_LITERAL_TOKEN: SyntaxKind nextNextTokenKind = peek(nextTokenIndex).kind; return nextNextTokenKind == SyntaxKind.SEMICOLON_TOKEN || nextNextTokenKind == SyntaxKind.COMMA_TOKEN || nextNextTokenKind == SyntaxKind.CLOSE_BRACKET_TOKEN || isValidExprRhsStart(nextNextTokenKind, SyntaxKind.SIMPLE_NAME_REFERENCE); case IDENTIFIER_TOKEN: return isValidExprRhsStart(peek(nextTokenIndex).kind, SyntaxKind.SIMPLE_NAME_REFERENCE); case OPEN_PAREN_TOKEN: case CHECK_KEYWORD: case CHECKPANIC_KEYWORD: case OPEN_BRACE_TOKEN: case TYPEOF_KEYWORD: case NEGATION_TOKEN: case EXCLAMATION_MARK_TOKEN: case TRAP_KEYWORD: case OPEN_BRACKET_TOKEN: case LT_TOKEN: case FROM_KEYWORD: case LET_KEYWORD: case BACKTICK_TOKEN: case NEW_KEYWORD: case LEFT_ARROW_TOKEN: case FUNCTION_KEYWORD: case TRANSACTIONAL_KEYWORD: case ISOLATED_KEYWORD: return true; case PLUS_TOKEN: case MINUS_TOKEN: return isValidExpressionStart(peek(nextTokenIndex).kind, nextTokenIndex); case TABLE_KEYWORD: return peek(nextTokenIndex).kind == SyntaxKind.FROM_KEYWORD; case STREAM_KEYWORD: STToken nextNextToken = peek(nextTokenIndex); return nextNextToken.kind == SyntaxKind.KEY_KEYWORD || nextNextToken.kind == SyntaxKind.OPEN_BRACKET_TOKEN || nextNextToken.kind == SyntaxKind.FROM_KEYWORD; case ERROR_KEYWORD: return peek(nextTokenIndex).kind == SyntaxKind.OPEN_PAREN_TOKEN; case XML_KEYWORD: case STRING_KEYWORD: return peek(nextTokenIndex).kind == SyntaxKind.BACKTICK_TOKEN; case START_KEYWORD: case FLUSH_KEYWORD: case WAIT_KEYWORD: default: return false; } } /** * Parse sync send action. 
* <p> * <code>sync-send-action := expression ->> peer-worker</code> * * @param expression LHS expression of the sync send action * @return Sync send action node */ private STNode parseSyncSendAction(STNode expression) { STNode syncSendToken = parseSyncSendToken(); STNode peerWorker = parsePeerWorkerName(); return STNodeFactory.createSyncSendActionNode(expression, syncSendToken, peerWorker); } /** * Parse peer worker. * <p> * <code>peer-worker := worker-name | function</code> * * @return peer worker name node */ private STNode parsePeerWorkerName() { STToken token = peek(); switch (token.kind) { case IDENTIFIER_TOKEN: case FUNCTION_KEYWORD: return STNodeFactory.createSimpleNameReferenceNode(consume()); default: recover(token, ParserRuleContext.PEER_WORKER_NAME); return parsePeerWorkerName(); } } /** * Parse sync send token. * <p> * <code>sync-send-token := ->> </code> * * @return sync send token */ private STNode parseSyncSendToken() { STToken token = peek(); if (token.kind == SyntaxKind.SYNC_SEND_TOKEN) { return consume(); } else { recover(token, ParserRuleContext.SYNC_SEND_TOKEN); return parseSyncSendToken(); } } /** * Parse receive action. * <p> * <code>receive-action := single-receive-action | multiple-receive-action</code> * * @return Receive action */ private STNode parseReceiveAction() { STNode leftArrow = parseLeftArrowToken(); STNode receiveWorkers = parseReceiveWorkers(); return STNodeFactory.createReceiveActionNode(leftArrow, receiveWorkers); } private STNode parseReceiveWorkers() { switch (peek().kind) { case FUNCTION_KEYWORD: case IDENTIFIER_TOKEN: return parsePeerWorkerName(); case OPEN_BRACE_TOKEN: return parseMultipleReceiveWorkers(); default: recover(peek(), ParserRuleContext.RECEIVE_WORKERS); return parseReceiveWorkers(); } } /** * Parse multiple worker receivers. 
* <p> * <code>{ receive-field (, receive-field)* }</code> * * @return Multiple worker receiver node */ private STNode parseMultipleReceiveWorkers() { startContext(ParserRuleContext.MULTI_RECEIVE_WORKERS); STNode openBrace = parseOpenBrace(); STNode receiveFields = parseReceiveFields(); STNode closeBrace = parseCloseBrace(); endContext(); openBrace = cloneWithDiagnosticIfListEmpty(receiveFields, openBrace, DiagnosticErrorCode.ERROR_MISSING_RECEIVE_FIELD_IN_RECEIVE_ACTION); return STNodeFactory.createReceiveFieldsNode(openBrace, receiveFields, closeBrace); } private STNode parseReceiveFields() { List<STNode> receiveFields = new ArrayList<>(); STToken nextToken = peek(); if (isEndOfReceiveFields(nextToken.kind)) { return STNodeFactory.createEmptyNodeList(); } STNode receiveField = parseReceiveField(); receiveFields.add(receiveField); nextToken = peek(); STNode recieveFieldEnd; while (!isEndOfReceiveFields(nextToken.kind)) { recieveFieldEnd = parseReceiveFieldEnd(); if (recieveFieldEnd == null) { break; } receiveFields.add(recieveFieldEnd); receiveField = parseReceiveField(); receiveFields.add(receiveField); nextToken = peek(); } return STNodeFactory.createNodeList(receiveFields); } private boolean isEndOfReceiveFields(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case EOF_TOKEN: case CLOSE_BRACE_TOKEN: return true; default: return false; } } private STNode parseReceiveFieldEnd() { switch (peek().kind) { case COMMA_TOKEN: return parseComma(); case CLOSE_BRACE_TOKEN: return null; default: recover(peek(), ParserRuleContext.RECEIVE_FIELD_END); return parseReceiveFieldEnd(); } } /** * Parse receive field. 
* <p> * <code>receive-field := peer-worker | field-name : peer-worker</code> * * @return Receiver field node */ private STNode parseReceiveField() { switch (peek().kind) { case FUNCTION_KEYWORD: STNode functionKeyword = consume(); return STNodeFactory.createSimpleNameReferenceNode(functionKeyword); case IDENTIFIER_TOKEN: STNode identifier = parseIdentifier(ParserRuleContext.RECEIVE_FIELD_NAME); return createQualifiedReceiveField(identifier); default: recover(peek(), ParserRuleContext.RECEIVE_FIELD); return parseReceiveField(); } } private STNode createQualifiedReceiveField(STNode identifier) { if (peek().kind != SyntaxKind.COLON_TOKEN) { return identifier; } STNode colon = parseColon(); STNode peerWorker = parsePeerWorkerName(); return STNodeFactory.createQualifiedNameReferenceNode(identifier, colon, peerWorker); } /** * Parse left arrow (<-) token. * * @return left arrow token */ private STNode parseLeftArrowToken() { STToken token = peek(); if (token.kind == SyntaxKind.LEFT_ARROW_TOKEN) { return consume(); } else { recover(token, ParserRuleContext.LEFT_ARROW_TOKEN); return parseLeftArrowToken(); } } /** * Parse signed right shift token (>>). * * @return Parsed node */ private STNode parseSignedRightShiftToken() { STNode openGTToken = consume(); STToken endLGToken = consume(); STNode doubleGTToken = STNodeFactory.createToken(SyntaxKind.DOUBLE_GT_TOKEN, openGTToken.leadingMinutiae(), endLGToken.trailingMinutiae()); if (hasTrailingMinutiae(openGTToken)) { doubleGTToken = SyntaxErrors.addDiagnostic(doubleGTToken, DiagnosticErrorCode.ERROR_NO_WHITESPACES_ALLOWED_IN_RIGHT_SHIFT_OP); } return doubleGTToken; } /** * Parse unsigned right shift token (>>>). 
* * @return Parsed node */ private STNode parseUnsignedRightShiftToken() { STNode openGTToken = consume(); STNode middleGTToken = consume(); STNode endLGToken = consume(); STNode unsignedRightShiftToken = STNodeFactory.createToken(SyntaxKind.TRIPPLE_GT_TOKEN, openGTToken.leadingMinutiae(), endLGToken.trailingMinutiae()); boolean validOpenGTToken = !hasTrailingMinutiae(openGTToken); boolean validMiddleGTToken = !hasTrailingMinutiae(middleGTToken); if (validOpenGTToken && validMiddleGTToken) { return unsignedRightShiftToken; } unsignedRightShiftToken = SyntaxErrors.addDiagnostic(unsignedRightShiftToken, DiagnosticErrorCode.ERROR_NO_WHITESPACES_ALLOWED_IN_UNSIGNED_RIGHT_SHIFT_OP); return unsignedRightShiftToken; } /** * Parse wait action. * <p> * <code>wait-action := single-wait-action | multiple-wait-action | alternate-wait-action </code> * * @return Wait action node */ private STNode parseWaitAction() { STNode waitKeyword = parseWaitKeyword(); if (peek().kind == SyntaxKind.OPEN_BRACE_TOKEN) { return parseMultiWaitAction(waitKeyword); } return parseSingleOrAlternateWaitAction(waitKeyword); } /** * Parse wait keyword. * * @return wait keyword */ private STNode parseWaitKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.WAIT_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.WAIT_KEYWORD); return parseWaitKeyword(); } } /** * Parse single or alternate wait actions. 
* <p> * <code> * alternate-or-single-wait-action := wait wait-future-expr (| wait-future-expr)+ * <br/> * wait-future-expr := expression but not mapping-constructor-expr * </code> * * @param waitKeyword wait keyword * @return Single or alternate wait action node */ private STNode parseSingleOrAlternateWaitAction(STNode waitKeyword) { startContext(ParserRuleContext.ALTERNATE_WAIT_EXPRS); STToken nextToken = peek(); if (isEndOfWaitFutureExprList(nextToken.kind)) { endContext(); STNode waitFutureExprs = STNodeFactory .createSimpleNameReferenceNode(STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN)); waitFutureExprs = SyntaxErrors.addDiagnostic(waitFutureExprs, DiagnosticErrorCode.ERROR_MISSING_WAIT_FUTURE_EXPRESSION); return STNodeFactory.createWaitActionNode(waitKeyword, waitFutureExprs); } List<STNode> waitFutureExprList = new ArrayList<>(); STNode waitField = parseWaitFutureExpr(); waitFutureExprList.add(waitField); nextToken = peek(); STNode waitFutureExprEnd; while (!isEndOfWaitFutureExprList(nextToken.kind)) { waitFutureExprEnd = parseWaitFutureExprEnd(); if (waitFutureExprEnd == null) { break; } waitFutureExprList.add(waitFutureExprEnd); waitField = parseWaitFutureExpr(); waitFutureExprList.add(waitField); nextToken = peek(); } endContext(); return STNodeFactory.createWaitActionNode(waitKeyword, waitFutureExprList.get(0)); } private boolean isEndOfWaitFutureExprList(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case EOF_TOKEN: case CLOSE_BRACE_TOKEN: case SEMICOLON_TOKEN: case OPEN_BRACE_TOKEN: return true; case PIPE_TOKEN: default: return false; } } private STNode parseWaitFutureExpr() { STNode waitFutureExpr = parseActionOrExpression(); if (waitFutureExpr.kind == SyntaxKind.MAPPING_CONSTRUCTOR) { waitFutureExpr = SyntaxErrors.addDiagnostic(waitFutureExpr, DiagnosticErrorCode.ERROR_MAPPING_CONSTRUCTOR_EXPR_AS_A_WAIT_EXPR); } else if (isAction(waitFutureExpr)) { waitFutureExpr = SyntaxErrors.addDiagnostic(waitFutureExpr, 
DiagnosticErrorCode.ERROR_ACTION_AS_A_WAIT_EXPR); } return waitFutureExpr; } private STNode parseWaitFutureExprEnd() { STToken nextToken = peek(); switch (nextToken.kind) { case PIPE_TOKEN: return parsePipeToken(); default: if (isEndOfWaitFutureExprList(nextToken.kind) || !isValidExpressionStart(nextToken.kind, 1)) { return null; } recover(peek(), ParserRuleContext.WAIT_FUTURE_EXPR_END); return parseWaitFutureExprEnd(); } } /** * Parse multiple wait action. * <p> * <code>multiple-wait-action := wait { wait-field (, wait-field)* }</code> * * @param waitKeyword Wait keyword * @return Multiple wait action node */ private STNode parseMultiWaitAction(STNode waitKeyword) { startContext(ParserRuleContext.MULTI_WAIT_FIELDS); STNode openBrace = parseOpenBrace(); STNode waitFields = parseWaitFields(); STNode closeBrace = parseCloseBrace(); endContext(); openBrace = cloneWithDiagnosticIfListEmpty(waitFields, openBrace, DiagnosticErrorCode.ERROR_MISSING_WAIT_FIELD_IN_WAIT_ACTION); STNode waitFieldsNode = STNodeFactory.createWaitFieldsListNode(openBrace, waitFields, closeBrace); return STNodeFactory.createWaitActionNode(waitKeyword, waitFieldsNode); } private STNode parseWaitFields() { List<STNode> waitFields = new ArrayList<>(); STToken nextToken = peek(); if (isEndOfWaitFields(nextToken.kind)) { return STNodeFactory.createEmptyNodeList(); } STNode waitField = parseWaitField(); waitFields.add(waitField); nextToken = peek(); STNode waitFieldEnd; while (!isEndOfWaitFields(nextToken.kind)) { waitFieldEnd = parseWaitFieldEnd(); if (waitFieldEnd == null) { break; } waitFields.add(waitFieldEnd); waitField = parseWaitField(); waitFields.add(waitField); nextToken = peek(); } return STNodeFactory.createNodeList(waitFields); } private boolean isEndOfWaitFields(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case EOF_TOKEN: case CLOSE_BRACE_TOKEN: return true; default: return false; } } private STNode parseWaitFieldEnd() { switch (peek().kind) { case COMMA_TOKEN: return 
parseComma(); case CLOSE_BRACE_TOKEN: return null; default: recover(peek(), ParserRuleContext.WAIT_FIELD_END); return parseWaitFieldEnd(); } } /** * Parse wait field. * <p> * <code>wait-field := variable-name | field-name : wait-future-expr</code> * * @return Receiver field node */ private STNode parseWaitField() { switch (peek().kind) { case IDENTIFIER_TOKEN: STNode identifier = parseIdentifier(ParserRuleContext.WAIT_FIELD_NAME); identifier = STNodeFactory.createSimpleNameReferenceNode(identifier); return createQualifiedWaitField(identifier); default: recover(peek(), ParserRuleContext.WAIT_FIELD_NAME); return parseWaitField(); } } private STNode createQualifiedWaitField(STNode identifier) { if (peek().kind != SyntaxKind.COLON_TOKEN) { return identifier; } STNode colon = parseColon(); STNode waitFutureExpr = parseWaitFutureExpr(); return STNodeFactory.createWaitFieldNode(identifier, colon, waitFutureExpr); } /** * Parse annot access expression. * <p> * <code> * annot-access-expr := expression .@ annot-tag-reference * <br/> * annot-tag-reference := qualified-identifier | identifier * </code> * * @param lhsExpr Preceding expression of the annot access access * @return Parsed node */ private STNode parseAnnotAccessExpression(STNode lhsExpr, boolean isInConditionalExpr) { STNode annotAccessToken = parseAnnotChainingToken(); STNode annotTagReference = parseFieldAccessIdentifier(isInConditionalExpr); return STNodeFactory.createAnnotAccessExpressionNode(lhsExpr, annotAccessToken, annotTagReference); } /** * Parse annot-chaining-token. * * @return Parsed node */ private STNode parseAnnotChainingToken() { STToken token = peek(); if (token.kind == SyntaxKind.ANNOT_CHAINING_TOKEN) { return consume(); } else { recover(token, ParserRuleContext.ANNOT_CHAINING_TOKEN); return parseAnnotChainingToken(); } } /** * Parse field access identifier. 
* <p> * <code>field-access-identifier := qualified-identifier | identifier</code> * * @return Parsed node */ private STNode parseFieldAccessIdentifier(boolean isInConditionalExpr) { return parseQualifiedIdentifier(ParserRuleContext.FIELD_ACCESS_IDENTIFIER, isInConditionalExpr); } /** * Parse query action. * <p> * <code>query-action := query-pipeline do-clause * <br/> * do-clause := do block-stmt * </code> * * @param queryConstructType Query construct type. This is only for validation * @param queryPipeline Query pipeline * @param selectClause Select clause if any This is only for validation. * @return Query action node */ private STNode parseQueryAction(STNode queryConstructType, STNode queryPipeline, STNode selectClause, boolean isRhsExpr) { if (queryConstructType != null) { queryPipeline = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(queryPipeline, queryConstructType, DiagnosticErrorCode.ERROR_QUERY_CONSTRUCT_TYPE_IN_QUERY_ACTION); } if (selectClause != null) { queryPipeline = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(queryPipeline, selectClause, DiagnosticErrorCode.ERROR_SELECT_CLAUSE_IN_QUERY_ACTION); } startContext(ParserRuleContext.DO_CLAUSE); STNode doKeyword = parseDoKeyword(); STNode blockStmt = parseBlockNode(); endContext(); return STNodeFactory.createQueryActionNode(queryPipeline, doKeyword, blockStmt); } /** * Parse 'do' keyword. * * @return do keyword node */ private STNode parseDoKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.DO_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.DO_KEYWORD); return parseDoKeyword(); } } /** * Parse optional field access or xml optional attribute access expression. * <p> * <code> * optional-field-access-expr := expression ?. field-name * <br/> * xml-optional-attribute-access-expr := expression ?. 
xml-attribute-name * <br/> * xml-attribute-name := xml-qualified-name | qualified-identifier | identifier * <br/> * xml-qualified-name := xml-namespace-prefix : identifier * <br/> * xml-namespace-prefix := identifier * </code> * * @param lhsExpr Preceding expression of the optional access * @return Parsed node */ private STNode parseOptionalFieldAccessExpression(STNode lhsExpr, boolean isInConditionalExpr) { STNode optionalFieldAccessToken = parseOptionalChainingToken(); STNode fieldName = parseFieldAccessIdentifier(isInConditionalExpr); return STNodeFactory.createOptionalFieldAccessExpressionNode(lhsExpr, optionalFieldAccessToken, fieldName); } /** * Parse optional chaining token. * * @return parsed node */ private STNode parseOptionalChainingToken() { STToken token = peek(); if (token.kind == SyntaxKind.OPTIONAL_CHAINING_TOKEN) { return consume(); } else { recover(token, ParserRuleContext.OPTIONAL_CHAINING_TOKEN); return parseOptionalChainingToken(); } } /** * Parse conditional expression. * <p> * <code>conditional-expr := expression ? 
expression : expression</code> * * @param lhsExpr Preceding expression of the question mark * @return Parsed node */ private STNode parseConditionalExpression(STNode lhsExpr) { startContext(ParserRuleContext.CONDITIONAL_EXPRESSION); STNode questionMark = parseQuestionMark(); STNode middleExpr = parseExpression(OperatorPrecedence.ANON_FUNC_OR_LET, true, false, true); STNode nextToken = peek(); STNode endExpr; STNode colon; if (nextToken.kind != SyntaxKind.COLON_TOKEN && middleExpr.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) { STQualifiedNameReferenceNode qualifiedNameRef = (STQualifiedNameReferenceNode) middleExpr; STNode modulePrefix = qualifiedNameRef.modulePrefix; if (modulePrefix.kind == SyntaxKind.IDENTIFIER_TOKEN) { middleExpr = STNodeFactory.createSimpleNameReferenceNode(modulePrefix); } else { middleExpr = modulePrefix; } colon = qualifiedNameRef.colon; endContext(); endExpr = STNodeFactory.createSimpleNameReferenceNode(qualifiedNameRef.identifier); } else { if (middleExpr.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) { middleExpr = generateQualifiedNameRef(middleExpr); } colon = parseColon(); endContext(); endExpr = parseExpression(OperatorPrecedence.ANON_FUNC_OR_LET, true, false); } return STNodeFactory.createConditionalExpressionNode(lhsExpr, questionMark, middleExpr, colon, endExpr); } private STNode generateQualifiedNameRef(STNode qualifiedName) { STQualifiedNameReferenceNode qualifiedNameRef = (STQualifiedNameReferenceNode) qualifiedName; STNode modulePrefix = qualifiedNameRef.modulePrefix; if (modulePrefix.kind != SyntaxKind.IDENTIFIER_TOKEN) { STBuiltinSimpleNameReferenceNode builtInType = (STBuiltinSimpleNameReferenceNode) modulePrefix; STToken nameToken = (STToken) builtInType.name; STNode preDeclaredPrefix = STNodeFactory.createIdentifierToken(nameToken.text(), nameToken.leadingMinutiae(), nameToken.trailingMinutiae()); return STNodeFactory.createQualifiedNameReferenceNode(preDeclaredPrefix, qualifiedNameRef.colon, qualifiedNameRef.identifier); } 
else { return qualifiedName; } } /** * Parse enum declaration. * <p> * module-enum-decl := * metadata * [public] enum identifier { enum-member (, enum-member)* } * enum-member := metadata identifier [= const-expr] * </p> * * @param metadata * @param qualifier * @return Parsed enum node. */ private STNode parseEnumDeclaration(STNode metadata, STNode qualifier) { startContext(ParserRuleContext.MODULE_ENUM_DECLARATION); STNode enumKeywordToken = parseEnumKeyword(); STNode identifier = parseIdentifier(ParserRuleContext.MODULE_ENUM_NAME); STNode openBraceToken = parseOpenBrace(); STNode enumMemberList = parseEnumMemberList(); STNode closeBraceToken = parseCloseBrace(); endContext(); openBraceToken = cloneWithDiagnosticIfListEmpty(enumMemberList, openBraceToken, DiagnosticErrorCode.ERROR_MISSING_ENUM_MEMBER); return STNodeFactory.createEnumDeclarationNode(metadata, qualifier, enumKeywordToken, identifier, openBraceToken, enumMemberList, closeBraceToken); } /** * Parse 'enum' keyword. * * @return enum keyword node */ private STNode parseEnumKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.ENUM_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.ENUM_KEYWORD); return parseEnumKeyword(); } } /** * Parse enum member list. * <p> * enum-member := metadata identifier [= const-expr] * </p> * * @return enum member list node. 
*/
    private STNode parseEnumMemberList() {
        startContext(ParserRuleContext.ENUM_MEMBER_LIST);

        // Empty enum body (`enum Foo { }`): close the member-list context before
        // returning. Without this endContext(), ENUM_MEMBER_LIST leaks and the
        // caller's (parseEnumDeclaration's) endContext() pops the wrong context.
        if (peek().kind == SyntaxKind.CLOSE_BRACE_TOKEN) {
            endContext();
            return STNodeFactory.createEmptyNodeList();
        }

        // First member, then (comma, member)* until the closing brace.
        List<STNode> enumMemberList = new ArrayList<>();
        STNode enumMember = parseEnumMember();

        STNode enumMemberRhs;
        while (peek().kind != SyntaxKind.CLOSE_BRACE_TOKEN) {
            enumMemberRhs = parseEnumMemberEnd();
            if (enumMemberRhs == null) {
                break;
            }
            enumMemberList.add(enumMember);
            enumMemberList.add(enumMemberRhs);
            enumMember = parseEnumMember();
        }

        enumMemberList.add(enumMember);
        endContext();
        return STNodeFactory.createNodeList(enumMemberList);
    }

    /**
     * Parse enum member.
     * <p>
     * <code>enum-member := metadata identifier [= const-expr]</code>
     *
     * @return Parsed enum member node.
     */
    private STNode parseEnumMember() {
        STNode metadata;
        switch (peek().kind) {
            case DOCUMENTATION_STRING:
            case AT_TOKEN:
                metadata = parseMetaData();
                break;
            default:
                metadata = STNodeFactory.createEmptyNode();
        }

        STNode identifierNode = parseIdentifier(ParserRuleContext.ENUM_MEMBER_NAME);
        return parseEnumMemberRhs(metadata, identifierNode);
    }

    // Parses the optional `= const-expr` tail of an enum member.
    private STNode parseEnumMemberRhs(STNode metadata, STNode identifierNode) {
        STNode equalToken, constExprNode;
        switch (peek().kind) {
            case EQUAL_TOKEN:
                equalToken = parseAssignOp();
                constExprNode = parseExpression();
                break;
            case COMMA_TOKEN:
            case CLOSE_BRACE_TOKEN:
                // Member without an explicit value.
                equalToken = STNodeFactory.createEmptyNode();
                constExprNode = STNodeFactory.createEmptyNode();
                break;
            default:
                recover(peek(), ParserRuleContext.ENUM_MEMBER_RHS, metadata, identifierNode);
                return parseEnumMemberRhs(metadata, identifierNode);
        }

        return STNodeFactory.createEnumMemberNode(metadata, identifierNode, equalToken, constExprNode);
    }

    // Returns the comma separating enum members, or null at the end of the list.
    private STNode parseEnumMemberEnd() {
        switch (peek().kind) {
            case COMMA_TOKEN:
                return parseComma();
            case CLOSE_BRACE_TOKEN:
                return null;
            default:
                recover(peek(), ParserRuleContext.ENUM_MEMBER_END);
                return parseEnumMemberEnd();
        }
    }

    private STNode parseTransactionStmtOrVarDecl(STNode annots,
List<STNode> qualifiers, STToken transactionKeyword) {
        switch (peek().kind) {
            case OPEN_BRACE_TOKEN:
                // `transaction {` : a transaction statement; statement-level annots
                // and qualifiers are not allowed here.
                reportInvalidStatementAnnots(annots, qualifiers);
                reportInvalidQualifierList(qualifiers);
                return parseTransactionStatement(transactionKeyword);
            case COLON_TOKEN:
                // `transaction:Foo ...` : `transaction` is the module prefix of a
                // type reference, so this is a variable declaration instead.
                if (getNextNextToken().kind == SyntaxKind.IDENTIFIER_TOKEN) {
                    STNode typeDesc = parseQualifiedIdentifierWithPredeclPrefix(transactionKeyword, false);
                    return parseVarDeclTypeDescRhs(typeDesc, annots, qualifiers, true, false);
                }
                // fall through: colon not followed by an identifier — recover below.
            default:
                Solution solution = recover(peek(), ParserRuleContext.TRANSACTION_STMT_RHS_OR_TYPE_REF);
                // KEEP, or an inserted colon, means the recovery decided this is a
                // type reference; otherwise re-dispatch on the repaired token stream.
                if (solution.action == Action.KEEP ||
                        (solution.action == Action.INSERT && solution.tokenKind == SyntaxKind.COLON_TOKEN)) {
                    STNode typeDesc = parseQualifiedIdentifierWithPredeclPrefix(transactionKeyword, false);
                    return parseVarDeclTypeDescRhs(typeDesc, annots, qualifiers, true, false);
                }
                return parseTransactionStmtOrVarDecl(annots, qualifiers, transactionKeyword);
        }
    }

    /**
     * Parse transaction statement.
     * <p>
     * <code>transaction-stmt := `transaction` block-stmt [on-fail-clause]</code>
     *
     * @return Transaction statement node
     */
    private STNode parseTransactionStatement(STNode transactionKeyword) {
        startContext(ParserRuleContext.TRANSACTION_STMT);
        STNode blockStmt = parseBlockNode();
        endContext();
        STNode onFailClause = parseOptionalOnFailClause();
        return STNodeFactory.createTransactionStatementNode(transactionKeyword, blockStmt, onFailClause);
    }

    /**
     * Parse commit action.
     * <p>
     * <code>commit-action := "commit"</code>
     *
     * @return Commit action node
     */
    private STNode parseCommitAction() {
        STNode commitKeyword = parseCommitKeyword();
        return STNodeFactory.createCommitActionNode(commitKeyword);
    }

    /**
     * Parse commit keyword.
     *
     * @return parsed node
     */
    private STNode parseCommitKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.COMMIT_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.COMMIT_KEYWORD);
            return parseCommitKeyword();
        }
    }

    /**
     * Parse retry statement.
* <p> * <code> * retry-stmt := `retry` retry-spec block-stmt [on-fail-clause] * <br/> * retry-spec := [type-parameter] [ `(` arg-list `)` ] * </code> * * @return Retry statement node */ private STNode parseRetryStatement() { startContext(ParserRuleContext.RETRY_STMT); STNode retryKeyword = parseRetryKeyword(); STNode retryStmt = parseRetryKeywordRhs(retryKeyword); return retryStmt; } private STNode parseRetryKeywordRhs(STNode retryKeyword) { STToken nextToken = peek(); switch (nextToken.kind) { case LT_TOKEN: STNode typeParam = parseTypeParameter(); return parseRetryTypeParamRhs(retryKeyword, typeParam); case OPEN_PAREN_TOKEN: case OPEN_BRACE_TOKEN: case TRANSACTION_KEYWORD: typeParam = STNodeFactory.createEmptyNode(); return parseRetryTypeParamRhs(retryKeyword, typeParam); default: recover(peek(), ParserRuleContext.RETRY_KEYWORD_RHS, retryKeyword); return parseRetryKeywordRhs(retryKeyword); } } private STNode parseRetryTypeParamRhs(STNode retryKeyword, STNode typeParam) { STNode args; switch (peek().kind) { case OPEN_PAREN_TOKEN: args = parseParenthesizedArgList(); break; case OPEN_BRACE_TOKEN: case TRANSACTION_KEYWORD: args = STNodeFactory.createEmptyNode(); break; default: recover(peek(), ParserRuleContext.RETRY_TYPE_PARAM_RHS, retryKeyword, typeParam); return parseRetryTypeParamRhs(retryKeyword, typeParam); } STNode blockStmt = parseRetryBody(); endContext(); STNode onFailClause = parseOptionalOnFailClause(); return STNodeFactory.createRetryStatementNode(retryKeyword, typeParam, args, blockStmt, onFailClause); } private STNode parseRetryBody() { switch (peek().kind) { case OPEN_BRACE_TOKEN: return parseBlockNode(); case TRANSACTION_KEYWORD: return parseTransactionStatement(consume()); default: recover(peek(), ParserRuleContext.RETRY_BODY); return parseRetryBody(); } } /** * Parse optional on fail clause. 
* * @return Parsed node */ private STNode parseOptionalOnFailClause() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.ON_KEYWORD) { return parseOnFailClause(); } if (isEndOfRegularCompoundStmt(nextToken.kind)) { return STNodeFactory.createEmptyNode(); } recover(nextToken, ParserRuleContext.REGULAR_COMPOUND_STMT_RHS); return parseOptionalOnFailClause(); } private boolean isEndOfRegularCompoundStmt(SyntaxKind nodeKind) { switch (nodeKind) { case CLOSE_BRACE_TOKEN: case SEMICOLON_TOKEN: case AT_TOKEN: case EOF_TOKEN: return true; default: return isStatementStartingToken(nodeKind); } } private boolean isStatementStartingToken(SyntaxKind nodeKind) { switch (nodeKind) { case FINAL_KEYWORD: case IF_KEYWORD: case WHILE_KEYWORD: case DO_KEYWORD: case PANIC_KEYWORD: case CONTINUE_KEYWORD: case BREAK_KEYWORD: case RETURN_KEYWORD: case TYPE_KEYWORD: case LOCK_KEYWORD: case OPEN_BRACE_TOKEN: case FORK_KEYWORD: case FOREACH_KEYWORD: case XMLNS_KEYWORD: case TRANSACTION_KEYWORD: case RETRY_KEYWORD: case ROLLBACK_KEYWORD: case MATCH_KEYWORD: case FAIL_KEYWORD: case CHECK_KEYWORD: case CHECKPANIC_KEYWORD: case TRAP_KEYWORD: case START_KEYWORD: case FLUSH_KEYWORD: case LEFT_ARROW_TOKEN: case WAIT_KEYWORD: case COMMIT_KEYWORD: case WORKER_KEYWORD: return true; default: if (isTypeStartingToken(nodeKind)) { return true; } if (isValidExpressionStart(nodeKind, 1)) { return true; } return false; } } /** * Parse on fail clause. 
* <p> * <code> * on-fail-clause := on fail typed-binding-pattern statement-block * </code> * * @return On fail clause node */ private STNode parseOnFailClause() { startContext(ParserRuleContext.ON_FAIL_CLAUSE); STNode onKeyword = parseOnKeyword(); STNode failKeyword = parseFailKeyword(); STNode typeDescriptor = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true, false); STNode identifier = parseIdentifier(ParserRuleContext.VARIABLE_REF); STNode blockStatement = parseBlockNode(); endContext(); return STNodeFactory.createOnFailClauseNode(onKeyword, failKeyword, typeDescriptor, identifier, blockStatement); } /** * Parse retry keyword. * * @return parsed node */ private STNode parseRetryKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.RETRY_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.RETRY_KEYWORD); return parseRetryKeyword(); } } /** * Parse transaction statement. * <p> * <code>rollback-stmt := "rollback" [expression] ";"</code> * * @return Rollback statement node */ private STNode parseRollbackStatement() { startContext(ParserRuleContext.ROLLBACK_STMT); STNode rollbackKeyword = parseRollbackKeyword(); STNode expression; if (peek().kind == SyntaxKind.SEMICOLON_TOKEN) { expression = STNodeFactory.createEmptyNode(); } else { expression = parseExpression(); } STNode semicolon = parseSemicolon(); endContext(); return STNodeFactory.createRollbackStatementNode(rollbackKeyword, expression, semicolon); } /** * Parse rollback keyword. * * @return Rollback keyword node */ private STNode parseRollbackKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.ROLLBACK_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.ROLLBACK_KEYWORD); return parseRollbackKeyword(); } } /** * Parse transactional expression. 
* <p> * <code>transactional-expr := "transactional"</code> * * @return Transactional expression node */ private STNode parseTransactionalExpression() { STNode transactionalKeyword = parseTransactionalKeyword(); return STNodeFactory.createTransactionalExpressionNode(transactionalKeyword); } /** * Parse transactional keyword. * * @return Transactional keyword node */ private STNode parseTransactionalKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.TRANSACTIONAL_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.TRANSACTIONAL_KEYWORD); return parseTransactionalKeyword(); } } /** * Parse base16 literal. * <p> * <code> * byte-array-literal := Base16Literal | Base64Literal * <br/> * Base16Literal := base16 WS ` HexGroup* WS ` * <br/> * Base64Literal := base64 WS ` Base64Group* [PaddedBase64Group] WS ` * </code> * * @return parsed node */ private STNode parseByteArrayLiteral() { STNode type; if (peek().kind == SyntaxKind.BASE16_KEYWORD) { type = parseBase16Keyword(); } else { type = parseBase64Keyword(); } STNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START); if (startingBackTick.isMissing()) { startingBackTick = SyntaxErrors.createMissingToken(SyntaxKind.BACKTICK_TOKEN); STNode endingBackTick = SyntaxErrors.createMissingToken(SyntaxKind.BACKTICK_TOKEN); STNode content = STNodeFactory.createEmptyNode(); STNode byteArrayLiteral = STNodeFactory.createByteArrayLiteralNode(type, startingBackTick, content, endingBackTick); byteArrayLiteral = SyntaxErrors.addDiagnostic(byteArrayLiteral, DiagnosticErrorCode.ERROR_MISSING_BYTE_ARRAY_CONTENT); return byteArrayLiteral; } STNode content = parseByteArrayContent(); return parseByteArrayLiteral(type, startingBackTick, content); } /** * Parse byte array literal. 
* * @param typeKeyword keyword token, possible values are `base16` and `base64` * @param startingBackTick starting backtick token * @param byteArrayContent byte array literal content to be validated * @return parsed byte array literal node */ private STNode parseByteArrayLiteral(STNode typeKeyword, STNode startingBackTick, STNode byteArrayContent) { STNode content = STNodeFactory.createEmptyNode(); STNode newStartingBackTick = startingBackTick; STNodeList items = (STNodeList) byteArrayContent; if (items.size() == 1) { STNode item = items.get(0); if (typeKeyword.kind == SyntaxKind.BASE16_KEYWORD && !isValidBase16LiteralContent(item.toString())) { newStartingBackTick = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startingBackTick, item, DiagnosticErrorCode.ERROR_INVALID_BASE16_CONTENT_IN_BYTE_ARRAY_LITERAL); } else if (typeKeyword.kind == SyntaxKind.BASE64_KEYWORD && !isValidBase64LiteralContent(item.toString())) { newStartingBackTick = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startingBackTick, item, DiagnosticErrorCode.ERROR_INVALID_BASE64_CONTENT_IN_BYTE_ARRAY_LITERAL); } else if (item.kind != SyntaxKind.TEMPLATE_STRING) { newStartingBackTick = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startingBackTick, item, DiagnosticErrorCode.ERROR_INVALID_CONTENT_IN_BYTE_ARRAY_LITERAL); } else { content = item; } } else if (items.size() > 1) { STNode clonedStartingBackTick = startingBackTick; for (int index = 0; index < items.size(); index++) { STNode item = items.get(index); clonedStartingBackTick = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(clonedStartingBackTick, item); } newStartingBackTick = SyntaxErrors.addDiagnostic(clonedStartingBackTick, DiagnosticErrorCode.ERROR_INVALID_CONTENT_IN_BYTE_ARRAY_LITERAL); } STNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_END); return STNodeFactory.createByteArrayLiteralNode(typeKeyword, newStartingBackTick, content, endingBackTick); } /** * Parse <code>base16</code> keyword. 
*
     * @return base16 keyword node
     */
    private STNode parseBase16Keyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.BASE16_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.BASE16_KEYWORD);
            return parseBase16Keyword();
        }
    }

    /**
     * Parse <code>base64</code> keyword.
     *
     * @return base64 keyword node
     */
    private STNode parseBase64Keyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.BASE64_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.BASE64_KEYWORD);
            return parseBase64Keyword();
        }
    }

    /**
     * Validate and parse byte array literal content.
     * An error is reported, if the content is invalid.
     *
     * @return parsed node
     */
    private STNode parseByteArrayContent() {
        STToken nextToken = peek();
        List<STNode> items = new ArrayList<>();
        // Collect template items up to the closing backtick (or EOF); content
        // validation happens afterwards in parseByteArrayLiteral.
        while (!isEndOfBacktickContent(nextToken.kind)) {
            STNode content = parseTemplateItem();
            items.add(content);
            nextToken = peek();
        }
        return STNodeFactory.createNodeList(items);
    }

    /**
     * Validate base16 literal content.
     * <p>
     * <code>
     * Base16Literal := base16 WS ` HexGroup* WS `
     * <br/>
     * HexGroup := WS HexDigit WS HexDigit
     * <br/>
     * WS := WhiteSpaceChar*
     * <br/>
     * WhiteSpaceChar := 0x9 | 0xA | 0xD | 0x20
     * </code>
     *
     * @param content the string surrounded by the backticks
     * @return <code>true</code>, if the string content is valid. <code>false</code> otherwise.
     */
    static boolean isValidBase16LiteralContent(String content) {
        char[] charArray = content.toCharArray();
        int hexDigitCount = 0;

        for (char c : charArray) {
            switch (c) {
                case LexerTerminals.TAB:
                case LexerTerminals.NEWLINE:
                case LexerTerminals.CARRIAGE_RETURN:
                case LexerTerminals.SPACE:
                    // Whitespace is permitted anywhere between hex digits.
                    break;
                default:
                    if (isHexDigit(c)) {
                        hexDigitCount++;
                    } else {
                        return false;
                    }
                    break;
            }
        }
        // Hex digits must pair up into whole bytes (HexGroup = two digits).
        return hexDigitCount % 2 == 0;
    }

    /**
     * Validate base64 literal content.
* <p>
     * <code>
     * Base64Literal := base64 WS ` Base64Group* [PaddedBase64Group] WS `
     * <br/>
     * Base64Group := WS Base64Char WS Base64Char WS Base64Char WS Base64Char
     * <br/>
     * PaddedBase64Group :=
     *    WS Base64Char WS Base64Char WS Base64Char WS PaddingChar
     *    | WS Base64Char WS Base64Char WS PaddingChar WS PaddingChar
     * <br/>
     * Base64Char := A .. Z | a .. z | 0 .. 9 | + | /
     * <br/>
     * PaddingChar := =
     * <br/>
     * WS := WhiteSpaceChar*
     * <br/>
     * WhiteSpaceChar := 0x9 | 0xA | 0xD | 0x20
     * </code>
     *
     * @param content the string surrounded by the backticks
     * @return <code>true</code>, if the string content is valid. <code>false</code> otherwise.
     */
    static boolean isValidBase64LiteralContent(String content) {
        char[] charArray = content.toCharArray();
        int base64CharCount = 0;
        int paddingCharCount = 0;

        for (char c : charArray) {
            switch (c) {
                case LexerTerminals.TAB:
                case LexerTerminals.NEWLINE:
                case LexerTerminals.CARRIAGE_RETURN:
                case LexerTerminals.SPACE:
                    // Whitespace is permitted anywhere within the literal.
                    break;
                case LexerTerminals.EQUAL:
                    paddingCharCount++;
                    break;
                default:
                    if (isBase64Char(c)) {
                        if (paddingCharCount == 0) {
                            base64CharCount++;
                        } else {
                            // A base64 char may not appear after a padding char.
                            return false;
                        }
                    } else {
                        return false;
                    }
                    break;
            }
        }

        if (paddingCharCount > 2) {
            // At most two `=` padding chars are allowed.
            return false;
        } else if (paddingCharCount == 0) {
            // Unpadded content must consist of complete 4-char groups.
            return base64CharCount % 4 == 0;
        } else {
            // Padded content: the final group holds (4 - padding) base64 chars.
            return base64CharCount % 4 == 4 - paddingCharCount;
        }
    }

    /**
     * <p>
     * Check whether a given char is a base64 char.
     * </p>
     * <code>Base64Char := A .. Z | a .. z | 0 .. 9 | + | /</code>
     *
     * @param c character to check
     * @return <code>true</code>, if the character represents a base64 char. <code>false</code> otherwise.
*/
    static boolean isBase64Char(int c) {
        if ('a' <= c && c <= 'z') {
            return true;
        }
        if ('A' <= c && c <= 'Z') {
            return true;
        }
        if (c == '+' || c == '/') {
            return true;
        }
        return isDigit(c);
    }

    // A hex digit is [0-9a-fA-F].
    static boolean isHexDigit(int c) {
        if ('a' <= c && c <= 'f') {
            return true;
        }
        if ('A' <= c && c <= 'F') {
            return true;
        }
        return isDigit(c);
    }

    // A decimal digit [0-9].
    static boolean isDigit(int c) {
        return ('0' <= c && c <= '9');
    }

    /**
     * Parse xml filter expression.
     * <p>
     * <code>xml-filter-expr := expression .< xml-name-pattern ></code>
     *
     * @param lhsExpr Preceding expression of .< token
     * @return Parsed node
     */
    private STNode parseXMLFilterExpression(STNode lhsExpr) {
        STNode xmlNamePatternChain = parseXMLFilterExpressionRhs();
        return STNodeFactory.createXMLFilterExpressionNode(lhsExpr, xmlNamePatternChain);
    }

    /**
     * Parse xml filter expression rhs.
     * <p>
     * <code>filer-expression-rhs := .< xml-name-pattern ></code>
     *
     * @return Parsed node
     */
    private STNode parseXMLFilterExpressionRhs() {
        STNode dotLTToken = parseDotLTToken();
        return parseXMLNamePatternChain(dotLTToken);
    }

    /**
     * Parse xml name pattern chain.
     * <p>
     * <code>
     * xml-name-pattern-chain := filer-expression-rhs | xml-element-children-step | xml-element-descendants-step
     * <br/>
     * filer-expression-rhs := .< xml-name-pattern >
     * <br/>
     * xml-element-children-step := /< xml-name-pattern >
     * <br/>
     * xml-element-descendants-step := /**\/<xml-name-pattern >
     * </code>
     *
     * @param startToken Preceding token of xml name pattern
     * @return Parsed node
     */
    private STNode parseXMLNamePatternChain(STNode startToken) {
        startContext(ParserRuleContext.XML_NAME_PATTERN);
        STNode xmlNamePattern = parseXMLNamePattern();
        STNode gtToken = parseGTToken();
        endContext();

        // An empty name pattern is an error; attach the diagnostic to the start token.
        startToken = cloneWithDiagnosticIfListEmpty(xmlNamePattern, startToken,
                DiagnosticErrorCode.ERROR_MISSING_XML_ATOMIC_NAME_PATTERN);
        return STNodeFactory.createXMLNamePatternChainingNode(startToken, xmlNamePattern, gtToken);
    }

    /**
     * Parse <code> .< </code> token.
* * @return Parsed node */ private STNode parseDotLTToken() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.DOT_LT_TOKEN) { return consume(); } else { recover(nextToken, ParserRuleContext.DOT_LT_TOKEN); return parseDotLTToken(); } } /** * Parse xml name pattern. * <p> * <code>xml-name-pattern := xml-atomic-name-pattern [| xml-atomic-name-pattern]*</code> * * @return Parsed node */ private STNode parseXMLNamePattern() { List<STNode> xmlAtomicNamePatternList = new ArrayList<>(); STToken nextToken = peek(); if (isEndOfXMLNamePattern(nextToken.kind)) { return STNodeFactory.createNodeList(xmlAtomicNamePatternList); } STNode xmlAtomicNamePattern = parseXMLAtomicNamePattern(); xmlAtomicNamePatternList.add(xmlAtomicNamePattern); STNode separator; while (!isEndOfXMLNamePattern(peek().kind)) { separator = parseXMLNamePatternSeparator(); if (separator == null) { break; } xmlAtomicNamePatternList.add(separator); xmlAtomicNamePattern = parseXMLAtomicNamePattern(); xmlAtomicNamePatternList.add(xmlAtomicNamePattern); } return STNodeFactory.createNodeList(xmlAtomicNamePatternList); } private boolean isEndOfXMLNamePattern(SyntaxKind tokenKind) { switch (tokenKind) { case GT_TOKEN: case EOF_TOKEN: return true; case IDENTIFIER_TOKEN: case ASTERISK_TOKEN: case COLON_TOKEN: default: return false; } } private STNode parseXMLNamePatternSeparator() { STToken token = peek(); switch (token.kind) { case PIPE_TOKEN: return consume(); case GT_TOKEN: case EOF_TOKEN: return null; default: recover(token, ParserRuleContext.XML_NAME_PATTERN_RHS); return parseXMLNamePatternSeparator(); } } /** * Parse xml atomic name pattern. 
* <p> * <code> * xml-atomic-name-pattern := * * * | identifier * | xml-namespace-prefix : identifier * | xml-namespace-prefix : * * </code> * * @return Parsed node */ private STNode parseXMLAtomicNamePattern() { startContext(ParserRuleContext.XML_ATOMIC_NAME_PATTERN); STNode atomicNamePattern = parseXMLAtomicNamePatternBody(); endContext(); return atomicNamePattern; } private STNode parseXMLAtomicNamePatternBody() { STToken token = peek(); STNode identifier; switch (token.kind) { case ASTERISK_TOKEN: return consume(); case IDENTIFIER_TOKEN: identifier = consume(); break; default: recover(token, ParserRuleContext.XML_ATOMIC_NAME_PATTERN_START); return parseXMLAtomicNamePatternBody(); } return parseXMLAtomicNameIdentifier(identifier); } private STNode parseXMLAtomicNameIdentifier(STNode identifier) { STToken token = peek(); if (token.kind == SyntaxKind.COLON_TOKEN) { STNode colon = consume(); STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN || nextToken.kind == SyntaxKind.ASTERISK_TOKEN) { STToken endToken = consume(); return STNodeFactory.createXMLAtomicNamePatternNode(identifier, colon, endToken); } } return STNodeFactory.createSimpleNameReferenceNode(identifier); } /** * Parse xml step expression. * <p> * <code>xml-step-expr := expression xml-step-start</code> * * @param lhsExpr Preceding expression of /*, /<, or /**\/< token * @return Parsed node */ private STNode parseXMLStepExpression(STNode lhsExpr) { STNode xmlStepStart = parseXMLStepStart(); return STNodeFactory.createXMLStepExpressionNode(lhsExpr, xmlStepStart); } /** * Parse xml filter expression rhs. 
* <p> * <code> * xml-step-start := * xml-all-children-step * | xml-element-children-step * | xml-element-descendants-step * <br/> * xml-all-children-step := /* * </code> * * @return Parsed node */ private STNode parseXMLStepStart() { STToken token = peek(); STNode startToken; switch (token.kind) { case SLASH_ASTERISK_TOKEN: return consume(); case DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN: startToken = parseDoubleSlashDoubleAsteriskLTToken(); break; case SLASH_LT_TOKEN: default: startToken = parseSlashLTToken(); break; } return parseXMLNamePatternChain(startToken); } /** * Parse <code> /< </code> token. * * @return Parsed node */ private STNode parseSlashLTToken() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.SLASH_LT_TOKEN) { return consume(); } else { recover(nextToken, ParserRuleContext.SLASH_LT_TOKEN); return parseSlashLTToken(); } } /** * Parse <code> /< </code> token. * * @return Parsed node */ private STNode parseDoubleSlashDoubleAsteriskLTToken() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN) { return consume(); } else { recover(nextToken, ParserRuleContext.DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN); return parseDoubleSlashDoubleAsteriskLTToken(); } } /** * Parse match statement. 
* <p> * <code>match-stmt := match action-or-expr { match-clause+ } [on-fail-clause]</code> * * @return Match statement */ private STNode parseMatchStatement() { startContext(ParserRuleContext.MATCH_STMT); STNode matchKeyword = parseMatchKeyword(); STNode actionOrExpr = parseActionOrExpression(); startContext(ParserRuleContext.MATCH_BODY); STNode openBrace = parseOpenBrace(); List<STNode> matchClausesList = new ArrayList<>(); while (!isEndOfMatchClauses(peek().kind)) { STNode clause = parseMatchClause(); matchClausesList.add(clause); } STNode matchClauses = STNodeFactory.createNodeList(matchClausesList); if (isNodeListEmpty(matchClauses)) { openBrace = SyntaxErrors.addDiagnostic(openBrace, DiagnosticErrorCode.ERROR_MATCH_STATEMENT_SHOULD_HAVE_ONE_OR_MORE_MATCH_CLAUSES); } STNode closeBrace = parseCloseBrace(); endContext(); endContext(); STNode onFailClause = parseOptionalOnFailClause(); return STNodeFactory.createMatchStatementNode(matchKeyword, actionOrExpr, openBrace, matchClauses, closeBrace, onFailClause); } /** * Parse match keyword. * * @return Match keyword node */ private STNode parseMatchKeyword() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.MATCH_KEYWORD) { return consume(); } else { recover(nextToken, ParserRuleContext.MATCH_KEYWORD); return parseMatchKeyword(); } } private boolean isEndOfMatchClauses(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case EOF_TOKEN: case CLOSE_BRACE_TOKEN: return true; default: return false; } } /** * Parse a single match match clause. 
* <p> * <code> * match-clause := match-pattern-list [match-guard] => block-stmt * <br/> * match-guard := if expression * </code> * * @return A match clause */ private STNode parseMatchClause() { STNode matchPatterns = parseMatchPatternList(); STNode matchGuard = parseMatchGuard(); STNode rightDoubleArrow = parseDoubleRightArrow(); STNode blockStmt = parseBlockNode(); return STNodeFactory.createMatchClauseNode(matchPatterns, matchGuard, rightDoubleArrow, blockStmt); } /** * Parse match guard. * <p> * <code>match-guard := if expression</code> * * @return Match guard */ private STNode parseMatchGuard() { switch (peek().kind) { case IF_KEYWORD: STNode ifKeyword = parseIfKeyword(); STNode expr = parseExpression(DEFAULT_OP_PRECEDENCE, true, false, true, false); return STNodeFactory.createMatchGuardNode(ifKeyword, expr); case RIGHT_DOUBLE_ARROW_TOKEN: return STNodeFactory.createEmptyNode(); default: recover(peek(), ParserRuleContext.OPTIONAL_MATCH_GUARD); return parseMatchGuard(); } } /** * Parse match patterns list. * <p> * <code>match-pattern-list := match-pattern (| match-pattern)*</code> * * @return Match patterns list */ private STNode parseMatchPatternList() { startContext(ParserRuleContext.MATCH_PATTERN); List<STNode> matchClauses = new ArrayList<>(); while (!isEndOfMatchPattern(peek().kind)) { STNode clause = parseMatchPattern(); if (clause == null) { break; } matchClauses.add(clause); STNode seperator = parseMatchPatternListMemberRhs(); if (seperator == null) { break; } matchClauses.add(seperator); } endContext(); return STNodeFactory.createNodeList(matchClauses); } private boolean isEndOfMatchPattern(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case PIPE_TOKEN: case IF_KEYWORD: case RIGHT_ARROW_TOKEN: return true; default: return false; } } /** * Parse match pattern. 
     * <p>
     * <code>
     * match-pattern := var binding-pattern
     *                | wildcard-match-pattern
     *                | const-pattern
     *                | list-match-pattern
     *                | mapping-match-pattern
     *                | error-match-pattern
     * </code>
     *
     * @return Match pattern
     */
    private STNode parseMatchPattern() {
        switch (peek().kind) {
            // Tokens that can only start a constant pattern.
            case OPEN_PAREN_TOKEN:
            case NULL_KEYWORD:
            case TRUE_KEYWORD:
            case FALSE_KEYWORD:
            case PLUS_TOKEN:
            case MINUS_TOKEN:
            case DECIMAL_INTEGER_LITERAL_TOKEN:
            case HEX_INTEGER_LITERAL_TOKEN:
            case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
            case HEX_FLOATING_POINT_LITERAL_TOKEN:
            case STRING_LITERAL_TOKEN:
                return parseSimpleConstExpr();
            case IDENTIFIER_TOKEN:
                // An identifier is ambiguous: it may be a const reference pattern or
                // the error-type-reference of an error match pattern.
                STNode typeRefOrConstExpr = parseQualifiedIdentifier(ParserRuleContext.MATCH_PATTERN);
                return parseErrorMatchPatternOrConsPattern(typeRefOrConstExpr);
            case VAR_KEYWORD:
                return parseVarTypedBindingPattern();
            case OPEN_BRACKET_TOKEN:
                return parseListMatchPattern();
            case OPEN_BRACE_TOKEN:
                return parseMappingMatchPattern();
            case ERROR_KEYWORD:
                return parseErrorMatchPattern();
            default:
                recover(peek(), ParserRuleContext.MATCH_PATTERN_START);
                return parseMatchPattern();
        }
    }

    // Returns the `|` separator between patterns, or null when the pattern list
    // has ended (a match guard or `=>` follows).
    private STNode parseMatchPatternListMemberRhs() {
        switch (peek().kind) {
            case PIPE_TOKEN:
                return parsePipeToken();
            case IF_KEYWORD:
            case RIGHT_DOUBLE_ARROW_TOKEN:
                return null;
            default:
                recover(peek(), ParserRuleContext.MATCH_PATTERN_LIST_MEMBER_RHS);
                return parseMatchPatternListMemberRhs();
        }
    }

    /**
     * Parse var typed binding pattern.
     * <p>
     * <code>var binding-pattern</code>
     * </p>
     *
     * @return Parsed typed binding pattern node
     */
    private STNode parseVarTypedBindingPattern() {
        STNode varKeyword = parseVarKeyword();
        // `var` acts as the type descriptor of the typed binding pattern.
        STNode varTypeDesc = createBuiltinSimpleNameReference(varKeyword);
        STNode bindingPattern = parseBindingPattern();
        return STNodeFactory.createTypedBindingPatternNode(varTypeDesc, bindingPattern);
    }

    /**
     * Parse var keyword.
     *
     * @return Var keyword node
     */
    private STNode parseVarKeyword() {
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.VAR_KEYWORD) {
            return consume();
        } else {
            // Run error recovery on the unexpected token and retry.
            recover(nextToken, ParserRuleContext.VAR_KEYWORD);
            return parseVarKeyword();
        }
    }

    /**
     * Parse list match pattern.
     * <p>
     * <code>
     * list-match-pattern := [ list-member-match-patterns ]
     * list-member-match-patterns :=
     *    match-pattern (, match-pattern)* [, rest-match-pattern]
     *    | [ rest-match-pattern ]
     * </code>
     * </p>
     *
     * @return Parsed list match pattern node
     */
    private STNode parseListMatchPattern() {
        startContext(ParserRuleContext.LIST_MATCH_PATTERN);
        STNode openBracketToken = parseOpenBracket();
        List<STNode> matchPatternList = new ArrayList<>();
        STNode listMatchPatternMemberRhs = null;
        boolean isEndOfFields = false;
        while (!isEndOfListMatchPattern()) {
            STNode listMatchPatternMember = parseListMatchPatternMember();
            matchPatternList.add(listMatchPatternMember);
            listMatchPatternMemberRhs = parseListMatchPatternMemberRhs();
            // A rest pattern must be the last member; stop and treat whatever
            // follows as invalid below.
            if (listMatchPatternMember.kind == SyntaxKind.REST_MATCH_PATTERN) {
                isEndOfFields = true;
                break;
            }
            if (listMatchPatternMemberRhs != null) {
                matchPatternList.add(listMatchPatternMemberRhs);
            } else {
                break;
            }
        }
        // Consume members appearing after the rest pattern as invalid (minutiae
        // attached to the last valid node) with a diagnostic.
        while (isEndOfFields && listMatchPatternMemberRhs != null) {
            updateLastNodeInListWithInvalidNode(matchPatternList, listMatchPatternMemberRhs, null);
            if (peek().kind == SyntaxKind.CLOSE_BRACKET_TOKEN) {
                break;
            }
            STNode invalidField = parseListMatchPatternMember();
            updateLastNodeInListWithInvalidNode(matchPatternList, invalidField,
                    DiagnosticErrorCode.ERROR_MATCH_PATTERN_AFTER_REST_MATCH_PATTERN);
            listMatchPatternMemberRhs = parseListMatchPatternMemberRhs();
        }
        STNode matchPatternListNode = STNodeFactory.createNodeList(matchPatternList);
        STNode closeBracketToken = parseCloseBracket();
        endContext();
        return STNodeFactory.createListMatchPatternNode(openBracketToken, matchPatternListNode, closeBracketToken);
    }

    // The member list of a list match pattern ends at `]` (or EOF on malformed input).
    public boolean isEndOfListMatchPattern() {
        switch (peek().kind) {
            case CLOSE_BRACKET_TOKEN:
            case EOF_TOKEN:
                return true;
            default:
                return false;
        }
    }

    // A member is either a rest pattern (`... var x`) or any ordinary match pattern.
    private STNode parseListMatchPatternMember() {
        STNode nextToken = peek();
        switch (nextToken.kind) {
            case ELLIPSIS_TOKEN:
                return parseRestMatchPattern();
            default:
                return parseMatchPattern();
        }
    }

    /**
     * Parse rest match pattern.
     * <p>
     * <code>
     * rest-match-pattern := ... var variable-name
     * </code>
     * </p>
     *
     * @return Parsed rest match pattern node
     */
    private STNode parseRestMatchPattern() {
        startContext(ParserRuleContext.REST_MATCH_PATTERN);
        STNode ellipsisToken = parseEllipsis();
        STNode varKeywordToken = parseVarKeyword();
        STNode variableName = parseVariableName();
        endContext();
        STSimpleNameReferenceNode simpleNameReferenceNode =
                (STSimpleNameReferenceNode) STNodeFactory.createSimpleNameReferenceNode(variableName);
        return STNodeFactory.createRestMatchPatternNode(ellipsisToken, varKeywordToken, simpleNameReferenceNode);
    }

    // Returns the `,` between list members, or null when the member list has ended.
    private STNode parseListMatchPatternMemberRhs() {
        switch (peek().kind) {
            case COMMA_TOKEN:
                return parseComma();
            case CLOSE_BRACKET_TOKEN:
            case EOF_TOKEN:
                return null;
            default:
                recover(peek(), ParserRuleContext.LIST_MATCH_PATTERN_MEMBER_RHS);
                return parseListMatchPatternMemberRhs();
        }
    }

    /**
     * Parse mapping match pattern.
     * <p>
     * mapping-match-pattern := { field-match-patterns }
     * <br/>
     * field-match-patterns := field-match-pattern (, field-match-pattern)* [, rest-match-pattern]
     *                       | [ rest-match-pattern ]
     * <br/>
     * field-match-pattern := field-name : match-pattern
     * <br/>
     * rest-match-pattern := ... var variable-name
     * </p>
     *
     * @return Parsed Node.
     */
    private STNode parseMappingMatchPattern() {
        startContext(ParserRuleContext.MAPPING_MATCH_PATTERN);
        STNode openBraceToken = parseOpenBrace();
        List<STNode> fieldMatchPatternList = new ArrayList<>();
        STNode fieldMatchPatternRhs = null;
        boolean isEndOfFields = false;
        while (!isEndOfMappingMatchPattern()) {
            STNode fieldMatchPatternMember = parseFieldMatchPatternMember();
            fieldMatchPatternList.add(fieldMatchPatternMember);
            fieldMatchPatternRhs = parseFieldMatchPatternRhs();
            // A rest pattern must be the last field; anything after it is invalid.
            if (fieldMatchPatternMember.kind == SyntaxKind.REST_MATCH_PATTERN) {
                isEndOfFields = true;
                break;
            }
            if (fieldMatchPatternRhs != null) {
                fieldMatchPatternList.add(fieldMatchPatternRhs);
            } else {
                break;
            }
        }
        // Attach fields appearing after the rest pattern to the last valid node
        // as invalid minutiae, with a diagnostic.
        while (isEndOfFields && fieldMatchPatternRhs != null) {
            updateLastNodeInListWithInvalidNode(fieldMatchPatternList, fieldMatchPatternRhs, null);
            if (peek().kind == SyntaxKind.CLOSE_BRACE_TOKEN) {
                break;
            }
            STNode invalidField = parseFieldMatchPatternMember();
            updateLastNodeInListWithInvalidNode(fieldMatchPatternList, invalidField,
                    DiagnosticErrorCode.ERROR_MATCH_PATTERN_AFTER_REST_MATCH_PATTERN);
            fieldMatchPatternRhs = parseFieldMatchPatternRhs();
        }
        STNode fieldMatchPatterns = STNodeFactory.createNodeList(fieldMatchPatternList);
        STNode closeBraceToken = parseCloseBrace();
        endContext();
        return STNodeFactory.createMappingMatchPatternNode(openBraceToken, fieldMatchPatterns, closeBraceToken);
    }

    // A member is either `field-name : match-pattern` or a rest pattern.
    private STNode parseFieldMatchPatternMember() {
        switch (peek().kind) {
            case IDENTIFIER_TOKEN:
                return parseFieldMatchPattern();
            case ELLIPSIS_TOKEN:
                return parseRestMatchPattern();
            default:
                recover(peek(), ParserRuleContext.FIELD_MATCH_PATTERN_MEMBER);
                return parseFieldMatchPatternMember();
        }
    }

    /**
     * Parse field match pattern.
     * <p>
     * field-match-pattern := field-name : match-pattern
     * </p>
     *
     * @return Parsed field match pattern node
     */
    public STNode parseFieldMatchPattern() {
        STNode fieldNameNode = parseVariableName();
        STNode colonToken = parseColon();
        STNode matchPattern = parseMatchPattern();
        return STNodeFactory.createFieldMatchPatternNode(fieldNameNode, colonToken, matchPattern);
    }

    // The field list of a mapping match pattern ends at `}` (or EOF on malformed input).
    public boolean isEndOfMappingMatchPattern() {
        switch (peek().kind) {
            case CLOSE_BRACE_TOKEN:
            case EOF_TOKEN:
                return true;
            default:
                return false;
        }
    }

    // Returns the `,` between fields, or null when the field list has ended.
    private STNode parseFieldMatchPatternRhs() {
        switch (peek().kind) {
            case COMMA_TOKEN:
                return parseComma();
            case CLOSE_BRACE_TOKEN:
            case EOF_TOKEN:
                return null;
            default:
                recover(peek(), ParserRuleContext.FIELD_MATCH_PATTERN_MEMBER_RHS);
                return parseFieldMatchPatternRhs();
        }
    }

    // Disambiguates an already-parsed identifier: if `(` follows, it was the
    // error-type-reference of an error match pattern written without the `error`
    // keyword (a missing `error` token is fabricated); otherwise it is a const
    // reference pattern.
    private STNode parseErrorMatchPatternOrConsPattern(STNode typeRefOrConstExpr) {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case OPEN_PAREN_TOKEN:
                STNode errorKeyword = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.ERROR_KEYWORD,
                        ParserRuleContext.ERROR_KEYWORD);
                startContext(ParserRuleContext.ERROR_MATCH_PATTERN);
                return parseErrorMatchPattern(errorKeyword, typeRefOrConstExpr);
            default:
                if (isMatchPatternEnd(peek().kind)) {
                    return typeRefOrConstExpr;
                }
                recover(peek(), ParserRuleContext.ERROR_MATCH_PATTERN_OR_CONST_PATTERN, typeRefOrConstExpr);
                return parseErrorMatchPatternOrConsPattern(typeRefOrConstExpr);
        }
    }

    // Tokens that can legally follow a complete match pattern.
    private boolean isMatchPatternEnd(SyntaxKind tokenKind) {
        switch (tokenKind) {
            case RIGHT_DOUBLE_ARROW_TOKEN:
            case COMMA_TOKEN:
            case CLOSE_BRACE_TOKEN:
            case CLOSE_BRACKET_TOKEN:
            case CLOSE_PAREN_TOKEN:
            case PIPE_TOKEN:
            case IF_KEYWORD:
            case EOF_TOKEN:
                return true;
            default:
                return false;
        }
    }

    /**
     * Parse functional match pattern.
* <p> * error-match-pattern := error [error-type-reference] ( error-arg-list-match-pattern ) * error-arg-list-match-pattern := * error-message-match-pattern [, error-cause-match-pattern] [, error-field-match-patterns] * | [error-field-match-patterns] * error-message-match-pattern := simple-match-pattern * error-cause-match-pattern := simple-match-pattern | error-match-pattern * simple-match-pattern := * wildcard-match-pattern * | const-pattern * | var variable-name * error-field-match-patterns := * named-arg-match-pattern (, named-arg-match-pattern)* [, rest-match-pattern] * | rest-match-pattern * named-arg-match-pattern := arg-name = match-pattern * </p> * * @return Parsed functional match pattern node. */ private STNode parseErrorMatchPattern() { startContext(ParserRuleContext.ERROR_MATCH_PATTERN); STNode errorKeyword = consume(); return parseErrorMatchPattern(errorKeyword); } private STNode parseErrorMatchPattern(STNode errorKeyword) { STToken nextToken = peek(); STNode typeRef; switch (nextToken.kind) { case OPEN_PAREN_TOKEN: typeRef = STNodeFactory.createEmptyNode(); break; default: if (isPredeclaredIdentifier(nextToken.kind)) { typeRef = parseTypeReference(); break; } recover(peek(), ParserRuleContext.ERROR_MATCH_PATTERN_ERROR_KEYWORD_RHS); return parseErrorMatchPattern(errorKeyword); } return parseErrorMatchPattern(errorKeyword, typeRef); } private STNode parseErrorMatchPattern(STNode errorKeyword, STNode typeRef) { STNode openParenthesisToken = parseOpenParenthesis(); STNode argListMatchPatternNode = parseErrorArgListMatchPatterns(); STNode closeParenthesisToken = parseCloseParenthesis(); endContext(); return STNodeFactory.createErrorMatchPatternNode(errorKeyword, typeRef, openParenthesisToken, argListMatchPatternNode, closeParenthesisToken); } private STNode parseErrorArgListMatchPatterns() { List<STNode> argListMatchPatterns = new ArrayList<>(); if (isEndOfErrorFieldMatchPatterns()) { return STNodeFactory.createNodeList(argListMatchPatterns); } 
startContext(ParserRuleContext.ERROR_ARG_LIST_MATCH_PATTERN_FIRST_ARG); STNode firstArg = parseErrorArgListMatchPattern(ParserRuleContext.ERROR_ARG_LIST_MATCH_PATTERN_START); endContext(); if (firstArg == null) { return STNodeFactory.createNodeList(argListMatchPatterns); } if (isSimpleMatchPattern(firstArg.kind)) { argListMatchPatterns.add(firstArg); STNode argEnd = parseErrorArgListMatchPatternEnd(ParserRuleContext.ERROR_MESSAGE_MATCH_PATTERN_END); if (argEnd != null) { STNode secondArg = parseErrorArgListMatchPattern(ParserRuleContext.ERROR_MESSAGE_MATCH_PATTERN_RHS); if (isValidSecondArgMatchPattern(secondArg.kind)) { argListMatchPatterns.add(argEnd); argListMatchPatterns.add(secondArg); } else { updateLastNodeInListWithInvalidNode(argListMatchPatterns, argEnd, null); updateLastNodeInListWithInvalidNode(argListMatchPatterns, secondArg, DiagnosticErrorCode.ERROR_MATCH_PATTERN_NOT_ALLOWED); } } } else { if (firstArg.kind != SyntaxKind.NAMED_ARG_MATCH_PATTERN && firstArg.kind != SyntaxKind.REST_MATCH_PATTERN) { addInvalidNodeToNextToken(firstArg, DiagnosticErrorCode.ERROR_MATCH_PATTERN_NOT_ALLOWED); } else { argListMatchPatterns.add(firstArg); } } parseErrorFieldMatchPatterns(argListMatchPatterns); return STNodeFactory.createNodeList(argListMatchPatterns); } private boolean isSimpleMatchPattern(SyntaxKind matchPatternKind) { switch (matchPatternKind) { case IDENTIFIER_TOKEN: case SIMPLE_NAME_REFERENCE: case NUMERIC_LITERAL: case STRING_LITERAL: case NULL_LITERAL: case NIL_LITERAL: case BOOLEAN_LITERAL: case TYPED_BINDING_PATTERN: case UNARY_EXPRESSION: return true; default: return false; } } private boolean isValidSecondArgMatchPattern(SyntaxKind syntaxKind) { switch (syntaxKind) { case ERROR_MATCH_PATTERN: case NAMED_ARG_MATCH_PATTERN: case REST_MATCH_PATTERN: return true; default: if (isSimpleMatchPattern(syntaxKind)) { return true; } return false; } } /** * Parse error field match patterns. 
     * error-field-match-patterns :=
     *     named-arg-match-pattern (, named-arg-match-pattern)* [, rest-match-pattern]
     *     | rest-match-pattern
     * named-arg-match-pattern := arg-name = match-pattern
     *
     * @param argListMatchPatterns List to which parsed field match patterns (and
     *                             their separators) are appended
     */
    private void parseErrorFieldMatchPatterns(List<STNode> argListMatchPatterns) {
        // Tracks the kind of the last accepted arg, to enforce that nothing
        // follows a rest pattern.
        SyntaxKind lastValidArgKind = SyntaxKind.NAMED_ARG_MATCH_PATTERN;
        while (!isEndOfErrorFieldMatchPatterns()) {
            STNode argEnd = parseErrorArgListMatchPatternEnd(ParserRuleContext.ERROR_FIELD_MATCH_PATTERN_RHS);
            if (argEnd == null) {
                break;
            }
            STNode currentArg = parseErrorArgListMatchPattern(ParserRuleContext.ERROR_FIELD_MATCH_PATTERN);
            DiagnosticErrorCode errorCode = validateErrorFieldMatchPatternOrder(lastValidArgKind, currentArg.kind);
            if (errorCode == null) {
                argListMatchPatterns.add(argEnd);
                argListMatchPatterns.add(currentArg);
                lastValidArgKind = currentArg.kind;
            } else if (argListMatchPatterns.size() == 0) {
                // Nothing valid to attach to yet: attach to the next token instead.
                addInvalidNodeToNextToken(argEnd, null);
                addInvalidNodeToNextToken(currentArg, errorCode);
            } else {
                updateLastNodeInListWithInvalidNode(argListMatchPatterns, argEnd, null);
                updateLastNodeInListWithInvalidNode(argListMatchPatterns, currentArg, errorCode);
            }
        }
    }

    // Error field match patterns and error field binding patterns share the same
    // terminator set.
    private boolean isEndOfErrorFieldMatchPatterns() {
        return isEndOfErrorFieldBindingPatterns();
    }

    // Returns the `,` between args, or null when the arg list ends at `)`.
    private STNode parseErrorArgListMatchPatternEnd(ParserRuleContext currentCtx) {
        switch (peek().kind) {
            case COMMA_TOKEN:
                return consume();
            case CLOSE_PAREN_TOKEN:
                return null;
            default:
                recover(peek(), currentCtx);
                return parseErrorArgListMatchPatternEnd(currentCtx);
        }
    }

    // Parse one arg of an error match pattern arg list. Returns null when the
    // list ends at `)`.
    private STNode parseErrorArgListMatchPattern(ParserRuleContext context) {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case ELLIPSIS_TOKEN:
                return parseRestMatchPattern();
            case IDENTIFIER_TOKEN:
                // Ambiguous: `name = pattern` (named arg) vs. a simple reference.
                return parseNamedOrSimpleMatchPattern();
            case OPEN_PAREN_TOKEN:
            case NULL_KEYWORD:
            case TRUE_KEYWORD:
            case FALSE_KEYWORD:
            case PLUS_TOKEN:
            case MINUS_TOKEN:
            case DECIMAL_INTEGER_LITERAL_TOKEN:
            case HEX_INTEGER_LITERAL_TOKEN:
            case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
            case HEX_FLOATING_POINT_LITERAL_TOKEN:
            case STRING_LITERAL_TOKEN:
            case OPEN_BRACKET_TOKEN:
            case OPEN_BRACE_TOKEN:
            case ERROR_KEYWORD:
                return parseMatchPattern();
            case VAR_KEYWORD:
                STNode varType = createBuiltinSimpleNameReference(consume());
                STNode variableName = createCaptureOrWildcardBP(parseVariableName());
                return STNodeFactory.createTypedBindingPatternNode(varType, variableName);
            case CLOSE_PAREN_TOKEN:
                return null;
            default:
                recover(nextToken, context);
                return parseErrorArgListMatchPattern(context);
        }
    }

    // After consuming an identifier: `=` makes it a named-arg match pattern,
    // otherwise the identifier itself is the (simple) pattern.
    private STNode parseNamedOrSimpleMatchPattern() {
        STNode identifier = consume();
        STToken secondToken = peek();
        switch (secondToken.kind) {
            case EQUAL_TOKEN:
                return parseNamedArgMatchPattern(identifier);
            case COMMA_TOKEN:
            case CLOSE_PAREN_TOKEN:
            default:
                return identifier;
        }
    }

    /**
     * Parses the next named arg match pattern.
     * <br/>
     * <code>named-arg-match-pattern := arg-name = match-pattern</code>
     * <br/>
     * <br/>
     *
     * @param identifier The already-consumed arg-name identifier
     * @return Named arg match pattern node
     */
    private STNode parseNamedArgMatchPattern(STNode identifier) {
        startContext(ParserRuleContext.NAMED_ARG_MATCH_PATTERN);
        STNode equalToken = parseAssignOp();
        STNode matchPattern = parseMatchPattern();
        endContext();
        return STNodeFactory.createNamedArgMatchPatternNode(identifier, equalToken, matchPattern);
    }

    // Returns null when `currentArgKind` may follow `prevArgKind`, otherwise the
    // diagnostic to report: only named-arg/rest patterns are allowed here, and
    // nothing may follow a rest pattern.
    private DiagnosticErrorCode validateErrorFieldMatchPatternOrder(SyntaxKind prevArgKind,
                                                                    SyntaxKind currentArgKind) {
        switch (currentArgKind) {
            case NAMED_ARG_MATCH_PATTERN:
            case REST_MATCH_PATTERN:
                if (prevArgKind == SyntaxKind.REST_MATCH_PATTERN) {
                    return DiagnosticErrorCode.ERROR_REST_ARG_FOLLOWED_BY_ANOTHER_ARG;
                }
                return null;
            default:
                return DiagnosticErrorCode.ERROR_MATCH_PATTERN_NOT_ALLOWED;
        }
    }

    /**
     * Parse markdown documentation.
     *
     * @return markdown documentation node
     */
    private STNode parseMarkdownDocumentation() {
        List<STNode> markdownDocLineList = new ArrayList<>();
        STToken nextToken = peek();
        // Consecutive documentation-string tokens are merged into one markdown
        // documentation node.
        while (nextToken.kind == SyntaxKind.DOCUMENTATION_STRING) {
            STToken documentationString = consume();
            STNode parsedDocLines = parseDocumentationString(documentationString);
            appendParsedDocumentationLines(markdownDocLineList, parsedDocLines);
            nextToken = peek();
        }
        STNode markdownDocLines = STNodeFactory.createNodeList(markdownDocLineList);
        return STNodeFactory.createMarkdownDocumentationNode(markdownDocLines);
    }

    /**
     * Parse documentation string.
     * <p>
     * Re-lexes and parses the raw documentation token with the dedicated
     * documentation lexer/parser, carrying over the token's leading minutiae
     * and diagnostics.
     *
     * @return markdown documentation line list node
     */
    private STNode parseDocumentationString(STToken documentationStringToken) {
        List<STNode> leadingTriviaList = getLeadingTriviaList(documentationStringToken.leadingMinutiae());
        Collection<STNodeDiagnostic> diagnostics = new ArrayList<>((documentationStringToken.diagnostics()));
        CharReader charReader = CharReader.from(documentationStringToken.text());
        DocumentationLexer documentationLexer = new DocumentationLexer(charReader, leadingTriviaList, diagnostics);
        AbstractTokenReader tokenReader = new TokenReader(documentationLexer);
        DocumentationParser documentationParser = new DocumentationParser(tokenReader);
        return documentationParser.parse();
    }

    // Flatten the leading-minutiae node's buckets into a plain list.
    private List<STNode> getLeadingTriviaList(STNode leadingMinutiaeNode) {
        List<STNode> leadingTriviaList = new ArrayList<>();
        int bucketCount = leadingMinutiaeNode.bucketCount();
        for (int i = 0; i < bucketCount; i++) {
            leadingTriviaList.add(leadingMinutiaeNode.childInBucket(i));
        }
        return leadingTriviaList;
    }

    // Append each parsed documentation line (one per bucket) to the accumulator.
    private void appendParsedDocumentationLines(List<STNode> markdownDocLineList, STNode parsedDocLines) {
        int bucketCount = parsedDocLines.bucketCount();
        for (int i = 0; i < bucketCount; i++) {
            STNode markdownDocLine = parsedDocLines.childInBucket(i);
            markdownDocLineList.add(markdownDocLine);
        }
    }

    /**
     * Parse any statement that starts with a token that has ambiguity
     * between being a type-desc or an expression.
     *
     * @param annots     Annotations attached to the statement
     * @param qualifiers Preceding qualifiers
     * @return Statement node
     */
    private STNode parseStmtStartsWithTypeOrExpr(STNode annots, List<STNode> qualifiers) {
        startContext(ParserRuleContext.AMBIGUOUS_STMT);
        STNode typeOrExpr = parseTypedBindingPatternOrExpr(qualifiers, true);
        return parseStmtStartsWithTypedBPOrExprRhs(annots, typeOrExpr);
    }

    // Once the ambiguity is resolved: a typed binding pattern becomes a var-decl
    // statement; otherwise continue parsing as an expression statement.
    private STNode parseStmtStartsWithTypedBPOrExprRhs(STNode annots, STNode typedBindingPatternOrExpr) {
        if (typedBindingPatternOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) {
            List<STNode> varDeclQualifiers = new ArrayList<>();
            switchContext(ParserRuleContext.VAR_DECL_STMT);
            return parseVarDeclRhs(annots, varDeclQualifiers, typedBindingPatternOrExpr, false);
        }
        STNode expr = getExpression(typedBindingPatternOrExpr);
        expr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, expr, false, true);
        return parseStatementStartWithExprRhs(expr);
    }

    private STNode parseTypedBindingPatternOrExpr(boolean allowAssignment) {
        List<STNode> typeDescQualifiers = new ArrayList<>();
        return parseTypedBindingPatternOrExpr(typeDescQualifiers, allowAssignment);
    }

    // Parse a construct that may turn out to be either a typed binding pattern
    // (start of a var-decl) or an expression, dispatching on the first token.
    private STNode parseTypedBindingPatternOrExpr(List<STNode> qualifiers, boolean allowAssignment) {
        parseTypeDescQualifiers(qualifiers);
        STToken nextToken = peek();
        STNode typeOrExpr;
        if (isPredeclaredIdentifier(nextToken.kind)) {
            reportInvalidQualifierList(qualifiers);
            typeOrExpr = parseQualifiedIdentifier(ParserRuleContext.TYPE_NAME_OR_VAR_NAME);
            return parseTypedBindingPatternOrExprRhs(typeOrExpr, allowAssignment);
        }
        switch (nextToken.kind) {
            case OPEN_PAREN_TOKEN:
                reportInvalidQualifierList(qualifiers);
                return parseTypedBPOrExprStartsWithOpenParenthesis();
            case FUNCTION_KEYWORD:
                // Could be an anon-func expression or a function-type descriptor.
                return parseAnonFuncExprOrTypedBPWithFuncType(qualifiers);
            case OPEN_BRACKET_TOKEN:
                reportInvalidQualifierList(qualifiers);
                typeOrExpr = parseTupleTypeDescOrExprStartsWithOpenBracket();
                return parseTypedBindingPatternOrExprRhs(typeOrExpr, allowAssignment);
            // Can be a singleton type or expression.
            case DECIMAL_INTEGER_LITERAL_TOKEN:
            case HEX_INTEGER_LITERAL_TOKEN:
            case STRING_LITERAL_TOKEN:
            case NULL_KEYWORD:
            case TRUE_KEYWORD:
            case FALSE_KEYWORD:
            case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
            case HEX_FLOATING_POINT_LITERAL_TOKEN:
                reportInvalidQualifierList(qualifiers);
                STNode basicLiteral = parseBasicLiteral();
                return parseTypedBindingPatternOrExprRhs(basicLiteral, allowAssignment);
            default:
                if (isValidExpressionStart(nextToken.kind, 1)) {
                    reportInvalidQualifierList(qualifiers);
                    return parseActionOrExpressionInLhs(STNodeFactory.createEmptyNodeList());
                }
                return parseTypedBindingPattern(qualifiers, ParserRuleContext.VAR_DECL_STMT);
        }
    }

    /**
     * Parse the component after the ambiguous starting node. Ambiguous node could be either an expr
     * or a type-desc. The component followed by this ambiguous node could be the binding-pattern or
     * the expression-rhs.
     *
     * @param typeOrExpr      Type desc or the expression
     * @param allowAssignment Flag indicating whether to allow assignment. i.e.: whether this is a
     *                        valid lvalue expression
     * @return Typed-binding-pattern node or an expression node
     */
    private STNode parseTypedBindingPatternOrExprRhs(STNode typeOrExpr, boolean allowAssignment) {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case PIPE_TOKEN:
                // `T | ...` — union type-desc or binary bitwise-or expression,
                // depending on what the right-hand side resolves to.
                STToken nextNextToken = peek(2);
                if (nextNextToken.kind == SyntaxKind.EQUAL_TOKEN) {
                    return typeOrExpr;
                }
                STNode pipe = parsePipeToken();
                STNode rhsTypedBPOrExpr = parseTypedBindingPatternOrExpr(allowAssignment);
                if (rhsTypedBPOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) {
                    STTypedBindingPatternNode typedBP = (STTypedBindingPatternNode) rhsTypedBPOrExpr;
                    typeOrExpr = getTypeDescFromExpr(typeOrExpr);
                    STNode newTypeDesc = createUnionTypeDesc(typeOrExpr, pipe, typedBP.typeDescriptor);
                    return STNodeFactory.createTypedBindingPatternNode(newTypeDesc, typedBP.bindingPattern);
                }
                if (peek().kind == SyntaxKind.EQUAL_TOKEN) {
                    return createCaptureBPWithMissingVarName(typeOrExpr, pipe, rhsTypedBPOrExpr);
                }
                return STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, typeOrExpr, pipe,
                        rhsTypedBPOrExpr);
            case BITWISE_AND_TOKEN:
                // `T & ...` — intersection type-desc or binary bitwise-and expression.
                nextNextToken = peek(2);
                if (nextNextToken.kind == SyntaxKind.EQUAL_TOKEN) {
                    return typeOrExpr;
                }
                STNode ampersand = parseBinaryOperator();
                rhsTypedBPOrExpr = parseTypedBindingPatternOrExpr(allowAssignment);
                if (rhsTypedBPOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) {
                    STTypedBindingPatternNode typedBP = (STTypedBindingPatternNode) rhsTypedBPOrExpr;
                    typeOrExpr = getTypeDescFromExpr(typeOrExpr);
                    STNode newTypeDesc = createIntersectionTypeDesc(typeOrExpr, ampersand, typedBP.typeDescriptor);
                    return STNodeFactory.createTypedBindingPatternNode(newTypeDesc, typedBP.bindingPattern);
                }
                if (peek().kind == SyntaxKind.EQUAL_TOKEN) {
                    return createCaptureBPWithMissingVarName(typeOrExpr, ampersand, rhsTypedBPOrExpr);
                }
                return STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, typeOrExpr,
                        ampersand, rhsTypedBPOrExpr);
            case SEMICOLON_TOKEN:
                if (isDefiniteExpr(typeOrExpr.kind)) {
                    return typeOrExpr;
                }
                if (isDefiniteTypeDesc(typeOrExpr.kind) || !isAllBasicLiterals(typeOrExpr)) {
                    // Treat as a type-desc of a (recovering) var-decl.
                    STNode typeDesc = getTypeDescFromExpr(typeOrExpr);
                    return parseTypeBindingPatternStartsWithAmbiguousNode(typeDesc);
                }
                return typeOrExpr;
            case IDENTIFIER_TOKEN:
            case QUESTION_MARK_TOKEN:
                if (isAmbiguous(typeOrExpr) || isDefiniteTypeDesc(typeOrExpr.kind)) {
                    // An identifier or question mark after an ambiguous node
                    // implies it was a type-desc (e.g. `T x` or `T?`).
                    STNode typeDesc = getTypeDescFromExpr(typeOrExpr);
                    return parseTypeBindingPatternStartsWithAmbiguousNode(typeDesc);
                }
                return typeOrExpr;
            case EQUAL_TOKEN:
                return typeOrExpr;
            case OPEN_BRACKET_TOKEN:
                return parseTypedBindingPatternOrMemberAccess(typeOrExpr, false, allowAssignment,
                        ParserRuleContext.AMBIGUOUS_STMT);
            case OPEN_BRACE_TOKEN:
            case ERROR_KEYWORD:
                STNode typeDesc = getTypeDescFromExpr(typeOrExpr);
                return parseTypeBindingPatternStartsWithAmbiguousNode(typeDesc);
            default:
                if (isCompoundBinaryOperator(nextToken.kind)) {
                    return typeOrExpr;
                }
                if (isValidExprRhsStart(nextToken.kind, typeOrExpr.kind)) {
                    return typeOrExpr;
                }
                STToken token = peek();
                recover(token, ParserRuleContext.BINDING_PATTERN_OR_EXPR_RHS, typeOrExpr, allowAssignment);
                return parseTypedBindingPatternOrExprRhs(typeOrExpr, allowAssignment);
        }
    }

    // Both sides turned out to be type-descs joined by `|` or `&`, but no
    // variable name followed: build a typed binding pattern with a missing
    // (diagnostic-carrying) variable-name token.
    private STNode createCaptureBPWithMissingVarName(STNode lhsType, STNode separatorToken, STNode rhsType) {
        lhsType = getTypeDescFromExpr(lhsType);
        rhsType = getTypeDescFromExpr(rhsType);
        STNode newTypeDesc;
        if (separatorToken.kind == SyntaxKind.PIPE_TOKEN) {
            newTypeDesc = createUnionTypeDesc(lhsType, separatorToken, rhsType);
        } else {
            newTypeDesc = createIntersectionTypeDesc(lhsType, separatorToken, rhsType);
        }
        STNode identifier = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,
                ParserRuleContext.VARIABLE_NAME);
        STNode captureBP = STNodeFactory.createCaptureBindingPatternNode(identifier);
        return STNodeFactory.createTypedBindingPatternNode(newTypeDesc, captureBP);
    }

    // The ambiguous node resolved to a type-desc: finish the (possibly complex)
    // type-desc and parse the binding pattern of the var-decl.
    private STNode parseTypeBindingPatternStartsWithAmbiguousNode(STNode typeDesc) {
        typeDesc = parseComplexTypeDescriptor(typeDesc, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN,
                false);
        return parseTypedBindingPatternTypeRhs(typeDesc, ParserRuleContext.VAR_DECL_STMT);
    }

    private STNode parseTypedBPOrExprStartsWithOpenParenthesis() {
        STNode exprOrTypeDesc = parseTypedDescOrExprStartsWithOpenParenthesis();
        if (isDefiniteTypeDesc(exprOrTypeDesc.kind)) {
            return parseTypeBindingPatternStartsWithAmbiguousNode(exprOrTypeDesc);
        }
        return parseTypedBindingPatternOrExprRhs(exprOrTypeDesc, false);
    }

    // Relies on SyntaxKind ordinal ranges: all type-desc kinds lie between
    // RECORD_TYPE_DESC and SINGLETON_TYPE_DESC.
    private boolean isDefiniteTypeDesc(SyntaxKind kind) {
        return kind.compareTo(SyntaxKind.RECORD_TYPE_DESC) >= 0 && kind.compareTo(SyntaxKind.SINGLETON_TYPE_DESC) <= 0;
    }

    // Name references stay ambiguous; everything in the expression kind range
    // (BINARY_EXPRESSION .. XML_ATOMIC_NAME_PATTERN) is definitely an expression.
    private boolean isDefiniteExpr(SyntaxKind kind) {
        if (kind == SyntaxKind.QUALIFIED_NAME_REFERENCE || kind == SyntaxKind.SIMPLE_NAME_REFERENCE) {
            return false;
        }
        return kind.compareTo(SyntaxKind.BINARY_EXPRESSION) >= 0 &&
                kind.compareTo(SyntaxKind.XML_ATOMIC_NAME_PATTERN) <= 0;
    }

    /**
     * Parse type or expression that
     * starts with open parenthesis. Possible options are:
     * 1) () - nil type-desc or nil-literal
     * 2) (T) - Parenthesized type-desc
     * 3) (expr) - Parenthesized expression
     * 4) (param, param, ..) - Anon function params
     *
     * @return Type-desc or expression node
     */
    private STNode parseTypedDescOrExprStartsWithOpenParenthesis() {
        STNode openParen = parseOpenParenthesis();
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.CLOSE_PAREN_TOKEN) {
            // `()` — nil literal, nil type-desc, or empty anon-func param list.
            STNode closeParen = parseCloseParenthesis();
            return parseTypeOrExprStartWithEmptyParenthesis(openParen, closeParen);
        }
        STNode typeOrExpr = parseTypeDescOrExpr();
        if (isAction(typeOrExpr)) {
            STNode closeParen = parseCloseParenthesis();
            return STNodeFactory.createBracedExpressionNode(SyntaxKind.BRACED_ACTION, openParen, typeOrExpr,
                    closeParen);
        }
        if (isExpression(typeOrExpr.kind)) {
            // Still ambiguous between a braced expression and anon-func params.
            startContext(ParserRuleContext.BRACED_EXPR_OR_ANON_FUNC_PARAMS);
            return parseBracedExprOrAnonFuncParamRhs(openParen, typeOrExpr, false);
        }
        STNode closeParen = parseCloseParenthesis();
        STNode typeDescNode = getTypeDescFromExpr(typeOrExpr);
        return STNodeFactory.createParenthesisedTypeDescriptorNode(openParen, typeDescNode, closeParen);
    }

    /**
     * Parse type-desc or expression. This method does not handle binding patterns.
     *
     * @return Type-desc node or expression node
     */
    private STNode parseTypeDescOrExpr() {
        List<STNode> typeDescQualifiers = new ArrayList<>();
        return parseTypeDescOrExpr(typeDescQualifiers);
    }

    // Parse a construct that may be a type-desc or an expression (no binding
    // patterns), dispatching on the first token.
    private STNode parseTypeDescOrExpr(List<STNode> qualifiers) {
        parseTypeDescQualifiers(qualifiers);
        STToken nextToken = peek();
        STNode typeOrExpr;
        switch (nextToken.kind) {
            case OPEN_PAREN_TOKEN:
                reportInvalidQualifierList(qualifiers);
                typeOrExpr = parseTypedDescOrExprStartsWithOpenParenthesis();
                break;
            case FUNCTION_KEYWORD:
                typeOrExpr = parseAnonFuncExprOrFuncTypeDesc(qualifiers);
                break;
            case IDENTIFIER_TOKEN:
                reportInvalidQualifierList(qualifiers);
                typeOrExpr = parseQualifiedIdentifier(ParserRuleContext.TYPE_NAME_OR_VAR_NAME);
                return parseTypeDescOrExprRhs(typeOrExpr);
            case OPEN_BRACKET_TOKEN:
                reportInvalidQualifierList(qualifiers);
                typeOrExpr = parseTupleTypeDescOrExprStartsWithOpenBracket();
                break;
            // Can be a singleton type or expression.
            case DECIMAL_INTEGER_LITERAL_TOKEN:
            case HEX_INTEGER_LITERAL_TOKEN:
            case STRING_LITERAL_TOKEN:
            case NULL_KEYWORD:
            case TRUE_KEYWORD:
            case FALSE_KEYWORD:
            case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
            case HEX_FLOATING_POINT_LITERAL_TOKEN:
                reportInvalidQualifierList(qualifiers);
                STNode basicLiteral = parseBasicLiteral();
                return parseTypeDescOrExprRhs(basicLiteral);
            default:
                if (isValidExpressionStart(nextToken.kind, 1)) {
                    reportInvalidQualifierList(qualifiers);
                    return parseActionOrExpressionInLhs(STNodeFactory.createEmptyNodeList());
                }
                return parseTypeDescriptor(qualifiers, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN);
        }
        if (isDefiniteTypeDesc(typeOrExpr.kind)) {
            return parseComplexTypeDescriptor(typeOrExpr, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN,
                    true);
        }
        return parseTypeDescOrExprRhs(typeOrExpr);
    }

    // True when the kind is definitely an expression (literal kinds, or anything
    // in the BINARY_EXPRESSION .. XML_ATOMIC_NAME_PATTERN ordinal range).
    private boolean isExpression(SyntaxKind kind) {
        switch (kind) {
            case NUMERIC_LITERAL:
            case STRING_LITERAL_TOKEN:
            case NIL_LITERAL:
            case NULL_LITERAL:
            case BOOLEAN_LITERAL:
                return true;
            default:
                return kind.compareTo(SyntaxKind.BINARY_EXPRESSION) >= 0 &&
                        kind.compareTo(SyntaxKind.XML_ATOMIC_NAME_PATTERN) <= 0;
        }
    }

    /**
     * Parse statement that starts with an empty parenthesis. Empty parenthesis can be
     * 1) Nil literal
     * 2) Nil type-desc
     * 3) Anon-function params
     *
     * @param openParen  Open parenthesis
     * @param closeParen Close parenthesis
     * @return Parsed node
     */
    private STNode parseTypeOrExprStartWithEmptyParenthesis(STNode openParen, STNode closeParen) {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case RIGHT_DOUBLE_ARROW_TOKEN:
                // `() => ...` — the parens were an empty anon-func param list.
                STNode params = STNodeFactory.createEmptyNodeList();
                STNode anonFuncParam =
                        STNodeFactory.createImplicitAnonymousFunctionParameters(openParen, params, closeParen);
                endContext();
                return anonFuncParam;
            default:
                return STNodeFactory.createNilLiteralNode(openParen, closeParen);
        }
    }

    // `function ...` at a binding-pattern-or-expr position: an anon-func
    // expression stays an expression; a function-type-desc starts a var-decl.
    private STNode parseAnonFuncExprOrTypedBPWithFuncType(List<STNode> qualifiers) {
        STNode exprOrTypeDesc = parseAnonFuncExprOrFuncTypeDesc(qualifiers);
        if (isAction(exprOrTypeDesc) || isExpression(exprOrTypeDesc.kind)) {
            return exprOrTypeDesc;
        }
        return parseTypedBindingPatternTypeRhs(exprOrTypeDesc, ParserRuleContext.VAR_DECL_STMT);
    }

    /**
     * Parse anon-func-expr or function-type-desc, by resolving the ambiguity.
* * @param qualifiers Preceding qualifiers * @return Anon-func-expr or function-type-desc */ private STNode parseAnonFuncExprOrFuncTypeDesc(List<STNode> qualifiers) { startContext(ParserRuleContext.FUNC_TYPE_DESC_OR_ANON_FUNC); STNode qualifierList; STNode functionKeyword = parseFunctionKeyword(); STNode funcSignature; if (peek().kind == SyntaxKind.OPEN_PAREN_TOKEN) { funcSignature = parseFuncSignature(true); qualifierList = createFuncTypeQualNodeList(qualifiers, true); endContext(); return parseAnonFuncExprOrFuncTypeDesc(qualifierList, functionKeyword, funcSignature); } funcSignature = STNodeFactory.createEmptyNode(); qualifierList = createFuncTypeQualNodeList(qualifiers, false); STNode funcTypeDesc = STNodeFactory.createFunctionTypeDescriptorNode(qualifierList, functionKeyword, funcSignature); if (getCurrentContext() != ParserRuleContext.STMT_START_BRACKETED_LIST) { switchContext(ParserRuleContext.VAR_DECL_STMT); return parseComplexTypeDescriptor(funcTypeDesc, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true); } return parseComplexTypeDescriptor(funcTypeDesc, ParserRuleContext.TYPE_DESC_IN_TUPLE, false); } private STNode parseAnonFuncExprOrFuncTypeDesc(STNode qualifierList, STNode functionKeyword, STNode funcSignature) { ParserRuleContext currentCtx = getCurrentContext(); switch (peek().kind) { case OPEN_BRACE_TOKEN: case RIGHT_DOUBLE_ARROW_TOKEN: if (currentCtx != ParserRuleContext.STMT_START_BRACKETED_LIST) { switchContext(ParserRuleContext.EXPRESSION_STATEMENT); } startContext(ParserRuleContext.ANON_FUNC_EXPRESSION); funcSignature = validateAndGetFuncParams((STFunctionSignatureNode) funcSignature); STNode funcBody = parseAnonFuncBody(false); STNode annots = STNodeFactory.createEmptyNodeList(); STNode anonFunc = STNodeFactory.createExplicitAnonymousFunctionExpressionNode(annots, qualifierList, functionKeyword, funcSignature, funcBody); return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, anonFunc, false, true); case IDENTIFIER_TOKEN: default: STNode 
funcTypeDesc = STNodeFactory.createFunctionTypeDescriptorNode(qualifierList, functionKeyword,
                        funcSignature);
                if (currentCtx != ParserRuleContext.STMT_START_BRACKETED_LIST) {
                    switchContext(ParserRuleContext.VAR_DECL_STMT);
                    return parseComplexTypeDescriptor(funcTypeDesc,
                            ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);
                }
                return parseComplexTypeDescriptor(funcTypeDesc, ParserRuleContext.TYPE_DESC_IN_TUPLE, false);
        }
    }

    // Parses the right-hand side of a construct that may still be either a type-desc
    // or an expression, resolving the ambiguity using the next token.
    private STNode parseTypeDescOrExprRhs(STNode typeOrExpr) {
        STToken nextToken = peek();
        STNode typeDesc;
        switch (nextToken.kind) {
            case PIPE_TOKEN:
                STToken nextNextToken = peek(2);
                if (nextNextToken.kind == SyntaxKind.EQUAL_TOKEN) {
                    // `|=`: compound assignment, so the LHS is an expression.
                    return typeOrExpr;
                }

                STNode pipe = parsePipeToken();
                STNode rhsTypeDescOrExpr = parseTypeDescOrExpr();
                if (isExpression(rhsTypeDescOrExpr.kind)) {
                    // RHS is definitely an expression: `|` is the bitwise-or operator.
                    return STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, typeOrExpr, pipe,
                            rhsTypeDescOrExpr);
                }

                // Otherwise interpret `|` as a union type-desc.
                typeDesc = getTypeDescFromExpr(typeOrExpr);
                rhsTypeDescOrExpr = getTypeDescFromExpr(rhsTypeDescOrExpr);
                return createUnionTypeDesc(typeDesc, pipe, rhsTypeDescOrExpr);
            case BITWISE_AND_TOKEN:
                nextNextToken = peek(2);
                if (nextNextToken.kind == SyntaxKind.EQUAL_TOKEN) {
                    // `&=`: compound assignment, so the LHS is an expression.
                    return typeOrExpr;
                }

                STNode ampersand = parseBinaryOperator();
                rhsTypeDescOrExpr = parseTypeDescOrExpr();
                if (isExpression(rhsTypeDescOrExpr.kind)) {
                    // RHS is definitely an expression: `&` is the bitwise-and operator.
                    return STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, typeOrExpr,
                            ampersand, rhsTypeDescOrExpr);
                }

                // Otherwise interpret `&` as an intersection type-desc.
                typeDesc = getTypeDescFromExpr(typeOrExpr);
                rhsTypeDescOrExpr = getTypeDescFromExpr(rhsTypeDescOrExpr);
                return createIntersectionTypeDesc(typeDesc, ampersand, rhsTypeDescOrExpr);
            case IDENTIFIER_TOKEN:
            case QUESTION_MARK_TOKEN:
                // A binding-pattern name or `?` follows: must be a type-desc.
                typeDesc = parseComplexTypeDescriptor(typeOrExpr,
                        ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, false);
                return typeDesc;
            case SEMICOLON_TOKEN:
                return getTypeDescFromExpr(typeOrExpr);
            case EQUAL_TOKEN:
            case CLOSE_PAREN_TOKEN:
            case CLOSE_BRACE_TOKEN:
            case CLOSE_BRACKET_TOKEN:
            case EOF_TOKEN:
            case
COMMA_TOKEN:
                return typeOrExpr;
            case OPEN_BRACKET_TOKEN:
                return parseTypedBindingPatternOrMemberAccess(typeOrExpr, false, true,
                        ParserRuleContext.AMBIGUOUS_STMT);
            case ELLIPSIS_TOKEN:
                // `T...` at this position is a rest-descriptor.
                STNode ellipsis = parseEllipsis();
                typeOrExpr = getTypeDescFromExpr(typeOrExpr);
                return STNodeFactory.createRestDescriptorNode(typeOrExpr, ellipsis);
            default:
                // A compound binary operator (e.g. `+=`) means the LHS is an expression.
                if (isCompoundBinaryOperator(nextToken.kind)) {
                    return typeOrExpr;
                }

                if (isValidExprRhsStart(nextToken.kind, typeOrExpr.kind)) {
                    return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, typeOrExpr, false, false, false, false);
                }

                recover(peek(), ParserRuleContext.TYPE_DESC_OR_EXPR_RHS, typeOrExpr);
                return parseTypeDescOrExprRhs(typeOrExpr);
        }
    }

    /**
     * Checks whether a node could still be interpreted either as a type-desc or as an
     * expression, i.e. the ambiguity has not yet been resolved.
     *
     * @param node Node to check
     * @return {@code true} if the node is still ambiguous
     */
    private boolean isAmbiguous(STNode node) {
        switch (node.kind) {
            case SIMPLE_NAME_REFERENCE:
            case QUALIFIED_NAME_REFERENCE:
            case NIL_LITERAL:
            case NULL_LITERAL:
            case NUMERIC_LITERAL:
            case STRING_LITERAL:
            case BOOLEAN_LITERAL:
            case BRACKETED_LIST:
                return true;
            case BINARY_EXPRESSION:
                STBinaryExpressionNode binaryExpr = (STBinaryExpressionNode) node;
                // Only `|` (union/bitwise-or) and `&` (intersection/bitwise-and) keep the
                // type-vs-expr ambiguity alive. FIX: the previous condition
                // `!= PIPE_TOKEN || == BITWISE_AND_TOKEN` short-circuited such that the
                // BITWISE_AND clause was dead code, so `a & b` was never considered
                // ambiguous. Rewritten so both operators are accepted.
                if (binaryExpr.operator.kind != SyntaxKind.PIPE_TOKEN &&
                        binaryExpr.operator.kind != SyntaxKind.BITWISE_AND_TOKEN) {
                    return false;
                }
                return isAmbiguous(binaryExpr.lhsExpr) && isAmbiguous(binaryExpr.rhsExpr);
            case BRACED_EXPRESSION:
                return isAmbiguous(((STBracedExpressionNode) node).expression);
            case INDEXED_EXPRESSION:
                STIndexedExpressionNode indexExpr = (STIndexedExpressionNode) node;
                if (!isAmbiguous(indexExpr.containerExpression)) {
                    return false;
                }

                // Every key expression (ignoring separating commas) must also be ambiguous.
                STNode keys = indexExpr.keyExpression;
                for (int i = 0; i < keys.bucketCount(); i++) {
                    STNode item = keys.childInBucket(i);
                    if (item.kind == SyntaxKind.COMMA_TOKEN) {
                        continue;
                    }

                    if (!isAmbiguous(item)) {
                        return false;
                    }
                }
                return true;
            default:
                return false;
        }
    }

    // Checks whether a node consists solely of basic literals, possibly combined with
    // `|`/`&`, bracketed lists, or a sign applied to a numeric literal.
    private boolean isAllBasicLiterals(STNode node) {
        switch (node.kind) {
            case NIL_LITERAL:
            case NULL_LITERAL:
            case NUMERIC_LITERAL:
            case STRING_LITERAL:
            case BOOLEAN_LITERAL:
                return true;
            case BINARY_EXPRESSION:
                STBinaryExpressionNode binaryExpr =
(STBinaryExpressionNode) node; if (binaryExpr.operator.kind != SyntaxKind.PIPE_TOKEN || binaryExpr.operator.kind == SyntaxKind.BITWISE_AND_TOKEN) { return false; } return isAmbiguous(binaryExpr.lhsExpr) && isAmbiguous(binaryExpr.rhsExpr); case BRACED_EXPRESSION: return isAmbiguous(((STBracedExpressionNode) node).expression); case BRACKETED_LIST: STAmbiguousCollectionNode list = (STAmbiguousCollectionNode) node; for (STNode member : list.members) { if (member.kind == SyntaxKind.COMMA_TOKEN) { continue; } if (!isAllBasicLiterals(member)) { return false; } } return true; case UNARY_EXPRESSION: STUnaryExpressionNode unaryExpr = (STUnaryExpressionNode) node; if (unaryExpr.unaryOperator.kind != SyntaxKind.PLUS_TOKEN && unaryExpr.unaryOperator.kind != SyntaxKind.MINUS_TOKEN) { return false; } return isNumericLiteral(unaryExpr.expression); default: return false; } } private boolean isNumericLiteral(STNode node) { switch (node.kind) { case NUMERIC_LITERAL: return true; default: return false; } } private STNode parseTupleTypeDescOrExprStartsWithOpenBracket() { startContext(ParserRuleContext.BRACKETED_LIST); STNode openBracket = parseOpenBracket(); List<STNode> members = new ArrayList<>(); STNode memberEnd; while (!isEndOfListConstructor(peek().kind)) { STNode expr = parseTypeDescOrExpr(); if (peek().kind == SyntaxKind.ELLIPSIS_TOKEN && isDefiniteTypeDesc(expr.kind)) { STNode ellipsis = consume(); expr = STNodeFactory.createRestDescriptorNode(expr, ellipsis); } members.add(expr); memberEnd = parseBracketedListMemberEnd(); if (memberEnd == null) { break; } members.add(memberEnd); } STNode memberNodes = STNodeFactory.createNodeList(members); STNode closeBracket = parseCloseBracket(); endContext(); return STNodeFactory.createTupleTypeDescriptorNode(openBracket, memberNodes, closeBracket); } /** * Parse binding-patterns. 
     * <p>
     * <code>
     * binding-pattern := capture-binding-pattern
     *                    | wildcard-binding-pattern
     *                    | list-binding-pattern
     *                    | mapping-binding-pattern
     *                    | functional-binding-pattern
     * <br/><br/>
     * <p>
     * capture-binding-pattern := variable-name
     * variable-name := identifier
     * <br/><br/>
     * <p>
     * wildcard-binding-pattern := _
     * list-binding-pattern := [ list-member-binding-patterns ]
     * <br/>
     * list-member-binding-patterns := binding-pattern (, binding-pattern)* [, rest-binding-pattern]
     *                                 | [ rest-binding-pattern ]
     * <br/><br/>
     * <p>
     * mapping-binding-pattern := { field-binding-patterns }
     * field-binding-patterns := field-binding-pattern (, field-binding-pattern)* [, rest-binding-pattern]
     *                           | [ rest-binding-pattern ]
     * <br/>
     * field-binding-pattern := field-name : binding-pattern | variable-name
     * <br/>
     * rest-binding-pattern := ... variable-name
     * <p>
     * <br/><br/>
     * functional-binding-pattern := functionally-constructible-type-reference ( arg-list-binding-pattern )
     * <br/>
     * arg-list-binding-pattern := positional-arg-binding-patterns [, other-arg-binding-patterns]
     *                             | other-arg-binding-patterns
     * <br/>
     * positional-arg-binding-patterns := positional-arg-binding-pattern (, positional-arg-binding-pattern)*
     * <br/>
     * positional-arg-binding-pattern := binding-pattern
     * <br/>
     * other-arg-binding-patterns := named-arg-binding-patterns [, rest-binding-pattern]
     *                               | [rest-binding-pattern]
     * <br/>
     * named-arg-binding-patterns := named-arg-binding-pattern (, named-arg-binding-pattern)*
     * <br/>
     * named-arg-binding-pattern := arg-name = binding-pattern
     * </code>
     *
     * @return binding-pattern node
     */
    private STNode parseBindingPattern() {
        // Dispatch on the first token of the binding pattern.
        switch (peek().kind) {
            case OPEN_BRACKET_TOKEN:
                return parseListBindingPattern();
            case IDENTIFIER_TOKEN:
                return parseBindingPatternStartsWithIdentifier();
            case OPEN_BRACE_TOKEN:
                return parseMappingBindingPattern();
            case ERROR_KEYWORD:
                return parseErrorBindingPattern();
            default:
                recover(peek(), ParserRuleContext.BINDING_PATTERN);
                return parseBindingPattern();
}
    }

    // Parses a binding pattern that begins with an identifier. The identifier may be:
    //  - the type-reference of an error binding pattern (when followed by `(`), or
    //  - a capture/wildcard binding pattern.
    private STNode parseBindingPatternStartsWithIdentifier() {
        STNode argNameOrBindingPattern =
                parseQualifiedIdentifier(ParserRuleContext.BINDING_PATTERN_STARTING_IDENTIFIER);
        STToken secondToken = peek();
        if (secondToken.kind == SyntaxKind.OPEN_PAREN_TOKEN) {
            // `T(...)`: an error binding pattern whose `error` keyword is missing;
            // synthesize the keyword with a diagnostic and continue.
            startContext(ParserRuleContext.ERROR_BINDING_PATTERN);
            STNode errorKeyword = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.ERROR_KEYWORD,
                    ParserRuleContext.ERROR_KEYWORD);
            return parseErrorBindingPattern(errorKeyword, argNameOrBindingPattern);
        }

        if (argNameOrBindingPattern.kind != SyntaxKind.SIMPLE_NAME_REFERENCE) {
            // A qualified name cannot be a capture binding pattern: recover with a
            // missing identifier and attach the invalid node as leading minutiae.
            STNode identifier = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,
                    ParserRuleContext.BINDING_PATTERN_STARTING_IDENTIFIER);
            identifier = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(identifier, argNameOrBindingPattern);
            return createCaptureOrWildcardBP(identifier);
        }

        return createCaptureOrWildcardBP(((STSimpleNameReferenceNode) argNameOrBindingPattern).name);
    }

    // Creates a wildcard binding pattern if the name is `_`, otherwise a capture
    // binding pattern for the variable name.
    private STNode createCaptureOrWildcardBP(STNode varName) {
        STNode bindingPattern;
        if (isWildcardBP(varName)) {
            bindingPattern = getWildcardBindingPattern(varName);
        } else {
            bindingPattern = STNodeFactory.createCaptureBindingPatternNode(varName);
        }
        return bindingPattern;
    }

    /**
     * Parse list-binding-patterns.
     * <p>
     * <code>
     * list-binding-pattern := [ list-member-binding-patterns ]
     * <br/>
     * list-member-binding-patterns := binding-pattern (, binding-pattern)* [, rest-binding-pattern]
     *                                 | [ rest-binding-pattern ]
     * </code>
     *
     * @return list-binding-pattern node
     */
    private STNode parseListBindingPattern() {
        startContext(ParserRuleContext.LIST_BINDING_PATTERN);
        STNode openBracket = parseOpenBracket();
        List<STNode> bindingPatternsList = new ArrayList<>();
        STNode listBindingPattern = parseListBindingPattern(openBracket, bindingPatternsList);
        endContext();
        return listBindingPattern;
    }

    // Parses the members of a list-binding-pattern after the open bracket.
    // Handles the empty `[]` case directly.
    private STNode parseListBindingPattern(STNode openBracket, List<STNode> bindingPatternsList) {
        if (isEndOfListBindingPattern(peek().kind) && bindingPatternsList.size() == 0) {
            // Empty list binding pattern: `[]`.
            STNode closeBracket = parseCloseBracket();
            STNode bindingPatternsNode = STNodeFactory.createNodeList(bindingPatternsList);
            return STNodeFactory.createListBindingPatternNode(openBracket, bindingPatternsNode, closeBracket);
        }

        STNode listBindingPatternMember = parseListBindingPatternMember();
        bindingPatternsList.add(listBindingPatternMember);
        STNode listBindingPattern =
                parseListBindingPattern(openBracket, listBindingPatternMember, bindingPatternsList);
        return listBindingPattern;
    }

    // Parses the remaining members of a list-binding-pattern after the first member.
    // A rest-binding-pattern, if present, terminates the member list.
    private STNode parseListBindingPattern(STNode openBracket, STNode firstMember, List<STNode> bindingPatterns) {
        STNode member = firstMember;
        // Parse the members until the close-bracket, stopping early after a
        // rest-binding-pattern (which must be the last member).
        STToken token = peek();
        STNode listBindingPatternRhs = null;
        while (!isEndOfListBindingPattern(token.kind) && member.kind != SyntaxKind.REST_BINDING_PATTERN) {
            listBindingPatternRhs = parseListBindingPatternMemberRhs();
            if (listBindingPatternRhs == null) {
                break;
            }

            bindingPatterns.add(listBindingPatternRhs);
            member = parseListBindingPatternMember();
            bindingPatterns.add(member);
            token = peek();
        }

        STNode closeBracket = parseCloseBracket();
        STNode bindingPatternsNode = STNodeFactory.createNodeList(bindingPatterns);
        return STNodeFactory.createListBindingPatternNode(openBracket, bindingPatternsNode, closeBracket);
    }
private STNode parseListBindingPatternMemberRhs() {
        // Returns the separating comma, or null at the end of the member list.
        switch (peek().kind) {
            case COMMA_TOKEN:
                return parseComma();
            case CLOSE_BRACKET_TOKEN:
                return null;
            default:
                recover(peek(), ParserRuleContext.LIST_BINDING_PATTERN_MEMBER_END);
                return parseListBindingPatternMemberRhs();
        }
    }

    // Whether the next token terminates a list-binding-pattern.
    private boolean isEndOfListBindingPattern(SyntaxKind nextTokenKind) {
        switch (nextTokenKind) {
            case CLOSE_BRACKET_TOKEN:
            case EOF_TOKEN:
                return true;
            default:
                return false;
        }
    }

    /**
     * Parse list-binding-pattern member.
     * <p>
     * <code>
     * list-binding-pattern := [ list-member-binding-patterns ]
     * <br/>
     * list-member-binding-patterns := binding-pattern (, binding-pattern)* [, rest-binding-pattern]
     *                                 | [ rest-binding-pattern ]
     * </code>
     *
     * @return List binding pattern member
     */
    private STNode parseListBindingPatternMember() {
        switch (peek().kind) {
            case ELLIPSIS_TOKEN:
                return parseRestBindingPattern();
            case OPEN_BRACKET_TOKEN:
            case IDENTIFIER_TOKEN:
            case OPEN_BRACE_TOKEN:
            case ERROR_KEYWORD:
                return parseBindingPattern();
            default:
                recover(peek(), ParserRuleContext.LIST_BINDING_PATTERN_MEMBER);
                return parseListBindingPatternMember();
        }
    }

    /**
     * Parse rest binding pattern.
     * <p>
     * <code>
     * rest-binding-pattern := ... variable-name
     * </code>
     *
     * @return Rest binding pattern node
     */
    private STNode parseRestBindingPattern() {
        startContext(ParserRuleContext.REST_BINDING_PATTERN);
        STNode ellipsis = parseEllipsis();
        STNode varName = parseVariableName();
        endContext();

        STSimpleNameReferenceNode simpleNameReferenceNode =
                (STSimpleNameReferenceNode) STNodeFactory.createSimpleNameReferenceNode(varName);
        return STNodeFactory.createRestBindingPatternNode(ellipsis, simpleNameReferenceNode);
    }

    /**
     * Parse Typed-binding-pattern.
     * <p>
     * <code>
     * typed-binding-pattern := inferable-type-descriptor binding-pattern
     * <br/><br/>
     * inferable-type-descriptor := type-descriptor | var
     * </code>
     *
     * @return Typed binding pattern node
     */
    private STNode parseTypedBindingPattern(ParserRuleContext context) {
        List<STNode> typeDescQualifiers = new ArrayList<>();
        return parseTypedBindingPattern(typeDescQualifiers, context);
    }

    // Parses the type-desc (with its preceding qualifiers) followed by the
    // binding pattern of a typed-binding-pattern.
    private STNode parseTypedBindingPattern(List<STNode> qualifiers, ParserRuleContext context) {
        STNode typeDesc = parseTypeDescriptor(qualifiers,
                ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true, false);
        STNode typeBindingPattern = parseTypedBindingPatternTypeRhs(typeDesc, context);
        return typeBindingPattern;
    }

    /**
     * Parse mapping-binding-patterns.
     * <p>
     * <code>
     * mapping-binding-pattern := { field-binding-patterns }
     * <br/><br/>
     * field-binding-patterns := field-binding-pattern (, field-binding-pattern)* [, rest-binding-pattern]
     *                           | [ rest-binding-pattern ]
     * <br/><br/>
     * field-binding-pattern := field-name : binding-pattern | variable-name
     * </code>
     *
     * @return mapping-binding-pattern node
     */
    private STNode parseMappingBindingPattern() {
        startContext(ParserRuleContext.MAPPING_BINDING_PATTERN);
        STNode openBrace = parseOpenBrace();
        STToken token = peek();
        if (isEndOfMappingBindingPattern(token.kind)) {
            // Empty mapping binding pattern: `{}`.
            STNode closeBrace = parseCloseBrace();
            STNode bindingPatternsNode = STNodeFactory.createEmptyNodeList();
            endContext();
            return STNodeFactory.createMappingBindingPatternNode(openBrace, bindingPatternsNode, closeBrace);
        }

        List<STNode> bindingPatterns = new ArrayList<>();
        STNode prevMember = parseMappingBindingPatternMember();
        if (prevMember.kind != SyntaxKind.REST_BINDING_PATTERN) {
            // A rest-binding-pattern as the first member is added later, by the
            // continuation below, so it ends up last in the list exactly once.
            bindingPatterns.add(prevMember);
        }
        return parseMappingBindingPattern(openBrace, bindingPatterns, prevMember);
    }

    // Parses the remaining members of a mapping-binding-pattern and the close brace.
    private STNode parseMappingBindingPattern(STNode openBrace, List<STNode> bindingPatterns, STNode prevMember) {
        STToken token = peek();
        STNode mappingBindingPatternRhs = null;
        while
(!isEndOfMappingBindingPattern(token.kind) && prevMember.kind != SyntaxKind.REST_BINDING_PATTERN) {
            mappingBindingPatternRhs = parseMappingBindingPatternEnd();
            if (mappingBindingPatternRhs == null) {
                break;
            }

            bindingPatterns.add(mappingBindingPatternRhs);
            prevMember = parseMappingBindingPatternMember();
            if (prevMember.kind == SyntaxKind.REST_BINDING_PATTERN) {
                // A rest-binding-pattern ends the member list; it is appended below.
                break;
            }
            bindingPatterns.add(prevMember);
            token = peek();
        }

        if (prevMember.kind == SyntaxKind.REST_BINDING_PATTERN) {
            bindingPatterns.add(prevMember);
        }

        STNode closeBrace = parseCloseBrace();
        STNode bindingPatternsNode = STNodeFactory.createNodeList(bindingPatterns);
        endContext();
        return STNodeFactory.createMappingBindingPatternNode(openBrace, bindingPatternsNode, closeBrace);
    }

    /**
     * Parse mapping-binding-pattern entry.
     * <p>
     * <code>
     * mapping-binding-pattern := { field-binding-patterns }
     * <br/><br/>
     * field-binding-patterns := field-binding-pattern (, field-binding-pattern)* [, rest-binding-pattern]
     *                           | [ rest-binding-pattern ]
     * <br/><br/>
     * field-binding-pattern := field-name : binding-pattern
     *                          | variable-name
     * </code>
     *
     * @return mapping-binding-pattern node
     */
    private STNode parseMappingBindingPatternMember() {
        STToken token = peek();
        switch (token.kind) {
            case ELLIPSIS_TOKEN:
                return parseRestBindingPattern();
            default:
                return parseFieldBindingPattern();
        }
    }

    // Returns the separating comma, or null at the close brace of the mapping pattern.
    private STNode parseMappingBindingPatternEnd() {
        switch (peek().kind) {
            case COMMA_TOKEN:
                return parseComma();
            case CLOSE_BRACE_TOKEN:
                return null;
            default:
                recover(peek(), ParserRuleContext.MAPPING_BINDING_PATTERN_END);
                return parseMappingBindingPatternEnd();
        }
    }

    /**
     * Parse field-binding-pattern.
     * <code>field-binding-pattern := field-name : binding-pattern | varname</code>
     *
     * @return field-binding-pattern node
     */
    private STNode parseFieldBindingPattern() {
        switch (peek().kind) {
            case IDENTIFIER_TOKEN:
                STNode identifier = parseIdentifier(ParserRuleContext.FIELD_BINDING_PATTERN_NAME);
                STNode fieldBindingPattern = parseFieldBindingPattern(identifier);
                return fieldBindingPattern;
            default:
                recover(peek(), ParserRuleContext.FIELD_BINDING_PATTERN_NAME);
                return parseFieldBindingPattern();
        }
    }

    // Parses the rest of a field-binding-pattern after the field name. Without a
    // colon it is the shorthand varname form; otherwise `field-name : binding-pattern`.
    private STNode parseFieldBindingPattern(STNode identifier) {
        STNode simpleNameReference = STNodeFactory.createSimpleNameReferenceNode(identifier);
        if (peek().kind != SyntaxKind.COLON_TOKEN) {
            return STNodeFactory.createFieldBindingPatternVarnameNode(simpleNameReference);
        }

        STNode colon = parseColon();
        STNode bindingPattern = parseBindingPattern();
        return STNodeFactory.createFieldBindingPatternFullNode(simpleNameReference, colon, bindingPattern);
    }

    // Whether the next token terminates a mapping-binding-pattern.
    private boolean isEndOfMappingBindingPattern(SyntaxKind nextTokenKind) {
        return nextTokenKind == SyntaxKind.CLOSE_BRACE_TOKEN || endOfModuleLevelNode(1);
    }

    // Disambiguates a statement starting with `error` between an error-type-desc
    // (a var-decl) and an error binding pattern, by looking ahead 2-3 tokens.
    private STNode parseErrorTypeDescOrErrorBP(STNode annots) {
        STToken nextNextToken = peek(2);
        switch (nextNextToken.kind) {
            case OPEN_PAREN_TOKEN: // may be error-binding-pattern
                return parseAsErrorBindingPattern();
            case LT_TOKEN: // error< implies error-type-desc with type param
                return parseAsErrorTypeDesc(annots);
            case IDENTIFIER_TOKEN: // `error id:` or `error id(` implies error-binding-pattern
                SyntaxKind nextNextNextTokenKind = peek(3).kind;
                if (nextNextNextTokenKind == SyntaxKind.COLON_TOKEN ||
                        nextNextNextTokenKind == SyntaxKind.OPEN_PAREN_TOKEN) {
                    return parseAsErrorBindingPattern();
                }
                // else fall through: treat as an error-type-desc
            default:
                return parseAsErrorTypeDesc(annots);
        }
    }

    // Parses an error binding pattern and the assignment statement it belongs to.
    private STNode parseAsErrorBindingPattern() {
        startContext(ParserRuleContext.ASSIGNMENT_STMT);
        return parseAssignmentStmtRhs(parseErrorBindingPattern());
    }

    // Parses a variable declaration whose type-desc starts with `error`.
    private STNode parseAsErrorTypeDesc(STNode annots) {
        STNode finalKeyword = STNodeFactory.createEmptyNode();
        return parseVariableDecl(getAnnotations(annots), finalKeyword);
    }

    /**
     * Parse error binding pattern node.
     * <p>
     * <code>error-binding-pattern := error [error-type-reference] ( error-arg-list-binding-pattern )</code>
     * <br/><br/>
     * error-arg-list-binding-pattern :=
     * error-message-binding-pattern [, error-cause-binding-pattern] [, error-field-binding-patterns]
     * | [error-field-binding-patterns]
     * <br/><br/>
     * error-message-binding-pattern := simple-binding-pattern
     * <br/><br/>
     * error-cause-binding-pattern := simple-binding-pattern | error-binding-pattern
     * <br/><br/>
     * simple-binding-pattern := capture-binding-pattern | wildcard-binding-pattern
     * <br/><br/>
     * error-field-binding-patterns :=
     * named-arg-binding-pattern (, named-arg-binding-pattern)* [, rest-binding-pattern]
     * | rest-binding-pattern
     * <br/><br/>
     * named-arg-binding-pattern := arg-name = binding-pattern
     *
     * @return Error binding pattern node.
     */
    private STNode parseErrorBindingPattern() {
        startContext(ParserRuleContext.ERROR_BINDING_PATTERN);
        STNode errorKeyword = parseErrorKeyword();
        return parseErrorBindingPattern(errorKeyword);
    }

    // Parses the optional error-type-reference that may follow the `error` keyword.
    private STNode parseErrorBindingPattern(STNode errorKeyword) {
        STToken nextToken = peek();
        STNode typeRef;
        switch (nextToken.kind) {
            case OPEN_PAREN_TOKEN:
                // No type reference: `error(...)`.
                typeRef = STNodeFactory.createEmptyNode();
                break;
            default:
                if (isPredeclaredIdentifier(nextToken.kind)) {
                    typeRef = parseTypeReference();
                    break;
                }
                recover(peek(), ParserRuleContext.ERROR_BINDING_PATTERN_ERROR_KEYWORD_RHS);
                return parseErrorBindingPattern(errorKeyword);
        }
        return parseErrorBindingPattern(errorKeyword, typeRef);
    }

    // Parses the parenthesized arg-list of an error binding pattern.
    private STNode parseErrorBindingPattern(STNode errorKeyword, STNode typeRef) {
        STNode openParenthesis = parseOpenParenthesis();
        STNode argListBindingPatterns = parseErrorArgListBindingPatterns();
        STNode closeParenthesis = parseCloseParenthesis();
        endContext();
        return STNodeFactory.createErrorBindingPatternNode(errorKeyword, typeRef, openParenthesis,
                argListBindingPatterns, closeParenthesis);
    }

    /**
     * Parse error arg list binding pattern.
     * <p>
     * <code>
     * error-arg-list-binding-pattern :=
     * error-message-binding-pattern [, error-cause-binding-pattern] [, error-field-binding-patterns]
     * | [error-field-binding-patterns]
     * <br/><br/>
     * <p>
     * error-message-binding-pattern := simple-binding-pattern
     * <br/><br/>
     * <p>
     * error-cause-binding-pattern := simple-binding-pattern | error-binding-pattern
     * <br/><br/>
     * <p>
     * simple-binding-pattern := capture-binding-pattern | wildcard-binding-pattern
     * <br/><br/>
     * <p>
     * error-field-binding-patterns :=
     * named-arg-binding-pattern (, named-arg-binding-pattern)* [, rest-binding-pattern]
     * | rest-binding-pattern
     * <br/><br/>
     * <p>
     * named-arg-binding-pattern := arg-name = binding-pattern
     * </code>
     *
     * @return Error arg list binding patterns.
     */
    private STNode parseErrorArgListBindingPatterns() {
        List<STNode> argListBindingPatterns = new ArrayList<>();
        if (isEndOfErrorFieldBindingPatterns()) {
            // Empty arg list: `error()`.
            return STNodeFactory.createNodeList(argListBindingPatterns);
        }
        return parseErrorArgListBindingPatterns(argListBindingPatterns);
    }

    // Parses the first arg of an error arg-list and dispatches based on its kind,
    // enforcing the order message, cause, then field binding patterns.
    private STNode parseErrorArgListBindingPatterns(List<STNode> argListBindingPatterns) {
        STNode firstArg =
                parseErrorArgListBindingPattern(ParserRuleContext.ERROR_ARG_LIST_BINDING_PATTERN_START, true);
        if (firstArg == null) {
            return STNodeFactory.createNodeList(argListBindingPatterns);
        }

        switch (firstArg.kind) {
            case CAPTURE_BINDING_PATTERN:
            case WILDCARD_BINDING_PATTERN:
                // First arg is the error-message binding pattern.
                argListBindingPatterns.add(firstArg);
                return parseErrorArgListBPWithoutErrorMsg(argListBindingPatterns);
            case ERROR_BINDING_PATTERN:
                // An error binding pattern first can only be the cause: the mandatory
                // error-message binding pattern is missing. Synthesize it (plus the
                // separating comma) with diagnostics.
                STNode missingIdentifier = SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
                STNode missingErrorMsgBP = STNodeFactory.createCaptureBindingPatternNode(missingIdentifier);
                missingErrorMsgBP = SyntaxErrors.addDiagnostic(missingErrorMsgBP,
                        DiagnosticErrorCode.ERROR_MISSING_ERROR_MESSAGE_BINDING_PATTERN);
                STNode missingComma = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.COMMA_TOKEN,
                        DiagnosticErrorCode.ERROR_MISSING_COMMA_TOKEN);
argListBindingPatterns.add(missingErrorMsgBP);
                argListBindingPatterns.add(missingComma);
                argListBindingPatterns.add(firstArg);
                return parseErrorArgListBPWithoutErrorMsgAndCause(argListBindingPatterns, firstArg.kind);
            case REST_BINDING_PATTERN:
            case NAMED_ARG_BINDING_PATTERN:
                // Field binding patterns may appear without message/cause.
                argListBindingPatterns.add(firstArg);
                return parseErrorArgListBPWithoutErrorMsgAndCause(argListBindingPatterns, firstArg.kind);
            default:
                // Not a valid first arg: attach it as an invalid node and retry.
                addInvalidNodeToNextToken(firstArg, DiagnosticErrorCode.ERROR_BINDING_PATTERN_NOT_ALLOWED);
                return parseErrorArgListBindingPatterns(argListBindingPatterns);
        }
    }

    // Parses the args following the error-message binding pattern: an optional
    // cause (simple or error binding pattern) followed by field binding patterns.
    private STNode parseErrorArgListBPWithoutErrorMsg(List<STNode> argListBindingPatterns) {
        STNode argEnd = parseErrorArgsBindingPatternEnd(ParserRuleContext.ERROR_MESSAGE_BINDING_PATTERN_END);
        if (argEnd == null) {
            // null marks the close paren: the arg list is complete.
            return STNodeFactory.createNodeList(argListBindingPatterns);
        }

        STNode secondArg =
                parseErrorArgListBindingPattern(ParserRuleContext.ERROR_MESSAGE_BINDING_PATTERN_RHS, false);
        assert secondArg != null;
        switch (secondArg.kind) {
            case CAPTURE_BINDING_PATTERN:
            case WILDCARD_BINDING_PATTERN:
            case ERROR_BINDING_PATTERN:
            case REST_BINDING_PATTERN:
            case NAMED_ARG_BINDING_PATTERN:
                argListBindingPatterns.add(argEnd);
                argListBindingPatterns.add(secondArg);
                return parseErrorArgListBPWithoutErrorMsgAndCause(argListBindingPatterns, secondArg.kind);
            default:
                // Invalid second arg: attach both the separator and the arg to the
                // last valid node, then keep looking for a valid second arg.
                updateLastNodeInListWithInvalidNode(argListBindingPatterns, argEnd, null);
                updateLastNodeInListWithInvalidNode(argListBindingPatterns, secondArg,
                        DiagnosticErrorCode.ERROR_BINDING_PATTERN_NOT_ALLOWED);
                return parseErrorArgListBPWithoutErrorMsg(argListBindingPatterns);
        }
    }

    // Parses the remaining error-field binding patterns (named args and an optional
    // trailing rest-binding-pattern), validating their order.
    private STNode parseErrorArgListBPWithoutErrorMsgAndCause(List<STNode> argListBindingPatterns,
                                                              SyntaxKind lastValidArgKind) {
        while (!isEndOfErrorFieldBindingPatterns()) {
            STNode argEnd = parseErrorArgsBindingPatternEnd(ParserRuleContext.ERROR_FIELD_BINDING_PATTERN_END);
            if (argEnd == null) {
                // null marks the close paren: the arg list is complete.
                break;
            }

            STNode currentArg =
parseErrorArgListBindingPattern(ParserRuleContext.ERROR_FIELD_BINDING_PATTERN, false);
            assert currentArg != null;
            DiagnosticErrorCode errorCode = validateErrorFieldBindingPatternOrder(lastValidArgKind,
                    currentArg.kind);
            if (errorCode == null) {
                argListBindingPatterns.add(argEnd);
                argListBindingPatterns.add(currentArg);
                lastValidArgKind = currentArg.kind;
            } else if (argListBindingPatterns.size() == 0) {
                // Nothing valid collected yet: attach invalid nodes to the next token.
                addInvalidNodeToNextToken(argEnd, null);
                addInvalidNodeToNextToken(currentArg, errorCode);
            } else {
                // Attach the out-of-order arg (and its separator) to the last valid node.
                updateLastNodeInListWithInvalidNode(argListBindingPatterns, argEnd, null);
                updateLastNodeInListWithInvalidNode(argListBindingPatterns, currentArg, errorCode);
            }
        }

        return STNodeFactory.createNodeList(argListBindingPatterns);
    }

    // Whether the next token terminates the error arg-list binding patterns.
    private boolean isEndOfErrorFieldBindingPatterns() {
        SyntaxKind nextTokenKind = peek().kind;
        switch (nextTokenKind) {
            case CLOSE_PAREN_TOKEN:
            case EOF_TOKEN:
                return true;
            default:
                return false;
        }
    }

    // Returns the separating comma, or null at the close paren of the arg list.
    private STNode parseErrorArgsBindingPatternEnd(ParserRuleContext currentCtx) {
        switch (peek().kind) {
            case COMMA_TOKEN:
                return consume();
            case CLOSE_PAREN_TOKEN:
                return null;
            default:
                recover(peek(), currentCtx);
                return parseErrorArgsBindingPatternEnd(currentCtx);
        }
    }

    // Parses one arg of an error arg-list binding pattern. For the first arg only,
    // an immediate close paren is allowed and yields null (empty arg list).
    private STNode parseErrorArgListBindingPattern(ParserRuleContext context, boolean isFirstArg) {
        switch (peek().kind) {
            case ELLIPSIS_TOKEN:
                return parseRestBindingPattern();
            case IDENTIFIER_TOKEN:
                // Identifier can be a named-arg (`name = bp`) or a simple binding pattern.
                STNode argNameOrSimpleBindingPattern = consume();
                return parseNamedOrSimpleArgBindingPattern(argNameOrSimpleBindingPattern);
            case OPEN_BRACKET_TOKEN:
            case OPEN_BRACE_TOKEN:
            case ERROR_KEYWORD:
                return parseBindingPattern();
            case CLOSE_PAREN_TOKEN:
                if (isFirstArg) {
                    // Empty arg list.
                    return null;
                }
                // else fall through and recover
            default:
                recover(peek(), context);
                return parseErrorArgListBindingPattern(context, isFirstArg);
        }
    }

    // Disambiguates `name = bp` (named-arg binding pattern) from a plain
    // capture/wildcard binding pattern, after the identifier has been consumed.
    private STNode parseNamedOrSimpleArgBindingPattern(STNode argNameOrSimpleBindingPattern) {
        STToken secondToken = peek();
        switch (secondToken.kind) {
            case EQUAL_TOKEN:
                STNode equal = consume();
                STNode bindingPattern =
parseBindingPattern();
                return STNodeFactory.createNamedArgBindingPatternNode(argNameOrSimpleBindingPattern, equal,
                        bindingPattern);
            case COMMA_TOKEN:
            case CLOSE_PAREN_TOKEN:
            default:
                // No `=` follows: treat the identifier as a capture/wildcard pattern.
                return createCaptureOrWildcardBP(argNameOrSimpleBindingPattern);
        }
    }

    // Validates the ordering of error-field binding patterns. Returns null when the
    // current arg may follow the previous one, otherwise the diagnostic to attach.
    private DiagnosticErrorCode validateErrorFieldBindingPatternOrder(SyntaxKind prevArgKind,
                                                                      SyntaxKind currentArgKind) {
        switch (currentArgKind) {
            case NAMED_ARG_BINDING_PATTERN:
            case REST_BINDING_PATTERN:
                // Nothing may follow a rest-binding-pattern.
                if (prevArgKind == SyntaxKind.REST_BINDING_PATTERN) {
                    return DiagnosticErrorCode.ERROR_REST_ARG_FOLLOWED_BY_ANOTHER_ARG;
                }
                return null;
            case CAPTURE_BINDING_PATTERN:
            case WILDCARD_BINDING_PATTERN:
            case ERROR_BINDING_PATTERN:
            case LIST_BINDING_PATTERN:
            case MAPPING_BINDING_PATTERN:
            default:
                // Positional patterns are not allowed among error-field binding patterns.
                return DiagnosticErrorCode.ERROR_BINDING_PATTERN_NOT_ALLOWED;
        }
    }

    /*
     * This parses Typed binding patterns and deals with ambiguity between types,
     * and binding patterns. An example is 'T[a]'.
     * The ambiguity lies in between:
     * 1) Array Type
     * 2) List binding pattern
     * 3) Member access expression.
     */

    /**
     * Parse the component after the type-desc, of a typed-binding-pattern.
     *
     * @param typeDesc Starting type-desc of the typed-binding-pattern
     * @return Typed-binding pattern
     */
    private STNode parseTypedBindingPatternTypeRhs(STNode typeDesc, ParserRuleContext context) {
        return parseTypedBindingPatternTypeRhs(typeDesc, context, true);
    }

    // Parses the binding pattern that follows a type-desc. When isRoot is false and a
    // list/arg terminator follows, the type-desc is returned as-is for the caller to
    // resolve the surrounding ambiguity.
    private STNode parseTypedBindingPatternTypeRhs(STNode typeDesc, ParserRuleContext context, boolean isRoot) {
        switch (peek().kind) {
            case IDENTIFIER_TOKEN: // Capture/error binding pattern: T x, T error(..)
            case OPEN_BRACE_TOKEN: // Mapping binding pattern: T { }
            case ERROR_KEYWORD: // Error binding pattern: T error(..)
                STNode bindingPattern = parseBindingPattern();
                return STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);
            case OPEN_BRACKET_TOKEN:
                // T[..] could be an array-type-desc or a list binding pattern;
                // resolve via the member-access disambiguation path.
                STNode typedBindingPattern = parseTypedBindingPatternOrMemberAccess(typeDesc, true, true, context);
                assert typedBindingPattern.kind == SyntaxKind.TYPED_BINDING_PATTERN;
                return typedBindingPattern;
            case CLOSE_PAREN_TOKEN:
            case COMMA_TOKEN:
            case CLOSE_BRACKET_TOKEN:
            case CLOSE_BRACE_TOKEN:
                if (!isRoot) {
                    return typeDesc;
                }
                // else fall through and recover
            default:
                recover(peek(), ParserRuleContext.TYPED_BINDING_PATTERN_TYPE_RHS, typeDesc, context, isRoot);
                return parseTypedBindingPatternTypeRhs(typeDesc, context, isRoot);
        }
    }

    /**
     * Parse typed-binding pattern with list, array-type-desc, or member-access-expr.
     *
     * @param typeDescOrExpr Type desc or the expression at the start
     * @param isTypedBindingPattern Is this is a typed-binding-pattern.
If this is `false`, then it's still ambiguous
     * @return Parsed node
     */
    private STNode parseTypedBindingPatternOrMemberAccess(STNode typeDescOrExpr, boolean isTypedBindingPattern,
                                                          boolean allowAssignment, ParserRuleContext context) {
        startContext(ParserRuleContext.BRACKETED_LIST);
        STNode openBracket = parseOpenBracket();

        // `[]` with no member can only be an array-type-desc.
        if (isBracketedListEnd(peek().kind)) {
            return parseAsArrayTypeDesc(typeDescOrExpr, openBracket, STNodeFactory.createEmptyNode(), context);
        }

        // Parse the first member and classify the bracketed construct by its shape.
        STNode member = parseBracketedListMember(isTypedBindingPattern);
        SyntaxKind currentNodeType = getBracketedListNodeType(member, isTypedBindingPattern);
        switch (currentNodeType) {
            case ARRAY_TYPE_DESC:
                STNode typedBindingPattern = parseAsArrayTypeDesc(typeDescOrExpr, openBracket, member, context);
                return typedBindingPattern;
            case LIST_BINDING_PATTERN:
                // The member is definitely a binding pattern: parse as list-binding-pattern.
                STNode bindingPattern = parseAsListBindingPattern(openBracket, new ArrayList<>(), member, false);
                STNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);
                return STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);
            case INDEXED_EXPRESSION:
                return parseAsMemberAccessExpr(typeDescOrExpr, openBracket, member);
            case ARRAY_TYPE_DESC_OR_MEMBER_ACCESS:
                // Still ambiguous: fall through to the RHS-based resolution below.
                break;
            case NONE:
            default:
                // A comma after the member forces the list-binding-pattern interpretation.
                STNode memberEnd = parseBracketedListMemberEnd();
                if (memberEnd != null) {
                    List<STNode> memberList = new ArrayList<>();
                    memberList.add(getBindingPattern(member));
                    memberList.add(memberEnd);
                    bindingPattern = parseAsListBindingPattern(openBracket, memberList);
                    typeDesc = getTypeDescFromExpr(typeDescOrExpr);
                    return STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);
                }
        }

        // Ambiguity not yet resolved: close the bracket and decide using what follows.
        STNode closeBracket = parseCloseBracket();
        endContext();
        return parseTypedBindingPatternOrMemberAccessRhs(typeDescOrExpr, openBracket, member, closeBracket,
                isTypedBindingPattern, allowAssignment, context);
    }

    // Treats the bracketed construct as a member-access expression and continues
    // parsing the expression RHS.
    private STNode parseAsMemberAccessExpr(STNode typeNameOrExpr, STNode openBracket, STNode member) {
        member = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, member, false, true);
        STNode closeBracket =
parseCloseBracket();
        endContext();
        STNode keyExpr = STNodeFactory.createNodeList(member);
        STNode memberAccessExpr =
                STNodeFactory.createIndexedExpressionNode(typeNameOrExpr, openBracket, keyExpr, closeBracket);
        return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, memberAccessExpr, false, false);
    }

    // Whether the next token terminates a bracketed list.
    private boolean isBracketedListEnd(SyntaxKind nextTokenKind) {
        switch (nextTokenKind) {
            case EOF_TOKEN:
            case CLOSE_BRACKET_TOKEN:
                return true;
            default:
                return false;
        }
    }

    /**
     * Parse a member of an ambiguous bracketed list. This member could be:
     * 1) Array length
     * 2) Key expression of a member-access-expr
     * 3) A member-binding pattern of a list-binding-pattern.
     *
     * @param isTypedBindingPattern Is this in a definite typed-binding pattern
     * @return Parsed member node
     */
    private STNode parseBracketedListMember(boolean isTypedBindingPattern) {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case DECIMAL_INTEGER_LITERAL_TOKEN:
            case HEX_INTEGER_LITERAL_TOKEN:
            case ASTERISK_TOKEN:
            case STRING_LITERAL_TOKEN:
                // Literal members (e.g. an array length or a string key).
                return parseBasicLiteral();
            case CLOSE_BRACKET_TOKEN:
                return STNodeFactory.createEmptyNode();
            case OPEN_BRACE_TOKEN: // mapping-binding-pattern
            case ERROR_KEYWORD: // error-binding-pattern
            case ELLIPSIS_TOKEN: // rest binding pattern
            case OPEN_BRACKET_TOKEN: // list-binding-pattern
                return parseStatementStartBracketedListMember();
            case IDENTIFIER_TOKEN:
                if (isTypedBindingPattern) {
                    return parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);
                }
                break;
            default:
                if ((!isTypedBindingPattern && isValidExpressionStart(nextToken.kind, 1)) ||
                        isQualifiedIdentifierPredeclaredPrefix(nextToken.kind)) {
                    break;
                }

                ParserRuleContext recoverContext =
                        isTypedBindingPattern ? ParserRuleContext.LIST_BINDING_MEMBER_OR_ARRAY_LENGTH
                                : ParserRuleContext.BRACKETED_LIST_MEMBER;
                recover(peek(), recoverContext, isTypedBindingPattern);
                return parseBracketedListMember(isTypedBindingPattern);
        }

        // Identifier / expression-start member: parse as an expression, but recognize
        // the `_` wildcard binding pattern.
        STNode expr = parseExpression();
        if (isWildcardBP(expr)) {
            return getWildcardBindingPattern(expr);
        }
        return expr;
    }

    /**
     * Treat the current node as an array, and parse the remainder of the binding pattern.
* * @param typeDesc Type-desc * @param openBracket Open bracket * @param member Member * @return Parsed node */ private STNode parseAsArrayTypeDesc(STNode typeDesc, STNode openBracket, STNode member, ParserRuleContext context) { typeDesc = getTypeDescFromExpr(typeDesc); switchContext(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN); startContext(ParserRuleContext.ARRAY_TYPE_DESCRIPTOR); STNode closeBracket = parseCloseBracket(); endContext(); endContext(); return parseTypedBindingPatternOrMemberAccessRhs(typeDesc, openBracket, member, closeBracket, true, true, context); } private STNode parseBracketedListMemberEnd() { switch (peek().kind) { case COMMA_TOKEN: return parseComma(); case CLOSE_BRACKET_TOKEN: return null; default: recover(peek(), ParserRuleContext.BRACKETED_LIST_MEMBER_END); return parseBracketedListMemberEnd(); } } /** * We reach here to break ambiguity of T[a]. This could be: * 1) Array Type Desc * 2) Member access on LHS * 3) Typed-binding-pattern * * @param typeDescOrExpr Type name or the expr that precede the open-bracket. * @param openBracket Open bracket * @param member Member * @param closeBracket Open bracket * @param isTypedBindingPattern Is this is a typed-binding-pattern. * @return Specific node that matches to T[a], after solving ambiguity. 
 */
private STNode parseTypedBindingPatternOrMemberAccessRhs(STNode typeDescOrExpr, STNode openBracket, STNode member,
                                                         STNode closeBracket, boolean isTypedBindingPattern,
                                                         boolean allowAssignment, ParserRuleContext context) {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case IDENTIFIER_TOKEN: // `T[a] b` — a binding pattern follows, so array-type-desc
        case OPEN_BRACE_TOKEN: // mapping-binding-pattern follows
        case ERROR_KEYWORD: // error-binding-pattern follows
            STNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);
            STNode arrayTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, typeDesc);
            return parseTypedBindingPatternTypeRhs(arrayTypeDesc, context);
        case OPEN_BRACKET_TOKEN:
            // `T[a][`...
            if (isTypedBindingPattern) {
                typeDesc = getTypeDescFromExpr(typeDescOrExpr);
                arrayTypeDesc = createArrayTypeDesc(typeDesc, openBracket, member, closeBracket);
                return parseTypedBindingPatternTypeRhs(arrayTypeDesc, context);
            }
            // Still ambiguous: fold `T[a]` into an indexed-expr and recurse for the next `[`.
            STNode keyExpr = getKeyExpr(member);
            STNode expr =
                    STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr, closeBracket);
            return parseTypedBindingPatternOrMemberAccess(expr, false, allowAssignment, context);
        case QUESTION_MARK_TOKEN:
            // `T[a]?` — optional type, hence definitely a type-desc.
            typeDesc = getTypeDescFromExpr(typeDescOrExpr);
            arrayTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, typeDesc);
            typeDesc = parseComplexTypeDescriptor(arrayTypeDesc,
                    ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);
            return parseTypedBindingPatternTypeRhs(typeDesc, context);
        case PIPE_TOKEN:
        case BITWISE_AND_TOKEN:
            // `T[a] | R..` or `T[a] & R..` — the rhs carries the ambiguity further.
            return parseComplexTypeDescInTypedBPOrExprRhs(typeDescOrExpr, openBracket, member, closeBracket,
                    isTypedBindingPattern);
        case IN_KEYWORD:
            // `in` is only valid inside a foreach-stmt or from-clause; else recover below.
            if (context != ParserRuleContext.FOREACH_STMT && context != ParserRuleContext.FROM_CLAUSE) {
                break;
            }
            return createTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket);
        case EQUAL_TOKEN:
            if (context == ParserRuleContext.FOREACH_STMT || context == ParserRuleContext.FROM_CLAUSE) {
                // foreach/from-clause expects `in`, not `=`; recover below.
                break;
            }
            if (isTypedBindingPattern || !allowAssignment || !isValidLVExpr(typeDescOrExpr)) {
                return createTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket);
            }
            // `expr[a] = ...` — member access used as an lvalue.
            keyExpr = getKeyExpr(member);
            typeDescOrExpr = getExpression(typeDescOrExpr);
            return STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr, closeBracket);
        case SEMICOLON_TOKEN:
            if (context == ParserRuleContext.FOREACH_STMT || context == ParserRuleContext.FROM_CLAUSE) {
                break;
            }
            return createTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket);
        case CLOSE_BRACE_TOKEN:
        case COMMA_TOKEN:
            if (context == ParserRuleContext.AMBIGUOUS_STMT) {
                keyExpr = getKeyExpr(member);
                return STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr,
                        closeBracket);
            }
            // deliberate fall-through to the default handling
        default:
            if (!isTypedBindingPattern && isValidExprRhsStart(nextToken.kind, closeBracket.kind)) {
                keyExpr = getKeyExpr(member);
                typeDescOrExpr = getExpression(typeDescOrExpr);
                return STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr,
                        closeBracket);
            }
            break;
    }

    // Nothing matched: recover and retry with the same arguments.
    ParserRuleContext recoveryCtx = ParserRuleContext.BRACKETED_LIST_RHS;
    if (isTypedBindingPattern) {
        recoveryCtx = ParserRuleContext.TYPE_DESC_RHS_OR_BP_RHS;
    }
    recover(peek(), recoveryCtx, typeDescOrExpr, openBracket, member, closeBracket, isTypedBindingPattern,
            allowAssignment, context);
    return parseTypedBindingPatternOrMemberAccessRhs(typeDescOrExpr, openBracket, member, closeBracket,
            isTypedBindingPattern, allowAssignment, context);
}

// Wraps the member as the key-expr node-list of a member-access-expr. A null
// member is replaced by a missing identifier carrying a diagnostic, so the
// resulting tree stays structurally complete.
private STNode getKeyExpr(STNode member) {
    if (member == null) {
        STToken keyIdentifier = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,
                DiagnosticErrorCode.ERROR_MISSING_KEY_EXPR_IN_MEMBER_ACCESS_EXPR);
        STNode missingVarRef = STNodeFactory.createSimpleNameReferenceNode(keyIdentifier);
        return STNodeFactory.createNodeList(missingVarRef);
    }
    return STNodeFactory.createNodeList(member);
}

// Materializes `T[member]` as a typed-binding-pattern: the bracketed part
// becomes a list-binding-pattern, except when the member is a numeric/asterisk
// literal, in which case it is an array length and the variable name is missing.
private STNode createTypedBindingPattern(STNode typeDescOrExpr, STNode openBracket, STNode member,
                                         STNode closeBracket) {
    STNode bindingPatterns = STNodeFactory.createEmptyNodeList();
    if (!isEmpty(member)) {
        SyntaxKind memberKind = member.kind;
        if (memberKind == SyntaxKind.NUMERIC_LITERAL || memberKind == SyntaxKind.ASTERISK_LITERAL) {
            // `T[3]` / `T[*]` — array type-desc; report the missing variable name.
            STNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);
            STNode arrayTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, typeDesc);
            STToken identifierToken = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,
                    DiagnosticErrorCode.ERROR_MISSING_VARIABLE_NAME);
            STNode variableName = STNodeFactory.createCaptureBindingPatternNode(identifierToken);
            return STNodeFactory.createTypedBindingPatternNode(arrayTypeDesc, variableName);
        }

        if (member.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {
            // Field binding patterns are invalid inside a list-binding-pattern:
            // attach the member to the open-bracket as invalid-node minutiae.
            openBracket = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(openBracket, member,
                    DiagnosticErrorCode.ERROR_FIELD_BP_INSIDE_LIST_BP);
        } else {
            STNode bindingPattern = getBindingPattern(member);
            bindingPatterns = STNodeFactory.createNodeList(bindingPattern);
        }
    }

    STNode bindingPattern =
            STNodeFactory.createListBindingPatternNode(openBracket, bindingPatterns, closeBracket);
    STNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);
    return STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);
}

/**
 * Parse a union or intersection type-desc/binary-expression that involves ambiguous
 * bracketed list in lhs.
 * <p>
 * e.g: <code>(T[a] & R..)</code> or <code>(T[a] | R.. )</code>
 * <p>
 * Complexity occurs in scenarios such as <code>T[a] |/& R[b]</code>. If the token after this
 * is another binding-pattern, then <code>(T[a] |/& R[b])</code> becomes the type-desc. However,
 * if the token follows this is an equal or semicolon, then <code>(T[a] |/& R)</code> becomes
 * the type-desc, and <code>[b]</code> becomes the binding pattern.
 *
 * @param typeDescOrExpr        Type desc or the expression
 * @param openBracket           Open bracket
 * @param member                Member
 * @param closeBracket          Close bracket
 * @param isTypedBindingPattern Whether this is already known to be a typed-binding-pattern
 * @return Parsed node
 */
private STNode parseComplexTypeDescInTypedBPOrExprRhs(STNode typeDescOrExpr, STNode openBracket, STNode member,
                                                      STNode closeBracket, boolean isTypedBindingPattern) {
    STNode pipeOrAndToken = parseUnionOrIntersectionToken();
    STNode typedBindingPatternOrExpr = parseTypedBindingPatternOrExpr(false);
    if (typedBindingPatternOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) {
        // Rhs is a typed-binding-pattern, so `T[a]` must be a type-desc:
        // combine it with the rhs type-desc and keep the rhs binding pattern.
        STNode lhsTypeDesc = getTypeDescFromExpr(typeDescOrExpr);
        lhsTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, lhsTypeDesc);
        STTypedBindingPatternNode rhsTypedBindingPattern =
                (STTypedBindingPatternNode) typedBindingPatternOrExpr;
        STNode newTypeDesc;
        if (pipeOrAndToken.kind == SyntaxKind.PIPE_TOKEN) {
            newTypeDesc = createUnionTypeDesc(lhsTypeDesc, pipeOrAndToken,
                    rhsTypedBindingPattern.typeDescriptor);
        } else {
            newTypeDesc = createIntersectionTypeDesc(lhsTypeDesc, pipeOrAndToken,
                    rhsTypedBindingPattern.typeDescriptor);
        }
        return STNodeFactory.createTypedBindingPatternNode(newTypeDesc, rhsTypedBindingPattern.bindingPattern);
    }

    if (isTypedBindingPattern) {
        // Known to be a typed-binding-pattern but no binding pattern followed:
        // produce a capture binding pattern with a missing variable name.
        STNode lhsTypeDesc = getTypeDescFromExpr(typeDescOrExpr);
        lhsTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, lhsTypeDesc);
        return createCaptureBPWithMissingVarName(lhsTypeDesc, pipeOrAndToken, typedBindingPatternOrExpr);
    }

    // Otherwise: a binary expression whose lhs is a member-access-expr.
    STNode keyExpr = getExpression(member);
    STNode containerExpr = getExpression(typeDescOrExpr);
    STNode lhsExpr =
            STNodeFactory.createIndexedExpressionNode(containerExpr, openBracket, keyExpr, closeBracket);
    return STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, lhsExpr, pipeOrAndToken,
            typedBindingPatternOrExpr);
}

// Applies `[member]` as an array dimension to the right-most component of the
// given type-desc, recursing through union/intersection nodes so the array
// dimension attaches to the right operand rather than the whole union/intersection.
private STNode getArrayTypeDesc(STNode openBracket, STNode member, STNode closeBracket, STNode lhsTypeDesc) {
    if (lhsTypeDesc.kind == SyntaxKind.UNION_TYPE_DESC) {
        STUnionTypeDescriptorNode unionTypeDesc = (STUnionTypeDescriptorNode) lhsTypeDesc;
        STNode middleTypeDesc =
                getArrayTypeDesc(openBracket, member, closeBracket, unionTypeDesc.rightTypeDesc);
        lhsTypeDesc = createUnionTypeDesc(unionTypeDesc.leftTypeDesc, unionTypeDesc.pipeToken, middleTypeDesc);
    } else if (lhsTypeDesc.kind == SyntaxKind.INTERSECTION_TYPE_DESC) {
        STIntersectionTypeDescriptorNode intersectionTypeDesc = (STIntersectionTypeDescriptorNode) lhsTypeDesc;
        STNode middleTypeDesc =
                getArrayTypeDesc(openBracket, member, closeBracket, intersectionTypeDesc.rightTypeDesc);
        lhsTypeDesc = createIntersectionTypeDesc(intersectionTypeDesc.leftTypeDesc,
                intersectionTypeDesc.bitwiseAndToken, middleTypeDesc);
    } else {
        lhsTypeDesc = createArrayTypeDesc(lhsTypeDesc, openBracket, member, closeBracket);
    }
    return lhsTypeDesc;
}

/**
 * Parse union (|) or intersection (&) type operator.
 *
 * @return pipe or bitwise and token
 */
private STNode parseUnionOrIntersectionToken() {
    STToken token = peek();
    if (token.kind == SyntaxKind.PIPE_TOKEN || token.kind == SyntaxKind.BITWISE_AND_TOKEN) {
        return consume();
    } else {
        recover(token, ParserRuleContext.UNION_OR_INTERSECTION_TOKEN);
        return parseUnionOrIntersectionToken();
    }
}

/**
 * Infer the type of the ambiguous bracketed list, based on the type of the member.
* * @param memberNode Member node * @return Inferred type of the bracketed list */ private SyntaxKind getBracketedListNodeType(STNode memberNode, boolean isTypedBindingPattern) { if (isEmpty(memberNode)) { return SyntaxKind.NONE; } if (isDefiniteTypeDesc(memberNode.kind)) { return SyntaxKind.TUPLE_TYPE_DESC; } switch (memberNode.kind) { case ASTERISK_LITERAL: return SyntaxKind.ARRAY_TYPE_DESC; case CAPTURE_BINDING_PATTERN: case LIST_BINDING_PATTERN: case REST_BINDING_PATTERN: case MAPPING_BINDING_PATTERN: case WILDCARD_BINDING_PATTERN: return SyntaxKind.LIST_BINDING_PATTERN; case QUALIFIED_NAME_REFERENCE: case REST_TYPE: return SyntaxKind.TUPLE_TYPE_DESC; case NUMERIC_LITERAL: if (isTypedBindingPattern) { return SyntaxKind.ARRAY_TYPE_DESC; } return SyntaxKind.ARRAY_TYPE_DESC_OR_MEMBER_ACCESS; case SIMPLE_NAME_REFERENCE: case BRACKETED_LIST: case MAPPING_BP_OR_MAPPING_CONSTRUCTOR: return SyntaxKind.NONE; case ERROR_CONSTRUCTOR: if (isPossibleErrorBindingPattern((STErrorConstructorExpressionNode) memberNode)) { return SyntaxKind.NONE; } return SyntaxKind.INDEXED_EXPRESSION; default: if (isTypedBindingPattern) { return SyntaxKind.NONE; } return SyntaxKind.INDEXED_EXPRESSION; } } /* * This section tries to break the ambiguity in parsing a statement that starts with a open-bracket. * The ambiguity lies in between: * 1) Assignment that starts with list binding pattern * 2) Var-decl statement that starts with tuple type * 3) Statement that starts with list constructor, such as sync-send, etc. */ /** * Parse any statement that starts with an open-bracket. * * @param annots Annotations attached to the statement. 
 * @return Parsed node
 */
private STNode parseStatementStartsWithOpenBracket(STNode annots, boolean possibleMappingField) {
    startContext(ParserRuleContext.ASSIGNMENT_OR_VAR_DECL_STMT);
    return parseStatementStartsWithOpenBracket(annots, true, possibleMappingField);
}

// Parses a nested bracketed list (i.e. not the root of the statement).
private STNode parseMemberBracketedList(boolean possibleMappingField) {
    STNode annots = STNodeFactory.createEmptyNodeList();
    return parseStatementStartsWithOpenBracket(annots, false, possibleMappingField);
}

/**
 * The bracketed list at the start of a statement can be one of the following.
 * 1) List binding pattern
 * 2) Tuple type
 * 3) List constructor
 *
 * @param isRoot Is this the root of the list
 * @return Parsed node
 */
private STNode parseStatementStartsWithOpenBracket(STNode annots, boolean isRoot, boolean possibleMappingField) {
    startContext(ParserRuleContext.STMT_START_BRACKETED_LIST);
    STNode openBracket = parseOpenBracket();
    List<STNode> memberList = new ArrayList<>();
    while (!isBracketedListEnd(peek().kind)) {
        // Parse a member and see whether it resolves the ambiguity; if it does,
        // hand the already-collected members to the specific parse method.
        STNode member = parseStatementStartBracketedListMember();
        SyntaxKind currentNodeType = getStmtStartBracketedListType(member);
        switch (currentNodeType) {
            case TUPLE_TYPE_DESC:
                return parseAsTupleTypeDesc(annots, openBracket, memberList, member, isRoot);
            case LIST_BINDING_PATTERN:
                return parseAsListBindingPattern(openBracket, memberList, member, isRoot);
            case LIST_CONSTRUCTOR:
                return parseAsListConstructor(openBracket, memberList, member, isRoot);
            case LIST_BP_OR_LIST_CONSTRUCTOR:
                return parseAsListBindingPatternOrListConstructor(openBracket, memberList, member, isRoot);
            case TUPLE_TYPE_DESC_OR_LIST_CONST:
                return parseAsTupleTypeDescOrListConstructor(annots, openBracket, memberList, member, isRoot);
            case NONE:
            default:
                // Still ambiguous — collect the member and continue.
                memberList.add(member);
                break;
        }

        STNode memberEnd = parseBracketedListMemberEnd();
        if (memberEnd == null) {
            break;
        }
        memberList.add(memberEnd);
    }

    STNode closeBracket = parseCloseBracket();
    STNode bracketedList = parseStatementStartBracketedListRhs(annots, openBracket, memberList, closeBracket,
            isRoot, possibleMappingField);
    return bracketedList;
}

/**
 * Parse a member of a list-binding-pattern, tuple-type-desc, or
 * list-constructor-expr, when the parent is ambiguous.
 *
 * @return Parsed node
 */
private STNode parseStatementStartBracketedListMember() {
    List<STNode> typeDescQualifiers = new ArrayList<>();
    return parseStatementStartBracketedListMember(typeDescQualifiers);
}

private STNode parseStatementStartBracketedListMember(List<STNode> qualifiers) {
    parseTypeDescQualifiers(qualifiers);
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case OPEN_BRACKET_TOKEN:
            reportInvalidQualifierList(qualifiers);
            return parseMemberBracketedList(false);
        case IDENTIFIER_TOKEN:
            reportInvalidQualifierList(qualifiers);
            STNode identifier = parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);
            if (isWildcardBP(identifier)) {
                // `_` — wildcard binding pattern.
                STNode varName = ((STSimpleNameReferenceNode) identifier).name;
                return getWildcardBindingPattern(varName);
            }

            nextToken = peek();
            if (nextToken.kind == SyntaxKind.ELLIPSIS_TOKEN) {
                // `T...` — rest descriptor.
                STNode ellipsis = parseEllipsis();
                return STNodeFactory.createRestDescriptorNode(identifier, ellipsis);
            }

            if (nextToken.kind != SyntaxKind.OPEN_BRACKET_TOKEN && isValidTypeContinuationToken(nextToken)) {
                return parseComplexTypeDescriptor(identifier, ParserRuleContext.TYPE_DESC_IN_TUPLE, false);
            }
            return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, identifier, false, true);
        case OPEN_BRACE_TOKEN:
            reportInvalidQualifierList(qualifiers);
            return parseMappingBindingPatterOrMappingConstructor();
        case ERROR_KEYWORD:
            reportInvalidQualifierList(qualifiers);
            STToken nextNextToken = getNextNextToken();
            if (nextNextToken.kind == SyntaxKind.OPEN_PAREN_TOKEN ||
                    nextNextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {
                return parseErrorBindingPatternOrErrorConstructor();
            }
            // Plain `error` — treat as a type descriptor.
            return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
        case ELLIPSIS_TOKEN:
            reportInvalidQualifierList(qualifiers);
            return parseListBindingPatternMember();
        case XML_KEYWORD:
        case STRING_KEYWORD:
            reportInvalidQualifierList(qualifiers);
            if (getNextNextToken().kind == SyntaxKind.BACKTICK_TOKEN) {
                // Backtick follows — a template expression, not a type.
                return parseExpression(false);
            }
            return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
        case TABLE_KEYWORD:
        case STREAM_KEYWORD:
            reportInvalidQualifierList(qualifiers);
            if (getNextNextToken().kind == SyntaxKind.LT_TOKEN) {
                // `table<` / `stream<` — parameterized type descriptor.
                return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
            }
            return parseExpression(false);
        case OPEN_PAREN_TOKEN:
            return parseTypeDescOrExpr(qualifiers);
        case FUNCTION_KEYWORD:
            return parseAnonFuncExprOrFuncTypeDesc(qualifiers);
        default:
            if (isValidExpressionStart(nextToken.kind, 1)) {
                reportInvalidQualifierList(qualifiers);
                return parseExpression(false);
            }

            if (isTypeStartingToken(nextToken.kind)) {
                return parseTypeDescriptor(qualifiers, ParserRuleContext.TYPE_DESC_IN_TUPLE);
            }

            recover(peek(), ParserRuleContext.STMT_START_BRACKETED_LIST_MEMBER, qualifiers);
            return parseStatementStartBracketedListMember(qualifiers);
    }
}

// The first member narrowed the list down to tuple-type-desc-or-list-constructor;
// continue parsing under that (still two-way ambiguous) interpretation.
private STNode parseAsTupleTypeDescOrListConstructor(STNode annots, STNode openBracket, List<STNode> memberList,
                                                     STNode member, boolean isRoot) {
    memberList.add(member);
    STNode memberEnd = parseBracketedListMemberEnd();

    STNode tupleTypeDescOrListCons;
    if (memberEnd == null) {
        // No comma: that member was the last one.
        STNode closeBracket = parseCloseBracket();
        tupleTypeDescOrListCons =
                parseTupleTypeDescOrListConstructorRhs(openBracket, memberList, closeBracket, isRoot);
    } else {
        memberList.add(memberEnd);
        tupleTypeDescOrListCons = parseTupleTypeDescOrListConstructor(annots, openBracket, memberList, isRoot);
    }

    return tupleTypeDescOrListCons;
}

/**
 * Parse tuple type desc or list constructor.
 *
 * @return Parsed node
 */
private STNode parseTupleTypeDescOrListConstructor(STNode annots) {
    startContext(ParserRuleContext.BRACKETED_LIST);
    STNode openBracket = parseOpenBracket();
    List<STNode> memberList = new ArrayList<>();
    return parseTupleTypeDescOrListConstructor(annots, openBracket, memberList, false);
}

// Parses remaining members while it is unknown whether the list is a
// tuple-type-desc or a list-constructor; switches to the specific parse method
// as soon as a member settles the ambiguity.
private STNode parseTupleTypeDescOrListConstructor(STNode annots, STNode openBracket, List<STNode> memberList,
                                                   boolean isRoot) {
    STToken nextToken = peek();
    while (!isBracketedListEnd(nextToken.kind)) {
        STNode member = parseTupleTypeDescOrListConstructorMember(annots);
        SyntaxKind currentNodeType = getParsingNodeTypeOfTupleTypeOrListCons(member);
        switch (currentNodeType) {
            case LIST_CONSTRUCTOR:
                return parseAsListConstructor(openBracket, memberList, member, isRoot);
            case TUPLE_TYPE_DESC:
                return parseAsTupleTypeDesc(annots, openBracket, memberList, member, isRoot);
            case TUPLE_TYPE_DESC_OR_LIST_CONST:
            default:
                // Still ambiguous — collect and continue.
                memberList.add(member);
                break;
        }

        STNode memberEnd = parseBracketedListMemberEnd();
        if (memberEnd == null) {
            break;
        }
        memberList.add(memberEnd);
        nextToken = peek();
    }

    STNode closeBracket = parseCloseBracket();
    return parseTupleTypeDescOrListConstructorRhs(openBracket, memberList, closeBracket, isRoot);
}

private STNode parseTupleTypeDescOrListConstructorMember(STNode annots) {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case OPEN_BRACKET_TOKEN:
            // Nested list: again ambiguous.
            return parseTupleTypeDescOrListConstructor(annots);
        case IDENTIFIER_TOKEN:
            STNode identifier = parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);
            if (peek().kind == SyntaxKind.ELLIPSIS_TOKEN) {
                // `T...` — rest descriptor.
                STNode ellipsis = parseEllipsis();
                return STNodeFactory.createRestDescriptorNode(identifier, ellipsis);
            }
            return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, identifier, false, false);
        case OPEN_BRACE_TOKEN:
            return parseMappingConstructorExpr();
        case ERROR_KEYWORD:
            STToken nextNextToken = getNextNextToken();
            if (nextNextToken.kind == SyntaxKind.OPEN_PAREN_TOKEN ||
                    nextNextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {
                return parseErrorConstructorExpr(false);
            }
            // Plain `error` — treat as a type descriptor.
            return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
        case XML_KEYWORD:
        case STRING_KEYWORD:
            if (getNextNextToken().kind == SyntaxKind.BACKTICK_TOKEN) {
                // Backtick follows — a template expression, not a type.
                return parseExpression(false);
            }
            return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
        case TABLE_KEYWORD:
        case STREAM_KEYWORD:
            if (getNextNextToken().kind == SyntaxKind.LT_TOKEN) {
                return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
            }
            return parseExpression(false);
        case OPEN_PAREN_TOKEN:
            return parseTypeDescOrExpr();
        default:
            if (isValidExpressionStart(nextToken.kind, 1)) {
                return parseExpression(false);
            }

            if (isTypeStartingToken(nextToken.kind)) {
                return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
            }

            recover(peek(), ParserRuleContext.TUPLE_TYPE_DESC_OR_LIST_CONST_MEMBER, annots);
            return parseTupleTypeDescOrListConstructorMember(annots);
    }
}

private SyntaxKind getParsingNodeTypeOfTupleTypeOrListCons(STNode memberNode) {
    // Same inference applies as for a statement-start bracketed list.
    return getStmtStartBracketedListType(memberNode);
}

// Decides between tuple-type-desc and list-constructor once the close-bracket
// has been consumed, based on the token that follows it.
private STNode parseTupleTypeDescOrListConstructorRhs(STNode openBracket, List<STNode> members,
                                                      STNode closeBracket, boolean isRoot) {
    STNode tupleTypeOrListConst;
    switch (peek().kind) {
        case COMMA_TOKEN:
        case CLOSE_BRACE_TOKEN:
        case CLOSE_BRACKET_TOKEN:
            if (!isRoot) {
                // Nested and still ambiguous: return an ambiguous-collection node
                // and let the parent decide.
                endContext();
                return new STAmbiguousCollectionNode(SyntaxKind.TUPLE_TYPE_DESC_OR_LIST_CONST, openBracket,
                        members, closeBracket);
            }
            // deliberate fall-through to the default handling
        default:
            if (isValidExprRhsStart(peek().kind, closeBracket.kind) ||
                    (isRoot && peek().kind == SyntaxKind.EQUAL_TOKEN)) {
                // An expression continues: this was a list-constructor.
                members = getExpressionList(members);
                STNode memberExpressions = STNodeFactory.createNodeList(members);
                tupleTypeOrListConst = STNodeFactory.createListConstructorExpressionNode(openBracket,
                        memberExpressions, closeBracket);
                break;
            }

            // Otherwise it is a tuple-type-desc.
            STNode memberTypeDescs = STNodeFactory.createNodeList(getTypeDescList(members));
            STNode tupleTypeDesc =
                    STNodeFactory.createTupleTypeDescriptorNode(openBracket, memberTypeDescs, closeBracket);
            tupleTypeOrListConst =
                    parseComplexTypeDescriptor(tupleTypeDesc, ParserRuleContext.TYPE_DESC_IN_TUPLE, false);
    }

    endContext();

    if (!isRoot) {
        return tupleTypeOrListConst;
    }

    STNode annots = STNodeFactory.createEmptyNodeList();
    return parseStmtStartsWithTupleTypeOrExprRhs(annots, tupleTypeOrListConst, isRoot);
}
class member, object member or object member descriptor. * </p> * <code> * class-member := object-field | method-defn | object-type-inclusion * <br/> * object-member := object-field | method-defn * <br/> * object-member-descriptor := object-field-descriptor | method-decl | object-type-inclusion * </code> * * @param context Parsing context of the object member * @return Parsed node */ private STNode parseObjectMember(ParserRuleContext context) { STNode metadata; STToken nextToken = peek(); switch (nextToken.kind) { case EOF_TOKEN: case CLOSE_BRACE_TOKEN: return null; case ASTERISK_TOKEN: case PUBLIC_KEYWORD: case PRIVATE_KEYWORD: case FINAL_KEYWORD: case REMOTE_KEYWORD: case FUNCTION_KEYWORD: case TRANSACTIONAL_KEYWORD: case ISOLATED_KEYWORD: case RESOURCE_KEYWORD: metadata = STNodeFactory.createEmptyNode(); break; case DOCUMENTATION_STRING: case AT_TOKEN: metadata = parseMetaData(); break; default: if (isTypeStartingToken(nextToken.kind)) { metadata = STNodeFactory.createEmptyNode(); break; } ParserRuleContext recoveryCtx; if (context == ParserRuleContext.OBJECT_MEMBER) { recoveryCtx = ParserRuleContext.OBJECT_MEMBER_START; } else { recoveryCtx = ParserRuleContext.CLASS_MEMBER_START; } recover(peek(), recoveryCtx); return parseObjectMember(context); } return parseObjectMemberWithoutMeta(metadata, context); }
class member, object member or object member descriptor. * </p> * <code> * class-member := object-field | method-defn | object-type-inclusion * <br/> * object-member := object-field | method-defn * <br/> * object-member-descriptor := object-field-descriptor | method-decl | object-type-inclusion * </code> * * @param context Parsing context of the object member * @return Parsed node */ private STNode parseObjectMember(ParserRuleContext context) { STNode metadata; STToken nextToken = peek(); switch (nextToken.kind) { case EOF_TOKEN: case CLOSE_BRACE_TOKEN: return null; case ASTERISK_TOKEN: case PUBLIC_KEYWORD: case PRIVATE_KEYWORD: case FINAL_KEYWORD: case REMOTE_KEYWORD: case FUNCTION_KEYWORD: case TRANSACTIONAL_KEYWORD: case ISOLATED_KEYWORD: case RESOURCE_KEYWORD: metadata = STNodeFactory.createEmptyNode(); break; case DOCUMENTATION_STRING: case AT_TOKEN: metadata = parseMetaData(); break; default: if (isTypeStartingToken(nextToken.kind)) { metadata = STNodeFactory.createEmptyNode(); break; } ParserRuleContext recoveryCtx; if (context == ParserRuleContext.OBJECT_MEMBER) { recoveryCtx = ParserRuleContext.OBJECT_MEMBER_START; } else { recoveryCtx = ParserRuleContext.CLASS_MEMBER_START; } recover(peek(), recoveryCtx); return parseObjectMember(context); } return parseObjectMemberWithoutMeta(metadata, context); }
Is this timeout intended for playback only, or for both record and playback? If it is playback-only, use `@DoNotRecord(skipInPlayback = true)` instead — `@LiveOnly` prevents the test from running in record mode.
/**
 * Verifies that a VM can be created with both a local (system-assigned) and an
 * external (user-assigned) managed service identity, and that the expected role
 * assignments exist for each: CONTRIBUTOR on the network for the system-assigned
 * identity, and CONTRIBUTOR on the resource group for the user-assigned identity.
 */
public void canCreateVirtualMachineWithLMSIAndEMSI() {
    rgName = generateRandomResourceName("java-emsi-c-rg", 15);
    String identityName1 = generateRandomResourceName("msi-id", 15);
    String networkName = generateRandomResourceName("nw", 10);

    // Resource group and virtual network the identities will be granted access to.
    ResourceGroup resourceGroup = resourceManager.resourceGroups().define(rgName).withRegion(region).create();

    Network network =
        networkManager
            .networks()
            .define(networkName)
            .withRegion(region)
            .withExistingResourceGroup(resourceGroup)
            .create();

    // User-assigned (external) identity with CONTRIBUTOR access to the current resource group.
    Creatable<Identity> creatableIdentity =
        msiManager
            .identities()
            .define(identityName1)
            .withRegion(region)
            .withExistingResourceGroup(resourceGroup)
            .withAccessToCurrentResourceGroup(BuiltInRole.CONTRIBUTOR);

    // VM with a system-assigned identity (CONTRIBUTOR on the network) plus the
    // user-assigned identity defined above.
    VirtualMachine virtualMachine =
        computeManager
            .virtualMachines()
            .define(vmName)
            .withRegion(region)
            .withNewResourceGroup(rgName)
            .withNewPrimaryNetwork("10.0.0.0/28")
            .withPrimaryPrivateIPAddressDynamic()
            .withoutPrimaryPublicIPAddress()
            .withPopularLinuxImage(KnownLinuxVirtualMachineImage.UBUNTU_SERVER_16_04_LTS)
            .withRootUsername("Foo12")
            .withSsh(sshPublicKey())
            .withSystemAssignedManagedServiceIdentity()
            .withSystemAssignedIdentityBasedAccessTo(network.id(), BuiltInRole.CONTRIBUTOR)
            .withNewUserAssignedManagedServiceIdentity(creatableIdentity)
            .withSize(VirtualMachineSizeTypes.STANDARD_A0)
            .create();

    Assertions.assertNotNull(virtualMachine);
    Assertions.assertNotNull(virtualMachine.innerModel());
    Assertions.assertTrue(virtualMachine.isManagedServiceIdentityEnabled());
    Assertions.assertNotNull(virtualMachine.systemAssignedManagedServiceIdentityPrincipalId());
    Assertions.assertNotNull(virtualMachine.systemAssignedManagedServiceIdentityTenantId());

    // Exactly one user-assigned identity is expected on the VM.
    Set<String> emsiIds = virtualMachine.userAssignedManagedServiceIdentityIds();
    Assertions.assertNotNull(emsiIds);
    Assertions.assertEquals(1, emsiIds.size());

    Identity identity = msiManager.identities().getById(emsiIds.iterator().next());
    Assertions.assertNotNull(identity);
    Assertions.assertTrue(identity.name().equalsIgnoreCase(identityName1));

    // The system-assigned identity must hold a role assignment on the network.
    PagedIterable<RoleAssignment> roleAssignmentsForNetwork =
        this.msiManager.authorizationManager().roleAssignments().listByScope(network.id());
    boolean found = false;
    for (RoleAssignment roleAssignment : roleAssignmentsForNetwork) {
        if (roleAssignment.principalId() != null
            && roleAssignment
                .principalId()
                .equalsIgnoreCase(virtualMachine.systemAssignedManagedServiceIdentityPrincipalId())) {
            found = true;
            break;
        }
    }
    Assertions
        .assertTrue(
            found,
            // FIX: added trailing space so the principal id is not fused to the message text.
            "Expected role assignment not found for the virtual network for local identity "
                + virtualMachine.systemAssignedManagedServiceIdentityPrincipalId());

    RoleAssignment assignment =
        lookupRoleAssignmentUsingScopeAndRoleAsync(
                network.id(),
                BuiltInRole.CONTRIBUTOR,
                virtualMachine.systemAssignedManagedServiceIdentityPrincipalId())
            .block();
    Assertions
        .assertNotNull(
            assignment,
            "Expected role assignment with ROLE not found for the virtual network for system assigned identity");

    // The user-assigned identity must hold a role assignment on the resource group.
    ResourceGroup resourceGroup1 = resourceManager.resourceGroups().getByName(virtualMachine.resourceGroupName());
    PagedIterable<RoleAssignment> roleAssignmentsForResourceGroup =
        this.msiManager.authorizationManager().roleAssignments().listByScope(resourceGroup1.id());
    found = false;
    for (RoleAssignment roleAssignment : roleAssignmentsForResourceGroup) {
        if (roleAssignment.principalId() != null
            && roleAssignment.principalId().equalsIgnoreCase(identity.principalId())) {
            found = true;
            break;
        }
    }
    Assertions
        .assertTrue(
            found,
            // FIX: added trailing space so the identity name is not fused to the message text.
            "Expected role assignment not found for the resource group for identity " + identity.name());

    assignment =
        lookupRoleAssignmentUsingScopeAndRoleAsync(
                resourceGroup1.id(), BuiltInRole.CONTRIBUTOR, identity.principalId())
            .block();
    Assertions
        .assertNotNull(
            assignment,
            "Expected role assignment with ROLE not found for the resource group for system assigned identity");
}
// Creates a VM with both a system-assigned and a user-assigned managed service
// identity, then verifies the expected role assignments were created.
// NOTE(review): this method is a byte-identical copy of the one immediately
// above — likely an extraction/dataset artifact; confirm and deduplicate.
public void canCreateVirtualMachineWithLMSIAndEMSI() {
    rgName = generateRandomResourceName("java-emsi-c-rg", 15);
    String identityName1 = generateRandomResourceName("msi-id", 15);
    String networkName = generateRandomResourceName("nw", 10);

    // Resource group and virtual network the identities will be granted access to.
    ResourceGroup resourceGroup = resourceManager.resourceGroups().define(rgName).withRegion(region).create();

    Network network =
        networkManager
            .networks()
            .define(networkName)
            .withRegion(region)
            .withExistingResourceGroup(resourceGroup)
            .create();

    // User-assigned (external) identity with CONTRIBUTOR access to the current resource group.
    Creatable<Identity> creatableIdentity =
        msiManager
            .identities()
            .define(identityName1)
            .withRegion(region)
            .withExistingResourceGroup(resourceGroup)
            .withAccessToCurrentResourceGroup(BuiltInRole.CONTRIBUTOR);

    // VM with a system-assigned identity (CONTRIBUTOR on the network) plus the
    // user-assigned identity defined above.
    VirtualMachine virtualMachine =
        computeManager
            .virtualMachines()
            .define(vmName)
            .withRegion(region)
            .withNewResourceGroup(rgName)
            .withNewPrimaryNetwork("10.0.0.0/28")
            .withPrimaryPrivateIPAddressDynamic()
            .withoutPrimaryPublicIPAddress()
            .withPopularLinuxImage(KnownLinuxVirtualMachineImage.UBUNTU_SERVER_16_04_LTS)
            .withRootUsername("Foo12")
            .withSsh(sshPublicKey())
            .withSystemAssignedManagedServiceIdentity()
            .withSystemAssignedIdentityBasedAccessTo(network.id(), BuiltInRole.CONTRIBUTOR)
            .withNewUserAssignedManagedServiceIdentity(creatableIdentity)
            .withSize(VirtualMachineSizeTypes.STANDARD_A0)
            .create();

    Assertions.assertNotNull(virtualMachine);
    Assertions.assertNotNull(virtualMachine.innerModel());
    Assertions.assertTrue(virtualMachine.isManagedServiceIdentityEnabled());
    Assertions.assertNotNull(virtualMachine.systemAssignedManagedServiceIdentityPrincipalId());
    Assertions.assertNotNull(virtualMachine.systemAssignedManagedServiceIdentityTenantId());

    // Exactly one user-assigned identity is expected on the VM.
    Set<String> emsiIds = virtualMachine.userAssignedManagedServiceIdentityIds();
    Assertions.assertNotNull(emsiIds);
    Assertions.assertEquals(1, emsiIds.size());

    Identity identity = msiManager.identities().getById(emsiIds.iterator().next());
    Assertions.assertNotNull(identity);
    Assertions.assertTrue(identity.name().equalsIgnoreCase(identityName1));

    // The system-assigned identity must hold a role assignment on the network.
    PagedIterable<RoleAssignment> roleAssignmentsForNetwork =
        this.msiManager.authorizationManager().roleAssignments().listByScope(network.id());
    boolean found = false;
    for (RoleAssignment roleAssignment : roleAssignmentsForNetwork) {
        if (roleAssignment.principalId() != null
            && roleAssignment
                .principalId()
                .equalsIgnoreCase(virtualMachine.systemAssignedManagedServiceIdentityPrincipalId())) {
            found = true;
            break;
        }
    }
    Assertions
        .assertTrue(
            found,
            "Expected role assignment not found for the virtual network for local identity"
                + virtualMachine.systemAssignedManagedServiceIdentityPrincipalId());

    RoleAssignment assignment =
        lookupRoleAssignmentUsingScopeAndRoleAsync(
                network.id(),
                BuiltInRole.CONTRIBUTOR,
                virtualMachine.systemAssignedManagedServiceIdentityPrincipalId())
            .block();
    Assertions
        .assertNotNull(
            assignment,
            "Expected role assignment with ROLE not found for the virtual network for system assigned identity");

    // The user-assigned identity must hold a role assignment on the resource group.
    ResourceGroup resourceGroup1 = resourceManager.resourceGroups().getByName(virtualMachine.resourceGroupName());
    PagedIterable<RoleAssignment> roleAssignmentsForResourceGroup =
        this.msiManager.authorizationManager().roleAssignments().listByScope(resourceGroup1.id());
    found = false;
    for (RoleAssignment roleAssignment : roleAssignmentsForResourceGroup) {
        if (roleAssignment.principalId() != null
            && roleAssignment.principalId().equalsIgnoreCase(identity.principalId())) {
            found = true;
            break;
        }
    }
    Assertions
        .assertTrue(
            found,
            "Expected role assignment not found for the resource group for identity" + identity.name());

    assignment =
        lookupRoleAssignmentUsingScopeAndRoleAsync(
                resourceGroup1.id(), BuiltInRole.CONTRIBUTOR, identity.principalId())
            .block();
    Assertions
        .assertNotNull(
            assignment,
            "Expected role assignment with ROLE not found for the resource group for system assigned identity");
}
class VirtualMachineEMSILMSIOperationsTests extends ComputeManagementTest { private String rgName = ""; private Region region = Region.US_WEST_CENTRAL; private final String vmName = "javavm"; @Override protected void cleanUpResources() { this.resourceManager.resourceGroups().beginDeleteByName(rgName); } @Test @LiveOnly public void canCreateUpdateVirtualMachineWithEMSI() { rgName = generateRandomResourceName("java-emsi-c-rg", 15); String identityName1 = generateRandomResourceName("msi-id", 15); String identityName2 = generateRandomResourceName("msi-id", 15); String networkName = generateRandomResourceName("nw", 10); Creatable<ResourceGroup> creatableRG = resourceManager.resourceGroups().define(rgName).withRegion(region); final Network network = networkManager.networks().define(networkName).withRegion(region).withNewResourceGroup(creatableRG).create(); final Identity createdIdentity = msiManager .identities() .define(identityName1) .withRegion(region) .withNewResourceGroup(creatableRG) .withAccessTo(network, BuiltInRole.READER) .create(); Creatable<Identity> creatableIdentity = msiManager .identities() .define(identityName2) .withRegion(region) .withNewResourceGroup(creatableRG) .withAccessToCurrentResourceGroup(BuiltInRole.CONTRIBUTOR); VirtualMachine virtualMachine = computeManager .virtualMachines() .define(vmName) .withRegion(region) .withNewResourceGroup(rgName) .withNewPrimaryNetwork("10.0.0.0/28") .withPrimaryPrivateIPAddressDynamic() .withoutPrimaryPublicIPAddress() .withPopularLinuxImage(KnownLinuxVirtualMachineImage.UBUNTU_SERVER_16_04_LTS) .withRootUsername("Foo12") .withSsh(sshPublicKey()) .withExistingUserAssignedManagedServiceIdentity(createdIdentity) .withNewUserAssignedManagedServiceIdentity(creatableIdentity) .withSize(VirtualMachineSizeTypes.STANDARD_A0) .create(); Assertions.assertNotNull(virtualMachine); Assertions.assertNotNull(virtualMachine.innerModel()); Assertions.assertTrue(virtualMachine.isManagedServiceIdentityEnabled()); 
Assertions.assertNull(virtualMachine.systemAssignedManagedServiceIdentityPrincipalId()); Assertions.assertNull(virtualMachine.systemAssignedManagedServiceIdentityTenantId()); Set<String> emsiIds = virtualMachine.userAssignedManagedServiceIdentityIds(); Assertions.assertNotNull(emsiIds); Assertions.assertEquals(2, emsiIds.size()); Identity implicitlyCreatedIdentity = null; for (String emsiId : emsiIds) { Identity identity = msiManager.identities().getById(emsiId); Assertions.assertNotNull(identity); Assertions .assertTrue( identity.name().equalsIgnoreCase(identityName1) || identity.name().equalsIgnoreCase(identityName2)); Assertions.assertNotNull(identity.principalId()); if (identity.name().equalsIgnoreCase(identityName2)) { implicitlyCreatedIdentity = identity; } } Assertions.assertNotNull(implicitlyCreatedIdentity); PagedIterable<RoleAssignment> roleAssignmentsForNetwork = this.msiManager.authorizationManager().roleAssignments().listByScope(network.id()); boolean found = false; for (RoleAssignment roleAssignment : roleAssignmentsForNetwork) { if (roleAssignment.principalId() != null && roleAssignment.principalId().equalsIgnoreCase(createdIdentity.principalId())) { found = true; break; } } Assertions .assertTrue( found, "Expected role assignment not found for the virtual network for identity" + createdIdentity.name()); RoleAssignment assignment = lookupRoleAssignmentUsingScopeAndRoleAsync(network.id(), BuiltInRole.READER, createdIdentity.principalId()) .block(); Assertions .assertNotNull( assignment, "Expected role assignment with ROLE not found for the virtual network for identity"); ResourceGroup resourceGroup = resourceManager.resourceGroups().getByName(virtualMachine.resourceGroupName()); Assertions.assertNotNull(resourceGroup); PagedIterable<RoleAssignment> roleAssignmentsForResourceGroup = this.msiManager.authorizationManager().roleAssignments().listByScope(resourceGroup.id()); found = false; for (RoleAssignment roleAssignment : 
roleAssignmentsForResourceGroup) { if (roleAssignment.principalId() != null && roleAssignment.principalId().equalsIgnoreCase(implicitlyCreatedIdentity.principalId())) { found = true; break; } } Assertions .assertTrue( found, "Expected role assignment not found for the resource group for identity" + implicitlyCreatedIdentity.name()); assignment = lookupRoleAssignmentUsingScopeAndRoleAsync( resourceGroup.id(), BuiltInRole.CONTRIBUTOR, implicitlyCreatedIdentity.principalId()) .block(); Assertions .assertNotNull( assignment, "Expected role assignment with ROLE not found for the resource group for identity"); emsiIds = virtualMachine.userAssignedManagedServiceIdentityIds(); Iterator<String> itr = emsiIds.iterator(); virtualMachine .update() .withoutUserAssignedManagedServiceIdentity(itr.next()) .withoutUserAssignedManagedServiceIdentity(itr.next()) .apply(); Assertions.assertEquals(0, virtualMachine.userAssignedManagedServiceIdentityIds().size()); if (virtualMachine.managedServiceIdentityType() != null) { Assertions.assertTrue(virtualMachine.managedServiceIdentityType().equals(ResourceIdentityType.NONE)); } virtualMachine.refresh(); Assertions.assertEquals(0, virtualMachine.userAssignedManagedServiceIdentityIds().size()); if (virtualMachine.managedServiceIdentityType() != null) { Assertions.assertTrue(virtualMachine.managedServiceIdentityType().equals(ResourceIdentityType.NONE)); } itr = emsiIds.iterator(); Identity identity1 = msiManager.identities().getById(itr.next()); Identity identity2 = msiManager.identities().getById(itr.next()); virtualMachine .update() .withSystemAssignedManagedServiceIdentity() .withExistingUserAssignedManagedServiceIdentity(identity1) .withExistingUserAssignedManagedServiceIdentity(identity2) .apply(); Assertions.assertNotNull(virtualMachine.userAssignedManagedServiceIdentityIds()); Assertions.assertEquals(2, virtualMachine.userAssignedManagedServiceIdentityIds().size()); Assertions.assertNotNull(virtualMachine.managedServiceIdentityType()); 
Assertions .assertTrue( virtualMachine.managedServiceIdentityType().equals(ResourceIdentityType.SYSTEM_ASSIGNED_USER_ASSIGNED)); Assertions.assertNotNull(virtualMachine.systemAssignedManagedServiceIdentityPrincipalId()); Assertions.assertNotNull(virtualMachine.systemAssignedManagedServiceIdentityTenantId()); virtualMachine.refresh(); Assertions.assertNotNull(virtualMachine.userAssignedManagedServiceIdentityIds()); Assertions.assertEquals(2, virtualMachine.userAssignedManagedServiceIdentityIds().size()); Assertions.assertNotNull(virtualMachine.managedServiceIdentityType()); Assertions .assertTrue( virtualMachine.managedServiceIdentityType().equals(ResourceIdentityType.SYSTEM_ASSIGNED_USER_ASSIGNED)); Assertions.assertNotNull(virtualMachine.systemAssignedManagedServiceIdentityPrincipalId()); Assertions.assertNotNull(virtualMachine.systemAssignedManagedServiceIdentityTenantId()); itr = emsiIds.iterator(); virtualMachine.update().withoutUserAssignedManagedServiceIdentity(itr.next()).apply(); Assertions.assertNotNull(virtualMachine.userAssignedManagedServiceIdentityIds()); Assertions.assertEquals(1, virtualMachine.userAssignedManagedServiceIdentityIds().size()); Assertions.assertNotNull(virtualMachine.managedServiceIdentityType()); Assertions .assertTrue( virtualMachine.managedServiceIdentityType().equals(ResourceIdentityType.SYSTEM_ASSIGNED_USER_ASSIGNED)); Assertions.assertNotNull(virtualMachine.systemAssignedManagedServiceIdentityPrincipalId()); Assertions.assertNotNull(virtualMachine.systemAssignedManagedServiceIdentityTenantId()); virtualMachine.update().withoutUserAssignedManagedServiceIdentity(itr.next()).apply(); Assertions.assertEquals(0, virtualMachine.userAssignedManagedServiceIdentityIds().size()); Assertions.assertNotNull(virtualMachine.managedServiceIdentityType()); Assertions.assertTrue(virtualMachine.managedServiceIdentityType().equals(ResourceIdentityType.SYSTEM_ASSIGNED)); } @Test @LiveOnly @Test public void canUpdateVirtualMachineWithEMSIAndLMSI() 
throws Exception { rgName = generateRandomResourceName("java-emsi-c-rg", 15); String identityName1 = generateRandomResourceName("msi-id-1", 15); String identityName2 = generateRandomResourceName("msi-id-2", 15); VirtualMachine virtualMachine = computeManager .virtualMachines() .define(vmName) .withRegion(region) .withNewResourceGroup(rgName) .withNewPrimaryNetwork("10.0.0.0/28") .withPrimaryPrivateIPAddressDynamic() .withoutPrimaryPublicIPAddress() .withPopularLinuxImage(KnownLinuxVirtualMachineImage.UBUNTU_SERVER_16_04_LTS) .withRootUsername("Foo12") .withSsh(sshPublicKey()) .withSize(VirtualMachineSizeTypes.STANDARD_D2S_V3) .create(); Creatable<Identity> creatableIdentity = msiManager .identities() .define(identityName1) .withRegion(region) .withExistingResourceGroup(virtualMachine.resourceGroupName()) .withAccessToCurrentResourceGroup(BuiltInRole.CONTRIBUTOR); virtualMachine = virtualMachine.update().withNewUserAssignedManagedServiceIdentity(creatableIdentity).apply(); Set<String> emsiIds = virtualMachine.userAssignedManagedServiceIdentityIds(); Assertions.assertNotNull(emsiIds); Assertions.assertEquals(1, emsiIds.size()); Identity identity = msiManager.identities().getById(emsiIds.iterator().next()); Assertions.assertNotNull(identity); Assertions.assertTrue(identity.name().equalsIgnoreCase(identityName1)); virtualMachine.update() .withNewDataDisk(10) .apply(); emsiIds = virtualMachine.userAssignedManagedServiceIdentityIds(); Assertions.assertNotNull(emsiIds); Assertions.assertEquals(1, emsiIds.size()); Identity createdIdentity = msiManager .identities() .define(identityName2) .withRegion(region) .withExistingResourceGroup(virtualMachine.resourceGroupName()) .withAccessToCurrentResourceGroup(BuiltInRole.CONTRIBUTOR) .create(); virtualMachine = virtualMachine .update() .withoutUserAssignedManagedServiceIdentity(identity.id()) .withExistingUserAssignedManagedServiceIdentity(createdIdentity) .apply(); emsiIds = 
virtualMachine.userAssignedManagedServiceIdentityIds(); Assertions.assertNotNull(emsiIds); Assertions.assertEquals(1, emsiIds.size()); identity = msiManager.identities().getById(emsiIds.iterator().next()); Assertions.assertNotNull(identity); Assertions.assertTrue(identity.name().equalsIgnoreCase(identityName2)); virtualMachine.update().withSystemAssignedManagedServiceIdentity().apply(); Assertions.assertNotNull(virtualMachine); Assertions.assertNotNull(virtualMachine.innerModel()); Assertions.assertTrue(virtualMachine.isManagedServiceIdentityEnabled()); Assertions.assertNotNull(virtualMachine.managedServiceIdentityType()); Assertions .assertTrue( virtualMachine.managedServiceIdentityType().equals(ResourceIdentityType.SYSTEM_ASSIGNED_USER_ASSIGNED)); Assertions.assertNotNull(virtualMachine.systemAssignedManagedServiceIdentityPrincipalId()); Assertions.assertNotNull(virtualMachine.systemAssignedManagedServiceIdentityTenantId()); Assertions.assertEquals(1, virtualMachine.userAssignedManagedServiceIdentityIds().size()); virtualMachine.update().withoutSystemAssignedManagedServiceIdentity().apply(); Assertions.assertTrue(virtualMachine.isManagedServiceIdentityEnabled()); Assertions.assertNotNull(virtualMachine.managedServiceIdentityType()); Assertions.assertTrue(virtualMachine.managedServiceIdentityType().equals(ResourceIdentityType.USER_ASSIGNED)); Assertions.assertNull(virtualMachine.systemAssignedManagedServiceIdentityPrincipalId()); Assertions.assertNull(virtualMachine.systemAssignedManagedServiceIdentityTenantId()); Assertions.assertEquals(1, virtualMachine.userAssignedManagedServiceIdentityIds().size()); virtualMachine.update().withoutUserAssignedManagedServiceIdentity(identity.id()).apply(); Assertions.assertFalse(virtualMachine.isManagedServiceIdentityEnabled()); if (virtualMachine.managedServiceIdentityType() != null) { Assertions.assertTrue(virtualMachine.managedServiceIdentityType().equals(ResourceIdentityType.NONE)); } 
Assertions.assertNull(virtualMachine.systemAssignedManagedServiceIdentityPrincipalId()); Assertions.assertNull(virtualMachine.systemAssignedManagedServiceIdentityTenantId()); Assertions.assertEquals(0, virtualMachine.userAssignedManagedServiceIdentityIds().size()); } private Mono<RoleAssignment> lookupRoleAssignmentUsingScopeAndRoleAsync( final String scope, BuiltInRole role, final String principalId) { return this .msiManager .authorizationManager() .roleDefinitions() .getByScopeAndRoleNameAsync(scope, role.toString()) .flatMap( roleDefinition -> msiManager .authorizationManager() .roleAssignments() .listByScopeAsync(scope) .filter( roleAssignment -> roleAssignment.roleDefinitionId().equalsIgnoreCase(roleDefinition.id()) && roleAssignment.principalId().equalsIgnoreCase(principalId)) .singleOrEmpty()) .switchIfEmpty(Mono.defer(() -> Mono.empty())); } }
class VirtualMachineEMSILMSIOperationsTests extends ComputeManagementTest { private String rgName = ""; private Region region = Region.US_WEST2; private final String vmName = "javavm"; @Override protected void cleanUpResources() { this.resourceManager.resourceGroups().beginDeleteByName(rgName); } @Test @DoNotRecord(skipInPlayback = true) public void canCreateUpdateVirtualMachineWithEMSI() { rgName = generateRandomResourceName("java-emsi-c-rg", 15); String identityName1 = generateRandomResourceName("msi-id", 15); String identityName2 = generateRandomResourceName("msi-id", 15); String networkName = generateRandomResourceName("nw", 10); Creatable<ResourceGroup> creatableRG = resourceManager.resourceGroups().define(rgName).withRegion(region); final Network network = networkManager.networks().define(networkName).withRegion(region).withNewResourceGroup(creatableRG).create(); final Identity createdIdentity = msiManager .identities() .define(identityName1) .withRegion(region) .withNewResourceGroup(creatableRG) .withAccessTo(network, BuiltInRole.READER) .create(); Creatable<Identity> creatableIdentity = msiManager .identities() .define(identityName2) .withRegion(region) .withNewResourceGroup(creatableRG) .withAccessToCurrentResourceGroup(BuiltInRole.CONTRIBUTOR); VirtualMachine virtualMachine = computeManager .virtualMachines() .define(vmName) .withRegion(region) .withNewResourceGroup(rgName) .withNewPrimaryNetwork("10.0.0.0/28") .withPrimaryPrivateIPAddressDynamic() .withoutPrimaryPublicIPAddress() .withPopularLinuxImage(KnownLinuxVirtualMachineImage.UBUNTU_SERVER_16_04_LTS) .withRootUsername("Foo12") .withSsh(sshPublicKey()) .withExistingUserAssignedManagedServiceIdentity(createdIdentity) .withNewUserAssignedManagedServiceIdentity(creatableIdentity) .withSize(VirtualMachineSizeTypes.STANDARD_A0) .create(); Assertions.assertNotNull(virtualMachine); Assertions.assertNotNull(virtualMachine.innerModel()); 
Assertions.assertTrue(virtualMachine.isManagedServiceIdentityEnabled()); Assertions.assertNull(virtualMachine.systemAssignedManagedServiceIdentityPrincipalId()); Assertions.assertNull(virtualMachine.systemAssignedManagedServiceIdentityTenantId()); Set<String> emsiIds = virtualMachine.userAssignedManagedServiceIdentityIds(); Assertions.assertNotNull(emsiIds); Assertions.assertEquals(2, emsiIds.size()); Identity implicitlyCreatedIdentity = null; for (String emsiId : emsiIds) { Identity identity = msiManager.identities().getById(emsiId); Assertions.assertNotNull(identity); Assertions .assertTrue( identity.name().equalsIgnoreCase(identityName1) || identity.name().equalsIgnoreCase(identityName2)); Assertions.assertNotNull(identity.principalId()); if (identity.name().equalsIgnoreCase(identityName2)) { implicitlyCreatedIdentity = identity; } } Assertions.assertNotNull(implicitlyCreatedIdentity); PagedIterable<RoleAssignment> roleAssignmentsForNetwork = this.msiManager.authorizationManager().roleAssignments().listByScope(network.id()); boolean found = false; for (RoleAssignment roleAssignment : roleAssignmentsForNetwork) { if (roleAssignment.principalId() != null && roleAssignment.principalId().equalsIgnoreCase(createdIdentity.principalId())) { found = true; break; } } Assertions .assertTrue( found, "Expected role assignment not found for the virtual network for identity" + createdIdentity.name()); RoleAssignment assignment = lookupRoleAssignmentUsingScopeAndRoleAsync(network.id(), BuiltInRole.READER, createdIdentity.principalId()) .block(); Assertions .assertNotNull( assignment, "Expected role assignment with ROLE not found for the virtual network for identity"); ResourceGroup resourceGroup = resourceManager.resourceGroups().getByName(virtualMachine.resourceGroupName()); Assertions.assertNotNull(resourceGroup); PagedIterable<RoleAssignment> roleAssignmentsForResourceGroup = this.msiManager.authorizationManager().roleAssignments().listByScope(resourceGroup.id()); found = 
false; for (RoleAssignment roleAssignment : roleAssignmentsForResourceGroup) { if (roleAssignment.principalId() != null && roleAssignment.principalId().equalsIgnoreCase(implicitlyCreatedIdentity.principalId())) { found = true; break; } } Assertions .assertTrue( found, "Expected role assignment not found for the resource group for identity" + implicitlyCreatedIdentity.name()); assignment = lookupRoleAssignmentUsingScopeAndRoleAsync( resourceGroup.id(), BuiltInRole.CONTRIBUTOR, implicitlyCreatedIdentity.principalId()) .block(); Assertions .assertNotNull( assignment, "Expected role assignment with ROLE not found for the resource group for identity"); emsiIds = virtualMachine.userAssignedManagedServiceIdentityIds(); Iterator<String> itr = emsiIds.iterator(); virtualMachine .update() .withoutUserAssignedManagedServiceIdentity(itr.next()) .withoutUserAssignedManagedServiceIdentity(itr.next()) .apply(); Assertions.assertEquals(0, virtualMachine.userAssignedManagedServiceIdentityIds().size()); if (virtualMachine.managedServiceIdentityType() != null) { Assertions.assertTrue(virtualMachine.managedServiceIdentityType().equals(ResourceIdentityType.NONE)); } virtualMachine.refresh(); Assertions.assertEquals(0, virtualMachine.userAssignedManagedServiceIdentityIds().size()); if (virtualMachine.managedServiceIdentityType() != null) { Assertions.assertTrue(virtualMachine.managedServiceIdentityType().equals(ResourceIdentityType.NONE)); } itr = emsiIds.iterator(); Identity identity1 = msiManager.identities().getById(itr.next()); Identity identity2 = msiManager.identities().getById(itr.next()); virtualMachine .update() .withSystemAssignedManagedServiceIdentity() .withExistingUserAssignedManagedServiceIdentity(identity1) .withExistingUserAssignedManagedServiceIdentity(identity2) .apply(); Assertions.assertNotNull(virtualMachine.userAssignedManagedServiceIdentityIds()); Assertions.assertEquals(2, virtualMachine.userAssignedManagedServiceIdentityIds().size()); 
Assertions.assertNotNull(virtualMachine.managedServiceIdentityType()); Assertions .assertTrue( virtualMachine.managedServiceIdentityType().equals(ResourceIdentityType.SYSTEM_ASSIGNED_USER_ASSIGNED)); Assertions.assertNotNull(virtualMachine.systemAssignedManagedServiceIdentityPrincipalId()); Assertions.assertNotNull(virtualMachine.systemAssignedManagedServiceIdentityTenantId()); virtualMachine.refresh(); Assertions.assertNotNull(virtualMachine.userAssignedManagedServiceIdentityIds()); Assertions.assertEquals(2, virtualMachine.userAssignedManagedServiceIdentityIds().size()); Assertions.assertNotNull(virtualMachine.managedServiceIdentityType()); Assertions .assertTrue( virtualMachine.managedServiceIdentityType().equals(ResourceIdentityType.SYSTEM_ASSIGNED_USER_ASSIGNED)); Assertions.assertNotNull(virtualMachine.systemAssignedManagedServiceIdentityPrincipalId()); Assertions.assertNotNull(virtualMachine.systemAssignedManagedServiceIdentityTenantId()); itr = emsiIds.iterator(); virtualMachine.update().withoutUserAssignedManagedServiceIdentity(itr.next()).apply(); Assertions.assertNotNull(virtualMachine.userAssignedManagedServiceIdentityIds()); Assertions.assertEquals(1, virtualMachine.userAssignedManagedServiceIdentityIds().size()); Assertions.assertNotNull(virtualMachine.managedServiceIdentityType()); Assertions .assertTrue( virtualMachine.managedServiceIdentityType().equals(ResourceIdentityType.SYSTEM_ASSIGNED_USER_ASSIGNED)); Assertions.assertNotNull(virtualMachine.systemAssignedManagedServiceIdentityPrincipalId()); Assertions.assertNotNull(virtualMachine.systemAssignedManagedServiceIdentityTenantId()); virtualMachine.update().withoutUserAssignedManagedServiceIdentity(itr.next()).apply(); Assertions.assertEquals(0, virtualMachine.userAssignedManagedServiceIdentityIds().size()); Assertions.assertNotNull(virtualMachine.managedServiceIdentityType()); Assertions.assertTrue(virtualMachine.managedServiceIdentityType().equals(ResourceIdentityType.SYSTEM_ASSIGNED)); } @Test 
@DoNotRecord(skipInPlayback = true) @Test public void canUpdateVirtualMachineWithEMSIAndLMSI() throws Exception { rgName = generateRandomResourceName("java-emsi-c-rg", 15); String identityName1 = generateRandomResourceName("msi-id-1", 15); String identityName2 = generateRandomResourceName("msi-id-2", 15); VirtualMachine virtualMachine = computeManager .virtualMachines() .define(vmName) .withRegion(region) .withNewResourceGroup(rgName) .withNewPrimaryNetwork("10.0.0.0/28") .withPrimaryPrivateIPAddressDynamic() .withoutPrimaryPublicIPAddress() .withPopularLinuxImage(KnownLinuxVirtualMachineImage.UBUNTU_SERVER_16_04_LTS) .withRootUsername("Foo12") .withSsh(sshPublicKey()) .withSize(VirtualMachineSizeTypes.STANDARD_A0) .create(); Creatable<Identity> creatableIdentity = msiManager .identities() .define(identityName1) .withRegion(region) .withExistingResourceGroup(virtualMachine.resourceGroupName()) .withAccessToCurrentResourceGroup(BuiltInRole.CONTRIBUTOR); virtualMachine = virtualMachine.update().withNewUserAssignedManagedServiceIdentity(creatableIdentity).apply(); Set<String> emsiIds = virtualMachine.userAssignedManagedServiceIdentityIds(); Assertions.assertNotNull(emsiIds); Assertions.assertEquals(1, emsiIds.size()); Identity identity = msiManager.identities().getById(emsiIds.iterator().next()); Assertions.assertNotNull(identity); Assertions.assertTrue(identity.name().equalsIgnoreCase(identityName1)); virtualMachine.update() .withNewDataDisk(10) .apply(); emsiIds = virtualMachine.userAssignedManagedServiceIdentityIds(); Assertions.assertNotNull(emsiIds); Assertions.assertEquals(1, emsiIds.size()); Identity createdIdentity = msiManager .identities() .define(identityName2) .withRegion(region) .withExistingResourceGroup(virtualMachine.resourceGroupName()) .withAccessToCurrentResourceGroup(BuiltInRole.CONTRIBUTOR) .create(); virtualMachine = virtualMachine .update() .withoutUserAssignedManagedServiceIdentity(identity.id()) 
.withExistingUserAssignedManagedServiceIdentity(createdIdentity) .apply(); emsiIds = virtualMachine.userAssignedManagedServiceIdentityIds(); Assertions.assertNotNull(emsiIds); Assertions.assertEquals(1, emsiIds.size()); identity = msiManager.identities().getById(emsiIds.iterator().next()); Assertions.assertNotNull(identity); Assertions.assertTrue(identity.name().equalsIgnoreCase(identityName2)); virtualMachine.update().withSystemAssignedManagedServiceIdentity().apply(); Assertions.assertNotNull(virtualMachine); Assertions.assertNotNull(virtualMachine.innerModel()); Assertions.assertTrue(virtualMachine.isManagedServiceIdentityEnabled()); Assertions.assertNotNull(virtualMachine.managedServiceIdentityType()); Assertions .assertTrue( virtualMachine.managedServiceIdentityType().equals(ResourceIdentityType.SYSTEM_ASSIGNED_USER_ASSIGNED)); Assertions.assertNotNull(virtualMachine.systemAssignedManagedServiceIdentityPrincipalId()); Assertions.assertNotNull(virtualMachine.systemAssignedManagedServiceIdentityTenantId()); Assertions.assertEquals(1, virtualMachine.userAssignedManagedServiceIdentityIds().size()); virtualMachine.update().withoutSystemAssignedManagedServiceIdentity().apply(); Assertions.assertTrue(virtualMachine.isManagedServiceIdentityEnabled()); Assertions.assertNotNull(virtualMachine.managedServiceIdentityType()); Assertions.assertTrue(virtualMachine.managedServiceIdentityType().equals(ResourceIdentityType.USER_ASSIGNED)); Assertions.assertNull(virtualMachine.systemAssignedManagedServiceIdentityPrincipalId()); Assertions.assertNull(virtualMachine.systemAssignedManagedServiceIdentityTenantId()); Assertions.assertEquals(1, virtualMachine.userAssignedManagedServiceIdentityIds().size()); virtualMachine.update().withoutUserAssignedManagedServiceIdentity(identity.id()).apply(); Assertions.assertFalse(virtualMachine.isManagedServiceIdentityEnabled()); if (virtualMachine.managedServiceIdentityType() != null) { 
Assertions.assertTrue(virtualMachine.managedServiceIdentityType().equals(ResourceIdentityType.NONE)); } Assertions.assertNull(virtualMachine.systemAssignedManagedServiceIdentityPrincipalId()); Assertions.assertNull(virtualMachine.systemAssignedManagedServiceIdentityTenantId()); Assertions.assertEquals(0, virtualMachine.userAssignedManagedServiceIdentityIds().size()); } private Mono<RoleAssignment> lookupRoleAssignmentUsingScopeAndRoleAsync( final String scope, BuiltInRole role, final String principalId) { return this .msiManager .authorizationManager() .roleDefinitions() .getByScopeAndRoleNameAsync(scope, role.toString()) .flatMap( roleDefinition -> msiManager .authorizationManager() .roleAssignments() .listByScopeAsync(scope) .filter( roleAssignment -> roleAssignment.roleDefinitionId().equalsIgnoreCase(roleDefinition.id()) && roleAssignment.principalId().equalsIgnoreCase(principalId)) .singleOrEmpty()) .switchIfEmpty(Mono.defer(() -> Mono.empty())); } }
Replace the `System.out` debug prints with proper log statements.
/**
 * Posts an instance register request to ZTS at {@code athenzUrl + "/instance"} and deserializes
 * the response into an {@link InstanceIdentity}.
 *
 * <p>Fixes: removed the leftover {@code System.out.println} debug prints (the request payload and
 * the error body were dumped to stdout); the error body is now carried in the thrown exception
 * message instead. The {@code CloseableHttpResponse} is now closed via try-with-resources.
 *
 * @param instanceRegisterInformation registration payload, serialized to JSON
 * @param athenzUrl base URL of the ZTS service
 * @return the identity returned by ZTS on a 2xx response
 * @throws RuntimeException on a non-success HTTP status (message includes the response body) or
 *     wrapping any {@link IOException}
 */
InstanceIdentity sendInstanceRegisterRequest(InstanceRegisterInformation instanceRegisterInformation,
                                             String athenzUrl) {
    try (CloseableHttpClient client = HttpClientBuilder.create().build()) {
        ObjectMapper objectMapper = new ObjectMapper();
        HttpUriRequest postRequest = RequestBuilder.post()
                .setUri(athenzUrl + "/instance")
                .setEntity(new StringEntity(objectMapper.writeValueAsString(instanceRegisterInformation),
                                            ContentType.APPLICATION_JSON))
                .build();
        // Response must be closed to release the underlying connection.
        try (CloseableHttpResponse response = client.execute(postRequest)) {
            if (HttpStatus.isSuccess(response.getStatusLine().getStatusCode())) {
                return objectMapper.readValue(response.getEntity().getContent(), InstanceIdentity.class);
            }
            // Include the error body in the exception so the failure is diagnosable without stdout.
            String body = EntityUtils.toString(response.getEntity());
            throw new RuntimeException("Instance register request failed: "
                    + response.getStatusLine() + ", body: " + body);
        }
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}
System.out.println(objectMapper.writeValueAsString(instanceRegisterInformation));
/**
 * Posts an instance register request to ZTS at {@code athenzUrl + "/instance"} and deserializes
 * the response into an {@link InstanceIdentity}.
 *
 * <p>Fixes: removed the leftover {@code System.out.println} debug prints (the request payload and
 * the error body were dumped to stdout); the error body is now carried in the thrown exception
 * message instead. The {@code CloseableHttpResponse} is now closed via try-with-resources.
 *
 * @param instanceRegisterInformation registration payload, serialized to JSON
 * @param athenzUrl base URL of the ZTS service
 * @return the identity returned by ZTS on a 2xx response
 * @throws RuntimeException on a non-success HTTP status (message includes the response body) or
 *     wrapping any {@link IOException}
 */
InstanceIdentity sendInstanceRegisterRequest(InstanceRegisterInformation instanceRegisterInformation,
                                             String athenzUrl) {
    try (CloseableHttpClient client = HttpClientBuilder.create().build()) {
        ObjectMapper objectMapper = new ObjectMapper();
        HttpUriRequest postRequest = RequestBuilder.post()
                .setUri(athenzUrl + "/instance")
                .setEntity(new StringEntity(objectMapper.writeValueAsString(instanceRegisterInformation),
                                            ContentType.APPLICATION_JSON))
                .build();
        // Response must be closed to release the underlying connection.
        try (CloseableHttpResponse response = client.execute(postRequest)) {
            if (HttpStatus.isSuccess(response.getStatusLine().getStatusCode())) {
                return objectMapper.readValue(response.getEntity().getContent(), InstanceIdentity.class);
            }
            // Include the error body in the exception so the failure is diagnosable without stdout.
            String body = EntityUtils.toString(response.getEntity());
            throw new RuntimeException("Instance register request failed: "
                    + response.getStatusLine() + ", body: " + body);
        }
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}
class AthenzService {
    /**
     * Sends an instance register request to ZTS and returns the resulting InstanceIdentity.
     *
     * <p>NOTE(review): this fragment shows only the class shell; the method this Javadoc
     * describes ({@code sendInstanceRegisterRequest(InstanceRegisterInformation, String)})
     * is not visible here — confirm against the full class.
     *
     * @param instanceRegisterInformation the registration payload posted to ZTS
     */
}
class AthenzService {
    /**
     * Sends an instance register request to ZTS and returns the resulting InstanceIdentity.
     *
     * <p>NOTE(review): this fragment shows only the class shell; the method this Javadoc
     * describes ({@code sendInstanceRegisterRequest(InstanceRegisterInformation, String)})
     * is not visible here — confirm against the full class.
     *
     * @param instanceRegisterInformation the registration payload posted to ZTS
     */
}
Extracting the magic number `0` into a `channelId` variable would make the test a bit more readable.
/**
 * Verifies that preparing a channel-state snapshot still succeeds after a channel has delivered
 * {@link EndOfPartitionEvent} (which releases that channel's record deserializer).
 *
 * <p>Readability fix: the magic literals for the channel id, checkpoint id, and channel count are
 * extracted into named locals ({@code channelId}, {@code checkpointId}, {@code numInputChannels});
 * behavior is unchanged.
 */
public void testSnapshotAfterEndOfPartition() throws Exception {
    // Single input channel: all traffic goes through channel 0, checkpoint id 0.
    int numInputChannels = 1;
    int channelId = 0;
    int checkpointId = 0;
    VerifyRecordsDataOutput<Long> output = new VerifyRecordsDataOutput<>();
    LongSerializer inSerializer = LongSerializer.INSTANCE;
    StreamTestSingleInputGate<Long> inputGate =
        new StreamTestSingleInputGate<>(numInputChannels, 0, inSerializer, 1024);
    // One deserializer per channel so releasing can be observed per index.
    TestRecordDeserializer[] deserializers =
        IntStream.range(0, numInputChannels)
            .mapToObj(index -> new TestRecordDeserializer(ioManager.getSpillingDirectoriesPaths()))
            .toArray(TestRecordDeserializer[]::new);

    StreamTaskNetworkInput<Long> input =
        new StreamTaskNetworkInput<>(
            new CheckpointedInputGate(
                inputGate.getInputGate(),
                new CheckpointBarrierUnaligner(
                    new int[] { numInputChannels },
                    ChannelStateWriter.NO_OP,
                    "test",
                    new DummyCheckpointInvokable())),
            inSerializer,
            new StatusWatermarkValve(numInputChannels, output),
            0,
            deserializers);

    // Barrier first, then one record; only the record is emitted downstream.
    inputGate.sendEvent(
        new CheckpointBarrier(checkpointId, 0L, CheckpointOptions.forCheckpointWithDefaultLocation()),
        channelId);
    inputGate.sendElement(new StreamRecord<>(42L), channelId);
    assertHasNextElement(input, output);
    assertHasNextElement(input, output);
    assertEquals(1, output.getNumberOfEmittedRecords());

    // End-of-partition releases the channel's deserializer ...
    inputGate.sendEvent(EndOfPartitionEvent.INSTANCE, channelId);
    input.emitNext(output);
    assertNull(deserializers[channelId]);

    // ... and the snapshot must still complete afterwards.
    CompletableFuture<Void> completableFuture = input.prepareSnapshot(ChannelStateWriter.NO_OP, checkpointId);
    completableFuture.join();
}
assertNull(deserializers[0]);
/**
 * Verifies that preparing a channel-state snapshot still succeeds after a channel has delivered
 * {@link EndOfPartitionEvent} (which releases that channel's record deserializer).
 */
public void testSnapshotAfterEndOfPartition() throws Exception {
    // Single input channel: all traffic goes through channel 0, checkpoint id 0.
    int numInputChannels = 1;
    int channelId = 0;
    int checkpointId = 0;
    VerifyRecordsDataOutput<Long> output = new VerifyRecordsDataOutput<>();
    LongSerializer inSerializer = LongSerializer.INSTANCE;
    StreamTestSingleInputGate<Long> inputGate =
        new StreamTestSingleInputGate<>(numInputChannels, 0, inSerializer, 1024);
    // One deserializer per channel so releasing can be observed per index.
    TestRecordDeserializer[] deserializers =
        IntStream.range(0, numInputChannels)
            .mapToObj(index -> new TestRecordDeserializer(ioManager.getSpillingDirectoriesPaths()))
            .toArray(TestRecordDeserializer[]::new);
    StreamTaskNetworkInput<Long> input =
        new StreamTaskNetworkInput<>(
            new CheckpointedInputGate(
                inputGate.getInputGate(),
                new CheckpointBarrierUnaligner(
                    new int[] { numInputChannels },
                    ChannelStateWriter.NO_OP,
                    "test",
                    new DummyCheckpointInvokable())),
            inSerializer,
            new StatusWatermarkValve(numInputChannels, output),
            0,
            deserializers);

    // Barrier first, then one record; only the record is emitted downstream.
    inputGate.sendEvent(
        new CheckpointBarrier(checkpointId, 0L, CheckpointOptions.forCheckpointWithDefaultLocation()),
        channelId);
    inputGate.sendElement(new StreamRecord<>(42L), channelId);
    assertHasNextElement(input, output);
    assertHasNextElement(input, output);
    assertEquals(1, output.getNumberOfEmittedRecords());

    // End-of-partition releases the channel's deserializer ...
    inputGate.sendEvent(EndOfPartitionEvent.INSTANCE, channelId);
    input.emitNext(output);
    assertNull(deserializers[channelId]);

    // ... and the snapshot must still complete afterwards.
    CompletableFuture<Void> completableFuture = input.prepareSnapshot(ChannelStateWriter.NO_OP, checkpointId);
    completableFuture.join();
}
class StreamTaskNetworkInputTest { private static final int PAGE_SIZE = 1000; private final IOManager ioManager = new IOManagerAsync(); @After public void tearDown() throws Exception { ioManager.close(); } @Test public void testIsAvailableWithBufferedDataInDeserializer() throws Exception { List<BufferOrEvent> buffers = Collections.singletonList(createDataBuffer()); VerifyRecordsDataOutput output = new VerifyRecordsDataOutput<>(); StreamTaskNetworkInput input = createStreamTaskNetworkInput(buffers, output); assertHasNextElement(input, output); assertHasNextElement(input, output); assertEquals(2, output.getNumberOfEmittedRecords()); } /** * InputGate on CheckpointBarrier can enqueue a mailbox action to execute and StreamTaskNetworkInput must * allow this action to execute before processing a following record. */ @Test public void testNoDataProcessedAfterCheckpointBarrier() throws Exception { CheckpointBarrier barrier = new CheckpointBarrier(0, 0, CheckpointOptions.forCheckpointWithDefaultLocation()); List<BufferOrEvent> buffers = new ArrayList<>(2); buffers.add(new BufferOrEvent(barrier, 0)); buffers.add(createDataBuffer()); VerifyRecordsDataOutput output = new VerifyRecordsDataOutput<>(); StreamTaskNetworkInput input = createStreamTaskNetworkInput(buffers, output); assertHasNextElement(input, output); assertEquals(0, output.getNumberOfEmittedRecords()); } @Test @Test public void testReleasingDeserializerTimely() throws Exception { int numInputChannels = 2; LongSerializer inSerializer = LongSerializer.INSTANCE; StreamTestSingleInputGate inputGate = new StreamTestSingleInputGate<>(numInputChannels, 0, inSerializer, 1024); TestRecordDeserializer[] deserializers = new TestRecordDeserializer[numInputChannels]; for (int i = 0; i < deserializers.length; i++) { deserializers[i] = new TestRecordDeserializer(ioManager.getSpillingDirectoriesPaths()); } TestRecordDeserializer[] copiedDeserializers = Arrays.copyOf(deserializers, deserializers.length); DataOutput output = new 
NoOpDataOutput<>(); StreamTaskNetworkInput input = new StreamTaskNetworkInput<>( new CheckpointedInputGate( inputGate.getInputGate(), new CheckpointBarrierTracker(1, new DummyCheckpointInvokable())), inSerializer, new StatusWatermarkValve(1, output), 0, deserializers); for (int i = 0; i < numInputChannels; i++) { assertNotNull(deserializers[i]); inputGate.sendEvent(EndOfPartitionEvent.INSTANCE, i); input.emitNext(output); assertNull(deserializers[i]); assertTrue(copiedDeserializers[i].isCleared()); } } private BufferOrEvent createDataBuffer() throws IOException { BufferBuilder bufferBuilder = BufferBuilderTestUtils.createEmptyBufferBuilder(PAGE_SIZE); BufferConsumer bufferConsumer = bufferBuilder.createBufferConsumer(); serializeRecord(42L, bufferBuilder); serializeRecord(44L, bufferBuilder); return new BufferOrEvent(bufferConsumer.build(), 0, false); } private StreamTaskNetworkInput createStreamTaskNetworkInput(List<BufferOrEvent> buffers, DataOutput output) { return new StreamTaskNetworkInput<>( new CheckpointedInputGate( new MockInputGate(1, buffers, false), new CheckpointBarrierTracker(1, new DummyCheckpointInvokable())), LongSerializer.INSTANCE, ioManager, new StatusWatermarkValve(1, output), 0); } private void serializeRecord(long value, BufferBuilder bufferBuilder) throws IOException { RecordSerializer<SerializationDelegate<StreamElement>> serializer = new SpanningRecordSerializer<>(); SerializationDelegate<StreamElement> serializationDelegate = new SerializationDelegate<>( new StreamElementSerializer<>(LongSerializer.INSTANCE)); serializationDelegate.setInstance(new StreamRecord<>(value)); serializer.serializeRecord(serializationDelegate); assertFalse(serializer.copyToBufferBuilder(bufferBuilder).isFullBuffer()); } private static void assertHasNextElement(StreamTaskNetworkInput input, DataOutput output) throws Exception { assertTrue(input.getAvailableFuture().isDone()); InputStatus status = input.emitNext(output); assertThat(status, 
is(InputStatus.MORE_AVAILABLE)); } private static class TestRecordDeserializer extends SpillingAdaptiveSpanningRecordDeserializer<DeserializationDelegate<StreamElement>> { private boolean cleared = false; public TestRecordDeserializer(String[] tmpDirectories) { super(tmpDirectories); } @Override public void clear() { cleared = true; } public boolean isCleared() { return cleared; } } private static class NoOpDataOutput<T> implements DataOutput<T> { @Override public void emitRecord(StreamRecord<T> record) { } @Override public void emitWatermark(Watermark watermark) { } @Override public void emitStreamStatus(StreamStatus streamStatus) { } @Override public void emitLatencyMarker(LatencyMarker latencyMarker) { } } private static class VerifyRecordsDataOutput<T> extends NoOpDataOutput<T> { private int numberOfEmittedRecords; @Override public void emitRecord(StreamRecord<T> record) { numberOfEmittedRecords++; } int getNumberOfEmittedRecords() { return numberOfEmittedRecords; } } }
class StreamTaskNetworkInputTest { private static final int PAGE_SIZE = 1000; private final IOManager ioManager = new IOManagerAsync(); @After public void tearDown() throws Exception { ioManager.close(); } @Test public void testIsAvailableWithBufferedDataInDeserializer() throws Exception { List<BufferOrEvent> buffers = Collections.singletonList(createDataBuffer()); VerifyRecordsDataOutput output = new VerifyRecordsDataOutput<>(); StreamTaskNetworkInput input = createStreamTaskNetworkInput(buffers, output); assertHasNextElement(input, output); assertHasNextElement(input, output); assertEquals(2, output.getNumberOfEmittedRecords()); } /** * InputGate on CheckpointBarrier can enqueue a mailbox action to execute and StreamTaskNetworkInput must * allow this action to execute before processing a following record. */ @Test public void testNoDataProcessedAfterCheckpointBarrier() throws Exception { CheckpointBarrier barrier = new CheckpointBarrier(0, 0, CheckpointOptions.forCheckpointWithDefaultLocation()); List<BufferOrEvent> buffers = new ArrayList<>(2); buffers.add(new BufferOrEvent(barrier, 0)); buffers.add(createDataBuffer()); VerifyRecordsDataOutput output = new VerifyRecordsDataOutput<>(); StreamTaskNetworkInput input = createStreamTaskNetworkInput(buffers, output); assertHasNextElement(input, output); assertEquals(0, output.getNumberOfEmittedRecords()); } @Test @Test public void testReleasingDeserializerTimely() throws Exception { int numInputChannels = 2; LongSerializer inSerializer = LongSerializer.INSTANCE; StreamTestSingleInputGate inputGate = new StreamTestSingleInputGate<>(numInputChannels, 0, inSerializer, 1024); TestRecordDeserializer[] deserializers = new TestRecordDeserializer[numInputChannels]; for (int i = 0; i < deserializers.length; i++) { deserializers[i] = new TestRecordDeserializer(ioManager.getSpillingDirectoriesPaths()); } TestRecordDeserializer[] copiedDeserializers = Arrays.copyOf(deserializers, deserializers.length); DataOutput output = new 
NoOpDataOutput<>(); StreamTaskNetworkInput input = new StreamTaskNetworkInput<>( new CheckpointedInputGate( inputGate.getInputGate(), new CheckpointBarrierTracker(1, new DummyCheckpointInvokable())), inSerializer, new StatusWatermarkValve(1, output), 0, deserializers); for (int i = 0; i < numInputChannels; i++) { assertNotNull(deserializers[i]); inputGate.sendEvent(EndOfPartitionEvent.INSTANCE, i); input.emitNext(output); assertNull(deserializers[i]); assertTrue(copiedDeserializers[i].isCleared()); } } private BufferOrEvent createDataBuffer() throws IOException { BufferBuilder bufferBuilder = BufferBuilderTestUtils.createEmptyBufferBuilder(PAGE_SIZE); BufferConsumer bufferConsumer = bufferBuilder.createBufferConsumer(); serializeRecord(42L, bufferBuilder); serializeRecord(44L, bufferBuilder); return new BufferOrEvent(bufferConsumer.build(), 0, false); } private StreamTaskNetworkInput createStreamTaskNetworkInput(List<BufferOrEvent> buffers, DataOutput output) { return new StreamTaskNetworkInput<>( new CheckpointedInputGate( new MockInputGate(1, buffers, false), new CheckpointBarrierTracker(1, new DummyCheckpointInvokable())), LongSerializer.INSTANCE, ioManager, new StatusWatermarkValve(1, output), 0); } private void serializeRecord(long value, BufferBuilder bufferBuilder) throws IOException { RecordSerializer<SerializationDelegate<StreamElement>> serializer = new SpanningRecordSerializer<>(); SerializationDelegate<StreamElement> serializationDelegate = new SerializationDelegate<>( new StreamElementSerializer<>(LongSerializer.INSTANCE)); serializationDelegate.setInstance(new StreamRecord<>(value)); serializer.serializeRecord(serializationDelegate); assertFalse(serializer.copyToBufferBuilder(bufferBuilder).isFullBuffer()); } private static void assertHasNextElement(StreamTaskNetworkInput input, DataOutput output) throws Exception { assertTrue(input.getAvailableFuture().isDone()); InputStatus status = input.emitNext(output); assertThat(status, 
is(InputStatus.MORE_AVAILABLE)); } private static class TestRecordDeserializer extends SpillingAdaptiveSpanningRecordDeserializer<DeserializationDelegate<StreamElement>> { private boolean cleared = false; public TestRecordDeserializer(String[] tmpDirectories) { super(tmpDirectories); } @Override public void clear() { cleared = true; } public boolean isCleared() { return cleared; } } private static class NoOpDataOutput<T> implements DataOutput<T> { @Override public void emitRecord(StreamRecord<T> record) { } @Override public void emitWatermark(Watermark watermark) { } @Override public void emitStreamStatus(StreamStatus streamStatus) { } @Override public void emitLatencyMarker(LatencyMarker latencyMarker) { } } private static class VerifyRecordsDataOutput<T> extends NoOpDataOutput<T> { private int numberOfEmittedRecords; @Override public void emitRecord(StreamRecord<T> record) { numberOfEmittedRecords++; } int getNumberOfEmittedRecords() { return numberOfEmittedRecords; } } }
Inverted. I left it out initially since 2 vs 3 lines of code doesn't matter that much and I was just moving this code and too many modifications while moving the code can also be confusing :)
private Optional<BufferOrEvent> handleEmptyBuffer() throws Exception { if (!inputGate.isFinished()) { return Optional.empty(); } if (!endOfStream) { endOfStream = true; releaseBlocksAndResetBarriers(); return pollNext(); } else { isFinished = true; return Optional.empty(); } }
if (!endOfStream) {
private Optional<BufferOrEvent> handleEmptyBuffer() throws Exception { if (!inputGate.isFinished()) { return Optional.empty(); } if (endOfStream) { isFinished = true; return Optional.empty(); } else { endOfStream = true; releaseBlocksAndResetBarriers(); return pollNext(); } }
class BarrierBuffer implements CheckpointBarrierHandler { private static final Logger LOG = LoggerFactory.getLogger(BarrierBuffer.class); /** The gate that the buffer draws its input from. */ private final InputGate inputGate; /** Flags that indicate whether a channel is currently blocked/buffered. */ private final boolean[] blockedChannels; /** The total number of channels that this buffer handles data from. */ private final int totalNumberOfInputChannels; /** To utility to write blocked data to a file channel. */ private final BufferBlocker bufferBlocker; /** * The pending blocked buffer/event sequences. Must be consumed before requesting further data * from the input gate. */ private final ArrayDeque<BufferOrEventSequence> queuedBuffered; /** * The maximum number of bytes that may be buffered before an alignment is broken. -1 means * unlimited. */ private final long maxBufferedBytes; /** * The sequence of buffers/events that has been unblocked and must now be consumed before * requesting further data from the input gate. */ private BufferOrEventSequence currentBuffered; /** Handler that receives the checkpoint notifications. */ private AbstractInvokable toNotifyOnCheckpoint; /** The ID of the checkpoint for which we expect barriers. */ private long currentCheckpointId = -1L; /** * The number of received barriers (= number of blocked/buffered channels) IMPORTANT: A canceled * checkpoint must always have 0 barriers. */ private int numBarriersReceived; /** The number of already closed channels. */ private int numClosedChannels; /** The number of bytes in the queued spilled sequences. */ private long numQueuedBytes; /** The timestamp as in {@link System private long startOfAlignmentTimestamp; /** The time (in nanoseconds) that the latest alignment took. */ private long latestAlignmentDurationNanos; /** Flag to indicate whether we have drawn all available input. */ private boolean endOfStream; private boolean isFinished; /** * Creates a new checkpoint stream aligner. 
* * <p>There is no limit to how much data may be buffered during an alignment. * * @param inputGate The input gate to draw the buffers and events from. * @param bufferBlocker The buffer blocker to hold the buffers and events for channels with barrier. * * @throws IOException Thrown, when the spilling to temp files cannot be initialized. */ public BarrierBuffer(InputGate inputGate, BufferBlocker bufferBlocker) throws IOException { this (inputGate, bufferBlocker, -1); } /** * Creates a new checkpoint stream aligner. * * <p>The aligner will allow only alignments that buffer up to the given number of bytes. * When that number is exceeded, it will stop the alignment and notify the task that the * checkpoint has been cancelled. * * @param inputGate The input gate to draw the buffers and events from. * @param bufferBlocker The buffer blocker to hold the buffers and events for channels with barrier. * @param maxBufferedBytes The maximum bytes to be buffered before the checkpoint aborts. * * @throws IOException Thrown, when the spilling to temp files cannot be initialized. 
*/ public BarrierBuffer(InputGate inputGate, BufferBlocker bufferBlocker, long maxBufferedBytes) throws IOException { checkArgument(maxBufferedBytes == -1 || maxBufferedBytes > 0); this.inputGate = inputGate; this.maxBufferedBytes = maxBufferedBytes; this.totalNumberOfInputChannels = inputGate.getNumberOfInputChannels(); this.blockedChannels = new boolean[this.totalNumberOfInputChannels]; this.bufferBlocker = checkNotNull(bufferBlocker); this.queuedBuffered = new ArrayDeque<BufferOrEventSequence>(); } @Override public CompletableFuture<?> isAvailable() { if (currentBuffered == null) { return inputGate.isAvailable(); } return AVAILABLE; } @Override public Optional<BufferOrEvent> pollNext() throws Exception { while (true) { Optional<BufferOrEvent> next; if (currentBuffered == null) { next = inputGate.pollNext(); } else { next = Optional.ofNullable(currentBuffered.getNext()); if (!next.isPresent()) { completeBufferedSequence(); return pollNext(); } } if (!next.isPresent()) { return handleEmptyBuffer(); } BufferOrEvent bufferOrEvent = next.get(); if (isBlocked(bufferOrEvent.getChannelIndex())) { bufferBlocker.add(bufferOrEvent); checkSizeLimit(); } else if (bufferOrEvent.isBuffer()) { return next; } else if (bufferOrEvent.getEvent().getClass() == CheckpointBarrier.class) { if (!endOfStream) { processBarrier((CheckpointBarrier) bufferOrEvent.getEvent(), bufferOrEvent.getChannelIndex()); } } else if (bufferOrEvent.getEvent().getClass() == CancelCheckpointMarker.class) { processCancellationBarrier((CancelCheckpointMarker) bufferOrEvent.getEvent()); } else { if (bufferOrEvent.getEvent().getClass() == EndOfPartitionEvent.class) { processEndOfPartition(); } return next; } } } private void completeBufferedSequence() throws IOException { LOG.debug("{}: Finished feeding back buffered data.", inputGate.getOwningTaskName()); currentBuffered.cleanup(); currentBuffered = queuedBuffered.pollFirst(); if (currentBuffered != null) { currentBuffered.open(); numQueuedBytes -= 
currentBuffered.size(); } } private void processBarrier(CheckpointBarrier receivedBarrier, int channelIndex) throws Exception { final long barrierId = receivedBarrier.getId(); if (totalNumberOfInputChannels == 1) { if (barrierId > currentCheckpointId) { currentCheckpointId = barrierId; notifyCheckpoint(receivedBarrier); } return; } if (numBarriersReceived > 0) { if (barrierId == currentCheckpointId) { onBarrier(channelIndex); } else if (barrierId > currentCheckpointId) { LOG.warn("{}: Received checkpoint barrier for checkpoint {} before completing current checkpoint {}. " + "Skipping current checkpoint.", inputGate.getOwningTaskName(), barrierId, currentCheckpointId); notifyAbort(currentCheckpointId, new CheckpointDeclineSubsumedException(barrierId)); releaseBlocksAndResetBarriers(); beginNewAlignment(barrierId, channelIndex); } else { return; } } else if (barrierId > currentCheckpointId) { beginNewAlignment(barrierId, channelIndex); } else { return; } if (numBarriersReceived + numClosedChannels == totalNumberOfInputChannels) { if (LOG.isDebugEnabled()) { LOG.debug("{}: Received all barriers, triggering checkpoint {} at {}.", inputGate.getOwningTaskName(), receivedBarrier.getId(), receivedBarrier.getTimestamp()); } releaseBlocksAndResetBarriers(); notifyCheckpoint(receivedBarrier); } } private void processCancellationBarrier(CancelCheckpointMarker cancelBarrier) throws Exception { final long barrierId = cancelBarrier.getCheckpointId(); if (totalNumberOfInputChannels == 1) { if (barrierId > currentCheckpointId) { currentCheckpointId = barrierId; notifyAbortOnCancellationBarrier(barrierId); } return; } if (numBarriersReceived > 0) { if (barrierId == currentCheckpointId) { if (LOG.isDebugEnabled()) { LOG.debug("{}: Checkpoint {} canceled, aborting alignment.", inputGate.getOwningTaskName(), barrierId); } releaseBlocksAndResetBarriers(); notifyAbortOnCancellationBarrier(barrierId); } else if (barrierId > currentCheckpointId) { LOG.warn("{}: Received cancellation 
barrier for checkpoint {} before completing current checkpoint {}. " + "Skipping current checkpoint.", inputGate.getOwningTaskName(), barrierId, currentCheckpointId); releaseBlocksAndResetBarriers(); currentCheckpointId = barrierId; startOfAlignmentTimestamp = 0L; latestAlignmentDurationNanos = 0L; notifyAbort(currentCheckpointId, new CheckpointDeclineSubsumedException(barrierId)); notifyAbortOnCancellationBarrier(barrierId); } } else if (barrierId > currentCheckpointId) { currentCheckpointId = barrierId; startOfAlignmentTimestamp = 0L; latestAlignmentDurationNanos = 0L; if (LOG.isDebugEnabled()) { LOG.debug("{}: Checkpoint {} canceled, skipping alignment.", inputGate.getOwningTaskName(), barrierId); } notifyAbortOnCancellationBarrier(barrierId); } } private void processEndOfPartition() throws Exception { numClosedChannels++; if (numBarriersReceived > 0) { notifyAbort(currentCheckpointId, new InputEndOfStreamException()); releaseBlocksAndResetBarriers(); } } private void notifyCheckpoint(CheckpointBarrier checkpointBarrier) throws Exception { if (toNotifyOnCheckpoint != null) { CheckpointMetaData checkpointMetaData = new CheckpointMetaData(checkpointBarrier.getId(), checkpointBarrier.getTimestamp()); long bytesBuffered = currentBuffered != null ? 
currentBuffered.size() : 0L; CheckpointMetrics checkpointMetrics = new CheckpointMetrics() .setBytesBufferedInAlignment(bytesBuffered) .setAlignmentDurationNanos(latestAlignmentDurationNanos); toNotifyOnCheckpoint.triggerCheckpointOnBarrier( checkpointMetaData, checkpointBarrier.getCheckpointOptions(), checkpointMetrics); } } private void notifyAbortOnCancellationBarrier(long checkpointId) throws Exception { notifyAbort(checkpointId, new CheckpointDeclineOnCancellationBarrierException()); } private void notifyAbort(long checkpointId, CheckpointDeclineException cause) throws Exception { if (toNotifyOnCheckpoint != null) { toNotifyOnCheckpoint.abortCheckpointOnBarrier(checkpointId, cause); } } private void checkSizeLimit() throws Exception { if (maxBufferedBytes > 0 && (numQueuedBytes + bufferBlocker.getBytesBlocked()) > maxBufferedBytes) { LOG.info("{}: Checkpoint {} aborted because alignment volume limit ({} bytes) exceeded.", inputGate.getOwningTaskName(), currentCheckpointId, maxBufferedBytes); releaseBlocksAndResetBarriers(); notifyAbort(currentCheckpointId, new AlignmentLimitExceededException(maxBufferedBytes)); } } @Override public void registerCheckpointEventHandler(AbstractInvokable toNotifyOnCheckpoint) { if (this.toNotifyOnCheckpoint == null) { this.toNotifyOnCheckpoint = toNotifyOnCheckpoint; } else { throw new IllegalStateException("BarrierBuffer already has a registered checkpoint notifyee"); } } @Override public boolean isEmpty() { return currentBuffered == null; } @Override public boolean isFinished() { return isFinished; } @Override public void cleanup() throws IOException { bufferBlocker.close(); if (currentBuffered != null) { currentBuffered.cleanup(); } for (BufferOrEventSequence seq : queuedBuffered) { seq.cleanup(); } queuedBuffered.clear(); numQueuedBytes = 0L; } private void beginNewAlignment(long checkpointId, int channelIndex) throws IOException { currentCheckpointId = checkpointId; onBarrier(channelIndex); startOfAlignmentTimestamp = 
System.nanoTime(); if (LOG.isDebugEnabled()) { LOG.debug("{}: Starting stream alignment for checkpoint {}.", inputGate.getOwningTaskName(), checkpointId); } } /** * Checks whether the channel with the given index is blocked. * * @param channelIndex The channel index to check. * @return True if the channel is blocked, false if not. */ private boolean isBlocked(int channelIndex) { return blockedChannels[channelIndex]; } /** * Blocks the given channel index, from which a barrier has been received. * * @param channelIndex The channel index to block. */ private void onBarrier(int channelIndex) throws IOException { if (!blockedChannels[channelIndex]) { blockedChannels[channelIndex] = true; numBarriersReceived++; if (LOG.isDebugEnabled()) { LOG.debug("{}: Received barrier from channel {}.", inputGate.getOwningTaskName(), channelIndex); } } else { throw new IOException("Stream corrupt: Repeated barrier for same checkpoint on input " + channelIndex); } } /** * Releases the blocks on all channels and resets the barrier count. * Makes sure the just written data is the next to be consumed. 
*/ private void releaseBlocksAndResetBarriers() throws IOException { LOG.debug("{}: End of stream alignment, feeding buffered data back.", inputGate.getOwningTaskName()); for (int i = 0; i < blockedChannels.length; i++) { blockedChannels[i] = false; } if (currentBuffered == null) { currentBuffered = bufferBlocker.rollOverReusingResources(); if (currentBuffered != null) { currentBuffered.open(); } } else { LOG.debug("{}: Checkpoint skipped via buffered data:" + "Pushing back current alignment buffers and feeding back new alignment data first.", inputGate.getOwningTaskName()); BufferOrEventSequence bufferedNow = bufferBlocker.rollOverWithoutReusingResources(); if (bufferedNow != null) { bufferedNow.open(); queuedBuffered.addFirst(currentBuffered); numQueuedBytes += currentBuffered.size(); currentBuffered = bufferedNow; } } if (LOG.isDebugEnabled()) { LOG.debug("{}: Size of buffered data: {} bytes", inputGate.getOwningTaskName(), currentBuffered == null ? 0L : currentBuffered.size()); } numBarriersReceived = 0; if (startOfAlignmentTimestamp > 0) { latestAlignmentDurationNanos = System.nanoTime() - startOfAlignmentTimestamp; startOfAlignmentTimestamp = 0; } } /** * Gets the ID defining the current pending, or just completed, checkpoint. * * @return The ID of the pending of completed checkpoint. */ public long getCurrentCheckpointId() { return this.currentCheckpointId; } @Override public long getAlignmentDurationNanos() { long start = this.startOfAlignmentTimestamp; if (start <= 0) { return latestAlignmentDurationNanos; } else { return System.nanoTime() - start; } } @Override public String toString() { return String.format("%s: last checkpoint: %d, current barriers: %d, closed channels: %d", inputGate.getOwningTaskName(), currentCheckpointId, numBarriersReceived, numClosedChannels); } }
class BarrierBuffer implements CheckpointBarrierHandler { private static final Logger LOG = LoggerFactory.getLogger(BarrierBuffer.class); /** The gate that the buffer draws its input from. */ private final InputGate inputGate; /** Flags that indicate whether a channel is currently blocked/buffered. */ private final boolean[] blockedChannels; /** The total number of channels that this buffer handles data from. */ private final int totalNumberOfInputChannels; /** To utility to write blocked data to a file channel. */ private final BufferBlocker bufferBlocker; /** * The pending blocked buffer/event sequences. Must be consumed before requesting further data * from the input gate. */ private final ArrayDeque<BufferOrEventSequence> queuedBuffered; /** * The maximum number of bytes that may be buffered before an alignment is broken. -1 means * unlimited. */ private final long maxBufferedBytes; private final String taskName; /** * The sequence of buffers/events that has been unblocked and must now be consumed before * requesting further data from the input gate. */ private BufferOrEventSequence currentBuffered; /** Handler that receives the checkpoint notifications. */ private AbstractInvokable toNotifyOnCheckpoint; /** The ID of the checkpoint for which we expect barriers. */ private long currentCheckpointId = -1L; /** * The number of received barriers (= number of blocked/buffered channels) IMPORTANT: A canceled * checkpoint must always have 0 barriers. */ private int numBarriersReceived; /** The number of already closed channels. */ private int numClosedChannels; /** The number of bytes in the queued spilled sequences. */ private long numQueuedBytes; /** The timestamp as in {@link System private long startOfAlignmentTimestamp; /** The time (in nanoseconds) that the latest alignment took. */ private long latestAlignmentDurationNanos; /** Flag to indicate whether we have drawn all available input. */ private boolean endOfStream; /** Indicate end of the input. 
Set to true after encountering {@link * {@link private boolean isFinished; /** * Creates a new checkpoint stream aligner. * * <p>There is no limit to how much data may be buffered during an alignment. * * @param inputGate The input gate to draw the buffers and events from. * @param bufferBlocker The buffer blocker to hold the buffers and events for channels with barrier. */ @VisibleForTesting BarrierBuffer(InputGate inputGate, BufferBlocker bufferBlocker) { this (inputGate, bufferBlocker, -1, "Testing: No task associated"); } /** * Creates a new checkpoint stream aligner. * * <p>The aligner will allow only alignments that buffer up to the given number of bytes. * When that number is exceeded, it will stop the alignment and notify the task that the * checkpoint has been cancelled. * * @param inputGate The input gate to draw the buffers and events from. * @param bufferBlocker The buffer blocker to hold the buffers and events for channels with barrier. * @param maxBufferedBytes The maximum bytes to be buffered before the checkpoint aborts. * @param taskName The task name for logging. 
*/ BarrierBuffer(InputGate inputGate, BufferBlocker bufferBlocker, long maxBufferedBytes, String taskName) { checkArgument(maxBufferedBytes == -1 || maxBufferedBytes > 0); this.inputGate = inputGate; this.maxBufferedBytes = maxBufferedBytes; this.totalNumberOfInputChannels = inputGate.getNumberOfInputChannels(); this.blockedChannels = new boolean[this.totalNumberOfInputChannels]; this.bufferBlocker = checkNotNull(bufferBlocker); this.queuedBuffered = new ArrayDeque<BufferOrEventSequence>(); this.taskName = taskName; } @Override public CompletableFuture<?> isAvailable() { if (currentBuffered == null) { return inputGate.isAvailable(); } return AVAILABLE; } @Override public Optional<BufferOrEvent> pollNext() throws Exception { while (true) { Optional<BufferOrEvent> next; if (currentBuffered == null) { next = inputGate.pollNext(); } else { next = Optional.ofNullable(currentBuffered.getNext()); if (!next.isPresent()) { completeBufferedSequence(); return pollNext(); } } if (!next.isPresent()) { return handleEmptyBuffer(); } BufferOrEvent bufferOrEvent = next.get(); if (isBlocked(bufferOrEvent.getChannelIndex())) { bufferBlocker.add(bufferOrEvent); checkSizeLimit(); } else if (bufferOrEvent.isBuffer()) { return next; } else if (bufferOrEvent.getEvent().getClass() == CheckpointBarrier.class) { if (!endOfStream) { processBarrier((CheckpointBarrier) bufferOrEvent.getEvent(), bufferOrEvent.getChannelIndex()); } } else if (bufferOrEvent.getEvent().getClass() == CancelCheckpointMarker.class) { processCancellationBarrier((CancelCheckpointMarker) bufferOrEvent.getEvent()); } else { if (bufferOrEvent.getEvent().getClass() == EndOfPartitionEvent.class) { processEndOfPartition(); } return next; } } } private void completeBufferedSequence() throws IOException { LOG.debug("{}: Finished feeding back buffered data.", taskName); currentBuffered.cleanup(); currentBuffered = queuedBuffered.pollFirst(); if (currentBuffered != null) { currentBuffered.open(); numQueuedBytes -= 
currentBuffered.size(); } } private void processBarrier(CheckpointBarrier receivedBarrier, int channelIndex) throws Exception { final long barrierId = receivedBarrier.getId(); if (totalNumberOfInputChannels == 1) { if (barrierId > currentCheckpointId) { currentCheckpointId = barrierId; notifyCheckpoint(receivedBarrier); } return; } if (numBarriersReceived > 0) { if (barrierId == currentCheckpointId) { onBarrier(channelIndex); } else if (barrierId > currentCheckpointId) { LOG.warn("{}: Received checkpoint barrier for checkpoint {} before completing current checkpoint {}. " + "Skipping current checkpoint.", taskName, barrierId, currentCheckpointId); notifyAbort(currentCheckpointId, new CheckpointDeclineSubsumedException(barrierId)); releaseBlocksAndResetBarriers(); beginNewAlignment(barrierId, channelIndex); } else { return; } } else if (barrierId > currentCheckpointId) { beginNewAlignment(barrierId, channelIndex); } else { return; } if (numBarriersReceived + numClosedChannels == totalNumberOfInputChannels) { if (LOG.isDebugEnabled()) { LOG.debug("{}: Received all barriers, triggering checkpoint {} at {}.", taskName, receivedBarrier.getId(), receivedBarrier.getTimestamp()); } releaseBlocksAndResetBarriers(); notifyCheckpoint(receivedBarrier); } } private void processCancellationBarrier(CancelCheckpointMarker cancelBarrier) throws Exception { final long barrierId = cancelBarrier.getCheckpointId(); if (totalNumberOfInputChannels == 1) { if (barrierId > currentCheckpointId) { currentCheckpointId = barrierId; notifyAbortOnCancellationBarrier(barrierId); } return; } if (numBarriersReceived > 0) { if (barrierId == currentCheckpointId) { if (LOG.isDebugEnabled()) { LOG.debug("{}: Checkpoint {} canceled, aborting alignment.", taskName, barrierId); } releaseBlocksAndResetBarriers(); notifyAbortOnCancellationBarrier(barrierId); } else if (barrierId > currentCheckpointId) { LOG.warn("{}: Received cancellation barrier for checkpoint {} before completing current checkpoint {}. 
" + "Skipping current checkpoint.", taskName, barrierId, currentCheckpointId); releaseBlocksAndResetBarriers(); currentCheckpointId = barrierId; startOfAlignmentTimestamp = 0L; latestAlignmentDurationNanos = 0L; notifyAbort(currentCheckpointId, new CheckpointDeclineSubsumedException(barrierId)); notifyAbortOnCancellationBarrier(barrierId); } } else if (barrierId > currentCheckpointId) { currentCheckpointId = barrierId; startOfAlignmentTimestamp = 0L; latestAlignmentDurationNanos = 0L; if (LOG.isDebugEnabled()) { LOG.debug("{}: Checkpoint {} canceled, skipping alignment.", taskName, barrierId); } notifyAbortOnCancellationBarrier(barrierId); } } private void processEndOfPartition() throws Exception { numClosedChannels++; if (numBarriersReceived > 0) { notifyAbort(currentCheckpointId, new InputEndOfStreamException()); releaseBlocksAndResetBarriers(); } } private void notifyCheckpoint(CheckpointBarrier checkpointBarrier) throws Exception { if (toNotifyOnCheckpoint != null) { CheckpointMetaData checkpointMetaData = new CheckpointMetaData(checkpointBarrier.getId(), checkpointBarrier.getTimestamp()); long bytesBuffered = currentBuffered != null ? 
currentBuffered.size() : 0L; CheckpointMetrics checkpointMetrics = new CheckpointMetrics() .setBytesBufferedInAlignment(bytesBuffered) .setAlignmentDurationNanos(latestAlignmentDurationNanos); toNotifyOnCheckpoint.triggerCheckpointOnBarrier( checkpointMetaData, checkpointBarrier.getCheckpointOptions(), checkpointMetrics); } } private void notifyAbortOnCancellationBarrier(long checkpointId) throws Exception { notifyAbort(checkpointId, new CheckpointDeclineOnCancellationBarrierException()); } private void notifyAbort(long checkpointId, CheckpointDeclineException cause) throws Exception { if (toNotifyOnCheckpoint != null) { toNotifyOnCheckpoint.abortCheckpointOnBarrier(checkpointId, cause); } } private void checkSizeLimit() throws Exception { if (maxBufferedBytes > 0 && (numQueuedBytes + bufferBlocker.getBytesBlocked()) > maxBufferedBytes) { LOG.info("{}: Checkpoint {} aborted because alignment volume limit ({} bytes) exceeded.", taskName, currentCheckpointId, maxBufferedBytes); releaseBlocksAndResetBarriers(); notifyAbort(currentCheckpointId, new AlignmentLimitExceededException(maxBufferedBytes)); } } @Override public void registerCheckpointEventHandler(AbstractInvokable toNotifyOnCheckpoint) { if (this.toNotifyOnCheckpoint == null) { this.toNotifyOnCheckpoint = toNotifyOnCheckpoint; } else { throw new IllegalStateException("BarrierBuffer already has a registered checkpoint notifyee"); } } @Override public boolean isEmpty() { return currentBuffered == null; } @Override public boolean isFinished() { return isFinished; } @Override public void cleanup() throws IOException { bufferBlocker.close(); if (currentBuffered != null) { currentBuffered.cleanup(); } for (BufferOrEventSequence seq : queuedBuffered) { seq.cleanup(); } queuedBuffered.clear(); numQueuedBytes = 0L; } private void beginNewAlignment(long checkpointId, int channelIndex) throws IOException { currentCheckpointId = checkpointId; onBarrier(channelIndex); startOfAlignmentTimestamp = System.nanoTime(); if 
(LOG.isDebugEnabled()) { LOG.debug("{}: Starting stream alignment for checkpoint {}.", taskName, checkpointId); } } /** * Checks whether the channel with the given index is blocked. * * @param channelIndex The channel index to check. * @return True if the channel is blocked, false if not. */ private boolean isBlocked(int channelIndex) { return blockedChannels[channelIndex]; } /** * Blocks the given channel index, from which a barrier has been received. * * @param channelIndex The channel index to block. */ private void onBarrier(int channelIndex) throws IOException { if (!blockedChannels[channelIndex]) { blockedChannels[channelIndex] = true; numBarriersReceived++; if (LOG.isDebugEnabled()) { LOG.debug("{}: Received barrier from channel {}.", taskName, channelIndex); } } else { throw new IOException("Stream corrupt: Repeated barrier for same checkpoint on input " + channelIndex); } } /** * Releases the blocks on all channels and resets the barrier count. * Makes sure the just written data is the next to be consumed. */ private void releaseBlocksAndResetBarriers() throws IOException { LOG.debug("{}: End of stream alignment, feeding buffered data back.", taskName); for (int i = 0; i < blockedChannels.length; i++) { blockedChannels[i] = false; } if (currentBuffered == null) { currentBuffered = bufferBlocker.rollOverReusingResources(); if (currentBuffered != null) { currentBuffered.open(); } } else { LOG.debug("{}: Checkpoint skipped via buffered data:" + "Pushing back current alignment buffers and feeding back new alignment data first.", taskName); BufferOrEventSequence bufferedNow = bufferBlocker.rollOverWithoutReusingResources(); if (bufferedNow != null) { bufferedNow.open(); queuedBuffered.addFirst(currentBuffered); numQueuedBytes += currentBuffered.size(); currentBuffered = bufferedNow; } } if (LOG.isDebugEnabled()) { LOG.debug("{}: Size of buffered data: {} bytes", taskName, currentBuffered == null ? 
0L : currentBuffered.size()); } numBarriersReceived = 0; if (startOfAlignmentTimestamp > 0) { latestAlignmentDurationNanos = System.nanoTime() - startOfAlignmentTimestamp; startOfAlignmentTimestamp = 0; } } /** * Gets the ID defining the current pending, or just completed, checkpoint. * * @return The ID of the pending of completed checkpoint. */ public long getCurrentCheckpointId() { return this.currentCheckpointId; } @Override public long getAlignmentDurationNanos() { long start = this.startOfAlignmentTimestamp; if (start <= 0) { return latestAlignmentDurationNanos; } else { return System.nanoTime() - start; } } @Override public String toString() { return String.format("%s: last checkpoint: %d, current barriers: %d, closed channels: %d", taskName, currentCheckpointId, numBarriersReceived, numClosedChannels); } }
dont we need orgname and modulename here?
private static CompileResult compileOnJBallerina(String sourceFilePath) { Path sourcePath = Paths.get(sourceFilePath); String packageName = sourcePath.getFileName().toString(); Path sourceRoot = resourceDir.resolve(sourcePath.getParent()); CompilerContext context = new CompilerContext(); CompilerOptions options = CompilerOptions.getInstance(context); options.put(PROJECT_DIR, sourceRoot.toString()); options.put(COMPILER_PHASE, CompilerPhase.BIR_GEN.toString()); options.put(PRESERVE_WHITESPACE, "false"); CompileResult compileResult = compile(context, packageName, CompilerPhase.BIR_GEN, false); if (compileResult.getErrorCount() > 0) { return compileResult; } BLangPackage bLangPackage = (BLangPackage) compileResult.getAST(); byte[] compiledJar = JVMCodeGen.generateJarBinary(bLangPackage, context, packageName); compileResult.setEntryClassName(FileUtils.cleanupFileExtension(packageName)); classLoader.setClassContent(compiledJar); Class<?> clazz = classLoader.loadClass(compileResult.getEntryClassName()); String funcName = "__init_"; try { Method method = clazz.getDeclaredMethod(funcName, Strand.class); method.invoke(null, new Strand()); } catch (Exception e) { throw new RuntimeException("Error while invoking function '" + funcName + "'", e); } compileResult.setEntryClass(clazz); return compileResult; }
String funcName = "__init_";
private static CompileResult compileOnJBallerina(String sourceFilePath) { Path sourcePath = Paths.get(sourceFilePath); String packageName = sourcePath.getFileName().toString(); Path sourceRoot = resourceDir.resolve(sourcePath.getParent()); CompilerContext context = new CompilerContext(); CompilerOptions options = CompilerOptions.getInstance(context); options.put(PROJECT_DIR, sourceRoot.toString()); options.put(COMPILER_PHASE, CompilerPhase.BIR_GEN.toString()); options.put(PRESERVE_WHITESPACE, "false"); CompileResult compileResult = compile(context, packageName, CompilerPhase.BIR_GEN, false); if (compileResult.getErrorCount() > 0) { return compileResult; } BLangPackage bLangPackage = (BLangPackage) compileResult.getAST(); byte[] compiledJar = JVMCodeGen.generateJarBinary(bLangPackage, context, packageName); JBallerinaInMemoryClassLoader classLoader = new JBallerinaInMemoryClassLoader(compiledJar); String entryClassName = FileUtils.cleanupFileExtension(packageName); Class<?> clazz = classLoader.loadClass(entryClassName); String funcName; PackageID pkgID = bLangPackage.packageID; if (!pkgID.name.value.equalsIgnoreCase(".")) { funcName = pkgID.orgName.value + "/" + pkgID.name.value + ":" + pkgID.version.value + MODULE_INIT_SUFFIX; } else { funcName = MODULE_INIT_SUFFIX; } try { Method method = clazz.getDeclaredMethod(funcName, Strand.class); method.invoke(null, new Strand()); } catch (Exception e) { throw new RuntimeException("Error while invoking function '" + funcName + "'", e); } compileResult.setEntryClass(clazz); return compileResult; }
class BCompileUtil { private static Path resourceDir = Paths.get("src/test/resources").toAbsolutePath(); private static final JBallerinaInMemoryClassLoader classLoader = new JBallerinaInMemoryClassLoader(); /** * Compile and return the semantic errors. Error scenarios cannot use this method. * * @param sourceFilePath Path to source module/file * @return compileResult */ public static CompileResult compileAndSetup(String sourceFilePath) { CompileResult compileResult = compile(sourceFilePath, CompilerPhase.CODE_GEN); BRunUtil.invokePackageInit(compileResult); return compileResult; } /** * Compile and return the semantic errors. Error scenarios cannot use this method. * * @param obj this is to find the original callers location. * @param sourceRoot root path of the modules * @param packageName name of the module to compile * @return compileResult */ public static CompileResult compileAndSetup(Object obj, String sourceRoot, String packageName) { CompileResult compileResult = compile(obj, sourceRoot, packageName); BRunUtil.invokePackageInit(compileResult, packageName); return compileResult; } /** * Compile and return the semantic errors. * * @param sourceFilePath Path to source module/file * @return Semantic errors */ public static CompileResult compileAndGetBIR(String sourceFilePath) { return compile(sourceFilePath, CompilerPhase.BIR_GEN); } /** * Compile and return the semantic errors. 
* * @param sourceFilePath Path to source module/file * @return Semantic errors */ public static CompileResult compile(String sourceFilePath) { if (jBallerinaTestsEnabled()) { return compileOnJBallerina(sourceFilePath); } return compile(sourceFilePath, CompilerPhase.CODE_GEN); } static boolean jBallerinaTestsEnabled() { String value = System.getProperty(ENABLE_JBALLERINA_TESTS); return value != null && Boolean.valueOf(value); } public static CompileResult compileWithoutExperimentalFeatures(String sourceFilePath) { return compile(sourceFilePath, CompilerPhase.CODE_GEN, false); } public static CompileResult compile(String sourceFilePath, boolean isSiddhiRuntimeEnabled) { Path sourcePath = Paths.get(sourceFilePath); String packageName = sourcePath.getFileName().toString(); Path sourceRoot = resourceDir.resolve(sourcePath.getParent()); return compile(sourceRoot.toString(), packageName, CompilerPhase.CODE_GEN, isSiddhiRuntimeEnabled, true); } /** * Compile and return the semantic errors. * * @param obj this is to find the original callers location. 
* @param sourceRoot root path of the modules * @param packageName name of the module to compile * @return Semantic errors */ public static CompileResult compile(Object obj, String sourceRoot, String packageName) { try { String effectiveSource; CodeSource codeSource = obj.getClass().getProtectionDomain().getCodeSource(); URL location = codeSource.getLocation(); URI locationUri = location.toURI(); Path pathLocation = Paths.get(locationUri); String filePath = concatFileName(sourceRoot, pathLocation); Path rootPath = Paths.get(filePath); Path packagePath = Paths.get(packageName); if (Files.isDirectory(packagePath)) { String[] pkgParts = packageName.split("\\/"); List<Name> pkgNameComps = Arrays.stream(pkgParts) .map(part -> { if (part.equals("")) { return Names.EMPTY; } else if (part.equals("_")) { return Names.EMPTY; } return new Name(part); }) .collect(Collectors.toList()); PackageID pkgId = new PackageID(Names.ANON_ORG, pkgNameComps, Names.DEFAULT_VERSION); effectiveSource = pkgId.getName().getValue(); return compile(rootPath.toString(), effectiveSource, CompilerPhase.CODE_GEN); } else { effectiveSource = packageName; return compile(rootPath.toString(), effectiveSource, CompilerPhase.CODE_GEN, new FileSystemProjectDirectory(rootPath)); } } catch (URISyntaxException e) { throw new IllegalArgumentException("error while running test: " + e.getMessage()); } } /** * <p> * concatenates a given filename to the provided path in directory. * </p> * <p> * <b>Note : </b> this function is relevant since in Unix the directory would be separated from backslash and * in unix the folder will be separated from forward slash. * </p> * * @param fileName name of the file. * @param pathLocation location of the directory. * @return the path with directoryName + file. 
*/ public static String concatFileName(String fileName, Path pathLocation) { final String windowsFolderSeparator = "\\"; final String unixFolderSeparator = "/"; StringBuilder path = new StringBuilder(pathLocation.toAbsolutePath().toString()); if (pathLocation.endsWith(windowsFolderSeparator)) { path = path.append(windowsFolderSeparator).append(fileName); } else { path = path.append(unixFolderSeparator).append(fileName); } return path.toString(); } /** * Compile and return the semantic errors. * * @param sourceFilePath Path to source package/file * @param compilerPhase Compiler phase * @param enableExpFeatures Flag indicating to enable the experimental feature * @return Semantic errors */ public static CompileResult compile(String sourceFilePath, CompilerPhase compilerPhase, boolean enableExpFeatures) { Path sourcePath = Paths.get(sourceFilePath); String packageName = sourcePath.getFileName().toString(); Path sourceRoot = resourceDir.resolve(sourcePath.getParent()); return compile(sourceRoot.toString(), packageName, compilerPhase, enableExpFeatures); } /** * Compile and return the semantic errors. * * @param sourceFilePath Path to source package/file * @param compilerPhase Compiler phase * @return Semantic errors */ public static CompileResult compile(String sourceFilePath, CompilerPhase compilerPhase) { return compile(sourceFilePath, compilerPhase, true); } /** * Compile and return the semantic errors. 
* * @param sourceRoot root path of the modules * @param packageName name of the module to compile * @param compilerPhase Compiler phase * @param enableExpFeatures Flag indicating to enable the experimental features * @return Semantic errors */ public static CompileResult compile(String sourceRoot, String packageName, CompilerPhase compilerPhase, boolean enableExpFeatures) { CompilerContext context = new CompilerContext(); CompilerOptions options = CompilerOptions.getInstance(context); options.put(PROJECT_DIR, sourceRoot); options.put(COMPILER_PHASE, compilerPhase.toString()); options.put(PRESERVE_WHITESPACE, "false"); options.put(CompilerOptionName.EXPERIMENTAL_FEATURES_ENABLED, Boolean.toString(enableExpFeatures)); return compile(context, packageName, compilerPhase, false); } /** * Compile and return the semantic errors. * * @param sourceRoot root path of the modules * @param packageName name of the module to compile * @param compilerPhase Compiler phase * @param isSiddhiRuntimeEnabled Flag indicating to enable siddhi runtime for stream processing * @param enableExpFeatures Flag indicating to enable the experimental features * @return Semantic errors */ public static CompileResult compile(String sourceRoot, String packageName, CompilerPhase compilerPhase, boolean isSiddhiRuntimeEnabled, boolean enableExpFeatures) { CompilerContext context = new CompilerContext(); CompilerOptions options = CompilerOptions.getInstance(context); options.put(PROJECT_DIR, sourceRoot); options.put(COMPILER_PHASE, compilerPhase.toString()); options.put(PRESERVE_WHITESPACE, "false"); options.put(CompilerOptionName.SIDDHI_RUNTIME_ENABLED, Boolean.toString(isSiddhiRuntimeEnabled)); options.put(CompilerOptionName.EXPERIMENTAL_FEATURES_ENABLED, Boolean.toString(enableExpFeatures)); return compile(context, packageName, compilerPhase, false); } /** * Compile and return the semantic errors. 
* * @param sourceRoot root path of the modules * @param packageName name of the module to compile * @param compilerPhase Compiler phase * @return Semantic errors */ public static CompileResult compile(String sourceRoot, String packageName, CompilerPhase compilerPhase) { return compile(sourceRoot, packageName, compilerPhase, true); } /** * Compile with tests and return the semantic errors. * * @param context Compiler Context * @param packageName name of the module to compile * @param compilerPhase Compiler phase * @return Semantic errors */ public static CompileResult compileWithTests(CompilerContext context, String packageName, CompilerPhase compilerPhase) { return compile(context, packageName, compilerPhase, true); } /** * Create a compiler context. * * @param sourceRoot source root or project directory path * @param compilerPhase Compiler phase * @return new compiler context object */ public static CompilerContext createCompilerContext(String sourceRoot, CompilerPhase compilerPhase) { return createCompilerContext(sourceRoot, compilerPhase, Boolean.TRUE); } public static CompilerContext createCompilerContext(String sourceRoot, CompilerPhase compilerPhase, boolean enableExpFeatures) { CompilerContext context = new CompilerContext(); CompilerOptions options = CompilerOptions.getInstance(context); options.put(PROJECT_DIR, sourceRoot); options.put(COMPILER_PHASE, compilerPhase.toString()); options.put(PRESERVE_WHITESPACE, "false"); options.put(TEST_ENABLED, "true"); options.put(SKIP_TESTS, "false"); options.put(CompilerOptionName.EXPERIMENTAL_FEATURES_ENABLED, Boolean.toString(enableExpFeatures)); return context; } public static CompileResult compile(String sourceRoot, String packageName, CompilerPhase compilerPhase, SourceDirectory sourceDirectory) { CompilerContext context = new CompilerContext(); CompilerOptions options = CompilerOptions.getInstance(context); options.put(PROJECT_DIR, sourceRoot); options.put(COMPILER_PHASE, compilerPhase.toString()); 
options.put(PRESERVE_WHITESPACE, "false"); options.put(CompilerOptionName.EXPERIMENTAL_FEATURES_ENABLED, Boolean.TRUE.toString()); context.put(SourceDirectory.class, sourceDirectory); CompileResult comResult = new CompileResult(); DiagnosticListener listener = comResult::addDiagnostic; context.put(DiagnosticListener.class, listener); Compiler compiler = Compiler.getInstance(context); BLangPackage packageNode = compiler.compile(packageName); comResult.setAST(packageNode); CompiledBinaryFile.ProgramFile programFile = compiler.getExecutableProgram(packageNode); if (programFile != null) { comResult.setProgFile(LauncherUtils.getExecutableProgram(programFile)); } return comResult; } private static CompileResult compile(CompilerContext context, String packageName, CompilerPhase compilerPhase, boolean withTests) { CompileResult comResult = new CompileResult(); DiagnosticListener listener = comResult::addDiagnostic; context.put(DiagnosticListener.class, listener); Compiler compiler = Compiler.getInstance(context); BLangPackage packageNode = compiler.compile(packageName, true); comResult.setAST(packageNode); if (comResult.getErrorCount() > 0) { return comResult; } else if (CompilerPhase.CODE_GEN.compareTo(compilerPhase) > 0 || compilerPhase == CompilerPhase.BIR_GEN) { return comResult; } CompiledBinaryFile.ProgramFile programFile; if (withTests && packageNode.containsTestablePkg()) { programFile = compiler.getExecutableProgram(packageNode.getTestablePkg()); } else { programFile = compiler.getExecutableProgram(packageNode); } if (programFile != null) { ProgramFile pFile = LauncherUtils.getExecutableProgram(programFile); comResult.setProgFile(pFile); } return comResult; } /** * Compile and return the compiled package node. 
* * @param sourceFilePath Path to source module/file * @return compiled module node */ public static BLangPackage compileAndGetPackage(String sourceFilePath) { Path sourcePath = Paths.get(sourceFilePath); String packageName = sourcePath.getFileName().toString(); Path sourceRoot = resourceDir.resolve(sourcePath.getParent()); CompilerContext context = new CompilerContext(); CompilerOptions options = CompilerOptions.getInstance(context); options.put(PROJECT_DIR, resourceDir.resolve(sourceRoot).toString()); options.put(COMPILER_PHASE, CompilerPhase.CODE_GEN.toString()); options.put(PRESERVE_WHITESPACE, "false"); options.put(CompilerOptionName.EXPERIMENTAL_FEATURES_ENABLED, Boolean.TRUE.toString()); CompileResult comResult = new CompileResult(); DiagnosticListener listener = comResult::addDiagnostic; context.put(DiagnosticListener.class, listener); Compiler compiler = Compiler.getInstance(context); return compiler.compile(packageName); } /** * Compile and run a ballerina file. * * @param sourceFilePath Path to the ballerina file. 
* @param functionName The name of the function to run */ public static void run(String sourceFilePath, String functionName) { CompileResult result = compile(sourceFilePath); ProgramFile programFile = result.getProgFile(); if (MAIN_FUNCTION_NAME.equals(functionName) && !programFile.isMainEPAvailable() && !programFile.isServiceEPAvailable()) { throw new RuntimeException("main function not found in '" + programFile.getProgramFilePath() + "'"); } if (programFile.isMainEPAvailable() || !MAIN_FUNCTION_NAME.equals(functionName)) { LauncherUtils.runMain(programFile, functionName, new String[0], false); } else { LauncherUtils.runServices(programFile); } } public static String readFileAsString(String path) throws IOException { InputStream is = ClassLoader.getSystemResourceAsStream(path); InputStreamReader inputStreamREader = null; BufferedReader br = null; StringBuilder sb = new StringBuilder(); try { inputStreamREader = new InputStreamReader(is, StandardCharsets.UTF_8); br = new BufferedReader(inputStreamREader); String content = br.readLine(); if (content == null) { return sb.toString(); } sb.append(content); while ((content = br.readLine()) != null) { sb.append('\n').append(content); } } finally { if (inputStreamREader != null) { try { inputStreamREader.close(); } catch (IOException ignore) { } } if (br != null) { try { br.close(); } catch (IOException ignore) { } } } return sb.toString(); } public static BMap<String, BValue> createAndGetStruct(ProgramFile programFile, String packagePath, String structName) { PackageInfo structPackageInfo = programFile.getPackageInfo(packagePath); StructureTypeInfo typeInfo = structPackageInfo.getStructInfo(structName); return BLangVMStructs.createBStruct(typeInfo); } /** * Used by IntelliJ IDEA plugin to provide semantic analyzing capability. * * @param classLoader a {@link ClassLoader} to be set as thread context class loader. This is used by {@link * java.util.ServiceLoader}. 
Otherwise semantic analyzing capability providing wont work since it * cant find core package. * @param sourceRoot source root of a project * @param fileName either the file name (if in project root) or the module name * @return list of diagnostics */ public static List<Diagnostic> getDiagnostics(ClassLoader classLoader, String sourceRoot, String fileName) { Thread.currentThread().setContextClassLoader(classLoader); CompilerContext context = new CompilerContext(); CompilerOptions options = CompilerOptions.getInstance(context); options.put(PROJECT_DIR, sourceRoot); options.put(COMPILER_PHASE, CompilerPhase.CODE_GEN.toString()); options.put(PRESERVE_WHITESPACE, "false"); options.put(CompilerOptionName.EXPERIMENTAL_FEATURES_ENABLED, Boolean.TRUE.toString()); CompileResult comResult = new CompileResult(); DiagnosticListener listener = comResult::addDiagnostic; context.put(DiagnosticListener.class, listener); Compiler compiler = Compiler.getInstance(context); BLangPackage entryPackageNode = compiler.compile(fileName); CompiledBinaryFile.ProgramFile programFile = compiler.getExecutableProgram(entryPackageNode); if (programFile != null) { comResult.setProgFile(LauncherUtils.getExecutableProgram(programFile)); } Diagnostic[] diagnostics = comResult.getDiagnostics(); return Arrays.stream(diagnostics).collect(Collectors.toList()); } }
class BCompileUtil { private static Path resourceDir = Paths.get("src/test/resources").toAbsolutePath(); private static final String MODULE_INIT_SUFFIX = "__init_"; /** * Compile and return the semantic errors. Error scenarios cannot use this method. * * @param sourceFilePath Path to source module/file * @return compileResult */ public static CompileResult compileAndSetup(String sourceFilePath) { CompileResult compileResult = compile(sourceFilePath, CompilerPhase.CODE_GEN); BRunUtil.invokePackageInit(compileResult); return compileResult; } /** * Compile and return the semantic errors. Error scenarios cannot use this method. * * @param obj this is to find the original callers location. * @param sourceRoot root path of the modules * @param packageName name of the module to compile * @return compileResult */ public static CompileResult compileAndSetup(Object obj, String sourceRoot, String packageName) { CompileResult compileResult = compile(obj, sourceRoot, packageName); BRunUtil.invokePackageInit(compileResult, packageName); return compileResult; } /** * Compile and return the semantic errors. * * @param sourceFilePath Path to source module/file * @return Semantic errors */ public static CompileResult compileAndGetBIR(String sourceFilePath) { return compile(sourceFilePath, CompilerPhase.BIR_GEN); } /** * Compile and return the semantic errors. 
* * @param sourceFilePath Path to source module/file * @return Semantic errors */ public static CompileResult compile(String sourceFilePath) { if (jBallerinaTestsEnabled()) { return compileOnJBallerina(sourceFilePath); } return compile(sourceFilePath, CompilerPhase.CODE_GEN); } static boolean jBallerinaTestsEnabled() { String value = System.getProperty(ENABLE_JBALLERINA_TESTS); return value != null && Boolean.valueOf(value); } public static CompileResult compileWithoutExperimentalFeatures(String sourceFilePath) { return compile(sourceFilePath, CompilerPhase.CODE_GEN, false); } public static CompileResult compile(String sourceFilePath, boolean isSiddhiRuntimeEnabled) { Path sourcePath = Paths.get(sourceFilePath); String packageName = sourcePath.getFileName().toString(); Path sourceRoot = resourceDir.resolve(sourcePath.getParent()); return compile(sourceRoot.toString(), packageName, CompilerPhase.CODE_GEN, isSiddhiRuntimeEnabled, true); } /** * Compile and return the semantic errors. * * @param obj this is to find the original callers location. 
* @param sourceRoot root path of the modules * @param packageName name of the module to compile * @return Semantic errors */ public static CompileResult compile(Object obj, String sourceRoot, String packageName) { try { String effectiveSource; CodeSource codeSource = obj.getClass().getProtectionDomain().getCodeSource(); URL location = codeSource.getLocation(); URI locationUri = location.toURI(); Path pathLocation = Paths.get(locationUri); String filePath = concatFileName(sourceRoot, pathLocation); Path rootPath = Paths.get(filePath); Path packagePath = Paths.get(packageName); if (Files.isDirectory(packagePath)) { String[] pkgParts = packageName.split("\\/"); List<Name> pkgNameComps = Arrays.stream(pkgParts) .map(part -> { if (part.equals("")) { return Names.EMPTY; } else if (part.equals("_")) { return Names.EMPTY; } return new Name(part); }) .collect(Collectors.toList()); PackageID pkgId = new PackageID(Names.ANON_ORG, pkgNameComps, Names.DEFAULT_VERSION); effectiveSource = pkgId.getName().getValue(); return compile(rootPath.toString(), effectiveSource, CompilerPhase.CODE_GEN); } else { effectiveSource = packageName; return compile(rootPath.toString(), effectiveSource, CompilerPhase.CODE_GEN, new FileSystemProjectDirectory(rootPath)); } } catch (URISyntaxException e) { throw new IllegalArgumentException("error while running test: " + e.getMessage()); } } /** * <p> * concatenates a given filename to the provided path in directory. * </p> * <p> * <b>Note : </b> this function is relevant since in Unix the directory would be separated from backslash and * in unix the folder will be separated from forward slash. * </p> * * @param fileName name of the file. * @param pathLocation location of the directory. * @return the path with directoryName + file. 
*/ public static String concatFileName(String fileName, Path pathLocation) { final String windowsFolderSeparator = "\\"; final String unixFolderSeparator = "/"; StringBuilder path = new StringBuilder(pathLocation.toAbsolutePath().toString()); if (pathLocation.endsWith(windowsFolderSeparator)) { path = path.append(windowsFolderSeparator).append(fileName); } else { path = path.append(unixFolderSeparator).append(fileName); } return path.toString(); } /** * Compile and return the semantic errors. * * @param sourceFilePath Path to source package/file * @param compilerPhase Compiler phase * @param enableExpFeatures Flag indicating to enable the experimental feature * @return Semantic errors */ public static CompileResult compile(String sourceFilePath, CompilerPhase compilerPhase, boolean enableExpFeatures) { Path sourcePath = Paths.get(sourceFilePath); String packageName = sourcePath.getFileName().toString(); Path sourceRoot = resourceDir.resolve(sourcePath.getParent()); return compile(sourceRoot.toString(), packageName, compilerPhase, enableExpFeatures); } /** * Compile and return the semantic errors. * * @param sourceFilePath Path to source package/file * @param compilerPhase Compiler phase * @return Semantic errors */ public static CompileResult compile(String sourceFilePath, CompilerPhase compilerPhase) { return compile(sourceFilePath, compilerPhase, true); } /** * Compile and return the semantic errors. 
* * @param sourceRoot root path of the modules * @param packageName name of the module to compile * @param compilerPhase Compiler phase * @param enableExpFeatures Flag indicating to enable the experimental features * @return Semantic errors */ public static CompileResult compile(String sourceRoot, String packageName, CompilerPhase compilerPhase, boolean enableExpFeatures) { CompilerContext context = new CompilerContext(); CompilerOptions options = CompilerOptions.getInstance(context); options.put(PROJECT_DIR, sourceRoot); options.put(COMPILER_PHASE, compilerPhase.toString()); options.put(PRESERVE_WHITESPACE, "false"); options.put(CompilerOptionName.EXPERIMENTAL_FEATURES_ENABLED, Boolean.toString(enableExpFeatures)); return compile(context, packageName, compilerPhase, false); } /** * Compile and return the semantic errors. * * @param sourceRoot root path of the modules * @param packageName name of the module to compile * @param compilerPhase Compiler phase * @param isSiddhiRuntimeEnabled Flag indicating to enable siddhi runtime for stream processing * @param enableExpFeatures Flag indicating to enable the experimental features * @return Semantic errors */ public static CompileResult compile(String sourceRoot, String packageName, CompilerPhase compilerPhase, boolean isSiddhiRuntimeEnabled, boolean enableExpFeatures) { CompilerContext context = new CompilerContext(); CompilerOptions options = CompilerOptions.getInstance(context); options.put(PROJECT_DIR, sourceRoot); options.put(COMPILER_PHASE, compilerPhase.toString()); options.put(PRESERVE_WHITESPACE, "false"); options.put(CompilerOptionName.SIDDHI_RUNTIME_ENABLED, Boolean.toString(isSiddhiRuntimeEnabled)); options.put(CompilerOptionName.EXPERIMENTAL_FEATURES_ENABLED, Boolean.toString(enableExpFeatures)); return compile(context, packageName, compilerPhase, false); } /** * Compile and return the semantic errors. 
* * @param sourceRoot root path of the modules * @param packageName name of the module to compile * @param compilerPhase Compiler phase * @return Semantic errors */ public static CompileResult compile(String sourceRoot, String packageName, CompilerPhase compilerPhase) { return compile(sourceRoot, packageName, compilerPhase, true); } /** * Compile with tests and return the semantic errors. * * @param context Compiler Context * @param packageName name of the module to compile * @param compilerPhase Compiler phase * @return Semantic errors */ public static CompileResult compileWithTests(CompilerContext context, String packageName, CompilerPhase compilerPhase) { return compile(context, packageName, compilerPhase, true); } /** * Create a compiler context. * * @param sourceRoot source root or project directory path * @param compilerPhase Compiler phase * @return new compiler context object */ public static CompilerContext createCompilerContext(String sourceRoot, CompilerPhase compilerPhase) { return createCompilerContext(sourceRoot, compilerPhase, Boolean.TRUE); } public static CompilerContext createCompilerContext(String sourceRoot, CompilerPhase compilerPhase, boolean enableExpFeatures) { CompilerContext context = new CompilerContext(); CompilerOptions options = CompilerOptions.getInstance(context); options.put(PROJECT_DIR, sourceRoot); options.put(COMPILER_PHASE, compilerPhase.toString()); options.put(PRESERVE_WHITESPACE, "false"); options.put(TEST_ENABLED, "true"); options.put(SKIP_TESTS, "false"); options.put(CompilerOptionName.EXPERIMENTAL_FEATURES_ENABLED, Boolean.toString(enableExpFeatures)); return context; } public static CompileResult compile(String sourceRoot, String packageName, CompilerPhase compilerPhase, SourceDirectory sourceDirectory) { CompilerContext context = new CompilerContext(); CompilerOptions options = CompilerOptions.getInstance(context); options.put(PROJECT_DIR, sourceRoot); options.put(COMPILER_PHASE, compilerPhase.toString()); 
options.put(PRESERVE_WHITESPACE, "false"); options.put(CompilerOptionName.EXPERIMENTAL_FEATURES_ENABLED, Boolean.TRUE.toString()); context.put(SourceDirectory.class, sourceDirectory); CompileResult comResult = new CompileResult(); DiagnosticListener listener = comResult::addDiagnostic; context.put(DiagnosticListener.class, listener); Compiler compiler = Compiler.getInstance(context); BLangPackage packageNode = compiler.compile(packageName); comResult.setAST(packageNode); CompiledBinaryFile.ProgramFile programFile = compiler.getExecutableProgram(packageNode); if (programFile != null) { comResult.setProgFile(LauncherUtils.getExecutableProgram(programFile)); } return comResult; } private static CompileResult compile(CompilerContext context, String packageName, CompilerPhase compilerPhase, boolean withTests) { CompileResult comResult = new CompileResult(); DiagnosticListener listener = comResult::addDiagnostic; context.put(DiagnosticListener.class, listener); Compiler compiler = Compiler.getInstance(context); BLangPackage packageNode = compiler.compile(packageName, true); comResult.setAST(packageNode); if (comResult.getErrorCount() > 0) { return comResult; } else if (CompilerPhase.CODE_GEN.compareTo(compilerPhase) > 0 || compilerPhase == CompilerPhase.BIR_GEN) { return comResult; } CompiledBinaryFile.ProgramFile programFile; if (withTests && packageNode.containsTestablePkg()) { programFile = compiler.getExecutableProgram(packageNode.getTestablePkg()); } else { programFile = compiler.getExecutableProgram(packageNode); } if (programFile != null) { ProgramFile pFile = LauncherUtils.getExecutableProgram(programFile); comResult.setProgFile(pFile); } return comResult; } /** * Compile and return the compiled package node. 
* * @param sourceFilePath Path to source module/file * @return compiled module node */ public static BLangPackage compileAndGetPackage(String sourceFilePath) { Path sourcePath = Paths.get(sourceFilePath); String packageName = sourcePath.getFileName().toString(); Path sourceRoot = resourceDir.resolve(sourcePath.getParent()); CompilerContext context = new CompilerContext(); CompilerOptions options = CompilerOptions.getInstance(context); options.put(PROJECT_DIR, resourceDir.resolve(sourceRoot).toString()); options.put(COMPILER_PHASE, CompilerPhase.CODE_GEN.toString()); options.put(PRESERVE_WHITESPACE, "false"); options.put(CompilerOptionName.EXPERIMENTAL_FEATURES_ENABLED, Boolean.TRUE.toString()); CompileResult comResult = new CompileResult(); DiagnosticListener listener = comResult::addDiagnostic; context.put(DiagnosticListener.class, listener); Compiler compiler = Compiler.getInstance(context); return compiler.compile(packageName); } /** * Compile and run a ballerina file. * * @param sourceFilePath Path to the ballerina file. 
* @param functionName The name of the function to run */ public static void run(String sourceFilePath, String functionName) { CompileResult result = compile(sourceFilePath); ProgramFile programFile = result.getProgFile(); if (MAIN_FUNCTION_NAME.equals(functionName) && !programFile.isMainEPAvailable() && !programFile.isServiceEPAvailable()) { throw new RuntimeException("main function not found in '" + programFile.getProgramFilePath() + "'"); } if (programFile.isMainEPAvailable() || !MAIN_FUNCTION_NAME.equals(functionName)) { LauncherUtils.runMain(programFile, functionName, new String[0], false); } else { LauncherUtils.runServices(programFile); } } public static String readFileAsString(String path) throws IOException { InputStream is = ClassLoader.getSystemResourceAsStream(path); InputStreamReader inputStreamREader = null; BufferedReader br = null; StringBuilder sb = new StringBuilder(); try { inputStreamREader = new InputStreamReader(is, StandardCharsets.UTF_8); br = new BufferedReader(inputStreamREader); String content = br.readLine(); if (content == null) { return sb.toString(); } sb.append(content); while ((content = br.readLine()) != null) { sb.append('\n').append(content); } } finally { if (inputStreamREader != null) { try { inputStreamREader.close(); } catch (IOException ignore) { } } if (br != null) { try { br.close(); } catch (IOException ignore) { } } } return sb.toString(); } public static BMap<String, BValue> createAndGetStruct(ProgramFile programFile, String packagePath, String structName) { PackageInfo structPackageInfo = programFile.getPackageInfo(packagePath); StructureTypeInfo typeInfo = structPackageInfo.getStructInfo(structName); return BLangVMStructs.createBStruct(typeInfo); } /** * Used by IntelliJ IDEA plugin to provide semantic analyzing capability. * * @param classLoader a {@link ClassLoader} to be set as thread context class loader. This is used by {@link * java.util.ServiceLoader}. 
Otherwise semantic analyzing capability providing wont work since it * cant find core package. * @param sourceRoot source root of a project * @param fileName either the file name (if in project root) or the module name * @return list of diagnostics */ public static List<Diagnostic> getDiagnostics(ClassLoader classLoader, String sourceRoot, String fileName) { Thread.currentThread().setContextClassLoader(classLoader); CompilerContext context = new CompilerContext(); CompilerOptions options = CompilerOptions.getInstance(context); options.put(PROJECT_DIR, sourceRoot); options.put(COMPILER_PHASE, CompilerPhase.CODE_GEN.toString()); options.put(PRESERVE_WHITESPACE, "false"); options.put(CompilerOptionName.EXPERIMENTAL_FEATURES_ENABLED, Boolean.TRUE.toString()); CompileResult comResult = new CompileResult(); DiagnosticListener listener = comResult::addDiagnostic; context.put(DiagnosticListener.class, listener); Compiler compiler = Compiler.getInstance(context); BLangPackage entryPackageNode = compiler.compile(fileName); CompiledBinaryFile.ProgramFile programFile = compiler.getExecutableProgram(entryPackageNode); if (programFile != null) { comResult.setProgFile(LauncherUtils.getExecutableProgram(programFile)); } Diagnostic[] diagnostics = comResult.getDiagnostics(); return Arrays.stream(diagnostics).collect(Collectors.toList()); } }
can array_generate accept only one argument? if not so, node.getChildren().size() < 2
public Void visitFunctionCall(FunctionCallExpr node, Scope scope) { Type[] argumentTypes = node.getChildren().stream().map(Expr::getType).toArray(Type[]::new); if (node.isNondeterministicBuiltinFnName()) { ExprId exprId = analyzeState.getNextNondeterministicId(); node.setNondeterministicId(exprId); } Function fn; String fnName = node.getFnName().getFunction(); if (fnName.equals(FunctionSet.COUNT) && node.getParams().isDistinct()) { fn = Expr.getBuiltinFunction(FunctionSet.COUNT, new Type[] {argumentTypes[0]}, Function.CompareMode.IS_NONSTRICT_SUPERTYPE_OF); } else if (fnName.equals(FunctionSet.EXCHANGE_BYTES) || fnName.equals(FunctionSet.EXCHANGE_SPEED)) { fn = Expr.getBuiltinFunction(fnName, argumentTypes, Function.CompareMode.IS_NONSTRICT_SUPERTYPE_OF); fn.setArgsType(argumentTypes); fn.setIsNullable(false); } else if (fnName.equals(FunctionSet.TIME_SLICE) || fnName.equals(FunctionSet.DATE_SLICE)) { if (!(node.getChild(1) instanceof IntLiteral)) { throw new SemanticException( fnName + " requires second parameter must be a constant interval"); } if (((IntLiteral) node.getChild(1)).getValue() <= 0) { throw new SemanticException( fnName + " requires second parameter must be greater than 0"); } fn = Expr.getBuiltinFunction(fnName, argumentTypes, Function.CompareMode.IS_NONSTRICT_SUPERTYPE_OF); } else if (FunctionSet.decimalRoundFunctions.contains(fnName) || Arrays.stream(argumentTypes).anyMatch(Type::isDecimalV3)) { if (FunctionSet.varianceFunctions.contains(fnName)) { Type[] doubleArgTypes = Stream.of(argumentTypes).map(t -> Type.DOUBLE).toArray(Type[]::new); fn = Expr.getBuiltinFunction(fnName, doubleArgTypes, Function.CompareMode.IS_NONSTRICT_SUPERTYPE_OF); } else { fn = getDecimalV3Function(node, argumentTypes); } } else if (Arrays.stream(argumentTypes).anyMatch(arg -> arg.matchesType(Type.TIME))) { fn = Expr.getBuiltinFunction(fnName, argumentTypes, Function.CompareMode.IS_NONSTRICT_SUPERTYPE_OF); if (fn instanceof AggregateFunction) { throw new 
SemanticException("Time Type can not used in %s function", fnName); } } else if (FunctionSet.STR_TO_DATE.equals(fnName)) { fn = getStrToDateFunction(node, argumentTypes); } else if (fnName.equals(FunctionSet.ARRAY_FILTER)) { if (node.getChildren().size() != 2) { throw new SemanticException(fnName + " should have 2 array inputs or lambda functions."); } if (!node.getChild(0).getType().isArrayType() && !node.getChild(0).getType().isNull()) { throw new SemanticException("The first input of " + fnName + " should be an array or a lambda function."); } if (!node.getChild(1).getType().isArrayType() && !node.getChild(1).getType().isNull()) { throw new SemanticException("The second input of " + fnName + " should be an array or a lambda function."); } if (!Type.canCastTo(node.getChild(1).getType(), Type.ARRAY_BOOLEAN)) { throw new SemanticException("The second input of array_filter " + node.getChild(1).getType().toString() + " can't cast to ARRAY<BOOL>"); } node.setChild(1, new CastExpr(Type.ARRAY_BOOLEAN, node.getChild(1))); argumentTypes[1] = Type.ARRAY_BOOLEAN; fn = Expr.getBuiltinFunction(fnName, argumentTypes, Function.CompareMode.IS_NONSTRICT_SUPERTYPE_OF); } else if (fnName.equals(FunctionSet.ARRAY_SORTBY)) { if (node.getChildren().size() != 2) { throw new SemanticException(fnName + " should have 2 array inputs or lambda functions."); } if (!node.getChild(0).getType().isArrayType() && !node.getChild(0).getType().isNull()) { throw new SemanticException("The first input of " + fnName + " should be an array or a lambda function."); } if (!node.getChild(1).getType().isArrayType() && !node.getChild(1).getType().isNull()) { throw new SemanticException("The second input of " + fnName + " should be an array or a lambda function."); } fn = Expr.getBuiltinFunction(fnName, argumentTypes, Function.CompareMode.IS_NONSTRICT_SUPERTYPE_OF); } else if (fnName.equals(FunctionSet.ARRAY_SLICE)) { for (int i = 1; i < argumentTypes.length; i++) { argumentTypes[i] = Type.BIGINT; } fn = 
Expr.getBuiltinFunction(fnName, argumentTypes, Function.CompareMode.IS_SUPERTYPE_OF); } else if (fnName.equals(FunctionSet.ARRAY_CONCAT)) { if (node.getChildren().size() < 2) { throw new SemanticException(fnName + " should have at least two inputs"); } fn = Expr.getBuiltinFunction(fnName, argumentTypes, Function.CompareMode.IS_NONSTRICT_SUPERTYPE_OF); } else if (fnName.equals("array_generate")) { if (node.getChildren().size() < 1 || node.getChildren().size() > 3) { throw new SemanticException(fnName + " has wrong input numbers"); } for (Expr expr : node.getChildren()) { if ((expr instanceof SlotRef) && node.getChildren().size() != 3) { throw new SemanticException(fnName + " with IntColumn doesn't support default parameters"); } if (!(expr instanceof IntLiteral) && !(expr instanceof LargeIntLiteral) && !(expr instanceof SlotRef) && !(expr instanceof NullLiteral)) { throw new SemanticException(fnName + "'s parameter only support Integer"); } } if (node.getChildren().size() == 1) { LiteralExpr secondParam = (LiteralExpr) node.getChild(0); node.clearChildren(); try { node.addChild(new IntLiteral("1", Type.TINYINT)); node.addChild(secondParam); } catch (AnalysisException e) { throw new SemanticException(e.getMessage()); } } if (node.getChildren().size() == 2) { int idx = 0; BigInteger[] childValues = new BigInteger[2]; for (Expr expr : node.getChildren()) { if (expr instanceof NullLiteral) { throw new SemanticException(fnName + "'s parameter only support Integer"); } else if (expr instanceof IntLiteral) { childValues[idx++] = BigInteger.valueOf(((IntLiteral) expr).getValue()); } else { childValues[idx++] = ((LargeIntLiteral) expr).getValue(); } } if (childValues[0].compareTo(childValues[1]) < 0) { node.addChild(new IntLiteral(1)); } else { node.addChild(new IntLiteral(-1)); } } argumentTypes = node.getChildren().stream().map(Expr::getType).toArray(Type[]::new); fn = Expr.getBuiltinFunction(fnName, argumentTypes, Function.CompareMode.IS_SUPERTYPE_OF); } else { fn = 
Expr.getBuiltinFunction(fnName, argumentTypes, Function.CompareMode.IS_NONSTRICT_SUPERTYPE_OF); } if (fn == null) { fn = AnalyzerUtils.getUdfFunction(session, node.getFnName(), argumentTypes); } if (fn == null) { throw new SemanticException("No matching function with signature: %s(%s).", fnName, node.getParams().isStar() ? "*" : Joiner.on(", ") .join(Arrays.stream(argumentTypes).map(Type::toSql).collect(Collectors.toList()))); } if (fn instanceof TableFunction) { throw unsupportedException("Table function cannot be used in expression"); } for (int i = 0; i < fn.getNumArgs(); i++) { if (!argumentTypes[i].matchesType(fn.getArgs()[i]) && !Type.canCastToAsFunctionParameter(argumentTypes[i], fn.getArgs()[i])) { throw new SemanticException("No matching function with signature: %s(%s).", fnName, node.getParams().isStar() ? "*" : Joiner.on(", ") .join(Arrays.stream(argumentTypes).map(Type::toSql).collect(Collectors.toList()))); } } node.setFn(fn); node.setType(fn.getReturnType()); FunctionAnalyzer.analyze(node); return null; }
if (node.getChildren().size() < 1 || node.getChildren().size() > 3) {
public Void visitFunctionCall(FunctionCallExpr node, Scope scope) { Type[] argumentTypes = node.getChildren().stream().map(Expr::getType).toArray(Type[]::new); if (node.isNondeterministicBuiltinFnName()) { ExprId exprId = analyzeState.getNextNondeterministicId(); node.setNondeterministicId(exprId); } Function fn; String fnName = node.getFnName().getFunction(); checkFunction(fnName, node); if (fnName.equals(FunctionSet.COUNT) && node.getParams().isDistinct()) { fn = Expr.getBuiltinFunction(FunctionSet.COUNT, new Type[] {argumentTypes[0]}, Function.CompareMode.IS_NONSTRICT_SUPERTYPE_OF); } else if (fnName.equals(FunctionSet.EXCHANGE_BYTES) || fnName.equals(FunctionSet.EXCHANGE_SPEED)) { fn = Expr.getBuiltinFunction(fnName, argumentTypes, Function.CompareMode.IS_NONSTRICT_SUPERTYPE_OF); fn.setArgsType(argumentTypes); fn.setIsNullable(false); } else if (DecimalV3FunctionAnalyzer.argumentTypeContainDecimalV3(fnName, argumentTypes)) { fn = DecimalV3FunctionAnalyzer.getDecimalV3Function(session, node, argumentTypes); } else if (Arrays.stream(argumentTypes).anyMatch(arg -> arg.matchesType(Type.TIME))) { fn = Expr.getBuiltinFunction(fnName, argumentTypes, Function.CompareMode.IS_NONSTRICT_SUPERTYPE_OF); if (fn instanceof AggregateFunction) { throw new SemanticException("Time Type can not used in" + fnName + " function", node.getPos()); } } else if (FunctionSet.STR_TO_DATE.equals(fnName)) { fn = getStrToDateFunction(node, argumentTypes); } else if (FunctionSet.ARRAY_GENERATE.equals(fnName)) { fn = getArrayGenerateFunction(node); argumentTypes = node.getChildren().stream().map(Expr::getType).toArray(Type[]::new); } else { fn = Expr.getBuiltinFunction(fnName, argumentTypes, Function.CompareMode.IS_NONSTRICT_SUPERTYPE_OF); } if (fn == null) { fn = AnalyzerUtils.getUdfFunction(session, node.getFnName(), argumentTypes); } if (fn == null) { String msg = String.format("No matching function with signature: %s(%s)", fnName, node.getParams().isStar() ? 
"*" : Joiner.on(", ") .join(Arrays.stream(argumentTypes).map(Type::toSql).collect(Collectors.toList()))); throw new SemanticException(msg, node.getPos()); } if (fn instanceof TableFunction) { throw new SemanticException("Table function cannot be used in expression", node.getPos()); } for (int i = 0; i < fn.getNumArgs(); i++) { if (!argumentTypes[i].matchesType(fn.getArgs()[i]) && !Type.canCastToAsFunctionParameter(argumentTypes[i], fn.getArgs()[i])) { String msg = String.format("No matching function with signature: %s(%s)", fnName, node.getParams().isStar() ? "*" : Arrays.stream(argumentTypes).map(Type::toSql).collect(Collectors.joining(", "))); throw new SemanticException(msg, node.getPos()); } } if (fn.hasVarArgs()) { Type varType = fn.getArgs()[fn.getNumArgs() - 1]; for (int i = fn.getNumArgs(); i < argumentTypes.length; i++) { if (!argumentTypes[i].matchesType(varType) && !Type.canCastToAsFunctionParameter(argumentTypes[i], varType)) { String msg = String.format("Variadic function %s(%s) can't support type: %s", fnName, Arrays.stream(fn.getArgs()).map(Type::toSql).collect(Collectors.joining(", ")), argumentTypes[i]); throw new SemanticException(msg, node.getPos()); } } } node.setFn(fn); node.setType(fn.getReturnType()); FunctionAnalyzer.analyze(node); return null; }
class Visitor extends AstVisitor<Void, Scope> { private static final List<String> ADD_DATE_FUNCTIONS = Lists.newArrayList(FunctionSet.DATE_ADD, FunctionSet.ADDDATE, FunctionSet.DAYS_ADD, FunctionSet.TIMESTAMPADD); private static final List<String> SUB_DATE_FUNCTIONS = Lists.newArrayList(FunctionSet.DATE_SUB, FunctionSet.SUBDATE, FunctionSet.DAYS_SUB); private final AnalyzeState analyzeState; private final ConnectContext session; public Visitor(AnalyzeState analyzeState, ConnectContext session) { this.analyzeState = analyzeState; this.session = session; } @Override public Void visitExpression(Expr node, Scope scope) { throw unsupportedException("not yet implemented: expression analyzer for " + node.getClass().getName()); } private void handleResolvedField(SlotRef slot, ResolvedField resolvedField) { analyzeState.addColumnReference(slot, FieldId.from(resolvedField)); } @Override public Void visitSubfieldExpr(SubfieldExpr node, Scope scope) { Expr child = node.getChild(0); Preconditions.checkArgument(child.getType().isStructType(), String.format("%s must be a struct type, check if you are using `'`", child.toSql())); List<String> fieldNames = node.getFieldNames(); Type tmpType = child.getType(); for (String fieldName : fieldNames) { StructType structType = (StructType) tmpType; StructField structField = structType.getField(fieldName); if (structField == null) { throw new SemanticException("Struct subfield '%s' cannot be resolved", fieldName); } tmpType = structField.getType(); } node.setType(tmpType); return null; } @Override public Void visitSlot(SlotRef node, Scope scope) { ResolvedField resolvedField = scope.resolveField(node); node.setType(resolvedField.getField().getType()); node.setTblName(resolvedField.getField().getRelationAlias()); if (node.getType().isStructType()) { node.setCol(resolvedField.getField().getName()); node.setLabel(resolvedField.getField().getName()); if (resolvedField.getField().getTmpUsedStructFieldPos().size() > 0) { 
node.setUsedStructFieldPos(resolvedField.getField().getTmpUsedStructFieldPos()); node.resetStructInfo(); } } handleResolvedField(node, resolvedField); return null; } @Override public Void visitFieldReference(FieldReference node, Scope scope) { Field field = scope.getRelationFields().getFieldByIndex(node.getFieldIndex()); node.setType(field.getType()); return null; } @Override public Void visitArrayExpr(ArrayExpr node, Scope scope) { if (!node.getChildren().isEmpty()) { try { Type targetItemType; if (node.getType() != null) { targetItemType = ((ArrayType) node.getType()).getItemType(); } else { targetItemType = TypeManager.getCommonSuperType( node.getChildren().stream().map(Expr::getType).collect(Collectors.toList())); } for (int i = 0; i < node.getChildren().size(); i++) { if (!node.getChildren().get(i).getType().matchesType(targetItemType)) { node.castChild(targetItemType, i); } } node.setType(new ArrayType(targetItemType)); } catch (AnalysisException e) { throw new SemanticException(e.getMessage()); } } else { node.setType(Type.ARRAY_NULL); } return null; } @Override public Void visitCollectionElementExpr(CollectionElementExpr node, Scope scope) { Expr expr = node.getChild(0); Expr subscript = node.getChild(1); if (!expr.getType().isArrayType() && !expr.getType().isMapType()) { throw new SemanticException("cannot subscript type " + expr.getType() + " because it is not an array or a map"); } if (expr.getType().isArrayType()) { if (!subscript.getType().isNumericType()) { throw new SemanticException("array subscript must have type integer"); } try { if (subscript.getType().getPrimitiveType() != PrimitiveType.INT) { node.castChild(Type.INT, 1); } node.setType(((ArrayType) expr.getType()).getItemType()); } catch (AnalysisException e) { throw new SemanticException(e.getMessage()); } } else { try { if (subscript.getType().getPrimitiveType() != ((MapType) expr.getType()).getKeyType().getPrimitiveType()) { node.castChild(((MapType) expr.getType()).getKeyType(), 1); } 
node.setType(((MapType) expr.getType()).getValueType()); } catch (AnalysisException e) { throw new SemanticException(e.getMessage()); } } return null; } @Override public Void visitArraySliceExpr(ArraySliceExpr node, Scope scope) { if (!node.getChild(0).getType().isArrayType()) { throw new SemanticException("cannot subscript type" + node.getChild(0).getType() + " because it is not an array"); } node.setType(node.getChild(0).getType()); return null; } @Override public Void visitArrowExpr(ArrowExpr node, Scope scope) { Expr item = node.getChild(0); Expr key = node.getChild(1); if (!key.isLiteral() || !key.getType().isStringType()) { throw new SemanticException("right operand of -> should be string literal, but got " + key); } if (!item.getType().isJsonType()) { throw new SemanticException( "-> operator could only be used for json column, but got " + item.getType()); } node.setType(Type.JSON); return null; } @Override public Void visitLambdaFunctionExpr(LambdaFunctionExpr node, Scope scope) { if (scope.getLambdaInputs().size() == 0) { throw new SemanticException("Lambda Functions can only be used in high-order functions with arrays."); } if (scope.getLambdaInputs().size() != node.getChildren().size() - 1) { throw new SemanticException("Lambda arguments should equal to lambda input arrays."); } Set<String> set = new HashSet<>(); List<LambdaArgument> args = Lists.newArrayList(); for (int i = 1; i < node.getChildren().size(); ++i) { args.add((LambdaArgument) node.getChild(i)); String name = ((LambdaArgument) node.getChild(i)).getName(); if (set.contains(name)) { throw new SemanticException("Lambda argument: " + name + " is duplicated."); } set.add(name); ((LambdaArgument) node.getChild(i)).setNullable(scope.getLambdaInputs().get(i - 1).isNullable()); node.getChild(i).setType(scope.getLambdaInputs().get(i - 1).getType()); } Scope lambdaScope = new Scope(args, scope); ExpressionAnalyzer.analyzeExpression(node.getChild(0), this.analyzeState, lambdaScope, this.session); 
node.setType(Type.FUNCTION); scope.clearLambdaInputs(); return null; } @Override public Void visitCompoundPredicate(CompoundPredicate node, Scope scope) { for (int i = 0; i < node.getChildren().size(); i++) { Type type = node.getChild(i).getType(); if (!type.isBoolean() && !type.isNull()) { throw new SemanticException("Operand '%s' part of predicate " + "'%s' should return type 'BOOLEAN' but returns type '%s'.", AstToStringBuilder.toString(node), AstToStringBuilder.toString(node.getChild(i)), type.toSql()); } } node.setType(Type.BOOLEAN); return null; } @Override public Void visitBetweenPredicate(BetweenPredicate node, Scope scope) { predicateBaseAndCheck(node); List<Type> list = node.getChildren().stream().map(Expr::getType).collect(Collectors.toList()); Type compatibleType = TypeManager.getCompatibleTypeForBetweenAndIn(list); for (Type type : list) { if (!Type.canCastTo(type, compatibleType)) { throw new SemanticException( "between predicate type " + type.toSql() + " with type " + compatibleType.toSql() + " is invalid."); } } return null; } @Override public Void visitBinaryPredicate(BinaryPredicate node, Scope scope) { Type type1 = node.getChild(0).getType(); Type type2 = node.getChild(1).getType(); Type compatibleType = TypeManager.getCompatibleTypeForBinary(node.getOp().isNotRangeComparison(), type1, type2); final String ERROR_MSG = "Column type %s does not support binary predicate operation."; if (!Type.canCastTo(type1, compatibleType)) { throw new SemanticException(String.format(ERROR_MSG, type1.toSql())); } if (!Type.canCastTo(type2, compatibleType)) { throw new SemanticException(String.format(ERROR_MSG, type1.toSql())); } node.setType(Type.BOOLEAN); return null; } @Override public Void visitArithmeticExpr(ArithmeticExpr node, Scope scope) { if (node.getOp().getPos() == ArithmeticExpr.OperatorPosition.BINARY_INFIX) { ArithmeticExpr.Operator op = node.getOp(); Type t1 = node.getChild(0).getType().getNumResultType(); Type t2 = 
node.getChild(1).getType().getNumResultType(); if (t1.isDecimalV3() || t2.isDecimalV3()) { try { node.rewriteDecimalOperation(); } catch (AnalysisException ex) { throw new SemanticException(ex.getMessage()); } Type lhsType = node.getChild(0).getType(); Type rhsType = node.getChild(1).getType(); Type resultType = node.getType(); Type[] args = {lhsType, rhsType}; Function fn = Expr.getBuiltinFunction(op.getName(), args, Function.CompareMode.IS_IDENTICAL); Function newFn = new ScalarFunction(fn.getFunctionName(), args, resultType, fn.hasVarArgs()); node.setType(resultType); node.setFn(newFn); return null; } Type lhsType; Type rhsType; switch (op) { case MULTIPLY: case ADD: case SUBTRACT: lhsType = ArithmeticExpr.getBiggerType(ArithmeticExpr.getCommonType(t1, t2)); rhsType = lhsType; break; case MOD: lhsType = ArithmeticExpr.getCommonType(t1, t2); rhsType = lhsType; break; case DIVIDE: lhsType = ArithmeticExpr.getCommonType(t1, t2); if (lhsType.isFixedPointType()) { lhsType = Type.DOUBLE; } rhsType = lhsType; break; case INT_DIVIDE: case BITAND: case BITOR: case BITXOR: lhsType = ArithmeticExpr.getCommonType(t1, t2); if (!lhsType.isFixedPointType()) { lhsType = Type.BIGINT; } rhsType = lhsType; break; case BIT_SHIFT_LEFT: case BIT_SHIFT_RIGHT: case BIT_SHIFT_RIGHT_LOGICAL: lhsType = t1; rhsType = Type.BIGINT; break; default: throw unsupportedException("Unknown arithmetic operation " + op + " in: " + node); } if (node.getChild(0).getType().equals(Type.NULL) && node.getChild(1).getType().equals(Type.NULL)) { lhsType = Type.NULL; rhsType = Type.NULL; } if (!Type.NULL.equals(node.getChild(0).getType()) && !Type.canCastTo(t1, lhsType)) { throw new SemanticException( "cast type " + node.getChild(0).getType().toSql() + " with type " + lhsType.toSql() + " is invalid."); } if (!Type.NULL.equals(node.getChild(1).getType()) && !Type.canCastTo(t2, rhsType)) { throw new SemanticException( "cast type " + node.getChild(1).getType().toSql() + " with type " + rhsType.toSql() + " is 
invalid."); } Function fn = Expr.getBuiltinFunction(op.getName(), new Type[] {lhsType, rhsType}, Function.CompareMode.IS_SUPERTYPE_OF); /* * commonType is the common type of the parameters of the function, * and fn.getReturnType() is the return type of the function after execution * So we use fn.getReturnType() as node type */ node.setType(fn.getReturnType()); node.setFn(fn); } else if (node.getOp().getPos() == ArithmeticExpr.OperatorPosition.UNARY_PREFIX) { Function fn = Expr.getBuiltinFunction( node.getOp().getName(), new Type[] {Type.BIGINT}, Function.CompareMode.IS_SUPERTYPE_OF); node.setType(Type.BIGINT); node.setFn(fn); } else if (node.getOp().getPos() == ArithmeticExpr.OperatorPosition.UNARY_POSTFIX) { throw unsupportedException("not yet implemented: expression analyzer for " + node.getClass().getName()); } else { throw unsupportedException("not yet implemented: expression analyzer for " + node.getClass().getName()); } return null; } List<String> addDateFunctions = Lists.newArrayList(FunctionSet.DATE_ADD, FunctionSet.ADDDATE, FunctionSet.DAYS_ADD, FunctionSet.TIMESTAMPADD); List<String> subDateFunctions = Lists.newArrayList(FunctionSet.DATE_SUB, FunctionSet.SUBDATE, FunctionSet.DAYS_SUB); @Override public Void visitTimestampArithmeticExpr(TimestampArithmeticExpr node, Scope scope) { node.setChild(0, TypeManager.addCastExpr(node.getChild(0), Type.DATETIME)); String funcOpName; if (node.getFuncName() != null) { if (ADD_DATE_FUNCTIONS.contains(node.getFuncName())) { funcOpName = String.format("%sS_%s", node.getTimeUnitIdent(), "add"); } else if (SUB_DATE_FUNCTIONS.contains(node.getFuncName())) { funcOpName = String.format("%sS_%s", node.getTimeUnitIdent(), "sub"); } else { node.setChild(1, TypeManager.addCastExpr(node.getChild(1), Type.DATETIME)); funcOpName = String.format("%sS_%s", node.getTimeUnitIdent(), "diff"); } } else { funcOpName = String.format("%sS_%s", node.getTimeUnitIdent(), (node.getOp() == ArithmeticExpr.Operator.ADD) ? 
"add" : "sub"); } Type[] argumentTypes = node.getChildren().stream().map(Expr::getType) .toArray(Type[]::new); Function fn = Expr.getBuiltinFunction(funcOpName.toLowerCase(), argumentTypes, Function.CompareMode.IS_NONSTRICT_SUPERTYPE_OF); if (fn == null) { throw new SemanticException("No matching function with signature: %s(%s).", funcOpName, Joiner.on(", ") .join(Arrays.stream(argumentTypes).map(Type::toSql).collect(Collectors.toList()))); } node.setType(fn.getReturnType()); node.setFn(fn); return null; } @Override public Void visitExistsPredicate(ExistsPredicate node, Scope scope) { predicateBaseAndCheck(node); return null; } @Override public Void visitInPredicate(InPredicate node, Scope scope) { predicateBaseAndCheck(node); List<Expr> queryExpressions = Lists.newArrayList(); node.collect(arg -> arg instanceof Subquery, queryExpressions); if (queryExpressions.size() > 0 && node.getChildren().size() > 2) { throw new SemanticException("In Predicate only support literal expression list"); } List<Type> list = node.getChildren().stream().map(Expr::getType).collect(Collectors.toList()); Type compatibleType = TypeManager.getCompatibleTypeForBetweenAndIn(list); for (Type type : list) { if (type.isJsonType()) { throw new SemanticException("InPredicate of JSON is not supported"); } if (!Type.canCastTo(type, compatibleType)) { throw new SemanticException( "in predicate type " + type.toSql() + " with type " + compatibleType.toSql() + " is invalid."); } } return null; } @Override public Void visitMultiInPredicate(MultiInPredicate node, Scope scope) { predicateBaseAndCheck(node); List<Type> leftTypes = node.getChildren().stream().limit(node.getNumberOfColumns()).map(Expr::getType) .collect(Collectors.toList()); Subquery inSubquery = (Subquery) node.getChild(node.getNumberOfColumns()); List<Type> rightTypes = inSubquery.getQueryStatement().getQueryRelation().getOutputExpression().stream().map(Expr::getType). 
collect(Collectors.toList()); if (leftTypes.size() != rightTypes.size()) { throw new SemanticException( "subquery must return the same number of columns as provided by the IN predicate"); } for (int i = 0; i < rightTypes.size(); ++i) { if (leftTypes.get(i).isJsonType() || rightTypes.get(i).isJsonType() || leftTypes.get(i).isMapType() || rightTypes.get(i).isMapType() || leftTypes.get(i).isStructType() || rightTypes.get(i).isStructType()) { throw new SemanticException("InPredicate of JSON, Map, Struct types is not supported"); } if (!Type.canCastTo(leftTypes.get(i), rightTypes.get(i))) { throw new SemanticException( "in predicate type " + leftTypes.get(i).toSql() + " with type " + rightTypes.get(i).toSql() + " is invalid."); } } return null; } @Override public Void visitLiteral(LiteralExpr node, Scope scope) { if (node instanceof LargeIntLiteral) { BigInteger value = ((LargeIntLiteral) node).getValue(); if (value.compareTo(LargeIntLiteral.LARGE_INT_MIN) < 0 || value.compareTo(LargeIntLiteral.LARGE_INT_MAX) > 0) { throw new SemanticException("Number Overflow. 
literal: " + value); } } return null; } @Override public Void visitIsNullPredicate(IsNullPredicate node, Scope scope) { predicateBaseAndCheck(node); return null; } @Override public Void visitLikePredicate(LikePredicate node, Scope scope) { predicateBaseAndCheck(node); Type type1 = node.getChild(0).getType(); Type type2 = node.getChild(1).getType(); if (!type1.isStringType() && !type1.isNull()) { throw new SemanticException( "left operand of " + node.getOp().toString() + " must be of type STRING: " + AstToStringBuilder.toString(node)); } if (!type2.isStringType() && !type2.isNull()) { throw new SemanticException( "right operand of " + node.getOp().toString() + " must be of type STRING: " + AstToStringBuilder.toString(node)); } if (LikePredicate.Operator.REGEXP.equals(node.getOp()) && !type2.isNull() && node.getChild(1).isLiteral()) { try { Pattern.compile(((StringLiteral) node.getChild(1)).getValue()); } catch (PatternSyntaxException e) { throw new SemanticException( "Invalid regular expression in '" + AstToStringBuilder.toString(node) + "'"); } } return null; } private void predicateBaseAndCheck(Predicate node) { node.setType(Type.BOOLEAN); for (Expr expr : node.getChildren()) { if (expr.getType().isOnlyMetricType() || (expr.getType().isComplexType() && !(node instanceof IsNullPredicate))) { throw new SemanticException( "HLL, BITMAP, PERCENTILE and ARRAY, MAP, STRUCT type couldn't as Predicate"); } } } @Override public Void visitCastExpr(CastExpr cast, Scope context) { Type castType; if (cast.isImplicit()) { castType = cast.getType(); } else { castType = cast.getTargetTypeDef().getType(); } if (!Type.canCastTo(cast.getChild(0).getType(), castType)) { throw new SemanticException("Invalid type cast from " + cast.getChild(0).getType().toSql() + " to " + castType.toSql() + " in sql `" + AstToStringBuilder.toString(cast.getChild(0)).replace("%", "%%") + "`"); } cast.setType(castType); return null; } @Override private Function getStrToDateFunction(FunctionCallExpr node, 
Type[] argumentTypes) { /* * @TODO: Determine the return type of this function * If is format is constant and don't contains time part, return date type, to compatible with mysql. * In fact we don't want to support str_to_date return date like mysql, reason: * 1. The return type of FE/BE str_to_date function signature is datetime, return date * let type different, it's will throw unpredictable error * 2. Support return date and datetime at same time in one function is complicated. * 3. The meaning of the function is confusing. In mysql, will return date if format is a constant * string and it's not contains "%H/%M/%S" pattern, but it's a trick logic, if format is a variable * expression, like: str_to_date(col1, col2), and the col2 is '%Y%m%d', the result always be * datetime. */ Function fn = Expr.getBuiltinFunction(node.getFnName().getFunction(), argumentTypes, Function.CompareMode.IS_NONSTRICT_SUPERTYPE_OF); if (fn == null) { return null; } if (!node.getChild(1).isConstant()) { return fn; } ExpressionMapping expressionMapping = new ExpressionMapping(new Scope(RelationId.anonymous(), new RelationFields()), com.google.common.collect.Lists.newArrayList()); ScalarOperator format = SqlToScalarOperatorTranslator.translate(node.getChild(1), expressionMapping, new ColumnRefFactory()); if (format.isConstantRef() && !HAS_TIME_PART.matcher(format.toString()).matches()) { return Expr.getBuiltinFunction("str2date", argumentTypes, Function.CompareMode.IS_NONSTRICT_SUPERTYPE_OF); } return fn; } Function getDecimalV3Function(FunctionCallExpr node, Type[] argumentTypes) { Function fn; String fnName = node.getFnName().getFunction(); Type commonType = DecimalV3FunctionAnalyzer.normalizeDecimalArgTypes(argumentTypes, fnName); fn = Expr.getBuiltinFunction(fnName, argumentTypes, Function.CompareMode.IS_NONSTRICT_SUPERTYPE_OF); if (fn == null) { fn = AnalyzerUtils.getUdfFunction(session, node.getFnName(), argumentTypes); } if (fn == null) { throw new SemanticException("No matching 
function with signature: %s(%s).", fnName, node.getParams().isStar() ? "*" : Joiner.on(", ") .join(Arrays.stream(argumentTypes).map(Type::toSql).collect(Collectors.toList()))); } if (DecimalV3FunctionAnalyzer.DECIMAL_AGG_FUNCTION.contains(fnName)) { Type argType = node.getChild(0).getType(); if (DecimalV3FunctionAnalyzer.DECIMAL_AGG_VARIANCE_STDDEV_TYPE .contains(fnName) && argType.isDecimalV3()) { argType = ScalarType.createDecimalV3Type(PrimitiveType.DECIMAL128, 38, 9); node.setChild(0, TypeManager.addCastExpr(node.getChild(0), argType)); } fn = DecimalV3FunctionAnalyzer .rectifyAggregationFunction((AggregateFunction) fn, argType, commonType); } else if (DecimalV3FunctionAnalyzer.DECIMAL_UNARY_FUNCTION_SET.contains(fnName) || DecimalV3FunctionAnalyzer.DECIMAL_IDENTICAL_TYPE_FUNCTION_SET.contains(fnName) || FunctionSet.IF.equals(fnName) || FunctionSet.MAX_BY.equals(fnName)) { List<Type> argTypes; if (FunctionSet.MONEY_FORMAT.equals(fnName)) { argTypes = Arrays.asList(argumentTypes); } else { argTypes = Arrays.stream(fn.getArgs()).map(t -> t.isDecimalV3() ? 
commonType : t) .collect(Collectors.toList()); } Type returnType = fn.getReturnType(); if (returnType.isDecimalV3() && commonType.isValid()) { returnType = commonType; } if (FunctionSet.MAX_BY.equals(fnName)) { AggregateFunction newFn = new AggregateFunction(fn.getFunctionName(), Arrays.asList(argumentTypes), returnType, Type.VARCHAR, fn.hasVarArgs()); newFn.setFunctionId(fn.getFunctionId()); newFn.setChecksum(fn.getChecksum()); newFn.setBinaryType(fn.getBinaryType()); newFn.setHasVarArgs(fn.hasVarArgs()); newFn.setId(fn.getId()); newFn.setUserVisible(fn.isUserVisible()); newFn.setisAnalyticFn(true); fn = newFn; return fn; } ScalarFunction newFn = new ScalarFunction(fn.getFunctionName(), argTypes, returnType, fn.getLocation(), ((ScalarFunction) fn).getSymbolName(), ((ScalarFunction) fn).getPrepareFnSymbol(), ((ScalarFunction) fn).getCloseFnSymbol()); newFn.setFunctionId(fn.getFunctionId()); newFn.setChecksum(fn.getChecksum()); newFn.setBinaryType(fn.getBinaryType()); newFn.setHasVarArgs(fn.hasVarArgs()); newFn.setId(fn.getId()); newFn.setUserVisible(fn.isUserVisible()); fn = newFn; } else if (FunctionSet.decimalRoundFunctions.contains(fnName)) { List<Type> argTypes = Arrays.stream(fn.getArgs()).map(t -> t.isDecimalV3() ? 
commonType : t) .collect(Collectors.toList()); fn = DecimalV3FunctionAnalyzer.getFunctionOfRound(node, fn, argTypes); } return fn; } @Override public Void visitGroupingFunctionCall(GroupingFunctionCallExpr node, Scope scope) { if (node.getChildren().size() < 1) { throw new SemanticException("GROUPING functions required at least one parameters"); } if (node.getChildren().stream().anyMatch(e -> !(e instanceof SlotRef))) { throw new SemanticException("grouping functions only support column."); } Type[] childTypes = new Type[1]; childTypes[0] = Type.BIGINT; Function fn = Expr.getBuiltinFunction(node.getFnName().getFunction(), childTypes, Function.CompareMode.IS_IDENTICAL); node.setFn(fn); node.setType(fn.getReturnType()); return null; } @Override public Void visitCaseWhenExpr(CaseExpr node, Scope context) { int start = 0; int end = node.getChildren().size(); Expr caseExpr = null; Expr elseExpr = null; if (node.hasCaseExpr()) { caseExpr = node.getChild(0); start++; } if (node.hasElseExpr()) { elseExpr = node.getChild(end - 1); end--; } if (node.getChildren().stream().anyMatch(d -> !d.getType().isScalarType())) { throw new SemanticException("case-when only support scalar type"); } List<Type> whenTypes = Lists.newArrayList(); if (null != caseExpr) { whenTypes.add(caseExpr.getType()); } for (int i = start; i < end; i = i + 2) { whenTypes.add(node.getChild(i).getType()); } Type compatibleType = Type.NULL; if (null != caseExpr) { compatibleType = TypeManager.getCompatibleTypeForCaseWhen(whenTypes); } for (Type type : whenTypes) { if (!Type.canCastTo(type, compatibleType)) { throw new SemanticException("Invalid when type cast " + type.toSql() + " to " + compatibleType.toSql()); } } List<Type> thenTypes = Lists.newArrayList(); for (int i = start + 1; i < end; i = i + 2) { thenTypes.add(node.getChild(i).getType()); } if (null != elseExpr) { thenTypes.add(elseExpr.getType()); } Type returnType = thenTypes.stream().allMatch(Type.NULL::equals) ? 
Type.BOOLEAN : TypeManager.getCompatibleTypeForCaseWhen(thenTypes); for (Type type : thenTypes) { if (!Type.canCastTo(type, returnType)) { throw new SemanticException("Invalid then type cast " + type.toSql() + " to " + returnType.toSql()); } } node.setType(returnType); return null; } @Override public Void visitSubquery(Subquery node, Scope context) { QueryAnalyzer queryAnalyzer = new QueryAnalyzer(session); queryAnalyzer.analyze(node.getQueryStatement(), context); node.setType(node.getQueryStatement().getQueryRelation().getRelationFields().getFieldByIndex(0).getType()); return null; } @Override public Void visitAnalyticExpr(AnalyticExpr node, Scope context) { visit(node.getFnCall(), context); node.setType(node.getFnCall().getType()); if (node.getWindow() != null) { if (node.getWindow().getLeftBoundary() != null && node.getWindow().getLeftBoundary().getExpr() != null) { visit(node.getWindow().getLeftBoundary().getExpr(), context); } if (node.getWindow().getRightBoundary() != null && node.getWindow().getRightBoundary().getExpr() != null) { visit(node.getWindow().getRightBoundary().getExpr(), context); } } node.getPartitionExprs().forEach(e -> visit(e, context)); node.getOrderByElements().stream().map(OrderByElement::getExpr).forEach(e -> visit(e, context)); verifyAnalyticExpression(node); return null; } @Override public Void visitInformationFunction(InformationFunction node, Scope context) { String funcType = node.getFuncType(); if (funcType.equalsIgnoreCase("DATABASE") || funcType.equalsIgnoreCase("SCHEMA")) { node.setType(Type.VARCHAR); node.setStrValue(ClusterNamespace.getNameFromFullName(session.getDatabase())); } else if (funcType.equalsIgnoreCase("USER")) { node.setType(Type.VARCHAR); node.setStrValue(session.getUserIdentity().toString()); } else if (funcType.equalsIgnoreCase("CURRENT_USER")) { node.setType(Type.VARCHAR); node.setStrValue(session.getCurrentUserIdentity().toString()); } else if (funcType.equalsIgnoreCase("CURRENT_ROLE")) { 
node.setType(Type.VARCHAR); PrivilegeManager manager = session.getGlobalStateMgr().getPrivilegeManager(); List<String> roleName = new ArrayList<>(); try { for (Long roleId : session.getCurrentRoleIds()) { RolePrivilegeCollection rolePrivilegeCollection = manager.getRolePrivilegeCollectionUnlocked(roleId, true); roleName.add(rolePrivilegeCollection.getName()); } } catch (PrivilegeException e) { throw new SemanticException(e.getMessage()); } if (roleName.isEmpty()) { node.setStrValue("NONE"); } else { node.setStrValue(Joiner.on(", ").join(roleName)); } } else if (funcType.equalsIgnoreCase("CONNECTION_ID")) { node.setType(Type.BIGINT); node.setIntValue(session.getConnectionId()); node.setStrValue(""); } return null; } @Override public Void visitVariableExpr(VariableExpr node, Scope context) { try { if (node.getSetType().equals(SetType.USER)) { UserVariable userVariable = session.getUserVariables(node.getName()); if (userVariable == null) { node.setType(Type.STRING); node.setIsNull(); return null; } Type variableType = userVariable.getEvaluatedExpression().getType(); node.setType(variableType); if (userVariable.getEvaluatedExpression() instanceof NullLiteral) { node.setIsNull(); } else { node.setValue(userVariable.getEvaluatedExpression().getRealObjectValue()); } } else { VariableMgr.fillValue(session.getSessionVariable(), node); if (!Strings.isNullOrEmpty(node.getName()) && node.getName().equalsIgnoreCase(SessionVariable.SQL_MODE)) { node.setType(Type.VARCHAR); node.setValue(SqlModeHelper.decode((long) node.getValue())); } } } catch (AnalysisException | DdlException e) { throw new SemanticException(e.getMessage()); } return null; } @Override public Void visitDefaultValueExpr(DefaultValueExpr node, Scope context) { node.setType(Type.VARCHAR); return null; } @Override public Void visitCloneExpr(CloneExpr node, Scope context) { return null; } }
class Visitor extends AstVisitor<Void, Scope> { private static final List<String> ADD_DATE_FUNCTIONS = Lists.newArrayList(FunctionSet.DATE_ADD, FunctionSet.ADDDATE, FunctionSet.DAYS_ADD, FunctionSet.TIMESTAMPADD); private static final List<String> SUB_DATE_FUNCTIONS = Lists.newArrayList(FunctionSet.DATE_SUB, FunctionSet.SUBDATE, FunctionSet.DAYS_SUB); private final AnalyzeState analyzeState; private final ConnectContext session; public Visitor(AnalyzeState analyzeState, ConnectContext session) { this.analyzeState = analyzeState; this.session = session; } @Override public Void visitExpression(Expr node, Scope scope) { throw new SemanticException("not yet implemented: expression analyzer for " + node.getClass().getName(), node.getPos()); } private void handleResolvedField(SlotRef slot, ResolvedField resolvedField) { analyzeState.addColumnReference(slot, FieldId.from(resolvedField)); } @Override public Void visitSubfieldExpr(SubfieldExpr node, Scope scope) { Expr child = node.getChild(0); if (!child.getType().isStructType()) { throw new SemanticException(child.toSql() + " must be a struct type, check if you are using `'`", child.getPos()); } List<String> fieldNames = node.getFieldNames(); Type tmpType = child.getType(); for (String fieldName : fieldNames) { StructType structType = (StructType) tmpType; StructField structField = structType.getField(fieldName); if (structField == null) { throw new SemanticException(String.format("Struct subfield '%s' cannot be resolved", fieldName), node.getPos()); } tmpType = structField.getType(); } node.setType(tmpType); return null; } @Override public Void visitSlot(SlotRef node, Scope scope) { ResolvedField resolvedField = scope.resolveField(node); node.setType(resolvedField.getField().getType()); node.setTblName(resolvedField.getField().getRelationAlias()); if (node.getType().isStructType()) { node.setCol(resolvedField.getField().getName()); node.setLabel(resolvedField.getField().getName()); if 
(resolvedField.getField().getTmpUsedStructFieldPos().size() > 0) { node.setUsedStructFieldPos(resolvedField.getField().getTmpUsedStructFieldPos()); node.resetStructInfo(); } } handleResolvedField(node, resolvedField); return null; } @Override public Void visitFieldReference(FieldReference node, Scope scope) { Field field = scope.getRelationFields().getFieldByIndex(node.getFieldIndex()); node.setType(field.getType()); return null; } @Override public Void visitArrayExpr(ArrayExpr node, Scope scope) { if (!node.getChildren().isEmpty()) { try { Type targetItemType; if (node.getType() != null) { targetItemType = ((ArrayType) node.getType()).getItemType(); } else { targetItemType = TypeManager.getCommonSuperType( node.getChildren().stream().map(Expr::getType).collect(Collectors.toList())); } for (int i = 0; i < node.getChildren().size(); i++) { if (!node.getChildren().get(i).getType().matchesType(targetItemType)) { node.castChild(targetItemType, i); } } node.setType(new ArrayType(targetItemType)); } catch (AnalysisException e) { throw new SemanticException(e.getMessage()); } } else { node.setType(Type.ARRAY_NULL); } return null; } @Override public Void visitMapExpr(MapExpr node, Scope scope) { if (!node.getChildren().isEmpty()) { Type keyType = Type.NULL; Type valueType = Type.NULL; if (node.getKeyExpr() != null) { keyType = node.getKeyExpr().getType(); } if (node.getValueExpr() != null) { valueType = node.getValueExpr().getType(); } node.setType(new MapType(keyType, valueType)); } else { node.setType(new MapType(Type.NULL, Type.NULL)); } return null; } @Override public Void visitCollectionElementExpr(CollectionElementExpr node, Scope scope) { Expr expr = node.getChild(0); Expr subscript = node.getChild(1); if (!expr.getType().isArrayType() && !expr.getType().isMapType()) { throw new SemanticException("cannot subscript type " + expr.getType() + " because it is not an array or a map", expr.getPos()); } if (expr.getType().isArrayType()) { if 
(!subscript.getType().isNumericType()) { throw new SemanticException("array subscript must have type integer", subscript.getPos()); } try { if (subscript.getType().getPrimitiveType() != PrimitiveType.INT) { node.castChild(Type.INT, 1); } node.setType(((ArrayType) expr.getType()).getItemType()); } catch (AnalysisException e) { throw new SemanticException(e.getMessage()); } } else { try { if (subscript.getType().getPrimitiveType() != ((MapType) expr.getType()).getKeyType().getPrimitiveType()) { node.castChild(((MapType) expr.getType()).getKeyType(), 1); } node.setType(((MapType) expr.getType()).getValueType()); } catch (AnalysisException e) { throw new SemanticException(e.getMessage()); } } return null; } @Override public Void visitArraySliceExpr(ArraySliceExpr node, Scope scope) { if (!node.getChild(0).getType().isArrayType()) { throw new SemanticException("cannot subscript type" + node.getChild(0).getType() + " because it is not an array", node.getChild(0).getPos()); } node.setType(node.getChild(0).getType()); return null; } @Override public Void visitArrowExpr(ArrowExpr node, Scope scope) { Expr item = node.getChild(0); Expr key = node.getChild(1); if (!key.isLiteral() || !key.getType().isStringType()) { throw new SemanticException("right operand of -> should be string literal, but got " + key, key.getPos()); } if (!item.getType().isJsonType()) { throw new SemanticException( "-> operator could only be used for json column, but got " + item.getType(), item.getPos()); } node.setType(Type.JSON); return null; } @Override public Void visitLambdaFunctionExpr(LambdaFunctionExpr node, Scope scope) { if (scope.getLambdaInputs().size() == 0) { throw new SemanticException( "Lambda Functions can only be used in high-order functions with arrays/maps", node.getPos()); } if (scope.getLambdaInputs().size() != node.getChildren().size() - 1) { throw new SemanticException("Lambda arguments should equal to lambda input arrays", node.getPos()); } Set<String> set = new HashSet<>(); 
List<LambdaArgument> args = Lists.newArrayList(); for (int i = 1; i < node.getChildren().size(); ++i) { args.add((LambdaArgument) node.getChild(i)); String name = ((LambdaArgument) node.getChild(i)).getName(); if (set.contains(name)) { throw new SemanticException("Lambda argument: " + name + " is duplicated", node.getChild(i).getPos()); } set.add(name); ((LambdaArgument) node.getChild(i)).setNullable(scope.getLambdaInputs().get(i - 1).isNullable()); node.getChild(i).setType(scope.getLambdaInputs().get(i - 1).getType()); } Scope lambdaScope = new Scope(args, scope); ExpressionAnalyzer.analyzeExpression(node.getChild(0), this.analyzeState, lambdaScope, this.session); node.setType(Type.FUNCTION); scope.clearLambdaInputs(); return null; } @Override public Void visitCompoundPredicate(CompoundPredicate node, Scope scope) { for (int i = 0; i < node.getChildren().size(); i++) { Type type = node.getChild(i).getType(); if (!type.isBoolean() && !type.isNull()) { String msg = String.format("Operand '%s' part of predicate " + "'%s' should return type 'BOOLEAN' but returns type '%s'", AstToStringBuilder.toString(node), AstToStringBuilder.toString(node.getChild(i)), type.toSql()); throw new SemanticException(msg, node.getChild(i).getPos()); } } node.setType(Type.BOOLEAN); return null; } @Override public Void visitBetweenPredicate(BetweenPredicate node, Scope scope) { predicateBaseAndCheck(node); List<Type> list = node.getChildren().stream().map(Expr::getType).collect(Collectors.toList()); Type compatibleType = TypeManager.getCompatibleTypeForBetweenAndIn(list); for (Type type : list) { if (!Type.canCastTo(type, compatibleType)) { throw new SemanticException( "between predicate type " + type.toSql() + " with type " + compatibleType.toSql() + " is invalid", node.getPos()); } } return null; } @Override public Void visitBinaryPredicate(BinaryPredicate node, Scope scope) { Type type1 = node.getChild(0).getType(); Type type2 = node.getChild(1).getType(); Type compatibleType = 
TypeManager.getCompatibleTypeForBinary(node.getOp().isNotRangeComparison(), type1, type2); final String ERROR_MSG = "Column type %s does not support binary predicate operation"; if (!Type.canCastTo(type1, compatibleType)) { throw new SemanticException(String.format(ERROR_MSG, type1.toSql()), node.getPos()); } if (!Type.canCastTo(type2, compatibleType)) { throw new SemanticException(String.format(ERROR_MSG, type1.toSql()), node.getPos()); } node.setType(Type.BOOLEAN); return null; } @Override public Void visitArithmeticExpr(ArithmeticExpr node, Scope scope) { if (node.getOp().getPos() == ArithmeticExpr.OperatorPosition.BINARY_INFIX) { ArithmeticExpr.Operator op = node.getOp(); Type t1 = node.getChild(0).getType().getNumResultType(); Type t2 = node.getChild(1).getType().getNumResultType(); if (t1.isDecimalV3() || t2.isDecimalV3()) { try { node.rewriteDecimalOperation(); } catch (AnalysisException ex) { throw new SemanticException(ex.getMessage()); } Type lhsType = node.getChild(0).getType(); Type rhsType = node.getChild(1).getType(); Type resultType = node.getType(); Type[] args = {lhsType, rhsType}; Function fn = Expr.getBuiltinFunction(op.getName(), args, Function.CompareMode.IS_IDENTICAL); Function newFn = new ScalarFunction(fn.getFunctionName(), args, resultType, fn.hasVarArgs()); node.setType(resultType); node.setFn(newFn); return null; } Type lhsType; Type rhsType; switch (op) { case MULTIPLY: case ADD: case SUBTRACT: lhsType = ArithmeticExpr.getBiggerType(ArithmeticExpr.getCommonType(t1, t2)); rhsType = lhsType; break; case MOD: lhsType = ArithmeticExpr.getCommonType(t1, t2); rhsType = lhsType; break; case DIVIDE: lhsType = ArithmeticExpr.getCommonType(t1, t2); if (lhsType.isFixedPointType()) { lhsType = Type.DOUBLE; } rhsType = lhsType; break; case INT_DIVIDE: case BITAND: case BITOR: case BITXOR: lhsType = ArithmeticExpr.getCommonType(t1, t2); if (!lhsType.isFixedPointType()) { lhsType = Type.BIGINT; } rhsType = lhsType; break; case BIT_SHIFT_LEFT: case 
BIT_SHIFT_RIGHT: case BIT_SHIFT_RIGHT_LOGICAL: lhsType = t1; rhsType = Type.BIGINT; break; default: throw new SemanticException("Unknown arithmetic operation " + op + " in: " + node, node.getPos()); } if (node.getChild(0).getType().equals(Type.NULL) && node.getChild(1).getType().equals(Type.NULL)) { lhsType = Type.NULL; rhsType = Type.NULL; } if (lhsType.isInvalid() || rhsType.isInvalid()) { throw new SemanticException("Any function type can not cast to " + Type.INVALID.toSql()); } if (!Type.NULL.equals(node.getChild(0).getType()) && !Type.canCastTo(t1, lhsType)) { throw new SemanticException( "cast type " + node.getChild(0).getType().toSql() + " with type " + lhsType.toSql() + " is invalid", node.getPos()); } if (!Type.NULL.equals(node.getChild(1).getType()) && !Type.canCastTo(t2, rhsType)) { throw new SemanticException( "cast type " + node.getChild(1).getType().toSql() + " with type " + rhsType.toSql() + " is invalid", node.getPos()); } Function fn = Expr.getBuiltinFunction(op.getName(), new Type[] {lhsType, rhsType}, Function.CompareMode.IS_SUPERTYPE_OF); if (fn == null) { throw new SemanticException(String.format( "No matching function '%s' with operand types %s and %s", node.getOp().getName(), t1, t2)); } /* * commonType is the common type of the parameters of the function, * and fn.getReturnType() is the return type of the function after execution * So we use fn.getReturnType() as node type */ node.setType(fn.getReturnType()); node.setFn(fn); } else if (node.getOp().getPos() == ArithmeticExpr.OperatorPosition.UNARY_PREFIX) { Function fn = Expr.getBuiltinFunction( node.getOp().getName(), new Type[] {Type.BIGINT}, Function.CompareMode.IS_SUPERTYPE_OF); node.setType(Type.BIGINT); node.setFn(fn); } else if (node.getOp().getPos() == ArithmeticExpr.OperatorPosition.UNARY_POSTFIX) { throw new SemanticException("not yet implemented: expression analyzer for " + node.getClass().getName(), node.getPos()); } else { throw new SemanticException("not yet implemented: 
expression analyzer for " + node.getClass().getName(), node.getPos()); } return null; } @Override public Void visitTimestampArithmeticExpr(TimestampArithmeticExpr node, Scope scope) { node.setChild(0, TypeManager.addCastExpr(node.getChild(0), Type.DATETIME)); String funcOpName; if (node.getFuncName() != null) { if (ADD_DATE_FUNCTIONS.contains(node.getFuncName())) { funcOpName = String.format("%sS_%s", node.getTimeUnitIdent(), "add"); } else if (SUB_DATE_FUNCTIONS.contains(node.getFuncName())) { funcOpName = String.format("%sS_%s", node.getTimeUnitIdent(), "sub"); } else { node.setChild(1, TypeManager.addCastExpr(node.getChild(1), Type.DATETIME)); funcOpName = String.format("%sS_%s", node.getTimeUnitIdent(), "diff"); } } else { funcOpName = String.format("%sS_%s", node.getTimeUnitIdent(), (node.getOp() == ArithmeticExpr.Operator.ADD) ? "add" : "sub"); } Type[] argumentTypes = node.getChildren().stream().map(Expr::getType) .toArray(Type[]::new); Function fn = Expr.getBuiltinFunction(funcOpName.toLowerCase(), argumentTypes, Function.CompareMode.IS_NONSTRICT_SUPERTYPE_OF); if (fn == null) { String msg = String.format("No matching function with signature: %s(%s)", funcOpName, Joiner.on(", ") .join(Arrays.stream(argumentTypes).map(Type::toSql).collect(Collectors.toList()))); throw new SemanticException(msg, node.getPos()); } node.setType(fn.getReturnType()); node.setFn(fn); return null; } @Override public Void visitExistsPredicate(ExistsPredicate node, Scope scope) { predicateBaseAndCheck(node); return null; } @Override public Void visitInPredicate(InPredicate node, Scope scope) { predicateBaseAndCheck(node); List<Expr> queryExpressions = Lists.newArrayList(); node.collect(arg -> arg instanceof Subquery, queryExpressions); if (queryExpressions.size() > 0 && node.getChildren().size() > 2) { throw new SemanticException("In Predicate only support literal expression list", node.getPos()); } List<Type> list = 
node.getChildren().stream().map(Expr::getType).collect(Collectors.toList()); Type compatibleType = TypeManager.getCompatibleTypeForBetweenAndIn(list); for (Expr child : node.getChildren()) { Type type = child.getType(); if (type.isJsonType()) { throw new SemanticException("InPredicate of JSON is not supported", child.getPos()); } if (!Type.canCastTo(type, compatibleType)) { throw new SemanticException( "in predicate type " + type.toSql() + " with type " + compatibleType.toSql() + " is invalid", child.getPos()); } } return null; } @Override public Void visitMultiInPredicate(MultiInPredicate node, Scope scope) { predicateBaseAndCheck(node); List<Type> leftTypes = node.getChildren().stream().limit(node.getNumberOfColumns()).map(Expr::getType) .collect(Collectors.toList()); Subquery inSubquery = (Subquery) node.getChild(node.getNumberOfColumns()); List<Type> rightTypes = inSubquery.getQueryStatement().getQueryRelation().getOutputExpression().stream().map(Expr::getType). collect(Collectors.toList()); if (leftTypes.size() != rightTypes.size()) { throw new SemanticException( "subquery must return the same number of columns as provided by the IN predicate", node.getPos()); } for (int i = 0; i < rightTypes.size(); ++i) { if (leftTypes.get(i).isJsonType() || rightTypes.get(i).isJsonType() || leftTypes.get(i).isMapType() || rightTypes.get(i).isMapType() || leftTypes.get(i).isStructType() || rightTypes.get(i).isStructType()) { throw new SemanticException("InPredicate of JSON, Map, Struct types is not supported"); } if (!Type.canCastTo(leftTypes.get(i), rightTypes.get(i))) { throw new SemanticException( "in predicate type " + leftTypes.get(i).toSql() + " with type " + rightTypes.get(i).toSql() + " is invalid"); } } return null; } @Override public Void visitLiteral(LiteralExpr node, Scope scope) { if (node instanceof LargeIntLiteral) { BigInteger value = ((LargeIntLiteral) node).getValue(); if (value.compareTo(LargeIntLiteral.LARGE_INT_MIN) < 0 || 
value.compareTo(LargeIntLiteral.LARGE_INT_MAX) > 0) { throw new SemanticException(PARSER_ERROR_MSG.numOverflow(value.toString()), node.getPos()); } } return null; } @Override public Void visitIsNullPredicate(IsNullPredicate node, Scope scope) { predicateBaseAndCheck(node); return null; } @Override public Void visitLikePredicate(LikePredicate node, Scope scope) { predicateBaseAndCheck(node); Type type1 = node.getChild(0).getType(); Type type2 = node.getChild(1).getType(); if (!type1.isStringType() && !type1.isNull()) { throw new SemanticException( "left operand of " + node.getOp().toString() + " must be of type STRING: " + AstToStringBuilder.toString(node), node.getPos()); } if (!type2.isStringType() && !type2.isNull()) { throw new SemanticException( "right operand of " + node.getOp().toString() + " must be of type STRING: " + AstToStringBuilder.toString(node), node.getPos()); } if (LikePredicate.Operator.REGEXP.equals(node.getOp()) && !type2.isNull() && node.getChild(1).isLiteral()) { try { Pattern.compile(((StringLiteral) node.getChild(1)).getValue()); } catch (PatternSyntaxException e) { throw new SemanticException( "Invalid regular expression in '" + AstToStringBuilder.toString(node) + "'", node.getPos()); } } return null; } private void predicateBaseAndCheck(Predicate node) { node.setType(Type.BOOLEAN); for (Expr expr : node.getChildren()) { if (expr.getType().isOnlyMetricType() || (expr.getType().isComplexType() && !(node instanceof IsNullPredicate))) { throw new SemanticException( "HLL, BITMAP, PERCENTILE and ARRAY, MAP, STRUCT type couldn't as Predicate", node.getPos()); } } } @Override public Void visitCastExpr(CastExpr cast, Scope context) { Type castType; if (cast.isImplicit()) { castType = cast.getType(); } else { castType = cast.getTargetTypeDef().getType(); } if (!Type.canCastTo(cast.getChild(0).getType(), castType)) { throw new SemanticException("Invalid type cast from " + cast.getChild(0).getType().toSql() + " to " + castType.toSql() + " in sql `" + 
AstToStringBuilder.toString(cast.getChild(0)).replace("%", "%%") + "`", cast.getPos()); } cast.setType(castType); return null; } @Override private void checkFunction(String fnName, FunctionCallExpr node) { switch (fnName) { case FunctionSet.TIME_SLICE: case FunctionSet.DATE_SLICE: if (!(node.getChild(1) instanceof IntLiteral)) { throw new SemanticException( fnName + " requires second parameter must be a constant interval", node.getPos()); } if (((IntLiteral) node.getChild(1)).getValue() <= 0) { throw new SemanticException( fnName + " requires second parameter must be greater than 0", node.getPos()); } break; case FunctionSet.ARRAY_FILTER: if (node.getChildren().size() != 2) { throw new SemanticException(fnName + " should have 2 array inputs or lambda functions", node.getPos()); } if (!node.getChild(0).getType().isArrayType() && !node.getChild(0).getType().isNull()) { throw new SemanticException("The first input of " + fnName + " should be an array or a lambda function", node.getPos()); } if (!node.getChild(1).getType().isArrayType() && !node.getChild(1).getType().isNull()) { throw new SemanticException("The second input of " + fnName + " should be an array or a lambda function", node.getPos()); } if (!Type.canCastTo(node.getChild(1).getType(), Type.ARRAY_BOOLEAN)) { throw new SemanticException("The second input of array_filter " + node.getChild(1).getType().toString() + " can't cast to ARRAY<BOOL>", node.getPos()); } break; case FunctionSet.ARRAY_SORTBY: if (node.getChildren().size() != 2) { throw new SemanticException(fnName + " should have 2 array inputs or lambda functions", node.getPos()); } if (!node.getChild(0).getType().isArrayType() && !node.getChild(0).getType().isNull()) { throw new SemanticException("The first input of " + fnName + " should be an array or a lambda function", node.getPos()); } if (!node.getChild(1).getType().isArrayType() && !node.getChild(1).getType().isNull()) { throw new SemanticException("The second input of " + fnName + " should be 
an array or a lambda function", node.getPos()); } break; case FunctionSet.ARRAY_CONCAT: if (node.getChildren().size() < 2) { throw new SemanticException(fnName + " should have at least two inputs", node.getPos()); } break; case FunctionSet.ARRAY_GENERATE: if (node.getChildren().size() < 1 || node.getChildren().size() > 3) { throw new SemanticException(fnName + " has wrong input numbers"); } for (Expr expr : node.getChildren()) { if ((expr instanceof SlotRef) && node.getChildren().size() != 3) { throw new SemanticException(fnName + " with IntColumn doesn't support default parameters"); } if (!(expr instanceof IntLiteral) && !(expr instanceof LargeIntLiteral) && !(expr instanceof SlotRef) && !(expr instanceof NullLiteral)) { throw new SemanticException(fnName + "'s parameter only support Integer"); } } break; case FunctionSet.MAP_FILTER: if (node.getChildren().size() != 2) { throw new SemanticException(fnName + " should have 2 inputs, " + "but there are just " + node.getChildren().size() + " inputs."); } if (!node.getChild(0).getType().isMapType() && !node.getChild(0).getType().isNull()) { throw new SemanticException("The first input of " + fnName + " should be a map or a lambda function."); } if (!node.getChild(1).getType().isArrayType() && !node.getChild(1).getType().isNull()) { throw new SemanticException("The second input of " + fnName + " should be a array or a lambda function."); } if (!Type.canCastTo(node.getChild(1).getType(), Type.ARRAY_BOOLEAN)) { throw new SemanticException("The second input of map_filter " + node.getChild(1).getType().toString() + " can't cast to ARRAY<BOOL>"); } break; } } private Function getStrToDateFunction(FunctionCallExpr node, Type[] argumentTypes) { /* * @TODO: Determine the return type of this function * If is format is constant and don't contains time part, return date type, to compatible with mysql. * In fact we don't want to support str_to_date return date like mysql, reason: * 1. 
The return type of FE/BE str_to_date function signature is datetime, return date * let type different, it's will throw unpredictable error * 2. Support return date and datetime at same time in one function is complicated. * 3. The meaning of the function is confusing. In mysql, will return date if format is a constant * string and it's not contains "%H/%M/%S" pattern, but it's a trick logic, if format is a variable * expression, like: str_to_date(col1, col2), and the col2 is '%Y%m%d', the result always be * datetime. */ Function fn = Expr.getBuiltinFunction(node.getFnName().getFunction(), argumentTypes, Function.CompareMode.IS_NONSTRICT_SUPERTYPE_OF); if (fn == null) { return null; } if (!node.getChild(1).isConstant()) { return fn; } ExpressionMapping expressionMapping = new ExpressionMapping(new Scope(RelationId.anonymous(), new RelationFields()), com.google.common.collect.Lists.newArrayList()); ScalarOperator format = SqlToScalarOperatorTranslator.translate(node.getChild(1), expressionMapping, new ColumnRefFactory()); if (format.isConstantRef() && !HAS_TIME_PART.matcher(format.toString()).matches()) { return Expr.getBuiltinFunction("str2date", argumentTypes, Function.CompareMode.IS_NONSTRICT_SUPERTYPE_OF); } return fn; } private Function getArrayGenerateFunction(FunctionCallExpr node) { if (node.getChildren().size() == 1) { LiteralExpr secondParam = (LiteralExpr) node.getChild(0); node.clearChildren(); node.addChild(new IntLiteral(1)); node.addChild(secondParam); } if (node.getChildren().size() == 2) { int idx = 0; BigInteger[] childValues = new BigInteger[2]; Boolean hasNUll = false; for (Expr expr : node.getChildren()) { if (expr instanceof NullLiteral) { hasNUll = true; } else if (expr instanceof IntLiteral) { childValues[idx++] = BigInteger.valueOf(((IntLiteral) expr).getValue()); } else { childValues[idx++] = ((LargeIntLiteral) expr).getValue(); } } if (hasNUll || childValues[0].compareTo(childValues[1]) < 0) { node.addChild(new IntLiteral(1)); } else { 
node.addChild(new IntLiteral(-1)); } } Type[] argumentTypes = node.getChildren().stream().map(Expr::getType).toArray(Type[]::new); return Expr.getBuiltinFunction(FunctionSet.ARRAY_GENERATE, argumentTypes, Function.CompareMode.IS_SUPERTYPE_OF); } @Override public Void visitGroupingFunctionCall(GroupingFunctionCallExpr node, Scope scope) { if (node.getChildren().size() < 1) { throw new SemanticException("GROUPING functions required at least one parameters", node.getPos()); } if (node.getChildren().stream().anyMatch(e -> !(e instanceof SlotRef))) { throw new SemanticException("grouping functions only support column", node.getPos()); } Type[] childTypes = new Type[1]; childTypes[0] = Type.BIGINT; Function fn = Expr.getBuiltinFunction(node.getFnName().getFunction(), childTypes, Function.CompareMode.IS_IDENTICAL); node.setFn(fn); node.setType(fn.getReturnType()); return null; } @Override public Void visitCaseWhenExpr(CaseExpr node, Scope context) { int start = 0; int end = node.getChildren().size(); Expr caseExpr = null; Expr elseExpr = null; if (node.hasCaseExpr()) { caseExpr = node.getChild(0); start++; } if (node.hasElseExpr()) { elseExpr = node.getChild(end - 1); end--; } if (node.getChildren().stream().anyMatch(d -> !d.getType().isScalarType())) { throw new SemanticException("case-when only support scalar type", node.getPos()); } List<Type> whenTypes = Lists.newArrayList(); if (null != caseExpr) { whenTypes.add(caseExpr.getType()); } for (int i = start; i < end; i = i + 2) { whenTypes.add(node.getChild(i).getType()); } Type compatibleType = Type.NULL; if (null != caseExpr) { compatibleType = TypeManager.getCompatibleTypeForCaseWhen(whenTypes); } for (Type type : whenTypes) { if (!Type.canCastTo(type, compatibleType)) { throw new SemanticException("Invalid when type cast " + type.toSql() + " to " + compatibleType.toSql(), node.getPos()); } } List<Type> thenTypes = Lists.newArrayList(); for (int i = start + 1; i < end; i = i + 2) { 
thenTypes.add(node.getChild(i).getType()); } if (null != elseExpr) { thenTypes.add(elseExpr.getType()); } Type returnType = thenTypes.stream().allMatch(Type.NULL::equals) ? Type.BOOLEAN : TypeManager.getCompatibleTypeForCaseWhen(thenTypes); for (Type type : thenTypes) { if (!Type.canCastTo(type, returnType)) { throw new SemanticException("Invalid then type cast " + type.toSql() + " to " + returnType.toSql(), node.getPos()); } } node.setType(returnType); return null; } @Override public Void visitSubquery(Subquery node, Scope context) { QueryAnalyzer queryAnalyzer = new QueryAnalyzer(session); queryAnalyzer.analyze(node.getQueryStatement(), context); node.setType(node.getQueryStatement().getQueryRelation().getRelationFields().getFieldByIndex(0).getType()); return null; } @Override public Void visitAnalyticExpr(AnalyticExpr node, Scope context) { visit(node.getFnCall(), context); node.setType(node.getFnCall().getType()); if (node.getWindow() != null) { if (node.getWindow().getLeftBoundary() != null && node.getWindow().getLeftBoundary().getExpr() != null) { visit(node.getWindow().getLeftBoundary().getExpr(), context); } if (node.getWindow().getRightBoundary() != null && node.getWindow().getRightBoundary().getExpr() != null) { visit(node.getWindow().getRightBoundary().getExpr(), context); } } node.getPartitionExprs().forEach(e -> visit(e, context)); node.getOrderByElements().stream().map(OrderByElement::getExpr).forEach(e -> visit(e, context)); verifyAnalyticExpression(node); return null; } @Override public Void visitInformationFunction(InformationFunction node, Scope context) { String funcType = node.getFuncType(); if (funcType.equalsIgnoreCase("DATABASE") || funcType.equalsIgnoreCase("SCHEMA")) { node.setType(Type.VARCHAR); node.setStrValue(ClusterNamespace.getNameFromFullName(session.getDatabase())); } else if (funcType.equalsIgnoreCase("USER")) { node.setType(Type.VARCHAR); String user = session.getQualifiedUser(); String remoteIP = session.getRemoteIP(); 
node.setStrValue(new UserIdentity(user, remoteIP).toString()); } else if (funcType.equalsIgnoreCase("CURRENT_USER")) { node.setType(Type.VARCHAR); node.setStrValue(session.getCurrentUserIdentity().toString()); } else if (funcType.equalsIgnoreCase("CURRENT_ROLE")) { node.setType(Type.VARCHAR); AuthorizationManager manager = session.getGlobalStateMgr().getAuthorizationManager(); List<String> roleName = new ArrayList<>(); try { for (Long roleId : session.getCurrentRoleIds()) { RolePrivilegeCollection rolePrivilegeCollection = manager.getRolePrivilegeCollectionUnlocked(roleId, false); if (rolePrivilegeCollection != null) { roleName.add(rolePrivilegeCollection.getName()); } } } catch (PrivilegeException e) { throw new SemanticException(e.getMessage()); } if (roleName.isEmpty()) { node.setStrValue("NONE"); } else { node.setStrValue(Joiner.on(", ").join(roleName)); } } else if (funcType.equalsIgnoreCase("CONNECTION_ID")) { node.setType(Type.BIGINT); node.setIntValue(session.getConnectionId()); node.setStrValue(""); } else if (funcType.equalsIgnoreCase("CURRENT_CATALOG")) { node.setType(Type.VARCHAR); node.setStrValue(session.getCurrentCatalog().toString()); } return null; } @Override public Void visitVariableExpr(VariableExpr node, Scope context) { try { if (node.getSetType().equals(SetType.USER)) { UserVariable userVariable = session.getUserVariables(node.getName()); if (userVariable == null) { node.setType(Type.STRING); node.setIsNull(); return null; } Type variableType = userVariable.getEvaluatedExpression().getType(); node.setType(variableType); if (userVariable.getEvaluatedExpression() instanceof NullLiteral) { node.setIsNull(); } else { node.setValue(userVariable.getEvaluatedExpression().getRealObjectValue()); } } else { VariableMgr.fillValue(session.getSessionVariable(), node); if (!Strings.isNullOrEmpty(node.getName()) && node.getName().equalsIgnoreCase(SessionVariable.SQL_MODE)) { node.setType(Type.VARCHAR); node.setValue(SqlModeHelper.decode((long) 
node.getValue())); } } } catch (AnalysisException | DdlException e) { throw new SemanticException(e.getMessage()); } return null; } @Override public Void visitDefaultValueExpr(DefaultValueExpr node, Scope context) { node.setType(Type.VARCHAR); return null; } @Override public Void visitCloneExpr(CloneExpr node, Scope context) { return null; } }
Oh, I see — sorry, I thought it was part of the generated code.
/**
 * Maps from {@link CommunicationIdentifierModel} to {@link CommunicationIdentifier}.
 *
 * @param identifier the wire-format identifier model; may be {@code null}
 * @return the matching {@link CommunicationIdentifier} subtype, or {@code null} when
 *     {@code identifier} is {@code null}
 * @throws NullPointerException when a required property of the model is missing
 */
public static CommunicationIdentifier convert(CommunicationIdentifierModel identifier) {
    if (identifier == null) {
        return null;
    }
    assertSingleType(identifier);
    String rawId = identifier.getRawId();

    // The service may omit the kind discriminator; infer it from whichever
    // sub-model is populated so a single set of branches below serves both cases.
    // (The previous implementation duplicated every branch for the null-kind path.)
    CommunicationIdentifierModelKind kind = identifier.getKind();
    if (kind == null) {
        if (identifier.getCommunicationUser() != null) {
            kind = CommunicationIdentifierModelKind.COMMUNICATION_USER;
        } else if (identifier.getPhoneNumber() != null) {
            kind = CommunicationIdentifierModelKind.PHONE_NUMBER;
        } else if (identifier.getMicrosoftTeamsUser() != null) {
            kind = CommunicationIdentifierModelKind.MICROSOFT_TEAMS_USER;
        } else {
            kind = CommunicationIdentifierModelKind.UNKNOWN;
        }
    }

    if (kind == CommunicationIdentifierModelKind.COMMUNICATION_USER
        && identifier.getCommunicationUser() != null) {
        Objects.requireNonNull(identifier.getCommunicationUser().getId(),
            "'ID' of the CommunicationIdentifierModel cannot be null.");
        return new CommunicationUserIdentifier(identifier.getCommunicationUser().getId());
    }

    if (kind == CommunicationIdentifierModelKind.PHONE_NUMBER && identifier.getPhoneNumber() != null) {
        String phoneNumber = identifier.getPhoneNumber().getValue();
        Objects.requireNonNull(phoneNumber,
            "'PhoneNumber' of the CommunicationIdentifierModel cannot be null.");
        Objects.requireNonNull(rawId, "'RawID' of the CommunicationIdentifierModel cannot be null.");
        return new PhoneNumberIdentifier(phoneNumber).setRawId(rawId);
    }

    if (kind == CommunicationIdentifierModelKind.MICROSOFT_TEAMS_USER
        && identifier.getMicrosoftTeamsUser() != null) {
        MicrosoftTeamsUserIdentifierModel teamsUserIdentifierModel = identifier.getMicrosoftTeamsUser();
        Objects.requireNonNull(teamsUserIdentifierModel.getUserId(),
            "'UserID' of the CommunicationIdentifierModel cannot be null.");
        Objects.requireNonNull(teamsUserIdentifierModel.getCloud(),
            "'Cloud' of the CommunicationIdentifierModel cannot be null.");
        Objects.requireNonNull(rawId, "'RawID' of the CommunicationIdentifierModel cannot be null.");
        return new MicrosoftTeamsUserIdentifier(teamsUserIdentifierModel.getUserId(),
            teamsUserIdentifierModel.isAnonymous())
            .setRawId(rawId)
            .setCloudEnvironment(CommunicationCloudEnvironment
                .fromString(teamsUserIdentifierModel.getCloud().toString()));
    }

    // Anything else — including a kind/sub-model mismatch — is surfaced as an
    // opaque identifier keyed by its raw id.
    Objects.requireNonNull(rawId, "'RawID' of the CommunicationIdentifierModel cannot be null.");
    return new UnknownIdentifier(rawId);
}
CommunicationIdentifierModelKind kind = identifier.getKind();
/**
 * Maps from {@link CommunicationIdentifierModel} to {@link CommunicationIdentifier}.
 *
 * @param identifier the wire-format identifier model; may be {@code null}
 * @return the matching {@link CommunicationIdentifier} subtype, or {@code null} when the
 *     input is {@code null}
 * @throws NullPointerException when a required property of the model is missing
 */
public static CommunicationIdentifier convert(CommunicationIdentifierModel identifier) {
    if (identifier == null) {
        return null;
    }
    assertSingleType(identifier);

    String rawId = identifier.getRawId();

    // Prefer the discriminator supplied by the service; derive it from the
    // populated sub-model only when the service did not send one.
    CommunicationIdentifierModelKind kind = identifier.getKind();
    if (kind == null) {
        kind = extractKind(identifier);
    }

    if (kind == CommunicationIdentifierModelKind.COMMUNICATION_USER
        && identifier.getCommunicationUser() != null) {
        String userId = identifier.getCommunicationUser().getId();
        Objects.requireNonNull(userId, "'ID' of the CommunicationIdentifierModel cannot be null.");
        return new CommunicationUserIdentifier(userId);
    }

    if (kind == CommunicationIdentifierModelKind.PHONE_NUMBER && identifier.getPhoneNumber() != null) {
        String number = identifier.getPhoneNumber().getValue();
        Objects.requireNonNull(number,
            "'PhoneNumber' of the CommunicationIdentifierModel cannot be null.");
        Objects.requireNonNull(rawId, "'RawID' of the CommunicationIdentifierModel cannot be null.");
        return new PhoneNumberIdentifier(number).setRawId(rawId);
    }

    if (kind == CommunicationIdentifierModelKind.MICROSOFT_TEAMS_USER
        && identifier.getMicrosoftTeamsUser() != null) {
        MicrosoftTeamsUserIdentifierModel teamsModel = identifier.getMicrosoftTeamsUser();
        Objects.requireNonNull(teamsModel.getUserId(),
            "'UserID' of the CommunicationIdentifierModel cannot be null.");
        Objects.requireNonNull(teamsModel.getCloud(),
            "'Cloud' of the CommunicationIdentifierModel cannot be null.");
        Objects.requireNonNull(rawId, "'RawID' of the CommunicationIdentifierModel cannot be null.");
        return new MicrosoftTeamsUserIdentifier(teamsModel.getUserId(), teamsModel.isAnonymous())
            .setRawId(rawId)
            .setCloudEnvironment(CommunicationCloudEnvironment
                .fromString(teamsModel.getCloud().toString()));
    }

    // Fallback: anything unrecognised becomes an opaque identifier keyed by raw id.
    Objects.requireNonNull(rawId, "'RawID' of the CommunicationIdentifierModel cannot be null.");
    return new UnknownIdentifier(rawId);
}
/**
 * Internal converter between the public {@link CommunicationIdentifier} hierarchy and the
 * wire-format {@link CommunicationIdentifierModel}.
 */
class CommunicationIdentifierConverter {

    /**
     * Maps from {@link CommunicationIdentifierModel} to {@link CommunicationIdentifier}.
     */

    /**
     * Maps from {@link CommunicationIdentifier} to {@link CommunicationIdentifierModel}.
     *
     * @param identifier the public identifier to convert; may be {@code null}
     * @return the wire-format model, or {@code null} when {@code identifier} is {@code null}
     * @throws IllegalArgumentException when the identifier is not one of the known subtypes
     */
    public static CommunicationIdentifierModel convert(CommunicationIdentifier identifier)
        throws IllegalArgumentException {
        if (identifier == null) {
            return null;
        }

        if (identifier instanceof CommunicationUserIdentifier) {
            CommunicationUserIdentifier communicationUserIdentifier = (CommunicationUserIdentifier) identifier;
            return new CommunicationIdentifierModel()
                .setRawId(communicationUserIdentifier.getRawId())
                .setCommunicationUser(
                    new CommunicationUserIdentifierModel().setId(communicationUserIdentifier.getId()));
        }

        if (identifier instanceof PhoneNumberIdentifier) {
            PhoneNumberIdentifier phoneNumberIdentifier = (PhoneNumberIdentifier) identifier;
            return new CommunicationIdentifierModel()
                .setRawId(phoneNumberIdentifier.getRawId())
                .setPhoneNumber(new PhoneNumberIdentifierModel().setValue(phoneNumberIdentifier.getPhoneNumber()));
        }

        if (identifier instanceof MicrosoftTeamsUserIdentifier) {
            MicrosoftTeamsUserIdentifier teamsUserIdentifier = (MicrosoftTeamsUserIdentifier) identifier;
            return new CommunicationIdentifierModel()
                .setRawId(teamsUserIdentifier.getRawId())
                .setMicrosoftTeamsUser(new MicrosoftTeamsUserIdentifierModel()
                    .setIsAnonymous(teamsUserIdentifier.isAnonymous())
                    .setUserId(teamsUserIdentifier.getUserId())
                    .setCloud(CommunicationCloudEnvironmentModel.fromString(
                        teamsUserIdentifier.getCloudEnvironment().toString())));
        }

        if (identifier instanceof UnknownIdentifier) {
            UnknownIdentifier unknownIdentifier = (UnknownIdentifier) identifier;
            return new CommunicationIdentifierModel().setRawId(unknownIdentifier.getId());
        }

        // No known subtype matched; fail loudly rather than emitting an empty model.
        throw new IllegalArgumentException(String.format("Unknown identifier class '%s'",
            identifier.getClass().getName()));
    }

    /**
     * Asserts that at most one of the nested identifier sub-models is populated.
     *
     * @param identifier the model to validate
     * @throws IllegalArgumentException when more than one sub-model is present
     */
    private static void assertSingleType(CommunicationIdentifierModel identifier) {
        CommunicationUserIdentifierModel communicationUser = identifier.getCommunicationUser();
        PhoneNumberIdentifierModel phoneNumber = identifier.getPhoneNumber();
        MicrosoftTeamsUserIdentifierModel microsoftTeamsUser = identifier.getMicrosoftTeamsUser();

        // Collect the class names of every populated sub-model for the error message.
        ArrayList<String> presentProperties = new ArrayList<>();
        if (communicationUser != null) {
            presentProperties.add(communicationUser.getClass().getName());
        }
        if (phoneNumber != null) {
            presentProperties.add(phoneNumber.getClass().getName());
        }
        if (microsoftTeamsUser != null) {
            presentProperties.add(microsoftTeamsUser.getClass().getName());
        }

        if (presentProperties.size() > 1) {
            throw new IllegalArgumentException(
                String.format(
                    "Only one of the identifier models in %s should be present.",
                    String.join(", ", presentProperties)));
        }
    }
}
/**
 * Converts between the public {@link CommunicationIdentifier} types and their wire-format
 * {@link CommunicationIdentifierModel} representation.
 */
class CommunicationIdentifierConverter {

    /**
     * Maps from {@link CommunicationIdentifierModel} to {@link CommunicationIdentifier}.
     */

    /**
     * Maps from {@link CommunicationIdentifier} to {@link CommunicationIdentifierModel}.
     *
     * @param identifier the public identifier; may be {@code null}
     * @return the wire-format model, or {@code null} for a {@code null} input
     * @throws IllegalArgumentException for an unrecognised identifier subtype
     */
    public static CommunicationIdentifierModel convert(CommunicationIdentifier identifier)
        throws IllegalArgumentException {
        if (identifier == null) {
            return null;
        }

        if (identifier instanceof CommunicationUserIdentifier) {
            CommunicationUserIdentifier user = (CommunicationUserIdentifier) identifier;
            return new CommunicationIdentifierModel()
                .setRawId(user.getRawId())
                .setCommunicationUser(new CommunicationUserIdentifierModel().setId(user.getId()));
        }

        if (identifier instanceof PhoneNumberIdentifier) {
            PhoneNumberIdentifier phone = (PhoneNumberIdentifier) identifier;
            return new CommunicationIdentifierModel()
                .setRawId(phone.getRawId())
                .setPhoneNumber(new PhoneNumberIdentifierModel().setValue(phone.getPhoneNumber()));
        }

        if (identifier instanceof MicrosoftTeamsUserIdentifier) {
            MicrosoftTeamsUserIdentifier teamsUser = (MicrosoftTeamsUserIdentifier) identifier;
            return new CommunicationIdentifierModel()
                .setRawId(teamsUser.getRawId())
                .setMicrosoftTeamsUser(new MicrosoftTeamsUserIdentifierModel()
                    .setIsAnonymous(teamsUser.isAnonymous())
                    .setUserId(teamsUser.getUserId())
                    .setCloud(CommunicationCloudEnvironmentModel.fromString(
                        teamsUser.getCloudEnvironment().toString())));
        }

        if (identifier instanceof UnknownIdentifier) {
            UnknownIdentifier unknown = (UnknownIdentifier) identifier;
            return new CommunicationIdentifierModel().setRawId(unknown.getId());
        }

        throw new IllegalArgumentException(String.format("Unknown identifier class '%s'",
            identifier.getClass().getName()));
    }

    /**
     * Verifies that at most one of the nested identifier sub-models is populated.
     *
     * @param identifier the model to validate
     * @throws IllegalArgumentException when more than one sub-model is present
     */
    private static void assertSingleType(CommunicationIdentifierModel identifier) {
        ArrayList<String> populated = new ArrayList<>();
        if (identifier.getCommunicationUser() != null) {
            populated.add(identifier.getCommunicationUser().getClass().getName());
        }
        if (identifier.getPhoneNumber() != null) {
            populated.add(identifier.getPhoneNumber().getClass().getName());
        }
        if (identifier.getMicrosoftTeamsUser() != null) {
            populated.add(identifier.getMicrosoftTeamsUser().getClass().getName());
        }

        if (populated.size() > 1) {
            throw new IllegalArgumentException(
                String.format(
                    "Only one of the identifier models in %s should be present.",
                    String.join(", ", populated)));
        }
    }

    /**
     * Derives the identifier kind from whichever sub-model is populated; used when the
     * service response carries no explicit kind.
     */
    private static CommunicationIdentifierModelKind extractKind(CommunicationIdentifierModel identifier) {
        Objects.requireNonNull(identifier, "CommunicationIdentifierModel cannot be null.");
        if (identifier.getCommunicationUser() != null) {
            return CommunicationIdentifierModelKind.COMMUNICATION_USER;
        }
        if (identifier.getPhoneNumber() != null) {
            return CommunicationIdentifierModelKind.PHONE_NUMBER;
        }
        if (identifier.getMicrosoftTeamsUser() != null) {
            return CommunicationIdentifierModelKind.MICROSOFT_TEAMS_USER;
        }
        return CommunicationIdentifierModelKind.UNKNOWN;
    }
}
But are you sure that all the response handlers we added earlier are always executed on the right thread at the right time? That's what worries me a bit. Note that I have no idea how this all works, so I'm asking naive questions.
/**
 * Entry point for an incoming GraphQL HTTP request: ties termination of the Arc
 * request context to the Vert.x response lifecycle, then dispatches the request
 * under the caller's security identity.
 *
 * NOTE(review): the context is *deactivated* here as soon as
 * {@code handleWithIdentity} returns, while *termination* only happens via the
 * response end/exception/close handlers registered below — presumably so that
 * asynchronous completion can still reach the context state; confirm those
 * handlers always fire on the correct thread.
 */
public void handle(final RoutingContext ctx) {
    // Terminate the request context whenever the response ends, errors out,
    // or the connection is closed.
    ctx.response()
            .endHandler(currentManagedContextTerminationHandler)
            .exceptionHandler(currentManagedContextTerminationHandler)
            .closeHandler(currentManagedContextTerminationHandler);

    // Activate the Arc request context unless something upstream already did.
    if (!currentManagedContext.isActive()) {
        currentManagedContext.activate();
    }
    try {
        handleWithIdentity(ctx);
        // Deactivate (not terminate): the termination handlers above own teardown.
        currentManagedContext.deactivate();
    } catch (Throwable t) {
        // Synchronous failure: tear the context down immediately and rethrow.
        currentManagedContext.terminate();
        throw t;
    }
}
currentManagedContext.deactivate();
/**
 * Handles a GraphQL HTTP request. Registers the request-context termination
 * handler on the response (end/exception/close) and then routes the request
 * with the current security identity applied.
 *
 * NOTE(review): {@code deactivate()} runs right after {@code handleWithIdentity}
 * returns, whereas {@code terminate()} is deferred to the response lifecycle
 * handlers — this looks intentional to keep the context state reachable for
 * async completion, but the exact threading guarantees are not visible here;
 * verify with the Vert.x/Arc integration.
 */
public void handle(final RoutingContext ctx) {
    // Hook context termination into every way the response can finish.
    ctx.response()
            .endHandler(currentManagedContextTerminationHandler)
            .exceptionHandler(currentManagedContextTerminationHandler)
            .closeHandler(currentManagedContextTerminationHandler);

    // Only activate if no caller has already activated the request context.
    if (!currentManagedContext.isActive()) {
        currentManagedContext.activate();
    }
    try {
        handleWithIdentity(ctx);
        // Leave final teardown to the handlers registered above.
        currentManagedContext.deactivate();
    } catch (Throwable t) {
        // On a synchronous error the handlers may never fire; terminate now.
        currentManagedContext.terminate();
        throw t;
    }
}
/**
 * Base Vert.x handler for SmallRye GraphQL HTTP endpoints: manages the Arc
 * request context, propagates the security identity, and exposes helpers for
 * JSON parsing and per-request metadata.
 *
 * NOTE(review): this snippet declares an abstract method but the class is not
 * marked {@code abstract}, and the {@code @Override} below sits directly on a
 * private method — the concrete {@code Handler#handle} override appears to have
 * been elided from this excerpt; confirm against the full file.
 */
class SmallRyeGraphQLAbstractHandler implements Handler<RoutingContext> {

    private final CurrentIdentityAssociation currentIdentityAssociation;
    private final CurrentVertxRequest currentVertxRequest;
    // Arc request context; activated/terminated around each request.
    private final ManagedContext currentManagedContext;
    // Terminates the request context; bound to the response lifecycle.
    private final Handler currentManagedContextTerminationHandler;
    private final boolean runBlocking;
    // Lazily resolved from the Arc container (see getExecutionService).
    private volatile ExecutionService executionService;
    protected static final JsonReaderFactory jsonReaderFactory = Json.createReaderFactory(null);

    public SmallRyeGraphQLAbstractHandler(
            CurrentIdentityAssociation currentIdentityAssociation,
            CurrentVertxRequest currentVertxRequest,
            boolean runBlocking) {
        this.currentIdentityAssociation = currentIdentityAssociation;
        this.currentVertxRequest = currentVertxRequest;
        this.currentManagedContext = Arc.container().requestContext();
        this.runBlocking = runBlocking;
        this.currentManagedContextTerminationHandler = new Handler() {
            @Override
            public void handle(Object e) {
                currentManagedContext.terminate();
            }
        };
    }

    @Override
    // Applies the request's security identity (if any) before delegating to doHandle.
    private Void handleWithIdentity(final RoutingContext ctx) {
        if (currentIdentityAssociation != null) {
            QuarkusHttpUser existing = (QuarkusHttpUser) ctx.user();
            if (existing != null) {
                SecurityIdentity identity = existing.getSecurityIdentity();
                currentIdentityAssociation.setIdentity(identity);
            } else {
                // No authenticated user on the context; fall back to the anonymous identity.
                currentIdentityAssociation.setIdentity(QuarkusHttpUser.getSecurityIdentity(ctx, null));
            }
        }
        currentVertxRequest.setCurrent(ctx);
        doHandle(ctx);
        return null;
    }

    // Subclasses implement the actual GraphQL request handling.
    protected abstract void doHandle(final RoutingContext ctx);

    // Parses a raw request body into a JsonObject; the reader is closed automatically.
    protected JsonObject inputToJsonObject(String input) {
        try (JsonReader jsonReader = jsonReaderFactory.createReader(new StringReader(input))) {
            return jsonReader.readObject();
        }
    }

    // Lazy lookup of the ExecutionService from the Arc container.
    protected ExecutionService getExecutionService() {
        if (this.executionService == null) {
            this.executionService = Arc.container().instance(ExecutionService.class).get();
        }
        return this.executionService;
    }

    // Builds the per-request metadata map passed down to the execution layer.
    protected Map<String, Object> getMetaData(RoutingContext ctx) {
        Map<String, Object> metaData = new ConcurrentHashMap<>();
        metaData.put("runBlocking", runBlocking);
        metaData.put("httpHeaders", getHeaders(ctx));
        // Capture the current context state so it can be propagated to worker threads.
        InjectableContext.ContextState state = currentManagedContext.getState();
        metaData.put("state", state);
        return metaData;
    }

    // Copies all request headers (multi-valued) into a plain map.
    private Map<String, List<String>> getHeaders(RoutingContext ctx) {
        Map<String, List<String>> h = new HashMap<>();
        MultiMap headers = ctx.request().headers();
        for (String header : headers.names()) {
            h.put(header, headers.getAll(header));
        }
        return h;
    }
}
/**
 * Shared plumbing for SmallRye GraphQL endpoint handlers: request-context
 * lifecycle, identity propagation, JSON input parsing, and request metadata.
 *
 * NOTE(review): the class declares {@code doHandle} as abstract without itself
 * being {@code abstract}, and an {@code @Override} annotation precedes a private
 * method — this excerpt appears to have the {@code handle} override removed;
 * check the complete source.
 */
class SmallRyeGraphQLAbstractHandler implements Handler<RoutingContext> {

    private final CurrentIdentityAssociation currentIdentityAssociation;
    private final CurrentVertxRequest currentVertxRequest;
    // The Arc request context managed around each incoming request.
    private final ManagedContext currentManagedContext;
    // Fires currentManagedContext.terminate(); attached to response events.
    private final Handler currentManagedContextTerminationHandler;
    private final boolean runBlocking;
    // Resolved on first use; volatile for safe lazy initialisation.
    private volatile ExecutionService executionService;
    protected static final JsonReaderFactory jsonReaderFactory = Json.createReaderFactory(null);

    public SmallRyeGraphQLAbstractHandler(
            CurrentIdentityAssociation currentIdentityAssociation,
            CurrentVertxRequest currentVertxRequest,
            boolean runBlocking) {
        this.currentIdentityAssociation = currentIdentityAssociation;
        this.currentVertxRequest = currentVertxRequest;
        this.currentManagedContext = Arc.container().requestContext();
        this.runBlocking = runBlocking;
        this.currentManagedContextTerminationHandler = new Handler() {
            @Override
            public void handle(Object e) {
                currentManagedContext.terminate();
            }
        };
    }

    @Override
    // Sets the security identity for the request, records the routing context,
    // then hands off to the subclass implementation.
    private Void handleWithIdentity(final RoutingContext ctx) {
        if (currentIdentityAssociation != null) {
            QuarkusHttpUser existing = (QuarkusHttpUser) ctx.user();
            if (existing != null) {
                SecurityIdentity identity = existing.getSecurityIdentity();
                currentIdentityAssociation.setIdentity(identity);
            } else {
                // Unauthenticated request: associate the anonymous identity.
                currentIdentityAssociation.setIdentity(QuarkusHttpUser.getSecurityIdentity(ctx, null));
            }
        }
        currentVertxRequest.setCurrent(ctx);
        doHandle(ctx);
        return null;
    }

    // Concrete handlers implement the request processing here.
    protected abstract void doHandle(final RoutingContext ctx);

    // Converts the raw request payload into a JsonObject (reader auto-closed).
    protected JsonObject inputToJsonObject(String input) {
        try (JsonReader jsonReader = jsonReaderFactory.createReader(new StringReader(input))) {
            return jsonReader.readObject();
        }
    }

    // Lazily fetches the ExecutionService bean from the container.
    protected ExecutionService getExecutionService() {
        if (this.executionService == null) {
            this.executionService = Arc.container().instance(ExecutionService.class).get();
        }
        return this.executionService;
    }

    // Assembles metadata (blocking flag, headers, context state) for execution.
    protected Map<String, Object> getMetaData(RoutingContext ctx) {
        Map<String, Object> metaData = new ConcurrentHashMap<>();
        metaData.put("runBlocking", runBlocking);
        metaData.put("httpHeaders", getHeaders(ctx));
        // Snapshot of the managed context state for propagation across threads.
        InjectableContext.ContextState state = currentManagedContext.getState();
        metaData.put("state", state);
        return metaData;
    }

    // Flattens the Vert.x MultiMap of headers into a standard Map.
    private Map<String, List<String>> getHeaders(RoutingContext ctx) {
        Map<String, List<String>> h = new HashMap<>();
        MultiMap headers = ctx.request().headers();
        for (String header : headers.names()) {
            h.put(header, headers.getAll(header));
        }
        return h;
    }
}
```suggestion new String[] {"info name", "info value"}, ```
/**
 * Produces the DESCRIBE CATALOG result: the catalog's name, type and comment,
 * plus — in extended mode — every configured option as an {@code option:<key>} row.
 *
 * @param ctx execution context providing the catalog manager
 * @return a two-column ("info name" / "info value") table result
 * @throws ValidationException when the catalog's descriptor cannot be resolved
 */
public TableResultInternal execute(Context ctx) {
    CatalogDescriptor catalogDescriptor =
            ctx.getCatalogManager()
                    .getCatalogDescriptor(catalogName)
                    .orElseThrow(
                            () ->
                                    new ValidationException(
                                            String.format(
                                                    "Cannot obtain metadata information from Catalog %s.",
                                                    catalogName)));
    Map<String, String> properties = catalogDescriptor.getConfiguration().toMap();

    List<List<Object>> rows =
            new ArrayList<>(
                    Arrays.asList(
                            Arrays.asList("name", catalogName),
                            Arrays.asList(
                                    "type",
                                    properties.getOrDefault(
                                            CommonCatalogOptions.CATALOG_TYPE.key(), "")),
                            Arrays.asList("comment", "")));
    if (isExtended) {
        // The type option is already reported above; emit the remaining options
        // in deterministic (sorted-by-key) order instead of HashMap order.
        properties.entrySet().stream()
                .filter(
                        entry ->
                                !CommonCatalogOptions.CATALOG_TYPE.key().equals(entry.getKey()))
                .sorted(Map.Entry.comparingByKey())
                .forEach(
                        entry ->
                                rows.add(
                                        Arrays.asList(
                                                String.format("option:%s", entry.getKey()),
                                                entry.getValue())));
    }

    // Array literals instead of Arrays.asList(...).toArray(new T[0]).
    return buildTableResult(
            new String[] {"info name", "info value"},
            new DataType[] {DataTypes.STRING(), DataTypes.STRING()},
            rows.stream().map(List::toArray).toArray(Object[][]::new));
}
Arrays.asList("info name", "info value").toArray(new String[0]),
/**
 * Builds the DESCRIBE CATALOG output: name, type and comment rows, and — when
 * extended — one {@code option:<key>} row per remaining configured option,
 * sorted by key.
 *
 * @param ctx execution context providing the catalog manager
 * @return a two-column ("info name" / "info value") table result
 * @throws ValidationException when the catalog descriptor cannot be resolved
 */
public TableResultInternal execute(Context ctx) {
    final CatalogDescriptor descriptor =
            ctx.getCatalogManager()
                    .getCatalogDescriptor(catalogName)
                    .orElseThrow(
                            () ->
                                    new ValidationException(
                                            String.format(
                                                    "Cannot obtain metadata information from Catalog %s.",
                                                    catalogName)));
    final Map<String, String> options = descriptor.getConfiguration().toMap();
    final String typeKey = CommonCatalogOptions.CATALOG_TYPE.key();

    final List<List<Object>> rows = new ArrayList<>();
    rows.add(Arrays.asList("name", catalogName));
    rows.add(Arrays.asList("type", options.getOrDefault(typeKey, "")));
    rows.add(Arrays.asList("comment", ""));

    if (isExtended) {
        // Sorted key order keeps the output deterministic; the type key is
        // already covered by the "type" row above.
        options.keySet().stream()
                .filter(key -> !typeKey.equals(key))
                .sorted()
                .forEach(
                        key ->
                                rows.add(
                                        Arrays.asList(
                                                String.format("option:%s", key),
                                                options.get(key))));
    }

    return buildTableResult(
            new String[] {"info name", "info value"},
            new DataType[] {DataTypes.STRING(), DataTypes.STRING()},
            rows.stream().map(List::toArray).toArray(Object[][]::new));
}
/**
 * Operation describing a catalog: returns its name, type, comment and,
 * optionally, its configured options.
 *
 * NOTE(review): the trailing {@code @Override} with no following method is an
 * artifact of this excerpt — the {@code execute} override appears to have been
 * removed; check the full file.
 */
class DescribeCatalogOperation implements Operation, ExecutableOperation {

    // Name of the catalog to describe.
    private final String catalogName;
    // Whether to also list all catalog options in the output.
    private final boolean isExtended;

    public DescribeCatalogOperation(String catalogName, boolean isExtended) {
        this.catalogName = catalogName;
        this.isExtended = isExtended;
    }

    public String getCatalogName() {
        return catalogName;
    }

    public boolean isExtended() {
        return isExtended;
    }

    @Override
    public String asSummaryString() {
        Map<String, Object> params = new LinkedHashMap<>();
        params.put("identifier", catalogName);
        params.put("isExtended", isExtended);
        return OperationUtils.formatWithChildren(
                "DESCRIBE CATALOG", params, Collections.emptyList(), Operation::asSummaryString);
    }

    @Override
}
/**
 * DESCRIBE CATALOG operation holding the target catalog name and the extended flag.
 *
 * NOTE(review): the dangling {@code @Override} before the closing brace suggests a
 * method (presumably {@code execute}) was elided from this excerpt; verify
 * against the complete source.
 */
class DescribeCatalogOperation implements Operation, ExecutableOperation {

    // Catalog to be described.
    private final String catalogName;
    // When true, all configured options are included in the result.
    private final boolean isExtended;

    public DescribeCatalogOperation(String catalogName, boolean isExtended) {
        this.catalogName = catalogName;
        this.isExtended = isExtended;
    }

    public String getCatalogName() {
        return catalogName;
    }

    public boolean isExtended() {
        return isExtended;
    }

    @Override
    public String asSummaryString() {
        Map<String, Object> params = new LinkedHashMap<>();
        params.put("identifier", catalogName);
        params.put("isExtended", isExtended);
        return OperationUtils.formatWithChildren(
                "DESCRIBE CATALOG", params, Collections.emptyList(), Operation::asSummaryString);
    }

    @Override
}
Good to narrow down the exception! Thanks.
public boolean start() throws IOException { restClient = source.spec.getConnectionConfiguration().createClient(); String query = source.spec.getQuery(); if (query == null) { query = "{\"query\": { \"match_all\": {} }}"; } if (source.backendVersion == 5 && source.numSlices != null && source.numSlices > 1) { String sliceQuery = String.format("\"slice\": {\"id\": %s,\"max\": %s}", source.sliceId, source.numSlices); query = query.replaceFirst("\\{", "{" + sliceQuery + ","); } Response response; String endPoint = String.format( "/%s/%s/_search", source.spec.getConnectionConfiguration().getIndex(), source.spec.getConnectionConfiguration().getType()); Map<String, String> params = new HashMap<>(); params.put("scroll", source.spec.getScrollKeepalive()); if (source.backendVersion == 2) { params.put("size", String.valueOf(source.spec.getBatchSize())); if (source.shardPreference != null) { params.put("preference", "_shards:" + source.shardPreference); } } HttpEntity queryEntity = new NStringEntity(query, ContentType.APPLICATION_JSON); response = restClient.performRequest("GET", endPoint, params, queryEntity); JsonNode searchResult = parseResponse(response); updateScrollId(searchResult); return readNextBatchAndReturnFirstDocument(searchResult); } private void updateScrollId(JsonNode searchResult) { scrollId = searchResult.path("_scroll_id").asText(); } @Override public boolean advance() throws IOException { if (batchIterator.hasNext()) { current = batchIterator.next(); return true; } else { String requestBody = String.format( "{\"scroll\" : \"%s\",\"scroll_id\" : \"%s\"}", source.spec.getScrollKeepalive(), scrollId); HttpEntity scrollEntity = new NStringEntity(requestBody, ContentType.APPLICATION_JSON); Response response = restClient.performRequest( "GET", "/_search/scroll", Collections.emptyMap(), scrollEntity); JsonNode searchResult = parseResponse(response); updateScrollId(searchResult); return readNextBatchAndReturnFirstDocument(searchResult); } } private boolean 
readNextBatchAndReturnFirstDocument(JsonNode searchResult) { JsonNode hits = searchResult.path("hits").path("hits"); if (hits.size() == 0) { current = null; batchIterator = null; return false; } List<String> batch = new ArrayList<>(); boolean withMetadata = source.spec.isWithMetadata(); for (JsonNode hit : hits) { if (withMetadata) { batch.add(hit.toString()); } else { String document = hit.path("_source").toString(); batch.add(document); } } batchIterator = batch.listIterator(); current = batchIterator.next(); return true; } @Override public String getCurrent() throws NoSuchElementException { if (current == null) { throw new NoSuchElementException(); } return current; } @Override public void close() throws IOException { String requestBody = String.format("{\"scroll_id\" : [\"%s\"]}", scrollId); HttpEntity entity = new NStringEntity(requestBody, ContentType.APPLICATION_JSON); try { restClient.performRequest("DELETE", "/_search/scroll", Collections.emptyMap(), entity); } finally { if (restClient != null) { restClient.close(); } } } @Override public BoundedSource<String> getCurrentSource() { return source; } } /** * A POJO encapsulating a configuration for retry behavior when issuing requests to ES. A retry * will be attempted until the maxAttempts or maxDuration is exceeded, whichever comes first, for * 429 TOO_MANY_REQUESTS error. */ public static class RetryConfiguration extends BaseRetryConfiguration { private RetryConfiguration( int maxAttempts, Duration maxDuration, RetryPredicate retryPredicate) { super(maxAttempts, maxDuration, retryPredicate); } /** * Creates RetryConfiguration for {@link ElasticsearchIO} with provided maxAttempts, * maxDurations and exponential backoff based retries. 
*/ public static RetryConfiguration create(int maxAttempts, Duration maxDuration) { checkArgument(maxAttempts > 0, "maxAttempts must be greater than 0"); checkArgument( maxDuration != null && maxDuration.isLongerThan(Duration.ZERO), "maxDuration must be greater than 0"); return new RetryConfiguration(maxAttempts, maxDuration, DEFAULT_RETRY_PREDICATE); } @VisibleForTesting RetryConfiguration withRetryPredicate(RetryPredicate predicate) { this.retryPredicate = predicate; return this; } @VisibleForTesting static final RetryPredicate DEFAULT_RETRY_PREDICATE = new DefaultRetryPredicate(); /** * This is the default predicate used to test if a failed ES operation should be retried. A * retry will be attempted until the maxAttempts or maxDuration is exceeded, whichever comes * first, for TOO_MANY_REQUESTS(429) error. */ @VisibleForTesting static class DefaultRetryPredicate implements RetryPredicate { private int errorCode; DefaultRetryPredicate(int code) { this.errorCode = code; } DefaultRetryPredicate() { this(429); } /** Returns true if the response has the error code for any mutation. */ private static boolean errorCodePresent(Response response, int errorCode) { try { JsonNode json = parseResponse(response); if (json.path("errors").asBoolean()) { for (JsonNode item : json.path("items")) { if (item.findValue("status").asInt() == errorCode) { return true; } } } } catch (IOException e) { LOG.warn("Could not extract error codes from response {}", response); } return false; } @Override public boolean test(Throwable t) { return (t instanceof ResponseException) && errorCodePresent(((ResponseException) t).getResponse(), this.errorCode); } } } /** A {@link PTransform} writing data to Elasticsearch. */ @AutoValue public abstract static class Write extends PTransform<PCollection<String>, PDone> { /** * Interface allowing a specific field value to be returned from a parsed JSON document. 
This is * used for using explicit document ids, and for dynamic routing (index/Type) on a document * basis. A null response will result in default behaviour and an exception will be propagated * as a failure. */ public interface FieldValueExtractFn extends SerializableFunction<JsonNode, String> {} @Nullable abstract ConnectionConfiguration getConnectionConfiguration(); abstract long getMaxBatchSize(); abstract long getMaxBatchSizeBytes(); @Nullable abstract FieldValueExtractFn getIdFn(); @Nullable abstract FieldValueExtractFn getIndexFn(); @Nullable abstract FieldValueExtractFn getTypeFn(); @Nullable abstract RetryConfiguration getRetryConfiguration(); abstract boolean getUsePartialUpdate(); abstract Builder builder(); @AutoValue.Builder abstract static class Builder { abstract Builder setConnectionConfiguration(ConnectionConfiguration connectionConfiguration); abstract Builder setMaxBatchSize(long maxBatchSize); abstract Builder setMaxBatchSizeBytes(long maxBatchSizeBytes); abstract Builder setIdFn(FieldValueExtractFn idFunction); abstract Builder setIndexFn(FieldValueExtractFn indexFn); abstract Builder setTypeFn(FieldValueExtractFn typeFn); abstract Builder setUsePartialUpdate(boolean usePartialUpdate); abstract Builder setRetryConfiguration(RetryConfiguration retryConfiguration); abstract Write build(); } /** * Provide the Elasticsearch connection configuration object. * * @param connectionConfiguration the Elasticsearch {@link ConnectionConfiguration} object * @return the {@link Write} with connection configuration set */ public Write withConnectionConfiguration(ConnectionConfiguration connectionConfiguration) { checkArgument(connectionConfiguration != null, "connectionConfiguration can not be null"); return builder().setConnectionConfiguration(connectionConfiguration).build(); } /** * Provide a maximum size in number of documents for the batch see bulk API * (https: * docs (like Elasticsearch bulk size advice). 
See * https: * execution engine, size of bundles may vary, this sets the maximum size. Change this if you * need to have smaller ElasticSearch bulks. * * @param batchSize maximum batch size in number of documents * @return the {@link Write} with connection batch size set */ public Write withMaxBatchSize(long batchSize) { checkArgument(batchSize > 0, "batchSize must be > 0, but was %s", batchSize); return builder().setMaxBatchSize(batchSize).build(); } /** * Provide a maximum size in bytes for the batch see bulk API * (https: * (like Elasticsearch bulk size advice). See * https: * execution engine, size of bundles may vary, this sets the maximum size. Change this if you * need to have smaller ElasticSearch bulks. * * @param batchSizeBytes maximum batch size in bytes * @return the {@link Write} with connection batch size in bytes set */ public Write withMaxBatchSizeBytes(long batchSizeBytes) { checkArgument(batchSizeBytes > 0, "batchSizeBytes must be > 0, but was %s", batchSizeBytes); return builder().setMaxBatchSizeBytes(batchSizeBytes).build(); } /** * Provide a function to extract the id from the document. This id will be used as the document * id in Elasticsearch. Should the function throw an Exception then the batch will fail and the * exception propagated. * * @param idFn to extract the document ID * @return the {@link Write} with the function set */ public Write withIdFn(FieldValueExtractFn idFn) { checkArgument(idFn != null, "idFn must not be null"); return builder().setIdFn(idFn).build(); } /** * Provide a function to extract the target index from the document allowing for dynamic * document routing. Should the function throw an Exception then the batch will fail and the * exception propagated. 
* * @param indexFn to extract the destination index from * @return the {@link Write} with the function set */ public Write withIndexFn(FieldValueExtractFn indexFn) { checkArgument(indexFn != null, "indexFn must not be null"); return builder().setIndexFn(indexFn).build(); } /** * Provide a function to extract the target type from the document allowing for dynamic document * routing. Should the function throw an Exception then the batch will fail and the exception * propagated. Users are encouraged to consider carefully if multipe types are a sensible model * <a * href="https: * discussed in this blog</a>. * * @param typeFn to extract the destination index from * @return the {@link Write} with the function set */ public Write withTypeFn(FieldValueExtractFn typeFn) { checkArgument(typeFn != null, "typeFn must not be null"); return builder().setTypeFn(typeFn).build(); } /** * Provide an instruction to control whether partial updates or inserts (default) are issued to * Elasticsearch. * * @param usePartialUpdate set to true to issue partial updates * @return the {@link Write} with the partial update control set */ public Write withUsePartialUpdate(boolean usePartialUpdate) { return builder().setUsePartialUpdate(usePartialUpdate).build(); } /** * Provides configuration to retry a failed batch call to Elastic Search. A batch is considered * as failed if the underlying {@link RestClient} surfaces 429 HTTP status code as error for one * or more of the items in the {@link Response}. Users should consider that retrying might * compound the underlying problem which caused the initial failure. Users should also be aware * that once retrying is exhausted the error is surfaced to the runner which <em>may</em> then * opt to retry the current partition in entirety or abort if the max number of retries of the * runner is completed. 
Retrying uses an exponential backoff algorithm, with minimum backoff of * 5 seconds and then surfacing the error once the maximum number of retries or maximum * configuration duration is exceeded. * * <p>Example use: * * <pre>{@code * ElasticsearchIO.write() * .withRetryConfiguration(ElasticsearchIO.RetryConfiguration.create(10, Duration.standardMinutes(3)) * ... * }</pre> * * @param retryConfiguration the rules which govern the retry behavior * @return the {@link Write} with retrying configured */ public Write withRetryConfiguration(RetryConfiguration retryConfiguration) { checkArgument(retryConfiguration != null, "retryConfiguration is required"); return builder().setRetryConfiguration(retryConfiguration).build(); } @Override public PDone expand(PCollection<String> input) { ConnectionConfiguration connectionConfiguration = getConnectionConfiguration(); checkState(connectionConfiguration != null, "withConnectionConfiguration() is required"); input.apply(ParDo.of(new WriteFn(this))); return PDone.in(input.getPipeline()); } /** {@link DoFn} to for the {@link Write} transform. */ @VisibleForTesting static class WriteFn extends DoFn<String, Void> { private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); private static final int DEFAULT_RETRY_ON_CONFLICT = 5; private static final Duration RETRY_INITIAL_BACKOFF = Duration.standardSeconds(5); private static final Duration RETRY_MAX_BACKOFF = Duration.standardDays(365); @VisibleForTesting static final String RETRY_ATTEMPT_LOG = "Error writing to Elasticsearch. Retry attempt[%d]"; @VisibleForTesting static final String RETRY_FAILED_LOG = "Error writing to ES after %d attempt(s). 
No more attempts allowed"; private transient FluentBackoff retryBackoff; private int backendVersion; private final Write spec; private transient RestClient restClient; private ArrayList<String> batch; private long currentBatchSizeBytes; @JsonPropertyOrder({"_index", "_type", "_id"}) @JsonInclude(JsonInclude.Include.NON_NULL) private static class DocumentMetadata implements Serializable { @JsonProperty("_index") final String index; @JsonProperty("_type") final String type; @JsonProperty("_id") final String id; @JsonProperty("_retry_on_conflict") final Integer retryOnConflict; DocumentMetadata(String index, String type, String id, Integer retryOnConflict) { this.index = index; this.type = type; this.id = id; this.retryOnConflict = retryOnConflict; } } @VisibleForTesting WriteFn(Write spec) { this.spec = spec; } @Setup public void setup() throws IOException { ConnectionConfiguration connectionConfiguration = spec.getConnectionConfiguration(); backendVersion = getBackendVersion(connectionConfiguration); restClient = connectionConfiguration.createClient(); retryBackoff = FluentBackoff.DEFAULT .withMaxRetries(0) .withInitialBackoff(RETRY_INITIAL_BACKOFF) .withMaxCumulativeBackoff(RETRY_MAX_BACKOFF); if (spec.getRetryConfiguration() != null) { retryBackoff = FluentBackoff.DEFAULT .withInitialBackoff(RETRY_INITIAL_BACKOFF) .withMaxRetries(spec.getRetryConfiguration().getMaxAttempts() - 1) .withMaxCumulativeBackoff(spec.getRetryConfiguration().getMaxDuration()); } } @StartBundle public void startBundle(StartBundleContext context) { batch = new ArrayList<>(); currentBatchSizeBytes = 0; } /** * Extracts the components that comprise the document address from the document using the * {@link FieldValueExtractFn} configured. This allows any or all of the index, type and * document id to be controlled on a per document basis. If none are provided then an empty * default of {@code {}} is returned. 
Sanitization of the index is performed, automatically * lower-casing the value as required by Elasticsearch. * * @param document the json from which the index, type and id may be extracted * @return the document address as JSON or the default * @throws IOException if the document cannot be parsed as JSON */ private String getDocumentMetadata(String document) throws IOException { if (spec.getIndexFn() != null || spec.getTypeFn() != null || spec.getIdFn() != null) { JsonNode parsedDocument = OBJECT_MAPPER.readTree(document); DocumentMetadata metadata = new DocumentMetadata( spec.getIndexFn() != null ? lowerCaseOrNull(spec.getIndexFn().apply(parsedDocument)) : null, spec.getTypeFn() != null ? spec.getTypeFn().apply(parsedDocument) : null, spec.getIdFn() != null ? spec.getIdFn().apply(parsedDocument) : null, spec.getUsePartialUpdate() ? DEFAULT_RETRY_ON_CONFLICT : null); return OBJECT_MAPPER.writeValueAsString(metadata); } else { return "{}"; } } private static String lowerCaseOrNull(String input) { return input == null ? 
null : input.toLowerCase(); } @ProcessElement public void processElement(ProcessContext context) throws Exception { String document = context.element(); String documentMetadata = getDocumentMetadata(document); if (spec.getUsePartialUpdate()) { batch.add( String.format( "{ \"update\" : %s }%n{ \"doc\" : %s, \"doc_as_upsert\" : true }%n", documentMetadata, document)); } else { batch.add(String.format("{ \"index\" : %s }%n%s%n", documentMetadata, document)); } currentBatchSizeBytes += document.getBytes(StandardCharsets.UTF_8).length; if (batch.size() >= spec.getMaxBatchSize() || currentBatchSizeBytes >= spec.getMaxBatchSizeBytes()) { flushBatch(); } } @FinishBundle public void finishBundle(FinishBundleContext context) throws IOException, InterruptedException { flushBatch(); } private void flushBatch() throws IOException, InterruptedException { if (batch.isEmpty()) { return; } StringBuilder bulkRequest = new StringBuilder(); for (String json : batch) { bulkRequest.append(json); } batch.clear(); currentBatchSizeBytes = 0; Response response = null; String endPoint = String.format( "/%s/%s/_bulk", spec.getConnectionConfiguration().getIndex(), spec.getConnectionConfiguration().getType()); HttpEntity requestBody = new NStringEntity(bulkRequest.toString(), ContentType.APPLICATION_JSON); response = restClient.performRequest("POST", endPoint, Collections.emptyMap(), requestBody); if (spec.getRetryConfiguration() != null && spec.getRetryConfiguration() .getRetryPredicate() .test(new ResponseException(response))) { response = handleRetry("POST", endPoint, Collections.emptyMap(), requestBody); } checkForErrors(response, backendVersion); } /** retry request based on retry configuration policy. 
*/ private Response handleRetry( String method, String endpoint, Map<String, String> params, HttpEntity requestBody) throws IOException, InterruptedException { Response response = null; Sleeper sleeper = Sleeper.DEFAULT; BackOff backoff = retryBackoff.backoff(); int attempt = 0; while (BackOffUtils.next(sleeper, backoff)) { LOG.warn(String.format(RETRY_ATTEMPT_LOG, ++attempt)); response = restClient.performRequest(method, endpoint, params, requestBody); if (spec.getRetryConfiguration() .getRetryPredicate() .test(new ResponseException(response))) { continue; } return response; } throw new IOException(String.format(RETRY_FAILED_LOG, attempt)); } @Teardown public void closeClient() throws IOException { if (restClient != null) { restClient.close(); } } } } static int getBackendVersion(ConnectionConfiguration connectionConfiguration) { try (RestClient restClient = connectionConfiguration.createClient()) { Response response = restClient.performRequest("GET", ""); JsonNode jsonNode = parseResponse(response); int backendVersion = Integer.parseInt(jsonNode.path("version").path("number").asText().substring(0, 1)); checkArgument( (backendVersion == 2 || backendVersion == 5), "The Elasticsearch version to connect to is %s.x. " + "This version of the ElasticsearchIO is only compatible with " + "Elasticsearch v5.x and v2.x", backendVersion); return backendVersion; } catch (IOException e) { throw (new IllegalArgumentException("Cannot get Elasticsearch version")); } } }
return backendVersion;
public boolean start() throws IOException { restClient = source.spec.getConnectionConfiguration().createClient(); String query = source.spec.getQuery(); if (query == null) { query = "{\"query\": { \"match_all\": {} }}"; } if (source.backendVersion == 5 && source.numSlices != null && source.numSlices > 1) { String sliceQuery = String.format("\"slice\": {\"id\": %s,\"max\": %s}", source.sliceId, source.numSlices); query = query.replaceFirst("\\{", "{" + sliceQuery + ","); } Response response; String endPoint = String.format( "/%s/%s/_search", source.spec.getConnectionConfiguration().getIndex(), source.spec.getConnectionConfiguration().getType()); Map<String, String> params = new HashMap<>(); params.put("scroll", source.spec.getScrollKeepalive()); if (source.backendVersion == 2) { params.put("size", String.valueOf(source.spec.getBatchSize())); if (source.shardPreference != null) { params.put("preference", "_shards:" + source.shardPreference); } } HttpEntity queryEntity = new NStringEntity(query, ContentType.APPLICATION_JSON); response = restClient.performRequest("GET", endPoint, params, queryEntity); JsonNode searchResult = parseResponse(response); updateScrollId(searchResult); return readNextBatchAndReturnFirstDocument(searchResult); } private void updateScrollId(JsonNode searchResult) { scrollId = searchResult.path("_scroll_id").asText(); } @Override public boolean advance() throws IOException { if (batchIterator.hasNext()) { current = batchIterator.next(); return true; } else { String requestBody = String.format( "{\"scroll\" : \"%s\",\"scroll_id\" : \"%s\"}", source.spec.getScrollKeepalive(), scrollId); HttpEntity scrollEntity = new NStringEntity(requestBody, ContentType.APPLICATION_JSON); Response response = restClient.performRequest( "GET", "/_search/scroll", Collections.emptyMap(), scrollEntity); JsonNode searchResult = parseResponse(response); updateScrollId(searchResult); return readNextBatchAndReturnFirstDocument(searchResult); } } private boolean 
readNextBatchAndReturnFirstDocument(JsonNode searchResult) { JsonNode hits = searchResult.path("hits").path("hits"); if (hits.size() == 0) { current = null; batchIterator = null; return false; } List<String> batch = new ArrayList<>(); boolean withMetadata = source.spec.isWithMetadata(); for (JsonNode hit : hits) { if (withMetadata) { batch.add(hit.toString()); } else { String document = hit.path("_source").toString(); batch.add(document); } } batchIterator = batch.listIterator(); current = batchIterator.next(); return true; } @Override public String getCurrent() throws NoSuchElementException { if (current == null) { throw new NoSuchElementException(); } return current; } @Override public void close() throws IOException { String requestBody = String.format("{\"scroll_id\" : [\"%s\"]}", scrollId); HttpEntity entity = new NStringEntity(requestBody, ContentType.APPLICATION_JSON); try { restClient.performRequest("DELETE", "/_search/scroll", Collections.emptyMap(), entity); } finally { if (restClient != null) { restClient.close(); } } } @Override public BoundedSource<String> getCurrentSource() { return source; } } /** * A POJO encapsulating a configuration for retry behavior when issuing requests to ES. A retry * will be attempted until the maxAttempts or maxDuration is exceeded, whichever comes first, for * 429 TOO_MANY_REQUESTS error. 
*/ @AutoValue public abstract static class RetryConfiguration implements Serializable { @VisibleForTesting static final RetryPredicate DEFAULT_RETRY_PREDICATE = new DefaultRetryPredicate(); abstract int getMaxAttempts(); abstract Duration getMaxDuration(); abstract RetryPredicate getRetryPredicate(); abstract Builder builder(); @AutoValue.Builder abstract static class Builder { abstract ElasticsearchIO.RetryConfiguration.Builder setMaxAttempts(int maxAttempts); abstract ElasticsearchIO.RetryConfiguration.Builder setMaxDuration(Duration maxDuration); abstract ElasticsearchIO.RetryConfiguration.Builder setRetryPredicate( RetryPredicate retryPredicate); abstract ElasticsearchIO.RetryConfiguration build(); } /** * Creates RetryConfiguration for {@link ElasticsearchIO} with provided maxAttempts, * maxDurations and exponential backoff based retries. * * @param maxAttempts max number of attempts. * @param maxDuration maximum duration for retries. * @return {@link RetryConfiguration} object with provided settings. */ public static RetryConfiguration create(int maxAttempts, Duration maxDuration) { checkArgument(maxAttempts > 0, "maxAttempts must be greater than 0"); checkArgument( maxDuration != null && maxDuration.isLongerThan(Duration.ZERO), "maxDuration must be greater than 0"); return new AutoValue_ElasticsearchIO_RetryConfiguration.Builder() .setMaxAttempts(maxAttempts) .setMaxDuration(maxDuration) .setRetryPredicate(DEFAULT_RETRY_PREDICATE) .build(); } @VisibleForTesting RetryConfiguration withRetryPredicate(RetryPredicate predicate) { checkArgument(predicate != null, "predicate must be provided"); return builder().setRetryPredicate(predicate).build(); } /** * An interface used to control if we retry the Elasticsearch call when a {@link Response} is * obtained. If {@link RetryPredicate * the requests to the Elasticsearch server if the {@link RetryConfiguration} permits it. 
*/ @FunctionalInterface interface RetryPredicate extends Predicate<Response>, Serializable {} /** * This is the default predicate used to test if a failed ES operation should be retried. A * retry will be attempted until the maxAttempts or maxDuration is exceeded, whichever comes * first, for TOO_MANY_REQUESTS(429) error. */ @VisibleForTesting static class DefaultRetryPredicate implements RetryPredicate { private int errorCode; DefaultRetryPredicate(int code) { this.errorCode = code; } DefaultRetryPredicate() { this(429); } /** Returns true if the response has the error code for any mutation. */ private static boolean errorCodePresent(Response response, int errorCode) { try { JsonNode json = parseResponse(response); if (json.path("errors").asBoolean()) { for (JsonNode item : json.path("items")) { if (item.findValue("status").asInt() == errorCode) { return true; } } } } catch (IOException e) { LOG.warn("Could not extract error codes from response {}", response); } return false; } @Override public boolean test(Response response) { return errorCodePresent(response, errorCode); } } } /** A {@link PTransform} writing data to Elasticsearch. */ @AutoValue public abstract static class Write extends PTransform<PCollection<String>, PDone> { /** * Interface allowing a specific field value to be returned from a parsed JSON document. This is * used for using explicit document ids, and for dynamic routing (index/Type) on a document * basis. A null response will result in default behaviour and an exception will be propagated * as a failure. 
*/ public interface FieldValueExtractFn extends SerializableFunction<JsonNode, String> {} @Nullable abstract ConnectionConfiguration getConnectionConfiguration(); abstract long getMaxBatchSize(); abstract long getMaxBatchSizeBytes(); @Nullable abstract FieldValueExtractFn getIdFn(); @Nullable abstract FieldValueExtractFn getIndexFn(); @Nullable abstract FieldValueExtractFn getTypeFn(); @Nullable abstract RetryConfiguration getRetryConfiguration(); abstract boolean getUsePartialUpdate(); abstract Builder builder(); @AutoValue.Builder abstract static class Builder { abstract Builder setConnectionConfiguration(ConnectionConfiguration connectionConfiguration); abstract Builder setMaxBatchSize(long maxBatchSize); abstract Builder setMaxBatchSizeBytes(long maxBatchSizeBytes); abstract Builder setIdFn(FieldValueExtractFn idFunction); abstract Builder setIndexFn(FieldValueExtractFn indexFn); abstract Builder setTypeFn(FieldValueExtractFn typeFn); abstract Builder setUsePartialUpdate(boolean usePartialUpdate); abstract Builder setRetryConfiguration(RetryConfiguration retryConfiguration); abstract Write build(); } /** * Provide the Elasticsearch connection configuration object. * * @param connectionConfiguration the Elasticsearch {@link ConnectionConfiguration} object * @return the {@link Write} with connection configuration set */ public Write withConnectionConfiguration(ConnectionConfiguration connectionConfiguration) { checkArgument(connectionConfiguration != null, "connectionConfiguration can not be null"); return builder().setConnectionConfiguration(connectionConfiguration).build(); } /** * Provide a maximum size in number of documents for the batch see bulk API * (https: * docs (like Elasticsearch bulk size advice). See * https: * execution engine, size of bundles may vary, this sets the maximum size. Change this if you * need to have smaller ElasticSearch bulks. 
* * @param batchSize maximum batch size in number of documents * @return the {@link Write} with connection batch size set */ public Write withMaxBatchSize(long batchSize) { checkArgument(batchSize > 0, "batchSize must be > 0, but was %s", batchSize); return builder().setMaxBatchSize(batchSize).build(); } /** * Provide a maximum size in bytes for the batch see bulk API * (https: * (like Elasticsearch bulk size advice). See * https: * execution engine, size of bundles may vary, this sets the maximum size. Change this if you * need to have smaller ElasticSearch bulks. * * @param batchSizeBytes maximum batch size in bytes * @return the {@link Write} with connection batch size in bytes set */ public Write withMaxBatchSizeBytes(long batchSizeBytes) { checkArgument(batchSizeBytes > 0, "batchSizeBytes must be > 0, but was %s", batchSizeBytes); return builder().setMaxBatchSizeBytes(batchSizeBytes).build(); } /** * Provide a function to extract the id from the document. This id will be used as the document * id in Elasticsearch. Should the function throw an Exception then the batch will fail and the * exception propagated. * * @param idFn to extract the document ID * @return the {@link Write} with the function set */ public Write withIdFn(FieldValueExtractFn idFn) { checkArgument(idFn != null, "idFn must not be null"); return builder().setIdFn(idFn).build(); } /** * Provide a function to extract the target index from the document allowing for dynamic * document routing. Should the function throw an Exception then the batch will fail and the * exception propagated. * * @param indexFn to extract the destination index from * @return the {@link Write} with the function set */ public Write withIndexFn(FieldValueExtractFn indexFn) { checkArgument(indexFn != null, "indexFn must not be null"); return builder().setIndexFn(indexFn).build(); } /** * Provide a function to extract the target type from the document allowing for dynamic document * routing. 
Should the function throw an Exception then the batch will fail and the exception * propagated. Users are encouraged to consider carefully if multipe types are a sensible model * <a * href="https: * discussed in this blog</a>. * * @param typeFn to extract the destination index from * @return the {@link Write} with the function set */ public Write withTypeFn(FieldValueExtractFn typeFn) { checkArgument(typeFn != null, "typeFn must not be null"); return builder().setTypeFn(typeFn).build(); } /** * Provide an instruction to control whether partial updates or inserts (default) are issued to * Elasticsearch. * * @param usePartialUpdate set to true to issue partial updates * @return the {@link Write} with the partial update control set */ public Write withUsePartialUpdate(boolean usePartialUpdate) { return builder().setUsePartialUpdate(usePartialUpdate).build(); } /** * Provides configuration to retry a failed batch call to Elasticsearch. A batch is considered * as failed if the underlying {@link RestClient} surfaces 429 HTTP status code as error for one * or more of the items in the {@link Response}. Users should consider that retrying might * compound the underlying problem which caused the initial failure. Users should also be aware * that once retrying is exhausted the error is surfaced to the runner which <em>may</em> then * opt to retry the current bundle in entirety or abort if the max number of retries of the * runner is completed. Retrying uses an exponential backoff algorithm, with minimum backoff of * 5 seconds and then surfacing the error once the maximum number of retries or maximum * configuration duration is exceeded. * * <p>Example use: * * <pre>{@code * ElasticsearchIO.write() * .withRetryConfiguration(ElasticsearchIO.RetryConfiguration.create(10, Duration.standardMinutes(3)) * ... 
* }</pre> * * @param retryConfiguration the rules which govern the retry behavior * @return the {@link Write} with retrying configured */ public Write withRetryConfiguration(RetryConfiguration retryConfiguration) { checkArgument(retryConfiguration != null, "retryConfiguration is required"); return builder().setRetryConfiguration(retryConfiguration).build(); } @Override public PDone expand(PCollection<String> input) { ConnectionConfiguration connectionConfiguration = getConnectionConfiguration(); checkState(connectionConfiguration != null, "withConnectionConfiguration() is required"); input.apply(ParDo.of(new WriteFn(this))); return PDone.in(input.getPipeline()); } /** {@link DoFn} to for the {@link Write} transform. */ @VisibleForTesting static class WriteFn extends DoFn<String, Void> { private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); private static final int DEFAULT_RETRY_ON_CONFLICT = 5; private static final Duration RETRY_INITIAL_BACKOFF = Duration.standardSeconds(5); @VisibleForTesting static final String RETRY_ATTEMPT_LOG = "Error writing to Elasticsearch. Retry attempt[%d]"; @VisibleForTesting static final String RETRY_FAILED_LOG = "Error writing to ES after %d attempt(s). 
No more attempts allowed"; private transient FluentBackoff retryBackoff; private int backendVersion; private final Write spec; private transient RestClient restClient; private ArrayList<String> batch; private long currentBatchSizeBytes; @JsonPropertyOrder({"_index", "_type", "_id"}) @JsonInclude(JsonInclude.Include.NON_NULL) private static class DocumentMetadata implements Serializable { @JsonProperty("_index") final String index; @JsonProperty("_type") final String type; @JsonProperty("_id") final String id; @JsonProperty("_retry_on_conflict") final Integer retryOnConflict; DocumentMetadata(String index, String type, String id, Integer retryOnConflict) { this.index = index; this.type = type; this.id = id; this.retryOnConflict = retryOnConflict; } } @VisibleForTesting WriteFn(Write spec) { this.spec = spec; } @Setup public void setup() throws IOException { ConnectionConfiguration connectionConfiguration = spec.getConnectionConfiguration(); backendVersion = getBackendVersion(connectionConfiguration); restClient = connectionConfiguration.createClient(); retryBackoff = FluentBackoff.DEFAULT.withMaxRetries(0).withInitialBackoff(RETRY_INITIAL_BACKOFF); if (spec.getRetryConfiguration() != null) { retryBackoff = FluentBackoff.DEFAULT .withInitialBackoff(RETRY_INITIAL_BACKOFF) .withMaxRetries(spec.getRetryConfiguration().getMaxAttempts() - 1) .withMaxCumulativeBackoff(spec.getRetryConfiguration().getMaxDuration()); } } @StartBundle public void startBundle(StartBundleContext context) { batch = new ArrayList<>(); currentBatchSizeBytes = 0; } /** * Extracts the components that comprise the document address from the document using the * {@link FieldValueExtractFn} configured. This allows any or all of the index, type and * document id to be controlled on a per document basis. If none are provided then an empty * default of {@code {}} is returned. Sanitization of the index is performed, automatically * lower-casing the value as required by Elasticsearch. 
* * @param document the json from which the index, type and id may be extracted * @return the document address as JSON or the default * @throws IOException if the document cannot be parsed as JSON */ private String getDocumentMetadata(String document) throws IOException { if (spec.getIndexFn() != null || spec.getTypeFn() != null || spec.getIdFn() != null) { JsonNode parsedDocument = OBJECT_MAPPER.readTree(document); DocumentMetadata metadata = new DocumentMetadata( spec.getIndexFn() != null ? lowerCaseOrNull(spec.getIndexFn().apply(parsedDocument)) : null, spec.getTypeFn() != null ? spec.getTypeFn().apply(parsedDocument) : null, spec.getIdFn() != null ? spec.getIdFn().apply(parsedDocument) : null, spec.getUsePartialUpdate() ? DEFAULT_RETRY_ON_CONFLICT : null); return OBJECT_MAPPER.writeValueAsString(metadata); } else { return "{}"; } } private static String lowerCaseOrNull(String input) { return input == null ? null : input.toLowerCase(); } @ProcessElement public void processElement(ProcessContext context) throws Exception { String document = context.element(); String documentMetadata = getDocumentMetadata(document); if (spec.getUsePartialUpdate()) { batch.add( String.format( "{ \"update\" : %s }%n{ \"doc\" : %s, \"doc_as_upsert\" : true }%n", documentMetadata, document)); } else { batch.add(String.format("{ \"index\" : %s }%n%s%n", documentMetadata, document)); } currentBatchSizeBytes += document.getBytes(StandardCharsets.UTF_8).length; if (batch.size() >= spec.getMaxBatchSize() || currentBatchSizeBytes >= spec.getMaxBatchSizeBytes()) { flushBatch(); } } @FinishBundle public void finishBundle(FinishBundleContext context) throws IOException, InterruptedException { flushBatch(); } private void flushBatch() throws IOException, InterruptedException { if (batch.isEmpty()) { return; } StringBuilder bulkRequest = new StringBuilder(); for (String json : batch) { bulkRequest.append(json); } batch.clear(); currentBatchSizeBytes = 0; Response response; String endPoint = 
String.format( "/%s/%s/_bulk", spec.getConnectionConfiguration().getIndex(), spec.getConnectionConfiguration().getType()); HttpEntity requestBody = new NStringEntity(bulkRequest.toString(), ContentType.APPLICATION_JSON); response = restClient.performRequest("POST", endPoint, Collections.emptyMap(), requestBody); if (spec.getRetryConfiguration() != null && spec.getRetryConfiguration().getRetryPredicate().test(response)) { response = handleRetry("POST", endPoint, Collections.emptyMap(), requestBody); } checkForErrors(response, backendVersion); } /** retry request based on retry configuration policy. */ private Response handleRetry( String method, String endpoint, Map<String, String> params, HttpEntity requestBody) throws IOException, InterruptedException { Response response; Sleeper sleeper = Sleeper.DEFAULT; BackOff backoff = retryBackoff.backoff(); int attempt = 0; while (BackOffUtils.next(sleeper, backoff)) { LOG.warn(String.format(RETRY_ATTEMPT_LOG, ++attempt)); response = restClient.performRequest(method, endpoint, params, requestBody); if (!spec.getRetryConfiguration().getRetryPredicate().test(response)) { return response; } } throw new IOException(String.format(RETRY_FAILED_LOG, attempt)); } @Teardown public void closeClient() throws IOException { if (restClient != null) { restClient.close(); } } } } static int getBackendVersion(ConnectionConfiguration connectionConfiguration) { try (RestClient restClient = connectionConfiguration.createClient()) { Response response = restClient.performRequest("GET", ""); JsonNode jsonNode = parseResponse(response); int backendVersion = Integer.parseInt(jsonNode.path("version").path("number").asText().substring(0, 1)); checkArgument( (backendVersion == 2 || backendVersion == 5), "The Elasticsearch version to connect to is %s.x. 
" + "This version of the ElasticsearchIO is only compatible with " + "Elasticsearch v5.x and v2.x", backendVersion); return backendVersion; } catch (IOException e) { throw (new IllegalArgumentException("Cannot get Elasticsearch version")); } } }
class BoundedElasticsearchReader extends BoundedSource.BoundedReader<String> { private final BoundedElasticsearchSource source; private RestClient restClient; private String current; private String scrollId; private ListIterator<String> batchIterator; private BoundedElasticsearchReader(BoundedElasticsearchSource source) { this.source = source; } @Override
class BoundedElasticsearchReader extends BoundedSource.BoundedReader<String> { private final BoundedElasticsearchSource source; private RestClient restClient; private String current; private String scrollId; private ListIterator<String> batchIterator; private BoundedElasticsearchReader(BoundedElasticsearchSource source) { this.source = source; } @Override
I agree that the current implementation might be safer, but it seems to significantly limit the usability of RestrictionTrackers as well. I think source authors are advanced users, so it might be OK to err on the side of usability over safety in some cases — though this is debatable. Another option might be to add a method to the RestrictionTracker to mark progress without claiming. We have such a method in the old RangeTracker interface: https://github.com/apache/beam/blob/master/sdks/python/apache_beam/io/iobase.py#L393
// Records that roughly one record's worth of bytes has been processed.
// The counter is advanced first, then validated, so `progress` is updated
// even on the failing path (matches the original behavior).
public void makeProgress() throws Exception {
    progress = progress + approximateRecordSize;
    boolean outOfRange = progress > totalWork;
    if (outOfRange) {
        throw new IOException("Making progress out of range");
    }
}
progress += approximateRecordSize;
// Advances the progress estimate by the approximate size of one record
// (approximateRecordSize is derived in the enclosing tracker's constructor).
// Throws IOException once the accumulated progress exceeds the estimated
// totalWork, i.e. more progress was reported than the block should contain.
public void makeProgress() throws Exception { progress += approximateRecordSize; if (progress > totalWork) { throw new IOException("Making progress out of range"); } }
/**
 * Offset-range tracker that reports progress in estimated bytes rather than
 * raw offsets, using an average per-record size computed up front.
 */
class BlockTracker extends OffsetRangeTracker {
    private long totalWork;
    private long progress;
    private long approximateRecordSize;

    public BlockTracker(OffsetRange range, long totalByteSize, long recordCount) {
        super(range);
        // Guard against division by zero: an empty block leaves all
        // estimates at their default of 0.
        if (recordCount != 0) {
            long recordSize = totalByteSize / recordCount;
            this.approximateRecordSize = recordSize;
            this.totalWork = recordSize * recordCount;
            this.progress = 0;
        }
    }

    @Override
    public Progress getProgress() {
        if (this.lastAttemptedOffset == null) {
            // Nothing attempted yet: all estimated work remains.
            return Progress.from(0.0D, this.totalWork);
        }
        return Progress.from(progress, totalWork - progress);
    }
}
/**
 * Offset-range tracker that pre-computes a byte-based estimate of the total
 * work for the tracked range.
 */
class BlockTracker extends OffsetRangeTracker {
    // Estimated total work in bytes (approximateRecordSize * recordCount).
    // NOTE(review): no longer read now that getProgress() delegates to the
    // superclass — confirm before removing.
    private long totalWork;
    // Bytes' worth of work reported so far; also unused after the delegation.
    private long progress;
    // Average serialized record size; stays 0 when recordCount is 0.
    private long approximateRecordSize;

    public BlockTracker(OffsetRange range, long totalByteSize, long recordCount) {
        super(range);
        // Guard against division by zero for empty blocks.
        if (recordCount != 0) {
            this.approximateRecordSize = totalByteSize / recordCount;
            this.totalWork = approximateRecordSize * recordCount;
            this.progress = 0;
        }
    }

    @Override
    public Progress getProgress() {
        // Delegates to the default offset-based progress reporting.
        return super.getProgress();
    }
}
```suggestion throw ErrorCreator.createError(StringUtils.fromString("ballerina: error occurred while waiting for " + ``` This might be better
/**
 * Stops the module runtime: poisons the scheduler, waits for the scheduler
 * thread to terminate, then invokes the module's stop lifecycle function.
 * Must be called after {@code init()} and at most once.
 */
public void stop() {
    if (!moduleInitialized) {
        throw ErrorHelper.getRuntimeException(ErrorCodes.INVALID_FUNCTION_INVOCATION_BEFORE_MODULE_INIT, "stop");
    }
    if (moduleStopped) {
        throw ErrorHelper.getRuntimeException(ErrorCodes.FUNCTION_ALREADY_CALLED, "stop");
    }
    scheduler.poison();
    try {
        schedulerThread.join();
    } catch (InterruptedException e) {
        // Restore the interrupt status so callers can observe it.
        Thread.currentThread().interrupt();
        // Fixed message: original read "error occurred in while waiting",
        // which is ungrammatical.
        throw ErrorCreator.createError(StringUtils.fromString("ballerina: error occurred while waiting for " +
                "scheduler thread to finish"), e);
    }
    invokeModuleStop();
    moduleStopped = true;
}
throw ErrorCreator.createError(StringUtils.fromString("ballerina: error occurred in while waiting for " +
/**
 * Stops the module runtime: poisons the scheduler, waits for the scheduler
 * thread to terminate, then invokes the module's stop lifecycle function.
 * Must be called after {@code init()} and at most once; violating either
 * precondition raises a runtime error.
 */
public void stop() { if (!moduleInitialized) { throw ErrorHelper.getRuntimeException(ErrorCodes.INVALID_FUNCTION_INVOCATION_BEFORE_MODULE_INIT, "stop"); } if (moduleStopped) { throw ErrorHelper.getRuntimeException(ErrorCodes.FUNCTION_ALREADY_CALLED, "stop"); } scheduler.poison(); try { schedulerThread.join(); } catch (InterruptedException e) { throw ErrorCreator.createError(StringUtils.fromString("error occurred while waiting for the scheduler " + "thread to finish"), e); } invokeModuleStop(); moduleStopped = true; }
class BalRuntime extends Runtime { private final Scheduler scheduler; private final Module module; private boolean moduleInitialized = false; private boolean moduleStarted = false; private boolean moduleStopped = false; private Thread schedulerThread = null; public BalRuntime(Scheduler scheduler, Module module) { this.scheduler = scheduler; this.module = module; } public BalRuntime(Module module) { this.scheduler = new Scheduler(true); this.module = module; } public void init() { if (moduleInitialized) { throw ErrorHelper.getRuntimeException(ErrorCodes.FUNCTION_ALREADY_CALLED, "init"); } invokeConfigInit(); schedulerThread = new Thread(scheduler::start); schedulerThread.start(); invokeMethodSync("$moduleInit"); moduleInitialized = true; } public void start() { if (!moduleInitialized) { throw ErrorHelper.getRuntimeException(ErrorCodes.INVALID_FUNCTION_INVOCATION_BEFORE_MODULE_INIT, "start"); } if (moduleStarted) { throw ErrorHelper.getRuntimeException(ErrorCodes.FUNCTION_ALREADY_CALLED, "start"); } invokeMethodSync("$moduleStart"); moduleStarted = true; } public void invokeMethodAsync(String functionName, Callback callback, Object... args) { if (!moduleInitialized) { throw ErrorHelper.getRuntimeException(ErrorCodes.INVALID_FUNCTION_INVOCATION_BEFORE_MODULE_INIT, functionName); } invokeMethod(functionName, callback, PredefinedTypes.TYPE_ANY, functionName, args); } /** * Invoke Object method asynchronously and sequentially. This method will ensure that the object methods are * invoked in the same thread where other object methods are executed. So, the methods will be executed * sequentially per object level. * * @param object Object Value. * @param methodName Name of the method. * @param strandName Name for newly created strand which is used to execute the function pointer. This is * optional and can be null. * @param metadata Meta data of new strand. * @param callback Callback which will get notified once the method execution is done. 
* @param properties Set of properties for strand. * @param returnType Expected return type of this method. * @param args Ballerina function arguments. * @return {@link BFuture} containing return value for executing this method. * <p> * This method needs to be called if object.getType().isIsolated() or * object.getType().isIsolated(methodName) returns false. */ public BFuture invokeMethodAsyncSequentially(BObject object, String methodName, String strandName, StrandMetadata metadata, Callback callback, Map<String, Object> properties, Type returnType, Object... args) { try { validateArgs(object, methodName); ObjectValue objectVal = (ObjectValue) object; FutureValue future = scheduler.createFuture(null, callback, properties, returnType, strandName, metadata); AsyncUtils.getArgsWithDefaultValues(scheduler, objectVal, methodName, new Callback() { @Override public void notifySuccess(Object result) { Function<?, ?> func = getFunction((Object[]) result, objectVal, methodName); scheduler.scheduleToObjectGroup(new Object[1], func, future); } @Override public void notifyFailure(BError error) { callback.notifyFailure(error); } }, args); return future; } catch (BError e) { callback.notifyFailure(e); } catch (Throwable e) { callback.notifyFailure(ErrorCreator.createError(StringUtils.fromString(e.getMessage()))); } return null; } /** * Invoke Object method asynchronously and concurrently. Caller needs to ensure that no data race is possible for * the mutable state with given object method and with arguments. So, the method can be concurrently run with * different os threads. * * @param object Object Value. * @param methodName Name of the method. * @param strandName Name for newly created strand which is used to execute the function pointer. This is * optional and can be null. * @param metadata Meta data of new strand. * @param callback Callback which will get notified once the method execution is done. * @param properties Set of properties for strand. 
* @param returnType Expected return type of this method. * @param args Ballerina function arguments. * @return {@link BFuture} containing return value for executing this method. * <p> * This method needs to be called if both object.getType().isIsolated() and * object.getType().isIsolated(methodName) returns true. */ public BFuture invokeMethodAsyncConcurrently(BObject object, String methodName, String strandName, StrandMetadata metadata, Callback callback, Map<String, Object> properties, Type returnType, Object... args) { try { validateArgs(object, methodName); ObjectValue objectVal = (ObjectValue) object; FutureValue future = scheduler.createFuture(null, callback, properties, returnType, strandName, metadata); AsyncUtils.getArgsWithDefaultValues(scheduler, objectVal, methodName, new Callback() { @Override public void notifySuccess(Object result) { Function<?, ?> func = getFunction((Object[]) result, objectVal, methodName); scheduler.schedule(new Object[1], func, future); } @Override public void notifyFailure(BError error) { callback.notifyFailure(error); } }, args); return future; } catch (BError e) { callback.notifyFailure(e); } catch (Throwable e) { callback.notifyFailure(ErrorCreator.createError(StringUtils.fromString(e.getMessage()))); } return null; } /** * Invoke Object method asynchronously. This will schedule the function and block the strand. * This API checks whether the object or object method is isolated. So, if an object method is isolated, method * will be concurrently executed in different os threads. * <p> * Caller needs to ensure that no data race is possible for the mutable state with given arguments. So, the * method can be concurrently run with different os threads. * * @param object Object Value. * @param methodName Name of the method. * @param strandName Name for newly creating strand which is used to execute the function pointer. This is * optional and can be null. * @param metadata Meta data of new strand. 
* @param callback Callback which will get notify once method execution done. * @param properties Set of properties for strand * @param returnType Expected return type of this method * @param args Ballerina function arguments. * @return {@link BFuture} containing return value for executing this method. * @deprecated If caller can ensure that given object and object method is isolated and no data race is possible * for the mutable state with given arguments, use @invokeMethodAsyncConcurrently * otherwise @invokeMethodAsyncSequentially . * <p> * We can decide the object method isolation if and only if both object.getType().isIsolated() and * object.getType().isIsolated(methodName) returns true. */ @Deprecated public BFuture invokeMethodAsync(BObject object, String methodName, String strandName, StrandMetadata metadata, Callback callback, Map<String, Object> properties, Type returnType, Object... args) { try { validateArgs(object, methodName); ObjectValue objectVal = (ObjectValue) object; ObjectType objectType = (ObjectType) TypeUtils.getImpliedType(objectVal.getType()); boolean isIsolated = objectType.isIsolated() && objectType.isIsolated(methodName); FutureValue future = scheduler.createFuture(null, callback, properties, returnType, strandName, metadata); AsyncUtils.getArgsWithDefaultValues(scheduler, objectVal, methodName, new Callback() { @Override public void notifySuccess(Object result) { Function<?, ?> func = getFunction((Object[]) result, objectVal, methodName); if (isIsolated) { scheduler.schedule(new Object[1], func, future); } else { scheduler.scheduleToObjectGroup(new Object[1], func, future); } } @Override public void notifyFailure(BError error) { callback.notifyFailure(error); } }, args); return future; } catch (BError e) { callback.notifyFailure(e); } catch (Throwable e) { callback.notifyFailure(ErrorCreator.createError(StringUtils.fromString(e.getMessage()))); } return null; } /** * Invoke Object method asynchronously. 
This will schedule the function and block the strand. * * @param object Object Value. * @param methodName Name of the method. * @param strandName Name for newly created strand which is used to execute the function pointer. This is optional * and can be null. * @param metadata Meta data of new strand. * @param callback Callback which will get notified once the method execution is done. * @param args Ballerina function arguments. * @return the result of the function invocation. * @deprecated If caller can ensure that given object and object method is isolated and no data race is possible * for the mutable state with given arguments, use @invokeMethodAsyncConcurrently * otherwise @invokeMethodAsyncSequentially . * <p> * We can decide the object method isolation if both object.getType().isIsolated() and * object.getType().isIsolated(methodName) returns true. */ @Deprecated public Object invokeMethodAsync(BObject object, String methodName, String strandName, StrandMetadata metadata, Callback callback, Object... 
args) { return invokeMethodAsync(object, methodName, strandName, metadata, callback, null, PredefinedTypes.TYPE_NULL, args); } private void validateArgs(BObject object, String methodName) { if (object == null) { throw ErrorCreator.createError(StringUtils.fromString("object cannot be null")); } if (methodName == null) { throw ErrorCreator.createError(StringUtils.fromString("method name cannot be null")); } } public void registerListener(BObject listener) { scheduler.getRuntimeRegistry().registerListener(listener); } public void deregisterListener(BObject listener) { scheduler.getRuntimeRegistry().deregisterListener(listener); } public void registerStopHandler(BFunctionPointer<?, ?> stopHandler) { scheduler.getRuntimeRegistry().registerStopHandler(stopHandler); } private Function<?, ?> getFunction(Object[] argsWithDefaultValues, ObjectValue objectVal, String methodName) { Function<?, ?> func; if (argsWithDefaultValues.length == 1) { func = o -> objectVal.call((Strand) (((Object[]) o)[0]), methodName, argsWithDefaultValues[0]); } else { func = o -> objectVal.call((Strand) (((Object[]) o)[0]), methodName, argsWithDefaultValues); } return func; } private void invokeConfigInit() { Class<?> configClass = loadClass(CONFIGURATION_CLASS_NAME); ConfigDetails configDetails = LaunchUtils.getConfigurationDetails(); String funcName = Utils.encodeFunctionIdentifier("$configureInit"); try { final Method method = configClass.getDeclaredMethod(funcName, String[].class, Path[].class, String.class); method.invoke(null, new String[]{}, configDetails.paths, configDetails.configContent); } catch (InvocationTargetException | NoSuchMethodException | IllegalAccessException e) { throw ErrorCreator.createError(StringUtils.fromString("configurable initialization failed due to " + RuntimeUtils.formatErrorMessage(e)), e); } } private void invokeModuleStop() { Class<?> initClass = loadClass(MODULE_INIT_CLASS_NAME); String funcName = Utils.encodeFunctionIdentifier("$moduleStop"); try { final Method 
method = initClass.getDeclaredMethod(funcName, RuntimeRegistry.class); method.invoke(null, scheduler.getRuntimeRegistry()); } catch (InvocationTargetException | NoSuchMethodException | IllegalAccessException e) { throw ErrorCreator.createError(StringUtils.fromString("calling module stop failed due to " + RuntimeUtils.formatErrorMessage(e)), e); } } private Class<?> loadClass(String moduleInitClassName) { String initClassName = getFullQualifiedClassName(this.module, moduleInitClassName); Class<?> initClazz; try { initClazz = Class.forName(initClassName); } catch (Throwable e) { throw ErrorCreator.createError(StringUtils.fromString("failed to load configuration class :" + initClassName), e); } return initClazz; } private static String getFullQualifiedClassName(Module module, String className) { String orgName = module.getOrg(); String packageName = module.getName(); if (!DOT.equals(packageName)) { className = encodeNonFunctionIdentifier(packageName) + "." + module.getMajorVersion() + "." + className; } if (!ANON_ORG.equals(orgName)) { className = encodeNonFunctionIdentifier(orgName) + "." + className; } return className; } private void invokeMethodSync(String functionName) { final CountDownLatch latch = new CountDownLatch(1); SyncCallback callback = new SyncCallback(latch); invokeMethod(functionName, callback, PredefinedTypes.TYPE_NULL, functionName, new Object[1]); try { latch.await(); } catch (InterruptedException e) { throw ErrorCreator.createError(e); } if (callback.initError != null) { throw callback.initError; } } private void invokeMethod(String functionName, Callback callback, Type returnType, String strandName, Object... 
args) { ValueCreator valueCreator = ValueCreator.getValueCreator(ValueCreator.getLookupKey(module.getOrg(), module.getName(), module.getMajorVersion(), module.isTestPkg())); Function<?, ?> func = o -> valueCreator.call((Strand) (((Object[]) o)[0]), functionName, args); FutureValue future = scheduler.createFuture(null, callback, null, returnType, strandName, null); Object[] argsWithStrand = new Object[args.length + 1]; argsWithStrand[0] = future.strand; System.arraycopy(args, 0, argsWithStrand, 1, args.length); scheduler.schedule(argsWithStrand, func, future); } /** * This class used to handle ballerina function invocation synchronously. * * @since 2201.9.1 */ static class SyncCallback implements Callback { CountDownLatch latch; BError initError; public SyncCallback(CountDownLatch latch) { this.latch = latch; } @Override public void notifySuccess(Object result) { latch.countDown(); } @Override public void notifyFailure(BError error) { latch.countDown(); initError = error; } } }
class BalRuntime extends Runtime { private final Scheduler scheduler; private final Module module; private boolean moduleInitialized = false; private boolean moduleStarted = false; private boolean moduleStopped = false; private Thread schedulerThread = null; public BalRuntime(Scheduler scheduler, Module module) { this.scheduler = scheduler; this.module = module; } public BalRuntime(Module module) { this.scheduler = new Scheduler(true); this.module = module; } public void init() { if (moduleInitialized) { throw ErrorHelper.getRuntimeException(ErrorCodes.FUNCTION_ALREADY_CALLED, "init"); } invokeConfigInit(); schedulerThread = new Thread(scheduler::start); schedulerThread.start(); invokeMethodSync("$moduleInit"); moduleInitialized = true; } public void start() { if (!moduleInitialized) { throw ErrorHelper.getRuntimeException(ErrorCodes.INVALID_FUNCTION_INVOCATION_BEFORE_MODULE_INIT, "start"); } if (moduleStarted) { throw ErrorHelper.getRuntimeException(ErrorCodes.FUNCTION_ALREADY_CALLED, "start"); } invokeMethodSync("$moduleStart"); moduleStarted = true; } public void invokeMethodAsync(String functionName, Callback callback, Object... args) { if (!moduleInitialized) { throw ErrorHelper.getRuntimeException(ErrorCodes.INVALID_FUNCTION_INVOCATION_BEFORE_MODULE_INIT, functionName); } invokeMethod(functionName, callback, PredefinedTypes.TYPE_ANY, functionName, args); } /** * Invoke Object method asynchronously and sequentially. This method will ensure that the object methods are * invoked in the same thread where other object methods are executed. So, the methods will be executed * sequentially per object level. * * @param object Object Value. * @param methodName Name of the method. * @param strandName Name for newly created strand which is used to execute the function pointer. This is * optional and can be null. * @param metadata Meta data of new strand. * @param callback Callback which will get notified once the method execution is done. 
* @param properties Set of properties for strand. * @param returnType Expected return type of this method. * @param args Ballerina function arguments. * @return {@link BFuture} containing return value for executing this method. * <p> * This method needs to be called if object.getType().isIsolated() or * object.getType().isIsolated(methodName) returns false. */ public BFuture invokeMethodAsyncSequentially(BObject object, String methodName, String strandName, StrandMetadata metadata, Callback callback, Map<String, Object> properties, Type returnType, Object... args) { try { validateArgs(object, methodName); ObjectValue objectVal = (ObjectValue) object; FutureValue future = scheduler.createFuture(null, callback, properties, returnType, strandName, metadata); AsyncUtils.getArgsWithDefaultValues(scheduler, objectVal, methodName, new Callback() { @Override public void notifySuccess(Object result) { Function<?, ?> func = getFunction((Object[]) result, objectVal, methodName); scheduler.scheduleToObjectGroup(new Object[1], func, future); } @Override public void notifyFailure(BError error) { callback.notifyFailure(error); } }, args); return future; } catch (BError e) { callback.notifyFailure(e); } catch (Throwable e) { callback.notifyFailure(ErrorCreator.createError(StringUtils.fromString(e.getMessage()))); } return null; } /** * Invoke Object method asynchronously and concurrently. Caller needs to ensure that no data race is possible for * the mutable state with given object method and with arguments. So, the method can be concurrently run with * different os threads. * * @param object Object Value. * @param methodName Name of the method. * @param strandName Name for newly created strand which is used to execute the function pointer. This is * optional and can be null. * @param metadata Meta data of new strand. * @param callback Callback which will get notified once the method execution is done. * @param properties Set of properties for strand. 
* @param returnType Expected return type of this method. * @param args Ballerina function arguments. * @return {@link BFuture} containing return value for executing this method. * <p> * This method needs to be called if both object.getType().isIsolated() and * object.getType().isIsolated(methodName) returns true. */ public BFuture invokeMethodAsyncConcurrently(BObject object, String methodName, String strandName, StrandMetadata metadata, Callback callback, Map<String, Object> properties, Type returnType, Object... args) { try { validateArgs(object, methodName); ObjectValue objectVal = (ObjectValue) object; FutureValue future = scheduler.createFuture(null, callback, properties, returnType, strandName, metadata); AsyncUtils.getArgsWithDefaultValues(scheduler, objectVal, methodName, new Callback() { @Override public void notifySuccess(Object result) { Function<?, ?> func = getFunction((Object[]) result, objectVal, methodName); scheduler.schedule(new Object[1], func, future); } @Override public void notifyFailure(BError error) { callback.notifyFailure(error); } }, args); return future; } catch (BError e) { callback.notifyFailure(e); } catch (Throwable e) { callback.notifyFailure(ErrorCreator.createError(StringUtils.fromString(e.getMessage()))); } return null; } /** * Invoke Object method asynchronously. This will schedule the function and block the strand. * This API checks whether the object or object method is isolated. So, if an object method is isolated, method * will be concurrently executed in different os threads. * <p> * Caller needs to ensure that no data race is possible for the mutable state with given arguments. So, the * method can be concurrently run with different os threads. * * @param object Object Value. * @param methodName Name of the method. * @param strandName Name for newly creating strand which is used to execute the function pointer. This is * optional and can be null. * @param metadata Meta data of new strand. 
* @param callback Callback which will get notify once method execution done. * @param properties Set of properties for strand * @param returnType Expected return type of this method * @param args Ballerina function arguments. * @return {@link BFuture} containing return value for executing this method. * @deprecated If caller can ensure that given object and object method is isolated and no data race is possible * for the mutable state with given arguments, use @invokeMethodAsyncConcurrently * otherwise @invokeMethodAsyncSequentially . * <p> * We can decide the object method isolation if and only if both object.getType().isIsolated() and * object.getType().isIsolated(methodName) returns true. */ @Deprecated public BFuture invokeMethodAsync(BObject object, String methodName, String strandName, StrandMetadata metadata, Callback callback, Map<String, Object> properties, Type returnType, Object... args) { try { validateArgs(object, methodName); ObjectValue objectVal = (ObjectValue) object; ObjectType objectType = (ObjectType) TypeUtils.getImpliedType(objectVal.getType()); boolean isIsolated = objectType.isIsolated() && objectType.isIsolated(methodName); FutureValue future = scheduler.createFuture(null, callback, properties, returnType, strandName, metadata); AsyncUtils.getArgsWithDefaultValues(scheduler, objectVal, methodName, new Callback() { @Override public void notifySuccess(Object result) { Function<?, ?> func = getFunction((Object[]) result, objectVal, methodName); if (isIsolated) { scheduler.schedule(new Object[1], func, future); } else { scheduler.scheduleToObjectGroup(new Object[1], func, future); } } @Override public void notifyFailure(BError error) { callback.notifyFailure(error); } }, args); return future; } catch (BError e) { callback.notifyFailure(e); } catch (Throwable e) { callback.notifyFailure(ErrorCreator.createError(StringUtils.fromString(e.getMessage()))); } return null; } /** * Invoke Object method asynchronously. 
This will schedule the function and block the strand. * * @param object Object Value. * @param methodName Name of the method. * @param strandName Name for newly created strand which is used to execute the function pointer. This is optional * and can be null. * @param metadata Meta data of new strand. * @param callback Callback which will get notified once the method execution is done. * @param args Ballerina function arguments. * @return the result of the function invocation. * @deprecated If caller can ensure that given object and object method is isolated and no data race is possible * for the mutable state with given arguments, use @invokeMethodAsyncConcurrently * otherwise @invokeMethodAsyncSequentially . * <p> * We can decide the object method isolation if both object.getType().isIsolated() and * object.getType().isIsolated(methodName) returns true. */ @Deprecated public Object invokeMethodAsync(BObject object, String methodName, String strandName, StrandMetadata metadata, Callback callback, Object... 
args) { return invokeMethodAsync(object, methodName, strandName, metadata, callback, null, PredefinedTypes.TYPE_NULL, args); } private void validateArgs(BObject object, String methodName) { if (object == null) { throw ErrorCreator.createError(StringUtils.fromString("object cannot be null")); } if (methodName == null) { throw ErrorCreator.createError(StringUtils.fromString("method name cannot be null")); } } public void registerListener(BObject listener) { scheduler.getRuntimeRegistry().registerListener(listener); } public void deregisterListener(BObject listener) { scheduler.getRuntimeRegistry().deregisterListener(listener); } public void registerStopHandler(BFunctionPointer<?, ?> stopHandler) { scheduler.getRuntimeRegistry().registerStopHandler(stopHandler); } private Function<?, ?> getFunction(Object[] argsWithDefaultValues, ObjectValue objectVal, String methodName) { Function<?, ?> func; if (argsWithDefaultValues.length == 1) { func = o -> objectVal.call((Strand) (((Object[]) o)[0]), methodName, argsWithDefaultValues[0]); } else { func = o -> objectVal.call((Strand) (((Object[]) o)[0]), methodName, argsWithDefaultValues); } return func; } private void invokeConfigInit() { Class<?> configClass = loadClass(CONFIGURATION_CLASS_NAME); ConfigDetails configDetails = LaunchUtils.getConfigurationDetails(); String funcName = Utils.encodeFunctionIdentifier("$configureInit"); try { final Method method = configClass.getDeclaredMethod(funcName, String[].class, Path[].class, String.class); method.invoke(null, new String[]{}, configDetails.paths, configDetails.configContent); } catch (InvocationTargetException | NoSuchMethodException | IllegalAccessException e) { throw ErrorCreator.createError(StringUtils.fromString("configurable initialization failed due to " + RuntimeUtils.formatErrorMessage(e)), e); } } private void invokeModuleStop() { Class<?> initClass = loadClass(MODULE_INIT_CLASS_NAME); String funcName = Utils.encodeFunctionIdentifier("$moduleStop"); try { final Method 
method = initClass.getDeclaredMethod(funcName, RuntimeRegistry.class); method.invoke(null, scheduler.getRuntimeRegistry()); } catch (InvocationTargetException | NoSuchMethodException | IllegalAccessException e) { throw ErrorCreator.createError(StringUtils.fromString("failed to stop the module due to " + RuntimeUtils.formatErrorMessage(e)), e); } } private Class<?> loadClass(String moduleInitClassName) { String initClassName = getFullQualifiedClassName(this.module, moduleInitClassName); Class<?> initClazz; try { initClazz = Class.forName(initClassName); } catch (Throwable e) { throw ErrorCreator.createError(StringUtils.fromString("failed to load configuration class :" + initClassName), e); } return initClazz; } private static String getFullQualifiedClassName(Module module, String className) { String orgName = module.getOrg(); String packageName = module.getName(); if (!DOT.equals(packageName)) { className = encodeNonFunctionIdentifier(packageName) + "." + module.getMajorVersion() + "." + className; } if (!ANON_ORG.equals(orgName)) { className = encodeNonFunctionIdentifier(orgName) + "." + className; } return className; } private void invokeMethodSync(String functionName) { final CountDownLatch latch = new CountDownLatch(1); SyncCallback callback = new SyncCallback(latch); invokeMethod(functionName, callback, PredefinedTypes.TYPE_NULL, functionName, new Object[1]); try { latch.await(); } catch (InterruptedException e) { throw ErrorCreator.createError(e); } if (callback.initError != null) { throw callback.initError; } } private void invokeMethod(String functionName, Callback callback, Type returnType, String strandName, Object... 
args) { ValueCreator valueCreator = ValueCreator.getValueCreator(ValueCreator.getLookupKey(module.getOrg(), module.getName(), module.getMajorVersion(), module.isTestPkg())); Function<?, ?> func = o -> valueCreator.call((Strand) (((Object[]) o)[0]), functionName, args); FutureValue future = scheduler.createFuture(null, callback, null, returnType, strandName, null); Object[] argsWithStrand = new Object[args.length + 1]; argsWithStrand[0] = future.strand; System.arraycopy(args, 0, argsWithStrand, 1, args.length); scheduler.schedule(argsWithStrand, func, future); } /** * This class used to handle ballerina function invocation synchronously. * * @since 2201.9.1 */ static class SyncCallback implements Callback { CountDownLatch latch; BError initError; public SyncCallback(CountDownLatch latch) { this.latch = latch; } @Override public void notifySuccess(Object result) { latch.countDown(); } @Override public void notifyFailure(BError error) { latch.countDown(); initError = error; } } }
That should be in `KeycloakRealmResourceManager` ? I see, please introduce a dedicated client id, same as I did for the jwt. though it has not been merget yet :-)
public void testRPInitiatedLogout() throws IOException, InterruptedException { Keycloak keycloak = KeycloakRealmResourceManager.createKeycloakClient(); RealmResource realm = keycloak.realm(KeycloakRealmResourceManager.KEYCLOAK_REALM); RealmRepresentation representation = realm .toRepresentation(); Integer ssoSessionMaxLifespan = representation.getSsoSessionMaxLifespan(); representation.setSsoSessionMaxLifespan(20); Integer accessCodeLifespan = representation.getAccessCodeLifespan(); representation.setAccessTokenLifespan(5); realm.update(representation); try (final WebClient webClient = createWebClient()) { HtmlPage page = webClient.getPage("http: assertEquals("Log in to quarkus", page.getTitleText()); HtmlForm loginForm = page.getForms().get(0); loginForm.getInputByName("username").setValueAttribute("alice"); loginForm.getInputByName("password").setValueAttribute("alice"); page = loginForm.getInputByName("login").click(); assertTrue(page.asText().contains("Tenant Logout")); assertNotNull(getSessionCookie(webClient)); page = webClient.getPage("http: assertTrue(page.asText().contains("You were logged out")); assertNull(getSessionCookie(webClient)); page = webClient.getPage("http: assertEquals("Log in to quarkus", page.getTitleText()); loginForm = page.getForms().get(0); loginForm.getInputByName("username").setValueAttribute("alice"); loginForm.getInputByName("password").setValueAttribute("alice"); page = loginForm.getInputByName("login").click(); assertTrue(page.asText().contains("Tenant Logout")); await().atMost(10, TimeUnit.SECONDS) .pollInterval(Duration.ofSeconds(5)) .until(new Callable<Boolean>() { @Override public Boolean call() throws Exception { webClient.getOptions().setRedirectEnabled(false); WebResponse webResponse = webClient .loadWebResponse(new WebRequest(URI.create("http: assertEquals(200, webResponse.getStatusCode()); assertTrue(webResponse.getContentAsString().contains("Tenant Logout")); assertNotNull(getSessionCookie(webClient)); return true; } }); 
} finally { representation.setSsoSessionMaxLifespan(ssoSessionMaxLifespan); representation.setAccessTokenLifespan(accessCodeLifespan); realm.update(representation); } }
Keycloak keycloak = KeycloakRealmResourceManager.createKeycloakClient();
public void testRPInitiatedLogout() throws IOException { try (final WebClient webClient = createWebClient()) { HtmlPage page = webClient.getPage("http: assertEquals("Log in to logout-realm", page.getTitleText()); HtmlForm loginForm = page.getForms().get(0); loginForm.getInputByName("username").setValueAttribute("alice"); loginForm.getInputByName("password").setValueAttribute("alice"); page = loginForm.getInputByName("login").click(); assertTrue(page.asText().contains("Tenant Logout")); assertNotNull(getSessionCookie(webClient)); page = webClient.getPage("http: assertTrue(page.asText().contains("You were logged out")); assertNull(getSessionCookie(webClient)); page = webClient.getPage("http: assertEquals("Log in to logout-realm", page.getTitleText()); loginForm = page.getForms().get(0); loginForm.getInputByName("username").setValueAttribute("alice"); loginForm.getInputByName("password").setValueAttribute("alice"); page = loginForm.getInputByName("login").click(); assertTrue(page.asText().contains("Tenant Logout")); Cookie sessionCookie = getSessionCookie(webClient); assertNotNull(sessionCookie); String idToken = getIdToken(sessionCookie); await().atMost(10, TimeUnit.SECONDS) .pollInterval(Duration.ofSeconds(1)) .until(new Callable<Boolean>() { @Override public Boolean call() throws Exception { webClient.getOptions().setRedirectEnabled(false); WebResponse webResponse = webClient .loadWebResponse(new WebRequest(URI.create("http: Cookie newSessionCookie = getSessionCookie(webClient); assertNotNull(newSessionCookie); return !idToken.equals(getIdToken(newSessionCookie)); } }); page = webClient.getPage("http: assertTrue(page.asText().contains("Tenant Logout")); assertNotNull(getSessionCookie(webClient)); await().atMost(20, TimeUnit.SECONDS) .pollInterval(Duration.ofSeconds(1)) .until(new Callable<Boolean>() { @Override public Boolean call() throws Exception { webClient.getOptions().setRedirectEnabled(false); WebResponse webResponse = webClient .loadWebResponse(new 
WebRequest(URI.create("http: int statusCode = webResponse.getStatusCode(); if (statusCode == 302) { assertNull(getSessionCookie(webClient)); return true; } return false; } }); webClient.getOptions().setRedirectEnabled(true); page = webClient.getPage("http: assertNull(getSessionCookie(webClient)); assertEquals("Log in to logout-realm", page.getTitleText()); } }
class CodeFlowTest { @Test public void testCodeFlowNoConsent() throws IOException { try (final WebClient webClient = createWebClient()) { webClient.getOptions().setRedirectEnabled(false); WebResponse webResponse = webClient .loadWebResponse(new WebRequest(URI.create("http: verifyLocationHeader(webClient, webResponse.getResponseHeaderValue("location")); webClient.getOptions().setRedirectEnabled(true); HtmlPage page = webClient.getPage("http: assertEquals("/index.html", getStateCookieSavedPath(webClient)); assertEquals("Log in to quarkus", page.getTitleText()); HtmlForm loginForm = page.getForms().get(0); loginForm.getInputByName("username").setValueAttribute("alice"); loginForm.getInputByName("password").setValueAttribute("alice"); page = loginForm.getInputByName("login").click(); assertEquals("Welcome to Test App", page.getTitleText()); page = webClient.getPage("http: assertEquals("Welcome to Test App", page.getTitleText(), "A second request should not redirect and just re-authenticate the user"); webClient.getCookieManager().clearCookies(); } } private void verifyLocationHeader(WebClient webClient, String loc) { assertTrue(loc.startsWith("http: assertTrue(loc.contains("redirect_uri=http%3A%2F%2Flocalhost%3A8081%2Fweb-app")); assertTrue(loc.contains("state=" + getStateCookieStateParam(webClient))); assertTrue(loc.contains("scope=openid+profile+email+phone")); assertTrue(loc.contains("response_type=code")); assertTrue(loc.contains("client_id=quarkus-app")); assertTrue(loc.contains("max-age=60")); } @Test public void testTokenTimeoutLogout() throws IOException, InterruptedException { try (final WebClient webClient = createWebClient()) { HtmlPage page = webClient.getPage("http: assertEquals("/index.html", getStateCookieSavedPath(webClient)); assertEquals("Log in to quarkus", page.getTitleText()); HtmlForm loginForm = page.getForms().get(0); loginForm.getInputByName("username").setValueAttribute("alice"); loginForm.getInputByName("password").setValueAttribute("alice"); 
page = loginForm.getInputByName("login").click(); assertEquals("Welcome to Test App", page.getTitleText()); assertNull(getStateCookie(webClient)); Cookie sessionCookie = getSessionCookie(webClient); assertNotNull(sessionCookie); assertEquals("/", sessionCookie.getPath()); Thread.sleep(5000); webClient.getOptions().setRedirectEnabled(false); WebResponse webResponse = webClient .loadWebResponse(new WebRequest(URI.create("http: assertEquals(302, webResponse.getStatusCode()); assertNull(getSessionCookie(webClient)); webClient.getOptions().setRedirectEnabled(true); page = webClient.getPage("http: assertEquals("Log in to quarkus", page.getTitleText()); webClient.getCookieManager().clearCookies(); } } @Test @Test public void testIdTokenInjection() throws IOException { try (final WebClient webClient = createWebClient()) { HtmlPage page = webClient.getPage("http: assertEquals("/index.html", getStateCookieSavedPath(webClient)); assertEquals("Log in to quarkus", page.getTitleText()); HtmlForm loginForm = page.getForms().get(0); loginForm.getInputByName("username").setValueAttribute("alice"); loginForm.getInputByName("password").setValueAttribute("alice"); page = loginForm.getInputByName("login").click(); assertEquals("Welcome to Test App", page.getTitleText()); page = webClient.getPage("http: assertEquals("alice", page.getBody().asText()); webClient.getCookieManager().clearCookies(); } } @Test public void testIdTokenInjectionWithoutRestoredPath() throws IOException, InterruptedException { try (final WebClient webClient = createWebClient()) { HtmlPage page = webClient.getPage("http: assertNotNull(getStateCookieStateParam(webClient)); assertNull(getStateCookieSavedPath(webClient)); assertEquals("Log in to quarkus", page.getTitleText()); HtmlForm loginForm = page.getForms().get(0); loginForm.getInputByName("username").setValueAttribute("alice"); loginForm.getInputByName("password").setValueAttribute("alice"); page = loginForm.getInputByName("login").click(); 
assertEquals("callback:alice", page.getBody().asText()); webClient.getCookieManager().clearCookies(); } } @Test public void testIdTokenInjectionWithoutRestoredPathDifferentRoot() throws IOException, InterruptedException { try (final WebClient webClient = createWebClient()) { HtmlPage page = webClient.getPage("http: assertNotNull(getStateCookieStateParam(webClient)); assertNull(getStateCookieSavedPath(webClient)); assertEquals("Log in to quarkus", page.getTitleText()); HtmlForm loginForm = page.getForms().get(0); loginForm.getInputByName("username").setValueAttribute("alice"); loginForm.getInputByName("password").setValueAttribute("alice"); page = loginForm.getInputByName("login").click(); assertEquals("web-app2:alice", page.getBody().asText()); page = webClient.getPage("http: assertEquals("web-app2:alice", page.getBody().asText()); assertNull(getStateCookie(webClient)); Cookie sessionCookie = getSessionCookie(webClient); assertNotNull(sessionCookie); assertEquals("/web-app2", sessionCookie.getPath()); Thread.sleep(5000); webClient.getOptions().setRedirectEnabled(false); WebResponse webResponse = webClient .loadWebResponse(new WebRequest(URI.create("http: assertEquals(302, webResponse.getStatusCode()); assertNull(getSessionCookie(webClient)); webClient.getOptions().setRedirectEnabled(true); page = webClient.getPage("http: assertEquals("Log in to quarkus", page.getTitleText()); webClient.getCookieManager().clearCookies(); } } @Test public void testAuthenticationCompletionFailedNoStateCookie() throws IOException, InterruptedException { try (final WebClient webClient = createWebClient()) { HtmlPage page = webClient.getPage("http: assertEquals("Log in to quarkus", page.getTitleText()); HtmlForm loginForm = page.getForms().get(0); loginForm.getInputByName("username").setValueAttribute("alice"); loginForm.getInputByName("password").setValueAttribute("alice"); try { loginForm.getInputByName("login").click(); fail("401 status error is expected"); } catch 
(FailingHttpStatusCodeException ex) { assertEquals(401, ex.getStatusCode()); } webClient.getCookieManager().clearCookies(); } } @Test public void testAuthenticationCompletionFailedWrongRedirectUri() throws IOException, InterruptedException { try (final WebClient webClient = createWebClient()) { HtmlPage page = webClient.getPage("http: assertEquals("Log in to quarkus", page.getTitleText()); HtmlForm loginForm = page.getForms().get(0); loginForm.getInputByName("username").setValueAttribute("alice"); loginForm.getInputByName("password").setValueAttribute("alice"); try { page = loginForm.getInputByName("login").click(); fail("401 status error is expected: " + page.getBody().asText()); } catch (FailingHttpStatusCodeException ex) { assertEquals(401, ex.getStatusCode()); } webClient.getCookieManager().clearCookies(); } } @Test public void testAccessTokenInjection() throws IOException { try (final WebClient webClient = createWebClient()) { HtmlPage page = webClient.getPage("http: assertEquals("/index.html", getStateCookieSavedPath(webClient)); assertEquals("Log in to quarkus", page.getTitleText()); HtmlForm loginForm = page.getForms().get(0); loginForm.getInputByName("username").setValueAttribute("alice"); loginForm.getInputByName("password").setValueAttribute("alice"); page = loginForm.getInputByName("login").click(); assertEquals("Welcome to Test App", page.getTitleText()); page = webClient.getPage("http: assertEquals("AT injected", page.getBody().asText()); webClient.getCookieManager().clearCookies(); } } @Test public void testAccessAndRefreshTokenInjection() throws IOException { try (final WebClient webClient = createWebClient()) { HtmlPage page = webClient.getPage("http: assertEquals("/index.html", getStateCookieSavedPath(webClient)); assertEquals("Log in to quarkus", page.getTitleText()); HtmlForm loginForm = page.getForms().get(0); loginForm.getInputByName("username").setValueAttribute("alice"); loginForm.getInputByName("password").setValueAttribute("alice"); page = 
loginForm.getInputByName("login").click(); assertEquals("Welcome to Test App", page.getTitleText()); page = webClient.getPage("http: assertEquals("RT injected", page.getBody().asText()); webClient.getCookieManager().clearCookies(); } } @Test public void testAccessAndRefreshTokenInjectionWithoutIndexHtml() throws IOException, InterruptedException { try (final WebClient webClient = createWebClient()) { HtmlPage page = webClient.getPage("http: assertEquals("/web-app/refresh", getStateCookieSavedPath(webClient)); assertEquals("Log in to quarkus", page.getTitleText()); HtmlForm loginForm = page.getForms().get(0); loginForm.getInputByName("username").setValueAttribute("alice"); loginForm.getInputByName("password").setValueAttribute("alice"); page = loginForm.getInputByName("login").click(); assertEquals("RT injected", page.getBody().asText()); webClient.getCookieManager().clearCookies(); } } @Test public void testNoCodeFlowUnprotected() { RestAssured.when().get("/public-web-app/access") .then() .statusCode(200) .body(Matchers.equalTo("no user")); } private WebClient createWebClient() { WebClient webClient = new WebClient(); webClient.setCssErrorHandler(new SilentCssErrorHandler()); return webClient; } private Cookie getSessionCookie(WebClient webClient) { return webClient.getCookieManager().getCookie("q_session"); } private Cookie getStateCookie(WebClient webClient) { return webClient.getCookieManager().getCookie("q_auth"); } private String getStateCookieStateParam(WebClient webClient) { return getStateCookie(webClient).getValue().split("___")[0]; } private String getStateCookieSavedPath(WebClient webClient) { String[] parts = getStateCookie(webClient).getValue().split("___"); return parts.length == 2 ? parts[1] : null; } }
class CodeFlowTest { @Test public void testCodeFlowNoConsent() throws IOException { try (final WebClient webClient = createWebClient()) { webClient.getOptions().setRedirectEnabled(false); WebResponse webResponse = webClient .loadWebResponse(new WebRequest(URI.create("http: verifyLocationHeader(webClient, webResponse.getResponseHeaderValue("location")); webClient.getOptions().setRedirectEnabled(true); HtmlPage page = webClient.getPage("http: assertEquals("/index.html", getStateCookieSavedPath(webClient)); assertEquals("Log in to quarkus", page.getTitleText()); HtmlForm loginForm = page.getForms().get(0); loginForm.getInputByName("username").setValueAttribute("alice"); loginForm.getInputByName("password").setValueAttribute("alice"); page = loginForm.getInputByName("login").click(); assertEquals("Welcome to Test App", page.getTitleText()); page = webClient.getPage("http: assertEquals("Welcome to Test App", page.getTitleText(), "A second request should not redirect and just re-authenticate the user"); webClient.getCookieManager().clearCookies(); } } private void verifyLocationHeader(WebClient webClient, String loc) { assertTrue(loc.startsWith("http: assertTrue(loc.contains("redirect_uri=http%3A%2F%2Flocalhost%3A8081%2Fweb-app")); assertTrue(loc.contains("state=" + getStateCookieStateParam(webClient))); assertTrue(loc.contains("scope=openid+profile+email+phone")); assertTrue(loc.contains("response_type=code")); assertTrue(loc.contains("client_id=quarkus-app")); assertTrue(loc.contains("max-age=60")); } @Test public void testTokenTimeoutLogout() throws IOException, InterruptedException { try (final WebClient webClient = createWebClient()) { HtmlPage page = webClient.getPage("http: assertEquals("/index.html", getStateCookieSavedPath(webClient)); assertEquals("Log in to quarkus", page.getTitleText()); HtmlForm loginForm = page.getForms().get(0); loginForm.getInputByName("username").setValueAttribute("alice"); loginForm.getInputByName("password").setValueAttribute("alice"); 
page = loginForm.getInputByName("login").click(); assertEquals("Welcome to Test App", page.getTitleText()); assertNull(getStateCookie(webClient)); Cookie sessionCookie = getSessionCookie(webClient); assertNotNull(sessionCookie); assertEquals("/", sessionCookie.getPath()); Thread.sleep(5000); webClient.getOptions().setRedirectEnabled(false); WebResponse webResponse = webClient .loadWebResponse(new WebRequest(URI.create("http: assertEquals(302, webResponse.getStatusCode()); assertNull(getSessionCookie(webClient)); webClient.getOptions().setRedirectEnabled(true); page = webClient.getPage("http: assertEquals("Log in to quarkus", page.getTitleText()); webClient.getCookieManager().clearCookies(); } } @Test @Test public void testIdTokenInjection() throws IOException { try (final WebClient webClient = createWebClient()) { HtmlPage page = webClient.getPage("http: assertEquals("/index.html", getStateCookieSavedPath(webClient)); assertEquals("Log in to quarkus", page.getTitleText()); HtmlForm loginForm = page.getForms().get(0); loginForm.getInputByName("username").setValueAttribute("alice"); loginForm.getInputByName("password").setValueAttribute("alice"); page = loginForm.getInputByName("login").click(); assertEquals("Welcome to Test App", page.getTitleText()); page = webClient.getPage("http: assertEquals("alice", page.getBody().asText()); webClient.getCookieManager().clearCookies(); } } @Test public void testIdTokenInjectionWithoutRestoredPath() throws IOException, InterruptedException { try (final WebClient webClient = createWebClient()) { HtmlPage page = webClient.getPage("http: assertNotNull(getStateCookieStateParam(webClient)); assertNull(getStateCookieSavedPath(webClient)); assertEquals("Log in to quarkus", page.getTitleText()); HtmlForm loginForm = page.getForms().get(0); loginForm.getInputByName("username").setValueAttribute("alice"); loginForm.getInputByName("password").setValueAttribute("alice"); page = loginForm.getInputByName("login").click(); 
assertEquals("callback:alice", page.getBody().asText()); webClient.getCookieManager().clearCookies(); } } @Test public void testIdTokenInjectionJwtMethod() throws IOException, InterruptedException { try (final WebClient webClient = createWebClient()) { HtmlPage page = webClient.getPage("http: assertNotNull(getStateCookieStateParam(webClient)); assertNull(getStateCookieSavedPath(webClient)); assertEquals("Log in to quarkus", page.getTitleText()); HtmlForm loginForm = page.getForms().get(0); loginForm.getInputByName("username").setValueAttribute("alice"); loginForm.getInputByName("password").setValueAttribute("alice"); page = loginForm.getInputByName("login").click(); assertEquals("callback-jwt:alice", page.getBody().asText()); webClient.getCookieManager().clearCookies(); } } @Test public void testIdTokenInjectionJwtMethodButPostMethodUsed() throws IOException, InterruptedException { try (final WebClient webClient = createWebClient()) { HtmlPage page = webClient.getPage("http: assertNotNull(getStateCookieStateParam(webClient)); assertNull(getStateCookieSavedPath(webClient)); assertEquals("Log in to quarkus", page.getTitleText()); HtmlForm loginForm = page.getForms().get(0); loginForm.getInputByName("username").setValueAttribute("alice"); loginForm.getInputByName("password").setValueAttribute("alice"); try { loginForm.getInputByName("login").click(); fail("401 status error is expected"); } catch (FailingHttpStatusCodeException ex) { assertEquals(401, ex.getStatusCode()); } webClient.getCookieManager().clearCookies(); } } @Test public void testIdTokenInjectionWithoutRestoredPathDifferentRoot() throws IOException, InterruptedException { try (final WebClient webClient = createWebClient()) { HtmlPage page = webClient.getPage("http: assertNotNull(getStateCookieStateParam(webClient)); assertNull(getStateCookieSavedPath(webClient)); assertEquals("Log in to quarkus", page.getTitleText()); HtmlForm loginForm = page.getForms().get(0); 
loginForm.getInputByName("username").setValueAttribute("alice"); loginForm.getInputByName("password").setValueAttribute("alice"); page = loginForm.getInputByName("login").click(); assertEquals("web-app2:alice", page.getBody().asText()); page = webClient.getPage("http: assertEquals("web-app2:alice", page.getBody().asText()); assertNull(getStateCookie(webClient)); Cookie sessionCookie = getSessionCookie(webClient); assertNotNull(sessionCookie); assertEquals("/web-app2", sessionCookie.getPath()); Thread.sleep(5000); webClient.getOptions().setRedirectEnabled(false); WebResponse webResponse = webClient .loadWebResponse(new WebRequest(URI.create("http: assertEquals(302, webResponse.getStatusCode()); assertNull(getSessionCookie(webClient)); webClient.getOptions().setRedirectEnabled(true); page = webClient.getPage("http: assertEquals("Log in to quarkus", page.getTitleText()); webClient.getCookieManager().clearCookies(); } } @Test public void testAuthenticationCompletionFailedNoStateCookie() throws IOException, InterruptedException { try (final WebClient webClient = createWebClient()) { HtmlPage page = webClient.getPage("http: assertEquals("Log in to quarkus", page.getTitleText()); HtmlForm loginForm = page.getForms().get(0); loginForm.getInputByName("username").setValueAttribute("alice"); loginForm.getInputByName("password").setValueAttribute("alice"); try { loginForm.getInputByName("login").click(); fail("401 status error is expected"); } catch (FailingHttpStatusCodeException ex) { assertEquals(401, ex.getStatusCode()); } webClient.getCookieManager().clearCookies(); } } @Test public void testAuthenticationCompletionFailedWrongRedirectUri() throws IOException, InterruptedException { try (final WebClient webClient = createWebClient()) { HtmlPage page = webClient.getPage("http: assertEquals("Log in to quarkus", page.getTitleText()); HtmlForm loginForm = page.getForms().get(0); loginForm.getInputByName("username").setValueAttribute("alice"); 
loginForm.getInputByName("password").setValueAttribute("alice"); try { page = loginForm.getInputByName("login").click(); fail("401 status error is expected: " + page.getBody().asText()); } catch (FailingHttpStatusCodeException ex) { assertEquals(401, ex.getStatusCode()); } webClient.getCookieManager().clearCookies(); } } @Test public void testAccessTokenInjection() throws IOException { try (final WebClient webClient = createWebClient()) { HtmlPage page = webClient.getPage("http: assertEquals("/index.html", getStateCookieSavedPath(webClient)); assertEquals("Log in to quarkus", page.getTitleText()); HtmlForm loginForm = page.getForms().get(0); loginForm.getInputByName("username").setValueAttribute("alice"); loginForm.getInputByName("password").setValueAttribute("alice"); page = loginForm.getInputByName("login").click(); assertEquals("Welcome to Test App", page.getTitleText()); page = webClient.getPage("http: assertEquals("AT injected", page.getBody().asText()); webClient.getCookieManager().clearCookies(); } } @Test public void testAccessAndRefreshTokenInjection() throws IOException { try (final WebClient webClient = createWebClient()) { HtmlPage page = webClient.getPage("http: assertEquals("/index.html", getStateCookieSavedPath(webClient)); assertEquals("Log in to quarkus", page.getTitleText()); HtmlForm loginForm = page.getForms().get(0); loginForm.getInputByName("username").setValueAttribute("alice"); loginForm.getInputByName("password").setValueAttribute("alice"); page = loginForm.getInputByName("login").click(); assertEquals("Welcome to Test App", page.getTitleText()); page = webClient.getPage("http: assertEquals("RT injected", page.getBody().asText()); webClient.getCookieManager().clearCookies(); } } @Test public void testAccessAndRefreshTokenInjectionWithoutIndexHtml() throws IOException, InterruptedException { try (final WebClient webClient = createWebClient()) { HtmlPage page = webClient.getPage("http: assertEquals("/web-app/refresh", 
getStateCookieSavedPath(webClient)); assertEquals("Log in to quarkus", page.getTitleText()); HtmlForm loginForm = page.getForms().get(0); loginForm.getInputByName("username").setValueAttribute("alice"); loginForm.getInputByName("password").setValueAttribute("alice"); page = loginForm.getInputByName("login").click(); assertEquals("RT injected", page.getBody().asText()); webClient.getCookieManager().clearCookies(); } } @Test public void testNoCodeFlowUnprotected() { RestAssured.when().get("/public-web-app/access") .then() .statusCode(200) .body(Matchers.equalTo("no user")); } private WebClient createWebClient() { WebClient webClient = new WebClient(); webClient.setCssErrorHandler(new SilentCssErrorHandler()); return webClient; } private Cookie getStateCookie(WebClient webClient) { return webClient.getCookieManager().getCookie("q_auth"); } private String getStateCookieStateParam(WebClient webClient) { return getStateCookie(webClient).getValue().split("___")[0]; } private String getStateCookieSavedPath(WebClient webClient) { String[] parts = getStateCookie(webClient).getValue().split("___"); return parts.length == 2 ? parts[1] : null; } private Cookie getSessionCookie(WebClient webClient) { return webClient.getCookieManager().getCookie("q_session"); } private String getIdToken(Cookie sessionCookie) { return sessionCookie.getValue().split("___")[0]; } }
What is the argument for changing the default approach for a more knob based one ? Maybe is better to preserve the default and only overwrite it if this is parametrized.
public Partition[] getPartitions() { try { List<? extends Source<T>> partitionedSources = source.split(bundleSize, options.get()); Partition[] partitions = new SourcePartition[partitionedSources.size()]; for (int i = 0; i < partitionedSources.size(); i++) { partitions[i] = new SourcePartition<>(id(), i, partitionedSources.get(i)); } return partitions; } catch (Exception e) { throw new RuntimeException( "Failed to create partitions for source " + source.getClass().getSimpleName(), e); } }
List<? extends Source<T>> partitionedSources = source.split(bundleSize, options.get());
public Partition[] getPartitions() { try { List<? extends Source<T>> partitionedSources; if (bundleSize > 0) { partitionedSources = source.split(bundleSize, options.get()); } else { long desiredSizeBytes = DEFAULT_BUNDLE_SIZE; try { desiredSizeBytes = source.getEstimatedSizeBytes(options.get()) / numPartitions; } catch (Exception e) { LOG.warn( "Failed to get estimated bundle size for source {}, using default bundle " + "size of {} bytes.", source, DEFAULT_BUNDLE_SIZE); } partitionedSources = source.split(desiredSizeBytes, options.get()); } Partition[] partitions = new SourcePartition[partitionedSources.size()]; for (int i = 0; i < partitionedSources.size(); i++) { partitions[i] = new SourcePartition<>(id(), i, partitionedSources.get(i)); } return partitions; } catch (Exception e) { throw new RuntimeException( "Failed to create partitions for source " + source.getClass().getSimpleName(), e); } }
class Bounded<T> extends RDD<WindowedValue<T>> { private static final Logger LOG = LoggerFactory.getLogger(SourceRDD.Bounded.class); private final BoundedSource<T> source; private final SerializablePipelineOptions options; private final long bundleSize; private final String stepName; private final Accumulator<MetricsContainerStepMap> metricsAccum; private static final scala.collection.immutable.Seq<Dependency<?>> NIL = JavaConversions.asScalaBuffer(Collections.<Dependency<?>>emptyList()).toList(); public Bounded( SparkContext sc, BoundedSource<T> source, SerializablePipelineOptions options, String stepName) { super(sc, NIL, JavaSparkContext$.MODULE$.fakeClassTag()); this.source = source; this.options = options; this.bundleSize = options.get().as(SparkPipelineOptions.class).getBundleSize(); checkArgument(this.bundleSize > 0, "Bundle size must be greater than zero."); this.stepName = stepName; this.metricsAccum = MetricsAccumulator.getInstance(); } @Override private BoundedSource.BoundedReader<T> createReader(SourcePartition<T> partition) { try { return ((BoundedSource<T>) partition.source).createReader(options.get()); } catch (IOException e) { throw new RuntimeException("Failed to create reader from a BoundedSource.", e); } } @Override public scala.collection.Iterator<WindowedValue<T>> compute( final Partition split, final TaskContext context) { final MetricsContainer metricsContainer = metricsAccum.localValue().getContainer(stepName); @SuppressWarnings("unchecked") final BoundedSource.BoundedReader<T> reader = createReader((SourcePartition<T>) split); final Iterator<WindowedValue<T>> readerIterator = new ReaderToIteratorAdapter<>(metricsContainer, reader); return new InterruptibleIterator<>(context, JavaConversions.asScalaIterator(readerIterator)); } /** * Exposes an <code>Iterator</code>&lt;{@link WindowedValue}&gt; interface on top of a {@link * Source.Reader}. 
* * <p><code>hasNext</code> is idempotent and returns <code>true</code> iff further items are * available for reading using the underlying reader. Consequently, when the reader is closed, * or when the reader has no further elements available (i.e, {@link Source.Reader * returned <code>false</code>), <code>hasNext</code> returns <code>false</code>. * * <p>Since this is a read-only iterator, an attempt to call <code>remove</code> will throw an * <code>UnsupportedOperationException</code>. */ @VisibleForTesting static class ReaderToIteratorAdapter<T> implements Iterator<WindowedValue<T>> { private static final boolean FAILED_TO_OBTAIN_NEXT = false; private static final boolean SUCCESSFULLY_OBTAINED_NEXT = true; private final MetricsContainer metricsContainer; private final Source.Reader<T> reader; private boolean started = false; private boolean closed = false; private WindowedValue<T> next = null; ReaderToIteratorAdapter( final MetricsContainer metricsContainer, final Source.Reader<T> reader) { this.metricsContainer = metricsContainer; this.reader = reader; } private boolean tryProduceNext() { try (Closeable ignored = MetricsEnvironment.scopedMetricsContainer(metricsContainer)) { if (closed) { return FAILED_TO_OBTAIN_NEXT; } else { checkState(next == null, "unexpected non-null value for next"); if (seekNext()) { next = WindowedValue.timestampedValueInGlobalWindow( reader.getCurrent(), reader.getCurrentTimestamp()); return SUCCESSFULLY_OBTAINED_NEXT; } else { close(); return FAILED_TO_OBTAIN_NEXT; } } } catch (final Exception e) { throw new RuntimeException("Failed to read data.", e); } } private void close() { closed = true; try { reader.close(); } catch (final IOException e) { throw new RuntimeException(e); } } private boolean seekNext() throws IOException { if (!started) { started = true; return reader.start(); } else { return !closed && reader.advance(); } } private WindowedValue<T> consumeCurrent() { if (next == null) { throw new NoSuchElementException(); } else 
{ final WindowedValue<T> current = next; next = null; return current; } } private WindowedValue<T> consumeNext() { if (next == null) { tryProduceNext(); } return consumeCurrent(); } @Override public boolean hasNext() { return next != null || tryProduceNext(); } @Override public WindowedValue<T> next() { return consumeNext(); } @Override public void remove() { throw new UnsupportedOperationException(); } } }
class Bounded<T> extends RDD<WindowedValue<T>> { private static final Logger LOG = LoggerFactory.getLogger(SourceRDD.Bounded.class); private final BoundedSource<T> source; private final SerializablePipelineOptions options; private final int numPartitions; private final long bundleSize; private final String stepName; private final Accumulator<MetricsContainerStepMap> metricsAccum; private static final scala.collection.immutable.Seq<Dependency<?>> NIL = JavaConversions.asScalaBuffer(Collections.<Dependency<?>>emptyList()).toList(); public Bounded( SparkContext sc, BoundedSource<T> source, SerializablePipelineOptions options, String stepName) { super(sc, NIL, JavaSparkContext$.MODULE$.fakeClassTag()); this.source = source; this.options = options; this.numPartitions = sc.defaultParallelism(); checkArgument(this.numPartitions > 0, "Number of partitions must be greater than zero."); this.bundleSize = options.get().as(SparkPipelineOptions.class).getBundleSize(); this.stepName = stepName; this.metricsAccum = MetricsAccumulator.getInstance(); } private static final long DEFAULT_BUNDLE_SIZE = 64L * 1024L * 1024L; @Override private BoundedSource.BoundedReader<T> createReader(SourcePartition<T> partition) { try { return ((BoundedSource<T>) partition.source).createReader(options.get()); } catch (IOException e) { throw new RuntimeException("Failed to create reader from a BoundedSource.", e); } } @Override public scala.collection.Iterator<WindowedValue<T>> compute( final Partition split, final TaskContext context) { final MetricsContainer metricsContainer = metricsAccum.localValue().getContainer(stepName); @SuppressWarnings("unchecked") final BoundedSource.BoundedReader<T> reader = createReader((SourcePartition<T>) split); final Iterator<WindowedValue<T>> readerIterator = new ReaderToIteratorAdapter<>(metricsContainer, reader); return new InterruptibleIterator<>(context, JavaConversions.asScalaIterator(readerIterator)); } /** * Exposes an <code>Iterator</code>&lt;{@link 
WindowedValue}&gt; interface on top of a {@link * Source.Reader}. * * <p><code>hasNext</code> is idempotent and returns <code>true</code> iff further items are * available for reading using the underlying reader. Consequently, when the reader is closed, * or when the reader has no further elements available (i.e, {@link Source.Reader * returned <code>false</code>), <code>hasNext</code> returns <code>false</code>. * * <p>Since this is a read-only iterator, an attempt to call <code>remove</code> will throw an * <code>UnsupportedOperationException</code>. */ @VisibleForTesting static class ReaderToIteratorAdapter<T> implements Iterator<WindowedValue<T>> { private static final boolean FAILED_TO_OBTAIN_NEXT = false; private static final boolean SUCCESSFULLY_OBTAINED_NEXT = true; private final MetricsContainer metricsContainer; private final Source.Reader<T> reader; private boolean started = false; private boolean closed = false; private WindowedValue<T> next = null; ReaderToIteratorAdapter( final MetricsContainer metricsContainer, final Source.Reader<T> reader) { this.metricsContainer = metricsContainer; this.reader = reader; } private boolean tryProduceNext() { try (Closeable ignored = MetricsEnvironment.scopedMetricsContainer(metricsContainer)) { if (closed) { return FAILED_TO_OBTAIN_NEXT; } else { checkState(next == null, "unexpected non-null value for next"); if (seekNext()) { next = WindowedValue.timestampedValueInGlobalWindow( reader.getCurrent(), reader.getCurrentTimestamp()); return SUCCESSFULLY_OBTAINED_NEXT; } else { close(); return FAILED_TO_OBTAIN_NEXT; } } } catch (final Exception e) { throw new RuntimeException("Failed to read data.", e); } } private void close() { closed = true; try { reader.close(); } catch (final IOException e) { throw new RuntimeException(e); } } private boolean seekNext() throws IOException { if (!started) { started = true; return reader.start(); } else { return !closed && reader.advance(); } } private WindowedValue<T> 
consumeCurrent() { if (next == null) { throw new NoSuchElementException(); } else { final WindowedValue<T> current = next; next = null; return current; } } private WindowedValue<T> consumeNext() { if (next == null) { tryProduceNext(); } return consumeCurrent(); } @Override public boolean hasNext() { return next != null || tryProduceNext(); } @Override public WindowedValue<T> next() { return consumeNext(); } @Override public void remove() { throw new UnsupportedOperationException(); } } }
Perspectives: 1. Bug risks: - In the revised code, there are no apparent bug risks. 2. Naming: - The method `getPartitionKeysByValue` can be renamed to something more descriptive, such as `fetchPartitionsByValues`. 3. Code style and best practices: - The import statements at the top of the file could be organized in a more readable manner. - It's generally recommended to avoid wildcard imports (`import static ... *`) and instead import specific classes or methods individually. 4. Compatibility: - No specific issues regarding compatibility. 5. Simplicity: - The revised code is straightforward and easy to understand. 6. Optimization points: - In the `partitionExists` method, when the `HiveTable` object has a boolean type partition column, all partition keys are fetched from the client. However, we can optimize this by fetching only the relevant partition key using `getPartitionNamesByFilter`. - Additionally, instead of using `contains` to check if the expected partition key exists, it would be more efficient to use a `HashSet` or `HashSet.contains` for better performance. Revised code with improvements implemented: ```java import static com.google.common.collect.ImmutableList.toImmutableList; import static com.google.common.collect.ImmutableMap.toImmutableMap; import static com.google.common.collect.Iterables.getOnlyElement; import static com.starrocks.connector.PartitionUtil.toHivePartitionName; // Import specific classes instead of wildcard import import com.starrocks.connector.hive.HiveMetastoreApiConverter; import com.starrocks.connector.hive.HiveTable; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.stream.Collectors; public class YourClass { // ... 
@Override public boolean partitionExists(Table table, List<String> partitionValues) { HiveTable hiveTable = (HiveTable) table; String dbName = hiveTable.getDbName(); String tableName = hiveTable.getTableName(); if (!hiveTable.hasBooleanTypePartitionColumn()) { return !client.getPartitionKeysByValue(dbName, tableName, partitionValues).isEmpty(); } else { Set<String> allPartitionNames = new HashSet<>(client.getPartitionNamesByFilter( dbName, tableName, hiveTable.getPartitionColumnNames(), partitionValues )); String hivePartitionName = toHivePartitionName(hiveTable.getPartitionColumnNames(), partitionValues); return allPartitionNames.contains(hivePartitionName); } } // ... } ``` Note: In the revised code, some parts are assumed or missing due to incomplete context. Make sure to adapt the changes accordingly and verify their correctness within the surrounding codebase.
public Partition getPartition(String dbName, String tblName, List<String> partitionValues) { StorageDescriptor sd; Map<String, String> params; if (partitionValues.size() > 0) { org.apache.hadoop.hive.metastore.api.Partition partition = client.getPartition(dbName, tblName, partitionValues); sd = partition.getSd(); params = partition.getParameters(); } else { org.apache.hadoop.hive.metastore.api.Table table = client.getTable(dbName, tblName); sd = table.getSd(); params = table.getParameters(); } return HiveMetastoreApiConverter.toPartition(sd, params); }
Map<String, String> params;
public Partition getPartition(String dbName, String tblName, List<String> partitionValues) { StorageDescriptor sd; Map<String, String> params; if (partitionValues.size() > 0) { org.apache.hadoop.hive.metastore.api.Partition partition = client.getPartition(dbName, tblName, partitionValues); sd = partition.getSd(); params = partition.getParameters(); } else { org.apache.hadoop.hive.metastore.api.Table table = client.getTable(dbName, tblName); sd = table.getSd(); params = table.getParameters(); } return HiveMetastoreApiConverter.toPartition(sd, params); }
class HiveMetastore implements IHiveMetastore { private static final Logger LOG = LogManager.getLogger(CachingHiveMetastore.class); private final HiveMetaClient client; private final String catalogName; public HiveMetastore(HiveMetaClient client, String catalogName) { this.client = client; this.catalogName = catalogName; } @Override public List<String> getAllDatabaseNames() { return client.getAllDatabaseNames(); } @Override public void createDb(String dbName, Map<String, String> properties) { String location = properties.getOrDefault(LOCATION_PROPERTY, ""); long dbId = ConnectorTableId.CONNECTOR_ID_GENERATOR.getNextId().asInt(); Database database = new Database(dbId, dbName, location); client.createDatabase(HiveMetastoreApiConverter.toMetastoreApiDatabase(database)); } @Override public void dropDb(String dbName, boolean deleteData) { client.dropDatabase(dbName, deleteData); } @Override public List<String> getAllTableNames(String dbName) { return client.getAllTableNames(dbName); } @Override public Database getDb(String dbName) { org.apache.hadoop.hive.metastore.api.Database db = client.getDb(dbName); return HiveMetastoreApiConverter.toDatabase(db); } @Override public void createTable(String dbName, Table table) { org.apache.hadoop.hive.metastore.api.Table hiveTable = toMetastoreApiTable((HiveTable) table); client.createTable(hiveTable); } @Override public void dropTable(String dbName, String tableName) { client.dropTable(dbName, tableName); } public Table getTable(String dbName, String tableName) { org.apache.hadoop.hive.metastore.api.Table table = client.getTable(dbName, tableName); StorageDescriptor sd = table.getSd(); if (sd == null) { throw new StarRocksConnectorException("Table is missing storage descriptor"); } if (!HiveMetastoreApiConverter.isHudiTable(table.getSd().getInputFormat())) { validateHiveTableType(table.getTableType()); if (table.getTableType().equalsIgnoreCase("VIRTUAL_VIEW")) { return HiveMetastoreApiConverter.toHiveView(table, catalogName); } 
else { return HiveMetastoreApiConverter.toHiveTable(table, catalogName); } } else { return HiveMetastoreApiConverter.toHudiTable(table, catalogName); } } @Override public List<String> getPartitionKeysByValue(String dbName, String tableName, List<Optional<String>> partitionValues) { if (partitionValues.isEmpty()) { return client.getPartitionKeys(dbName, tableName); } else { List<String> partitionValuesStr = partitionValues.stream() .map(v -> v.orElse("")).collect(Collectors.toList()); return client.getPartitionKeysByValue(dbName, tableName, partitionValuesStr); } } @Override public boolean partitionExists(Table table, List<String> partitionValues) { HiveTable hiveTable = (HiveTable) table; String dbName = hiveTable.getDbName(); String tableName = hiveTable.getTableName(); if (!hiveTable.hasBooleanTypePartitionColumn()) { return !client.getPartitionKeysByValue(dbName, tableName, partitionValues).isEmpty(); } else { List<String> allPartitionNames = client.getPartitionKeys(dbName, tableName); String hivePartitionName = toHivePartitionName(hiveTable.getPartitionColumnNames(), partitionValues); return allPartitionNames.contains(hivePartitionName); } } @Override public Map<String, Partition> getPartitionsByNames(String dbName, String tblName, List<String> partitionNames) { List<org.apache.hadoop.hive.metastore.api.Partition> partitions = new ArrayList<>(); for (int start = 0; start < partitionNames.size(); start += Config.max_hive_partitions_per_rpc) { int end = Math.min(start + Config.max_hive_partitions_per_rpc, partitionNames.size()); List<String> namesPerRPC = partitionNames.subList(start, end); List<org.apache.hadoop.hive.metastore.api.Partition> partsPerRPC = client.getPartitionsByNames(dbName, tblName, namesPerRPC); partitions.addAll(partsPerRPC); } Map<String, List<String>> partitionNameToPartitionValues = partitionNames.stream() .collect(Collectors.toMap(Function.identity(), PartitionUtil::toPartitionValues)); Map<List<String>, Partition> 
partitionValuesToPartition = partitions.stream() .collect(Collectors.toMap( org.apache.hadoop.hive.metastore.api.Partition::getValues, partition -> HiveMetastoreApiConverter.toPartition(partition.getSd(), partition.getParameters()))); ImmutableMap.Builder<String, Partition> resultBuilder = ImmutableMap.builder(); for (Map.Entry<String, List<String>> entry : partitionNameToPartitionValues.entrySet()) { Partition partition = partitionValuesToPartition.get(entry.getValue()); resultBuilder.put(entry.getKey(), partition); } return resultBuilder.build(); } @Override public void addPartitions(String dbName, String tableName, List<HivePartitionWithStats> partitions) { List<org.apache.hadoop.hive.metastore.api.Partition> hivePartitions = partitions.stream() .map(HiveMetastoreApiConverter::toMetastoreApiPartition) .collect(Collectors.toList()); client.addPartitions(dbName, tableName, hivePartitions); } @Override public void dropPartition(String dbName, String tableName, List<String> partValues, boolean deleteData) { client.dropPartition(dbName, tableName, partValues, deleteData); } public HivePartitionStats getTableStatistics(String dbName, String tblName) { org.apache.hadoop.hive.metastore.api.Table table = client.getTable(dbName, tblName); HiveCommonStats commonStats = toHiveCommonStats(table.getParameters()); long totalRowNums = commonStats.getRowNums(); if (totalRowNums == -1) { return HivePartitionStats.empty(); } List<String> dataColumns = table.getSd().getCols().stream() .map(FieldSchema::getName) .collect(toImmutableList()); List<ColumnStatisticsObj> statisticsObjs = client.getTableColumnStats(dbName, tblName, dataColumns); Map<String, HiveColumnStats> columnStatistics = HiveMetastoreApiConverter.toSinglePartitionColumnStats(statisticsObjs, totalRowNums); return new HivePartitionStats(commonStats, columnStatistics); } public void updateTableStatistics(String dbName, String tableName, Function<HivePartitionStats, HivePartitionStats> update) { 
org.apache.hadoop.hive.metastore.api.Table originTable = client.getTable(dbName, tableName); if (originTable == null) { throw new StarRocksConnectorException("Table '%s.%s' not found", dbName, tableName); } org.apache.hadoop.hive.metastore.api.Table newTable = originTable.deepCopy(); HiveCommonStats curCommonStats = toHiveCommonStats(originTable.getParameters()); HivePartitionStats curPartitionStats = new HivePartitionStats(curCommonStats, new HashMap<>()); HivePartitionStats updatedStats = update.apply(curPartitionStats); HiveCommonStats commonStats = updatedStats.getCommonStats(); Map<String, String> originParams = newTable.getParameters(); originParams.put(TRANSIENT_LAST_DDL_TIME, String.valueOf(System.currentTimeMillis() / 1000)); newTable.setParameters(updateStatisticsParameters(originParams, commonStats)); client.alterTable(dbName, tableName, newTable); } public void updatePartitionStatistics(String dbName, String tableName, String partitionName, Function<HivePartitionStats, HivePartitionStats> update) { List<org.apache.hadoop.hive.metastore.api.Partition> partitions = client.getPartitionsByNames( dbName, tableName, ImmutableList.of(partitionName)); if (partitions.size() != 1) { throw new StarRocksConnectorException("Metastore returned multiple partitions for name: " + partitionName); } org.apache.hadoop.hive.metastore.api.Partition originPartition = getOnlyElement(partitions); HiveCommonStats curCommonStats = toHiveCommonStats(originPartition.getParameters()); HivePartitionStats curPartitionStats = new HivePartitionStats(curCommonStats, new HashMap<>()); HivePartitionStats updatedStats = update.apply(curPartitionStats); org.apache.hadoop.hive.metastore.api.Partition modifiedPartition = originPartition.deepCopy(); HiveCommonStats commonStats = updatedStats.getCommonStats(); Map<String, String> originParams = modifiedPartition.getParameters(); originParams.put(TRANSIENT_LAST_DDL_TIME, String.valueOf(System.currentTimeMillis() / 1000)); 
modifiedPartition.setParameters(updateStatisticsParameters(modifiedPartition.getParameters(), commonStats)); client.alterPartition(dbName, tableName, modifiedPartition); } public Map<String, HivePartitionStats> getPartitionStatistics(Table table, List<String> partitionNames) { HiveMetaStoreTable hmsTbl = (HiveMetaStoreTable) table; String dbName = hmsTbl.getDbName(); String tblName = hmsTbl.getTableName(); List<String> dataColumns = hmsTbl.getDataColumnNames(); Map<String, Partition> partitions = getPartitionsByNames(hmsTbl.getDbName(), hmsTbl.getTableName(), partitionNames); Map<String, HiveCommonStats> partitionCommonStats = partitions.entrySet().stream() .collect(toImmutableMap(Map.Entry::getKey, entry -> toHiveCommonStats(entry.getValue().getParameters()))); Map<String, Long> partitionRowNums = partitionCommonStats.entrySet().stream() .collect(toImmutableMap(Map.Entry::getKey, entry -> entry.getValue().getRowNums())); ImmutableMap.Builder<String, HivePartitionStats> resultBuilder = ImmutableMap.builder(); Map<String, List<ColumnStatisticsObj>> partitionNameToColumnStatsObj = client.getPartitionColumnStats(dbName, tblName, partitionNames, dataColumns); Map<String, Map<String, HiveColumnStats>> partitionColumnStats = HiveMetastoreApiConverter .toPartitionColumnStatistics(partitionNameToColumnStatsObj, partitionRowNums); for (String partitionName : partitionCommonStats.keySet()) { HiveCommonStats commonStats = partitionCommonStats.get(partitionName); Map<String, HiveColumnStats> columnStatistics = partitionColumnStats .getOrDefault(partitionName, ImmutableMap.of()); resultBuilder.put(partitionName, new HivePartitionStats(commonStats, columnStatistics)); } return resultBuilder.build(); } public long getCurrentEventId() { return client.getCurrentNotificationEventId().getEventId(); } public NotificationEventResponse getNextEventResponse(long lastSyncedEventId, String catalogName, final boolean getAllEvents) throws MetastoreNotificationFetchException { try { int 
batchSize = getAllEvents ? -1 : Config.hms_events_batch_size_per_rpc; NotificationEventResponse response = client.getNextNotification(lastSyncedEventId, batchSize, null); if (response.getEvents().size() == 0) { LOG.info("Event size is 0 when pulling events on catalog [{}]", catalogName); return null; } LOG.info(String.format("Received %d events. Start event id : %d. Last synced id : %d on catalog : %s", response.getEvents().size(), response.getEvents().get(0).getEventId(), lastSyncedEventId, catalogName)); return response; } catch (MetastoreNotificationFetchException e) { LOG.error("Unable to fetch notifications from metastore. Last synced event id is {}", lastSyncedEventId, e); throw new MetastoreNotificationFetchException("Unable to fetch notifications from metastore. " + "Last synced event id is " + lastSyncedEventId, e); } } }
class HiveMetastore implements IHiveMetastore { private static final Logger LOG = LogManager.getLogger(CachingHiveMetastore.class); private final HiveMetaClient client; private final String catalogName; private final MetastoreType metastoreType; public HiveMetastore(HiveMetaClient client, String catalogName, MetastoreType metastoreType) { this.client = client; this.catalogName = catalogName; this.metastoreType = metastoreType; } @Override public List<String> getAllDatabaseNames() { return client.getAllDatabaseNames(); } @Override public void createDb(String dbName, Map<String, String> properties) { String location = properties.getOrDefault(LOCATION_PROPERTY, ""); long dbId = ConnectorTableId.CONNECTOR_ID_GENERATOR.getNextId().asInt(); Database database = new Database(dbId, dbName, location); client.createDatabase(HiveMetastoreApiConverter.toMetastoreApiDatabase(database)); } @Override public void dropDb(String dbName, boolean deleteData) { client.dropDatabase(dbName, deleteData); } @Override public List<String> getAllTableNames(String dbName) { return client.getAllTableNames(dbName); } @Override public Database getDb(String dbName) { org.apache.hadoop.hive.metastore.api.Database db = client.getDb(dbName); return HiveMetastoreApiConverter.toDatabase(db); } @Override public void createTable(String dbName, Table table) { org.apache.hadoop.hive.metastore.api.Table hiveTable = toMetastoreApiTable((HiveTable) table); client.createTable(hiveTable); } @Override public void dropTable(String dbName, String tableName) { client.dropTable(dbName, tableName); } public Table getTable(String dbName, String tableName) { org.apache.hadoop.hive.metastore.api.Table table = client.getTable(dbName, tableName); StorageDescriptor sd = table.getSd(); if (sd == null) { throw new StarRocksConnectorException("Table is missing storage descriptor"); } if (!HiveMetastoreApiConverter.isHudiTable(table.getSd().getInputFormat())) { validateHiveTableType(table.getTableType()); if 
(table.getTableType().equalsIgnoreCase("VIRTUAL_VIEW")) { return HiveMetastoreApiConverter.toHiveView(table, catalogName); } else { return HiveMetastoreApiConverter.toHiveTable(table, catalogName); } } else { return HiveMetastoreApiConverter.toHudiTable(table, catalogName); } } @Override public List<String> getPartitionKeysByValue(String dbName, String tableName, List<Optional<String>> partitionValues) { if (partitionValues.isEmpty()) { return client.getPartitionKeys(dbName, tableName); } else { List<String> partitionValuesStr = partitionValues.stream() .map(v -> v.orElse("")).collect(Collectors.toList()); return client.getPartitionKeysByValue(dbName, tableName, partitionValuesStr); } } @Override public boolean partitionExists(Table table, List<String> partitionValues) { HiveTable hiveTable = (HiveTable) table; String dbName = hiveTable.getDbName(); String tableName = hiveTable.getTableName(); if (metastoreType == MetastoreType.GLUE && hiveTable.hasBooleanTypePartitionColumn()) { List<String> allPartitionNames = client.getPartitionKeys(dbName, tableName); String hivePartitionName = toHivePartitionName(hiveTable.getPartitionColumnNames(), partitionValues); return allPartitionNames.contains(hivePartitionName); } else { return !client.getPartitionKeysByValue(dbName, tableName, partitionValues).isEmpty(); } } @Override public Map<String, Partition> getPartitionsByNames(String dbName, String tblName, List<String> partitionNames) { List<org.apache.hadoop.hive.metastore.api.Partition> partitions = new ArrayList<>(); for (int start = 0; start < partitionNames.size(); start += Config.max_hive_partitions_per_rpc) { int end = Math.min(start + Config.max_hive_partitions_per_rpc, partitionNames.size()); List<String> namesPerRPC = partitionNames.subList(start, end); List<org.apache.hadoop.hive.metastore.api.Partition> partsPerRPC = client.getPartitionsByNames(dbName, tblName, namesPerRPC); partitions.addAll(partsPerRPC); } Map<String, List<String>> partitionNameToPartitionValues 
= partitionNames.stream() .collect(Collectors.toMap(Function.identity(), PartitionUtil::toPartitionValues)); Map<List<String>, Partition> partitionValuesToPartition = partitions.stream() .collect(Collectors.toMap( org.apache.hadoop.hive.metastore.api.Partition::getValues, partition -> HiveMetastoreApiConverter.toPartition(partition.getSd(), partition.getParameters()))); ImmutableMap.Builder<String, Partition> resultBuilder = ImmutableMap.builder(); for (Map.Entry<String, List<String>> entry : partitionNameToPartitionValues.entrySet()) { Partition partition = partitionValuesToPartition.get(entry.getValue()); resultBuilder.put(entry.getKey(), partition); } return resultBuilder.build(); } @Override public void addPartitions(String dbName, String tableName, List<HivePartitionWithStats> partitions) { List<org.apache.hadoop.hive.metastore.api.Partition> hivePartitions = partitions.stream() .map(HiveMetastoreApiConverter::toMetastoreApiPartition) .collect(Collectors.toList()); client.addPartitions(dbName, tableName, hivePartitions); } @Override public void dropPartition(String dbName, String tableName, List<String> partValues, boolean deleteData) { client.dropPartition(dbName, tableName, partValues, deleteData); } public HivePartitionStats getTableStatistics(String dbName, String tblName) { org.apache.hadoop.hive.metastore.api.Table table = client.getTable(dbName, tblName); HiveCommonStats commonStats = toHiveCommonStats(table.getParameters()); long totalRowNums = commonStats.getRowNums(); if (totalRowNums == -1) { return HivePartitionStats.empty(); } List<String> dataColumns = table.getSd().getCols().stream() .map(FieldSchema::getName) .collect(toImmutableList()); List<ColumnStatisticsObj> statisticsObjs = client.getTableColumnStats(dbName, tblName, dataColumns); Map<String, HiveColumnStats> columnStatistics = HiveMetastoreApiConverter.toSinglePartitionColumnStats(statisticsObjs, totalRowNums); return new HivePartitionStats(commonStats, columnStatistics); } public void 
updateTableStatistics(String dbName, String tableName, Function<HivePartitionStats, HivePartitionStats> update) { org.apache.hadoop.hive.metastore.api.Table originTable = client.getTable(dbName, tableName); if (originTable == null) { throw new StarRocksConnectorException("Table '%s.%s' not found", dbName, tableName); } org.apache.hadoop.hive.metastore.api.Table newTable = originTable.deepCopy(); HiveCommonStats curCommonStats = toHiveCommonStats(originTable.getParameters()); HivePartitionStats curPartitionStats = new HivePartitionStats(curCommonStats, new HashMap<>()); HivePartitionStats updatedStats = update.apply(curPartitionStats); HiveCommonStats commonStats = updatedStats.getCommonStats(); Map<String, String> originParams = newTable.getParameters(); originParams.put(TRANSIENT_LAST_DDL_TIME, String.valueOf(System.currentTimeMillis() / 1000)); newTable.setParameters(updateStatisticsParameters(originParams, commonStats)); client.alterTable(dbName, tableName, newTable); } public void updatePartitionStatistics(String dbName, String tableName, String partitionName, Function<HivePartitionStats, HivePartitionStats> update) { List<org.apache.hadoop.hive.metastore.api.Partition> partitions = client.getPartitionsByNames( dbName, tableName, ImmutableList.of(partitionName)); if (partitions.size() != 1) { throw new StarRocksConnectorException("Metastore returned multiple partitions for name: " + partitionName); } org.apache.hadoop.hive.metastore.api.Partition originPartition = getOnlyElement(partitions); HiveCommonStats curCommonStats = toHiveCommonStats(originPartition.getParameters()); HivePartitionStats curPartitionStats = new HivePartitionStats(curCommonStats, new HashMap<>()); HivePartitionStats updatedStats = update.apply(curPartitionStats); org.apache.hadoop.hive.metastore.api.Partition modifiedPartition = originPartition.deepCopy(); HiveCommonStats commonStats = updatedStats.getCommonStats(); Map<String, String> originParams = modifiedPartition.getParameters(); 
originParams.put(TRANSIENT_LAST_DDL_TIME, String.valueOf(System.currentTimeMillis() / 1000)); modifiedPartition.setParameters(updateStatisticsParameters(modifiedPartition.getParameters(), commonStats)); client.alterPartition(dbName, tableName, modifiedPartition); } public Map<String, HivePartitionStats> getPartitionStatistics(Table table, List<String> partitionNames) { HiveMetaStoreTable hmsTbl = (HiveMetaStoreTable) table; String dbName = hmsTbl.getDbName(); String tblName = hmsTbl.getTableName(); List<String> dataColumns = hmsTbl.getDataColumnNames(); Map<String, Partition> partitions = getPartitionsByNames(hmsTbl.getDbName(), hmsTbl.getTableName(), partitionNames); Map<String, HiveCommonStats> partitionCommonStats = partitions.entrySet().stream() .collect(toImmutableMap(Map.Entry::getKey, entry -> toHiveCommonStats(entry.getValue().getParameters()))); Map<String, Long> partitionRowNums = partitionCommonStats.entrySet().stream() .collect(toImmutableMap(Map.Entry::getKey, entry -> entry.getValue().getRowNums())); ImmutableMap.Builder<String, HivePartitionStats> resultBuilder = ImmutableMap.builder(); Map<String, List<ColumnStatisticsObj>> partitionNameToColumnStatsObj = client.getPartitionColumnStats(dbName, tblName, partitionNames, dataColumns); Map<String, Map<String, HiveColumnStats>> partitionColumnStats = HiveMetastoreApiConverter .toPartitionColumnStatistics(partitionNameToColumnStatsObj, partitionRowNums); for (String partitionName : partitionCommonStats.keySet()) { HiveCommonStats commonStats = partitionCommonStats.get(partitionName); Map<String, HiveColumnStats> columnStatistics = partitionColumnStats .getOrDefault(partitionName, ImmutableMap.of()); resultBuilder.put(partitionName, new HivePartitionStats(commonStats, columnStatistics)); } return resultBuilder.build(); } public long getCurrentEventId() { return client.getCurrentNotificationEventId().getEventId(); } public NotificationEventResponse getNextEventResponse(long lastSyncedEventId, String 
catalogName, final boolean getAllEvents) throws MetastoreNotificationFetchException { try { int batchSize = getAllEvents ? -1 : Config.hms_events_batch_size_per_rpc; NotificationEventResponse response = client.getNextNotification(lastSyncedEventId, batchSize, null); if (response.getEvents().size() == 0) { LOG.info("Event size is 0 when pulling events on catalog [{}]", catalogName); return null; } LOG.info(String.format("Received %d events. Start event id : %d. Last synced id : %d on catalog : %s", response.getEvents().size(), response.getEvents().get(0).getEventId(), lastSyncedEventId, catalogName)); return response; } catch (MetastoreNotificationFetchException e) { LOG.error("Unable to fetch notifications from metastore. Last synced event id is {}", lastSyncedEventId, e); throw new MetastoreNotificationFetchException("Unable to fetch notifications from metastore. " + "Last synced event id is " + lastSyncedEventId, e); } } }
@tiagobento could you please explain this one. `ignoreAvailability` is `true`. Is it availability of artifacts that should be ignored?
public void testValidTailFromRemoteIgnoringAvailabilityViaSystemPropBlank() throws Exception { setSystemProp("maven.repo.local.tail.ignoreAvailability", " "); final BootstrapMavenContext mvn = bootstrapMavenContextForProject("workspace-with-local-repo-tail", BootstrapMavenContext.config() .setLocalRepositoryTail(new String[] { M2_FROM_REMOTE })); assertNotNull(resolveOrgAcmeFooJar001(mvn)); }
assertNotNull(resolveOrgAcmeFooJar001(mvn));
public void testValidTailFromRemoteIgnoringAvailabilityViaSystemPropBlank() throws Exception { setSystemProp("maven.repo.local.tail.ignoreAvailability", " "); final BootstrapMavenContext mvn = bootstrapMavenContextForProject("workspace-with-local-repo-tail", BootstrapMavenContext.config() .setLocalRepositoryTail(new String[] { M2_FROM_REMOTE })); assertNotNull(resolveOrgAcmeFooJar001(mvn)); }
class ChainedLocalRepositoryManagerTest extends BootstrapMavenContextTestBase { private static final String M2_LOCAL_1; private static final String M2_LOCAL_2; private static final String M2_FROM_REMOTE; static { final String projectLocation; try { projectLocation = getProjectLocation("workspace-with-local-repo-tail").toString(); } catch (URISyntaxException e) { throw new RuntimeException(e); } M2_LOCAL_1 = Paths.get(projectLocation, ".m2-local-1", "repository").toAbsolutePath().toString(); M2_LOCAL_2 = Paths.get(projectLocation, ".m2-local-2", "repository").toAbsolutePath().toString(); M2_FROM_REMOTE = Paths.get(projectLocation, ".m2-from-remote", "repository").toAbsolutePath().toString(); } @Test public void testNoTail() throws Exception { final BootstrapMavenContext mvn = bootstrapMavenContextForProject("workspace-with-local-repo-tail"); assertThrowsExactly(BootstrapMavenException.class, () -> resolveOrgAcmeFooJar001(mvn)); } @Test public void testTailConfiguredButEmptyString() throws Exception { setSystemProp("maven.repo.local.tail", ""); final BootstrapMavenContext mvn = bootstrapMavenContextForProject("workspace-with-local-repo-tail"); assertThrowsExactly(BootstrapMavenException.class, () -> resolveOrgAcmeFooJar001(mvn)); } @Test public void testTailConfiguredButBlank() throws Exception { setSystemProp("maven.repo.local.tail", " "); final BootstrapMavenContext mvn = bootstrapMavenContextForProject("workspace-with-local-repo-tail"); assertThrowsExactly(BootstrapMavenException.class, () -> resolveOrgAcmeFooJar001(mvn)); } @Test public void testTailConfiguredButNonExistent() throws Exception { setSystemProp("maven.repo.local.tail", "/tmp/this-dir-does-not-exist"); final BootstrapMavenContext mvn = bootstrapMavenContextForProject("workspace-with-local-repo-tail"); assertThrowsExactly(BootstrapMavenException.class, () -> resolveOrgAcmeFooJar001(mvn)); } @Test public void testValidTailViaSystemProp() throws Exception { setSystemProp("maven.repo.local.tail", 
M2_LOCAL_1); final BootstrapMavenContext mvn = bootstrapMavenContextForProject("workspace-with-local-repo-tail"); assertNotNull(resolveOrgAcmeFooJar001(mvn)); } @Test public void testValidTailViaConfig() throws Exception { final BootstrapMavenContext mvn = bootstrapMavenContextForProject("workspace-with-local-repo-tail", BootstrapMavenContext.config() .setLocalRepositoryTail(new String[] { M2_LOCAL_1 })); assertNotNull(resolveOrgAcmeFooJar001(mvn)); } @Test public void testValidTailResolutionOrder() throws Exception { final BootstrapMavenContext mvnLocal1first = bootstrapMavenContextForProject("workspace-with-local-repo-tail", BootstrapMavenContext.config() .setLocalRepositoryTail(new String[] { M2_LOCAL_1, M2_LOCAL_2 })); final BootstrapMavenContext mvnLocal2first = bootstrapMavenContextForProject("workspace-with-local-repo-tail", BootstrapMavenContext.config() .setLocalRepositoryTail(new String[] { M2_LOCAL_2, M2_LOCAL_1 })); assertEquals(resolveOrgAcmeFooJar001(mvnLocal1first).getFile().getAbsolutePath(), Paths.get(M2_LOCAL_1, "org", "acme", "foo", "0.0.1", "foo-0.0.1.jar").toAbsolutePath().toString()); assertEquals(resolveOrgAcmeFooJar001(mvnLocal2first).getFile().getAbsolutePath(), Paths.get(M2_LOCAL_2, "org", "acme", "foo", "0.0.1", "foo-0.0.1.jar").toAbsolutePath().toString()); } @Test public void testValidTailMultiplicity() throws Exception { final BootstrapMavenContext mvn = bootstrapMavenContextForProject("workspace-with-local-repo-tail", BootstrapMavenContext.config() .setLocalRepositoryTail(new String[] { M2_LOCAL_1, M2_LOCAL_2 })); final Artifact foo = resolveOrgAcmeFooJar001(mvn); assertNotNull(foo); assertEquals(foo.getFile().getAbsolutePath(), Paths.get(M2_LOCAL_1, "org", "acme", "foo", "0.0.1", "foo-0.0.1.jar").toAbsolutePath().toString()); final Artifact bar = resolveOrgAcmeBarJar002(mvn); assertNotNull(bar); assertEquals(bar.getFile().getAbsolutePath(), Paths.get(M2_LOCAL_2, "org", "acme", "bar", "0.0.2", 
"bar-0.0.2.jar").toAbsolutePath().toString()); } @Test public void testValidTailLocalCheckingForAvailabilityViaConfig() throws Exception { final BootstrapMavenContext mvn = bootstrapMavenContextForProject("workspace-with-local-repo-tail", BootstrapMavenContext.config() .setLocalRepositoryTailIgnoreAvailability(false) .setLocalRepositoryTail(new String[] { M2_LOCAL_1 })); assertNotNull(resolveOrgAcmeFooJar001(mvn)); } @Test public void testValidTailFromRemoteCheckingForAvailabilityViaConfig() throws Exception { final BootstrapMavenContext mvn = bootstrapMavenContextForProject("workspace-with-local-repo-tail", BootstrapMavenContext.config() .setLocalRepositoryTailIgnoreAvailability(false) .setLocalRepositoryTail(new String[] { M2_FROM_REMOTE })); assertThrowsExactly(BootstrapMavenException.class, () -> resolveOrgAcmeFooJar001(mvn)); } @Test public void testValidTailFromRemoteCheckingForAvailabilityViaSystemProp() throws Exception { setSystemProp("maven.repo.local.tail.ignoreAvailability", "false"); final BootstrapMavenContext mvn = bootstrapMavenContextForProject("workspace-with-local-repo-tail", BootstrapMavenContext.config() .setLocalRepositoryTail(new String[] { M2_FROM_REMOTE })); assertThrowsExactly(BootstrapMavenException.class, () -> resolveOrgAcmeFooJar001(mvn)); } @Test public void testValidTailFromRemoteIgnoringAvailabilityViaSystemPropEmpty() throws Exception { setSystemProp("maven.repo.local.tail.ignoreAvailability", ""); final BootstrapMavenContext mvn = bootstrapMavenContextForProject("workspace-with-local-repo-tail", BootstrapMavenContext.config() .setLocalRepositoryTail(new String[] { M2_FROM_REMOTE })); assertNotNull(resolveOrgAcmeFooJar001(mvn)); } @Test @Test public void testValidTailFromRemoteIgnoringAvailabilityViaSystemPropTruthy() throws Exception { setSystemProp("maven.repo.local.tail.ignoreAvailability", "fals"); final BootstrapMavenContext mvn = bootstrapMavenContextForProject("workspace-with-local-repo-tail", BootstrapMavenContext.config() 
.setLocalRepositoryTail(new String[] { M2_FROM_REMOTE })); assertNotNull(resolveOrgAcmeFooJar001(mvn)); } @Test public void testValidTailLocalIgnoringAvailabilityViaConfig() throws Exception { final BootstrapMavenContext mvn = bootstrapMavenContextForProject("workspace-with-local-repo-tail", BootstrapMavenContext.config() .setLocalRepositoryTailIgnoreAvailability(true) .setLocalRepositoryTail(new String[] { M2_LOCAL_1 })); assertNotNull(resolveOrgAcmeFooJar001(mvn)); } @Test public void testValidTailFromRemoteIgnoringAvailabilityViaConfig() throws Exception { final BootstrapMavenContext mvn = bootstrapMavenContextForProject("workspace-with-local-repo-tail", BootstrapMavenContext.config() .setLocalRepositoryTailIgnoreAvailability(true) .setLocalRepositoryTail(new String[] { M2_FROM_REMOTE })); assertNotNull(resolveOrgAcmeFooJar001(mvn)); } private Artifact resolveOrgAcmeFooJar001(BootstrapMavenContext ctx) throws BootstrapMavenException { final MavenArtifactResolver resolver = new MavenArtifactResolver(ctx); return resolver.resolve(new DefaultArtifact("org.acme", "foo", "", "jar", "0.0.1")).getArtifact(); } private Artifact resolveOrgAcmeBarJar002(BootstrapMavenContext ctx) throws BootstrapMavenException { final MavenArtifactResolver resolver = new MavenArtifactResolver(ctx); return resolver.resolve(new DefaultArtifact("org.acme", "bar", "", "jar", "0.0.2")).getArtifact(); } }
class ChainedLocalRepositoryManagerTest extends BootstrapMavenContextTestBase { private static final String M2_LOCAL_1; private static final String M2_LOCAL_2; private static final String M2_FROM_REMOTE; static { final String projectLocation; try { projectLocation = getProjectLocation("workspace-with-local-repo-tail").toString(); } catch (URISyntaxException e) { throw new RuntimeException(e); } M2_LOCAL_1 = Paths.get(projectLocation, ".m2-local-1", "repository").toAbsolutePath().toString(); M2_LOCAL_2 = Paths.get(projectLocation, ".m2-local-2", "repository").toAbsolutePath().toString(); M2_FROM_REMOTE = Paths.get(projectLocation, ".m2-from-remote", "repository").toAbsolutePath().toString(); } @Test public void testNoTail() throws Exception { final BootstrapMavenContext mvn = bootstrapMavenContextForProject("workspace-with-local-repo-tail"); assertThrowsExactly(BootstrapMavenException.class, () -> resolveOrgAcmeFooJar001(mvn)); } @Test public void testTailConfiguredButEmptyString() throws Exception { setSystemProp("maven.repo.local.tail", ""); final BootstrapMavenContext mvn = bootstrapMavenContextForProject("workspace-with-local-repo-tail"); assertThrowsExactly(BootstrapMavenException.class, () -> resolveOrgAcmeFooJar001(mvn)); } @Test public void testTailConfiguredButBlank() throws Exception { setSystemProp("maven.repo.local.tail", " "); final BootstrapMavenContext mvn = bootstrapMavenContextForProject("workspace-with-local-repo-tail"); assertThrowsExactly(BootstrapMavenException.class, () -> resolveOrgAcmeFooJar001(mvn)); } @Test public void testTailConfiguredButNonExistent() throws Exception { setSystemProp("maven.repo.local.tail", "/tmp/this-dir-does-not-exist"); final BootstrapMavenContext mvn = bootstrapMavenContextForProject("workspace-with-local-repo-tail"); assertThrowsExactly(BootstrapMavenException.class, () -> resolveOrgAcmeFooJar001(mvn)); } @Test public void testValidTailViaSystemProp() throws Exception { setSystemProp("maven.repo.local.tail", 
M2_LOCAL_1); final BootstrapMavenContext mvn = bootstrapMavenContextForProject("workspace-with-local-repo-tail"); assertNotNull(resolveOrgAcmeFooJar001(mvn)); } @Test public void testValidTailViaConfig() throws Exception { final BootstrapMavenContext mvn = bootstrapMavenContextForProject("workspace-with-local-repo-tail", BootstrapMavenContext.config() .setLocalRepositoryTail(new String[] { M2_LOCAL_1 })); assertNotNull(resolveOrgAcmeFooJar001(mvn)); } @Test public void testValidTailResolutionOrder() throws Exception { final BootstrapMavenContext mvnLocal1first = bootstrapMavenContextForProject("workspace-with-local-repo-tail", BootstrapMavenContext.config() .setLocalRepositoryTail(new String[] { M2_LOCAL_1, M2_LOCAL_2 })); final BootstrapMavenContext mvnLocal2first = bootstrapMavenContextForProject("workspace-with-local-repo-tail", BootstrapMavenContext.config() .setLocalRepositoryTail(new String[] { M2_LOCAL_2, M2_LOCAL_1 })); assertEquals(resolveOrgAcmeFooJar001(mvnLocal1first).getFile().getAbsolutePath(), Paths.get(M2_LOCAL_1, "org", "acme", "foo", "0.0.1", "foo-0.0.1.jar").toAbsolutePath().toString()); assertEquals(resolveOrgAcmeFooJar001(mvnLocal2first).getFile().getAbsolutePath(), Paths.get(M2_LOCAL_2, "org", "acme", "foo", "0.0.1", "foo-0.0.1.jar").toAbsolutePath().toString()); } @Test public void testValidTailMultiplicity() throws Exception { final BootstrapMavenContext mvn = bootstrapMavenContextForProject("workspace-with-local-repo-tail", BootstrapMavenContext.config() .setLocalRepositoryTail(new String[] { M2_LOCAL_1, M2_LOCAL_2 })); final Artifact foo = resolveOrgAcmeFooJar001(mvn); assertNotNull(foo); assertEquals(foo.getFile().getAbsolutePath(), Paths.get(M2_LOCAL_1, "org", "acme", "foo", "0.0.1", "foo-0.0.1.jar").toAbsolutePath().toString()); final Artifact bar = resolveOrgAcmeBarJar002(mvn); assertNotNull(bar); assertEquals(bar.getFile().getAbsolutePath(), Paths.get(M2_LOCAL_2, "org", "acme", "bar", "0.0.2", 
"bar-0.0.2.jar").toAbsolutePath().toString()); } @Test public void testValidTailLocalCheckingForAvailabilityViaConfig() throws Exception { final BootstrapMavenContext mvn = bootstrapMavenContextForProject("workspace-with-local-repo-tail", BootstrapMavenContext.config() .setLocalRepositoryTailIgnoreAvailability(false) .setLocalRepositoryTail(new String[] { M2_LOCAL_1 })); assertNotNull(resolveOrgAcmeFooJar001(mvn)); } @Test public void testValidTailFromRemoteCheckingForAvailabilityViaConfig() throws Exception { final BootstrapMavenContext mvn = bootstrapMavenContextForProject("workspace-with-local-repo-tail", BootstrapMavenContext.config() .setLocalRepositoryTailIgnoreAvailability(false) .setLocalRepositoryTail(new String[] { M2_FROM_REMOTE })); assertThrowsExactly(BootstrapMavenException.class, () -> resolveOrgAcmeFooJar001(mvn)); } @Test public void testValidTailFromRemoteCheckingForAvailabilityViaSystemProp() throws Exception { setSystemProp("maven.repo.local.tail.ignoreAvailability", "false"); final BootstrapMavenContext mvn = bootstrapMavenContextForProject("workspace-with-local-repo-tail", BootstrapMavenContext.config() .setLocalRepositoryTail(new String[] { M2_FROM_REMOTE })); assertThrowsExactly(BootstrapMavenException.class, () -> resolveOrgAcmeFooJar001(mvn)); } @Test public void testValidTailFromRemoteIgnoringAvailabilityViaSystemPropEmpty() throws Exception { setSystemProp("maven.repo.local.tail.ignoreAvailability", ""); final BootstrapMavenContext mvn = bootstrapMavenContextForProject("workspace-with-local-repo-tail", BootstrapMavenContext.config() .setLocalRepositoryTail(new String[] { M2_FROM_REMOTE })); assertNotNull(resolveOrgAcmeFooJar001(mvn)); } @Test @Test public void testValidTailFromRemoteIgnoringAvailabilityViaSystemPropTruthy() throws Exception { setSystemProp("maven.repo.local.tail.ignoreAvailability", "fals"); final BootstrapMavenContext mvn = bootstrapMavenContextForProject("workspace-with-local-repo-tail", BootstrapMavenContext.config() 
.setLocalRepositoryTail(new String[] { M2_FROM_REMOTE })); assertNotNull(resolveOrgAcmeFooJar001(mvn)); } @Test public void testValidTailLocalIgnoringAvailabilityViaConfig() throws Exception { final BootstrapMavenContext mvn = bootstrapMavenContextForProject("workspace-with-local-repo-tail", BootstrapMavenContext.config() .setLocalRepositoryTailIgnoreAvailability(true) .setLocalRepositoryTail(new String[] { M2_LOCAL_1 })); assertNotNull(resolveOrgAcmeFooJar001(mvn)); } @Test public void testValidTailFromRemoteIgnoringAvailabilityViaConfig() throws Exception { final BootstrapMavenContext mvn = bootstrapMavenContextForProject("workspace-with-local-repo-tail", BootstrapMavenContext.config() .setLocalRepositoryTailIgnoreAvailability(true) .setLocalRepositoryTail(new String[] { M2_FROM_REMOTE })); assertNotNull(resolveOrgAcmeFooJar001(mvn)); } private Artifact resolveOrgAcmeFooJar001(BootstrapMavenContext ctx) throws BootstrapMavenException { final MavenArtifactResolver resolver = new MavenArtifactResolver(ctx); return resolver.resolve(new DefaultArtifact("org.acme", "foo", "", "jar", "0.0.1")).getArtifact(); } private Artifact resolveOrgAcmeBarJar002(BootstrapMavenContext ctx) throws BootstrapMavenException { final MavenArtifactResolver resolver = new MavenArtifactResolver(ctx); return resolver.resolve(new DefaultArtifact("org.acme", "bar", "", "jar", "0.0.2")).getArtifact(); } }
missing a check for it failing if only the port is set.
public void testConnectToPushGatewayThrowsExceptionWithoutHostInformation() { PrometheusPushGatewayReporterFactory factory = new PrometheusPushGatewayReporterFactory(); MetricConfig metricConfig = new MetricConfig(); Assert.assertThrows( IllegalArgumentException.class, () -> factory.createMetricReporter(metricConfig)); metricConfig.setProperty(HOST.key(), "localhost"); Assert.assertThrows( IllegalArgumentException.class, () -> factory.createMetricReporter(metricConfig)); }
IllegalArgumentException.class, () -> factory.createMetricReporter(metricConfig));
public void testConnectToPushGatewayThrowsExceptionWithoutHostInformation() { PrometheusPushGatewayReporterFactory factory = new PrometheusPushGatewayReporterFactory(); MetricConfig metricConfig = new MetricConfig(); Assert.assertThrows( IllegalArgumentException.class, () -> factory.createMetricReporter(metricConfig)); metricConfig.setProperty(HOST.key(), "localhost"); Assert.assertThrows( IllegalArgumentException.class, () -> factory.createMetricReporter(metricConfig)); metricConfig.clear(); metricConfig.setProperty(PORT.key(), "18080"); Assert.assertThrows( IllegalArgumentException.class, () -> factory.createMetricReporter(metricConfig)); }
class PrometheusPushGatewayReporterTest extends TestLogger { @Test public void testParseGroupingKey() { Map<String, String> groupingKey = PrometheusPushGatewayReporterFactory.parseGroupingKey("k1=v1;k2=v2"); Assert.assertNotNull(groupingKey); Assert.assertEquals("v1", groupingKey.get("k1")); Assert.assertEquals("v2", groupingKey.get("k2")); } @Test public void testParseIncompleteGroupingKey() { Map<String, String> groupingKey = PrometheusPushGatewayReporterFactory.parseGroupingKey("k1="); Assert.assertTrue(groupingKey.isEmpty()); groupingKey = PrometheusPushGatewayReporterFactory.parseGroupingKey("=v1"); Assert.assertTrue(groupingKey.isEmpty()); groupingKey = PrometheusPushGatewayReporterFactory.parseGroupingKey("k1"); Assert.assertTrue(groupingKey.isEmpty()); } @Test public void testConnectToPushGatewayUsingHostAndPort() { PrometheusPushGatewayReporterFactory factory = new PrometheusPushGatewayReporterFactory(); MetricConfig metricConfig = new MetricConfig(); metricConfig.setProperty(HOST.key(), "localhost"); metricConfig.setProperty(PORT.key(), "18080"); PrometheusPushGatewayReporter reporter = factory.createMetricReporter(metricConfig); String gatewayBaseURL = (String) Whitebox.getInternalState(reporter.getPushGateway(), "gatewayBaseURL"); Assert.assertEquals(gatewayBaseURL, "http: } @Test public void testConnectToPushGatewayUsingHostUrl() { PrometheusPushGatewayReporterFactory factory = new PrometheusPushGatewayReporterFactory(); MetricConfig metricConfig = new MetricConfig(); metricConfig.setProperty(HOST_URL.key(), "https: PrometheusPushGatewayReporter reporter = factory.createMetricReporter(metricConfig); String gatewayBaseURL = (String) Whitebox.getInternalState(reporter.getPushGateway(), "gatewayBaseURL"); Assert.assertEquals(gatewayBaseURL, "https: } @Test public void testConnectToPushGatewayPreferHostUrl() { PrometheusPushGatewayReporterFactory factory = new PrometheusPushGatewayReporterFactory(); MetricConfig metricConfig = new MetricConfig(); 
metricConfig.setProperty(HOST_URL.key(), "https: metricConfig.setProperty(HOST.key(), "localhost1"); metricConfig.setProperty(PORT.key(), "18081"); PrometheusPushGatewayReporter reporter = factory.createMetricReporter(metricConfig); String gatewayBaseURL = (String) Whitebox.getInternalState(reporter.getPushGateway(), "gatewayBaseURL"); Assert.assertEquals(gatewayBaseURL, "https: } @Test }
class PrometheusPushGatewayReporterTest extends TestLogger { @Test public void testParseGroupingKey() { Map<String, String> groupingKey = PrometheusPushGatewayReporterFactory.parseGroupingKey("k1=v1;k2=v2"); Assert.assertNotNull(groupingKey); Assert.assertEquals("v1", groupingKey.get("k1")); Assert.assertEquals("v2", groupingKey.get("k2")); } @Test public void testParseIncompleteGroupingKey() { Map<String, String> groupingKey = PrometheusPushGatewayReporterFactory.parseGroupingKey("k1="); Assert.assertTrue(groupingKey.isEmpty()); groupingKey = PrometheusPushGatewayReporterFactory.parseGroupingKey("=v1"); Assert.assertTrue(groupingKey.isEmpty()); groupingKey = PrometheusPushGatewayReporterFactory.parseGroupingKey("k1"); Assert.assertTrue(groupingKey.isEmpty()); } @Test public void testConnectToPushGatewayUsingHostAndPort() { PrometheusPushGatewayReporterFactory factory = new PrometheusPushGatewayReporterFactory(); MetricConfig metricConfig = new MetricConfig(); metricConfig.setProperty(HOST.key(), "localhost"); metricConfig.setProperty(PORT.key(), "18080"); PrometheusPushGatewayReporter reporter = factory.createMetricReporter(metricConfig); String gatewayBaseURL = factory.createMetricReporter(metricConfig).hostUrl.toString(); Assert.assertEquals(gatewayBaseURL, "http: } @Test public void testConnectToPushGatewayUsingHostUrl() { PrometheusPushGatewayReporterFactory factory = new PrometheusPushGatewayReporterFactory(); MetricConfig metricConfig = new MetricConfig(); metricConfig.setProperty(HOST_URL.key(), "https: PrometheusPushGatewayReporter reporter = factory.createMetricReporter(metricConfig); String gatewayBaseURL = factory.createMetricReporter(metricConfig).hostUrl.toString(); Assert.assertEquals(gatewayBaseURL, "https: } @Test public void testConnectToPushGatewayPreferHostUrl() { PrometheusPushGatewayReporterFactory factory = new PrometheusPushGatewayReporterFactory(); MetricConfig metricConfig = new MetricConfig(); metricConfig.setProperty(HOST_URL.key(), 
"https: metricConfig.setProperty(HOST.key(), "localhost1"); metricConfig.setProperty(PORT.key(), "18081"); String gatewayBaseURL = factory.createMetricReporter(metricConfig).hostUrl.toString(); Assert.assertEquals(gatewayBaseURL, "https: } @Test }
I added more test, and discover a missing check so worth it ;)
private void testRange(PanacheQuery<Person> query) { List<Person> persons = query.range(0, 2).list(); Assertions.assertEquals(3, persons.size()); Assertions.assertEquals("stef0", persons.get(0).name); Assertions.assertEquals("stef1", persons.get(1).name); Assertions.assertEquals("stef2", persons.get(2).name); persons = query.range(3, 5).list(); Assertions.assertEquals(3, persons.size()); Assertions.assertEquals("stef3", persons.get(0).name); Assertions.assertEquals("stef4", persons.get(1).name); Assertions.assertEquals("stef5", persons.get(2).name); persons = query.range(6, 8).list(); Assertions.assertEquals(1, persons.size()); Assertions.assertEquals("stef6", persons.get(0).name); persons = query.range(8, 12).list(); Assertions.assertEquals(0, persons.size()); try { query.range(0, 2).nextPage(); Assertions.fail(); } catch (UnsupportedOperationException e) { } persons = query.range(0, 2).page(0, 3).list(); Assertions.assertEquals(3, persons.size()); Assertions.assertEquals("stef0", persons.get(0).name); Assertions.assertEquals("stef1", persons.get(1).name); Assertions.assertEquals("stef2", persons.get(2).name); }
query.range(0, 2).nextPage();
private void testRange(PanacheQuery<Person> query) { List<Person> persons = query.range(0, 2).list(); Assertions.assertEquals(3, persons.size()); Assertions.assertEquals("stef0", persons.get(0).name); Assertions.assertEquals("stef1", persons.get(1).name); Assertions.assertEquals("stef2", persons.get(2).name); persons = query.range(3, 5).list(); Assertions.assertEquals(3, persons.size()); Assertions.assertEquals("stef3", persons.get(0).name); Assertions.assertEquals("stef4", persons.get(1).name); Assertions.assertEquals("stef5", persons.get(2).name); persons = query.range(6, 8).list(); Assertions.assertEquals(1, persons.size()); Assertions.assertEquals("stef6", persons.get(0).name); persons = query.range(8, 12).list(); Assertions.assertEquals(0, persons.size()); Assertions.assertThrows(UnsupportedOperationException.class, () -> query.range(0, 2).nextPage()); Assertions.assertThrows(UnsupportedOperationException.class, () -> query.range(0, 2).previousPage()); Assertions.assertThrows(UnsupportedOperationException.class, () -> query.range(0, 2).pageCount()); Assertions.assertThrows(UnsupportedOperationException.class, () -> query.range(0, 2).lastPage()); Assertions.assertThrows(UnsupportedOperationException.class, () -> query.range(0, 2).firstPage()); Assertions.assertThrows(UnsupportedOperationException.class, () -> query.range(0, 2).hasPreviousPage()); Assertions.assertThrows(UnsupportedOperationException.class, () -> query.range(0, 2).hasNextPage()); Assertions.assertThrows(UnsupportedOperationException.class, () -> query.range(0, 2).page()); persons = query.range(0, 2).page(0, 3).list(); Assertions.assertEquals(3, persons.size()); Assertions.assertEquals("stef0", persons.get(0).name); Assertions.assertEquals("stef1", persons.get(1).name); Assertions.assertEquals("stef2", persons.get(2).name); }
class TestEndpoint { @GET @Path("model") @Transactional public String testModel() { List<Person> persons = Person.findAll().list(); Assertions.assertEquals(0, persons.size()); persons = Person.listAll(); Assertions.assertEquals(0, persons.size()); Stream<Person> personStream = Person.findAll().stream(); Assertions.assertEquals(0, personStream.count()); personStream = Person.streamAll(); Assertions.assertEquals(0, personStream.count()); try { Person.findAll().singleResult(); Assertions.fail("singleResult should have thrown"); } catch (NoResultException x) { } Assertions.assertNull(Person.findAll().firstResult()); Person person = makeSavedPerson(); Assertions.assertNotNull(person.id); Assertions.assertEquals(1, Person.count()); Assertions.assertEquals(1, Person.count("name = ?1", "stef")); Assertions.assertEquals(1, Person.count("name = :name", Parameters.with("name", "stef").map())); Assertions.assertEquals(1, Person.count("name = :name", Parameters.with("name", "stef"))); Assertions.assertEquals(1, Person.count("name", "stef")); Assertions.assertEquals(1, Dog.count()); Assertions.assertEquals(1, person.dogs.size()); persons = Person.findAll().list(); Assertions.assertEquals(1, persons.size()); Assertions.assertEquals(person, persons.get(0)); persons = Person.listAll(); Assertions.assertEquals(1, persons.size()); Assertions.assertEquals(person, persons.get(0)); personStream = Person.findAll().stream(); Assertions.assertEquals(persons, personStream.collect(Collectors.toList())); personStream = Person.streamAll(); Assertions.assertEquals(persons, personStream.collect(Collectors.toList())); Assertions.assertEquals(person, Person.findAll().firstResult()); Assertions.assertEquals(person, Person.findAll().singleResult()); persons = Person.find("name = ?1", "stef").list(); Assertions.assertEquals(1, persons.size()); Assertions.assertEquals(person, persons.get(0)); persons = Person.find("name = ?1", "stef").withLock(LockModeType.PESSIMISTIC_READ).list(); 
Assertions.assertEquals(1, persons.size()); Assertions.assertEquals(person, persons.get(0)); persons = Person.find("name = ?1", "stef").withHint(QueryHints.HINT_CACHEABLE, "true").list(); Assertions.assertEquals(1, persons.size()); Assertions.assertEquals(person, persons.get(0)); persons = Person.list("name = ?1", "stef"); Assertions.assertEquals(1, persons.size()); Assertions.assertEquals(person, persons.get(0)); persons = Person.find("name = :name", Parameters.with("name", "stef").map()).list(); Assertions.assertEquals(1, persons.size()); Assertions.assertEquals(person, persons.get(0)); persons = Person.find("name = :name", Parameters.with("name", "stef")).list(); Assertions.assertEquals(1, persons.size()); Assertions.assertEquals(person, persons.get(0)); persons = Person.list("name = :name", Parameters.with("name", "stef").map()); Assertions.assertEquals(1, persons.size()); Assertions.assertEquals(person, persons.get(0)); persons = Person.list("name = :name", Parameters.with("name", "stef")); Assertions.assertEquals(1, persons.size()); Assertions.assertEquals(person, persons.get(0)); persons = Person.find("name", "stef").list(); Assertions.assertEquals(1, persons.size()); Assertions.assertEquals(person, persons.get(0)); personStream = Person.find("name = ?1", "stef").stream(); Assertions.assertEquals(persons, personStream.collect(Collectors.toList())); personStream = Person.stream("name = ?1", "stef"); Assertions.assertEquals(persons, personStream.collect(Collectors.toList())); personStream = Person.stream("name = :name", Parameters.with("name", "stef").map()); Assertions.assertEquals(persons, personStream.collect(Collectors.toList())); personStream = Person.stream("name = :name", Parameters.with("name", "stef")); Assertions.assertEquals(persons, personStream.collect(Collectors.toList())); personStream = Person.find("name", "stef").stream(); Assertions.assertEquals(persons, personStream.collect(Collectors.toList())); Assertions.assertEquals(person, 
Person.find("name", "stef").firstResult()); Assertions.assertEquals(person, Person.find("name", "stef").singleResult()); Person byId = Person.findById(person.id); Assertions.assertEquals(person, byId); Assertions.assertEquals("Person<" + person.id + ">", byId.toString()); byId = Person.<Person> findByIdOptional(person.id).get(); Assertions.assertEquals(person, byId); Assertions.assertEquals("Person<" + person.id + ">", byId.toString()); byId = Person.findById(person.id, LockModeType.PESSIMISTIC_READ); Assertions.assertEquals(person, byId); Assertions.assertEquals("Person<" + person.id + ">", byId.toString()); byId = Person.<Person> findByIdOptional(person.id, LockModeType.PESSIMISTIC_READ).get(); Assertions.assertEquals(person, byId); Assertions.assertEquals("Person<" + person.id + ">", byId.toString()); person.delete(); Assertions.assertEquals(0, Person.count()); person = makeSavedPerson(); Assertions.assertEquals(1, Person.count()); Assertions.assertEquals(0, Person.delete("name = ?1", "emmanuel")); Assertions.assertEquals(1, Dog.delete("owner = ?1", person)); Assertions.assertEquals(1, Person.delete("name", "stef")); person = makeSavedPerson(); Assertions.assertEquals(1, Dog.delete("owner = :owner", Parameters.with("owner", person).map())); Assertions.assertEquals(1, Person.delete("name", "stef")); person = makeSavedPerson(); Assertions.assertEquals(1, Dog.delete("owner = :owner", Parameters.with("owner", person))); Assertions.assertEquals(1, Person.delete("name", "stef")); Assertions.assertEquals(0, Person.deleteAll()); makeSavedPerson(); Assertions.assertEquals(1, Dog.deleteAll()); Assertions.assertEquals(1, Person.deleteAll()); testPersist(PersistTest.Iterable); testPersist(PersistTest.Stream); testPersist(PersistTest.Variadic); Assertions.assertEquals(6, Person.deleteAll()); testSorting(); for (int i = 0; i < 7; i++) { makeSavedPerson(String.valueOf(i)); } testPaging(Person.findAll()); testPaging(Person.find("ORDER BY name")); testRange(Person.findAll()); 
testRange(Person.find("ORDER BY name")); try { Person.findAll().singleResult(); Assertions.fail("singleResult should have thrown"); } catch (NonUniqueResultException x) { } Assertions.assertNotNull(Person.findAll().firstResult()); Assertions.assertNotNull(Person.findAll().firstResultOptional().get()); Assertions.assertEquals(7, Person.deleteAll()); testUpdate(); Person person1 = new Person(); person1.name = "testFLush1"; person1.uniqueName = "unique"; person1.persist(); Person person2 = new Person(); person2.name = "testFLush2"; person2.uniqueName = "unique"; try { person2.persistAndFlush(); Assertions.fail(); } catch (PersistenceException pe) { } return "OK"; } private void testUpdate() { makeSavedPerson("p1"); makeSavedPerson("p2"); int updateByIndexParameter = Person.update("update from Person2 p set p.name = 'stefNEW' where p.name = ?1", "stefp1"); Assertions.assertEquals(1, updateByIndexParameter, "More than one Person updated"); int updateByNamedParameter = Person.update("update from Person2 p set p.name = 'stefNEW' where p.name = :pName", Parameters.with("pName", "stefp2").map()); Assertions.assertEquals(1, updateByNamedParameter, "More than one Person updated"); Assertions.assertEquals(2, Person.deleteAll()); makeSavedPerson("p1"); makeSavedPerson("p2"); updateByIndexParameter = Person.update("from Person2 p set p.name = 'stefNEW' where p.name = ?1", "stefp1"); Assertions.assertEquals(1, updateByIndexParameter, "More than one Person updated"); updateByNamedParameter = Person.update("from Person2 p set p.name = 'stefNEW' where p.name = :pName", Parameters.with("pName", "stefp2").map()); Assertions.assertEquals(1, updateByNamedParameter, "More than one Person updated"); Assertions.assertEquals(2, Person.deleteAll()); makeSavedPerson("p1"); makeSavedPerson("p2"); updateByIndexParameter = Person.update("set name = 'stefNEW' where name = ?1", "stefp1"); Assertions.assertEquals(1, updateByIndexParameter, "More than one Person updated"); updateByNamedParameter = 
Person.update("set name = 'stefNEW' where name = :pName", Parameters.with("pName", "stefp2").map()); Assertions.assertEquals(1, updateByNamedParameter, "More than one Person updated"); Assertions.assertEquals(2, Person.deleteAll()); makeSavedPerson("p1"); makeSavedPerson("p2"); updateByIndexParameter = Person.update("name = 'stefNEW' where name = ?1", "stefp1"); Assertions.assertEquals(1, updateByIndexParameter, "More than one Person updated"); updateByNamedParameter = Person.update("name = 'stefNEW' where name = :pName", Parameters.with("pName", "stefp2").map()); Assertions.assertEquals(1, updateByNamedParameter, "More than one Person updated"); Assertions.assertEquals(2, Person.deleteAll()); makeSavedPerson("p1"); makeSavedPerson("p2"); updateByIndexParameter = Person.update("name = 'stefNEW' where name = ?1", "stefp1"); Assertions.assertEquals(1, updateByIndexParameter, "More than one Person updated"); updateByNamedParameter = Person.update("name = 'stefNEW' where name = :pName", Parameters.with("pName", "stefp2")); Assertions.assertEquals(1, updateByNamedParameter, "More than one Person updated"); Assertions.assertEquals(2, Person.deleteAll()); Assertions.assertThrows(PanacheQueryException.class, () -> Person.update(null), "PanacheQueryException should have thrown"); Assertions.assertThrows(PanacheQueryException.class, () -> Person.update(" "), "PanacheQueryException should have thrown"); } private void testUpdateDAO() { makeSavedPerson("p1"); makeSavedPerson("p2"); int updateByIndexParameter = personDao.update("update from Person2 p set p.name = 'stefNEW' where p.name = ?1", "stefp1"); Assertions.assertEquals(1, updateByIndexParameter, "More than one Person updated"); int updateByNamedParameter = personDao.update("update from Person2 p set p.name = 'stefNEW' where p.name = :pName", Parameters.with("pName", "stefp2").map()); Assertions.assertEquals(1, updateByNamedParameter, "More than one Person updated"); Assertions.assertEquals(2, personDao.deleteAll()); 
makeSavedPerson("p1"); makeSavedPerson("p2"); updateByIndexParameter = personDao.update("from Person2 p set p.name = 'stefNEW' where p.name = ?1", "stefp1"); Assertions.assertEquals(1, updateByIndexParameter, "More than one Person updated"); updateByNamedParameter = personDao.update("from Person2 p set p.name = 'stefNEW' where p.name = :pName", Parameters.with("pName", "stefp2").map()); Assertions.assertEquals(1, updateByNamedParameter, "More than one Person updated"); Assertions.assertEquals(2, personDao.deleteAll()); makeSavedPerson("p1"); makeSavedPerson("p2"); updateByIndexParameter = personDao.update("set name = 'stefNEW' where name = ?1", "stefp1"); Assertions.assertEquals(1, updateByIndexParameter, "More than one Person updated"); updateByNamedParameter = personDao.update("set name = 'stefNEW' where name = :pName", Parameters.with("pName", "stefp2").map()); Assertions.assertEquals(1, updateByNamedParameter, "More than one Person updated"); Assertions.assertEquals(2, personDao.deleteAll()); makeSavedPerson("p1"); makeSavedPerson("p2"); updateByIndexParameter = personDao.update("name = 'stefNEW' where name = ?1", "stefp1"); Assertions.assertEquals(1, updateByIndexParameter, "More than one Person updated"); updateByNamedParameter = personDao.update("name = 'stefNEW' where name = :pName", Parameters.with("pName", "stefp2").map()); Assertions.assertEquals(1, updateByNamedParameter, "More than one Person updated"); Assertions.assertEquals(2, personDao.deleteAll()); makeSavedPerson("p1"); makeSavedPerson("p2"); updateByIndexParameter = personDao.update("name = 'stefNEW' where name = ?1", "stefp1"); Assertions.assertEquals(1, updateByIndexParameter, "More than one Person updated"); updateByNamedParameter = personDao.update("name = 'stefNEW' where name = :pName", Parameters.with("pName", "stefp2")); Assertions.assertEquals(1, updateByNamedParameter, "More than one Person updated"); Assertions.assertEquals(2, personDao.deleteAll()); 
Assertions.assertThrows(PanacheQueryException.class, () -> personDao.update(null), "PanacheQueryException should have thrown"); Assertions.assertThrows(PanacheQueryException.class, () -> personDao.update(" "), "PanacheQueryException should have thrown"); } private void testSorting() { Person person1 = new Person(); person1.name = "stef"; person1.status = Status.LIVING; person1.persist(); Person person2 = new Person(); person2.name = "stef"; person2.status = Status.DECEASED; person2.persist(); Person person3 = new Person(); person3.name = "emmanuel"; person3.status = Status.LIVING; person3.persist(); Sort sort1 = Sort.by("name", "status"); List<Person> order1 = Arrays.asList(person3, person1, person2); List<Person> list = Person.findAll(sort1).list(); Assertions.assertEquals(order1, list); list = Person.listAll(sort1); Assertions.assertEquals(order1, list); list = Person.<Person> streamAll(sort1).collect(Collectors.toList()); Assertions.assertEquals(order1, list); Sort sort2 = Sort.descending("name", "status"); List<Person> order2 = Arrays.asList(person2, person1); list = Person.find("name", sort2, "stef").list(); Assertions.assertEquals(order2, list); list = Person.list("name", sort2, "stef"); Assertions.assertEquals(order2, list); list = Person.<Person> stream("name", sort2, "stef").collect(Collectors.toList()); Assertions.assertEquals(order2, list); list = Person.find("name = :name", sort2, Parameters.with("name", "stef").map()).list(); Assertions.assertEquals(order2, list); list = Person.list("name = :name", sort2, Parameters.with("name", "stef").map()); Assertions.assertEquals(order2, list); list = Person.<Person> stream("name = :name", sort2, Parameters.with("name", "stef").map()) .collect(Collectors.toList()); Assertions.assertEquals(order2, list); list = Person.find("name = :name", sort2, Parameters.with("name", "stef")).list(); Assertions.assertEquals(order2, list); list = Person.list("name = :name", sort2, Parameters.with("name", "stef")); 
Assertions.assertEquals(order2, list); list = Person.<Person> stream("name = :name", sort2, Parameters.with("name", "stef")).collect(Collectors.toList()); Assertions.assertEquals(order2, list); Assertions.assertEquals(3, Person.deleteAll()); } private Person makeSavedPerson(String suffix) { Person person = new Person(); person.name = "stef" + suffix; person.status = Status.LIVING; person.address = new Address("stef street"); person.address.persist(); person.persist(); return person; } private Person makeSavedPerson() { Person person = makeSavedPerson(""); Dog dog = new Dog("octave", "dalmatian"); dog.owner = person; person.dogs.add(dog); dog.persist(); return person; } private void testPersist(PersistTest persistTest) { Person person1 = new Person(); person1.name = "stef1"; Person person2 = new Person(); person2.name = "stef2"; Assertions.assertFalse(person1.isPersistent()); Assertions.assertFalse(person2.isPersistent()); switch (persistTest) { case Iterable: Person.persist(Arrays.asList(person1, person2)); break; case Stream: Person.persist(Stream.of(person1, person2)); break; case Variadic: Person.persist(person1, person2); break; } Assertions.assertTrue(person1.isPersistent()); Assertions.assertTrue(person2.isPersistent()); } @Inject PersonRepository personDao; @Inject DogDao dogDao; @Inject AddressDao addressDao; @GET @Path("model-dao") @Transactional public String testModelDao() { List<Person> persons = personDao.findAll().list(); Assertions.assertEquals(0, persons.size()); Stream<Person> personStream = personDao.findAll().stream(); Assertions.assertEquals(0, personStream.count()); try { personDao.findAll().singleResult(); Assertions.fail("singleResult should have thrown"); } catch (NoResultException x) { } Assertions.assertFalse(personDao.findAll().singleResultOptional().isPresent()); Assertions.assertNull(personDao.findAll().firstResult()); Assertions.assertFalse(personDao.findAll().firstResultOptional().isPresent()); Person person = makeSavedPersonDao(); 
Assertions.assertNotNull(person.id); Assertions.assertEquals(1, personDao.count()); Assertions.assertEquals(1, personDao.count("name = ?1", "stef")); Assertions.assertEquals(1, personDao.count("name = :name", Parameters.with("name", "stef").map())); Assertions.assertEquals(1, personDao.count("name = :name", Parameters.with("name", "stef"))); Assertions.assertEquals(1, personDao.count("name", "stef")); Assertions.assertEquals(1, dogDao.count()); Assertions.assertEquals(1, person.dogs.size()); persons = personDao.findAll().list(); Assertions.assertEquals(1, persons.size()); Assertions.assertEquals(person, persons.get(0)); persons = personDao.listAll(); Assertions.assertEquals(1, persons.size()); Assertions.assertEquals(person, persons.get(0)); personStream = personDao.findAll().stream(); Assertions.assertEquals(persons, personStream.collect(Collectors.toList())); personStream = personDao.streamAll(); Assertions.assertEquals(persons, personStream.collect(Collectors.toList())); Assertions.assertEquals(person, personDao.findAll().firstResult()); Assertions.assertEquals(person, personDao.findAll().singleResult()); Assertions.assertEquals(person, personDao.findAll().singleResultOptional().get()); persons = personDao.find("name = ?1", "stef").list(); Assertions.assertEquals(1, persons.size()); Assertions.assertEquals(person, persons.get(0)); persons = personDao.find("name = ?1", "stef").withLock(LockModeType.PESSIMISTIC_READ).list(); Assertions.assertEquals(1, persons.size()); Assertions.assertEquals(person, persons.get(0)); persons = personDao.list("name = ?1", "stef"); Assertions.assertEquals(1, persons.size()); Assertions.assertEquals(person, persons.get(0)); persons = personDao.find("name = :name", Parameters.with("name", "stef").map()).list(); Assertions.assertEquals(1, persons.size()); Assertions.assertEquals(person, persons.get(0)); persons = personDao.find("name = :name", Parameters.with("name", "stef")).list(); Assertions.assertEquals(1, persons.size()); 
Assertions.assertEquals(person, persons.get(0)); persons = personDao.list("name = :name", Parameters.with("name", "stef").map()); Assertions.assertEquals(1, persons.size()); Assertions.assertEquals(person, persons.get(0)); persons = personDao.list("name = :name", Parameters.with("name", "stef")); Assertions.assertEquals(1, persons.size()); Assertions.assertEquals(person, persons.get(0)); persons = personDao.find("name", "stef").list(); Assertions.assertEquals(1, persons.size()); Assertions.assertEquals(person, persons.get(0)); personStream = personDao.find("name = ?1", "stef").stream(); Assertions.assertEquals(persons, personStream.collect(Collectors.toList())); personStream = personDao.stream("name = ?1", "stef"); Assertions.assertEquals(persons, personStream.collect(Collectors.toList())); personStream = personDao.stream("name = :name", Parameters.with("name", "stef").map()); Assertions.assertEquals(persons, personStream.collect(Collectors.toList())); personStream = personDao.stream("name = :name", Parameters.with("name", "stef")); Assertions.assertEquals(persons, personStream.collect(Collectors.toList())); personStream = personDao.find("name", "stef").stream(); Assertions.assertEquals(persons, personStream.collect(Collectors.toList())); Assertions.assertEquals(person, personDao.find("name", "stef").firstResult()); Assertions.assertEquals(person, personDao.find("name", "stef").singleResult()); Assertions.assertEquals(person, personDao.find("name", "stef").singleResultOptional().get()); Person byId = personDao.findById(person.id); Assertions.assertEquals(person, byId); byId = personDao.findByIdOptional(person.id).get(); Assertions.assertEquals(person, byId); byId = personDao.findById(person.id, LockModeType.PESSIMISTIC_READ); Assertions.assertEquals(person, byId); byId = personDao.findByIdOptional(person.id, LockModeType.PESSIMISTIC_READ).get(); Assertions.assertEquals(person, byId); personDao.delete(person); Assertions.assertEquals(0, personDao.count()); person = 
makeSavedPersonDao(); Assertions.assertEquals(1, personDao.count()); Assertions.assertEquals(0, personDao.delete("name = ?1", "emmanuel")); Assertions.assertEquals(1, dogDao.delete("owner = ?1", person)); Assertions.assertEquals(1, personDao.delete("name", "stef")); person = makeSavedPerson(); Assertions.assertEquals(1, dogDao.delete("owner = :owner", Parameters.with("owner", person).map())); Assertions.assertEquals(1, personDao.delete("name", "stef")); person = makeSavedPerson(); Assertions.assertEquals(1, dogDao.delete("owner = :owner", Parameters.with("owner", person))); Assertions.assertEquals(1, personDao.delete("name", "stef")); Assertions.assertEquals(0, personDao.deleteAll()); makeSavedPersonDao(); Assertions.assertEquals(1, dogDao.deleteAll()); Assertions.assertEquals(1, personDao.deleteAll()); testPersistDao(PersistTest.Iterable); testPersistDao(PersistTest.Stream); testPersistDao(PersistTest.Variadic); Assertions.assertEquals(6, personDao.deleteAll()); testSortingDao(); for (int i = 0; i < 7; i++) { makeSavedPersonDao(String.valueOf(i)); } testPaging(personDao.findAll()); testPaging(personDao.find("ORDER BY name")); testRange(personDao.findAll()); testRange(personDao.find("ORDER BY name")); try { personDao.findAll().singleResult(); Assertions.fail("singleResult should have thrown"); } catch (NonUniqueResultException x) { } Assertions.assertNotNull(personDao.findAll().firstResult()); Assertions.assertEquals(7, personDao.deleteAll()); testUpdateDAO(); Person person1 = new Person(); person1.name = "testFlush1"; person1.uniqueName = "unique"; personDao.persist(person1); Person person2 = new Person(); person2.name = "testFlush2"; person2.uniqueName = "unique"; try { personDao.persistAndFlush(person2); Assertions.fail(); } catch (PersistenceException pe) { } return "OK"; } private void testSortingDao() { Person person1 = new Person(); person1.name = "stef"; person1.status = Status.LIVING; personDao.persist(person1); Person person2 = new Person(); person2.name 
= "stef"; person2.status = Status.DECEASED; personDao.persist(person2); Person person3 = new Person(); person3.name = "emmanuel"; person3.status = Status.LIVING; personDao.persist(person3); Sort sort1 = Sort.by("name", "status"); List<Person> order1 = Arrays.asList(person3, person1, person2); List<Person> list = personDao.findAll(sort1).list(); Assertions.assertEquals(order1, list); list = personDao.listAll(sort1); Assertions.assertEquals(order1, list); list = personDao.streamAll(sort1).collect(Collectors.toList()); Assertions.assertEquals(order1, list); Sort sort2 = Sort.descending("name", "status"); List<Person> order2 = Arrays.asList(person2, person1); list = personDao.find("name", sort2, "stef").list(); Assertions.assertEquals(order2, list); list = personDao.list("name", sort2, "stef"); Assertions.assertEquals(order2, list); list = personDao.stream("name", sort2, "stef").collect(Collectors.toList()); Assertions.assertEquals(order2, list); list = personDao.find("name = :name", sort2, Parameters.with("name", "stef").map()).list(); Assertions.assertEquals(order2, list); list = personDao.list("name = :name", sort2, Parameters.with("name", "stef").map()); Assertions.assertEquals(order2, list); list = personDao.stream("name = :name", sort2, Parameters.with("name", "stef").map()).collect(Collectors.toList()); Assertions.assertEquals(order2, list); list = personDao.find("name = :name", sort2, Parameters.with("name", "stef")).list(); Assertions.assertEquals(order2, list); list = personDao.list("name = :name", sort2, Parameters.with("name", "stef")); Assertions.assertEquals(order2, list); list = personDao.stream("name = :name", sort2, Parameters.with("name", "stef")).collect(Collectors.toList()); Assertions.assertEquals(order2, list); Assertions.assertEquals(3, Person.deleteAll()); } enum PersistTest { Iterable, Variadic, Stream; } private void testPersistDao(PersistTest persistTest) { Person person1 = new Person(); person1.name = "stef1"; Person person2 = new Person(); 
person2.name = "stef2"; Assertions.assertFalse(person1.isPersistent()); Assertions.assertFalse(person2.isPersistent()); switch (persistTest) { case Iterable: personDao.persist(Arrays.asList(person1, person2)); break; case Stream: personDao.persist(Stream.of(person1, person2)); break; case Variadic: personDao.persist(person1, person2); break; } Assertions.assertTrue(person1.isPersistent()); Assertions.assertTrue(person2.isPersistent()); } private Person makeSavedPersonDao(String suffix) { Person person = new Person(); person.name = "stef" + suffix; person.status = Status.LIVING; person.address = new Address("stef street"); addressDao.persist(person.address); personDao.persist(person); return person; } private Person makeSavedPersonDao() { Person person = makeSavedPersonDao(""); Dog dog = new Dog("octave", "dalmatian"); dog.owner = person; person.dogs.add(dog); dogDao.persist(dog); return person; } private void testPaging(PanacheQuery<Person> query) { List<Person> persons = query.page(0, 3).list(); Assertions.assertEquals(3, persons.size()); Assertions.assertEquals("stef0", persons.get(0).name); Assertions.assertEquals("stef1", persons.get(1).name); Assertions.assertEquals("stef2", persons.get(2).name); persons = query.page(1, 3).list(); Assertions.assertEquals(3, persons.size()); Assertions.assertEquals("stef3", persons.get(0).name); Assertions.assertEquals("stef4", persons.get(1).name); Assertions.assertEquals("stef5", persons.get(2).name); persons = query.page(2, 3).list(); Assertions.assertEquals(1, persons.size()); Assertions.assertEquals("stef6", persons.get(0).name); persons = query.page(2, 4).list(); Assertions.assertEquals(0, persons.size()); Page page = new Page(3); persons = query.page(page).list(); Assertions.assertEquals(3, persons.size()); Assertions.assertEquals("stef0", persons.get(0).name); Assertions.assertEquals("stef1", persons.get(1).name); Assertions.assertEquals("stef2", persons.get(2).name); page = page.next(); persons = 
query.page(page).list(); Assertions.assertEquals(3, persons.size()); Assertions.assertEquals("stef3", persons.get(0).name); Assertions.assertEquals("stef4", persons.get(1).name); Assertions.assertEquals("stef5", persons.get(2).name); page = page.next(); persons = query.page(page).list(); Assertions.assertEquals(1, persons.size()); Assertions.assertEquals("stef6", persons.get(0).name); page = page.next(); persons = query.page(page).list(); Assertions.assertEquals(0, persons.size()); page = new Page(3); persons = query.page(page).list(); Assertions.assertEquals(3, persons.size()); Assertions.assertEquals("stef0", persons.get(0).name); Assertions.assertEquals("stef1", persons.get(1).name); Assertions.assertEquals("stef2", persons.get(2).name); Assertions.assertTrue(query.hasNextPage()); Assertions.assertFalse(query.hasPreviousPage()); persons = query.nextPage().list(); Assertions.assertEquals(1, query.page().index); Assertions.assertEquals(3, query.page().size); Assertions.assertEquals(3, persons.size()); Assertions.assertEquals("stef3", persons.get(0).name); Assertions.assertEquals("stef4", persons.get(1).name); Assertions.assertEquals("stef5", persons.get(2).name); Assertions.assertTrue(query.hasNextPage()); Assertions.assertTrue(query.hasPreviousPage()); persons = query.nextPage().list(); Assertions.assertEquals(1, persons.size()); Assertions.assertEquals("stef6", persons.get(0).name); Assertions.assertFalse(query.hasNextPage()); Assertions.assertTrue(query.hasPreviousPage()); persons = query.nextPage().list(); Assertions.assertEquals(0, persons.size()); Assertions.assertEquals(7, query.count()); Assertions.assertEquals(3, query.pageCount()); persons = query.page(0, 3).range(0, 1).list(); Assertions.assertEquals(2, persons.size()); Assertions.assertEquals("stef0", persons.get(0).name); Assertions.assertEquals("stef1", persons.get(1).name); } @GET @Path("accessors") public String testAccessors() throws NoSuchMethodException, SecurityException { 
checkMethod(AccessorEntity.class, "getString", String.class); checkMethod(AccessorEntity.class, "isBool", boolean.class); checkMethod(AccessorEntity.class, "getC", char.class); checkMethod(AccessorEntity.class, "getS", short.class); checkMethod(AccessorEntity.class, "getI", int.class); checkMethod(AccessorEntity.class, "getL", long.class); checkMethod(AccessorEntity.class, "getF", float.class); checkMethod(AccessorEntity.class, "getD", double.class); checkMethod(AccessorEntity.class, "getT", Object.class); checkMethod(AccessorEntity.class, "getT2", Object.class); checkMethod(AccessorEntity.class, "setString", void.class, String.class); checkMethod(AccessorEntity.class, "setBool", void.class, boolean.class); checkMethod(AccessorEntity.class, "setC", void.class, char.class); checkMethod(AccessorEntity.class, "setS", void.class, short.class); checkMethod(AccessorEntity.class, "setI", void.class, int.class); checkMethod(AccessorEntity.class, "setL", void.class, long.class); checkMethod(AccessorEntity.class, "setF", void.class, float.class); checkMethod(AccessorEntity.class, "setD", void.class, double.class); checkMethod(AccessorEntity.class, "setT", void.class, Object.class); checkMethod(AccessorEntity.class, "setT2", void.class, Object.class); try { checkMethod(AccessorEntity.class, "getTrans2", Object.class); Assertions.fail("transient field should have no getter: trans2"); } catch (NoSuchMethodException x) { } try { checkMethod(AccessorEntity.class, "setTrans2", void.class, Object.class); Assertions.fail("transient field should have no setter: trans2"); } catch (NoSuchMethodException x) { } AccessorEntity entity = new AccessorEntity(); @SuppressWarnings("unused") byte b = entity.b; Assertions.assertEquals(1, entity.getBCalls); entity.i = 2; Assertions.assertEquals(1, entity.setICalls); Object trans = entity.trans; Assertions.assertEquals(0, entity.getTransCalls); entity.trans = trans; Assertions.assertEquals(0, entity.setTransCalls); entity.method(); 
Assertions.assertEquals(2, entity.getBCalls); Assertions.assertEquals(2, entity.setICalls); return "OK"; } private void checkMethod(Class<?> klass, String name, Class<?> returnType, Class<?>... params) throws NoSuchMethodException, SecurityException { Method method = klass.getMethod(name, params); Assertions.assertEquals(returnType, method.getReturnType()); } @GET @Path("model1") @Transactional public String testModel1() { Assertions.assertEquals(0, Person.count()); Person person = makeSavedPerson(); SelfDirtinessTracker trackingPerson = (SelfDirtinessTracker) person; String[] dirtyAttributes = trackingPerson.$$_hibernate_getDirtyAttributes(); Assertions.assertEquals(0, dirtyAttributes.length); person.name = "1"; dirtyAttributes = trackingPerson.$$_hibernate_getDirtyAttributes(); Assertions.assertEquals(1, dirtyAttributes.length); Assertions.assertEquals(1, Person.count()); return "OK"; } @GET @Path("model2") @Transactional public String testModel2() { Assertions.assertEquals(1, Person.count()); Person person = Person.findAll().firstResult(); Assertions.assertEquals("1", person.name); person.name = "2"; return "OK"; } @GET @Path("model3") @Transactional public String testModel3() { Assertions.assertEquals(1, Person.count()); Person person = Person.findAll().firstResult(); Assertions.assertEquals("2", person.name); Dog.deleteAll(); Person.deleteAll(); Address.deleteAll(); Assertions.assertEquals(0, Person.count()); return "OK"; } @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML }) @GET @Path("ignored-properties") public Person ignoredProperties() throws NoSuchMethodException, SecurityException { Person.class.getMethod("$$_hibernate_read_id"); Person.class.getMethod("$$_hibernate_read_name"); try { Person.class.getMethod("$$_hibernate_read_persistent"); Assertions.fail(); } catch (NoSuchMethodException e) { } Person person = new Person(); person.id = 666l; person.name = "Eddie"; person.status = Status.DECEASED; return person; } @Inject 
Bug5274EntityRepository bug5274EntityRepository; @GET @Path("5274") @Transactional public String testBug5274() { bug5274EntityRepository.count(); return "OK"; } @Inject Bug5885EntityRepository bug5885EntityRepository; @GET @Path("5885") @Transactional public String testBug5885() { bug5885EntityRepository.findById(1L); return "OK"; } @GET @Path("testJaxbAnnotationTransfer") public String testJaxbAnnotationTransfer() throws Exception { Method m = JAXBEntity.class.getMethod("getNamedAnnotatedProp"); XmlAttribute anno = m.getAnnotation(XmlAttribute.class); assertNotNull(anno); assertEquals("Named", anno.name()); assertNull(m.getAnnotation(XmlTransient.class)); m = JAXBEntity.class.getMethod("getDefaultAnnotatedProp"); anno = m.getAnnotation(XmlAttribute.class); assertNotNull(anno); assertEquals(" assertNull(m.getAnnotation(XmlTransient.class)); m = JAXBEntity.class.getMethod("getUnAnnotatedProp"); assertNull(m.getAnnotation(XmlAttribute.class)); assertNull(m.getAnnotation(XmlTransient.class)); m = JAXBEntity.class.getMethod("getTransientProp"); assertNull(m.getAnnotation(XmlAttribute.class)); assertNotNull(m.getAnnotation(XmlTransient.class)); m = JAXBEntity.class.getMethod("getArrayAnnotatedProp"); assertNull(m.getAnnotation(XmlTransient.class)); XmlElements elementsAnno = m.getAnnotation(XmlElements.class); assertNotNull(elementsAnno); assertNotNull(elementsAnno.value()); assertEquals(2, elementsAnno.value().length); assertEquals("array1", elementsAnno.value()[0].name()); assertEquals("array2", elementsAnno.value()[1].name()); ensureFieldSanitized("namedAnnotatedProp"); ensureFieldSanitized("transientProp"); ensureFieldSanitized("defaultAnnotatedProp"); ensureFieldSanitized("unAnnotatedProp"); ensureFieldSanitized("arrayAnnotatedProp"); return "OK"; } private void ensureFieldSanitized(String fieldName) throws Exception { Field f = JAXBEntity.class.getField(fieldName); assertNull(f.getAnnotation(XmlAttribute.class)); assertNotNull(f.getAnnotation(XmlTransient.class)); 
} }
class TestEndpoint { @GET @Path("model") @Transactional public String testModel() { List<Person> persons = Person.findAll().list(); Assertions.assertEquals(0, persons.size()); persons = Person.listAll(); Assertions.assertEquals(0, persons.size()); Stream<Person> personStream = Person.findAll().stream(); Assertions.assertEquals(0, personStream.count()); personStream = Person.streamAll(); Assertions.assertEquals(0, personStream.count()); try { Person.findAll().singleResult(); Assertions.fail("singleResult should have thrown"); } catch (NoResultException x) { } Assertions.assertNull(Person.findAll().firstResult()); Person person = makeSavedPerson(); Assertions.assertNotNull(person.id); Assertions.assertEquals(1, Person.count()); Assertions.assertEquals(1, Person.count("name = ?1", "stef")); Assertions.assertEquals(1, Person.count("name = :name", Parameters.with("name", "stef").map())); Assertions.assertEquals(1, Person.count("name = :name", Parameters.with("name", "stef"))); Assertions.assertEquals(1, Person.count("name", "stef")); Assertions.assertEquals(1, Dog.count()); Assertions.assertEquals(1, person.dogs.size()); persons = Person.findAll().list(); Assertions.assertEquals(1, persons.size()); Assertions.assertEquals(person, persons.get(0)); persons = Person.listAll(); Assertions.assertEquals(1, persons.size()); Assertions.assertEquals(person, persons.get(0)); personStream = Person.findAll().stream(); Assertions.assertEquals(persons, personStream.collect(Collectors.toList())); personStream = Person.streamAll(); Assertions.assertEquals(persons, personStream.collect(Collectors.toList())); Assertions.assertEquals(person, Person.findAll().firstResult()); Assertions.assertEquals(person, Person.findAll().singleResult()); persons = Person.find("name = ?1", "stef").list(); Assertions.assertEquals(1, persons.size()); Assertions.assertEquals(person, persons.get(0)); persons = Person.find("name = ?1", "stef").withLock(LockModeType.PESSIMISTIC_READ).list(); 
Assertions.assertEquals(1, persons.size()); Assertions.assertEquals(person, persons.get(0)); persons = Person.find("name = ?1", "stef").withHint(QueryHints.HINT_CACHEABLE, "true").list(); Assertions.assertEquals(1, persons.size()); Assertions.assertEquals(person, persons.get(0)); persons = Person.list("name = ?1", "stef"); Assertions.assertEquals(1, persons.size()); Assertions.assertEquals(person, persons.get(0)); persons = Person.find("name = :name", Parameters.with("name", "stef").map()).list(); Assertions.assertEquals(1, persons.size()); Assertions.assertEquals(person, persons.get(0)); persons = Person.find("name = :name", Parameters.with("name", "stef")).list(); Assertions.assertEquals(1, persons.size()); Assertions.assertEquals(person, persons.get(0)); persons = Person.list("name = :name", Parameters.with("name", "stef").map()); Assertions.assertEquals(1, persons.size()); Assertions.assertEquals(person, persons.get(0)); persons = Person.list("name = :name", Parameters.with("name", "stef")); Assertions.assertEquals(1, persons.size()); Assertions.assertEquals(person, persons.get(0)); persons = Person.find("name", "stef").list(); Assertions.assertEquals(1, persons.size()); Assertions.assertEquals(person, persons.get(0)); personStream = Person.find("name = ?1", "stef").stream(); Assertions.assertEquals(persons, personStream.collect(Collectors.toList())); personStream = Person.stream("name = ?1", "stef"); Assertions.assertEquals(persons, personStream.collect(Collectors.toList())); personStream = Person.stream("name = :name", Parameters.with("name", "stef").map()); Assertions.assertEquals(persons, personStream.collect(Collectors.toList())); personStream = Person.stream("name = :name", Parameters.with("name", "stef")); Assertions.assertEquals(persons, personStream.collect(Collectors.toList())); personStream = Person.find("name", "stef").stream(); Assertions.assertEquals(persons, personStream.collect(Collectors.toList())); Assertions.assertEquals(person, 
Person.find("name", "stef").firstResult()); Assertions.assertEquals(person, Person.find("name", "stef").singleResult()); Person byId = Person.findById(person.id); Assertions.assertEquals(person, byId); Assertions.assertEquals("Person<" + person.id + ">", byId.toString()); byId = Person.<Person> findByIdOptional(person.id).get(); Assertions.assertEquals(person, byId); Assertions.assertEquals("Person<" + person.id + ">", byId.toString()); byId = Person.findById(person.id, LockModeType.PESSIMISTIC_READ); Assertions.assertEquals(person, byId); Assertions.assertEquals("Person<" + person.id + ">", byId.toString()); byId = Person.<Person> findByIdOptional(person.id, LockModeType.PESSIMISTIC_READ).get(); Assertions.assertEquals(person, byId); Assertions.assertEquals("Person<" + person.id + ">", byId.toString()); person.delete(); Assertions.assertEquals(0, Person.count()); person = makeSavedPerson(); Assertions.assertEquals(1, Person.count()); Assertions.assertEquals(0, Person.delete("name = ?1", "emmanuel")); Assertions.assertEquals(1, Dog.delete("owner = ?1", person)); Assertions.assertEquals(1, Person.delete("name", "stef")); person = makeSavedPerson(); Assertions.assertEquals(1, Dog.delete("owner = :owner", Parameters.with("owner", person).map())); Assertions.assertEquals(1, Person.delete("name", "stef")); person = makeSavedPerson(); Assertions.assertEquals(1, Dog.delete("owner = :owner", Parameters.with("owner", person))); Assertions.assertEquals(1, Person.delete("name", "stef")); Assertions.assertEquals(0, Person.deleteAll()); makeSavedPerson(); Assertions.assertEquals(1, Dog.deleteAll()); Assertions.assertEquals(1, Person.deleteAll()); testPersist(PersistTest.Iterable); testPersist(PersistTest.Stream); testPersist(PersistTest.Variadic); Assertions.assertEquals(6, Person.deleteAll()); testSorting(); for (int i = 0; i < 7; i++) { makeSavedPerson(String.valueOf(i)); } testPaging(Person.findAll()); testPaging(Person.find("ORDER BY name")); testRange(Person.findAll()); 
testRange(Person.find("ORDER BY name")); try { Person.findAll().singleResult(); Assertions.fail("singleResult should have thrown"); } catch (NonUniqueResultException x) { } Assertions.assertNotNull(Person.findAll().firstResult()); Assertions.assertNotNull(Person.findAll().firstResultOptional().get()); Assertions.assertEquals(7, Person.deleteAll()); testUpdate(); Person person1 = new Person(); person1.name = "testFLush1"; person1.uniqueName = "unique"; person1.persist(); Person person2 = new Person(); person2.name = "testFLush2"; person2.uniqueName = "unique"; try { person2.persistAndFlush(); Assertions.fail(); } catch (PersistenceException pe) { } return "OK"; } private void testUpdate() { makeSavedPerson("p1"); makeSavedPerson("p2"); int updateByIndexParameter = Person.update("update from Person2 p set p.name = 'stefNEW' where p.name = ?1", "stefp1"); Assertions.assertEquals(1, updateByIndexParameter, "More than one Person updated"); int updateByNamedParameter = Person.update("update from Person2 p set p.name = 'stefNEW' where p.name = :pName", Parameters.with("pName", "stefp2").map()); Assertions.assertEquals(1, updateByNamedParameter, "More than one Person updated"); Assertions.assertEquals(2, Person.deleteAll()); makeSavedPerson("p1"); makeSavedPerson("p2"); updateByIndexParameter = Person.update("from Person2 p set p.name = 'stefNEW' where p.name = ?1", "stefp1"); Assertions.assertEquals(1, updateByIndexParameter, "More than one Person updated"); updateByNamedParameter = Person.update("from Person2 p set p.name = 'stefNEW' where p.name = :pName", Parameters.with("pName", "stefp2").map()); Assertions.assertEquals(1, updateByNamedParameter, "More than one Person updated"); Assertions.assertEquals(2, Person.deleteAll()); makeSavedPerson("p1"); makeSavedPerson("p2"); updateByIndexParameter = Person.update("set name = 'stefNEW' where name = ?1", "stefp1"); Assertions.assertEquals(1, updateByIndexParameter, "More than one Person updated"); updateByNamedParameter = 
Person.update("set name = 'stefNEW' where name = :pName", Parameters.with("pName", "stefp2").map()); Assertions.assertEquals(1, updateByNamedParameter, "More than one Person updated"); Assertions.assertEquals(2, Person.deleteAll()); makeSavedPerson("p1"); makeSavedPerson("p2"); updateByIndexParameter = Person.update("name = 'stefNEW' where name = ?1", "stefp1"); Assertions.assertEquals(1, updateByIndexParameter, "More than one Person updated"); updateByNamedParameter = Person.update("name = 'stefNEW' where name = :pName", Parameters.with("pName", "stefp2").map()); Assertions.assertEquals(1, updateByNamedParameter, "More than one Person updated"); Assertions.assertEquals(2, Person.deleteAll()); makeSavedPerson("p1"); makeSavedPerson("p2"); updateByIndexParameter = Person.update("name = 'stefNEW' where name = ?1", "stefp1"); Assertions.assertEquals(1, updateByIndexParameter, "More than one Person updated"); updateByNamedParameter = Person.update("name = 'stefNEW' where name = :pName", Parameters.with("pName", "stefp2")); Assertions.assertEquals(1, updateByNamedParameter, "More than one Person updated"); Assertions.assertEquals(2, Person.deleteAll()); Assertions.assertThrows(PanacheQueryException.class, () -> Person.update(null), "PanacheQueryException should have thrown"); Assertions.assertThrows(PanacheQueryException.class, () -> Person.update(" "), "PanacheQueryException should have thrown"); } private void testUpdateDAO() { makeSavedPerson("p1"); makeSavedPerson("p2"); int updateByIndexParameter = personDao.update("update from Person2 p set p.name = 'stefNEW' where p.name = ?1", "stefp1"); Assertions.assertEquals(1, updateByIndexParameter, "More than one Person updated"); int updateByNamedParameter = personDao.update("update from Person2 p set p.name = 'stefNEW' where p.name = :pName", Parameters.with("pName", "stefp2").map()); Assertions.assertEquals(1, updateByNamedParameter, "More than one Person updated"); Assertions.assertEquals(2, personDao.deleteAll()); 
makeSavedPerson("p1"); makeSavedPerson("p2"); updateByIndexParameter = personDao.update("from Person2 p set p.name = 'stefNEW' where p.name = ?1", "stefp1"); Assertions.assertEquals(1, updateByIndexParameter, "More than one Person updated"); updateByNamedParameter = personDao.update("from Person2 p set p.name = 'stefNEW' where p.name = :pName", Parameters.with("pName", "stefp2").map()); Assertions.assertEquals(1, updateByNamedParameter, "More than one Person updated"); Assertions.assertEquals(2, personDao.deleteAll()); makeSavedPerson("p1"); makeSavedPerson("p2"); updateByIndexParameter = personDao.update("set name = 'stefNEW' where name = ?1", "stefp1"); Assertions.assertEquals(1, updateByIndexParameter, "More than one Person updated"); updateByNamedParameter = personDao.update("set name = 'stefNEW' where name = :pName", Parameters.with("pName", "stefp2").map()); Assertions.assertEquals(1, updateByNamedParameter, "More than one Person updated"); Assertions.assertEquals(2, personDao.deleteAll()); makeSavedPerson("p1"); makeSavedPerson("p2"); updateByIndexParameter = personDao.update("name = 'stefNEW' where name = ?1", "stefp1"); Assertions.assertEquals(1, updateByIndexParameter, "More than one Person updated"); updateByNamedParameter = personDao.update("name = 'stefNEW' where name = :pName", Parameters.with("pName", "stefp2").map()); Assertions.assertEquals(1, updateByNamedParameter, "More than one Person updated"); Assertions.assertEquals(2, personDao.deleteAll()); makeSavedPerson("p1"); makeSavedPerson("p2"); updateByIndexParameter = personDao.update("name = 'stefNEW' where name = ?1", "stefp1"); Assertions.assertEquals(1, updateByIndexParameter, "More than one Person updated"); updateByNamedParameter = personDao.update("name = 'stefNEW' where name = :pName", Parameters.with("pName", "stefp2")); Assertions.assertEquals(1, updateByNamedParameter, "More than one Person updated"); Assertions.assertEquals(2, personDao.deleteAll()); 
Assertions.assertThrows(PanacheQueryException.class, () -> personDao.update(null), "PanacheQueryException should have thrown"); Assertions.assertThrows(PanacheQueryException.class, () -> personDao.update(" "), "PanacheQueryException should have thrown"); } private void testSorting() { Person person1 = new Person(); person1.name = "stef"; person1.status = Status.LIVING; person1.persist(); Person person2 = new Person(); person2.name = "stef"; person2.status = Status.DECEASED; person2.persist(); Person person3 = new Person(); person3.name = "emmanuel"; person3.status = Status.LIVING; person3.persist(); Sort sort1 = Sort.by("name", "status"); List<Person> order1 = Arrays.asList(person3, person1, person2); List<Person> list = Person.findAll(sort1).list(); Assertions.assertEquals(order1, list); list = Person.listAll(sort1); Assertions.assertEquals(order1, list); list = Person.<Person> streamAll(sort1).collect(Collectors.toList()); Assertions.assertEquals(order1, list); Sort sort2 = Sort.descending("name", "status"); List<Person> order2 = Arrays.asList(person2, person1); list = Person.find("name", sort2, "stef").list(); Assertions.assertEquals(order2, list); list = Person.list("name", sort2, "stef"); Assertions.assertEquals(order2, list); list = Person.<Person> stream("name", sort2, "stef").collect(Collectors.toList()); Assertions.assertEquals(order2, list); list = Person.find("name = :name", sort2, Parameters.with("name", "stef").map()).list(); Assertions.assertEquals(order2, list); list = Person.list("name = :name", sort2, Parameters.with("name", "stef").map()); Assertions.assertEquals(order2, list); list = Person.<Person> stream("name = :name", sort2, Parameters.with("name", "stef").map()) .collect(Collectors.toList()); Assertions.assertEquals(order2, list); list = Person.find("name = :name", sort2, Parameters.with("name", "stef")).list(); Assertions.assertEquals(order2, list); list = Person.list("name = :name", sort2, Parameters.with("name", "stef")); 
Assertions.assertEquals(order2, list); list = Person.<Person> stream("name = :name", sort2, Parameters.with("name", "stef")).collect(Collectors.toList()); Assertions.assertEquals(order2, list); Assertions.assertEquals(3, Person.deleteAll()); } private Person makeSavedPerson(String suffix) { Person person = new Person(); person.name = "stef" + suffix; person.status = Status.LIVING; person.address = new Address("stef street"); person.address.persist(); person.persist(); return person; } private Person makeSavedPerson() { Person person = makeSavedPerson(""); Dog dog = new Dog("octave", "dalmatian"); dog.owner = person; person.dogs.add(dog); dog.persist(); return person; } private void testPersist(PersistTest persistTest) { Person person1 = new Person(); person1.name = "stef1"; Person person2 = new Person(); person2.name = "stef2"; Assertions.assertFalse(person1.isPersistent()); Assertions.assertFalse(person2.isPersistent()); switch (persistTest) { case Iterable: Person.persist(Arrays.asList(person1, person2)); break; case Stream: Person.persist(Stream.of(person1, person2)); break; case Variadic: Person.persist(person1, person2); break; } Assertions.assertTrue(person1.isPersistent()); Assertions.assertTrue(person2.isPersistent()); } @Inject PersonRepository personDao; @Inject DogDao dogDao; @Inject AddressDao addressDao; @GET @Path("model-dao") @Transactional public String testModelDao() { List<Person> persons = personDao.findAll().list(); Assertions.assertEquals(0, persons.size()); Stream<Person> personStream = personDao.findAll().stream(); Assertions.assertEquals(0, personStream.count()); try { personDao.findAll().singleResult(); Assertions.fail("singleResult should have thrown"); } catch (NoResultException x) { } Assertions.assertFalse(personDao.findAll().singleResultOptional().isPresent()); Assertions.assertNull(personDao.findAll().firstResult()); Assertions.assertFalse(personDao.findAll().firstResultOptional().isPresent()); Person person = makeSavedPersonDao(); 
Assertions.assertNotNull(person.id); Assertions.assertEquals(1, personDao.count()); Assertions.assertEquals(1, personDao.count("name = ?1", "stef")); Assertions.assertEquals(1, personDao.count("name = :name", Parameters.with("name", "stef").map())); Assertions.assertEquals(1, personDao.count("name = :name", Parameters.with("name", "stef"))); Assertions.assertEquals(1, personDao.count("name", "stef")); Assertions.assertEquals(1, dogDao.count()); Assertions.assertEquals(1, person.dogs.size()); persons = personDao.findAll().list(); Assertions.assertEquals(1, persons.size()); Assertions.assertEquals(person, persons.get(0)); persons = personDao.listAll(); Assertions.assertEquals(1, persons.size()); Assertions.assertEquals(person, persons.get(0)); personStream = personDao.findAll().stream(); Assertions.assertEquals(persons, personStream.collect(Collectors.toList())); personStream = personDao.streamAll(); Assertions.assertEquals(persons, personStream.collect(Collectors.toList())); Assertions.assertEquals(person, personDao.findAll().firstResult()); Assertions.assertEquals(person, personDao.findAll().singleResult()); Assertions.assertEquals(person, personDao.findAll().singleResultOptional().get()); persons = personDao.find("name = ?1", "stef").list(); Assertions.assertEquals(1, persons.size()); Assertions.assertEquals(person, persons.get(0)); persons = personDao.find("name = ?1", "stef").withLock(LockModeType.PESSIMISTIC_READ).list(); Assertions.assertEquals(1, persons.size()); Assertions.assertEquals(person, persons.get(0)); persons = personDao.list("name = ?1", "stef"); Assertions.assertEquals(1, persons.size()); Assertions.assertEquals(person, persons.get(0)); persons = personDao.find("name = :name", Parameters.with("name", "stef").map()).list(); Assertions.assertEquals(1, persons.size()); Assertions.assertEquals(person, persons.get(0)); persons = personDao.find("name = :name", Parameters.with("name", "stef")).list(); Assertions.assertEquals(1, persons.size()); 
Assertions.assertEquals(person, persons.get(0)); persons = personDao.list("name = :name", Parameters.with("name", "stef").map()); Assertions.assertEquals(1, persons.size()); Assertions.assertEquals(person, persons.get(0)); persons = personDao.list("name = :name", Parameters.with("name", "stef")); Assertions.assertEquals(1, persons.size()); Assertions.assertEquals(person, persons.get(0)); persons = personDao.find("name", "stef").list(); Assertions.assertEquals(1, persons.size()); Assertions.assertEquals(person, persons.get(0)); personStream = personDao.find("name = ?1", "stef").stream(); Assertions.assertEquals(persons, personStream.collect(Collectors.toList())); personStream = personDao.stream("name = ?1", "stef"); Assertions.assertEquals(persons, personStream.collect(Collectors.toList())); personStream = personDao.stream("name = :name", Parameters.with("name", "stef").map()); Assertions.assertEquals(persons, personStream.collect(Collectors.toList())); personStream = personDao.stream("name = :name", Parameters.with("name", "stef")); Assertions.assertEquals(persons, personStream.collect(Collectors.toList())); personStream = personDao.find("name", "stef").stream(); Assertions.assertEquals(persons, personStream.collect(Collectors.toList())); Assertions.assertEquals(person, personDao.find("name", "stef").firstResult()); Assertions.assertEquals(person, personDao.find("name", "stef").singleResult()); Assertions.assertEquals(person, personDao.find("name", "stef").singleResultOptional().get()); Person byId = personDao.findById(person.id); Assertions.assertEquals(person, byId); byId = personDao.findByIdOptional(person.id).get(); Assertions.assertEquals(person, byId); byId = personDao.findById(person.id, LockModeType.PESSIMISTIC_READ); Assertions.assertEquals(person, byId); byId = personDao.findByIdOptional(person.id, LockModeType.PESSIMISTIC_READ).get(); Assertions.assertEquals(person, byId); personDao.delete(person); Assertions.assertEquals(0, personDao.count()); person = 
makeSavedPersonDao(); Assertions.assertEquals(1, personDao.count()); Assertions.assertEquals(0, personDao.delete("name = ?1", "emmanuel")); Assertions.assertEquals(1, dogDao.delete("owner = ?1", person)); Assertions.assertEquals(1, personDao.delete("name", "stef")); person = makeSavedPerson(); Assertions.assertEquals(1, dogDao.delete("owner = :owner", Parameters.with("owner", person).map())); Assertions.assertEquals(1, personDao.delete("name", "stef")); person = makeSavedPerson(); Assertions.assertEquals(1, dogDao.delete("owner = :owner", Parameters.with("owner", person))); Assertions.assertEquals(1, personDao.delete("name", "stef")); Assertions.assertEquals(0, personDao.deleteAll()); makeSavedPersonDao(); Assertions.assertEquals(1, dogDao.deleteAll()); Assertions.assertEquals(1, personDao.deleteAll()); testPersistDao(PersistTest.Iterable); testPersistDao(PersistTest.Stream); testPersistDao(PersistTest.Variadic); Assertions.assertEquals(6, personDao.deleteAll()); testSortingDao(); for (int i = 0; i < 7; i++) { makeSavedPersonDao(String.valueOf(i)); } testPaging(personDao.findAll()); testPaging(personDao.find("ORDER BY name")); testRange(personDao.findAll()); testRange(personDao.find("ORDER BY name")); try { personDao.findAll().singleResult(); Assertions.fail("singleResult should have thrown"); } catch (NonUniqueResultException x) { } Assertions.assertNotNull(personDao.findAll().firstResult()); Assertions.assertEquals(7, personDao.deleteAll()); testUpdateDAO(); Person person1 = new Person(); person1.name = "testFlush1"; person1.uniqueName = "unique"; personDao.persist(person1); Person person2 = new Person(); person2.name = "testFlush2"; person2.uniqueName = "unique"; try { personDao.persistAndFlush(person2); Assertions.fail(); } catch (PersistenceException pe) { } return "OK"; } private void testSortingDao() { Person person1 = new Person(); person1.name = "stef"; person1.status = Status.LIVING; personDao.persist(person1); Person person2 = new Person(); person2.name 
= "stef"; person2.status = Status.DECEASED; personDao.persist(person2); Person person3 = new Person(); person3.name = "emmanuel"; person3.status = Status.LIVING; personDao.persist(person3); Sort sort1 = Sort.by("name", "status"); List<Person> order1 = Arrays.asList(person3, person1, person2); List<Person> list = personDao.findAll(sort1).list(); Assertions.assertEquals(order1, list); list = personDao.listAll(sort1); Assertions.assertEquals(order1, list); list = personDao.streamAll(sort1).collect(Collectors.toList()); Assertions.assertEquals(order1, list); Sort sort2 = Sort.descending("name", "status"); List<Person> order2 = Arrays.asList(person2, person1); list = personDao.find("name", sort2, "stef").list(); Assertions.assertEquals(order2, list); list = personDao.list("name", sort2, "stef"); Assertions.assertEquals(order2, list); list = personDao.stream("name", sort2, "stef").collect(Collectors.toList()); Assertions.assertEquals(order2, list); list = personDao.find("name = :name", sort2, Parameters.with("name", "stef").map()).list(); Assertions.assertEquals(order2, list); list = personDao.list("name = :name", sort2, Parameters.with("name", "stef").map()); Assertions.assertEquals(order2, list); list = personDao.stream("name = :name", sort2, Parameters.with("name", "stef").map()).collect(Collectors.toList()); Assertions.assertEquals(order2, list); list = personDao.find("name = :name", sort2, Parameters.with("name", "stef")).list(); Assertions.assertEquals(order2, list); list = personDao.list("name = :name", sort2, Parameters.with("name", "stef")); Assertions.assertEquals(order2, list); list = personDao.stream("name = :name", sort2, Parameters.with("name", "stef")).collect(Collectors.toList()); Assertions.assertEquals(order2, list); Assertions.assertEquals(3, Person.deleteAll()); } enum PersistTest { Iterable, Variadic, Stream; } private void testPersistDao(PersistTest persistTest) { Person person1 = new Person(); person1.name = "stef1"; Person person2 = new Person(); 
person2.name = "stef2"; Assertions.assertFalse(person1.isPersistent()); Assertions.assertFalse(person2.isPersistent()); switch (persistTest) { case Iterable: personDao.persist(Arrays.asList(person1, person2)); break; case Stream: personDao.persist(Stream.of(person1, person2)); break; case Variadic: personDao.persist(person1, person2); break; } Assertions.assertTrue(person1.isPersistent()); Assertions.assertTrue(person2.isPersistent()); } private Person makeSavedPersonDao(String suffix) { Person person = new Person(); person.name = "stef" + suffix; person.status = Status.LIVING; person.address = new Address("stef street"); addressDao.persist(person.address); personDao.persist(person); return person; } private Person makeSavedPersonDao() { Person person = makeSavedPersonDao(""); Dog dog = new Dog("octave", "dalmatian"); dog.owner = person; person.dogs.add(dog); dogDao.persist(dog); return person; } private void testPaging(PanacheQuery<Person> query) { List<Person> persons = query.page(0, 3).list(); Assertions.assertEquals(3, persons.size()); Assertions.assertEquals("stef0", persons.get(0).name); Assertions.assertEquals("stef1", persons.get(1).name); Assertions.assertEquals("stef2", persons.get(2).name); persons = query.page(1, 3).list(); Assertions.assertEquals(3, persons.size()); Assertions.assertEquals("stef3", persons.get(0).name); Assertions.assertEquals("stef4", persons.get(1).name); Assertions.assertEquals("stef5", persons.get(2).name); persons = query.page(2, 3).list(); Assertions.assertEquals(1, persons.size()); Assertions.assertEquals("stef6", persons.get(0).name); persons = query.page(2, 4).list(); Assertions.assertEquals(0, persons.size()); Page page = new Page(3); persons = query.page(page).list(); Assertions.assertEquals(3, persons.size()); Assertions.assertEquals("stef0", persons.get(0).name); Assertions.assertEquals("stef1", persons.get(1).name); Assertions.assertEquals("stef2", persons.get(2).name); page = page.next(); persons = 
query.page(page).list(); Assertions.assertEquals(3, persons.size()); Assertions.assertEquals("stef3", persons.get(0).name); Assertions.assertEquals("stef4", persons.get(1).name); Assertions.assertEquals("stef5", persons.get(2).name); page = page.next(); persons = query.page(page).list(); Assertions.assertEquals(1, persons.size()); Assertions.assertEquals("stef6", persons.get(0).name); page = page.next(); persons = query.page(page).list(); Assertions.assertEquals(0, persons.size()); page = new Page(3); persons = query.page(page).list(); Assertions.assertEquals(3, persons.size()); Assertions.assertEquals("stef0", persons.get(0).name); Assertions.assertEquals("stef1", persons.get(1).name); Assertions.assertEquals("stef2", persons.get(2).name); Assertions.assertTrue(query.hasNextPage()); Assertions.assertFalse(query.hasPreviousPage()); persons = query.nextPage().list(); Assertions.assertEquals(1, query.page().index); Assertions.assertEquals(3, query.page().size); Assertions.assertEquals(3, persons.size()); Assertions.assertEquals("stef3", persons.get(0).name); Assertions.assertEquals("stef4", persons.get(1).name); Assertions.assertEquals("stef5", persons.get(2).name); Assertions.assertTrue(query.hasNextPage()); Assertions.assertTrue(query.hasPreviousPage()); persons = query.nextPage().list(); Assertions.assertEquals(1, persons.size()); Assertions.assertEquals("stef6", persons.get(0).name); Assertions.assertFalse(query.hasNextPage()); Assertions.assertTrue(query.hasPreviousPage()); persons = query.nextPage().list(); Assertions.assertEquals(0, persons.size()); Assertions.assertEquals(7, query.count()); Assertions.assertEquals(3, query.pageCount()); persons = query.page(0, 3).range(0, 1).list(); Assertions.assertEquals(2, persons.size()); Assertions.assertEquals("stef0", persons.get(0).name); Assertions.assertEquals("stef1", persons.get(1).name); } @GET @Path("accessors") public String testAccessors() throws NoSuchMethodException, SecurityException { 
checkMethod(AccessorEntity.class, "getString", String.class); checkMethod(AccessorEntity.class, "isBool", boolean.class); checkMethod(AccessorEntity.class, "getC", char.class); checkMethod(AccessorEntity.class, "getS", short.class); checkMethod(AccessorEntity.class, "getI", int.class); checkMethod(AccessorEntity.class, "getL", long.class); checkMethod(AccessorEntity.class, "getF", float.class); checkMethod(AccessorEntity.class, "getD", double.class); checkMethod(AccessorEntity.class, "getT", Object.class); checkMethod(AccessorEntity.class, "getT2", Object.class); checkMethod(AccessorEntity.class, "setString", void.class, String.class); checkMethod(AccessorEntity.class, "setBool", void.class, boolean.class); checkMethod(AccessorEntity.class, "setC", void.class, char.class); checkMethod(AccessorEntity.class, "setS", void.class, short.class); checkMethod(AccessorEntity.class, "setI", void.class, int.class); checkMethod(AccessorEntity.class, "setL", void.class, long.class); checkMethod(AccessorEntity.class, "setF", void.class, float.class); checkMethod(AccessorEntity.class, "setD", void.class, double.class); checkMethod(AccessorEntity.class, "setT", void.class, Object.class); checkMethod(AccessorEntity.class, "setT2", void.class, Object.class); try { checkMethod(AccessorEntity.class, "getTrans2", Object.class); Assertions.fail("transient field should have no getter: trans2"); } catch (NoSuchMethodException x) { } try { checkMethod(AccessorEntity.class, "setTrans2", void.class, Object.class); Assertions.fail("transient field should have no setter: trans2"); } catch (NoSuchMethodException x) { } AccessorEntity entity = new AccessorEntity(); @SuppressWarnings("unused") byte b = entity.b; Assertions.assertEquals(1, entity.getBCalls); entity.i = 2; Assertions.assertEquals(1, entity.setICalls); Object trans = entity.trans; Assertions.assertEquals(0, entity.getTransCalls); entity.trans = trans; Assertions.assertEquals(0, entity.setTransCalls); entity.method(); 
Assertions.assertEquals(2, entity.getBCalls); Assertions.assertEquals(2, entity.setICalls); return "OK"; } private void checkMethod(Class<?> klass, String name, Class<?> returnType, Class<?>... params) throws NoSuchMethodException, SecurityException { Method method = klass.getMethod(name, params); Assertions.assertEquals(returnType, method.getReturnType()); } @GET @Path("model1") @Transactional public String testModel1() { Assertions.assertEquals(0, Person.count()); Person person = makeSavedPerson(); SelfDirtinessTracker trackingPerson = (SelfDirtinessTracker) person; String[] dirtyAttributes = trackingPerson.$$_hibernate_getDirtyAttributes(); Assertions.assertEquals(0, dirtyAttributes.length); person.name = "1"; dirtyAttributes = trackingPerson.$$_hibernate_getDirtyAttributes(); Assertions.assertEquals(1, dirtyAttributes.length); Assertions.assertEquals(1, Person.count()); return "OK"; } @GET @Path("model2") @Transactional public String testModel2() { Assertions.assertEquals(1, Person.count()); Person person = Person.findAll().firstResult(); Assertions.assertEquals("1", person.name); person.name = "2"; return "OK"; } @GET @Path("model3") @Transactional public String testModel3() { Assertions.assertEquals(1, Person.count()); Person person = Person.findAll().firstResult(); Assertions.assertEquals("2", person.name); Dog.deleteAll(); Person.deleteAll(); Address.deleteAll(); Assertions.assertEquals(0, Person.count()); return "OK"; } @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML }) @GET @Path("ignored-properties") public Person ignoredProperties() throws NoSuchMethodException, SecurityException { Person.class.getMethod("$$_hibernate_read_id"); Person.class.getMethod("$$_hibernate_read_name"); try { Person.class.getMethod("$$_hibernate_read_persistent"); Assertions.fail(); } catch (NoSuchMethodException e) { } Person person = new Person(); person.id = 666l; person.name = "Eddie"; person.status = Status.DECEASED; return person; } @Inject 
Bug5274EntityRepository bug5274EntityRepository; @GET @Path("5274") @Transactional public String testBug5274() { bug5274EntityRepository.count(); return "OK"; } @Inject Bug5885EntityRepository bug5885EntityRepository; @GET @Path("5885") @Transactional public String testBug5885() { bug5885EntityRepository.findById(1L); return "OK"; } @GET @Path("testJaxbAnnotationTransfer") public String testJaxbAnnotationTransfer() throws Exception { Method m = JAXBEntity.class.getMethod("getNamedAnnotatedProp"); XmlAttribute anno = m.getAnnotation(XmlAttribute.class); assertNotNull(anno); assertEquals("Named", anno.name()); assertNull(m.getAnnotation(XmlTransient.class)); m = JAXBEntity.class.getMethod("getDefaultAnnotatedProp"); anno = m.getAnnotation(XmlAttribute.class); assertNotNull(anno); assertEquals(" assertNull(m.getAnnotation(XmlTransient.class)); m = JAXBEntity.class.getMethod("getUnAnnotatedProp"); assertNull(m.getAnnotation(XmlAttribute.class)); assertNull(m.getAnnotation(XmlTransient.class)); m = JAXBEntity.class.getMethod("getTransientProp"); assertNull(m.getAnnotation(XmlAttribute.class)); assertNotNull(m.getAnnotation(XmlTransient.class)); m = JAXBEntity.class.getMethod("getArrayAnnotatedProp"); assertNull(m.getAnnotation(XmlTransient.class)); XmlElements elementsAnno = m.getAnnotation(XmlElements.class); assertNotNull(elementsAnno); assertNotNull(elementsAnno.value()); assertEquals(2, elementsAnno.value().length); assertEquals("array1", elementsAnno.value()[0].name()); assertEquals("array2", elementsAnno.value()[1].name()); ensureFieldSanitized("namedAnnotatedProp"); ensureFieldSanitized("transientProp"); ensureFieldSanitized("defaultAnnotatedProp"); ensureFieldSanitized("unAnnotatedProp"); ensureFieldSanitized("arrayAnnotatedProp"); return "OK"; } private void ensureFieldSanitized(String fieldName) throws Exception { Field f = JAXBEntity.class.getField(fieldName); assertNull(f.getAnnotation(XmlAttribute.class)); assertNotNull(f.getAnnotation(XmlTransient.class)); 
} @GET @Path("7721") @Transactional public String testBug7721() { Bug7721Entity entity = new Bug7721Entity(); entity.persist(); entity.delete(); return "OK"; } }
I am a little confused, should the concept consistent no matter it is a shared nothing mode or shared data mode. so what's the following three concepts mapping to filestore? * volume id -> uuid * svKey -> ?? * svName -> ??
// Class-level setup (expected to run under @BeforeClass): boots a minimal StarRocks cluster,
// creates database `test` with a range-partitioned shared-nothing table `tbl1`, then mocks a
// StarOSAgent (file-path allocation, shard-group/shard creation, primary compute-node lookup)
// so a shared-data "lake" table can be created without a real StarOS backend.
// NOTE(review): `builder.setFullPath("s3:` below is a truncated string literal — this excerpt
// is garbled at that point; the full S3 path argument is missing from this view.
public static void beforeClass() throws Exception { UtFrameUtils.createMinStarRocksCluster(); connectContext = UtFrameUtils.createDefaultCtx(); String createDbStmtStr = "create database test;"; String createTableStr = "create table test.tbl1(d1 date, k1 int, k2 bigint) duplicate key(d1, k1) " + "PARTITION BY RANGE(d1) (PARTITION p20210201 VALUES [('2021-02-01'), ('2021-02-02'))," + "PARTITION p20210202 VALUES [('2021-02-02'), ('2021-02-03'))," + "PARTITION p20210203 VALUES [('2021-02-03'), ('2021-02-04'))) distributed by hash(k1) " + "buckets 1 properties('replication_num' = '1');"; createDb(createDbStmtStr); createTable(createTableStr); StarOSAgent agent = new StarOSAgent(); FilePathInfo.Builder builder = FilePathInfo.newBuilder(); FileStoreInfo.Builder fsBuilder = builder.getFsInfoBuilder(); S3FileStoreInfo.Builder s3FsBuilder = fsBuilder.getS3FsInfoBuilder(); s3FsBuilder.setBucket("test-bucket"); s3FsBuilder.setRegion("test-region"); s3FsBuilder.setCredential(AwsCredentialInfo.newBuilder() .setDefaultCredential(AwsDefaultCredentialInfo.newBuilder().build())); S3FileStoreInfo s3FsInfo = s3FsBuilder.build(); fsBuilder.setFsType(FileStoreType.S3); fsBuilder.setFsKey("test-bucket"); fsBuilder.setFsName("test-fsname"); fsBuilder.setS3FsInfo(s3FsInfo); FileStoreInfo fsInfo = fsBuilder.build(); builder.setFsInfo(fsInfo); builder.setFullPath("s3: FilePathInfo pathInfo = builder.build(); new Expectations(agent) { { agent.allocateFilePath(anyString, anyLong); result = pathInfo; agent.createShardGroup(anyLong, anyLong, anyLong); result = GlobalStateMgr.getCurrentState().getNextId(); agent.createShards(anyInt, pathInfo, (FileCacheInfo) any, anyLong, (Map<String, String>) any); returns(Lists.newArrayList(10001L, 10002L, 10003L), Lists.newArrayList(10004L, 10005L, 10006L), Lists.newArrayList(10007L, 10008L, 10009L)); agent.getPrimaryComputeNodeIdByShard(anyLong, anyLong); result = GlobalStateMgr.getCurrentSystemInfo().getBackendIds(true).get(0); } }; new MockUp<RunMode>() { 
// Force shared-data run mode and stub the storage-volume lookup to return the mocked S3
// file-store info, then create the lake table used by the lake-partition tests.
@Mock public RunMode getCurrentRunMode() { return RunMode.SHARED_DATA; } }; new MockUp<SharedNothingStorageVolumeMgr>() { @Mock public StorageVolume getStorageVolumeByName(String svKey) throws AnalysisException { return StorageVolume.fromFileStoreInfo(fsInfo); } }; Deencapsulation.setField(GlobalStateMgr.getCurrentState(), "starOSAgent", agent); String createLakeTableStr = "create table test.lake_table(k1 date, k2 int, k3 smallint, v1 varchar(2048), " + "v2 datetime default '2014-02-04 15:36:00')" + " duplicate key(k1, k2, k3)" + " PARTITION BY RANGE(k1, k2, k3)" + " (PARTITION p1 VALUES [(\"2014-01-01\", \"10\", \"200\"), (\"2014-01-01\", \"20\", \"300\"))," + " PARTITION p2 VALUES [(\"2014-06-01\", \"100\", \"200\"), (\"2014-07-01\", \"100\", \"300\")))" + " DISTRIBUTED BY HASH(k2) BUCKETS 3" + " PROPERTIES ( \"enable_storage_cache\" = \"true\", \"storage_cache_ttl\" = \"3600\")"; createTable(createLakeTableStr); }
public StorageVolume getStorageVolumeByName(String svKey) throws AnalysisException {
// Class-level setup (expected to run under @BeforeClass): boots a minimal StarRocks cluster,
// creates database `test` with a range-partitioned shared-nothing table `tbl1`, then mocks a
// StarOSAgent (file-path allocation, shard-group/shard creation, primary compute-node lookup)
// so a shared-data "lake" table can be created without a real StarOS backend.
// NOTE(review): `builder.setFullPath("s3:` below is a truncated string literal — this excerpt
// is garbled at that point; the full S3 path argument is missing from this view.
public static void beforeClass() throws Exception { UtFrameUtils.createMinStarRocksCluster(); connectContext = UtFrameUtils.createDefaultCtx(); String createDbStmtStr = "create database test;"; String createTableStr = "create table test.tbl1(d1 date, k1 int, k2 bigint) duplicate key(d1, k1) " + "PARTITION BY RANGE(d1) (PARTITION p20210201 VALUES [('2021-02-01'), ('2021-02-02'))," + "PARTITION p20210202 VALUES [('2021-02-02'), ('2021-02-03'))," + "PARTITION p20210203 VALUES [('2021-02-03'), ('2021-02-04'))) distributed by hash(k1) " + "buckets 1 properties('replication_num' = '1');"; createDb(createDbStmtStr); createTable(createTableStr); StarOSAgent agent = new StarOSAgent(); FilePathInfo.Builder builder = FilePathInfo.newBuilder(); FileStoreInfo.Builder fsBuilder = builder.getFsInfoBuilder(); S3FileStoreInfo.Builder s3FsBuilder = fsBuilder.getS3FsInfoBuilder(); s3FsBuilder.setBucket("test-bucket"); s3FsBuilder.setRegion("test-region"); s3FsBuilder.setCredential(AwsCredentialInfo.newBuilder() .setDefaultCredential(AwsDefaultCredentialInfo.newBuilder().build())); S3FileStoreInfo s3FsInfo = s3FsBuilder.build(); fsBuilder.setFsType(FileStoreType.S3); fsBuilder.setFsKey("test-bucket"); fsBuilder.setFsName("test-fsname"); fsBuilder.setS3FsInfo(s3FsInfo); FileStoreInfo fsInfo = fsBuilder.build(); builder.setFsInfo(fsInfo); builder.setFullPath("s3: FilePathInfo pathInfo = builder.build(); new Expectations(agent) { { agent.allocateFilePath(anyString, anyLong); result = pathInfo; agent.createShardGroup(anyLong, anyLong, anyLong); result = GlobalStateMgr.getCurrentState().getNextId(); agent.createShards(anyInt, pathInfo, (FileCacheInfo) any, anyLong, (Map<String, String>) any); returns(Lists.newArrayList(10001L, 10002L, 10003L), Lists.newArrayList(10004L, 10005L, 10006L), Lists.newArrayList(10007L, 10008L, 10009L)); agent.getPrimaryComputeNodeIdByShard(anyLong, anyLong); result = GlobalStateMgr.getCurrentSystemInfo().getBackendIds(true).get(0); } }; new MockUp<RunMode>() { 
// Force shared-data run mode and stub the storage-volume lookup to return the mocked S3
// file-store info, then create the lake table used by the lake-partition tests.
// NOTE(review): this revision names the mock's parameter `svName` (the earlier copy used
// `svKey`) — presumably to match the production signature of
// SharedNothingStorageVolumeMgr.getStorageVolumeByName; confirm against that class.
@Mock public RunMode getCurrentRunMode() { return RunMode.SHARED_DATA; } }; new MockUp<SharedNothingStorageVolumeMgr>() { @Mock public StorageVolume getStorageVolumeByName(String svName) throws AnalysisException { return StorageVolume.fromFileStoreInfo(fsInfo); } }; Deencapsulation.setField(GlobalStateMgr.getCurrentState(), "starOSAgent", agent); String createLakeTableStr = "create table test.lake_table(k1 date, k2 int, k3 smallint, v1 varchar(2048), " + "v2 datetime default '2014-02-04 15:36:00')" + " duplicate key(k1, k2, k3)" + " PARTITION BY RANGE(k1, k2, k3)" + " (PARTITION p1 VALUES [(\"2014-01-01\", \"10\", \"200\"), (\"2014-01-01\", \"20\", \"300\"))," + " PARTITION p2 VALUES [(\"2014-06-01\", \"100\", \"200\"), (\"2014-07-01\", \"100\", \"300\")))" + " DISTRIBUTED BY HASH(k2) BUCKETS 3" + " PROPERTIES ( \"enable_storage_cache\" = \"true\", \"storage_cache_ttl\" = \"3600\")"; createTable(createLakeTableStr); }
// Unit tests for ALTER TABLE ... DROP PARTITION and RECOVER PARTITION on a shared-nothing
// table (tbl1) and a shared-data "lake" table (lake_table), both created in the class setup:
//  - a normal drop keeps tablet replicas around and RECOVER PARTITION restores the partition;
//  - DROP ... FORCE removes replicas immediately and a subsequent RECOVER fails (DdlException);
//  - dropPartitionAndReserveTablet drops the partition but keeps its tablet replicas.
// NOTE(review): the tests mutate shared table state (e.g. both lake tests operate on
// partition p1, the FORCE test relying on the earlier test having recovered it), so
// execution order matters.
class DropPartitionTest { private static ConnectContext connectContext; @BeforeClass private static void createDb(String sql) throws Exception { CreateDbStmt createDbStmt = (CreateDbStmt) UtFrameUtils.parseStmtWithNewParser(sql, connectContext); GlobalStateMgr.getCurrentState().getMetadata().createDb(createDbStmt.getFullDbName()); } private static void createTable(String sql) throws Exception { CreateTableStmt createTableStmt = (CreateTableStmt) UtFrameUtils.parseStmtWithNewParser(sql, connectContext); GlobalStateMgr.getCurrentState().createTable(createTableStmt); } private static void dropPartition(String sql) throws Exception { AlterTableStmt alterTableStmt = (AlterTableStmt) UtFrameUtils.parseStmtWithNewParser(sql, connectContext); GlobalStateMgr.getCurrentState().alterTable(alterTableStmt); } @Test public void testNormalDropPartition() throws Exception { Database db = GlobalStateMgr.getCurrentState().getDb("test"); OlapTable table = (OlapTable) db.getTable("tbl1"); Partition partition = table.getPartition("p20210201"); long tabletId = partition.getBaseIndex().getTablets().get(0).getId(); String dropPartitionSql = " alter table test.tbl1 drop partition p20210201;"; dropPartition(dropPartitionSql); List<Replica> replicaList = GlobalStateMgr.getCurrentState().getTabletInvertedIndex().getReplicasByTabletId(tabletId); partition = table.getPartition("p20210201"); Assert.assertEquals(1, replicaList.size()); Assert.assertNull(partition); String recoverPartitionSql = "recover partition p20210201 from test.tbl1"; RecoverPartitionStmt recoverPartitionStmt = (RecoverPartitionStmt) UtFrameUtils.parseStmtWithNewParser(recoverPartitionSql, connectContext); GlobalStateMgr.getCurrentState().recoverPartition(recoverPartitionStmt); partition = table.getPartition("p20210201"); Assert.assertNotNull(partition); Assert.assertEquals("p20210201", partition.getName()); } @Test public void testForceDropPartition() throws Exception { Database db = 
GlobalStateMgr.getCurrentState().getDb("test"); OlapTable table = (OlapTable) db.getTable("tbl1"); Partition partition = table.getPartition("p20210202"); long tabletId = partition.getBaseIndex().getTablets().get(0).getId(); String dropPartitionSql = " alter table test.tbl1 drop partition p20210202 force;"; dropPartition(dropPartitionSql); List<Replica> replicaList = GlobalStateMgr.getCurrentState().getTabletInvertedIndex().getReplicasByTabletId(tabletId); partition = table.getPartition("p20210202"); Assert.assertTrue(replicaList.isEmpty()); Assert.assertNull(partition); String recoverPartitionSql = "recover partition p20210202 from test.tbl1"; RecoverPartitionStmt recoverPartitionStmt = (RecoverPartitionStmt) UtFrameUtils.parseStmtWithNewParser(recoverPartitionSql, connectContext); ExceptionChecker.expectThrowsWithMsg(DdlException.class, "No partition named p20210202 in table tbl1", () -> GlobalStateMgr.getCurrentState().recoverPartition(recoverPartitionStmt)); } @Test public void testDropPartitionAndReserveTablets() throws Exception { Database db = GlobalStateMgr.getCurrentState().getDb("test"); OlapTable table = (OlapTable) db.getTable("tbl1"); Partition partition = table.getPartition("p20210203"); long tabletId = partition.getBaseIndex().getTablets().get(0).getId(); table.dropPartitionAndReserveTablet("p20210203"); List<Replica> replicaList = GlobalStateMgr.getCurrentState().getTabletInvertedIndex().getReplicasByTabletId(tabletId); partition = table.getPartition("p20210203"); Assert.assertEquals(1, replicaList.size()); Assert.assertNull(partition); } @Test public void testNormalDropLakePartition() throws Exception { Database db = GlobalStateMgr.getCurrentState().getDb("test"); OlapTable table = (OlapTable) db.getTable("lake_table"); Partition partition = table.getPartition("p1"); long tabletId = partition.getBaseIndex().getTablets().get(0).getId(); String dropPartitionSql = " alter table test.lake_table drop partition p1;"; dropPartition(dropPartitionSql); 
List<Replica> replicaList = GlobalStateMgr.getCurrentState().getTabletInvertedIndex().getReplicasByTabletId(tabletId); partition = table.getPartition("p1"); Assert.assertNull(partition); String recoverPartitionSql = "recover partition p1 from test.lake_table"; RecoverPartitionStmt recoverPartitionStmt = (RecoverPartitionStmt) UtFrameUtils.parseStmtWithNewParser(recoverPartitionSql, connectContext); GlobalStateMgr.getCurrentState().recoverPartition(recoverPartitionStmt); partition = table.getPartition("p1"); Assert.assertNotNull(partition); Assert.assertEquals("p1", partition.getName()); } @Test public void testForceDropLakePartition() throws Exception { Database db = GlobalStateMgr.getCurrentState().getDb("test"); OlapTable table = (OlapTable) db.getTable("lake_table"); Partition partition = table.getPartition("p1"); long tabletId = partition.getBaseIndex().getTablets().get(0).getId(); String dropPartitionSql = " alter table test.lake_table drop partition p1 force;"; dropPartition(dropPartitionSql); List<Replica> replicaList = GlobalStateMgr.getCurrentState().getTabletInvertedIndex().getReplicasByTabletId(tabletId); partition = table.getPartition("p1"); Assert.assertTrue(replicaList.isEmpty()); Assert.assertNull(partition); String recoverPartitionSql = "recover partition p1 from test.lake_table"; RecoverPartitionStmt recoverPartitionStmt = (RecoverPartitionStmt) UtFrameUtils.parseStmtWithNewParser(recoverPartitionSql, connectContext); ExceptionChecker.expectThrowsWithMsg(DdlException.class, "No partition named p1 in table lake_table", () -> GlobalStateMgr.getCurrentState().recoverPartition(recoverPartitionStmt)); } }
// Unit tests for ALTER TABLE ... DROP PARTITION and RECOVER PARTITION on a shared-nothing
// table (tbl1) and a shared-data "lake" table (lake_table), both created in the class setup.
// NOTE(review): this appears to be a verbatim duplicate of an earlier copy of
// DropPartitionTest in this file (likely a before/after dump artifact) — confirm whether
// both copies are intentional.
class DropPartitionTest { private static ConnectContext connectContext; @BeforeClass private static void createDb(String sql) throws Exception { CreateDbStmt createDbStmt = (CreateDbStmt) UtFrameUtils.parseStmtWithNewParser(sql, connectContext); GlobalStateMgr.getCurrentState().getMetadata().createDb(createDbStmt.getFullDbName()); } private static void createTable(String sql) throws Exception { CreateTableStmt createTableStmt = (CreateTableStmt) UtFrameUtils.parseStmtWithNewParser(sql, connectContext); GlobalStateMgr.getCurrentState().createTable(createTableStmt); } private static void dropPartition(String sql) throws Exception { AlterTableStmt alterTableStmt = (AlterTableStmt) UtFrameUtils.parseStmtWithNewParser(sql, connectContext); GlobalStateMgr.getCurrentState().alterTable(alterTableStmt); } @Test public void testNormalDropPartition() throws Exception { Database db = GlobalStateMgr.getCurrentState().getDb("test"); OlapTable table = (OlapTable) db.getTable("tbl1"); Partition partition = table.getPartition("p20210201"); long tabletId = partition.getBaseIndex().getTablets().get(0).getId(); String dropPartitionSql = " alter table test.tbl1 drop partition p20210201;"; dropPartition(dropPartitionSql); List<Replica> replicaList = GlobalStateMgr.getCurrentState().getTabletInvertedIndex().getReplicasByTabletId(tabletId); partition = table.getPartition("p20210201"); Assert.assertEquals(1, replicaList.size()); Assert.assertNull(partition); String recoverPartitionSql = "recover partition p20210201 from test.tbl1"; RecoverPartitionStmt recoverPartitionStmt = (RecoverPartitionStmt) UtFrameUtils.parseStmtWithNewParser(recoverPartitionSql, connectContext); GlobalStateMgr.getCurrentState().recoverPartition(recoverPartitionStmt); partition = table.getPartition("p20210201"); Assert.assertNotNull(partition); Assert.assertEquals("p20210201", partition.getName()); } @Test public void testForceDropPartition() throws Exception { Database db = 
GlobalStateMgr.getCurrentState().getDb("test"); OlapTable table = (OlapTable) db.getTable("tbl1"); Partition partition = table.getPartition("p20210202"); long tabletId = partition.getBaseIndex().getTablets().get(0).getId(); String dropPartitionSql = " alter table test.tbl1 drop partition p20210202 force;"; dropPartition(dropPartitionSql); List<Replica> replicaList = GlobalStateMgr.getCurrentState().getTabletInvertedIndex().getReplicasByTabletId(tabletId); partition = table.getPartition("p20210202"); Assert.assertTrue(replicaList.isEmpty()); Assert.assertNull(partition); String recoverPartitionSql = "recover partition p20210202 from test.tbl1"; RecoverPartitionStmt recoverPartitionStmt = (RecoverPartitionStmt) UtFrameUtils.parseStmtWithNewParser(recoverPartitionSql, connectContext); ExceptionChecker.expectThrowsWithMsg(DdlException.class, "No partition named p20210202 in table tbl1", () -> GlobalStateMgr.getCurrentState().recoverPartition(recoverPartitionStmt)); } @Test public void testDropPartitionAndReserveTablets() throws Exception { Database db = GlobalStateMgr.getCurrentState().getDb("test"); OlapTable table = (OlapTable) db.getTable("tbl1"); Partition partition = table.getPartition("p20210203"); long tabletId = partition.getBaseIndex().getTablets().get(0).getId(); table.dropPartitionAndReserveTablet("p20210203"); List<Replica> replicaList = GlobalStateMgr.getCurrentState().getTabletInvertedIndex().getReplicasByTabletId(tabletId); partition = table.getPartition("p20210203"); Assert.assertEquals(1, replicaList.size()); Assert.assertNull(partition); } @Test public void testNormalDropLakePartition() throws Exception { Database db = GlobalStateMgr.getCurrentState().getDb("test"); OlapTable table = (OlapTable) db.getTable("lake_table"); Partition partition = table.getPartition("p1"); long tabletId = partition.getBaseIndex().getTablets().get(0).getId(); String dropPartitionSql = " alter table test.lake_table drop partition p1;"; dropPartition(dropPartitionSql); 
List<Replica> replicaList = GlobalStateMgr.getCurrentState().getTabletInvertedIndex().getReplicasByTabletId(tabletId); partition = table.getPartition("p1"); Assert.assertNull(partition); String recoverPartitionSql = "recover partition p1 from test.lake_table"; RecoverPartitionStmt recoverPartitionStmt = (RecoverPartitionStmt) UtFrameUtils.parseStmtWithNewParser(recoverPartitionSql, connectContext); GlobalStateMgr.getCurrentState().recoverPartition(recoverPartitionStmt); partition = table.getPartition("p1"); Assert.assertNotNull(partition); Assert.assertEquals("p1", partition.getName()); } @Test public void testForceDropLakePartition() throws Exception { Database db = GlobalStateMgr.getCurrentState().getDb("test"); OlapTable table = (OlapTable) db.getTable("lake_table"); Partition partition = table.getPartition("p1"); long tabletId = partition.getBaseIndex().getTablets().get(0).getId(); String dropPartitionSql = " alter table test.lake_table drop partition p1 force;"; dropPartition(dropPartitionSql); List<Replica> replicaList = GlobalStateMgr.getCurrentState().getTabletInvertedIndex().getReplicasByTabletId(tabletId); partition = table.getPartition("p1"); Assert.assertTrue(replicaList.isEmpty()); Assert.assertNull(partition); String recoverPartitionSql = "recover partition p1 from test.lake_table"; RecoverPartitionStmt recoverPartitionStmt = (RecoverPartitionStmt) UtFrameUtils.parseStmtWithNewParser(recoverPartitionSql, connectContext); ExceptionChecker.expectThrowsWithMsg(DdlException.class, "No partition named p1 in table lake_table", () -> GlobalStateMgr.getCurrentState().recoverPartition(recoverPartitionStmt)); } }
I agree. I think we can change the error msg to something like: ``` data cannot be inserted into table with empty partition... Use `SHOW PARTITIONS FROM tbl` to see the current partitions of this table. ``` And also change the msg in error code `ERR_EMPTY_PARTITION_IN_TABLE`
/**
 * Collects the ids of the partitions this load job will write to.
 *
 * <p>Uses the partition ids declared by the first file group if any; otherwise falls back to
 * all partitions of the target table.
 *
 * @return the target partition ids (never empty)
 * @throws LoadException if the table has no partitions at all, so no data could be loaded
 */
private List<Long> getAllPartitionIds() throws LoadException, MetaNotFoundException {
    Set<Long> partitionIds = Sets.newHashSet();
    for (BrokerFileGroup brokerFileGroup : fileGroups) {
        if (brokerFileGroup.getPartitionIds() != null) {
            partitionIds.addAll(brokerFileGroup.getPartitionIds());
        }
        // Only the first file group is consulted — presumably all file groups of one load
        // job target the same partitions; TODO confirm.
        break;
    }
    if (partitionIds.isEmpty()) {
        for (Partition partition : table.getPartitions()) {
            partitionIds.add(partition.getId());
        }
    }
    if (partitionIds.isEmpty()) {
        // Fixed typo ("emtpy") and made the user-facing message actionable.
        throw new LoadException("data cannot be inserted into table with empty partition. "
                + "Use `SHOW PARTITIONS FROM " + table.getName()
                + "` to see the current partitions of this table.");
    }
    return Lists.newArrayList(partitionIds);
}
throw new LoadException("data cannot be inserted into table with emtpy partition. " +
/**
 * Collects the ids of the partitions this load job will write to.
 *
 * <p>Uses the partition ids declared by the first file group if any; otherwise falls back to
 * all partitions of the target table.
 *
 * @return the target partition ids (never empty)
 * @throws LoadException if the table has no partitions at all, so no data could be loaded
 */
private List<Long> getAllPartitionIds() throws LoadException, MetaNotFoundException {
    Set<Long> partitionIds = Sets.newHashSet();
    for (BrokerFileGroup brokerFileGroup : fileGroups) {
        if (brokerFileGroup.getPartitionIds() != null) {
            partitionIds.addAll(brokerFileGroup.getPartitionIds());
        }
        // Only the first file group is consulted — presumably all file groups of one load
        // job target the same partitions; TODO confirm.
        break;
    }
    if (partitionIds.isEmpty()) {
        for (Partition partition : table.getPartitions()) {
            partitionIds.add(partition.getId());
        }
    }
    if (partitionIds.isEmpty()) {
        // Fixed grammar in the user-facing message ("currently partitions" -> "current
        // partitions") and dropped the trailing space.
        throw new LoadException("data cannot be inserted into table with empty partition. "
                + "Use `SHOW PARTITIONS FROM " + table.getName()
                + "` to see the current partitions of this table.");
    }
    return Lists.newArrayList(partitionIds);
}
// Plans the execution plan for one broker-load loading task: a single PlanFragment in which a
// BrokerScanNode reads the broker files and feeds an OlapTableSink over the target table's
// partitions. UDFs are explicitly disallowed (setUDFAllowed(false)) because the user context
// needed for permission checks is no longer available here — see the inline TODO.
class LoadingTaskPlanner { private static final Logger LOG = LogManager.getLogger(LoadingTaskPlanner.class); private final long loadJobId; private final long txnId; private final long dbId; private final OlapTable table; private final BrokerDesc brokerDesc; private final List<BrokerFileGroup> fileGroups; private final boolean strictMode; private final long timeoutS; private Analyzer analyzer = new Analyzer(Catalog.getCurrentCatalog(), new ConnectContext()); private DescriptorTable descTable = analyzer.getDescTbl(); private List<PlanFragment> fragments = Lists.newArrayList(); private List<ScanNode> scanNodes = Lists.newArrayList(); private int nextNodeId = 0; public LoadingTaskPlanner(Long loadJobId, long txnId, long dbId, OlapTable table, BrokerDesc brokerDesc, List<BrokerFileGroup> brokerFileGroups, boolean strictMode, String timezone, long timeoutS) { this.loadJobId = loadJobId; this.txnId = txnId; this.dbId = dbId; this.table = table; this.brokerDesc = brokerDesc; this.fileGroups = brokerFileGroups; this.strictMode = strictMode; this.analyzer.setTimezone(timezone); this.timeoutS = timeoutS; /* * TODO(cmy): UDF currently belongs to a database. Therefore, before using UDF, * we need to check whether the user has corresponding permissions on this database. * But here we have lost user information and therefore cannot check permissions. * So here we first prohibit users from using UDF in load. If necessary, improve it later. 
*/ this.analyzer.setUDFAllowed(false); } public void plan(TUniqueId loadId, List<List<TBrokerFileStatus>> fileStatusesList, int filesAdded) throws UserException { TupleDescriptor tupleDesc = descTable.createTupleDescriptor(); for (Column col : table.getFullSchema()) { SlotDescriptor slotDesc = descTable.addSlotDescriptor(tupleDesc); slotDesc.setIsMaterialized(true); slotDesc.setColumn(col); if (col.isAllowNull()) { slotDesc.setIsNullable(true); } else { slotDesc.setIsNullable(false); } } BrokerScanNode scanNode = new BrokerScanNode(new PlanNodeId(nextNodeId++), tupleDesc, "BrokerScanNode", fileStatusesList, filesAdded); scanNode.setLoadInfo(loadJobId, txnId, table, brokerDesc, fileGroups, strictMode); scanNode.init(analyzer); scanNode.finalize(analyzer); scanNodes.add(scanNode); descTable.computeMemLayout(); List<Long> partitionIds = getAllPartitionIds(); OlapTableSink olapTableSink = new OlapTableSink(table, tupleDesc, partitionIds); olapTableSink.init(loadId, txnId, dbId, timeoutS); olapTableSink.complete(); PlanFragment sinkFragment = new PlanFragment(new PlanFragmentId(0), scanNode, DataPartition.RANDOM); sinkFragment.setSink(olapTableSink); fragments.add(sinkFragment); for (PlanFragment fragment : fragments) { try { fragment.finalize(analyzer, false); } catch (NotImplementedException e) { LOG.info("Fragment finalize failed.{}", e.getMessage()); throw new UserException("Fragment finalize failed."); } } Collections.reverse(fragments); } public DescriptorTable getDescTable() { return descTable; } public List<PlanFragment> getFragments() { return fragments; } public List<ScanNode> getScanNodes() { return scanNodes; } public String getTimezone() { return analyzer.getTimezone(); } public void updateLoadId(TUniqueId loadId) { for (PlanFragment planFragment : fragments) { if (!(planFragment.getSink() instanceof OlapTableSink)) { continue; } OlapTableSink olapTableSink = (OlapTableSink) planFragment.getSink(); olapTableSink.updateLoadId(loadId); } LOG.info("update olap 
table sink's load id to {}, job: {}", DebugUtil.printId(loadId), loadJobId); } }
// Plans the execution plan for one broker-load loading task: a single PlanFragment in which a
// BrokerScanNode reads the broker files and feeds an OlapTableSink over the target table's
// partitions. UDFs are explicitly disallowed (setUDFAllowed(false)) — see the inline TODO.
// NOTE(review): this appears to be a verbatim duplicate of an earlier copy of
// LoadingTaskPlanner in this file (likely a before/after dump artifact) — confirm whether
// both copies are intentional.
class LoadingTaskPlanner { private static final Logger LOG = LogManager.getLogger(LoadingTaskPlanner.class); private final long loadJobId; private final long txnId; private final long dbId; private final OlapTable table; private final BrokerDesc brokerDesc; private final List<BrokerFileGroup> fileGroups; private final boolean strictMode; private final long timeoutS; private Analyzer analyzer = new Analyzer(Catalog.getCurrentCatalog(), new ConnectContext()); private DescriptorTable descTable = analyzer.getDescTbl(); private List<PlanFragment> fragments = Lists.newArrayList(); private List<ScanNode> scanNodes = Lists.newArrayList(); private int nextNodeId = 0; public LoadingTaskPlanner(Long loadJobId, long txnId, long dbId, OlapTable table, BrokerDesc brokerDesc, List<BrokerFileGroup> brokerFileGroups, boolean strictMode, String timezone, long timeoutS) { this.loadJobId = loadJobId; this.txnId = txnId; this.dbId = dbId; this.table = table; this.brokerDesc = brokerDesc; this.fileGroups = brokerFileGroups; this.strictMode = strictMode; this.analyzer.setTimezone(timezone); this.timeoutS = timeoutS; /* * TODO(cmy): UDF currently belongs to a database. Therefore, before using UDF, * we need to check whether the user has corresponding permissions on this database. * But here we have lost user information and therefore cannot check permissions. * So here we first prohibit users from using UDF in load. If necessary, improve it later. 
*/ this.analyzer.setUDFAllowed(false); } public void plan(TUniqueId loadId, List<List<TBrokerFileStatus>> fileStatusesList, int filesAdded) throws UserException { TupleDescriptor tupleDesc = descTable.createTupleDescriptor(); for (Column col : table.getFullSchema()) { SlotDescriptor slotDesc = descTable.addSlotDescriptor(tupleDesc); slotDesc.setIsMaterialized(true); slotDesc.setColumn(col); if (col.isAllowNull()) { slotDesc.setIsNullable(true); } else { slotDesc.setIsNullable(false); } } BrokerScanNode scanNode = new BrokerScanNode(new PlanNodeId(nextNodeId++), tupleDesc, "BrokerScanNode", fileStatusesList, filesAdded); scanNode.setLoadInfo(loadJobId, txnId, table, brokerDesc, fileGroups, strictMode); scanNode.init(analyzer); scanNode.finalize(analyzer); scanNodes.add(scanNode); descTable.computeMemLayout(); List<Long> partitionIds = getAllPartitionIds(); OlapTableSink olapTableSink = new OlapTableSink(table, tupleDesc, partitionIds); olapTableSink.init(loadId, txnId, dbId, timeoutS); olapTableSink.complete(); PlanFragment sinkFragment = new PlanFragment(new PlanFragmentId(0), scanNode, DataPartition.RANDOM); sinkFragment.setSink(olapTableSink); fragments.add(sinkFragment); for (PlanFragment fragment : fragments) { try { fragment.finalize(analyzer, false); } catch (NotImplementedException e) { LOG.info("Fragment finalize failed.{}", e.getMessage()); throw new UserException("Fragment finalize failed."); } } Collections.reverse(fragments); } public DescriptorTable getDescTable() { return descTable; } public List<PlanFragment> getFragments() { return fragments; } public List<ScanNode> getScanNodes() { return scanNodes; } public String getTimezone() { return analyzer.getTimezone(); } public void updateLoadId(TUniqueId loadId) { for (PlanFragment planFragment : fragments) { if (!(planFragment.getSink() instanceof OlapTableSink)) { continue; } OlapTableSink olapTableSink = (OlapTableSink) planFragment.getSink(); olapTableSink.updateLoadId(loadId); } LOG.info("update olap 
table sink's load id to {}, job: {}", DebugUtil.printId(loadId), loadJobId); } }
The request will likely fail (usually nothing is running on 8080), but the result is ignored, so it won't affect the test.
/**
 * Verifies that a connection attempt aborted by an interrupt is not cached by the factory:
 * a later request for the same ConnectionID must still be able to create a client.
 *
 * <p>The connect target (port 8080) will likely fail to respond, but the result is ignored,
 * so it does not affect the test.
 */
public void testInterruptsNotCached() throws Exception {
    ConnectionID connectionId =
            new ConnectionID(new InetSocketAddress(InetAddress.getLocalHost(), 8080), 0);
    // Fix: close the netty client after use (try-with-resources) instead of leaking it —
    // assumes AwaitingNettyClient/NettyClient is AutoCloseable; confirm against NettyClient.
    try (AwaitingNettyClient nettyClient = new AwaitingNettyClient()) {
        PartitionRequestClientFactory factory = new PartitionRequestClientFactory(nettyClient, 0);

        // First attempt blocks forever and is interrupted by the helper.
        nettyClient.awaitForInterrupts = true;
        connectAndInterrupt(factory, connectionId);

        // Second attempt must succeed, proving the interrupted attempt was not cached.
        nettyClient.awaitForInterrupts = false;
        factory.createPartitionRequestClient(connectionId);
    }
}
ConnectionID connectionId = new ConnectionID(new InetSocketAddress(InetAddress.getLocalHost(), 8080), 0);
/**
 * Verifies that a connection attempt aborted by an interrupt is not cached by the factory:
 * a later request for the same ConnectionID must still be able to create a client.
 */
public void testInterruptsNotCached() throws Exception {
    ConnectionID targetId =
            new ConnectionID(new InetSocketAddress(InetAddress.getLocalHost(), 8080), 0);
    try (AwaitingNettyClient client = new AwaitingNettyClient()) {
        PartitionRequestClientFactory clientFactory = new PartitionRequestClientFactory(client, 0);

        // First attempt blocks forever and is interrupted by the helper.
        client.awaitForInterrupts = true;
        connectAndInterrupt(clientFactory, targetId);

        // Second attempt must succeed, proving the interrupted attempt was not cached.
        client.awaitForInterrupts = false;
        clientFactory.createPartitionRequestClient(targetId);
    }
}
// Tests for PartitionRequestClientFactory: retry on unstable connections, propagation of a
// connect failure to subsequent requests, and concurrent client creation from multiple
// threads. Test infrastructure: UnstableNettyClient (fails the first N connects),
// FailingNettyClient (always fails), AwaitingNettyClient (optionally returns a
// never-completing future so the caller can be interrupted), plus local netty server/client
// bootstrap helpers.
// NOTE(review): `@Test` appears directly before the private helper connectAndInterrupt(...);
// JUnit 4 requires @Test methods to be public, so this looks like a stray annotation left
// over from an edit — confirm against the original source.
class PartitionRequestClientFactoryTest { private static final int SERVER_PORT = NetUtils.getAvailablePort(); @Test private void connectAndInterrupt(PartitionRequestClientFactory factory, ConnectionID connectionId) throws Exception { CompletableFuture<Void> started = new CompletableFuture<>(); CompletableFuture<Void> interrupted = new CompletableFuture<>(); Thread thread = new Thread(() -> { try { started.complete(null); factory.createPartitionRequestClient(connectionId); } catch (InterruptedException e) { interrupted.complete(null); } catch (Exception e) { interrupted.completeExceptionally(e); } }); thread.start(); started.get(); thread.interrupt(); interrupted.get(); } @Test public void testNettyClientConnectRetry() throws Exception { NettyTestUtil.NettyServerAndClient serverAndClient = createNettyServerAndClient(); UnstableNettyClient unstableNettyClient = new UnstableNettyClient(serverAndClient.client(), 2); PartitionRequestClientFactory factory = new PartitionRequestClientFactory(unstableNettyClient, 2); ConnectionID serverAddress = new ConnectionID(new InetSocketAddress(InetAddress.getLocalHost(), serverAndClient.server().getConfig().getServerPort()), 0); factory.createPartitionRequestClient(serverAddress); serverAndClient.client().shutdown(); serverAndClient.server().shutdown(); } @Test(expected = IOException.class) public void testFailureReportedToSubsequentRequests() throws Exception { PartitionRequestClientFactory factory = new PartitionRequestClientFactory(new FailingNettyClient(), 2); try { factory.createPartitionRequestClient(new ConnectionID(new InetSocketAddress(InetAddress.getLocalHost(), 8080), 0)); } catch (Exception e) { } factory.createPartitionRequestClient(new ConnectionID(new InetSocketAddress(InetAddress.getLocalHost(), 8080), 0)); } @Test(expected = IOException.class) public void testNettyClientConnectRetryFailure() throws Exception { NettyTestUtil.NettyServerAndClient serverAndClient = createNettyServerAndClient(); UnstableNettyClient 
unstableNettyClient = new UnstableNettyClient(serverAndClient.client(), 3); try { PartitionRequestClientFactory factory = new PartitionRequestClientFactory(unstableNettyClient, 2); ConnectionID serverAddress = new ConnectionID(new InetSocketAddress(InetAddress.getLocalHost(), serverAndClient.server().getConfig().getServerPort()), 0); factory.createPartitionRequestClient(serverAddress); } catch (Exception e) { throw e; } finally { serverAndClient.client().shutdown(); serverAndClient.server().shutdown(); } } @Test public void testNettyClientConnectRetryMultipleThread() throws Exception { NettyTestUtil.NettyServerAndClient serverAndClient = createNettyServerAndClient(); UnstableNettyClient unstableNettyClient = new UnstableNettyClient(serverAndClient.client(), 2); PartitionRequestClientFactory factory = new PartitionRequestClientFactory(unstableNettyClient, 2); ConnectionID serverAddress = new ConnectionID(new InetSocketAddress(InetAddress.getLocalHost(), serverAndClient.server().getConfig().getServerPort()), 0); ExecutorService threadPoolExecutor = Executors.newFixedThreadPool(10); List<Future<NettyPartitionRequestClient>> futures = new ArrayList<>(); for (int i = 0; i < 10; i++) { Future<NettyPartitionRequestClient> future = threadPoolExecutor.submit(new Callable<NettyPartitionRequestClient>() { @Override public NettyPartitionRequestClient call() { NettyPartitionRequestClient client = null; try { client = factory.createPartitionRequestClient(serverAddress); } catch (Exception e) { fail(e.getMessage()); } return client; } }); futures.add(future); } futures.forEach(runnableFuture -> { NettyPartitionRequestClient client = null; try { client = runnableFuture.get(); assertNotNull(client); } catch (Exception e) { System.out.println(e.getMessage()); fail(); } }); threadPoolExecutor.shutdown(); serverAndClient.client().shutdown(); serverAndClient.server().shutdown(); } private NettyTestUtil.NettyServerAndClient createNettyServerAndClient() throws Exception { 
NettyTestUtil.NettyServerAndClient serverAndClient = NettyTestUtil.initServerAndClient( new NettyProtocol(null, null) { @Override public ChannelHandler[] getServerChannelHandlers () { return new ChannelHandler[10]; } @Override public ChannelHandler[] getClientChannelHandlers () { return new ChannelHandler[]{mock(NetworkClientHandler.class)}; } }); return serverAndClient; } private static class UnstableNettyClient extends NettyClient { private NettyClient nettyClient; private int retry; public UnstableNettyClient(NettyClient nettyClient, int retry) { super(null); this.nettyClient = nettyClient; this.retry = retry; } @Override ChannelFuture connect(final InetSocketAddress serverSocketAddress) { if (retry > 0) { retry--; throw new ChannelException("Simulate connect failure"); } return nettyClient.connect(serverSocketAddress); } } private static class FailingNettyClient extends NettyClient { public FailingNettyClient() { super(null); } @Override ChannelFuture connect(final InetSocketAddress serverSocketAddress) { throw new ChannelException("Simulate connect failure"); } } private class AwaitingNettyClient extends NettyClient { private volatile boolean awaitForInterrupts; AwaitingNettyClient() { super(null); } @Override ChannelFuture connect(InetSocketAddress serverSocketAddress) { if (awaitForInterrupts) { return new NeverCompletingChannelFuture(); } try { return createNettyServerAndClient().client().connect(serverSocketAddress); } catch (Exception exception) { throw new RuntimeException(exception); } } } private static class CountDownLatchOnConnectHandler extends ChannelOutboundHandlerAdapter { private final CountDownLatch syncOnConnect; public CountDownLatchOnConnectHandler(CountDownLatch syncOnConnect) { this.syncOnConnect = syncOnConnect; } @Override public void connect(ChannelHandlerContext ctx, SocketAddress remoteAddress, SocketAddress localAddress, ChannelPromise promise) throws Exception { syncOnConnect.countDown(); } } private static 
// Helpers: an UncaughtExceptionHandler that records thread errors, a server/client pair
// bootstrapped on SERVER_PORT (shut down on init failure), and a ConnectionID factory.
class 
UncaughtTestExceptionHandler implements UncaughtExceptionHandler { private final List<Throwable> errors = new ArrayList<Throwable>(1); @Override public void uncaughtException(Thread t, Throwable e) { errors.add(e); } private List<Throwable> getErrors() { return errors; } } private static Tuple2<NettyServer, NettyClient> createNettyServerAndClient(NettyProtocol protocol) throws IOException { final NettyConfig config = new NettyConfig(InetAddress.getLocalHost(), SERVER_PORT, 32 * 1024, 1, new Configuration()); final NettyServer server = new NettyServer(config); final NettyClient client = new NettyClient(config); boolean success = false; try { NettyBufferPool bufferPool = new NettyBufferPool(1); server.init(protocol, bufferPool); client.init(protocol, bufferPool); success = true; } finally { if (!success) { server.shutdown(); client.shutdown(); } } return new Tuple2<NettyServer, NettyClient>(server, client); } private static ConnectionID createServerConnectionID(int connectionIndex) throws UnknownHostException { return new ConnectionID(new InetSocketAddress(InetAddress.getLocalHost(), SERVER_PORT), connectionIndex); } }
class PartitionRequestClientFactoryTest { private static final int SERVER_PORT = NetUtils.getAvailablePort(); @Test private void connectAndInterrupt(PartitionRequestClientFactory factory, ConnectionID connectionId) throws Exception { CompletableFuture<Void> started = new CompletableFuture<>(); CompletableFuture<Void> interrupted = new CompletableFuture<>(); Thread thread = new Thread(() -> { try { started.complete(null); factory.createPartitionRequestClient(connectionId); } catch (InterruptedException e) { interrupted.complete(null); } catch (Exception e) { interrupted.completeExceptionally(e); } }); thread.start(); started.get(); thread.interrupt(); interrupted.get(); } @Test public void testNettyClientConnectRetry() throws Exception { NettyTestUtil.NettyServerAndClient serverAndClient = createNettyServerAndClient(); UnstableNettyClient unstableNettyClient = new UnstableNettyClient(serverAndClient.client(), 2); PartitionRequestClientFactory factory = new PartitionRequestClientFactory(unstableNettyClient, 2); ConnectionID serverAddress = new ConnectionID(new InetSocketAddress(InetAddress.getLocalHost(), serverAndClient.server().getConfig().getServerPort()), 0); factory.createPartitionRequestClient(serverAddress); serverAndClient.client().shutdown(); serverAndClient.server().shutdown(); } @Test(expected = IOException.class) public void testFailureReportedToSubsequentRequests() throws Exception { PartitionRequestClientFactory factory = new PartitionRequestClientFactory(new FailingNettyClient(), 2); try { factory.createPartitionRequestClient(new ConnectionID(new InetSocketAddress(InetAddress.getLocalHost(), 8080), 0)); } catch (Exception e) { } factory.createPartitionRequestClient(new ConnectionID(new InetSocketAddress(InetAddress.getLocalHost(), 8080), 0)); } @Test(expected = IOException.class) public void testNettyClientConnectRetryFailure() throws Exception { NettyTestUtil.NettyServerAndClient serverAndClient = createNettyServerAndClient(); UnstableNettyClient 
unstableNettyClient = new UnstableNettyClient(serverAndClient.client(), 3); try { PartitionRequestClientFactory factory = new PartitionRequestClientFactory(unstableNettyClient, 2); ConnectionID serverAddress = new ConnectionID(new InetSocketAddress(InetAddress.getLocalHost(), serverAndClient.server().getConfig().getServerPort()), 0); factory.createPartitionRequestClient(serverAddress); } catch (Exception e) { throw e; } finally { serverAndClient.client().shutdown(); serverAndClient.server().shutdown(); } } @Test public void testNettyClientConnectRetryMultipleThread() throws Exception { NettyTestUtil.NettyServerAndClient serverAndClient = createNettyServerAndClient(); UnstableNettyClient unstableNettyClient = new UnstableNettyClient(serverAndClient.client(), 2); PartitionRequestClientFactory factory = new PartitionRequestClientFactory(unstableNettyClient, 2); ConnectionID serverAddress = new ConnectionID(new InetSocketAddress(InetAddress.getLocalHost(), serverAndClient.server().getConfig().getServerPort()), 0); ExecutorService threadPoolExecutor = Executors.newFixedThreadPool(10); List<Future<NettyPartitionRequestClient>> futures = new ArrayList<>(); for (int i = 0; i < 10; i++) { Future<NettyPartitionRequestClient> future = threadPoolExecutor.submit(new Callable<NettyPartitionRequestClient>() { @Override public NettyPartitionRequestClient call() { NettyPartitionRequestClient client = null; try { client = factory.createPartitionRequestClient(serverAddress); } catch (Exception e) { fail(e.getMessage()); } return client; } }); futures.add(future); } futures.forEach(runnableFuture -> { NettyPartitionRequestClient client = null; try { client = runnableFuture.get(); assertNotNull(client); } catch (Exception e) { System.out.println(e.getMessage()); fail(); } }); threadPoolExecutor.shutdown(); serverAndClient.client().shutdown(); serverAndClient.server().shutdown(); } private NettyTestUtil.NettyServerAndClient createNettyServerAndClient() throws Exception { 
NettyTestUtil.NettyServerAndClient serverAndClient = NettyTestUtil.initServerAndClient( new NettyProtocol(null, null) { @Override public ChannelHandler[] getServerChannelHandlers () { return new ChannelHandler[10]; } @Override public ChannelHandler[] getClientChannelHandlers () { return new ChannelHandler[]{mock(NetworkClientHandler.class)}; } }); return serverAndClient; } private static class UnstableNettyClient extends NettyClient { private NettyClient nettyClient; private int retry; public UnstableNettyClient(NettyClient nettyClient, int retry) { super(null); this.nettyClient = nettyClient; this.retry = retry; } @Override ChannelFuture connect(final InetSocketAddress serverSocketAddress) { if (retry > 0) { retry--; throw new ChannelException("Simulate connect failure"); } return nettyClient.connect(serverSocketAddress); } } private static class FailingNettyClient extends NettyClient { public FailingNettyClient() { super(null); } @Override ChannelFuture connect(final InetSocketAddress serverSocketAddress) { throw new ChannelException("Simulate connect failure"); } } private class AwaitingNettyClient extends NettyClient implements AutoCloseable { private volatile boolean awaitForInterrupts; private final NettyTestUtil.NettyServerAndClient nettyServerAndClient; AwaitingNettyClient() throws Exception { super(null); nettyServerAndClient = createNettyServerAndClient(); } @Override ChannelFuture connect(InetSocketAddress serverSocketAddress) { if (awaitForInterrupts) { return new NeverCompletingChannelFuture(); } try { return nettyServerAndClient.client().connect(serverSocketAddress); } catch (Exception exception) { throw new RuntimeException(exception); } } @Override public void close() throws Exception { if (nettyServerAndClient != null) { nettyServerAndClient.client().shutdown(); nettyServerAndClient.server().shutdown(); } } } private static class CountDownLatchOnConnectHandler extends ChannelOutboundHandlerAdapter { private final CountDownLatch syncOnConnect; 
public CountDownLatchOnConnectHandler(CountDownLatch syncOnConnect) { this.syncOnConnect = syncOnConnect; } @Override public void connect(ChannelHandlerContext ctx, SocketAddress remoteAddress, SocketAddress localAddress, ChannelPromise promise) throws Exception { syncOnConnect.countDown(); } } private static class UncaughtTestExceptionHandler implements UncaughtExceptionHandler { private final List<Throwable> errors = new ArrayList<Throwable>(1); @Override public void uncaughtException(Thread t, Throwable e) { errors.add(e); } private List<Throwable> getErrors() { return errors; } } private static Tuple2<NettyServer, NettyClient> createNettyServerAndClient(NettyProtocol protocol) throws IOException { final NettyConfig config = new NettyConfig(InetAddress.getLocalHost(), SERVER_PORT, 32 * 1024, 1, new Configuration()); final NettyServer server = new NettyServer(config); final NettyClient client = new NettyClient(config); boolean success = false; try { NettyBufferPool bufferPool = new NettyBufferPool(1); server.init(protocol, bufferPool); client.init(protocol, bufferPool); success = true; } finally { if (!success) { server.shutdown(); client.shutdown(); } } return new Tuple2<NettyServer, NettyClient>(server, client); } private static ConnectionID createServerConnectionID(int connectionIndex) throws UnknownHostException { return new ConnectionID(new InetSocketAddress(InetAddress.getLocalHost(), SERVER_PORT), connectionIndex); } }
Unfortunately, we cannot directly obtain the information about whether slot desc is nullable or not from Expr.
/**
 * Creates a non-materialized tuple descriptor for this inline view and registers it in the
 * analyzer's descriptor table. The descriptor is backed by an {@code InlineView} pseudo-table
 * whose columns mirror the view's select-list labels, in select-list order.
 *
 * @param analyzer analyzer owning the descriptor table the new tuple is created in
 * @return the newly created, non-materialized tuple descriptor
 * @throws AnalysisException if two column aliases collide (case-insensitively)
 */
public TupleDescriptor createTupleDescriptor(Analyzer analyzer) throws AnalysisException {
    int numColLabels = getColLabels().size();
    Preconditions.checkState(numColLabels > 0);
    // Case-insensitive set used to detect duplicate column aliases.
    Set<String> columnSet = Sets.newTreeSet(String.CASE_INSENSITIVE_ORDER);
    List<Column> columnList = Lists.newArrayList();
    for (int i = 0; i < numColLabels; ++i) {
        // Select item i supplies both the type and the label of column i.
        Expr selectItemExpr = queryStmt.getResultExprs().get(i);
        String colAlias = getColLabels().get(i);
        if (columnSet.contains(colAlias)) {
            throw new AnalysisException(
                    "Duplicated inline view column alias: '" + colAlias + "'" + " in inline view "
                            + "'" + getAlias() + "'");
        }
        columnSet.add(colAlias);
        // Nullability is hard-coded to true: we cannot tell from the Expr alone whether the
        // produced slot is nullable, so be conservative and mark every column nullable.
        columnList.add(new Column(colAlias, selectItemExpr.getType().getPrimitiveType(), true));
    }
    // A catalog view keeps its own name; an anonymous inline view uses the explicit alias.
    InlineView inlineView = (view != null)
            ? new InlineView(view, columnList)
            : new InlineView(getExplicitAlias(), columnList);
    TupleDescriptor result = analyzer.getDescTbl().createTupleDescriptor();
    result.setIsMaterialized(false);
    result.setTable(inlineView);
    return result;
}
columnList.add(new Column(colAlias, selectItemExpr.getType().getPrimitiveType(), true));
public TupleDescriptor createTupleDescriptor(Analyzer analyzer) throws AnalysisException { int numColLabels = getColLabels().size(); Preconditions.checkState(numColLabels > 0); Set<String> columnSet = Sets.newTreeSet(String.CASE_INSENSITIVE_ORDER); List<Column> columnList = Lists.newArrayList(); for (int i = 0; i < numColLabels; ++i) { Expr selectItemExpr = queryStmt.getResultExprs().get(i); String colAlias = getColLabels().get(i); if (columnSet.contains(colAlias)) { throw new AnalysisException( "Duplicated inline view column alias: '" + colAlias + "'" + " in inline view " + "'" + getAlias() + "'"); } columnSet.add(colAlias); columnList.add(new Column(colAlias, selectItemExpr.getType().getPrimitiveType(), true)); } InlineView inlineView = (view != null) ? new InlineView(view, columnList) : new InlineView(getExplicitAlias(), columnList); TupleDescriptor result = analyzer.getDescTbl().createTupleDescriptor(); result.setIsMaterialized(false); result.setTable(inlineView); return result; }
class InlineViewRef extends TableRef { private static final Logger LOG = LogManager.getLogger(InlineViewRef.class); private final View view; private List<String> explicitColLabels; private QueryStmt queryStmt; private Analyzer inlineViewAnalyzer; private final ArrayList<TupleId> materializedTupleIds = Lists.newArrayList(); protected final ExprSubstitutionMap sMap; protected final ExprSubstitutionMap baseTblSmap; /** * C'tor for creating inline views parsed directly from the a query string. */ public InlineViewRef(String alias, QueryStmt queryStmt) { super(null, alias); this.queryStmt = queryStmt; this.view = null; sMap = new ExprSubstitutionMap(); baseTblSmap = new ExprSubstitutionMap(); } public InlineViewRef(String alias, QueryStmt queryStmt, List<String> colLabels) { this(alias, queryStmt); explicitColLabels = Lists.newArrayList(colLabels); } /** * C'tor for creating inline views that replace a local or catalog view ref. */ public InlineViewRef(View view, TableRef origTblRef) { super(origTblRef.getName(), origTblRef.getExplicitAlias()); queryStmt = view.getQueryStmt().clone(); if (view.isLocalView()) queryStmt.reset(); this.view = view; sMap = new ExprSubstitutionMap(); baseTblSmap = new ExprSubstitutionMap(); setJoinAttrs(origTblRef); explicitColLabels = view.getColLabels(); if (hasExplicitAlias()) return; if (view.isLocalView()) { aliases_ = new String[] { view.getName() }; } else { aliases_ = new String[] { name.toString(), view.getName() }; } } protected InlineViewRef(InlineViewRef other) { super(other); queryStmt = other.queryStmt.clone(); view = other.view; inlineViewAnalyzer = other.inlineViewAnalyzer; if (other.explicitColLabels != null) { explicitColLabels = Lists.newArrayList(other.explicitColLabels); } materializedTupleIds.addAll(other.materializedTupleIds); sMap = other.sMap.clone(); baseTblSmap = other.baseTblSmap.clone(); } public List<String> getExplicitColLabels() { return explicitColLabels; } public List<String> getColLabels() { if 
(explicitColLabels != null) { return explicitColLabels; } return queryStmt.getColLabels(); } @Override public void reset() { super.reset(); queryStmt.reset(); inlineViewAnalyzer = null; materializedTupleIds.clear(); sMap.clear(); baseTblSmap.clear(); } @Override public TableRef clone() { return new InlineViewRef(this); } public void setNeedToSql(boolean needToSql) { queryStmt.setNeedToSql(needToSql); } /** * Analyzes the inline view query block in a child analyzer of 'analyzer', creates * a new tuple descriptor for the inline view and registers auxiliary eq predicates * between the slots of that descriptor and the select list exprs of the inline view; * then performs join clause analysis. */ @Override public void analyze(Analyzer analyzer) throws AnalysisException, UserException { if (isAnalyzed) { return; } if (view == null && !hasExplicitAlias()) { ErrorReport.reportAnalysisException(ErrorCode.ERR_DERIVED_MUST_HAVE_ALIAS); } inlineViewAnalyzer = new Analyzer(analyzer); queryStmt.analyze(inlineViewAnalyzer); correlatedTupleIds_.addAll(queryStmt.getCorrelatedTupleIds(inlineViewAnalyzer)); queryStmt.getMaterializedTupleIds(materializedTupleIds); if (view != null && !hasExplicitAlias() && !view.isLocalView()) { name = analyzer.getFqTableName(name); aliases_ = new String[] { name.toString(), view.getName() }; } desc = analyzer.registerTableRef(this); isAnalyzed = true; if (materializedTupleIds.isEmpty()) { Preconditions.checkState(queryStmt instanceof SelectStmt); Preconditions.checkState(((SelectStmt) queryStmt).getTableRefs().isEmpty()); desc.setIsMaterialized(true); materializedTupleIds.add(desc.getId()); } for (int i = 0; i < getColLabels().size(); ++i) { String colName = getColLabels().get(i); SlotDescriptor slotDesc = analyzer.registerColumnRef(getAliasAsName(), colName); Expr colExpr = queryStmt.getResultExprs().get(i); SlotRef slotRef = new SlotRef(slotDesc); sMap.put(slotRef, colExpr); baseTblSmap.put(slotRef, queryStmt.getBaseTblResultExprs().get(i)); if 
(createAuxPredicate(colExpr)) { analyzer.createAuxEquivPredicate(new SlotRef(slotDesc), colExpr.clone()); } } if (LOG.isDebugEnabled()) { LOG.debug("inline view " + getUniqueAlias() + " smap: " + sMap.debugString()); LOG.debug("inline view " + getUniqueAlias() + " baseTblSmap: " + baseTblSmap.debugString()); } analyzeJoin(analyzer); } /** * Checks if an auxiliary predicate should be created for an expr. Returns False if the * inline view has a SELECT stmt with analytic functions and the expr is not in the * common partition exprs of all the analytic functions computed by this inline view. */ public boolean createAuxPredicate(Expr e) { if (!(queryStmt instanceof SelectStmt) || !((SelectStmt) queryStmt).hasAnalyticInfo()) { return true; } AnalyticInfo analyticInfo = ((SelectStmt) queryStmt).getAnalyticInfo(); return analyticInfo.getCommonPartitionExprs().contains(e); } /** * Create a non-materialized tuple descriptor in descTbl for this inline view. * This method is called from the analyzer when registering this inline view. */ @Override /** * Makes each rhs expr in sMap nullable if necessary by wrapping as follows: * IF(TupleIsNull(), NULL, rhs expr) * Should be called only if this inline view is a nullable side of an outer join. * <p/> * We need to make an rhs exprs nullable if it evaluates to a non-NULL value * when all of its contained SlotRefs evaluate to NULL. 
* For example, constant exprs need to be wrapped or an expr such as * 'case slotref is null then 1 else 2 end' */ protected void makeOutputNullable(Analyzer analyzer) throws AnalysisException, UserException { try { makeOutputNullableHelper(analyzer, sMap); makeOutputNullableHelper(analyzer, baseTblSmap); } catch (Exception e) { throw new IllegalStateException(e); } } protected void makeOutputNullableHelper(Analyzer analyzer, ExprSubstitutionMap smap) throws Exception { List<SlotRef> rhsSlotRefs = Lists.newArrayList(); Expr.collectList(smap.getRhs(), SlotRef.class, rhsSlotRefs); ExprSubstitutionMap nullSMap = new ExprSubstitutionMap(); for (SlotRef rhsSlotRef : rhsSlotRefs) { nullSMap.put(rhsSlotRef.clone(), NullLiteral.create(rhsSlotRef.getType())); } for (int i = 0; i < smap.getRhs().size(); ++i) { List<Expr> params = Lists.newArrayList(); if (!requiresNullWrapping(analyzer, smap.getRhs().get(i), nullSMap)) { continue; } params.add(new TupleIsNullPredicate(materializedTupleIds)); params.add(NullLiteral.create(smap.getRhs().get(i).getType())); params.add(smap.getRhs().get(i)); Expr ifExpr = new FunctionCallExpr("if", params); ifExpr.analyze(analyzer); smap.getRhs().set(i, ifExpr); } } /** * Replaces all SloRefs in expr with a NullLiteral using nullSMap, and evaluates the * resulting constant expr. Returns true if the constant expr yields a non-NULL value, * false otherwise. 
*/ private boolean requiresNullWrapping(Analyzer analyzer, Expr expr, ExprSubstitutionMap nullSMap) throws UserException { if (expr.contains(TupleIsNullPredicate.class)) { return true; } return true; } @Override public void rewriteExprs(ExprRewriter rewriter, Analyzer analyzer) throws AnalysisException { super.rewriteExprs(rewriter, analyzer); queryStmt.rewriteExprs(rewriter); } @Override public List<TupleId> getMaterializedTupleIds() { Preconditions.checkState(isAnalyzed); Preconditions.checkState(materializedTupleIds.size() > 0); return materializedTupleIds; } public QueryStmt getViewStmt() { return queryStmt; } public void setViewStmt(QueryStmt queryStmt) { this.queryStmt = queryStmt; } public Analyzer getAnalyzer() { Preconditions.checkState(isAnalyzed); return inlineViewAnalyzer; } public ExprSubstitutionMap getSmap() { Preconditions.checkState(isAnalyzed); return sMap; } public ExprSubstitutionMap getBaseTblSmap() { Preconditions.checkState(isAnalyzed); return baseTblSmap; } @Override public String tableRefToSql() { String aliasSql = null; String alias = getExplicitAlias(); if (alias != null) aliasSql = ToSqlUtils.getIdentSql(alias); if (view != null) { return name.toSql() + (aliasSql == null ? "" : " " + aliasSql); } StringBuilder sb = new StringBuilder() .append("(") .append(queryStmt.toSql()) .append(") ") .append(aliasSql); return sb.toString(); } }
class InlineViewRef extends TableRef { private static final Logger LOG = LogManager.getLogger(InlineViewRef.class); private final View view; private List<String> explicitColLabels; private QueryStmt queryStmt; private Analyzer inlineViewAnalyzer; private final ArrayList<TupleId> materializedTupleIds = Lists.newArrayList(); protected final ExprSubstitutionMap sMap; protected final ExprSubstitutionMap baseTblSmap; /** * C'tor for creating inline views parsed directly from the a query string. */ public InlineViewRef(String alias, QueryStmt queryStmt) { super(null, alias); this.queryStmt = queryStmt; this.view = null; sMap = new ExprSubstitutionMap(); baseTblSmap = new ExprSubstitutionMap(); } public InlineViewRef(String alias, QueryStmt queryStmt, List<String> colLabels) { this(alias, queryStmt); explicitColLabels = Lists.newArrayList(colLabels); } /** * C'tor for creating inline views that replace a local or catalog view ref. */ public InlineViewRef(View view, TableRef origTblRef) { super(origTblRef.getName(), origTblRef.getExplicitAlias()); queryStmt = view.getQueryStmt().clone(); if (view.isLocalView()) queryStmt.reset(); this.view = view; sMap = new ExprSubstitutionMap(); baseTblSmap = new ExprSubstitutionMap(); setJoinAttrs(origTblRef); explicitColLabels = view.getColLabels(); if (hasExplicitAlias()) return; if (view.isLocalView()) { aliases_ = new String[] { view.getName() }; } else { aliases_ = new String[] { name.toString(), view.getName() }; } } protected InlineViewRef(InlineViewRef other) { super(other); queryStmt = other.queryStmt.clone(); view = other.view; inlineViewAnalyzer = other.inlineViewAnalyzer; if (other.explicitColLabels != null) { explicitColLabels = Lists.newArrayList(other.explicitColLabels); } materializedTupleIds.addAll(other.materializedTupleIds); sMap = other.sMap.clone(); baseTblSmap = other.baseTblSmap.clone(); } public List<String> getExplicitColLabels() { return explicitColLabels; } public List<String> getColLabels() { if 
(explicitColLabels != null) { return explicitColLabels; } return queryStmt.getColLabels(); } @Override public void reset() { super.reset(); queryStmt.reset(); inlineViewAnalyzer = null; materializedTupleIds.clear(); sMap.clear(); baseTblSmap.clear(); } @Override public TableRef clone() { return new InlineViewRef(this); } public void setNeedToSql(boolean needToSql) { queryStmt.setNeedToSql(needToSql); } /** * Analyzes the inline view query block in a child analyzer of 'analyzer', creates * a new tuple descriptor for the inline view and registers auxiliary eq predicates * between the slots of that descriptor and the select list exprs of the inline view; * then performs join clause analysis. */ @Override public void analyze(Analyzer analyzer) throws AnalysisException, UserException { if (isAnalyzed) { return; } if (view == null && !hasExplicitAlias()) { ErrorReport.reportAnalysisException(ErrorCode.ERR_DERIVED_MUST_HAVE_ALIAS); } inlineViewAnalyzer = new Analyzer(analyzer); queryStmt.analyze(inlineViewAnalyzer); correlatedTupleIds_.addAll(queryStmt.getCorrelatedTupleIds(inlineViewAnalyzer)); queryStmt.getMaterializedTupleIds(materializedTupleIds); if (view != null && !hasExplicitAlias() && !view.isLocalView()) { name = analyzer.getFqTableName(name); aliases_ = new String[] { name.toString(), view.getName() }; } desc = analyzer.registerTableRef(this); isAnalyzed = true; if (materializedTupleIds.isEmpty()) { Preconditions.checkState(queryStmt instanceof SelectStmt); Preconditions.checkState(((SelectStmt) queryStmt).getTableRefs().isEmpty()); desc.setIsMaterialized(true); materializedTupleIds.add(desc.getId()); } for (int i = 0; i < getColLabels().size(); ++i) { String colName = getColLabels().get(i); SlotDescriptor slotDesc = analyzer.registerColumnRef(getAliasAsName(), colName); Expr colExpr = queryStmt.getResultExprs().get(i); SlotRef slotRef = new SlotRef(slotDesc); sMap.put(slotRef, colExpr); baseTblSmap.put(slotRef, queryStmt.getBaseTblResultExprs().get(i)); if 
(createAuxPredicate(colExpr)) { analyzer.createAuxEquivPredicate(new SlotRef(slotDesc), colExpr.clone()); } } if (LOG.isDebugEnabled()) { LOG.debug("inline view " + getUniqueAlias() + " smap: " + sMap.debugString()); LOG.debug("inline view " + getUniqueAlias() + " baseTblSmap: " + baseTblSmap.debugString()); } analyzeJoin(analyzer); } /** * Checks if an auxiliary predicate should be created for an expr. Returns False if the * inline view has a SELECT stmt with analytic functions and the expr is not in the * common partition exprs of all the analytic functions computed by this inline view. */ public boolean createAuxPredicate(Expr e) { if (!(queryStmt instanceof SelectStmt) || !((SelectStmt) queryStmt).hasAnalyticInfo()) { return true; } AnalyticInfo analyticInfo = ((SelectStmt) queryStmt).getAnalyticInfo(); return analyticInfo.getCommonPartitionExprs().contains(e); } /** * Create a non-materialized tuple descriptor in descTbl for this inline view. * This method is called from the analyzer when registering this inline view. */ @Override /** * Makes each rhs expr in sMap nullable if necessary by wrapping as follows: * IF(TupleIsNull(), NULL, rhs expr) * Should be called only if this inline view is a nullable side of an outer join. * <p/> * We need to make an rhs exprs nullable if it evaluates to a non-NULL value * when all of its contained SlotRefs evaluate to NULL. 
* For example, constant exprs need to be wrapped or an expr such as * 'case slotref is null then 1 else 2 end' */ protected void makeOutputNullable(Analyzer analyzer) throws AnalysisException, UserException { try { makeOutputNullableHelper(analyzer, sMap); makeOutputNullableHelper(analyzer, baseTblSmap); } catch (Exception e) { throw new IllegalStateException(e); } } protected void makeOutputNullableHelper(Analyzer analyzer, ExprSubstitutionMap smap) throws Exception { List<SlotRef> rhsSlotRefs = Lists.newArrayList(); Expr.collectList(smap.getRhs(), SlotRef.class, rhsSlotRefs); ExprSubstitutionMap nullSMap = new ExprSubstitutionMap(); for (SlotRef rhsSlotRef : rhsSlotRefs) { nullSMap.put(rhsSlotRef.clone(), NullLiteral.create(rhsSlotRef.getType())); } for (int i = 0; i < smap.getRhs().size(); ++i) { List<Expr> params = Lists.newArrayList(); if (!requiresNullWrapping(analyzer, smap.getRhs().get(i), nullSMap)) { continue; } params.add(new TupleIsNullPredicate(materializedTupleIds)); params.add(NullLiteral.create(smap.getRhs().get(i).getType())); params.add(smap.getRhs().get(i)); Expr ifExpr = new FunctionCallExpr("if", params); ifExpr.analyze(analyzer); smap.getRhs().set(i, ifExpr); } } /** * Replaces all SloRefs in expr with a NullLiteral using nullSMap, and evaluates the * resulting constant expr. Returns true if the constant expr yields a non-NULL value, * false otherwise. 
*/ private boolean requiresNullWrapping(Analyzer analyzer, Expr expr, ExprSubstitutionMap nullSMap) throws UserException { if (expr.contains(TupleIsNullPredicate.class)) { return true; } return true; } @Override public void rewriteExprs(ExprRewriter rewriter, Analyzer analyzer) throws AnalysisException { super.rewriteExprs(rewriter, analyzer); queryStmt.rewriteExprs(rewriter); } @Override public List<TupleId> getMaterializedTupleIds() { Preconditions.checkState(isAnalyzed); Preconditions.checkState(materializedTupleIds.size() > 0); return materializedTupleIds; } public QueryStmt getViewStmt() { return queryStmt; } public void setViewStmt(QueryStmt queryStmt) { this.queryStmt = queryStmt; } public Analyzer getAnalyzer() { Preconditions.checkState(isAnalyzed); return inlineViewAnalyzer; } public ExprSubstitutionMap getSmap() { Preconditions.checkState(isAnalyzed); return sMap; } public ExprSubstitutionMap getBaseTblSmap() { Preconditions.checkState(isAnalyzed); return baseTblSmap; } @Override public String tableRefToSql() { String aliasSql = null; String alias = getExplicitAlias(); if (alias != null) aliasSql = ToSqlUtils.getIdentSql(alias); if (view != null) { return name.toSql() + (aliasSql == null ? "" : " " + aliasSql); } StringBuilder sb = new StringBuilder() .append("(") .append(queryStmt.toSql()) .append(") ") .append(aliasSql); return sb.toString(); } }
```suggestion // Only tests in packages are executed so that the default packages (i.e. single BAL files), which have the package name ```
/**
 * Builds a Testerina class loader for every compiled module that has recorded dependency
 * paths, then lists the test groups found across those modules.
 *
 * @param buildContext build context supplying compiled modules, cached jar paths and streams
 */
public void execute(BuildContext buildContext) {
    Path sourceRoot = buildContext.get(BuildContextField.SOURCE_ROOT);
    Map<BLangPackage, TestarinaClassLoader> loadersByModule = new HashMap<>();
    List<BLangPackage> modules = buildContext.getModules();
    for (BLangPackage module : modules) {
        PackageID pkgId = module.packageID;
        // Modules without recorded dependency paths cannot be loaded; skip them.
        if (!buildContext.moduleDependencyPathMap.containsKey(pkgId)) {
            continue;
        }
        // Prefer the test jar; fall back to the plain module jar when no test jar exists.
        Path testJar = buildContext.getTestJarPathFromTargetCache(pkgId);
        Path moduleJar = buildContext.getJarPathFromTargetCache(pkgId);
        Path selectedJar = Files.notExists(testJar) ? moduleJar : testJar;
        // Start from the module's platform libraries, then add testable-package imports.
        HashSet<Path> platformLibs = buildContext.moduleDependencyPathMap.get(pkgId).platformLibs;
        HashSet<Path> dependencyPaths = new HashSet<>(platformLibs);
        if (module.containsTestablePkg()) {
            for (BLangTestablePackage testablePkg : module.getTestablePkgs()) {
                updateDependencyJarPaths(testablePkg.symbol.imports, buildContext, dependencyPaths);
            }
        }
        loadersByModule.put(module, new TestarinaClassLoader(selectedJar, dependencyPaths));
    }
    if (loadersByModule.size() > 0) {
        TesterinaUtils.listTestGroups(sourceRoot, loadersByModule,
                buildContext.out(), buildContext.err());
    }
}
public void execute(BuildContext buildContext) { Path sourceRootPath = buildContext.get(BuildContextField.SOURCE_ROOT); Map<BLangPackage, TestarinaClassLoader> programFileMap = new HashMap<>(); List<BLangPackage> moduleBirMap = buildContext.getModules(); for (BLangPackage bLangPackage : moduleBirMap) { PackageID packageID = bLangPackage.packageID; if (!buildContext.moduleDependencyPathMap.containsKey(packageID)) { continue; } Path jarPath = buildContext.getTestJarPathFromTargetCache(packageID); Path modulejarPath = buildContext.getJarPathFromTargetCache(packageID); if (Files.notExists(jarPath)) { jarPath = modulejarPath; } HashSet<Path> moduleDependencies = buildContext.moduleDependencyPathMap.get(packageID).platformLibs; HashSet<Path> dependencyJarPaths = new HashSet<>(moduleDependencies); if (bLangPackage.containsTestablePkg()) { for (BLangTestablePackage testablePackage : bLangPackage.getTestablePkgs()) { updateDependencyJarPaths(testablePackage.symbol.imports, buildContext, dependencyJarPaths); } } TestarinaClassLoader classLoader = new TestarinaClassLoader(jarPath, dependencyJarPaths); programFileMap.put(bLangPackage, classLoader); } if (programFileMap.size() > 0) { TesterinaUtils.listTestGroups(sourceRootPath, programFileMap, buildContext.out(), buildContext.err()); } }
class ListTestGroupsTask implements Task { @Override }
class ListTestGroupsTask implements Task { @Override }
You are right — unit tests and regression tests are not processed now.
/**
 * Wraps an already-built plan in a {@code LogicalPlanAdapter} without a statement context.
 *
 * @param plan plan to adapt; must actually be a {@code LogicalPlan}, otherwise the cast
 *             below throws a {@code ClassCastException}
 * @return adapter around {@code plan} with a {@code null} statement context
 */
public static LogicalPlanAdapter of(Plan plan) {
    // No StatementContext is available here; callers needing one use the constructor directly.
    return new LogicalPlanAdapter((LogicalPlan) plan, null);
}
return new LogicalPlanAdapter((LogicalPlan) plan, null);
public static LogicalPlanAdapter of(Plan plan) { return new LogicalPlanAdapter((LogicalPlan) plan, null); }
class LogicalPlanAdapter extends StatementBase implements Queriable { private final StatementContext statementContext; private final LogicalPlan logicalPlan; private List<Expr> resultExprs; private ArrayList<String> colLabels; public LogicalPlanAdapter(LogicalPlan logicalPlan, StatementContext statementContext) { this.logicalPlan = logicalPlan; this.statementContext = statementContext; } @Override public RedirectStatus getRedirectStatus() { return RedirectStatus.NO_FORWARD; } public LogicalPlan getLogicalPlan() { return logicalPlan; } @Override public boolean hasOutFileClause() { return false; } @Override public OutFileClause getOutFileClause() { return null; } public void setResultExprs(List<Expr> resultExprs) { this.resultExprs = resultExprs; } @Override public List<Expr> getResultExprs() { return resultExprs; } public void setColLabels(ArrayList<String> colLabels) { this.colLabels = colLabels; } public ArrayList<String> getColLabels() { return colLabels; } public StatementContext getStatementContext() { return statementContext; } public String toDigest() { return ""; } }
class LogicalPlanAdapter extends StatementBase implements Queriable { private final StatementContext statementContext; private final LogicalPlan logicalPlan; private List<Expr> resultExprs; private ArrayList<String> colLabels; public LogicalPlanAdapter(LogicalPlan logicalPlan, StatementContext statementContext) { this.logicalPlan = logicalPlan; this.statementContext = statementContext; } @Override public RedirectStatus getRedirectStatus() { return RedirectStatus.NO_FORWARD; } public LogicalPlan getLogicalPlan() { return logicalPlan; } @Override public boolean hasOutFileClause() { return false; } @Override public OutFileClause getOutFileClause() { return null; } public ArrayList<String> getColLabels() { return colLabels; } @Override public List<Expr> getResultExprs() { return resultExprs; } public void setResultExprs(List<Expr> resultExprs) { this.resultExprs = resultExprs; } public void setColLabels(ArrayList<String> colLabels) { this.colLabels = colLabels; } public StatementContext getStatementContext() { return statementContext; } public String toDigest() { return ""; } }
Let's trying to using the `org.hamcrest.MatcherAssert.assertThat` in the new introduced test.
/**
 * Verifies that {@code ServiceType.classify} maps each Kubernetes service flavor to the
 * matching {@code KubernetesConfigOptions.ServiceExposedType}.
 *
 * <p>Uses Hamcrest's {@code assertThat(actual, is(expected))} style per review convention;
 * requires static imports of {@code org.hamcrest.MatcherAssert.assertThat} and
 * {@code org.hamcrest.CoreMatchers.is}.
 */
public void testServiceClassify() {
    // A plain ClusterIP service classifies as ClusterIP.
    assertThat(
            ServiceType.classify(buildExternalServiceWithClusterIP()),
            is(KubernetesConfigOptions.ServiceExposedType.ClusterIP));
    // A headless ClusterIP service (clusterIP: None) classifies as Headless_ClusterIP.
    assertThat(
            ServiceType.classify(buildExternalServiceWithHeadlessClusterIP()),
            is(KubernetesConfigOptions.ServiceExposedType.Headless_ClusterIP));
    assertThat(
            ServiceType.classify(buildExternalServiceWithNodePort()),
            is(KubernetesConfigOptions.ServiceExposedType.NodePort));
    assertThat(
            ServiceType.classify(buildExternalServiceWithLoadBalancer("", "")),
            is(KubernetesConfigOptions.ServiceExposedType.LoadBalancer));
}
Assert.assertEquals(
public void testServiceClassify() {
    // Verify that ServiceType.classify maps every external Service flavour correctly.
    final KubernetesConfigOptions.ServiceExposedType clusterIp =
            ServiceType.classify(buildExternalServiceWithClusterIP());
    assertThat(clusterIp, is(KubernetesConfigOptions.ServiceExposedType.ClusterIP));

    final KubernetesConfigOptions.ServiceExposedType headlessClusterIp =
            ServiceType.classify(buildExternalServiceWithHeadlessClusterIP());
    assertThat(headlessClusterIp, is(KubernetesConfigOptions.ServiceExposedType.Headless_ClusterIP));

    final KubernetesConfigOptions.ServiceExposedType nodePort =
            ServiceType.classify(buildExternalServiceWithNodePort());
    assertThat(nodePort, is(KubernetesConfigOptions.ServiceExposedType.NodePort));

    final KubernetesConfigOptions.ServiceExposedType loadBalancer =
            ServiceType.classify(buildExternalServiceWithLoadBalancer("", ""));
    assertThat(loadBalancer, is(KubernetesConfigOptions.ServiceExposedType.LoadBalancer));
}
class ServiceTypeTest extends KubernetesClientTestBase { @Test }
class ServiceTypeTest extends KubernetesClientTestBase { @Test }
Code coverage is actually not supported for single files — I verified this. I will remove this TODO comment.
/**
 * Extracts per-source-file line coverage from a JaCoCo {@link IBundleCoverage}
 * and records it in the {@link ModuleCoverage} singleton.
 *
 * Defect fixed: the innermost classification used an empty {@code if} branch to
 * skip non-executable lines; replaced with guard clauses, which also flattens
 * the nesting. Behavior is otherwise unchanged.
 *
 * @param bundleCoverage the analyzed coverage bundle to report on
 */
private void createReport(final IBundleCoverage bundleCoverage) {
    boolean containsSourceFiles = true;
    for (IPackageCoverage packageCoverage : bundleCoverage.getPackages()) {
        // For the default module ("."), only the unnamed package holds its sources.
        if (TesterinaConstants.DOT.equals(moduleName)) {
            containsSourceFiles = packageCoverage.getName().isEmpty();
        }
        if (!containsSourceFiles) {
            continue;
        }
        for (ISourceFileCoverage sourceFileCoverage : packageCoverage.getSourceFiles()) {
            // Package names have the shape "<org>/<module>/..."; index 1 is the module.
            String sourceFileModule = sourceFileCoverage.getPackageName().split("/")[1];
            // Only report .bal sources that are not under a tests/ directory.
            if (!sourceFileCoverage.getName().contains(BLangConstants.BLANG_SRC_FILE_SUFFIX)
                    || sourceFileCoverage.getName().contains("tests/")) {
                continue;
            }
            List<Integer> coveredLines = new ArrayList<>();
            List<Integer> missedLines = new ArrayList<>();
            for (int i = sourceFileCoverage.getFirstLine(); i <= sourceFileCoverage.getLastLine(); i++) {
                ILine line = sourceFileCoverage.getLine(i);
                // Lines with neither instructions nor branches are not executable; skip them.
                if (line.getInstructionCounter().getTotalCount() == 0
                        && line.getBranchCounter().getTotalCount() == 0) {
                    continue;
                }
                if ((line.getBranchCounter().getCoveredCount() == 0
                        && line.getBranchCounter().getMissedCount() > 0)
                        || line.getStatus() == NOT_COVERED) {
                    missedLines.add(i);
                } else if (line.getStatus() == PARTLY_COVERED || line.getStatus() == FULLY_COVERED) {
                    coveredLines.add(i);
                }
            }
            if (sourceFileModule.equals(moduleName)) {
                ModuleCoverage.getInstance().addSourceFileCoverage(sourceFileModule,
                        sourceFileCoverage.getName(), coveredLines, missedLines);
            } else {
                // Other modules' coverage is merged through their cached JSON report;
                // the cache directory uses dots where the package name has underscores.
                String jsonCachePath = this.jsonCache.toString() + "/"
                        + sourceFileCoverage.getPackageName().replace("_", ".");
                ModuleCoverage.getInstance().updateSourceFileCoverage(jsonCachePath,
                        sourceFileModule, sourceFileCoverage.getName(), coveredLines, missedLines);
            }
        }
    }
}
/**
 * Extracts per-source-file line coverage from a JaCoCo {@link IBundleCoverage}
 * and records it in the {@link ModuleCoverage} singleton.
 *
 * Defect fixed: the innermost line-classification used an empty {@code if}
 * branch to skip non-executable lines; replaced with guard clauses, which also
 * flattens the nesting. Behavior is otherwise unchanged.
 *
 * @param bundleCoverage the analyzed coverage bundle to report on
 */
private void createReport(final IBundleCoverage bundleCoverage) {
    boolean containsSourceFiles = true;
    for (IPackageCoverage packageCoverage : bundleCoverage.getPackages()) {
        // For the default module ("."), only the unnamed package holds its sources.
        if (TesterinaConstants.DOT.equals(moduleName)) {
            containsSourceFiles = packageCoverage.getName().isEmpty();
        }
        if (!containsSourceFiles) {
            continue;
        }
        for (ISourceFileCoverage sourceFileCoverage : packageCoverage.getSourceFiles()) {
            // Package names have the shape "<org>/<module>/..."; index 1 is the module.
            String sourceFileModule = sourceFileCoverage.getPackageName().split("/")[1];
            // Only report .bal sources that are not under a tests/ directory.
            if (!sourceFileCoverage.getName().contains(BLangConstants.BLANG_SRC_FILE_SUFFIX)
                    || sourceFileCoverage.getName().contains("tests/")) {
                continue;
            }
            List<Integer> coveredLines = new ArrayList<>();
            List<Integer> missedLines = new ArrayList<>();
            for (int i = sourceFileCoverage.getFirstLine(); i <= sourceFileCoverage.getLastLine(); i++) {
                ILine line = sourceFileCoverage.getLine(i);
                // Lines with neither instructions nor branches are not executable; skip them.
                if (line.getInstructionCounter().getTotalCount() == 0
                        && line.getBranchCounter().getTotalCount() == 0) {
                    continue;
                }
                if ((line.getBranchCounter().getCoveredCount() == 0
                        && line.getBranchCounter().getMissedCount() > 0)
                        || line.getStatus() == NOT_COVERED) {
                    missedLines.add(i);
                } else if (line.getStatus() == PARTLY_COVERED || line.getStatus() == FULLY_COVERED) {
                    coveredLines.add(i);
                }
            }
            if (sourceFileModule.equals(moduleName)) {
                ModuleCoverage.getInstance().addSourceFileCoverage(sourceFileModule,
                        sourceFileCoverage.getName(), coveredLines, missedLines);
            } else {
                // Other modules' coverage is merged through their cached JSON report.
                String jsonCachePath = this.jsonCache.toString()
                        + resolveSourcePackage(sourceFileCoverage.getPackageName());
                ModuleCoverage.getInstance().updateSourceFileCoverage(jsonCachePath,
                        sourceFileModule, sourceFileCoverage.getName(), coveredLines, missedLines);
            }
        }
    }
}
class per module CodeCoverageUtils.unzipCompiledSource(moduleJarPath, projectDir, orgName, moduleName, version); } catch (NoSuchFileException e) { return; }
class per module CodeCoverageUtils.unzipCompiledSource(moduleJarPath, projectDir, orgName, moduleName, version); } catch (NoSuchFileException e) { return; }
Should we add a more descriptive log message here? There is no `toString` implementation for `DefaultMultipleComponentLeaderElectionService` for now. Or is the memory address of the instance good enough? 🤔
/**
 * Shuts the service down: marks it as no longer running, drains the leadership
 * operation executor, then closes the underlying election driver. Idempotent —
 * subsequent calls return immediately.
 *
 * Defects fixed: (1) the log statement passed {@code this}, but the class has
 * no custom {@code toString()}, so it only printed an identity hash — log the
 * class name instead; (2) the driver's exception was caught, stored and
 * immediately rethrown unchanged via {@code ExceptionUtils.tryRethrowException},
 * which is equivalent to letting it propagate directly.
 *
 * @throws Exception if closing the leader election driver fails
 */
public void close() throws Exception {
    synchronized (lock) {
        if (!running) {
            return;
        }
        running = false;

        LOG.info("Closing {}.", getClass().getSimpleName());

        ExecutorUtils.gracefulShutdown(10L, TimeUnit.SECONDS, leadershipOperationExecutor);

        multipleComponentLeaderElectionDriver.close();
    }
}
LOG.info("Closing {}.", this);
/**
 * Stops the service and releases its resources: the operation executor is shut
 * down gracefully and the election driver is closed. Calling this on an
 * already-closed instance is a no-op.
 *
 * @throws Exception if closing the leader election driver fails
 */
public void close() throws Exception {
    synchronized (lock) {
        if (running) {
            running = false;
            LOG.info("Closing {}.", this.getClass().getSimpleName());
            ExecutorUtils.gracefulShutdown(10L, TimeUnit.SECONDS, leadershipOperationExecutor);
            multipleComponentLeaderElectionDriver.close();
        }
    }
}
class DefaultMultipleComponentLeaderElectionService implements MultipleComponentLeaderElectionService, MultipleComponentLeaderElectionDriver.Listener { private static final Logger LOG = LoggerFactory.getLogger(DefaultMultipleComponentLeaderElectionService.class); private final Object lock = new Object(); private final MultipleComponentLeaderElectionDriver multipleComponentLeaderElectionDriver; private final FatalErrorHandler fatalErrorHandler; @GuardedBy("lock") private final ExecutorService leadershipOperationExecutor; @GuardedBy("lock") private final Map<String, LeaderElectionEventHandler> leaderElectionEventHandlers; private boolean running = true; @Nullable @GuardedBy("lock") private UUID currentLeaderSessionId = null; @VisibleForTesting DefaultMultipleComponentLeaderElectionService( FatalErrorHandler fatalErrorHandler, String leaderContenderDescription, MultipleComponentLeaderElectionDriverFactory multipleComponentLeaderElectionDriverFactory, ExecutorService leadershipOperationExecutor) throws Exception { this.fatalErrorHandler = fatalErrorHandler; this.leadershipOperationExecutor = leadershipOperationExecutor; leaderElectionEventHandlers = new HashMap<>(); multipleComponentLeaderElectionDriver = multipleComponentLeaderElectionDriverFactory.create( leaderContenderDescription, this); } public DefaultMultipleComponentLeaderElectionService( FatalErrorHandler fatalErrorHandler, String leaderContenderDescription, MultipleComponentLeaderElectionDriverFactory multipleComponentLeaderElectionDriverFactory) throws Exception { this( fatalErrorHandler, leaderContenderDescription, multipleComponentLeaderElectionDriverFactory, java.util.concurrent.Executors.newSingleThreadExecutor( new ExecutorThreadFactory( String.format( "leadershipOperation-%s", leaderContenderDescription)))); } @Override @Override public LeaderElectionDriverFactory createDriverFactory(String leaderName) { return new MultipleComponentLeaderElectionDriverAdapterFactory(leaderName, this); } @Override 
public void publishLeaderInformation(String leaderName, LeaderInformation leaderInformation) { try { multipleComponentLeaderElectionDriver.publishLeaderInformation( leaderName, leaderInformation); } catch (Exception e) { fatalErrorHandler.onFatalError( new FlinkException( String.format( "Could not write leader information %s for leader %s.", leaderInformation, leaderName), e)); } } @Override public void registerLeaderElectionEventHandler( String componentId, LeaderElectionEventHandler leaderElectionEventHandler) { synchronized (lock) { Preconditions.checkState( !leaderElectionEventHandlers.containsKey(componentId), "Do not support duplicate LeaderElectionEventHandler registration under %s", componentId); leaderElectionEventHandlers.put(componentId, leaderElectionEventHandler); if (currentLeaderSessionId != null) { leadershipOperationExecutor.execute( () -> leaderElectionEventHandler.onGrantLeadership(currentLeaderSessionId)); } } } @Override public void unregisterLeaderElectionEventHandler(String componentId) throws Exception { final LeaderElectionEventHandler unregisteredLeaderElectionEventHandler; synchronized (lock) { unregisteredLeaderElectionEventHandler = leaderElectionEventHandlers.remove(componentId); if (unregisteredLeaderElectionEventHandler != null) { leadershipOperationExecutor.execute( unregisteredLeaderElectionEventHandler::onRevokeLeadership); } } multipleComponentLeaderElectionDriver.deleteLeaderInformation(componentId); } @Override public boolean hasLeadership(String leaderName) { synchronized (lock) { Preconditions.checkState(running); return leaderElectionEventHandlers.containsKey(leaderName) && multipleComponentLeaderElectionDriver.hasLeadership(); } } @Override public void isLeader() { final UUID newLeaderSessionId = UUID.randomUUID(); synchronized (lock) { if (!running) { return; } currentLeaderSessionId = UUID.randomUUID(); forEachLeaderElectionEventHandler( leaderElectionEventHandler -> 
leaderElectionEventHandler.onGrantLeadership(newLeaderSessionId)); } } @Override public void notLeader() { synchronized (lock) { if (!running) { return; } currentLeaderSessionId = null; forEachLeaderElectionEventHandler(LeaderElectionEventHandler::onRevokeLeadership); } } @GuardedBy("lock") private void forEachLeaderElectionEventHandler( Consumer<? super LeaderElectionEventHandler> action) { for (LeaderElectionEventHandler leaderElectionEventHandler : leaderElectionEventHandlers.values()) { leadershipOperationExecutor.execute(() -> action.accept(leaderElectionEventHandler)); } } @Override public void notifyLeaderInformationChange( String leaderName, LeaderInformation leaderInformation) { synchronized (lock) { if (!running) { return; } final LeaderElectionEventHandler leaderElectionEventHandler = leaderElectionEventHandlers.get(leaderName); if (leaderElectionEventHandler != null) { leadershipOperationExecutor.execute( () -> leaderElectionEventHandler.onLeaderInformationChange( leaderInformation)); } } } @Override public void notifyAllKnownLeaderInformation( Collection<LeaderInformationWithComponentId> leaderInformationWithComponentIds) { synchronized (lock) { if (!running) { return; } final Map<String, LeaderInformation> leaderInformationByName = leaderInformationWithComponentIds.stream() .collect( Collectors.toMap( LeaderInformationWithComponentId::getComponentId, LeaderInformationWithComponentId ::getLeaderInformation)); for (Map.Entry<String, LeaderElectionEventHandler> leaderNameLeaderElectionEventHandlerPair : leaderElectionEventHandlers.entrySet()) { final String leaderName = leaderNameLeaderElectionEventHandlerPair.getKey(); if (leaderInformationByName.containsKey(leaderName)) { leaderNameLeaderElectionEventHandlerPair .getValue() .onLeaderInformationChange(leaderInformationByName.get(leaderName)); } else { leaderNameLeaderElectionEventHandlerPair .getValue() .onLeaderInformationChange(LeaderInformation.empty()); } } } } }
class DefaultMultipleComponentLeaderElectionService implements MultipleComponentLeaderElectionService, MultipleComponentLeaderElectionDriver.Listener { private static final Logger LOG = LoggerFactory.getLogger(DefaultMultipleComponentLeaderElectionService.class); private final Object lock = new Object(); private final MultipleComponentLeaderElectionDriver multipleComponentLeaderElectionDriver; private final FatalErrorHandler fatalErrorHandler; @GuardedBy("lock") private final ExecutorService leadershipOperationExecutor; @GuardedBy("lock") private final Map<String, LeaderElectionEventHandler> leaderElectionEventHandlers; @GuardedBy("lock") private boolean running = true; @Nullable @GuardedBy("lock") private UUID currentLeaderSessionId = null; @VisibleForTesting DefaultMultipleComponentLeaderElectionService( FatalErrorHandler fatalErrorHandler, MultipleComponentLeaderElectionDriverFactory multipleComponentLeaderElectionDriverFactory, ExecutorService leadershipOperationExecutor) throws Exception { this.fatalErrorHandler = Preconditions.checkNotNull(fatalErrorHandler); this.leadershipOperationExecutor = Preconditions.checkNotNull(leadershipOperationExecutor); leaderElectionEventHandlers = new HashMap<>(); multipleComponentLeaderElectionDriver = multipleComponentLeaderElectionDriverFactory.create(this); } public DefaultMultipleComponentLeaderElectionService( FatalErrorHandler fatalErrorHandler, MultipleComponentLeaderElectionDriverFactory multipleComponentLeaderElectionDriverFactory) throws Exception { this( fatalErrorHandler, multipleComponentLeaderElectionDriverFactory, Executors.newSingleThreadExecutor( new ExecutorThreadFactory("leadershipOperationExecutor"))); } @Override @Override public LeaderElectionDriverFactory createDriverFactory(String componentId) { return new MultipleComponentLeaderElectionDriverAdapterFactory(componentId, this); } @Override public void publishLeaderInformation(String componentId, LeaderInformation leaderInformation) { try { 
multipleComponentLeaderElectionDriver.publishLeaderInformation( componentId, leaderInformation); } catch (Exception e) { fatalErrorHandler.onFatalError( new FlinkException( String.format( "Could not write leader information %s for leader %s.", leaderInformation, componentId), e)); } } @Override public void registerLeaderElectionEventHandler( String componentId, LeaderElectionEventHandler leaderElectionEventHandler) { synchronized (lock) { Preconditions.checkArgument( !leaderElectionEventHandlers.containsKey(componentId), "Do not support duplicate LeaderElectionEventHandler registration under %s", componentId); leaderElectionEventHandlers.put(componentId, leaderElectionEventHandler); if (currentLeaderSessionId != null) { final UUID leaderSessionId = currentLeaderSessionId; leadershipOperationExecutor.execute( () -> leaderElectionEventHandler.onGrantLeadership(leaderSessionId)); } } } @Override public void unregisterLeaderElectionEventHandler(String componentId) throws Exception { final LeaderElectionEventHandler unregisteredLeaderElectionEventHandler; synchronized (lock) { unregisteredLeaderElectionEventHandler = leaderElectionEventHandlers.remove(componentId); if (unregisteredLeaderElectionEventHandler != null) { leadershipOperationExecutor.execute( unregisteredLeaderElectionEventHandler::onRevokeLeadership); } else { LOG.debug( "Could not find leader election event handler for componentId {}. 
Ignoring the unregister call.", componentId); } } multipleComponentLeaderElectionDriver.deleteLeaderInformation(componentId); } @Override public boolean hasLeadership(String componentId) { synchronized (lock) { Preconditions.checkState(running); return leaderElectionEventHandlers.containsKey(componentId) && multipleComponentLeaderElectionDriver.hasLeadership(); } } @Override public void isLeader() { final UUID newLeaderSessionId = UUID.randomUUID(); synchronized (lock) { if (!running) { return; } currentLeaderSessionId = UUID.randomUUID(); forEachLeaderElectionEventHandler( leaderElectionEventHandler -> leaderElectionEventHandler.onGrantLeadership(newLeaderSessionId)); } } @Override public void notLeader() { synchronized (lock) { if (!running) { return; } currentLeaderSessionId = null; forEachLeaderElectionEventHandler(LeaderElectionEventHandler::onRevokeLeadership); } } @GuardedBy("lock") private void forEachLeaderElectionEventHandler( Consumer<? super LeaderElectionEventHandler> action) { for (LeaderElectionEventHandler leaderElectionEventHandler : leaderElectionEventHandlers.values()) { leadershipOperationExecutor.execute(() -> action.accept(leaderElectionEventHandler)); } } @Override public void notifyLeaderInformationChange( String componentId, LeaderInformation leaderInformation) { synchronized (lock) { if (!running) { return; } final LeaderElectionEventHandler leaderElectionEventHandler = leaderElectionEventHandlers.get(componentId); if (leaderElectionEventHandler != null) { leadershipOperationExecutor.execute( () -> leaderElectionEventHandler.onLeaderInformationChange( leaderInformation)); } } } @Override public void notifyAllKnownLeaderInformation( Collection<LeaderInformationWithComponentId> leaderInformationWithComponentIds) { synchronized (lock) { if (!running) { return; } final Map<String, LeaderInformation> leaderInformationByName = leaderInformationWithComponentIds.stream() .collect( Collectors.toMap( LeaderInformationWithComponentId::getComponentId, 
LeaderInformationWithComponentId ::getLeaderInformation)); for (Map.Entry<String, LeaderElectionEventHandler> leaderNameLeaderElectionEventHandlerPair : leaderElectionEventHandlers.entrySet()) { final String leaderName = leaderNameLeaderElectionEventHandlerPair.getKey(); if (leaderInformationByName.containsKey(leaderName)) { leaderNameLeaderElectionEventHandlerPair .getValue() .onLeaderInformationChange(leaderInformationByName.get(leaderName)); } else { leaderNameLeaderElectionEventHandlerPair .getValue() .onLeaderInformationChange(LeaderInformation.empty()); } } } } }