comment
stringlengths
22
3.02k
method_body
stringlengths
46
368k
target_code
stringlengths
0
181
method_body_after
stringlengths
12
368k
context_before
stringlengths
11
634k
context_after
stringlengths
11
632k
The base class' ChangeFeedStartFromInternal.populatePropertyBag is a no-op as is its baseclass JsonSerializable's populatePropertyBag method.
public void populatePropertyBag() { super.populatePropertyBag(); synchronized(this) { setProperty( this, Constants.Properties.CHANGE_FEED_START_FROM_TYPE, ChangeFeedStartFromTypes.NOW); } }
super.populatePropertyBag();
public void populatePropertyBag() { super.populatePropertyBag(); synchronized(this) { setProperty( this, Constants.Properties.CHANGE_FEED_START_FROM_TYPE, ChangeFeedStartFromTypes.NOW); } }
class ChangeFeedStartFromNowImpl extends ChangeFeedStartFromInternal { public ChangeFeedStartFromNowImpl() { super(); } @Override @Override public boolean supportsFullFidelityRetention() { return true; } @Override public void populateRequest(RxDocumentServiceRequest request) { checkNotNull(request, "Argument 'request' must not be null."); request.getHeaders().put( HttpConstants.HttpHeaders.IF_NONE_MATCH, HttpConstants.HeaderValues.IF_NONE_MATCH_ALL); } }
class ChangeFeedStartFromNowImpl extends ChangeFeedStartFromInternal { public ChangeFeedStartFromNowImpl() { super(); } @Override @Override public boolean supportsFullFidelityRetention() { return true; } @Override public void populateRequest(RxDocumentServiceRequest request) { checkNotNull(request, "Argument 'request' must not be null."); request.getHeaders().put( HttpConstants.HttpHeaders.IF_NONE_MATCH, HttpConstants.HeaderValues.IF_NONE_MATCH_ALL); } }
please update the `convertToRowData` method based on the values of `remainingPartitions`
public ChangelogMode getChangelogMode(ChangelogMode requestedMode) { if (isInsertOnly) { return ChangelogMode.insertOnly(); } else { ChangelogMode.Builder builder = ChangelogMode.newBuilder(); if (schema.getPrimaryKey().isPresent()) { for (RowKind kind : requestedMode.getContainedKinds()) { if (kind != RowKind.UPDATE_BEFORE) { builder.addContainedKind(kind); } } return builder.build(); } else { return requestedMode; } } }
builder.addContainedKind(kind);
public ChangelogMode getChangelogMode(ChangelogMode requestedMode) { if (isInsertOnly) { return ChangelogMode.insertOnly(); } else { ChangelogMode.Builder builder = ChangelogMode.newBuilder(); if (schema.getPrimaryKey().isPresent()) { for (RowKind kind : requestedMode.getContainedKinds()) { if (kind != RowKind.UPDATE_BEFORE) { builder.addContainedKind(kind); } } return builder.build(); } else { return requestedMode; } } }
class TestValuesTableSink implements DynamicTableSink { private final TableSchema schema; private final String tableName; private final boolean isInsertOnly; private final String runtimeSink; private final int expectedNum; private TestValuesTableSink( TableSchema schema, String tableName, boolean isInsertOnly, String runtimeSink, int expectedNum) { this.schema = schema; this.tableName = tableName; this.isInsertOnly = isInsertOnly; this.runtimeSink = runtimeSink; this.expectedNum = expectedNum; } @Override @Override public SinkRuntimeProvider getSinkRuntimeProvider(Context context) { DataStructureConverter converter = context.createDataStructureConverter(schema.toPhysicalRowDataType()); if (isInsertOnly) { checkArgument(expectedNum == -1, "Appending Sink doesn't support '" + SINK_EXPECTED_MESSAGES_NUM.key() + "' yet."); if (runtimeSink.equals("SinkFunction")) { return SinkFunctionProvider.of( new AppendingSinkFunction( tableName, converter)); } else if (runtimeSink.equals("OutputFormat")) { return OutputFormatProvider.of( new AppendingOutputFormat( tableName, converter)); } else { throw new IllegalArgumentException("Unsupported runtime sink class: " + runtimeSink); } } else { assert runtimeSink.equals("SinkFunction"); SinkFunction<RowData> sinkFunction; if (schema.getPrimaryKey().isPresent()) { int[] keyIndices = TableSchemaUtils.getPrimaryKeyIndices(schema); sinkFunction = new KeyedUpsertingSinkFunction( tableName, converter, keyIndices, expectedNum); } else { checkArgument(expectedNum == -1, "Retracting Sink doesn't support '" + SINK_EXPECTED_MESSAGES_NUM.key() + "' yet."); sinkFunction = new RetractingSinkFunction( tableName, converter); } return SinkFunctionProvider.of(sinkFunction); } } @Override public DynamicTableSink copy() { return new TestValuesTableSink( schema, tableName, isInsertOnly, runtimeSink, expectedNum); } @Override public String asSummaryString() { return "TestValues"; } }
class TestValuesTableSink implements DynamicTableSink { private final TableSchema schema; private final String tableName; private final boolean isInsertOnly; private final String runtimeSink; private final int expectedNum; private TestValuesTableSink( TableSchema schema, String tableName, boolean isInsertOnly, String runtimeSink, int expectedNum) { this.schema = schema; this.tableName = tableName; this.isInsertOnly = isInsertOnly; this.runtimeSink = runtimeSink; this.expectedNum = expectedNum; } @Override @Override public SinkRuntimeProvider getSinkRuntimeProvider(Context context) { DataStructureConverter converter = context.createDataStructureConverter(schema.toPhysicalRowDataType()); if (isInsertOnly) { checkArgument(expectedNum == -1, "Appending Sink doesn't support '" + SINK_EXPECTED_MESSAGES_NUM.key() + "' yet."); if (runtimeSink.equals("SinkFunction")) { return SinkFunctionProvider.of( new AppendingSinkFunction( tableName, converter)); } else if (runtimeSink.equals("OutputFormat")) { return OutputFormatProvider.of( new AppendingOutputFormat( tableName, converter)); } else { throw new IllegalArgumentException("Unsupported runtime sink class: " + runtimeSink); } } else { assert runtimeSink.equals("SinkFunction"); SinkFunction<RowData> sinkFunction; if (schema.getPrimaryKey().isPresent()) { int[] keyIndices = TableSchemaUtils.getPrimaryKeyIndices(schema); sinkFunction = new KeyedUpsertingSinkFunction( tableName, converter, keyIndices, expectedNum); } else { checkArgument(expectedNum == -1, "Retracting Sink doesn't support '" + SINK_EXPECTED_MESSAGES_NUM.key() + "' yet."); sinkFunction = new RetractingSinkFunction( tableName, converter); } return SinkFunctionProvider.of(sinkFunction); } } @Override public DynamicTableSink copy() { return new TestValuesTableSink( schema, tableName, isInsertOnly, runtimeSink, expectedNum); } @Override public String asSummaryString() { return "TestValues"; } }
In the mv rewrite case, there are only join and scan operator.
private OptExpression pushdownPredicatesForJoin(OptExpression optExpression, ScalarOperator predicate) { if (!(optExpression.getOp() instanceof LogicalJoinOperator)) { if (predicate != null) { Operator.Builder builder = OperatorBuilderFactory.build(optExpression.getOp()); builder.withOperator(optExpression.getOp()); builder.setPredicate(predicate); Operator newQueryOp = builder.build(); return OptExpression.create(newQueryOp, optExpression.getInputs()); } else { return optExpression; } } OptExpression newJoin = doPushdownPredicate(optExpression, predicate); List<OptExpression> children = Lists.newArrayList(); for (int i = 0; i < 2; i++) { if (optExpression.inputAt(i).getOp() instanceof LogicalJoinOperator) { children.add(pushdownPredicatesForJoin(optExpression.inputAt(i), null)); } else { children.add(optExpression.inputAt(i)); } } return OptExpression.create(newJoin.getOp(), children); }
if (optExpression.inputAt(i).getOp() instanceof LogicalJoinOperator) {
private OptExpression pushdownPredicatesForJoin(OptExpression optExpression, ScalarOperator predicate) { if (!(optExpression.getOp() instanceof LogicalJoinOperator)) { if (predicate != null) { Operator.Builder builder = OperatorBuilderFactory.build(optExpression.getOp()); builder.withOperator(optExpression.getOp()); builder.setPredicate(Utils.compoundAnd(predicate, optExpression.getOp().getPredicate())); Operator newQueryOp = builder.build(); return OptExpression.create(newQueryOp, optExpression.getInputs()); } else { return optExpression; } } OptExpression newJoin = doPushdownPredicate(optExpression, predicate); List<OptExpression> children = Lists.newArrayList(); for (int i = 0; i < 2; i++) { if (optExpression.inputAt(i).getOp() instanceof LogicalJoinOperator) { children.add(pushdownPredicatesForJoin(optExpression.inputAt(i), null)); } else { children.add(optExpression.inputAt(i)); } } return OptExpression.create(newJoin.getOp(), children); }
class scalar operators may generate the same rewritten conjunct. .collect(Collectors.toList()); if (rewrittenConjuncts.isEmpty()) { return null; }
class scalar operators may generate the same rewritten conjunct. .collect(Collectors.toList()); if (rewrittenConjuncts.isEmpty()) { return null; }
I think that this implies that we have to take better care of allowed lateness here. Elements that are more late than allowed lateness are dropped by any stateful dofn, this is no new behavior.
private boolean isLate(BoundedWindow window) { Instant gcTime = LateDataUtils.garbageCollectionTime(window, windowingStrategy); Instant inputWM = stepContext.timerInternals().currentInputWatermarkTime(); return gcTime.isBefore(inputWM); }
return gcTime.isBefore(inputWM);
private boolean isLate(BoundedWindow window) { Instant gcTime = LateDataUtils.garbageCollectionTime(window, windowingStrategy); Instant inputWM = stepContext.timerInternals().currentInputWatermarkTime(); return gcTime.isBefore(inputWM); }
class StatefulDoFnRunner<InputT, OutputT, W extends BoundedWindow> implements DoFnRunner<InputT, OutputT> { public static final String DROPPED_DUE_TO_LATENESS_COUNTER = "StatefulParDoDropped"; private static final String SORT_BUFFER_STATE = "sortBuffer"; private static final String SORT_BUFFER_MIN_STAMP = "sortBufferMinStamp"; private static final String SORT_FLUSH_TIMER = "__StatefulParDoSortFlushTimerId"; private static final String SORT_FLUSH_WATERMARK_HOLD = "flushWatermarkHold"; private final DoFnRunner<InputT, OutputT> doFnRunner; private final StepContext stepContext; private final WindowingStrategy<?, ?> windowingStrategy; private final Counter droppedDueToLateness = Metrics.counter(StatefulDoFnRunner.class, DROPPED_DUE_TO_LATENESS_COUNTER); private final CleanupTimer<InputT> cleanupTimer; private final StateCleaner stateCleaner; private final boolean requiresTimeSortedInput; private final Coder<BoundedWindow> windowCoder; private final StateTag<BagState<WindowedValue<InputT>>> sortBufferTag; private final StateTag<ValueState<Instant>> sortBufferMinStampTag = StateTags.makeSystemTagInternal(StateTags.value(SORT_BUFFER_MIN_STAMP, InstantCoder.of())); private final StateTag<WatermarkHoldState> watermarkHold = StateTags.watermarkStateInternal(SORT_FLUSH_WATERMARK_HOLD, TimestampCombiner.LATEST); public StatefulDoFnRunner( DoFnRunner<InputT, OutputT> doFnRunner, Coder<InputT> inputCoder, StepContext stepContext, WindowingStrategy<?, ?> windowingStrategy, CleanupTimer<InputT> cleanupTimer, StateCleaner<W> stateCleaner, boolean requiresTimeSortedInput) { this.doFnRunner = doFnRunner; this.stepContext = stepContext; this.windowingStrategy = windowingStrategy; this.cleanupTimer = cleanupTimer; this.stateCleaner = stateCleaner; this.requiresTimeSortedInput = requiresTimeSortedInput; WindowFn<?, ?> windowFn = windowingStrategy.getWindowFn(); @SuppressWarnings("unchecked") Coder<BoundedWindow> untypedCoder = (Coder<BoundedWindow>) windowFn.windowCoder(); 
this.windowCoder = untypedCoder; this.sortBufferTag = StateTags.makeSystemTagInternal( StateTags.bag(SORT_BUFFER_STATE, WindowedValue.getFullCoder(inputCoder, windowCoder))); rejectMergingWindowFn(windowFn); } private void rejectMergingWindowFn(WindowFn<?, ?> windowFn) { if (!(windowFn instanceof NonMergingWindowFn)) { throw new UnsupportedOperationException( "MergingWindowFn is not supported for stateful DoFns, WindowFn is: " + windowFn); } } public List<StateTag<?>> getSystemStateTags() { return Arrays.asList(sortBufferTag, sortBufferMinStampTag, watermarkHold); } @Override public DoFn<InputT, OutputT> getFn() { return doFnRunner.getFn(); } @Override public void startBundle() { doFnRunner.startBundle(); } @Override public void finishBundle() { doFnRunner.finishBundle(); } @Override public void processElement(WindowedValue<InputT> input) { for (WindowedValue<InputT> value : input.explodeWindows()) { BoundedWindow window = value.getWindows().iterator().next(); if (isLate(window)) { reportDroppedElement(value, window); } else if (requiresTimeSortedInput) { processElementOrdered(window, value); } else { processElementUnordered(window, value); } } } private void processElementUnordered(BoundedWindow window, WindowedValue<InputT> value) { cleanupTimer.setForWindow(value.getValue(), window); doFnRunner.processElement(value); } private void processElementOrdered(BoundedWindow window, WindowedValue<InputT> value) { StateInternals stateInternals = stepContext.stateInternals(); TimerInternals timerInternals = stepContext.timerInternals(); if (!timerInternals.currentInputWatermarkTime().isAfter(value.getTimestamp())) { StateNamespace namespace = StateNamespaces.window(windowCoder, window); BagState<WindowedValue<InputT>> sortBuffer = stateInternals.state(namespace, sortBufferTag); ValueState<Instant> minStampState = stateInternals.state(namespace, sortBufferMinStampTag); sortBuffer.add(value); Instant minStamp = MoreObjects.firstNonNull(minStampState.read(), 
BoundedWindow.TIMESTAMP_MAX_VALUE); if (value.getTimestamp().isBefore(minStamp)) { minStamp = value.getTimestamp(); minStampState.write(minStamp); setupFlushTimerAndWatermarkHold(namespace, minStamp); } } else { reportDroppedElement(value, window); } } private void reportDroppedElement(WindowedValue<InputT> value, BoundedWindow window) { droppedDueToLateness.inc(); WindowTracing.debug( "StatefulDoFnRunner.processElement: Dropping element at {}; window:{} " + "since too far behind inputWatermark:{}", value.getTimestamp(), window, stepContext.timerInternals().currentInputWatermarkTime()); } @Override public void onTimer( String timerId, BoundedWindow window, Instant timestamp, TimeDomain timeDomain) { if (timerId.equals(SORT_FLUSH_TIMER)) { onSortFlushTimer(window, stepContext.timerInternals().currentInputWatermarkTime()); } else if (cleanupTimer.isForWindow(timerId, window, timestamp, timeDomain)) { stateCleaner.clearForWindow(window); } else { if (!timeDomain.equals(TimeDomain.EVENT_TIME) && isLate(window)) { WindowTracing.debug( "StatefulDoFnRunner.onTimer: Ignoring processing-time timer at {}; window:{} " + "since window is too far behind inputWatermark:{}", timestamp, window, stepContext.timerInternals().currentInputWatermarkTime()); } else { doFnRunner.onTimer(timerId, window, timestamp, timeDomain); } } } private void onSortFlushTimer(BoundedWindow window, Instant timestamp) { StateInternals stateInternals = stepContext.stateInternals(); StateNamespace namespace = StateNamespaces.window(windowCoder, window); BagState<WindowedValue<InputT>> sortBuffer = stateInternals.state(namespace, sortBufferTag); ValueState<Instant> minStampState = stateInternals.state(namespace, sortBufferMinStampTag); List<WindowedValue<InputT>> keep = new ArrayList<>(); List<WindowedValue<InputT>> flush = new ArrayList<>(); Instant newMinStamp = BoundedWindow.TIMESTAMP_MAX_VALUE; for (WindowedValue<InputT> e : sortBuffer.read()) { if (!e.getTimestamp().isAfter(timestamp)) { flush.add(e); 
} else { keep.add(e); if (e.getTimestamp().isBefore(newMinStamp)) { newMinStamp = e.getTimestamp(); } } } flush.stream() .sorted((a, b) -> a.getTimestamp().compareTo(b.getTimestamp())) .forEachOrdered(e -> processElementUnordered(window, e)); sortBuffer.clear(); keep.forEach(sortBuffer::add); minStampState.write(newMinStamp); if (newMinStamp.isBefore(BoundedWindow.TIMESTAMP_MAX_VALUE)) { setupFlushTimerAndWatermarkHold(namespace, newMinStamp); } else { clearWatermarkHold(namespace); } } private void setupFlushTimerAndWatermarkHold(StateNamespace namespace, Instant flush) { WatermarkHoldState watermark = stepContext.stateInternals().state(namespace, watermarkHold); stepContext .timerInternals() .setTimer(namespace, SORT_FLUSH_TIMER, flush, TimeDomain.EVENT_TIME); watermark.clear(); watermark.add(flush); } private void clearWatermarkHold(StateNamespace namespace) { stepContext.stateInternals().state(namespace, watermarkHold).clear(); } /** * A cleaner for deciding when to clean state of window. * * <p>A runner might either (a) already know that it always has a timer set for the expiration * time or (b) not need a timer at all because it is a batch runner that discards state when it is * done. */ public interface CleanupTimer<InputT> { /** Set the garbage collect time of the window to timer. */ void setForWindow(InputT value, BoundedWindow window); /** Checks whether the given timer is a cleanup timer for the window. */ boolean isForWindow( String timerId, BoundedWindow window, Instant timestamp, TimeDomain timeDomain); } /** A cleaner to clean all states of the window. */ public interface StateCleaner<W extends BoundedWindow> { void clearForWindow(W window); } /** A {@link StatefulDoFnRunner.CleanupTimer} implemented via {@link TimerInternals}. 
*/ public static class TimeInternalsCleanupTimer<InputT> implements StatefulDoFnRunner.CleanupTimer<InputT> { public static final String GC_TIMER_ID = "__StatefulParDoGcTimerId"; /** * The amount of milliseconds by which to delay cleanup. We use this to ensure that state is * still available when a user timer for {@code window.maxTimestamp()} fires. */ public static final long GC_DELAY_MS = 1; private final TimerInternals timerInternals; private final WindowingStrategy<?, ?> windowingStrategy; private final Coder<BoundedWindow> windowCoder; public TimeInternalsCleanupTimer( TimerInternals timerInternals, WindowingStrategy<?, ?> windowingStrategy) { this.windowingStrategy = windowingStrategy; WindowFn<?, ?> windowFn = windowingStrategy.getWindowFn(); windowCoder = (Coder<BoundedWindow>) windowFn.windowCoder(); this.timerInternals = timerInternals; } @Override public void setForWindow(InputT input, BoundedWindow window) { Instant gcTime = LateDataUtils.garbageCollectionTime(window, windowingStrategy); gcTime = gcTime.plus(GC_DELAY_MS); timerInternals.setTimer( StateNamespaces.window(windowCoder, window), GC_TIMER_ID, gcTime, TimeDomain.EVENT_TIME); } @Override public boolean isForWindow( String timerId, BoundedWindow window, Instant timestamp, TimeDomain timeDomain) { boolean isEventTimer = timeDomain.equals(TimeDomain.EVENT_TIME); Instant gcTime = LateDataUtils.garbageCollectionTime(window, windowingStrategy); gcTime = gcTime.plus(GC_DELAY_MS); return isEventTimer && GC_TIMER_ID.equals(timerId) && gcTime.equals(timestamp); } } /** A {@link StatefulDoFnRunner.StateCleaner} implemented via {@link StateInternals}. 
*/ public static class StateInternalsStateCleaner<W extends BoundedWindow> implements StatefulDoFnRunner.StateCleaner<W> { private final DoFn<?, ?> fn; private final DoFnSignature signature; private final StateInternals stateInternals; private final Coder<W> windowCoder; public StateInternalsStateCleaner( DoFn<?, ?> fn, StateInternals stateInternals, Coder<W> windowCoder) { this.fn = fn; this.signature = DoFnSignatures.getSignature(fn.getClass()); this.stateInternals = stateInternals; this.windowCoder = windowCoder; } @Override public void clearForWindow(W window) { for (Map.Entry<String, DoFnSignature.StateDeclaration> entry : signature.stateDeclarations().entrySet()) { try { StateSpec<?> spec = (StateSpec<?>) entry.getValue().field().get(fn); State state = stateInternals.state( StateNamespaces.window(windowCoder, window), StateTags.tagForSpec(entry.getKey(), (StateSpec) spec)); state.clear(); } catch (IllegalAccessException e) { throw new RuntimeException(e); } } } } }
class StatefulDoFnRunner<InputT, OutputT, W extends BoundedWindow> implements DoFnRunner<InputT, OutputT> { public static final String DROPPED_DUE_TO_LATENESS_COUNTER = "StatefulParDoDropped"; private static final String SORT_BUFFER_STATE = "sortBuffer"; private static final String SORT_BUFFER_MIN_STAMP = "sortBufferMinStamp"; private static final String SORT_FLUSH_TIMER = "__StatefulParDoSortFlushTimerId"; private static final String SORT_FLUSH_WATERMARK_HOLD = "flushWatermarkHold"; private final DoFnRunner<InputT, OutputT> doFnRunner; private final StepContext stepContext; private final WindowingStrategy<?, ?> windowingStrategy; private final Counter droppedDueToLateness = Metrics.counter(StatefulDoFnRunner.class, DROPPED_DUE_TO_LATENESS_COUNTER); private final CleanupTimer<InputT> cleanupTimer; private final StateCleaner stateCleaner; private final boolean requiresTimeSortedInput; private final Coder<BoundedWindow> windowCoder; private final StateTag<BagState<WindowedValue<InputT>>> sortBufferTag; private final StateTag<ValueState<Instant>> sortBufferMinStampTag = StateTags.makeSystemTagInternal(StateTags.value(SORT_BUFFER_MIN_STAMP, InstantCoder.of())); private final StateTag<WatermarkHoldState> watermarkHold = StateTags.watermarkStateInternal(SORT_FLUSH_WATERMARK_HOLD, TimestampCombiner.LATEST); public StatefulDoFnRunner( DoFnRunner<InputT, OutputT> doFnRunner, Coder<InputT> inputCoder, StepContext stepContext, WindowingStrategy<?, ?> windowingStrategy, CleanupTimer<InputT> cleanupTimer, StateCleaner<W> stateCleaner, boolean requiresTimeSortedInput) { this.doFnRunner = doFnRunner; this.stepContext = stepContext; this.windowingStrategy = windowingStrategy; this.cleanupTimer = cleanupTimer; this.stateCleaner = stateCleaner; this.requiresTimeSortedInput = requiresTimeSortedInput; WindowFn<?, ?> windowFn = windowingStrategy.getWindowFn(); @SuppressWarnings("unchecked") Coder<BoundedWindow> untypedCoder = (Coder<BoundedWindow>) windowFn.windowCoder(); 
this.windowCoder = untypedCoder; this.sortBufferTag = StateTags.makeSystemTagInternal( StateTags.bag(SORT_BUFFER_STATE, WindowedValue.getFullCoder(inputCoder, windowCoder))); rejectMergingWindowFn(windowFn); } private void rejectMergingWindowFn(WindowFn<?, ?> windowFn) { if (!(windowFn instanceof NonMergingWindowFn)) { throw new UnsupportedOperationException( "MergingWindowFn is not supported for stateful DoFns, WindowFn is: " + windowFn); } } public List<StateTag<?>> getSystemStateTags() { return Arrays.asList(sortBufferTag, sortBufferMinStampTag, watermarkHold); } @Override public DoFn<InputT, OutputT> getFn() { return doFnRunner.getFn(); } @Override public void startBundle() { doFnRunner.startBundle(); } @Override public void finishBundle() { doFnRunner.finishBundle(); } @Override public void processElement(WindowedValue<InputT> input) { for (WindowedValue<InputT> value : input.explodeWindows()) { BoundedWindow window = value.getWindows().iterator().next(); if (isLate(window)) { reportDroppedElement(value, window); } else if (requiresTimeSortedInput) { processElementOrdered(window, value); } else { processElementUnordered(window, value); } } } private void processElementUnordered(BoundedWindow window, WindowedValue<InputT> value) { cleanupTimer.setForWindow(value.getValue(), window); doFnRunner.processElement(value); } private void processElementOrdered(BoundedWindow window, WindowedValue<InputT> value) { StateInternals stateInternals = stepContext.stateInternals(); TimerInternals timerInternals = stepContext.timerInternals(); Instant outputWatermark = MoreObjects.firstNonNull( timerInternals.currentOutputWatermarkTime(), BoundedWindow.TIMESTAMP_MIN_VALUE); if (!outputWatermark.isAfter( value.getTimestamp().plus(windowingStrategy.getAllowedLateness()))) { StateNamespace namespace = StateNamespaces.window(windowCoder, window); BagState<WindowedValue<InputT>> sortBuffer = stateInternals.state(namespace, sortBufferTag); ValueState<Instant> minStampState = 
stateInternals.state(namespace, sortBufferMinStampTag); sortBuffer.add(value); Instant minStamp = MoreObjects.firstNonNull(minStampState.read(), BoundedWindow.TIMESTAMP_MAX_VALUE); if (value.getTimestamp().isBefore(minStamp)) { minStamp = value.getTimestamp(); minStampState.write(minStamp); setupFlushTimerAndWatermarkHold(namespace, minStamp); } } else { reportDroppedElement(value, window); } } private void reportDroppedElement(WindowedValue<InputT> value, BoundedWindow window) { droppedDueToLateness.inc(); WindowTracing.debug( "StatefulDoFnRunner.processElement: Dropping element at {}; window:{} " + "since too far behind inputWatermark:{}", value.getTimestamp(), window, stepContext.timerInternals().currentInputWatermarkTime()); } @Override public void onTimer( String timerId, String timerFamilyId, BoundedWindow window, Instant timestamp, Instant outputTimestamp, TimeDomain timeDomain) { if (timerId.equals(SORT_FLUSH_TIMER)) { onSortFlushTimer(window, stepContext.timerInternals().currentInputWatermarkTime()); } else if (cleanupTimer.isForWindow(timerId, window, timestamp, timeDomain)) { stateCleaner.clearForWindow(window); } else { if (!timeDomain.equals(TimeDomain.EVENT_TIME) && isLate(window)) { WindowTracing.debug( "StatefulDoFnRunner.onTimer: Ignoring processing-time timer at {}; window:{} " + "since window is too far behind inputWatermark:{}", timestamp, window, stepContext.timerInternals().currentInputWatermarkTime()); } else { doFnRunner.onTimer(timerId, timerFamilyId, window, timestamp, outputTimestamp, timeDomain); } } } private void onSortFlushTimer(BoundedWindow window, Instant timestamp) { StateInternals stateInternals = stepContext.stateInternals(); StateNamespace namespace = StateNamespaces.window(windowCoder, window); BagState<WindowedValue<InputT>> sortBuffer = stateInternals.state(namespace, sortBufferTag); ValueState<Instant> minStampState = stateInternals.state(namespace, sortBufferMinStampTag); List<WindowedValue<InputT>> keep = new 
ArrayList<>(); List<WindowedValue<InputT>> flush = new ArrayList<>(); Instant newMinStamp = BoundedWindow.TIMESTAMP_MAX_VALUE; for (WindowedValue<InputT> e : sortBuffer.read()) { if (!e.getTimestamp().isAfter(timestamp)) { flush.add(e); } else { keep.add(e); if (e.getTimestamp().isBefore(newMinStamp)) { newMinStamp = e.getTimestamp(); } } } flush.stream() .sorted(Comparator.comparing(WindowedValue::getTimestamp)) .forEachOrdered(e -> processElementUnordered(window, e)); sortBuffer.clear(); keep.forEach(sortBuffer::add); minStampState.write(newMinStamp); if (newMinStamp.isBefore(BoundedWindow.TIMESTAMP_MAX_VALUE)) { setupFlushTimerAndWatermarkHold(namespace, newMinStamp); } else { clearWatermarkHold(namespace); } } private void setupFlushTimerAndWatermarkHold(StateNamespace namespace, Instant flush) { WatermarkHoldState watermark = stepContext.stateInternals().state(namespace, watermarkHold); stepContext .timerInternals() .setTimer( namespace, SORT_FLUSH_TIMER, SORT_FLUSH_TIMER, flush, flush, TimeDomain.EVENT_TIME); watermark.clear(); watermark.add(flush); } private void clearWatermarkHold(StateNamespace namespace) { stepContext.stateInternals().state(namespace, watermarkHold).clear(); } /** * A cleaner for deciding when to clean state of window. * * <p>A runner might either (a) already know that it always has a timer set for the expiration * time or (b) not need a timer at all because it is a batch runner that discards state when it is * done. */ public interface CleanupTimer<InputT> { /** Set the garbage collect time of the window to timer. */ void setForWindow(InputT value, BoundedWindow window); /** Checks whether the given timer is a cleanup timer for the window. */ boolean isForWindow( String timerId, BoundedWindow window, Instant timestamp, TimeDomain timeDomain); } /** A cleaner to clean all states of the window. 
*/ public interface StateCleaner<W extends BoundedWindow> { void clearForWindow(W window); } /** A {@link StatefulDoFnRunner.CleanupTimer} implemented via {@link TimerInternals}. */ public static class TimeInternalsCleanupTimer<InputT> implements StatefulDoFnRunner.CleanupTimer<InputT> { public static final String GC_TIMER_ID = "__StatefulParDoGcTimerId"; /** * The amount of milliseconds by which to delay cleanup. We use this to ensure that state is * still available when a user timer for {@code window.maxTimestamp()} fires. */ public static final long GC_DELAY_MS = 1; private final TimerInternals timerInternals; private final WindowingStrategy<?, ?> windowingStrategy; private final Coder<BoundedWindow> windowCoder; public TimeInternalsCleanupTimer( TimerInternals timerInternals, WindowingStrategy<?, ?> windowingStrategy) { this.windowingStrategy = windowingStrategy; WindowFn<?, ?> windowFn = windowingStrategy.getWindowFn(); windowCoder = (Coder<BoundedWindow>) windowFn.windowCoder(); this.timerInternals = timerInternals; } @Override public void setForWindow(InputT input, BoundedWindow window) { Instant gcTime = LateDataUtils.garbageCollectionTime(window, windowingStrategy); gcTime = gcTime.plus(GC_DELAY_MS); timerInternals.setTimer( StateNamespaces.window(windowCoder, window), GC_TIMER_ID, "", gcTime, window.maxTimestamp(), TimeDomain.EVENT_TIME); } @Override public boolean isForWindow( String timerId, BoundedWindow window, Instant timestamp, TimeDomain timeDomain) { boolean isEventTimer = timeDomain.equals(TimeDomain.EVENT_TIME); Instant gcTime = LateDataUtils.garbageCollectionTime(window, windowingStrategy); gcTime = gcTime.plus(GC_DELAY_MS); return isEventTimer && GC_TIMER_ID.equals(timerId) && gcTime.equals(timestamp); } } /** A {@link StatefulDoFnRunner.StateCleaner} implemented via {@link StateInternals}. 
*/ public static class StateInternalsStateCleaner<W extends BoundedWindow> implements StatefulDoFnRunner.StateCleaner<W> { private final DoFn<?, ?> fn; private final DoFnSignature signature; private final StateInternals stateInternals; private final Coder<W> windowCoder; public StateInternalsStateCleaner( DoFn<?, ?> fn, StateInternals stateInternals, Coder<W> windowCoder) { this.fn = fn; this.signature = DoFnSignatures.getSignature(fn.getClass()); this.stateInternals = stateInternals; this.windowCoder = windowCoder; } @Override public void clearForWindow(W window) { for (Map.Entry<String, DoFnSignature.StateDeclaration> entry : signature.stateDeclarations().entrySet()) { try { StateSpec<?> spec = (StateSpec<?>) entry.getValue().field().get(fn); State state = stateInternals.state( StateNamespaces.window(windowCoder, window), StateTags.tagForSpec(entry.getKey(), (StateSpec) spec)); state.clear(); } catch (IllegalAccessException e) { throw new RuntimeException(e); } } } } }
Since we only create a new strand when we hit a worker or start, enough to check the current parent.
public boolean lockedBySameContext(Strand ctx) { return this.current.getLast() == ctx; }
return this.current.getLast() == ctx;
public boolean lockedBySameContext(Strand ctx) { return this.current.getLast() == ctx; }
class BLock { private ArrayDeque<Strand> current; private ArrayDeque<Strand> waitingForLock; public BLock() { this.current = new ArrayDeque<>(); this.waitingForLock = new ArrayDeque<>(); } public synchronized boolean lock(Strand strand) { if (isLockFree() || lockedBySameContext(strand)) { this.current.offerLast(strand); return true; } this.waitingForLock.offerLast(strand); strand.setState(State.BLOCK_AND_YIELD); strand.blockedOnExtern = false; return false; } public synchronized void unlock() { this.current.removeLast(); if (!waitingForLock.isEmpty()) { Strand strand = this.waitingForLock.removeFirst(); strand.scheduler.unblockStrand(strand); } } public boolean isLockFree() { return this.current.isEmpty(); } }
class BLock { private ArrayDeque<Strand> current; private ArrayDeque<Strand> waitingForLock; public BLock() { this.current = new ArrayDeque<>(); this.waitingForLock = new ArrayDeque<>(); } public synchronized boolean lock(Strand strand) { if (isLockFree() || lockedBySameContext(strand)) { this.current.offerLast(strand); return true; } this.waitingForLock.offerLast(strand); strand.setState(State.BLOCK_AND_YIELD); strand.blockedOnExtern = false; return false; } public synchronized void unlock() { this.current.removeLast(); if (!waitingForLock.isEmpty()) { Strand strand = this.waitingForLock.removeFirst(); strand.scheduler.unblockStrand(strand); } } public boolean isLockFree() { return this.current.isEmpty(); } }
> It also doesn't really make sense for the maxParallelism to only be settable if it was previously autoConfigured by the system. 👍🏼 I was able to remove the concept of "auto-configured" into SchedulerBase, the only place it makes sense.
public void setMaxParallelism(int maxParallelism) { Preconditions.checkState( isMaxParallelismAutoConfigured(), "Attempt to override a configured max parallelism. Configured: " + this.maxParallelism + ", argument: " + maxParallelism); if (maxParallelism == ExecutionConfig.PARALLELISM_AUTO_MAX) { maxParallelism = KeyGroupRangeAssignment.UPPER_BOUND_MAX_PARALLELISM; } Preconditions.checkArgument( maxParallelism > 0 && maxParallelism <= KeyGroupRangeAssignment.UPPER_BOUND_MAX_PARALLELISM, "Overriding max parallelism is not in valid bounds (1..%s), found: %s", KeyGroupRangeAssignment.UPPER_BOUND_MAX_PARALLELISM, maxParallelism); this.maxParallelism = maxParallelism; }
isMaxParallelismAutoConfigured(),
public void setMaxParallelism(int maxParallelism) { parallelismInfo.setMaxParallelism(maxParallelism); }
class ExecutionJobVertex implements AccessExecutionJobVertex, Archiveable<ArchivedExecutionJobVertex> { /** Use the same log for all ExecutionGraph classes. */ private static final Logger LOG = DefaultExecutionGraph.LOG; private final Object stateMonitor = new Object(); private final InternalExecutionGraphAccessor graph; private final JobVertex jobVertex; private final ExecutionVertex[] taskVertices; private final IntermediateResult[] producedDataSets; private final List<IntermediateResult> inputs; private final int parallelism; private final SlotSharingGroup slotSharingGroup; @Nullable private final CoLocationGroup coLocationGroup; private final InputSplit[] inputSplits; private int maxParallelism; private final ResourceProfile resourceProfile; /** * Either store a serialized task information, which is for all sub tasks the same, or the * permanent blob key of the offloaded task information BLOB containing the serialized task * information. */ private Either<SerializedValue<TaskInformation>, PermanentBlobKey> taskInformationOrBlobKey = null; private final Collection<OperatorCoordinatorHolder> operatorCoordinators; private InputSplitAssigner splitAssigner; @VisibleForTesting public ExecutionJobVertex( InternalExecutionGraphAccessor graph, JobVertex jobVertex, int maxPriorAttemptsHistoryLength, Time timeout, long createTimestamp, SubtaskAttemptNumberStore initialAttemptCounts) throws JobException { if (graph == null || jobVertex == null) { throw new NullPointerException(); } this.graph = graph; this.jobVertex = jobVertex; this.parallelism = jobVertex.getParallelism() > 0 ? jobVertex.getParallelism() : 1; this.maxParallelism = jobVertex.getMaxParallelism(); if (this.parallelism > maxParallelism) { throw new JobException( String.format( "Vertex %s's parallelism (%s) is higher than the max parallelism (%s). 
Please lower the parallelism or increase the max parallelism.", jobVertex.getName(), this.parallelism, maxParallelism)); } this.resourceProfile = ResourceProfile.fromResourceSpec(jobVertex.getMinResources(), MemorySize.ZERO); this.taskVertices = new ExecutionVertex[this.parallelism]; this.inputs = new ArrayList<>(jobVertex.getInputs().size()); this.slotSharingGroup = checkNotNull(jobVertex.getSlotSharingGroup()); this.coLocationGroup = jobVertex.getCoLocationGroup(); this.producedDataSets = new IntermediateResult[jobVertex.getNumberOfProducedIntermediateDataSets()]; for (int i = 0; i < jobVertex.getProducedDataSets().size(); i++) { final IntermediateDataSet result = jobVertex.getProducedDataSets().get(i); this.producedDataSets[i] = new IntermediateResult( result.getId(), this, this.parallelism, result.getResultType()); } for (int i = 0; i < this.parallelism; i++) { ExecutionVertex vertex = new ExecutionVertex( this, i, producedDataSets, timeout, createTimestamp, maxPriorAttemptsHistoryLength, initialAttemptCounts.getAttemptCount(i)); this.taskVertices[i] = vertex; } for (IntermediateResult ir : this.producedDataSets) { if (ir.getNumberOfAssignedPartitions() != parallelism) { throw new RuntimeException( "The intermediate result's partitions were not correctly assigned."); } } final List<SerializedValue<OperatorCoordinator.Provider>> coordinatorProviders = getJobVertex().getOperatorCoordinators(); if (coordinatorProviders.isEmpty()) { this.operatorCoordinators = Collections.emptyList(); } else { final ArrayList<OperatorCoordinatorHolder> coordinators = new ArrayList<>(coordinatorProviders.size()); try { for (final SerializedValue<OperatorCoordinator.Provider> provider : coordinatorProviders) { coordinators.add( OperatorCoordinatorHolder.create( provider, this, graph.getUserClassLoader())); } } catch (Exception | LinkageError e) { IOUtils.closeAllQuietly(coordinators); throw new JobException( "Cannot instantiate the coordinator for operator " + getName(), e); } 
this.operatorCoordinators = Collections.unmodifiableList(coordinators); } try { @SuppressWarnings("unchecked") InputSplitSource<InputSplit> splitSource = (InputSplitSource<InputSplit>) jobVertex.getInputSplitSource(); if (splitSource != null) { Thread currentThread = Thread.currentThread(); ClassLoader oldContextClassLoader = currentThread.getContextClassLoader(); currentThread.setContextClassLoader(graph.getUserClassLoader()); try { inputSplits = splitSource.createInputSplits(this.parallelism); if (inputSplits != null) { splitAssigner = splitSource.getInputSplitAssigner(inputSplits); } } finally { currentThread.setContextClassLoader(oldContextClassLoader); } } else { inputSplits = null; } } catch (Throwable t) { throw new JobException( "Creating the input splits caused an error: " + t.getMessage(), t); } } /** * Returns a list containing the ID pairs of all operators contained in this execution job * vertex. * * @return list containing the ID pairs of all contained operators */ public List<OperatorIDPair> getOperatorIDs() { return jobVertex.getOperatorIDs(); } public InternalExecutionGraphAccessor getGraph() { return graph; } public JobVertex getJobVertex() { return jobVertex; } @Override public String getName() { return getJobVertex().getName(); } @Override public int getParallelism() { return parallelism; } @Override public int getMaxParallelism() { return maxParallelism; } @Override public ResourceProfile getResourceProfile() { return resourceProfile; } /** * Gets whether the max parallelism has been configured by the system. * * @return whether the max parallelism has been configured by the system. 
*/ public boolean isMaxParallelismAutoConfigured() { return jobVertex.isMaxParallelismAutoConfigured(); } public JobID getJobId() { return graph.getJobID(); } @Override public JobVertexID getJobVertexId() { return jobVertex.getID(); } @Override public ExecutionVertex[] getTaskVertices() { return taskVertices; } public IntermediateResult[] getProducedDataSets() { return producedDataSets; } public InputSplitAssigner getSplitAssigner() { return splitAssigner; } public SlotSharingGroup getSlotSharingGroup() { return slotSharingGroup; } @Nullable public CoLocationGroup getCoLocationGroup() { return coLocationGroup; } public List<IntermediateResult> getInputs() { return inputs; } public Collection<OperatorCoordinatorHolder> getOperatorCoordinators() { return operatorCoordinators; } public Either<SerializedValue<TaskInformation>, PermanentBlobKey> getTaskInformationOrBlobKey() throws IOException { synchronized (stateMonitor) { if (taskInformationOrBlobKey == null) { final BlobWriter blobWriter = graph.getBlobWriter(); final TaskInformation taskInformation = new TaskInformation( jobVertex.getID(), jobVertex.getName(), parallelism, maxParallelism, jobVertex.getInvokableClassName(), jobVertex.getConfiguration()); taskInformationOrBlobKey = BlobWriter.serializeAndTryOffload(taskInformation, getJobId(), blobWriter); } return taskInformationOrBlobKey; } } @Override public ExecutionState getAggregateState() { int[] num = new int[ExecutionState.values().length]; for (ExecutionVertex vertex : this.taskVertices) { num[vertex.getExecutionState().ordinal()]++; } return getAggregateJobVertexState(num, parallelism); } public void connectToPredecessors( Map<IntermediateDataSetID, IntermediateResult> intermediateDataSets) throws JobException { List<JobEdge> inputs = jobVertex.getInputs(); if (LOG.isDebugEnabled()) { LOG.debug( String.format( "Connecting ExecutionJobVertex %s (%s) to %d predecessors.", jobVertex.getID(), jobVertex.getName(), inputs.size())); } for (int num = 0; num < 
inputs.size(); num++) { JobEdge edge = inputs.get(num); if (LOG.isDebugEnabled()) { if (edge.getSource() == null) { LOG.debug( String.format( "Connecting input %d of vertex %s (%s) to intermediate result referenced via ID %s.", num, jobVertex.getID(), jobVertex.getName(), edge.getSourceId())); } else { LOG.debug( String.format( "Connecting input %d of vertex %s (%s) to intermediate result referenced via predecessor %s (%s).", num, jobVertex.getID(), jobVertex.getName(), edge.getSource().getProducer().getID(), edge.getSource().getProducer().getName())); } } IntermediateResult ires = intermediateDataSets.get(edge.getSourceId()); if (ires == null) { throw new JobException( "Cannot connect this job graph to the previous graph. No previous intermediate result found for ID " + edge.getSourceId()); } this.inputs.add(ires); EdgeManagerBuildUtil.connectVertexToResult(this, ires, edge.getDistributionPattern()); } } /** Cancels all currently running vertex executions. */ public void cancel() { for (ExecutionVertex ev : getTaskVertices()) { ev.cancel(); } } /** * Cancels all currently running vertex executions. * * @return A future that is complete once all tasks have canceled. 
*/ public CompletableFuture<Void> cancelWithFuture() { return FutureUtils.waitForAll(mapExecutionVertices(ExecutionVertex::cancel)); } public CompletableFuture<Void> suspend() { return FutureUtils.waitForAll(mapExecutionVertices(ExecutionVertex::suspend)); } @Nonnull private Collection<CompletableFuture<?>> mapExecutionVertices( final Function<ExecutionVertex, CompletableFuture<?>> mapFunction) { return Arrays.stream(getTaskVertices()).map(mapFunction).collect(Collectors.toList()); } public void fail(Throwable t) { for (ExecutionVertex ev : getTaskVertices()) { ev.fail(t); } } public StringifiedAccumulatorResult[] getAggregatedUserAccumulatorsStringified() { Map<String, OptionalFailure<Accumulator<?, ?>>> userAccumulators = new HashMap<>(); for (ExecutionVertex vertex : taskVertices) { Map<String, Accumulator<?, ?>> next = vertex.getCurrentExecutionAttempt().getUserAccumulators(); if (next != null) { AccumulatorHelper.mergeInto(userAccumulators, next); } } return StringifiedAccumulatorResult.stringifyAccumulatorResults(userAccumulators); } @Override public ArchivedExecutionJobVertex archive() { return new ArchivedExecutionJobVertex(this); } /** * A utility function that computes an "aggregated" state for the vertex. * * <p>This state is not used anywhere in the coordination, but can be used for display in * dashboards to as a summary for how the particular parallel operation represented by this * ExecutionJobVertex is currently behaving. * * <p>For example, if at least one parallel task is failed, the aggregate state is failed. If * not, and at least one parallel task is cancelling (or cancelled), the aggregate state is * cancelling (or cancelled). If all tasks are finished, the aggregate state is finished, and so * on. * * @param verticesPerState The number of vertices in each state (indexed by the ordinal of the * ExecutionState values). * @param parallelism The parallelism of the ExecutionJobVertex * @return The aggregate state of this ExecutionJobVertex. 
*/ public static ExecutionState getAggregateJobVertexState( int[] verticesPerState, int parallelism) { if (verticesPerState == null || verticesPerState.length != ExecutionState.values().length) { throw new IllegalArgumentException( "Must provide an array as large as there are execution states."); } if (verticesPerState[ExecutionState.FAILED.ordinal()] > 0) { return ExecutionState.FAILED; } if (verticesPerState[ExecutionState.CANCELING.ordinal()] > 0) { return ExecutionState.CANCELING; } else if (verticesPerState[ExecutionState.CANCELED.ordinal()] > 0) { return ExecutionState.CANCELED; } else if (verticesPerState[ExecutionState.RUNNING.ordinal()] > 0) { return ExecutionState.RUNNING; } else if (verticesPerState[ExecutionState.FINISHED.ordinal()] > 0) { return verticesPerState[ExecutionState.FINISHED.ordinal()] == parallelism ? ExecutionState.FINISHED : ExecutionState.RUNNING; } else { return ExecutionState.CREATED; } } }
class ExecutionJobVertex implements AccessExecutionJobVertex, Archiveable<ArchivedExecutionJobVertex> { /** Use the same log for all ExecutionGraph classes. */ private static final Logger LOG = DefaultExecutionGraph.LOG; private final Object stateMonitor = new Object(); private final InternalExecutionGraphAccessor graph; private final JobVertex jobVertex; private final ExecutionVertex[] taskVertices; private final IntermediateResult[] producedDataSets; private final List<IntermediateResult> inputs; private final VertexParallelismInformation parallelismInfo; private final SlotSharingGroup slotSharingGroup; @Nullable private final CoLocationGroup coLocationGroup; private final InputSplit[] inputSplits; private final ResourceProfile resourceProfile; /** * Either store a serialized task information, which is for all sub tasks the same, or the * permanent blob key of the offloaded task information BLOB containing the serialized task * information. */ private Either<SerializedValue<TaskInformation>, PermanentBlobKey> taskInformationOrBlobKey = null; private final Collection<OperatorCoordinatorHolder> operatorCoordinators; private InputSplitAssigner splitAssigner; @VisibleForTesting public ExecutionJobVertex( InternalExecutionGraphAccessor graph, JobVertex jobVertex, int maxPriorAttemptsHistoryLength, Time timeout, long createTimestamp, VertexParallelismInformation parallelismInfo, SubtaskAttemptNumberStore initialAttemptCounts) throws JobException { if (graph == null || jobVertex == null) { throw new NullPointerException(); } this.graph = graph; this.jobVertex = jobVertex; this.parallelismInfo = parallelismInfo; if (this.parallelismInfo.getParallelism() > this.parallelismInfo.getMaxParallelism()) { throw new JobException( String.format( "Vertex %s's parallelism (%s) is higher than the max parallelism (%s). 
Please lower the parallelism or increase the max parallelism.", jobVertex.getName(), this.parallelismInfo.getParallelism(), this.parallelismInfo.getMaxParallelism())); } this.resourceProfile = ResourceProfile.fromResourceSpec(jobVertex.getMinResources(), MemorySize.ZERO); this.taskVertices = new ExecutionVertex[this.parallelismInfo.getParallelism()]; this.inputs = new ArrayList<>(jobVertex.getInputs().size()); this.slotSharingGroup = checkNotNull(jobVertex.getSlotSharingGroup()); this.coLocationGroup = jobVertex.getCoLocationGroup(); this.producedDataSets = new IntermediateResult[jobVertex.getNumberOfProducedIntermediateDataSets()]; for (int i = 0; i < jobVertex.getProducedDataSets().size(); i++) { final IntermediateDataSet result = jobVertex.getProducedDataSets().get(i); this.producedDataSets[i] = new IntermediateResult( result.getId(), this, this.parallelismInfo.getParallelism(), result.getResultType()); } for (int i = 0; i < this.parallelismInfo.getParallelism(); i++) { ExecutionVertex vertex = new ExecutionVertex( this, i, producedDataSets, timeout, createTimestamp, maxPriorAttemptsHistoryLength, initialAttemptCounts.getAttemptCount(i)); this.taskVertices[i] = vertex; } for (IntermediateResult ir : this.producedDataSets) { if (ir.getNumberOfAssignedPartitions() != this.parallelismInfo.getParallelism()) { throw new RuntimeException( "The intermediate result's partitions were not correctly assigned."); } } final List<SerializedValue<OperatorCoordinator.Provider>> coordinatorProviders = getJobVertex().getOperatorCoordinators(); if (coordinatorProviders.isEmpty()) { this.operatorCoordinators = Collections.emptyList(); } else { final ArrayList<OperatorCoordinatorHolder> coordinators = new ArrayList<>(coordinatorProviders.size()); try { for (final SerializedValue<OperatorCoordinator.Provider> provider : coordinatorProviders) { coordinators.add( OperatorCoordinatorHolder.create( provider, this, graph.getUserClassLoader())); } } catch (Exception | LinkageError e) { 
IOUtils.closeAllQuietly(coordinators); throw new JobException( "Cannot instantiate the coordinator for operator " + getName(), e); } this.operatorCoordinators = Collections.unmodifiableList(coordinators); } try { @SuppressWarnings("unchecked") InputSplitSource<InputSplit> splitSource = (InputSplitSource<InputSplit>) jobVertex.getInputSplitSource(); if (splitSource != null) { Thread currentThread = Thread.currentThread(); ClassLoader oldContextClassLoader = currentThread.getContextClassLoader(); currentThread.setContextClassLoader(graph.getUserClassLoader()); try { inputSplits = splitSource.createInputSplits(this.parallelismInfo.getParallelism()); if (inputSplits != null) { splitAssigner = splitSource.getInputSplitAssigner(inputSplits); } } finally { currentThread.setContextClassLoader(oldContextClassLoader); } } else { inputSplits = null; } } catch (Throwable t) { throw new JobException( "Creating the input splits caused an error: " + t.getMessage(), t); } } /** * Returns a list containing the ID pairs of all operators contained in this execution job * vertex. 
* * @return list containing the ID pairs of all contained operators */ public List<OperatorIDPair> getOperatorIDs() { return jobVertex.getOperatorIDs(); } public InternalExecutionGraphAccessor getGraph() { return graph; } public JobVertex getJobVertex() { return jobVertex; } @Override public String getName() { return getJobVertex().getName(); } @Override public int getParallelism() { return parallelismInfo.getParallelism(); } @Override public int getMaxParallelism() { return parallelismInfo.getMaxParallelism(); } @Override public ResourceProfile getResourceProfile() { return resourceProfile; } public boolean canRescaleMaxParallelism(int desiredMaxParallelism) { return parallelismInfo.canRescaleMaxParallelism(desiredMaxParallelism); } public JobID getJobId() { return graph.getJobID(); } @Override public JobVertexID getJobVertexId() { return jobVertex.getID(); } @Override public ExecutionVertex[] getTaskVertices() { return taskVertices; } public IntermediateResult[] getProducedDataSets() { return producedDataSets; } public InputSplitAssigner getSplitAssigner() { return splitAssigner; } public SlotSharingGroup getSlotSharingGroup() { return slotSharingGroup; } @Nullable public CoLocationGroup getCoLocationGroup() { return coLocationGroup; } public List<IntermediateResult> getInputs() { return inputs; } public Collection<OperatorCoordinatorHolder> getOperatorCoordinators() { return operatorCoordinators; } public Either<SerializedValue<TaskInformation>, PermanentBlobKey> getTaskInformationOrBlobKey() throws IOException { synchronized (stateMonitor) { if (taskInformationOrBlobKey == null) { final BlobWriter blobWriter = graph.getBlobWriter(); final TaskInformation taskInformation = new TaskInformation( jobVertex.getID(), jobVertex.getName(), parallelismInfo.getParallelism(), parallelismInfo.getMaxParallelism(), jobVertex.getInvokableClassName(), jobVertex.getConfiguration()); taskInformationOrBlobKey = BlobWriter.serializeAndTryOffload(taskInformation, getJobId(), 
blobWriter); } return taskInformationOrBlobKey; } } @Override public ExecutionState getAggregateState() { int[] num = new int[ExecutionState.values().length]; for (ExecutionVertex vertex : this.taskVertices) { num[vertex.getExecutionState().ordinal()]++; } return getAggregateJobVertexState(num, this.parallelismInfo.getParallelism()); } public void connectToPredecessors( Map<IntermediateDataSetID, IntermediateResult> intermediateDataSets) throws JobException { List<JobEdge> inputs = jobVertex.getInputs(); if (LOG.isDebugEnabled()) { LOG.debug( String.format( "Connecting ExecutionJobVertex %s (%s) to %d predecessors.", jobVertex.getID(), jobVertex.getName(), inputs.size())); } for (int num = 0; num < inputs.size(); num++) { JobEdge edge = inputs.get(num); if (LOG.isDebugEnabled()) { if (edge.getSource() == null) { LOG.debug( String.format( "Connecting input %d of vertex %s (%s) to intermediate result referenced via ID %s.", num, jobVertex.getID(), jobVertex.getName(), edge.getSourceId())); } else { LOG.debug( String.format( "Connecting input %d of vertex %s (%s) to intermediate result referenced via predecessor %s (%s).", num, jobVertex.getID(), jobVertex.getName(), edge.getSource().getProducer().getID(), edge.getSource().getProducer().getName())); } } IntermediateResult ires = intermediateDataSets.get(edge.getSourceId()); if (ires == null) { throw new JobException( "Cannot connect this job graph to the previous graph. No previous intermediate result found for ID " + edge.getSourceId()); } this.inputs.add(ires); EdgeManagerBuildUtil.connectVertexToResult(this, ires, edge.getDistributionPattern()); } } /** Cancels all currently running vertex executions. */ public void cancel() { for (ExecutionVertex ev : getTaskVertices()) { ev.cancel(); } } /** * Cancels all currently running vertex executions. * * @return A future that is complete once all tasks have canceled. 
*/ public CompletableFuture<Void> cancelWithFuture() { return FutureUtils.waitForAll(mapExecutionVertices(ExecutionVertex::cancel)); } public CompletableFuture<Void> suspend() { return FutureUtils.waitForAll(mapExecutionVertices(ExecutionVertex::suspend)); } @Nonnull private Collection<CompletableFuture<?>> mapExecutionVertices( final Function<ExecutionVertex, CompletableFuture<?>> mapFunction) { return Arrays.stream(getTaskVertices()).map(mapFunction).collect(Collectors.toList()); } public void fail(Throwable t) { for (ExecutionVertex ev : getTaskVertices()) { ev.fail(t); } } public StringifiedAccumulatorResult[] getAggregatedUserAccumulatorsStringified() { Map<String, OptionalFailure<Accumulator<?, ?>>> userAccumulators = new HashMap<>(); for (ExecutionVertex vertex : taskVertices) { Map<String, Accumulator<?, ?>> next = vertex.getCurrentExecutionAttempt().getUserAccumulators(); if (next != null) { AccumulatorHelper.mergeInto(userAccumulators, next); } } return StringifiedAccumulatorResult.stringifyAccumulatorResults(userAccumulators); } @Override public ArchivedExecutionJobVertex archive() { return new ArchivedExecutionJobVertex(this); } /** * A utility function that computes an "aggregated" state for the vertex. * * <p>This state is not used anywhere in the coordination, but can be used for display in * dashboards to as a summary for how the particular parallel operation represented by this * ExecutionJobVertex is currently behaving. * * <p>For example, if at least one parallel task is failed, the aggregate state is failed. If * not, and at least one parallel task is cancelling (or cancelled), the aggregate state is * cancelling (or cancelled). If all tasks are finished, the aggregate state is finished, and so * on. * * @param verticesPerState The number of vertices in each state (indexed by the ordinal of the * ExecutionState values). * @param parallelism The parallelism of the ExecutionJobVertex * @return The aggregate state of this ExecutionJobVertex. 
*/ public static ExecutionState getAggregateJobVertexState( int[] verticesPerState, int parallelism) { if (verticesPerState == null || verticesPerState.length != ExecutionState.values().length) { throw new IllegalArgumentException( "Must provide an array as large as there are execution states."); } if (verticesPerState[ExecutionState.FAILED.ordinal()] > 0) { return ExecutionState.FAILED; } if (verticesPerState[ExecutionState.CANCELING.ordinal()] > 0) { return ExecutionState.CANCELING; } else if (verticesPerState[ExecutionState.CANCELED.ordinal()] > 0) { return ExecutionState.CANCELED; } else if (verticesPerState[ExecutionState.RUNNING.ordinal()] > 0) { return ExecutionState.RUNNING; } else if (verticesPerState[ExecutionState.FINISHED.ordinal()] > 0) { return verticesPerState[ExecutionState.FINISHED.ordinal()] == parallelism ? ExecutionState.FINISHED : ExecutionState.RUNNING; } else { return ExecutionState.CREATED; } } }
Sorry to ask this, but can we change this one to be the same format (ie. using ifPresent lambda) so the code is all consistent.
private ConfigurationBuilder builderFromProperties(Properties properties) { ConfigurationBuilder builder = new ConfigurationBuilder(); Object marshallerInstance = properties.remove(ConfigurationProperties.MARSHALLER); if (marshallerInstance != null) { if (marshallerInstance instanceof ProtoStreamMarshaller) { handleProtoStreamMarshaller((ProtoStreamMarshaller) marshallerInstance, properties, beanManager); } builder.marshaller((Marshaller) marshallerInstance); } if (infinispanClientRuntimeConfig != null) { Optional<String> runtimeServerList = infinispanClientRuntimeConfig.serverList; if (runtimeServerList.isPresent()) { properties.put(ConfigurationProperties.SERVER_LIST, runtimeServerList.get()); } infinispanClientRuntimeConfig.clientIntelligence .ifPresent(v -> properties.put(ConfigurationProperties.CLIENT_INTELLIGENCE, v)); infinispanClientRuntimeConfig.useAuth .ifPresent(v -> properties.put(ConfigurationProperties.USE_AUTH, v)); infinispanClientRuntimeConfig.authUsername .ifPresent(v -> properties.put(ConfigurationProperties.AUTH_USERNAME, v)); infinispanClientRuntimeConfig.authPassword .ifPresent(v -> properties.put(ConfigurationProperties.AUTH_PASSWORD, v)); infinispanClientRuntimeConfig.authRealm .ifPresent(v -> properties.put(ConfigurationProperties.AUTH_REALM, v)); infinispanClientRuntimeConfig.authServerName .ifPresent(v -> properties.put(ConfigurationProperties.AUTH_SERVER_NAME, v)); infinispanClientRuntimeConfig.authClientSubject .ifPresent(v -> properties.put(ConfigurationProperties.AUTH_CLIENT_SUBJECT, v)); infinispanClientRuntimeConfig.authCallbackHandler .ifPresent(v -> properties.put(ConfigurationProperties.AUTH_CALLBACK_HANDLER, v)); infinispanClientRuntimeConfig.saslMechanism .ifPresent(v -> properties.put(ConfigurationProperties.SASL_MECHANISM, v)); } builder.withProperties(properties); return builder; }
Optional<String> runtimeServerList = infinispanClientRuntimeConfig.serverList;
private ConfigurationBuilder builderFromProperties(Properties properties) { ConfigurationBuilder builder = new ConfigurationBuilder(); Object marshallerInstance = properties.remove(ConfigurationProperties.MARSHALLER); if (marshallerInstance != null) { if (marshallerInstance instanceof ProtoStreamMarshaller) { handleProtoStreamMarshaller((ProtoStreamMarshaller) marshallerInstance, properties, beanManager); } builder.marshaller((Marshaller) marshallerInstance); } if (infinispanClientRuntimeConfig != null) { infinispanClientRuntimeConfig.serverList .ifPresent(v -> properties.put(ConfigurationProperties.SERVER_LIST, v)); infinispanClientRuntimeConfig.clientIntelligence .ifPresent(v -> properties.put(ConfigurationProperties.CLIENT_INTELLIGENCE, v)); infinispanClientRuntimeConfig.useAuth .ifPresent(v -> properties.put(ConfigurationProperties.USE_AUTH, v)); infinispanClientRuntimeConfig.authUsername .ifPresent(v -> properties.put(ConfigurationProperties.AUTH_USERNAME, v)); infinispanClientRuntimeConfig.authPassword .ifPresent(v -> properties.put(ConfigurationProperties.AUTH_PASSWORD, v)); infinispanClientRuntimeConfig.authRealm .ifPresent(v -> properties.put(ConfigurationProperties.AUTH_REALM, v)); infinispanClientRuntimeConfig.authServerName .ifPresent(v -> properties.put(ConfigurationProperties.AUTH_SERVER_NAME, v)); infinispanClientRuntimeConfig.authClientSubject .ifPresent(v -> properties.put(ConfigurationProperties.AUTH_CLIENT_SUBJECT, v)); infinispanClientRuntimeConfig.authCallbackHandler .ifPresent(v -> properties.put(ConfigurationProperties.AUTH_CALLBACK_HANDLER, v)); infinispanClientRuntimeConfig.saslMechanism .ifPresent(v -> properties.put(ConfigurationProperties.SASL_MECHANISM, v)); } builder.withProperties(properties); return builder; }
class path to read contents of * @return string containing the contents of the file */ private static String getContents(String fileName) { InputStream stream = InfinispanClientProducer.class.getResourceAsStream(fileName); try (Scanner scanner = new Scanner(stream, "UTF-8")) { return scanner.useDelimiter("\\A").next(); } }
class path to read contents of * @return string containing the contents of the file */ private static String getContents(String fileName) { InputStream stream = InfinispanClientProducer.class.getResourceAsStream(fileName); try (Scanner scanner = new Scanner(stream, "UTF-8")) { return scanner.useDelimiter("\\A").next(); } }
As before I would create the method `restoreDefaultConfig` and also call it here.
protected void after() { try { stopFlinkCluster(); } catch (IOException e) { LOG.error("Failure while shutting down Flink cluster.", e); } final Path originalConfig = conf.resolve(FLINK_CONF_YAML); final Path backupConfig = conf.resolve(FLINK_CONF_YAML_BACKUP); try { Files.move(backupConfig, originalConfig, StandardCopyOption.REPLACE_EXISTING); } catch (IOException e) { LOG.error("Failed to restore flink-conf.yaml", e); } for (AutoCloseable fileToDelete : filesToDelete) { try { fileToDelete.close(); } catch (Exception e) { LOG.error("Failure while cleaning up file.", e); } } }
final Path originalConfig = conf.resolve(FLINK_CONF_YAML);
protected void after() { try { stopFlinkCluster(); } catch (IOException e) { LOG.error("Failure while shutting down Flink cluster.", e); } final Path originalConfig = conf.resolve(FLINK_CONF_YAML); final Path backupConfig = conf.resolve(FLINK_CONF_YAML_BACKUP); try { Files.move(backupConfig, originalConfig, StandardCopyOption.REPLACE_EXISTING); } catch (IOException e) { LOG.error("Failed to restore flink-conf.yaml", e); } for (AutoCloseable fileToDelete : filesToDelete) { try { fileToDelete.close(); } catch (Exception e) { LOG.error("Failure while cleaning up file.", e); } } }
class FlinkDistribution extends ExternalResource { private static final Logger LOG = LoggerFactory.getLogger(FlinkDistribution.class); private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); private final List<AutoClosablePath> filesToDelete = new ArrayList<>(4); private static final Path FLINK_CONF_YAML = Paths.get("flink-conf.yaml"); private static final Path FLINK_CONF_YAML_BACKUP = Paths.get("flink-conf.yaml.bak"); private final Path opt; private final Path lib; private final Path conf; private final Path log; private final Path bin; private Configuration defaultConfig; public FlinkDistribution() { final String distDirProperty = System.getProperty("distDir"); if (distDirProperty == null) { Assert.fail("The distDir property was not set. You can set it when running maven via -DdistDir=<path> ."); } final Path flinkDir = Paths.get(distDirProperty); bin = flinkDir.resolve("bin"); opt = flinkDir.resolve("opt"); lib = flinkDir.resolve("lib"); conf = flinkDir.resolve("conf"); log = flinkDir.resolve("log"); } @Override protected void before() throws IOException { defaultConfig = new UnmodifiableConfiguration(GlobalConfiguration.loadConfiguration(conf.toAbsolutePath().toString())); final Path originalConfig = conf.resolve(FLINK_CONF_YAML); final Path backupConfig = conf.resolve(FLINK_CONF_YAML_BACKUP); Files.copy(originalConfig, backupConfig); filesToDelete.add(new AutoClosablePath(backupConfig)); } @Override public void startFlinkCluster() throws IOException { AutoClosableProcess.runBlocking("Start Flink cluster", bin.resolve("start-cluster.sh").toAbsolutePath().toString()); final OkHttpClient client = new OkHttpClient(); final Request request = new Request.Builder() .get() .url("http: .build(); Exception reportedException = null; for (int x = 0; x < 30; x++) { try (Response response = client.newCall(request).execute()) { if (response.isSuccessful()) { final String json = response.body().string(); final JsonNode taskManagerList = 
OBJECT_MAPPER.readTree(json) .get("taskmanagers"); if (taskManagerList != null && taskManagerList.size() > 0) { LOG.info("Dispatcher REST endpoint is up."); return; } } } catch (IOException ioe) { reportedException = ExceptionUtils.firstOrSuppressed(ioe, reportedException); } LOG.info("Waiting for dispatcher REST endpoint to come up..."); try { Thread.sleep(1000); } catch (InterruptedException e) { Thread.currentThread().interrupt(); reportedException = ExceptionUtils.firstOrSuppressed(e, reportedException); } } throw new AssertionError("Dispatcher REST endpoint did not start in time.", reportedException); } public void stopFlinkCluster() throws IOException { AutoClosableProcess.runBlocking("Stop Flink Cluster", bin.resolve("stop-cluster.sh").toAbsolutePath().toString()); } public void copyOptJarsToLib(String jarNamePattern) throws FileNotFoundException, IOException { final Optional<Path> reporterJarOptional = Files.walk(opt) .filter(path -> path.getFileName().toString().startsWith("flink-metrics-prometheus")) .findFirst(); if (reporterJarOptional.isPresent()) { final Path optReporterJar = reporterJarOptional.get(); final Path libReporterJar = lib.resolve(optReporterJar.getFileName()); Files.copy(optReporterJar, libReporterJar); filesToDelete.add(new AutoClosablePath(libReporterJar)); } else { throw new FileNotFoundException("No jar could be found matching the pattern " + jarNamePattern + "."); } } public void appendConfiguration(Configuration config) throws IOException { final Configuration mergedConfig = new Configuration(); mergedConfig.addAll(defaultConfig); mergedConfig.addAll(config); final List<String> configurationLines = mergedConfig.toMap().entrySet().stream() .map(entry -> entry.getKey() + ": " + entry.getValue()) .collect(Collectors.toList()); Files.write(conf.resolve("flink-conf.yaml"), configurationLines); } public Stream<String> searchAllLogs(Pattern pattern, Function<Matcher, String> matchProcessor) throws IOException { final List<String> matches = 
new ArrayList<>(2); try (Stream<Path> logFilesStream = Files.list(log)) { final Iterator<Path> logFiles = logFilesStream.iterator(); while (logFiles.hasNext()) { final Path logFile = logFiles.next(); if (!logFile.getFileName().toString().endsWith(".log")) { continue; } try (BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream(logFile.toFile()), StandardCharsets.UTF_8))) { String line; while ((line = br.readLine()) != null) { Matcher matcher = pattern.matcher(line); if (matcher.matches()) { matches.add(matchProcessor.apply(matcher)); } } } } } return matches.stream(); } }
class FlinkDistribution extends ExternalResource { private static final Logger LOG = LoggerFactory.getLogger(FlinkDistribution.class); private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); private static final Path FLINK_CONF_YAML = Paths.get("flink-conf.yaml"); private static final Path FLINK_CONF_YAML_BACKUP = Paths.get("flink-conf.yaml.bak"); private final List<AutoClosablePath> filesToDelete = new ArrayList<>(4); private final Path opt; private final Path lib; private final Path conf; private final Path log; private final Path bin; private Configuration defaultConfig; public FlinkDistribution() { final String distDirProperty = System.getProperty("distDir"); if (distDirProperty == null) { Assert.fail("The distDir property was not set. You can set it when running maven via -DdistDir=<path> ."); } final Path flinkDir = Paths.get(distDirProperty); bin = flinkDir.resolve("bin"); opt = flinkDir.resolve("opt"); lib = flinkDir.resolve("lib"); conf = flinkDir.resolve("conf"); log = flinkDir.resolve("log"); } @Override protected void before() throws IOException { defaultConfig = new UnmodifiableConfiguration(GlobalConfiguration.loadConfiguration(conf.toAbsolutePath().toString())); final Path originalConfig = conf.resolve(FLINK_CONF_YAML); final Path backupConfig = conf.resolve(FLINK_CONF_YAML_BACKUP); Files.copy(originalConfig, backupConfig); filesToDelete.add(new AutoClosablePath(backupConfig)); } @Override public void startFlinkCluster() throws IOException { AutoClosableProcess.runBlocking("Start Flink cluster", bin.resolve("start-cluster.sh").toAbsolutePath().toString()); final OkHttpClient client = new OkHttpClient(); final Request request = new Request.Builder() .get() .url("http: .build(); Exception reportedException = null; for (int retryAttempt = 0; retryAttempt < 30; retryAttempt++) { try (Response response = client.newCall(request).execute()) { if (response.isSuccessful()) { final String json = response.body().string(); final JsonNode 
taskManagerList = OBJECT_MAPPER.readTree(json) .get("taskmanagers"); if (taskManagerList != null && taskManagerList.size() > 0) { LOG.info("Dispatcher REST endpoint is up."); return; } } } catch (IOException ioe) { reportedException = ExceptionUtils.firstOrSuppressed(ioe, reportedException); } LOG.info("Waiting for dispatcher REST endpoint to come up..."); try { Thread.sleep(1000); } catch (InterruptedException e) { Thread.currentThread().interrupt(); reportedException = ExceptionUtils.firstOrSuppressed(e, reportedException); } } throw new AssertionError("Dispatcher REST endpoint did not start in time.", reportedException); } public void stopFlinkCluster() throws IOException { AutoClosableProcess.runBlocking("Stop Flink Cluster", bin.resolve("stop-cluster.sh").toAbsolutePath().toString()); } public void copyOptJarsToLib(String jarNamePrefix) throws FileNotFoundException, IOException { final Optional<Path> reporterJarOptional = Files.walk(opt) .filter(path -> path.getFileName().toString().startsWith(jarNamePrefix)) .findFirst(); if (reporterJarOptional.isPresent()) { final Path optReporterJar = reporterJarOptional.get(); final Path libReporterJar = lib.resolve(optReporterJar.getFileName()); Files.copy(optReporterJar, libReporterJar); filesToDelete.add(new AutoClosablePath(libReporterJar)); } else { throw new FileNotFoundException("No jar could be found matching the pattern " + jarNamePrefix + "."); } } public void appendConfiguration(Configuration config) throws IOException { final Configuration mergedConfig = new Configuration(); mergedConfig.addAll(defaultConfig); mergedConfig.addAll(config); final List<String> configurationLines = mergedConfig.toMap().entrySet().stream() .map(entry -> entry.getKey() + ": " + entry.getValue()) .collect(Collectors.toList()); Files.write(conf.resolve("flink-conf.yaml"), configurationLines); } public Stream<String> searchAllLogs(Pattern pattern, Function<Matcher, String> matchProcessor) throws IOException { final List<String> matches 
= new ArrayList<>(2); try (Stream<Path> logFilesStream = Files.list(log)) { final Iterator<Path> logFiles = logFilesStream.iterator(); while (logFiles.hasNext()) { final Path logFile = logFiles.next(); if (!logFile.getFileName().toString().endsWith(".log")) { continue; } try (BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream(logFile.toFile()), StandardCharsets.UTF_8))) { String line; while ((line = br.readLine()) != null) { Matcher matcher = pattern.matcher(line); if (matcher.matches()) { matches.add(matchProcessor.apply(matcher)); } } } } } return matches.stream(); } }
Actually, it was wrong. The async connection must happen during the actual subscription and not during the initialization. So during the initialization, the connector should not do anything "network related". The code changed in SmallRye Reactive Messaging.
void onApplicationStart(@Observes StartupEvent event) { try { mediatorManager.initializeAndRun(); } catch (Exception e) { throw new RuntimeException(e); } }
throw new RuntimeException(e);
void onApplicationStart(@Observes StartupEvent event) { try { mediatorManager.initializeAndRun(); } catch (Exception e) { throw new RuntimeException(e); } }
class SmallRyeReactiveMessagingLifecycle { @Inject MediatorManager mediatorManager; }
class SmallRyeReactiveMessagingLifecycle { @Inject MediatorManager mediatorManager; }
Should be better for perf since we don't create new flux every iteration.
public Mono<Void> runAsync() { return blobAsyncClient.upload(randomByteBufferFlux, null, true).then(); }
return blobAsyncClient.upload(randomByteBufferFlux, null, true).then();
public Mono<Void> runAsync() { return blobAsyncClient.upload(randomByteBufferFlux, null, true).then(); }
class UploadBlobTest extends BlobTestBase<PerfStressOptions> { private final Flux<ByteBuffer> randomByteBufferFlux; public UploadBlobTest(PerfStressOptions options) { super(options); this.randomByteBufferFlux = createRandomByteBufferFlux(options.getSize()); } @Override public void run() { throw new UnsupportedOperationException(); } @Override }
class UploadBlobTest extends BlobTestBase<PerfStressOptions> { private final Flux<ByteBuffer> randomByteBufferFlux; public UploadBlobTest(PerfStressOptions options) { super(options); this.randomByteBufferFlux = createRandomByteBufferFlux(options.getSize()); } @Override public void run() { throw new UnsupportedOperationException(); } @Override }
My understanding is that the boolean expression for `COUNTIF` aggregate will be computed in a precursory `Project`. As of right now list of supported aggregate functions is limited to: https://github.com/apache/beam/blob/659d84b4be5bdd36b408359a8c69f4eb12771180/sdks/java/extensions/sql/zetasql/src/main/java/org/apache/beam/sdk/extensions/sql/zetasql/SqlStdOperatorMappingTable.java#L181-L189
public RelNode convert(ResolvedAggregateScan zetaNode, List<RelNode> inputs) { RelNode input = convertAggregateScanInputScanToLogicalProject(zetaNode, inputs.get(0)); int groupFieldsListSize = zetaNode.getGroupByList().size(); ImmutableBitSet groupSet; if (groupFieldsListSize != 0) { groupSet = ImmutableBitSet.of( IntStream.rangeClosed(0, groupFieldsListSize - 1) .boxed() .collect(Collectors.toList())); } else { groupSet = ImmutableBitSet.of(); } List<AggregateCall> aggregateCalls; if (zetaNode.getAggregateList().isEmpty()) { aggregateCalls = ImmutableList.of(); } else { aggregateCalls = new ArrayList<>(); int columnRefoff = groupFieldsListSize; for (ResolvedComputedColumn computedColumn : zetaNode.getAggregateList()) { AggregateCall aggCall = convertAggCall(computedColumn, columnRefoff); aggregateCalls.add(aggCall); if (!aggCall.getArgList().isEmpty()) { columnRefoff++; } } } LogicalAggregate logicalAggregate = new LogicalAggregate( getCluster(), input.getTraitSet(), input, groupSet, ImmutableList.of(groupSet), aggregateCalls); return logicalAggregate; }
columnRefoff++;
public RelNode convert(ResolvedAggregateScan zetaNode, List<RelNode> inputs) { RelNode input = convertAggregateScanInputScanToLogicalProject(zetaNode, inputs.get(0)); int groupFieldsListSize = zetaNode.getGroupByList().size(); ImmutableBitSet groupSet; if (groupFieldsListSize != 0) { groupSet = ImmutableBitSet.of( IntStream.rangeClosed(0, groupFieldsListSize - 1) .boxed() .collect(Collectors.toList())); } else { groupSet = ImmutableBitSet.of(); } List<AggregateCall> aggregateCalls; if (zetaNode.getAggregateList().isEmpty()) { aggregateCalls = ImmutableList.of(); } else { aggregateCalls = new ArrayList<>(); int columnRefoff = groupFieldsListSize; for (ResolvedComputedColumn computedColumn : zetaNode.getAggregateList()) { AggregateCall aggCall = convertAggCall(computedColumn, columnRefoff); aggregateCalls.add(aggCall); if (!aggCall.getArgList().isEmpty()) { columnRefoff++; } } } LogicalAggregate logicalAggregate = new LogicalAggregate( getCluster(), input.getTraitSet(), input, groupSet, ImmutableList.of(groupSet), aggregateCalls); return logicalAggregate; }
class AggregateScanConverter extends RelConverter<ResolvedAggregateScan> { private static final String AVG_ILLEGAL_LONG_INPUT_TYPE = "AVG(LONG) is not supported. You might want to use AVG(CAST(expression AS DOUBLE)."; AggregateScanConverter(ConversionContext context) { super(context); } @Override public List<ResolvedNode> getInputs(ResolvedAggregateScan zetaNode) { return Collections.singletonList(zetaNode.getInputScan()); } @Override private RelNode convertAggregateScanInputScanToLogicalProject( ResolvedAggregateScan node, RelNode input) { List<RexNode> projects = new ArrayList<>(); List<String> fieldNames = new ArrayList<>(); for (ResolvedComputedColumn computedColumn : node.getGroupByList()) { projects.add( getExpressionConverter() .convertRexNodeFromResolvedExpr( computedColumn.getExpr(), node.getInputScan().getColumnList(), input.getRowType().getFieldList())); fieldNames.add(getTrait().resolveAlias(computedColumn.getColumn())); } for (ResolvedComputedColumn resolvedComputedColumn : node.getAggregateList()) { ResolvedAggregateFunctionCall aggregateFunctionCall = ((ResolvedAggregateFunctionCall) resolvedComputedColumn.getExpr()); if (aggregateFunctionCall.getArgumentList() != null && aggregateFunctionCall.getArgumentList().size() == 1) { ResolvedExpr resolvedExpr = aggregateFunctionCall.getArgumentList().get(0); projects.add( getExpressionConverter() .convertRexNodeFromResolvedExpr( resolvedExpr, node.getInputScan().getColumnList(), input.getRowType().getFieldList())); fieldNames.add(getTrait().resolveAlias(resolvedComputedColumn.getColumn())); } else if (aggregateFunctionCall.getArgumentList() != null && aggregateFunctionCall.getArgumentList().size() > 1) { throw new RuntimeException( aggregateFunctionCall.getFunction().getName() + " has more than one argument."); } } return LogicalProject.create(input, projects, fieldNames); } private AggregateCall convertAggCall(ResolvedComputedColumn computedColumn, int columnRefOff) { ResolvedAggregateFunctionCall 
aggregateFunctionCall = (ResolvedAggregateFunctionCall) computedColumn.getExpr(); if (aggregateFunctionCall.getFunction().getName().equals("avg")) { FunctionSignature signature = aggregateFunctionCall.getSignature(); if (signature .getFunctionArgumentList() .get(0) .getType() .getKind() .equals(TypeKind.TYPE_INT64)) { throw new RuntimeException(AVG_ILLEGAL_LONG_INPUT_TYPE); } } if (aggregateFunctionCall.getDistinct()) { throw new RuntimeException( "Does not support " + aggregateFunctionCall.getFunction().getSqlName() + " DISTINCT. 'SELECT DISTINCT' syntax could be used to deduplicate before" + " aggregation."); } SqlAggFunction sqlAggFunction = (SqlAggFunction) SqlStdOperatorMappingTable.ZETASQL_FUNCTION_TO_CALCITE_SQL_OPERATOR.get( aggregateFunctionCall.getFunction().getName()); if (sqlAggFunction == null) { throw new RuntimeException( "Does not support ZetaSQL aggregate function: " + aggregateFunctionCall.getFunction().getName()); } List<Integer> argList = new ArrayList<>(); for (ResolvedExpr expr : ((ResolvedAggregateFunctionCall) computedColumn.getExpr()).getArgumentList()) { if (expr.nodeKind() == RESOLVED_CAST || expr.nodeKind() == RESOLVED_COLUMN_REF || expr.nodeKind() == RESOLVED_GET_STRUCT_FIELD) { argList.add(columnRefOff); } else { throw new RuntimeException( "Aggregate function only accepts Column Reference or CAST(Column Reference) as its" + " input."); } } RelDataType returnType; if (sqlAggFunction.equals(SqlStdOperatorTable.ANY_VALUE)) { returnType = toSimpleRelDataType( computedColumn.getColumn().getType().getKind(), getCluster().getRexBuilder(), true); } else { returnType = toSimpleRelDataType( computedColumn.getColumn().getType().getKind(), getCluster().getRexBuilder(), false); } String aggName = getTrait().resolveAlias(computedColumn.getColumn()); return AggregateCall.create( sqlAggFunction, false, false, false, argList, -1, RelCollations.EMPTY, returnType, aggName); } }
class AggregateScanConverter extends RelConverter<ResolvedAggregateScan> { private static final String AVG_ILLEGAL_LONG_INPUT_TYPE = "AVG(LONG) is not supported. You might want to use AVG(CAST(expression AS DOUBLE)."; AggregateScanConverter(ConversionContext context) { super(context); } @Override public List<ResolvedNode> getInputs(ResolvedAggregateScan zetaNode) { return Collections.singletonList(zetaNode.getInputScan()); } @Override private RelNode convertAggregateScanInputScanToLogicalProject( ResolvedAggregateScan node, RelNode input) { List<RexNode> projects = new ArrayList<>(); List<String> fieldNames = new ArrayList<>(); for (ResolvedComputedColumn computedColumn : node.getGroupByList()) { projects.add( getExpressionConverter() .convertRexNodeFromResolvedExpr( computedColumn.getExpr(), node.getInputScan().getColumnList(), input.getRowType().getFieldList())); fieldNames.add(getTrait().resolveAlias(computedColumn.getColumn())); } for (ResolvedComputedColumn resolvedComputedColumn : node.getAggregateList()) { ResolvedAggregateFunctionCall aggregateFunctionCall = ((ResolvedAggregateFunctionCall) resolvedComputedColumn.getExpr()); if (aggregateFunctionCall.getArgumentList() != null && aggregateFunctionCall.getArgumentList().size() == 1) { ResolvedExpr resolvedExpr = aggregateFunctionCall.getArgumentList().get(0); projects.add( getExpressionConverter() .convertRexNodeFromResolvedExpr( resolvedExpr, node.getInputScan().getColumnList(), input.getRowType().getFieldList())); fieldNames.add(getTrait().resolveAlias(resolvedComputedColumn.getColumn())); } else if (aggregateFunctionCall.getArgumentList() != null && aggregateFunctionCall.getArgumentList().size() > 1) { throw new RuntimeException( aggregateFunctionCall.getFunction().getName() + " has more than one argument."); } } return LogicalProject.create(input, projects, fieldNames); } private AggregateCall convertAggCall(ResolvedComputedColumn computedColumn, int columnRefOff) { ResolvedAggregateFunctionCall 
aggregateFunctionCall = (ResolvedAggregateFunctionCall) computedColumn.getExpr(); if (aggregateFunctionCall.getFunction().getName().equals("avg")) { FunctionSignature signature = aggregateFunctionCall.getSignature(); if (signature .getFunctionArgumentList() .get(0) .getType() .getKind() .equals(TypeKind.TYPE_INT64)) { throw new RuntimeException(AVG_ILLEGAL_LONG_INPUT_TYPE); } } if (aggregateFunctionCall.getDistinct()) { throw new RuntimeException( "Does not support " + aggregateFunctionCall.getFunction().getSqlName() + " DISTINCT. 'SELECT DISTINCT' syntax could be used to deduplicate before" + " aggregation."); } SqlAggFunction sqlAggFunction = (SqlAggFunction) SqlStdOperatorMappingTable.ZETASQL_FUNCTION_TO_CALCITE_SQL_OPERATOR.get( aggregateFunctionCall.getFunction().getName()); if (sqlAggFunction == null) { throw new RuntimeException( "Does not support ZetaSQL aggregate function: " + aggregateFunctionCall.getFunction().getName()); } List<Integer> argList = new ArrayList<>(); for (ResolvedExpr expr : ((ResolvedAggregateFunctionCall) computedColumn.getExpr()).getArgumentList()) { if (expr.nodeKind() == RESOLVED_CAST || expr.nodeKind() == RESOLVED_COLUMN_REF || expr.nodeKind() == RESOLVED_GET_STRUCT_FIELD) { argList.add(columnRefOff); } else { throw new RuntimeException( "Aggregate function only accepts Column Reference or CAST(Column Reference) as its" + " input."); } } RelDataType returnType; if (sqlAggFunction.equals(SqlStdOperatorTable.ANY_VALUE)) { returnType = toSimpleRelDataType( computedColumn.getColumn().getType().getKind(), getCluster().getRexBuilder(), true); } else { returnType = toSimpleRelDataType( computedColumn.getColumn().getType().getKind(), getCluster().getRexBuilder(), false); } String aggName = getTrait().resolveAlias(computedColumn.getColumn()); return AggregateCall.create( sqlAggFunction, false, false, false, argList, -1, RelCollations.EMPTY, returnType, aggName); } }
@cescoffier thanks for the review. > Same comments: > > * how to handle when there are several Pools In the meantime multiple reactive datasources are not handled, once added we will for sure need to handle them the same way we have handled them for the Agroal connection healthcheck. > * you need to be sure to not block the IO Thread. Okay, are there constructs (a bean that I can inject ) that I can use to check if in IO Thread? And if in IO Thread case, what should be done?
public HealthCheckResponse call() { HealthCheckResponseBuilder builder = HealthCheckResponse.named("Reactive PostgreSQL connection health check").up(); try { CompletableFuture<Void> databaseConnectionAttempt = new CompletableFuture<>(); pgPool.query("SELECT 1", ar -> { if (ar.failed()) { builder.down(); } databaseConnectionAttempt.complete(null); }); databaseConnectionAttempt.join(); } catch (Exception exception) { builder.down(); } return builder.build(); }
databaseConnectionAttempt.join();
public HealthCheckResponse call() { HealthCheckResponseBuilder builder = HealthCheckResponse.named("Reactive PostgreSQL connection health check").up(); try { CompletableFuture<Void> databaseConnectionAttempt = new CompletableFuture<>(); pgPool.query("SELECT 1") .execute(ar -> { if (ar.failed()) { builder.down(); } databaseConnectionAttempt.complete(null); }); databaseConnectionAttempt.get(10, TimeUnit.SECONDS); } catch (Exception exception) { builder.down(); } return builder.build(); }
class ReactivePgDataSourceHealthCheck implements HealthCheck { private PgPool pgPool; @PostConstruct protected void init() { pgPool = Arc.container().instance(PgPool.class).get(); } @Override }
class ReactivePgDataSourceHealthCheck implements HealthCheck { private PgPool pgPool; @PostConstruct protected void init() { pgPool = Arc.container().instance(PgPool.class).get(); } @Override }
Noting that the return value is not `@Nullable`. Suggest re-enabling nullness checking here since it would have caught this. (you could perhaps suppress it for other parts of the file you don't want to fix.
public Progress getProgress() { if (currentReader == null) { return null; } Double consumedFraction = currentReader.getFractionConsumed(); if (consumedFraction == null) { return null; } return RestrictionTracker.Progress.from( consumedFraction.doubleValue(), 1 - consumedFraction.doubleValue()); }
return null;
public Progress getProgress() { if (currentReader == null) { return Progress.NONE; } Double consumedFraction = currentReader.getFractionConsumed(); if (consumedFraction == null) { return Progress.NONE; } return Progress.from(consumedFraction, 1 - consumedFraction); }
class BoundedSourceAsSDFRestrictionTracker< BoundedSourceT extends BoundedSource<T>, T> extends RestrictionTracker<BoundedSourceT, TimestampedValue<T>[]> implements HasProgress { private final BoundedSourceT initialRestriction; private final PipelineOptions pipelineOptions; private BoundedSource.BoundedReader<T> currentReader; private boolean claimedAll; BoundedSourceAsSDFRestrictionTracker( BoundedSourceT initialRestriction, PipelineOptions pipelineOptions) { this.initialRestriction = initialRestriction; this.pipelineOptions = pipelineOptions; } @Override public boolean tryClaim(TimestampedValue<T>[] position) { if (claimedAll) { return false; } try { if (currentReader == null) { currentReader = initialRestriction.createReader(pipelineOptions); if (!currentReader.start()) { claimedAll = true; try { currentReader.close(); } finally { currentReader = null; } return false; } position[0] = TimestampedValue.of( currentReader.getCurrent(), currentReader.getCurrentTimestamp()); return true; } if (!currentReader.advance()) { claimedAll = true; try { currentReader.close(); } finally { currentReader = null; } return false; } position[0] = TimestampedValue.of(currentReader.getCurrent(), currentReader.getCurrentTimestamp()); return true; } catch (IOException e) { if (currentReader != null) { try { currentReader.close(); } catch (IOException closeException) { e.addSuppressed(closeException); } finally { currentReader = null; } } throw new RuntimeException(e); } } @Override protected void finalize() throws Throwable { if (currentReader != null) { try { currentReader.close(); } catch (IOException e) { LOG.error("Failed to close BoundedReader due to failure processing bundle.", e); } } } /** The value is invalid if {@link @Override public BoundedSourceT currentRestriction() { if (currentReader == null) { return initialRestriction; } return (BoundedSourceT) currentReader.getCurrentSource(); } @Override public SplitResult<BoundedSourceT> trySplit(double fractionOfRemainder) { if 
(currentReader == null) { return null; } Double consumedFraction = currentReader.getFractionConsumed(); double fraction = fractionOfRemainder; if (consumedFraction != null) { fraction = consumedFraction + (1 - consumedFraction) * fractionOfRemainder; } BoundedSource<T> residual = currentReader.splitAtFraction(fraction); if (residual == null) { return null; } BoundedSource<T> primary = currentReader.getCurrentSource(); return (SplitResult<BoundedSourceT>) SplitResult.of(primary, residual); } @Override public void checkDone() throws IllegalStateException { checkState( claimedAll, "Expected all records to have been claimed but finished processing " + "bounded source while some records may have not been read."); } @Override public IsBounded isBounded() { return IsBounded.BOUNDED; } @Override }
class BoundedSourceAsSDFRestrictionTracker< BoundedSourceT extends BoundedSource<T>, T> extends RestrictionTracker<BoundedSourceT, TimestampedValue<T>[]> implements HasProgress { private final BoundedSourceT initialRestriction; private final PipelineOptions pipelineOptions; private BoundedSource.BoundedReader<T> currentReader; private boolean claimedAll; BoundedSourceAsSDFRestrictionTracker( BoundedSourceT initialRestriction, PipelineOptions pipelineOptions) { this.initialRestriction = initialRestriction; this.pipelineOptions = pipelineOptions; } @Override public boolean tryClaim(TimestampedValue<T>[] position) { if (claimedAll) { return false; } try { if (currentReader == null) { currentReader = initialRestriction.createReader(pipelineOptions); if (!currentReader.start()) { claimedAll = true; try { currentReader.close(); } finally { currentReader = null; } return false; } position[0] = TimestampedValue.of( currentReader.getCurrent(), currentReader.getCurrentTimestamp()); return true; } if (!currentReader.advance()) { claimedAll = true; try { currentReader.close(); } finally { currentReader = null; } return false; } position[0] = TimestampedValue.of(currentReader.getCurrent(), currentReader.getCurrentTimestamp()); return true; } catch (IOException e) { if (currentReader != null) { try { currentReader.close(); } catch (IOException closeException) { e.addSuppressed(closeException); } finally { currentReader = null; } } throw new RuntimeException(e); } } @Override protected void finalize() throws Throwable { if (currentReader != null) { try { currentReader.close(); } catch (IOException e) { LOG.error("Failed to close BoundedReader due to failure processing bundle.", e); } } } /** The value is invalid if {@link @Override public BoundedSourceT currentRestriction() { if (currentReader == null) { return initialRestriction; } return (BoundedSourceT) currentReader.getCurrentSource(); } @Override public SplitResult<BoundedSourceT> trySplit(double fractionOfRemainder) { if 
(currentReader == null) { return null; } Double consumedFraction = currentReader.getFractionConsumed(); double fraction = fractionOfRemainder; if (consumedFraction != null) { fraction = consumedFraction + (1 - consumedFraction) * fractionOfRemainder; } BoundedSource<T> residual = currentReader.splitAtFraction(fraction); if (residual == null) { return null; } BoundedSource<T> primary = currentReader.getCurrentSource(); return (SplitResult<BoundedSourceT>) SplitResult.of(primary, residual); } @Override public void checkDone() throws IllegalStateException { checkState( claimedAll, "Expected all records to have been claimed but finished processing " + "bounded source while some records may have not been read."); } @Override public IsBounded isBounded() { return IsBounded.BOUNDED; } @Override }
Is this ability to add filters documented somewhere?
public void init(@Observes Filters filters) { filters.register(rc -> { rc.response().putHeader("X-Header", "AAAA"); rc.next(); }, 100); }
filters.register(rc -> {
public void init(@Observes Filters filters) { filters.register(rc -> { rc.response().putHeader("X-Header", "AAAA"); rc.next(); }, 100); }
class DevFilter { }
class DevFilter { }
Do we need to allow this? The debug port won't be reachable in any case.
public void requireThatJvmOptionsAreLogged() throws IOException, SAXException { verifyLoggingOfJvmOptions(true, "options", "-Xms2G foo bar", "foo", "bar"); verifyLoggingOfJvmOptions(true, "options", "$(touch /tmp/hello-from-gc-options)", "$(touch", "/tmp/hello-from-gc-options)"); verifyLoggingOfJvmOptions(false, "options", "$(touch /tmp/hello-from-gc-options)", "$(touch", "/tmp/hello-from-gc-options)"); verifyLoggingOfJvmOptions(true, "options", "-Xms2G"); verifyLoggingOfJvmOptions(true, "options", "-verbose:gc"); verifyLoggingOfJvmOptions(true, "options", "-Djava.library.path=/opt/vespa/lib64:/home/y/lib64 -Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=5005"); verifyLoggingOfJvmOptions(false, "options", "-Xms2G"); }
verifyLoggingOfJvmOptions(true, "options", "-Djava.library.path=/opt/vespa/lib64:/home/y/lib64 -Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=5005");
public void requireThatJvmOptionsAreLogged() throws IOException, SAXException { verifyLoggingOfJvmOptions(true, "options", "-Xms2G foo bar", "foo", "bar"); verifyLoggingOfJvmOptions(true, "options", "$(touch /tmp/hello-from-gc-options)", "$(touch", "/tmp/hello-from-gc-options)"); verifyLoggingOfJvmOptions(true, "options", "-Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=5005", "-Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=5005"); verifyLoggingOfJvmOptions(false, "options", "$(touch /tmp/hello-from-gc-options)", "$(touch", "/tmp/hello-from-gc-options)"); verifyLoggingOfJvmOptions(true, "options", "-Xms2G"); verifyLoggingOfJvmOptions(true, "options", "-verbose:gc"); verifyLoggingOfJvmOptions(true, "options", "-Djava.library.path=/opt/vespa/lib64:/home/y/lib64"); verifyLoggingOfJvmOptions(false, "options", "-Xms2G"); }
class JvmOptionsTest extends ContainerModelBuilderTestBase { @Test public void verify_jvm_tag_with_attributes() throws IOException, SAXException { String servicesXml = "<container version='1.0'>" + " <search/>" + " <nodes>" + " <jvm options='-XX:SoftRefLRUPolicyMSPerMB=2500' gc-options='-XX:+UseParNewGC' allocated-memory='45%'/>" + " <node hostalias='mockhost'/>" + " </nodes>" + "</container>"; ApplicationPackage applicationPackage = new MockApplicationPackage.Builder().withServices(servicesXml).build(); final TestLogger logger = new TestLogger(); VespaModel model = new VespaModel(new NullConfigModelRegistry(), new DeployState.Builder() .applicationPackage(applicationPackage) .deployLogger(logger) .build()); QrStartConfig.Builder qrStartBuilder = new QrStartConfig.Builder(); model.getConfig(qrStartBuilder, "container/container.0"); QrStartConfig qrStartConfig = new QrStartConfig(qrStartBuilder); assertEquals("-XX:+UseParNewGC", qrStartConfig.jvm().gcopts()); assertEquals(45, qrStartConfig.jvm().heapSizeAsPercentageOfPhysicalMemory()); assertEquals("-XX:SoftRefLRUPolicyMSPerMB=2500", model.getContainerClusters().values().iterator().next().getContainers().get(0).getJvmOptions()); } @Test public void detect_conflicting_jvmgcoptions_in_jvmargs() { assertFalse(ContainerModelBuilder.incompatibleGCOptions("")); assertFalse(ContainerModelBuilder.incompatibleGCOptions("UseG1GC")); assertTrue(ContainerModelBuilder.incompatibleGCOptions("-XX:+UseG1GC")); assertTrue(ContainerModelBuilder.incompatibleGCOptions("abc -XX:+UseParNewGC xyz")); assertTrue(ContainerModelBuilder.incompatibleGCOptions("-XX:CMSInitiatingOccupancyFraction=19")); } @Test public void honours_jvm_gc_options() { Element clusterElem = DomBuilderTest.parse( "<container version='1.0'>", " <search/>", " <nodes jvm-gc-options='-XX:+UseG1GC'>", " <node hostalias='mockhost'/>", " </nodes>", "</container>" ); createModel(root, clusterElem); QrStartConfig.Builder qrStartBuilder = new QrStartConfig.Builder(); 
root.getConfig(qrStartBuilder, "container/container.0"); QrStartConfig qrStartConfig = new QrStartConfig(qrStartBuilder); assertEquals("-XX:+UseG1GC", qrStartConfig.jvm().gcopts()); } private static void verifyIgnoreJvmGCOptions(boolean isHosted) throws IOException, SAXException { verifyIgnoreJvmGCOptionsIfJvmArgs("jvmargs", ContainerCluster.G1GC, isHosted); verifyIgnoreJvmGCOptionsIfJvmArgs( "jvm-options", "-XX:+UseG1GC", isHosted); } private static void verifyIgnoreJvmGCOptionsIfJvmArgs(String jvmOptionsName, String expectedGC, boolean isHosted) throws IOException, SAXException { String servicesXml = "<container version='1.0'>" + " <nodes jvm-gc-options='-XX:+UseG1GC' " + jvmOptionsName + "='-XX:+UseParNewGC'>" + " <node hostalias='mockhost'/>" + " </nodes>" + "</container>"; ApplicationPackage applicationPackage = new MockApplicationPackage.Builder().withServices(servicesXml).build(); final TestLogger logger = new TestLogger(); VespaModel model = new VespaModel(new NullConfigModelRegistry(), new DeployState.Builder() .applicationPackage(applicationPackage) .deployLogger(logger) .properties(new TestProperties().setHostedVespa(isHosted)) .build()); QrStartConfig.Builder qrStartBuilder = new QrStartConfig.Builder(); model.getConfig(qrStartBuilder, "container/container.0"); QrStartConfig qrStartConfig = new QrStartConfig(qrStartBuilder); assertEquals(expectedGC, qrStartConfig.jvm().gcopts()); } @Test public void ignores_jvmgcoptions_on_conflicting_jvmargs() throws IOException, SAXException { verifyIgnoreJvmGCOptions(false); verifyIgnoreJvmGCOptions(true); } private void verifyJvmGCOptions(boolean isHosted, String featureFlagDefault, String override, String expected) throws IOException, SAXException { String servicesXml = "<container version='1.0'>" + " <nodes " + ((override == null) ? 
">" : ("jvm-gc-options='" + override + "'>")) + " <node hostalias='mockhost'/>" + " </nodes>" + "</container>"; ApplicationPackage applicationPackage = new MockApplicationPackage.Builder().withServices(servicesXml).build(); final TestLogger logger = new TestLogger(); VespaModel model = new VespaModel(new NullConfigModelRegistry(), new DeployState.Builder() .applicationPackage(applicationPackage) .deployLogger(logger) .properties(new TestProperties().setJvmGCOptions(featureFlagDefault).setHostedVespa(isHosted)) .build()); QrStartConfig.Builder qrStartBuilder = new QrStartConfig.Builder(); model.getConfig(qrStartBuilder, "container/container.0"); QrStartConfig qrStartConfig = new QrStartConfig(qrStartBuilder); assertEquals(expected, qrStartConfig.jvm().gcopts()); } @Test public void requireThatJvmGCOptionsIsHonoured() throws IOException, SAXException { verifyJvmGCOptions(false, null, null, ContainerCluster.G1GC); verifyJvmGCOptions(true, null, null, ContainerCluster.PARALLEL_GC); verifyJvmGCOptions(true, "", null, ContainerCluster.PARALLEL_GC); verifyJvmGCOptions(false, "-XX:+UseG1GC", null, "-XX:+UseG1GC"); verifyJvmGCOptions(true, "-XX:+UseG1GC", null, "-XX:+UseG1GC"); verifyJvmGCOptions(false, null, "-XX:+UseG1GC", "-XX:+UseG1GC"); verifyJvmGCOptions(false, "-XX:+UseParallelGC", "-XX:+UseG1GC", "-XX:+UseG1GC"); verifyJvmGCOptions(false, null, "-XX:+UseParallelGC", "-XX:+UseParallelGC"); } @Test public void requireThatInvalidJvmGcOptionsAreLogged() throws IOException, SAXException { verifyLoggingOfJvmGcOptions(true, "-XX:+ParallelGCThreads=8 foo bar", "foo", "bar"); verifyLoggingOfJvmGcOptions(true, "-XX:+UseCMSInitiatingOccupancyOnly foo bar", "-XX:+UseCMSInitiatingOccupancyOnly", "foo", "bar"); verifyLoggingOfJvmGcOptions(true, "-XX:+UseConcMarkSweepGC", "-XX:+UseConcMarkSweepGC"); verifyLoggingOfJvmGcOptions(true, "$(touch /tmp/hello-from-gc-options)", "$(touch", "/tmp/hello-from-gc-options)"); verifyLoggingOfJvmGcOptions(false, "$(touch 
/tmp/hello-from-gc-options)", "$(touch", "/tmp/hello-from-gc-options)"); verifyLoggingOfJvmGcOptions(true, "-XX:+ParallelGCThreads=8"); verifyLoggingOfJvmGcOptions(true, "-XX:MaxTenuringThreshold"); verifyLoggingOfJvmGcOptions(false, "-XX:+UseConcMarkSweepGC"); } @Test public void requireThatInvalidJvmGcOptionsFailDeployment() throws IOException, SAXException { try { buildModelWithJvmOptions(new TestProperties().setHostedVespa(true).failDeploymentWithInvalidJvmOptions(true), new TestLogger(), "gc-options", "-XX:+ParallelGCThreads=8 foo bar"); fail(); } catch (IllegalArgumentException e) { assertTrue(e.getMessage().contains("Invalid JVM GC options in services.xml: bar,foo")); } } private void verifyLoggingOfJvmGcOptions(boolean isHosted, String override, String... invalidOptions) throws IOException, SAXException { verifyLoggingOfJvmOptions(isHosted, "gc-options", override, invalidOptions); } private void verifyLoggingOfJvmOptions(boolean isHosted, String optionName, String override, String... invalidOptions) throws IOException, SAXException { TestLogger logger = new TestLogger(); buildModelWithJvmOptions(isHosted, logger, optionName, override); List<String> strings = Arrays.asList(invalidOptions.clone()); if (strings.isEmpty()) { assertEquals(logger.msgs.size() > 0 ? logger.msgs.get(0).getSecond() : "", 0, logger.msgs.size()); return; } Collections.sort(strings); Pair<Level, String> firstOption = logger.msgs.get(0); assertEquals(Level.WARNING, firstOption.getFirst()); assertEquals("Invalid JVM " + (optionName.equals("gc-options") ? 
"GC " : "") + "options in services.xml: " + String.join(",", strings), firstOption.getSecond()); } private void buildModelWithJvmOptions(boolean isHosted, TestLogger logger, String optionName, String override) throws IOException, SAXException { buildModelWithJvmOptions(new TestProperties().setHostedVespa(isHosted), logger, optionName, override); } private void buildModelWithJvmOptions(TestProperties properties, TestLogger logger, String optionName, String override) throws IOException, SAXException { String servicesXml = "<container version='1.0'>" + " <nodes>" + " <jvm " + optionName + "='" + override + "'/>" + " <node hostalias='mockhost'/>" + " </nodes>" + "</container>"; ApplicationPackage app = new MockApplicationPackage.Builder().withServices(servicesXml).build(); new VespaModel(new NullConfigModelRegistry(), new DeployState.Builder() .applicationPackage(app) .deployLogger(logger) .properties(properties) .build()); } @Test @Test public void requireThatInvalidJvmOptionsFailDeployment() throws IOException, SAXException { try { buildModelWithJvmOptions(new TestProperties().setHostedVespa(true).failDeploymentWithInvalidJvmOptions(true), new TestLogger(), "options", "-Xms2G foo bar"); fail(); } catch (IllegalArgumentException e) { assertTrue(e.getMessage().contains("Invalid JVM options in services.xml: bar,foo")); } } }
class JvmOptionsTest extends ContainerModelBuilderTestBase { @Test public void verify_jvm_tag_with_attributes() throws IOException, SAXException { String servicesXml = "<container version='1.0'>" + " <search/>" + " <nodes>" + " <jvm options='-XX:SoftRefLRUPolicyMSPerMB=2500' gc-options='-XX:+UseParNewGC' allocated-memory='45%'/>" + " <node hostalias='mockhost'/>" + " </nodes>" + "</container>"; ApplicationPackage applicationPackage = new MockApplicationPackage.Builder().withServices(servicesXml).build(); final TestLogger logger = new TestLogger(); VespaModel model = new VespaModel(new NullConfigModelRegistry(), new DeployState.Builder() .applicationPackage(applicationPackage) .deployLogger(logger) .build()); QrStartConfig.Builder qrStartBuilder = new QrStartConfig.Builder(); model.getConfig(qrStartBuilder, "container/container.0"); QrStartConfig qrStartConfig = new QrStartConfig(qrStartBuilder); assertEquals("-XX:+UseParNewGC", qrStartConfig.jvm().gcopts()); assertEquals(45, qrStartConfig.jvm().heapSizeAsPercentageOfPhysicalMemory()); assertEquals("-XX:SoftRefLRUPolicyMSPerMB=2500", model.getContainerClusters().values().iterator().next().getContainers().get(0).getJvmOptions()); } @Test public void detect_conflicting_jvmgcoptions_in_jvmargs() { assertFalse(ContainerModelBuilder.incompatibleGCOptions("")); assertFalse(ContainerModelBuilder.incompatibleGCOptions("UseG1GC")); assertTrue(ContainerModelBuilder.incompatibleGCOptions("-XX:+UseG1GC")); assertTrue(ContainerModelBuilder.incompatibleGCOptions("abc -XX:+UseParNewGC xyz")); assertTrue(ContainerModelBuilder.incompatibleGCOptions("-XX:CMSInitiatingOccupancyFraction=19")); } @Test public void honours_jvm_gc_options() { Element clusterElem = DomBuilderTest.parse( "<container version='1.0'>", " <search/>", " <nodes jvm-gc-options='-XX:+UseG1GC'>", " <node hostalias='mockhost'/>", " </nodes>", "</container>" ); createModel(root, clusterElem); QrStartConfig.Builder qrStartBuilder = new QrStartConfig.Builder(); 
root.getConfig(qrStartBuilder, "container/container.0"); QrStartConfig qrStartConfig = new QrStartConfig(qrStartBuilder); assertEquals("-XX:+UseG1GC", qrStartConfig.jvm().gcopts()); } private static void verifyIgnoreJvmGCOptions(boolean isHosted) throws IOException, SAXException { verifyIgnoreJvmGCOptionsIfJvmArgs("jvmargs", ContainerCluster.G1GC, isHosted); verifyIgnoreJvmGCOptionsIfJvmArgs( "jvm-options", "-XX:+UseG1GC", isHosted); } private static void verifyIgnoreJvmGCOptionsIfJvmArgs(String jvmOptionsName, String expectedGC, boolean isHosted) throws IOException, SAXException { String servicesXml = "<container version='1.0'>" + " <nodes jvm-gc-options='-XX:+UseG1GC' " + jvmOptionsName + "='-XX:+UseParNewGC'>" + " <node hostalias='mockhost'/>" + " </nodes>" + "</container>"; ApplicationPackage applicationPackage = new MockApplicationPackage.Builder().withServices(servicesXml).build(); final TestLogger logger = new TestLogger(); VespaModel model = new VespaModel(new NullConfigModelRegistry(), new DeployState.Builder() .applicationPackage(applicationPackage) .deployLogger(logger) .properties(new TestProperties().setHostedVespa(isHosted)) .build()); QrStartConfig.Builder qrStartBuilder = new QrStartConfig.Builder(); model.getConfig(qrStartBuilder, "container/container.0"); QrStartConfig qrStartConfig = new QrStartConfig(qrStartBuilder); assertEquals(expectedGC, qrStartConfig.jvm().gcopts()); } @Test public void ignores_jvmgcoptions_on_conflicting_jvmargs() throws IOException, SAXException { verifyIgnoreJvmGCOptions(false); verifyIgnoreJvmGCOptions(true); } private void verifyJvmGCOptions(boolean isHosted, String featureFlagDefault, String override, String expected) throws IOException, SAXException { String servicesXml = "<container version='1.0'>" + " <nodes " + ((override == null) ? 
">" : ("jvm-gc-options='" + override + "'>")) + " <node hostalias='mockhost'/>" + " </nodes>" + "</container>"; ApplicationPackage applicationPackage = new MockApplicationPackage.Builder().withServices(servicesXml).build(); final TestLogger logger = new TestLogger(); VespaModel model = new VespaModel(new NullConfigModelRegistry(), new DeployState.Builder() .applicationPackage(applicationPackage) .deployLogger(logger) .properties(new TestProperties().setJvmGCOptions(featureFlagDefault).setHostedVespa(isHosted)) .build()); QrStartConfig.Builder qrStartBuilder = new QrStartConfig.Builder(); model.getConfig(qrStartBuilder, "container/container.0"); QrStartConfig qrStartConfig = new QrStartConfig(qrStartBuilder); assertEquals(expected, qrStartConfig.jvm().gcopts()); } @Test public void requireThatJvmGCOptionsIsHonoured() throws IOException, SAXException { verifyJvmGCOptions(false, null, null, ContainerCluster.G1GC); verifyJvmGCOptions(true, null, null, ContainerCluster.PARALLEL_GC); verifyJvmGCOptions(true, "", null, ContainerCluster.PARALLEL_GC); verifyJvmGCOptions(false, "-XX:+UseG1GC", null, "-XX:+UseG1GC"); verifyJvmGCOptions(true, "-XX:+UseG1GC", null, "-XX:+UseG1GC"); verifyJvmGCOptions(false, null, "-XX:+UseG1GC", "-XX:+UseG1GC"); verifyJvmGCOptions(false, "-XX:+UseParallelGC", "-XX:+UseG1GC", "-XX:+UseG1GC"); verifyJvmGCOptions(false, null, "-XX:+UseParallelGC", "-XX:+UseParallelGC"); } @Test public void requireThatInvalidJvmGcOptionsAreLogged() throws IOException, SAXException { verifyLoggingOfJvmGcOptions(true, "-XX:+ParallelGCThreads=8 foo bar", "foo", "bar"); verifyLoggingOfJvmGcOptions(true, "-XX:+UseCMSInitiatingOccupancyOnly foo bar", "-XX:+UseCMSInitiatingOccupancyOnly", "foo", "bar"); verifyLoggingOfJvmGcOptions(true, "-XX:+UseConcMarkSweepGC", "-XX:+UseConcMarkSweepGC"); verifyLoggingOfJvmGcOptions(true, "$(touch /tmp/hello-from-gc-options)", "$(touch", "/tmp/hello-from-gc-options)"); verifyLoggingOfJvmGcOptions(false, "$(touch 
/tmp/hello-from-gc-options)", "$(touch", "/tmp/hello-from-gc-options)"); verifyLoggingOfJvmGcOptions(true, "-XX:+ParallelGCThreads=8"); verifyLoggingOfJvmGcOptions(true, "-XX:MaxTenuringThreshold"); verifyLoggingOfJvmGcOptions(false, "-XX:+UseConcMarkSweepGC"); } @Test public void requireThatInvalidJvmGcOptionsFailDeployment() throws IOException, SAXException { try { buildModelWithJvmOptions(new TestProperties().setHostedVespa(true).failDeploymentWithInvalidJvmOptions(true), new TestLogger(), "gc-options", "-XX:+ParallelGCThreads=8 foo bar"); fail(); } catch (IllegalArgumentException e) { assertTrue(e.getMessage().contains("Invalid JVM GC options in services.xml: bar,foo")); } } private void verifyLoggingOfJvmGcOptions(boolean isHosted, String override, String... invalidOptions) throws IOException, SAXException { verifyLoggingOfJvmOptions(isHosted, "gc-options", override, invalidOptions); } private void verifyLoggingOfJvmOptions(boolean isHosted, String optionName, String override, String... invalidOptions) throws IOException, SAXException { TestLogger logger = new TestLogger(); buildModelWithJvmOptions(isHosted, logger, optionName, override); List<String> strings = Arrays.asList(invalidOptions.clone()); if (strings.isEmpty()) { assertEquals(logger.msgs.size() > 0 ? logger.msgs.get(0).getSecond() : "", 0, logger.msgs.size()); return; } assertTrue("Expected 1 or more log messages for invalid JM options, got none", logger.msgs.size() > 0); Pair<Level, String> firstOption = logger.msgs.get(0); assertEquals(Level.WARNING, firstOption.getFirst()); Collections.sort(strings); assertEquals("Invalid JVM " + (optionName.equals("gc-options") ? 
"GC " : "") + "options in services.xml: " + String.join(",", strings), firstOption.getSecond()); } private void buildModelWithJvmOptions(boolean isHosted, TestLogger logger, String optionName, String override) throws IOException, SAXException { buildModelWithJvmOptions(new TestProperties().setHostedVespa(isHosted), logger, optionName, override); } private void buildModelWithJvmOptions(TestProperties properties, TestLogger logger, String optionName, String override) throws IOException, SAXException { String servicesXml = "<container version='1.0'>" + " <nodes>" + " <jvm " + optionName + "='" + override + "'/>" + " <node hostalias='mockhost'/>" + " </nodes>" + "</container>"; ApplicationPackage app = new MockApplicationPackage.Builder().withServices(servicesXml).build(); new VespaModel(new NullConfigModelRegistry(), new DeployState.Builder() .applicationPackage(app) .deployLogger(logger) .properties(properties) .build()); } @Test @Test public void requireThatInvalidJvmOptionsFailDeployment() throws IOException, SAXException { try { buildModelWithJvmOptions(new TestProperties().setHostedVespa(true).failDeploymentWithInvalidJvmOptions(true), new TestLogger(), "options", "-Xms2G foo bar"); fail(); } catch (IllegalArgumentException e) { assertTrue(e.getMessage().contains("Invalid JVM options in services.xml: bar,foo")); } } }
Sounds like a pragmatic solution to me
private Optional<RunStatus> deployTester(RunId id, DualLogger logger) { Version targetPlatform = controller.jobController().run(id).get().versions().targetPlatform(); final Version platform = targetPlatform.equals(Version.fromString("7.220.14")) ? targetPlatform : controller.systemVersion(); logger.log("Deploying the tester container on platform " + platform + " ..."); return deploy(id.tester().id(), id.type(), () -> controller.applications().deployTester(id.tester(), testerPackage(id), id.type().zone(controller.system()), platform), controller.jobController().run(id).get() .stepInfo(deployTester).get() .startTime().get(), logger); }
final Version platform = targetPlatform.equals(Version.fromString("7.220.14"))
private Optional<RunStatus> deployTester(RunId id, DualLogger logger) { Version targetPlatform = controller.jobController().run(id).get().versions().targetPlatform(); final Version platform = targetPlatform.equals(Version.fromString("7.220.14")) ? targetPlatform : controller.systemVersion(); logger.log("Deploying the tester container on platform " + platform + " ..."); return deploy(id.tester().id(), id.type(), () -> controller.applications().deployTester(id.tester(), testerPackage(id), id.type().zone(controller.system()), platform), controller.jobController().run(id).get() .stepInfo(deployTester).get() .startTime().get(), logger); }
class InternalStepRunner implements StepRunner { private static final Logger logger = Logger.getLogger(InternalStepRunner.class.getName()); static final NodeResources DEFAULT_TESTER_RESOURCES = new NodeResources(1, 4, 50, 0.3, NodeResources.DiskSpeed.any); static final NodeResources DEFAULT_TESTER_RESOURCES_AWS = new NodeResources(2, 8, 50, 0.3, NodeResources.DiskSpeed.any); private final Controller controller; private final TestConfigSerializer testConfigSerializer; private final DeploymentFailureMails mails; private final Timeouts timeouts; public InternalStepRunner(Controller controller) { this.controller = controller; this.testConfigSerializer = new TestConfigSerializer(controller.system()); this.mails = new DeploymentFailureMails(controller.zoneRegistry()); this.timeouts = Timeouts.of(controller.system()); } @Override public Optional<RunStatus> run(LockedStep step, RunId id) { DualLogger logger = new DualLogger(id, step.get()); try { switch (step.get()) { case deployTester: return deployTester(id, logger); case deployInitialReal: return deployInitialReal(id, logger); case installInitialReal: return installInitialReal(id, logger); case deployReal: return deployReal(id, logger); case installTester: return installTester(id, logger); case installReal: return installReal(id, logger); case startStagingSetup: return startTests(id, true, logger); case endStagingSetup: case endTests: return endTests(id, logger); case startTests: return startTests(id, false, logger); case copyVespaLogs: return copyVespaLogs(id, logger); case deactivateReal: return deactivateReal(id, logger); case deactivateTester: return deactivateTester(id, logger); case report: return report(id, logger); default: throw new AssertionError("Unknown step '" + step + "'!"); } } catch (UncheckedIOException e) { logger.logWithInternalException(INFO, "IO exception running " + id + ": " + Exceptions.toMessageString(e), e); return Optional.empty(); } catch (RuntimeException e) { logger.log(WARNING, "Unexpected 
exception running " + id, e); if (step.get().alwaysRun()) { logger.log("Will keep trying, as this is a cleanup step."); return Optional.empty(); } return Optional.of(error); } } private Optional<RunStatus> deployInitialReal(RunId id, DualLogger logger) { Versions versions = controller.jobController().run(id).get().versions(); logger.log("Deploying platform version " + versions.sourcePlatform().orElse(versions.targetPlatform()) + " and application version " + versions.sourceApplication().orElse(versions.targetApplication()).id() + " ..."); return deployReal(id, true, logger); } private Optional<RunStatus> deployReal(RunId id, DualLogger logger) { Versions versions = controller.jobController().run(id).get().versions(); logger.log("Deploying platform version " + versions.targetPlatform() + " and application version " + versions.targetApplication().id() + " ..."); return deployReal(id, false, logger); } private Optional<RunStatus> deployReal(RunId id, boolean setTheStage, DualLogger logger) { return deploy(id.application(), id.type(), () -> controller.applications().deploy2(id.job(), setTheStage), controller.jobController().run(id).get() .stepInfo(setTheStage ? deployInitialReal : deployReal).get() .startTime().get(), logger); } private Optional<RunStatus> deploy(ApplicationId id, JobType type, Supplier<ActivateResult> deployment, Instant startTime, DualLogger logger) { try { PrepareResponse prepareResponse = deployment.get().prepareResponse(); if (prepareResponse.log != null) logger.logAll(prepareResponse.log.stream() .map(entry -> new LogEntry(0, Instant.ofEpochMilli(entry.time), LogEntry.typeOf(LogLevel.parse(entry.level)), entry.message)) .collect(toList())); if ( ! prepareResponse.configChangeActions.refeedActions.stream().allMatch(action -> action.allowed)) { List<String> messages = new ArrayList<>(); messages.add("Deploy failed due to non-compatible changes that require re-feed."); messages.add("Your options are:"); messages.add("1. 
Revert the incompatible changes."); messages.add("2. If you think it is safe in your case, you can override this validation, see"); messages.add(" http: messages.add("3. Deploy as a new application under a different name."); messages.add("Illegal actions:"); prepareResponse.configChangeActions.refeedActions.stream() .filter(action -> ! action.allowed) .flatMap(action -> action.messages.stream()) .forEach(messages::add); logger.log(messages); return Optional.of(deploymentFailed); } if (prepareResponse.configChangeActions.restartActions.isEmpty()) logger.log("No services requiring restart."); else prepareResponse.configChangeActions.restartActions.stream() .flatMap(action -> action.services.stream()) .map(service -> service.hostName) .sorted().distinct() .map(Hostname::new) .forEach(hostname -> { controller.applications().restart(new DeploymentId(id, type.zone(controller.system())), Optional.of(hostname)); logger.log("Schedule service restart on host " + hostname.id() + "."); }); logger.log("Deployment successful."); if (prepareResponse.message != null) logger.log(prepareResponse.message); return Optional.of(running); } catch (ConfigServerException e) { Optional<RunStatus> result = startTime.isBefore(controller.clock().instant().minus(Duration.ofHours(1))) ? 
Optional.of(deploymentFailed) : Optional.empty(); switch (e.getErrorCode()) { case CERTIFICATE_NOT_READY: if (startTime.plus(timeouts.endpointCertificate()).isBefore(controller.clock().instant())) { logger.log("Deployment failed to find provisioned endpoint certificate after " + timeouts.endpointCertificate()); return Optional.of(RunStatus.endpointCertificateTimeout); } return result; case ACTIVATION_CONFLICT: case APPLICATION_LOCK_FAILURE: logger.log("Deployment failed with possibly transient error " + e.getErrorCode() + ", will retry: " + e.getMessage()); return result; case LOAD_BALANCER_NOT_READY: case PARENT_HOST_NOT_READY: logger.log(e.getServerMessage()); return result; case OUT_OF_CAPACITY: logger.log(e.getServerMessage()); return controller.system().isCd() && startTime.plus(timeouts.capacity()).isAfter(controller.clock().instant()) ? Optional.empty() : Optional.of(outOfCapacity); case INVALID_APPLICATION_PACKAGE: case BAD_REQUEST: logger.log(e.getMessage()); return Optional.of(deploymentFailed); } throw e; } catch (EndpointCertificateException e) { switch (e.type()) { case CERT_NOT_AVAILABLE: if (startTime.plus(timeouts.endpointCertificate()).isBefore(controller.clock().instant())) { logger.log("Deployment failed to find provisioned endpoint certificate after " + timeouts.endpointCertificate()); return Optional.of(RunStatus.endpointCertificateTimeout); } return Optional.empty(); default: throw e; } } } private Optional<RunStatus> installInitialReal(RunId id, DualLogger logger) { return installReal(id, true, logger); } private Optional<RunStatus> installReal(RunId id, DualLogger logger) { return installReal(id, false, logger); } private Optional<RunStatus> installReal(RunId id, boolean setTheStage, DualLogger logger) { Optional<Deployment> deployment = deployment(id.application(), id.type()); if (deployment.isEmpty()) { logger.log(INFO, "Deployment expired before installation was successful."); return Optional.of(installationFailed); } Versions versions = 
controller.jobController().run(id).get().versions(); Version platform = setTheStage ? versions.sourcePlatform().orElse(versions.targetPlatform()) : versions.targetPlatform(); Run run = controller.jobController().run(id).get(); Optional<ServiceConvergence> services = controller.serviceRegistry().configServer().serviceConvergence(new DeploymentId(id.application(), id.type().zone(controller.system())), Optional.of(platform)); if (services.isEmpty()) { logger.log("Config status not currently available -- will retry."); return Optional.empty(); } List<Node> nodes = controller.serviceRegistry().configServer().nodeRepository().list(id.type().zone(controller.system()), id.application(), ImmutableSet.of(active, reserved)); List<Node> parents = controller.serviceRegistry().configServer().nodeRepository().list(id.type().zone(controller.system()), nodes.stream().map(node -> node.parentHostname().get()).collect(toList())); NodeList nodeList = NodeList.of(nodes, parents, services.get()); boolean firstTick = run.convergenceSummary().isEmpty(); if (firstTick) { logger.log(" logger.log(nodeList.asList().stream() .flatMap(node -> nodeDetails(node, true)) .collect(toList())); } ConvergenceSummary summary = nodeList.summary(); if (summary.converged()) { controller.jobController().locked(id, lockedRun -> lockedRun.withSummary(null)); if (endpointsAvailable(id.application(), id.type().zone(controller.system()), logger)) { if (containersAreUp(id.application(), id.type().zone(controller.system()), logger)) { logger.log("Installation succeeded!"); return Optional.of(running); } } else if (timedOut(id, deployment.get(), timeouts.endpoint())) { logger.log(WARNING, "Endpoints failed to show up within " + timeouts.endpoint().toMinutes() + " minutes!"); return Optional.of(error); } } String failureReason = null; NodeList suspendedTooLong = nodeList.suspendedSince(controller.clock().instant().minus(timeouts.nodesDown())); if ( ! 
suspendedTooLong.isEmpty()) { failureReason = "Some nodes have been suspended for more than " + timeouts.nodesDown().toMinutes() + " minutes:\n" + suspendedTooLong.asList().stream().map(node -> node.node().hostname().value()).collect(joining("\n")); } if (run.noNodesDownSince() .map(since -> since.isBefore(controller.clock().instant().minus(timeouts.noNodesDown()))) .orElse(false)) { if (summary.needPlatformUpgrade() > 0 || summary.needReboot() > 0 || summary.needRestart() > 0) failureReason = "No nodes allowed to suspend to progress installation for " + timeouts.noNodesDown().toMinutes() + " minutes."; else failureReason = "Nodes not able to start with new application package."; } Duration timeout = JobRunner.jobTimeout.minusHours(1); if (timedOut(id, deployment.get(), timeout)) { failureReason = "Installation failed to complete within " + timeout.toHours() + "hours!"; } if (failureReason != null) { logger.log(" logger.log(nodeList.asList().stream() .flatMap(node -> nodeDetails(node, true)) .collect(toList())); logger.log(" logger.log(nodeList.not().in(nodeList.not().needsNewConfig() .not().needsPlatformUpgrade() .not().needsReboot() .not().needsRestart() .not().needsFirmwareUpgrade() .not().needsOsUpgrade()) .asList().stream() .flatMap(node -> nodeDetails(node, true)) .collect(toList())); logger.log(INFO, failureReason); return Optional.of(installationFailed); } if ( ! firstTick) logger.log(nodeList.expectedDown().concat(nodeList.needsNewConfig()).asList().stream() .distinct() .flatMap(node -> nodeDetails(node, false)) .collect(toList())); controller.jobController().locked(id, lockedRun -> { Instant noNodesDownSince = nodeList.allowedDown().size() == 0 ? 
// NOTE(review): this file has been machine-collapsed -- each physical line holds many
// statements and definitions are split across line boundaries; restore original
// formatting from version control before editing logic. This span contains:
//  - the tail of installReal: records noNodesDownSince (reset to null while any node is
//    allowed down) and the convergence summary on the locked run, then returns empty
//    (i.e. "still in progress").
//  - installTester: polls service convergence for the tester application; retries while
//    config status is unavailable (error after 5 minutes), succeeds once the node list
//    has converged and the tester container answers, errors after timeouts.tester().
//  - containersAreUp / testerContainersAreUp: health probes via the tester cloud
//    (the javadoc says 100 consecutive 200 OKs on /status.html; the actual counting is
//    inside cloud().ready()/testerReady(), not visible here).
//  - endpointsAvailable: requires a resolvable IP for every endpoint, and for
//    exclusively-routed endpoints also a CNAME matching the routing policy's load
//    balancer, with equal resolved addresses.
//  - logEndpoints, and the start of nodeDetails (per-node human-readable status lines).
lockedRun.noNodesDownSince().orElse(controller.clock().instant()) : null; return lockedRun.noNodesDownSince(noNodesDownSince).withSummary(summary); }); return Optional.empty(); } private Optional<RunStatus> installTester(RunId id, DualLogger logger) { Run run = controller.jobController().run(id).get(); Version platform = controller.systemVersion(); ZoneId zone = id.type().zone(controller.system()); ApplicationId testerId = id.tester().id(); Optional<ServiceConvergence> services = controller.serviceRegistry().configServer().serviceConvergence(new DeploymentId(testerId, zone), Optional.of(platform)); if (services.isEmpty()) { logger.log("Config status not currently available -- will retry."); return run.stepInfo(installTester).get().startTime().get().isBefore(controller.clock().instant().minus(Duration.ofMinutes(5))) ? Optional.of(error) : Optional.empty(); } List<Node> nodes = controller.serviceRegistry().configServer().nodeRepository().list(zone, testerId, ImmutableSet.of(active, reserved)); List<Node> parents = controller.serviceRegistry().configServer().nodeRepository().list(zone, nodes.stream().map(node -> node.parentHostname().get()).collect(toList())); NodeList nodeList = NodeList.of(nodes, parents, services.get()); logger.log(nodeList.asList().stream() .flatMap(node -> nodeDetails(node, false)) .collect(toList())); if (nodeList.summary().converged() && testerContainersAreUp(testerId, zone, logger)) { logger.log("Tester container successfully installed!"); return Optional.of(running); } if (run.stepInfo(installTester).get().startTime().get().plus(timeouts.tester()).isBefore(controller.clock().instant())) { logger.log(WARNING, "Installation of tester failed to complete within " + timeouts.tester().toMinutes() + " minutes!"); return Optional.of(error); } return Optional.empty(); } /** Returns true iff all containers in the deployment give 100 consecutive 200 OK responses on /status.html.
*/ private boolean containersAreUp(ApplicationId id, ZoneId zoneId, DualLogger logger) { var endpoints = controller.routing().zoneEndpointsOf(Set.of(new DeploymentId(id, zoneId))); if ( ! endpoints.containsKey(zoneId)) return false; for (var endpoint : endpoints.get(zoneId)) { boolean ready = controller.jobController().cloud().ready(endpoint.url()); if ( ! ready) { logger.log("Failed to get 100 consecutive OKs from " + endpoint); return false; } } return true; } /** Returns true iff all containers in the tester deployment give 100 consecutive 200 OK responses on /status.html. */ private boolean testerContainersAreUp(ApplicationId id, ZoneId zoneId, DualLogger logger) { DeploymentId deploymentId = new DeploymentId(id, zoneId); if (controller.jobController().cloud().testerReady(deploymentId)) { return true; } else { logger.log("Failed to get 100 consecutive OKs from tester container for " + deploymentId); return false; } } private boolean endpointsAvailable(ApplicationId id, ZoneId zone, DualLogger logger) { var endpoints = controller.routing().zoneEndpointsOf(Set.of(new DeploymentId(id, zone))); if ( ! endpoints.containsKey(zone)) { logger.log("Endpoints not yet ready."); return false; } var policies = controller.routing().policies().get(new DeploymentId(id, zone)); for (var endpoint : endpoints.get(zone)) { HostName endpointName = HostName.from(endpoint.dnsName()); var ipAddress = controller.jobController().cloud().resolveHostName(endpointName); if (ipAddress.isEmpty()) { logger.log(INFO, "DNS lookup yielded no IP address for '" + endpointName + "'."); return false; } if (endpoint.routingMethod() == RoutingMethod.exclusive) { var policy = policies.get(new RoutingPolicyId(id, ClusterSpec.Id.from(endpoint.name()), zone)); if (policy == null) throw new IllegalStateException(endpoint + " has no matching policy in " + policies); var cNameValue = controller.jobController().cloud().resolveCname(endpointName); if ( !
cNameValue.map(policy.canonicalName()::equals).orElse(false)) { logger.log(INFO, "CNAME '" + endpointName + "' points at " + cNameValue.map(name -> "'" + name + "'").orElse("nothing") + " but should point at load balancer '" + policy.canonicalName() + "'"); return false; } var loadBalancerAddress = controller.jobController().cloud().resolveHostName(policy.canonicalName()); if ( ! loadBalancerAddress.equals(ipAddress)) { logger.log(INFO, "IP address of CNAME '" + endpointName + "' (" + ipAddress.get() + ") and load balancer '" + policy.canonicalName() + "' (" + loadBalancerAddress.orElse("empty") + ") are not equal"); return false; } } } logEndpoints(endpoints, logger); return true; } private void logEndpoints(Map<ZoneId, List<Endpoint>> zoneEndpoints, DualLogger logger) { List<String> messages = new ArrayList<>(); messages.add("Found endpoints:"); zoneEndpoints.forEach((zone, endpoints) -> { messages.add("- " + zone); for (Endpoint endpoint : endpoints) messages.add(" |-- " + endpoint.url() + " (cluster '" + endpoint.name() + "')"); }); logger.log(messages); } private Stream<String> nodeDetails(NodeWithServices node, boolean printAllServices) { return Stream.concat(Stream.of(node.node().hostname() + ": " + humanize(node.node().serviceState()) + (node.node().suspendedSince().map(since -> " since " + since).orElse("")), "--- platform " + wantedPlatform(node.node()) + (node.needsPlatformUpgrade() ? " <-- " + currentPlatform(node.node()) : "") + (node.needsOsUpgrade() && node.isAllowedDown() ? ", upgrading OS (" + node.node().wantedOsVersion() + " <-- " + node.node().currentOsVersion() + ")" : "") + (node.needsFirmwareUpgrade() && node.isAllowedDown() ? ", upgrading firmware" : "") + (node.needsRestart() ? ", restart pending (" + node.node().wantedRestartGeneration() + " <-- " + node.node().restartGeneration() + ")" : "") + (node.needsReboot() ?
", reboot pending (" + node.node().wantedRebootGeneration() + " <-- " + node.node().rebootGeneration() + ")" : "")), node.services().stream() .filter(service -> printAllServices || node.needsNewConfig()) .map(service -> "--- " + service.type() + " on port " + service.port() + (service.currentGeneration() == -1 ? " has not started " : " has config generation " + service.currentGeneration() + ", wanted is " + node.wantedConfigGeneration()))); } private String wantedPlatform(Node node) { return node.wantedDockerImage().repository() + ":" + node.wantedVersion(); } private String currentPlatform(Node node) { String currentRepo = node.currentDockerImage().repository(); String wantedRepo = node.wantedDockerImage().repository(); return (currentRepo.equals(wantedRepo) ? "" : currentRepo + ":") + node.currentVersion(); } private String humanize(Node.ServiceState state) { switch (state) { case allowedDown: return "allowed to be DOWN"; case expectedUp: return "expected to be UP"; case unorchestrated: return "unorchestrated"; default: return state.name(); } } private Optional<RunStatus> startTests(RunId id, boolean isSetup, DualLogger logger) { Optional<Deployment> deployment = deployment(id.application(), id.type()); if (deployment.isEmpty()) { logger.log(INFO, "Deployment expired before tests could start."); return Optional.of(error); } var deployments = controller.applications().requireInstance(id.application()) .productionDeployments().keySet().stream() .map(zone -> new DeploymentId(id.application(), zone)) .collect(Collectors.toSet()); ZoneId zoneId = id.type().zone(controller.system()); deployments.add(new DeploymentId(id.application(), zoneId)); logger.log("Attempting to find endpoints ..."); var endpoints = controller.routing().zoneEndpointsOf(deployments); if ( !
// NOTE(review): span contains the tail of startTests (serializes test config and hands
// it to the tester cloud; errors if endpoints vanished or the tester container went
// bad), endTests (aborts if the deployment or the tester certificate expired, otherwise
// maps TesterCloud.Status to a RunStatus -- NOT_STARTED is treated as an illegal state
// because tests were already started by an earlier step), copyVespaLogs (best-effort
// log copy; errors on failure only while the deployment still exists), and
// deactivateReal / deactivateTester, which both retry deactivation for up to one hour
// after the step started before giving up with error.
endpoints.containsKey(zoneId)) { logger.log(WARNING, "Endpoints for the deployment to test vanished again, while it was still active!"); return Optional.of(error); } logEndpoints(endpoints, logger); if (!controller.jobController().cloud().testerReady(getTesterDeploymentId(id))) { logger.log(WARNING, "Tester container went bad!"); return Optional.of(error); } logger.log("Starting tests ..."); TesterCloud.Suite suite = TesterCloud.Suite.of(id.type(), isSetup); byte[] config = testConfigSerializer.configJson(id.application(), id.type(), true, endpoints, controller.applications().contentClustersByZone(deployments)); controller.jobController().cloud().startTests(getTesterDeploymentId(id), suite, config); return Optional.of(running); } private Optional<RunStatus> endTests(RunId id, DualLogger logger) { if (deployment(id.application(), id.type()).isEmpty()) { logger.log(INFO, "Deployment expired before tests could complete."); return Optional.of(aborted); } Optional<X509Certificate> testerCertificate = controller.jobController().run(id).get().testerCertificate(); if (testerCertificate.isPresent()) { try { testerCertificate.get().checkValidity(Date.from(controller.clock().instant())); } catch (CertificateExpiredException | CertificateNotYetValidException e) { logger.log(INFO, "Tester certificate expired before tests could complete."); return Optional.of(aborted); } } controller.jobController().updateTestLog(id); TesterCloud.Status testStatus = controller.jobController().cloud().getStatus(getTesterDeploymentId(id)); switch (testStatus) { case NOT_STARTED: throw new IllegalStateException("Tester reports tests not started, even though they should have!"); case RUNNING: return Optional.empty(); case FAILURE: logger.log("Tests failed."); return Optional.of(testFailure); case ERROR: logger.log(INFO, "Tester failed running its tests!"); return Optional.of(error); case SUCCESS: logger.log("Tests completed successfully."); return Optional.of(running); default: throw new
IllegalStateException("Unknown status '" + testStatus + "'!"); } } private Optional<RunStatus> copyVespaLogs(RunId id, DualLogger logger) { if (deployment(id.application(), id.type()).isPresent()) try { controller.jobController().updateVespaLog(id); } catch (Exception e) { logger.log(INFO, "Failure getting vespa logs for " + id, e); return Optional.of(error); } return Optional.of(running); } private Optional<RunStatus> deactivateReal(RunId id, DualLogger logger) { try { logger.log("Deactivating deployment of " + id.application() + " in " + id.type().zone(controller.system()) + " ..."); controller.applications().deactivate(id.application(), id.type().zone(controller.system())); return Optional.of(running); } catch (RuntimeException e) { logger.log(WARNING, "Failed deleting application " + id.application(), e); Instant startTime = controller.jobController().run(id).get().stepInfo(deactivateReal).get().startTime().get(); return startTime.isBefore(controller.clock().instant().minus(Duration.ofHours(1))) ? Optional.of(error) : Optional.empty(); } } private Optional<RunStatus> deactivateTester(RunId id, DualLogger logger) { try { logger.log("Deactivating tester of " + id.application() + " in " + id.type().zone(controller.system()) + " ..."); controller.jobController().deactivateTester(id.tester(), id.type()); return Optional.of(running); } catch (RuntimeException e) { logger.log(WARNING, "Failed deleting tester of " + id.application(), e); Instant startTime = controller.jobController().run(id).get().stepInfo(deactivateTester).get().startTime().get(); return startTime.isBefore(controller.clock().instant().minus(Duration.ofHours(1))) ?
// NOTE(review): span contains report (sends a failure notification for a failed run;
// an IllegalStateException from jobController().active() is treated as "job no longer
// supposed to run" and mapped to error), sendNotification (picks failingCommit vs
// failing recipients from the instance's deployment-spec notifications, optionally
// adding the commit author, and swallows mailer RuntimeExceptions after logging),
// mailOf (RunStatus -> DeploymentFailureMails mapping; running/aborted/success send
// nothing, outOfCapacity only mails for production jobs, unknown statuses fall back to
// a system-error mail), the deployment/application helpers (application() takes and
// releases the application lock purely to serialize against concurrent writers), the
// timedOut helper (its javadoc below explains the min(zone TTL, default) - 1 minute
// threshold; in CD systems runs started after the deployment are never timed out),
// testerPackage (zips generated services.xml + deployment.xml around the stored tester
// fat-jar, appending a tester certificate for public-system test environments), and
// the start of appendAndStoreCertificate (2048-bit RSA keypair; subject CN built from
// the tester id, job type and run number).
Optional.of(error) : Optional.empty(); } } private Optional<RunStatus> report(RunId id, DualLogger logger) { try { controller.jobController().active(id).ifPresent(run -> { if (run.hasFailed()) sendNotification(run, logger); }); } catch (IllegalStateException e) { logger.log(INFO, "Job '" + id.type() + "' no longer supposed to run?", e); return Optional.of(error); } return Optional.of(running); } /** Sends a mail with a notification of a failed run, if one should be sent. */ private void sendNotification(Run run, DualLogger logger) { Application application = controller.applications().requireApplication(TenantAndApplicationId.from(run.id().application())); Notifications notifications = application.deploymentSpec().requireInstance(run.id().application().instance()).notifications(); boolean newCommit = application.require(run.id().application().instance()).change().application() .map(run.versions().targetApplication()::equals) .orElse(false); When when = newCommit ? failingCommit : failing; List<String> recipients = new ArrayList<>(notifications.emailAddressesFor(when)); if (notifications.emailRolesFor(when).contains(author)) run.versions().targetApplication().authorEmail().ifPresent(recipients::add); if (recipients.isEmpty()) return; try { logger.log(INFO, "Sending failure notification to " + String.join(", ", recipients)); mailOf(run, recipients).ifPresent(controller.serviceRegistry().mailer()::send); } catch (RuntimeException e) { logger.log(INFO, "Exception trying to send mail for " + run.id(), e); } } private Optional<Mail> mailOf(Run run, List<String> recipients) { switch (run.status()) { case running: case aborted: case success: return Optional.empty(); case outOfCapacity: return run.id().type().isProduction() ?
Optional.of(mails.outOfCapacity(run.id(), recipients)) : Optional.empty(); case deploymentFailed: return Optional.of(mails.deploymentFailure(run.id(), recipients)); case installationFailed: return Optional.of(mails.installationFailure(run.id(), recipients)); case testFailure: return Optional.of(mails.testFailure(run.id(), recipients)); case error: case endpointCertificateTimeout: return Optional.of(mails.systemError(run.id(), recipients)); default: logger.log(WARNING, "Don't know what mail to send for run status '" + run.status() + "'"); return Optional.of(mails.systemError(run.id(), recipients)); } } /** Returns the deployment of the real application in the zone of the given job, if it exists. */ private Optional<Deployment> deployment(ApplicationId id, JobType type) { return Optional.ofNullable(application(id).deployments().get(type.zone(controller.system()))); } /** Returns the real application with the given id. */ private Instance application(ApplicationId id) { controller.applications().lockApplicationOrThrow(TenantAndApplicationId.from(id), __ -> { }); return controller.applications().requireInstance(id); } /** * Returns whether the time since deployment is more than the zone deployment expiry, or the given timeout. * * We time out the job before the deployment expires, for zones where deployments are not persistent, * to be able to collect the Vespa log from the deployment. Thus, the lower of the zone's deployment expiry, * and the given default installation timeout, minus one minute, is used as a timeout threshold. */ private boolean timedOut(RunId id, Deployment deployment, Duration defaultTimeout) { Run run = controller.jobController().run(id).get(); if ( !
controller.system().isCd() && run.start().isAfter(deployment.at())) return false; Duration timeout = controller.zoneRegistry().getDeploymentTimeToLive(deployment.zone()) .filter(zoneTimeout -> zoneTimeout.compareTo(defaultTimeout) < 0) .orElse(defaultTimeout); return deployment.at().isBefore(controller.clock().instant().minus(timeout.minus(Duration.ofMinutes(1)))); } /** Returns the application package for the tester application, assembled from a generated config, fat-jar and services.xml. */ private ApplicationPackage testerPackage(RunId id) { ApplicationVersion version = controller.jobController().run(id).get().versions().targetApplication(); DeploymentSpec spec = controller.applications().requireApplication(TenantAndApplicationId.from(id.application())).deploymentSpec(); ZoneId zone = id.type().zone(controller.system()); boolean useTesterCertificate = controller.system().isPublic() && id.type().environment().isTest(); byte[] servicesXml = servicesXml(! controller.system().isPublic(), useTesterCertificate, testerResourcesFor(zone, spec.requireInstance(id.application().instance()))); byte[] testPackage = controller.applications().applicationStore().getTester(id.application().tenant(), id.application().application(), version); byte[] deploymentXml = deploymentXml(id.tester(), spec.athenzDomain(), spec.requireInstance(id.application().instance()).athenzService(zone.environment(), zone.region())); try (ZipBuilder zipBuilder = new ZipBuilder(testPackage.length + servicesXml.length + 1000)) { zipBuilder.add(testPackage); zipBuilder.add("services.xml", servicesXml); zipBuilder.add("deployment.xml", deploymentXml); if (useTesterCertificate) appendAndStoreCertificate(zipBuilder, id); zipBuilder.close(); return new ApplicationPackage(zipBuilder.toByteArray()); } } private void appendAndStoreCertificate(ZipBuilder zipBuilder, RunId id) { KeyPair keyPair = KeyUtils.generateKeypair(KeyAlgorithm.RSA, 2048); X500Principal subject = new X500Principal("CN=" +
// NOTE(review): span contains the tail of appendAndStoreCertificate (stores the tester
// certificate on the run and adds PEM key/cert under artifacts/ in the zip),
// getTesterDeploymentId, testerResourcesFor (first matching step's testerFlavor, else
// AWS/default resources based on the region name), servicesXml, deploymentXml, the
// DualLogger inner class (mirrors every message to both the JobController run log and
// the class-level java.util.logging Logger; logWithInternalException records the
// throwable only internally), and the Timeouts config plus the enclosing class's
// closing brace.
//
// NOTE(review): in servicesXml below, the literal '" <binding>http: " </handler>\n"'
// is visibly corrupted -- the binding URL appears truncated where a '//' occurred
// inside the string (presumably an automated comment-stripping artifact). Restore the
// full <binding> element from version control; as written this is not even a
// well-formed Java expression at that point.
id.tester().id().toFullString() + "." + id.type() + "." + id.number()); X509Certificate certificate = X509CertificateBuilder.fromKeypair(keyPair, subject, controller.clock().instant(), controller.clock().instant().plus(timeouts.testerCertificate()), SignatureAlgorithm.SHA512_WITH_RSA, BigInteger.valueOf(1)) .build(); controller.jobController().storeTesterCertificate(id, certificate); zipBuilder.add("artifacts/key", KeyUtils.toPem(keyPair.getPrivate()).getBytes(UTF_8)); zipBuilder.add("artifacts/cert", X509CertificateUtils.toPem(certificate).getBytes(UTF_8)); } private DeploymentId getTesterDeploymentId(RunId runId) { ZoneId zoneId = runId.type().zone(controller.system()); return new DeploymentId(runId.tester().id(), zoneId); } static NodeResources testerResourcesFor(ZoneId zone, DeploymentInstanceSpec spec) { return spec.steps().stream() .filter(step -> step.concerns(zone.environment())) .findFirst() .flatMap(step -> step.zones().get(0).testerFlavor()) .map(NodeResources::fromLegacyName) .orElse(zone.region().value().contains("aws-") ? DEFAULT_TESTER_RESOURCES_AWS : DEFAULT_TESTER_RESOURCES); } /** Returns the generated services.xml content for the tester application.
*/ static byte[] servicesXml(boolean systemUsesAthenz, boolean useTesterCertificate, NodeResources resources) { int jdiscMemoryGb = 2; int jdiscMemoryPct = (int) Math.ceil(100 * jdiscMemoryGb / resources.memoryGb()); int testMemoryMb = (int) (1024 * (resources.memoryGb() - jdiscMemoryGb) / 2); String resourceString = String.format(Locale.ENGLISH, "<resources vcpu=\"%.2f\" memory=\"%.2fGb\" disk=\"%.2fGb\" disk-speed=\"%s\" storage-type=\"%s\"/>", resources.vcpu(), resources.memoryGb(), resources.diskGb(), resources.diskSpeed().name(), resources.storageType().name()); String servicesXml = "<?xml version='1.0' encoding='UTF-8'?>\n" + "<services xmlns:deploy='vespa' version='1.0'>\n" + " <container version='1.0' id='tester'>\n" + "\n" + " <component id=\"com.yahoo.vespa.hosted.testrunner.TestRunner\" bundle=\"vespa-testrunner-components\">\n" + " <config name=\"com.yahoo.vespa.hosted.testrunner.test-runner\">\n" + " <artifactsPath>artifacts</artifactsPath>\n" + " <surefireMemoryMb>" + testMemoryMb + "</surefireMemoryMb>\n" + " <useAthenzCredentials>" + systemUsesAthenz + "</useAthenzCredentials>\n" + " <useTesterCertificate>" + useTesterCertificate + "</useTesterCertificate>\n" + " </config>\n" + " </component>\n" + "\n" + " <handler id=\"com.yahoo.vespa.hosted.testrunner.TestRunnerHandler\" bundle=\"vespa-testrunner-components\">\n" + " <binding>http: " </handler>\n" + "\n" + " <nodes count=\"1\" allocated-memory=\"" + jdiscMemoryPct + "%\">\n" + " " + resourceString + "\n" + " </nodes>\n" + " </container>\n" + "</services>\n"; return servicesXml.getBytes(UTF_8); } /** Returns a dummy deployment xml which sets up the service identity for the tester, if present.
*/ private static byte[] deploymentXml(TesterId id, Optional<AthenzDomain> athenzDomain, Optional<AthenzService> athenzService) { String deploymentSpec = "<?xml version='1.0' encoding='UTF-8'?>\n" + "<deployment version=\"1.0\" " + athenzDomain.map(domain -> "athenz-domain=\"" + domain.value() + "\" ").orElse("") + athenzService.map(service -> "athenz-service=\"" + service.value() + "\" ").orElse("") + ">" + " <instance id=\"" + id.id().instance().value() + "\" />" + "</deployment>"; return deploymentSpec.getBytes(UTF_8); } /** Logger which logs to a {@link JobController}, as well as to the parent class' {@link Logger}. */ private class DualLogger { private final RunId id; private final Step step; private DualLogger(RunId id, Step step) { this.id = id; this.step = step; } private void log(String... messages) { log(List.of(messages)); } private void logAll(List<LogEntry> messages) { controller.jobController().log(id, step, messages); } private void log(List<String> messages) { controller.jobController().log(id, step, INFO, messages); } private void log(Level level, String message) { log(level, message, null); } private void logWithInternalException(Level level, String message, Throwable thrown) { logger.log(level, id + " at " + step + ": " + message, thrown); controller.jobController().log(id, step, level, message); } private void log(Level level, String message, Throwable thrown) { logger.log(level, id + " at " + step + ": " + message, thrown); if (thrown != null) { ByteArrayOutputStream traceBuffer = new ByteArrayOutputStream(); thrown.printStackTrace(new PrintStream(traceBuffer)); message += "\n" + traceBuffer; } controller.jobController().log(id, step, level, message); } } static class Timeouts { private final SystemName system; private Timeouts(SystemName system) { this.system = requireNonNull(system); } public static Timeouts of(SystemName system) { return new Timeouts(system); } Duration capacity() { return Duration.ofMinutes(system.isCd() ?
5 : 0); } Duration endpoint() { return Duration.ofMinutes(15); } Duration endpointCertificate() { return Duration.ofMinutes(20); } Duration tester() { return Duration.ofMinutes(30); } Duration nodesDown() { return Duration.ofMinutes(system.isCd() ? 30 : 60); } Duration noNodesDown() { return Duration.ofMinutes(system.isCd() ? 30 : 120); } Duration testerCertificate() { return Duration.ofMinutes(300); } } }
// NOTE(review): from here the chunk restarts 'class InternalStepRunner' -- this appears
// to be a second, concatenated copy of the same class (the methods below duplicate
// definitions earlier in the chunk). Confirm against version control; a file cannot
// legally contain the same top-level class twice.
//
// This span contains: the class header, logger and default tester NodeResources
// constants, the constructor (wires TestConfigSerializer, DeploymentFailureMails and
// Timeouts from the Controller), run() (dispatches each Step to its handler; an
// UncheckedIOException retries silently, any other RuntimeException retries only for
// alwaysRun cleanup steps and otherwise returns error), the deployInitialReal /
// deployReal pair (source versions for the staging "set the stage" deployment, target
// versions otherwise), and the start of deploy() (forwards config-server prepare log
// entries, and fails the deployment with instructions when a refeed action is not
// allowed).
//
// NOTE(review): the statement 'messages.add(" http:' below is corrupted -- the string
// literal is unterminated and the rest of the line (presumably a documentation URL
// containing '//') was lost, likely to an automated comment-stripping pass. Restore
// the original literal from version control.
class InternalStepRunner implements StepRunner { private static final Logger logger = Logger.getLogger(InternalStepRunner.class.getName()); static final NodeResources DEFAULT_TESTER_RESOURCES = new NodeResources(1, 4, 50, 0.3, NodeResources.DiskSpeed.any); static final NodeResources DEFAULT_TESTER_RESOURCES_AWS = new NodeResources(2, 8, 50, 0.3, NodeResources.DiskSpeed.any); private final Controller controller; private final TestConfigSerializer testConfigSerializer; private final DeploymentFailureMails mails; private final Timeouts timeouts; public InternalStepRunner(Controller controller) { this.controller = controller; this.testConfigSerializer = new TestConfigSerializer(controller.system()); this.mails = new DeploymentFailureMails(controller.zoneRegistry()); this.timeouts = Timeouts.of(controller.system()); } @Override public Optional<RunStatus> run(LockedStep step, RunId id) { DualLogger logger = new DualLogger(id, step.get()); try { switch (step.get()) { case deployTester: return deployTester(id, logger); case deployInitialReal: return deployInitialReal(id, logger); case installInitialReal: return installInitialReal(id, logger); case deployReal: return deployReal(id, logger); case installTester: return installTester(id, logger); case installReal: return installReal(id, logger); case startStagingSetup: return startTests(id, true, logger); case endStagingSetup: case endTests: return endTests(id, logger); case startTests: return startTests(id, false, logger); case copyVespaLogs: return copyVespaLogs(id, logger); case deactivateReal: return deactivateReal(id, logger); case deactivateTester: return deactivateTester(id, logger); case report: return report(id, logger); default: throw new AssertionError("Unknown step '" + step + "'!"); } } catch (UncheckedIOException e) { logger.logWithInternalException(INFO, "IO exception running " + id + ": " + Exceptions.toMessageString(e), e); return Optional.empty(); } catch (RuntimeException e) { logger.log(WARNING, "Unexpected
exception running " + id, e); if (step.get().alwaysRun()) { logger.log("Will keep trying, as this is a cleanup step."); return Optional.empty(); } return Optional.of(error); } } private Optional<RunStatus> deployInitialReal(RunId id, DualLogger logger) { Versions versions = controller.jobController().run(id).get().versions(); logger.log("Deploying platform version " + versions.sourcePlatform().orElse(versions.targetPlatform()) + " and application version " + versions.sourceApplication().orElse(versions.targetApplication()).id() + " ..."); return deployReal(id, true, logger); } private Optional<RunStatus> deployReal(RunId id, DualLogger logger) { Versions versions = controller.jobController().run(id).get().versions(); logger.log("Deploying platform version " + versions.targetPlatform() + " and application version " + versions.targetApplication().id() + " ..."); return deployReal(id, false, logger); } private Optional<RunStatus> deployReal(RunId id, boolean setTheStage, DualLogger logger) { return deploy(id.application(), id.type(), () -> controller.applications().deploy2(id.job(), setTheStage), controller.jobController().run(id).get() .stepInfo(setTheStage ? deployInitialReal : deployReal).get() .startTime().get(), logger); } private Optional<RunStatus> deploy(ApplicationId id, JobType type, Supplier<ActivateResult> deployment, Instant startTime, DualLogger logger) { try { PrepareResponse prepareResponse = deployment.get().prepareResponse(); if (prepareResponse.log != null) logger.logAll(prepareResponse.log.stream() .map(entry -> new LogEntry(0, Instant.ofEpochMilli(entry.time), LogEntry.typeOf(LogLevel.parse(entry.level)), entry.message)) .collect(toList())); if ( ! prepareResponse.configChangeActions.refeedActions.stream().allMatch(action -> action.allowed)) { List<String> messages = new ArrayList<>(); messages.add("Deploy failed due to non-compatible changes that require re-feed."); messages.add("Your options are:"); messages.add("1.
Revert the incompatible changes."); messages.add("2. If you think it is safe in your case, you can override this validation, see"); messages.add(" http: messages.add("3. Deploy as a new application under a different name."); messages.add("Illegal actions:"); prepareResponse.configChangeActions.refeedActions.stream() .filter(action -> ! action.allowed) .flatMap(action -> action.messages.stream()) .forEach(messages::add); logger.log(messages); return Optional.of(deploymentFailed); } if (prepareResponse.configChangeActions.restartActions.isEmpty()) logger.log("No services requiring restart."); else prepareResponse.configChangeActions.restartActions.stream() .flatMap(action -> action.services.stream()) .map(service -> service.hostName) .sorted().distinct() .map(Hostname::new) .forEach(hostname -> { controller.applications().restart(new DeploymentId(id, type.zone(controller.system())), Optional.of(hostname)); logger.log("Schedule service restart on host " + hostname.id() + "."); }); logger.log("Deployment successful."); if (prepareResponse.message != null) logger.log(prepareResponse.message); return Optional.of(running); } catch (ConfigServerException e) { Optional<RunStatus> result = startTime.isBefore(controller.clock().instant().minus(Duration.ofHours(1))) ?
// NOTE(review): span contains the error-handling tail of deploy() -- ConfigServerException
// codes are mapped case by case (CERTIFICATE_NOT_READY times out after
// timeouts.endpointCertificate(); ACTIVATION_CONFLICT / APPLICATION_LOCK_FAILURE /
// LOAD_BALANCER_NOT_READY / PARENT_HOST_NOT_READY retry, hardening into
// deploymentFailed once the step is over an hour old; OUT_OF_CAPACITY only retries in
// CD systems within timeouts.capacity(); INVALID_APPLICATION_PACKAGE / BAD_REQUEST
// fail immediately; anything else rethrows), EndpointCertificateException
// CERT_NOT_AVAILABLE retries up to the same certificate timeout. Then the
// installInitialReal/installReal overloads and the installReal body: waits for service
// convergence, checks endpoints and containers once converged, and builds a
// failureReason when nodes have been suspended longer than timeouts.nodesDown(), when
// no node has been allowed down for timeouts.noNodesDown(), or when the job exceeds
// JobRunner.jobTimeout minus one hour. Ends at the duplicated installTester and the
// javadoc of the duplicated containersAreUp.
//
// NOTE(review): several statements of the form 'logger.log(" logger.log(...' below are
// corrupted -- each first logger.log call has an unterminated string literal whose
// content (presumably a banner/header line) was lost, likely to an automated
// comment-stripping pass ('//' or '#' runs inside the literal). Restore these literals
// from version control; as written they are not valid Java.
//
// NOTE(review): the failure message '... + timeout.toHours() + "hours!"' is missing a
// space before "hours" -- it renders e.g. "within 11hours!". Worth fixing once the
// file is restored.
Optional.of(deploymentFailed) : Optional.empty(); switch (e.getErrorCode()) { case CERTIFICATE_NOT_READY: if (startTime.plus(timeouts.endpointCertificate()).isBefore(controller.clock().instant())) { logger.log("Deployment failed to find provisioned endpoint certificate after " + timeouts.endpointCertificate()); return Optional.of(RunStatus.endpointCertificateTimeout); } return result; case ACTIVATION_CONFLICT: case APPLICATION_LOCK_FAILURE: logger.log("Deployment failed with possibly transient error " + e.getErrorCode() + ", will retry: " + e.getMessage()); return result; case LOAD_BALANCER_NOT_READY: case PARENT_HOST_NOT_READY: logger.log(e.getServerMessage()); return result; case OUT_OF_CAPACITY: logger.log(e.getServerMessage()); return controller.system().isCd() && startTime.plus(timeouts.capacity()).isAfter(controller.clock().instant()) ? Optional.empty() : Optional.of(outOfCapacity); case INVALID_APPLICATION_PACKAGE: case BAD_REQUEST: logger.log(e.getMessage()); return Optional.of(deploymentFailed); } throw e; } catch (EndpointCertificateException e) { switch (e.type()) { case CERT_NOT_AVAILABLE: if (startTime.plus(timeouts.endpointCertificate()).isBefore(controller.clock().instant())) { logger.log("Deployment failed to find provisioned endpoint certificate after " + timeouts.endpointCertificate()); return Optional.of(RunStatus.endpointCertificateTimeout); } return Optional.empty(); default: throw e; } } } private Optional<RunStatus> installInitialReal(RunId id, DualLogger logger) { return installReal(id, true, logger); } private Optional<RunStatus> installReal(RunId id, DualLogger logger) { return installReal(id, false, logger); } private Optional<RunStatus> installReal(RunId id, boolean setTheStage, DualLogger logger) { Optional<Deployment> deployment = deployment(id.application(), id.type()); if (deployment.isEmpty()) { logger.log(INFO, "Deployment expired before installation was successful."); return Optional.of(installationFailed); } Versions versions =
controller.jobController().run(id).get().versions(); Version platform = setTheStage ? versions.sourcePlatform().orElse(versions.targetPlatform()) : versions.targetPlatform(); Run run = controller.jobController().run(id).get(); Optional<ServiceConvergence> services = controller.serviceRegistry().configServer().serviceConvergence(new DeploymentId(id.application(), id.type().zone(controller.system())), Optional.of(platform)); if (services.isEmpty()) { logger.log("Config status not currently available -- will retry."); return Optional.empty(); } List<Node> nodes = controller.serviceRegistry().configServer().nodeRepository().list(id.type().zone(controller.system()), id.application(), ImmutableSet.of(active, reserved)); List<Node> parents = controller.serviceRegistry().configServer().nodeRepository().list(id.type().zone(controller.system()), nodes.stream().map(node -> node.parentHostname().get()).collect(toList())); NodeList nodeList = NodeList.of(nodes, parents, services.get()); boolean firstTick = run.convergenceSummary().isEmpty(); if (firstTick) { logger.log(" logger.log(nodeList.asList().stream() .flatMap(node -> nodeDetails(node, true)) .collect(toList())); } ConvergenceSummary summary = nodeList.summary(); if (summary.converged()) { controller.jobController().locked(id, lockedRun -> lockedRun.withSummary(null)); if (endpointsAvailable(id.application(), id.type().zone(controller.system()), logger)) { if (containersAreUp(id.application(), id.type().zone(controller.system()), logger)) { logger.log("Installation succeeded!"); return Optional.of(running); } } else if (timedOut(id, deployment.get(), timeouts.endpoint())) { logger.log(WARNING, "Endpoints failed to show up within " + timeouts.endpoint().toMinutes() + " minutes!"); return Optional.of(error); } } String failureReason = null; NodeList suspendedTooLong = nodeList.suspendedSince(controller.clock().instant().minus(timeouts.nodesDown())); if ( !
suspendedTooLong.isEmpty()) { failureReason = "Some nodes have been suspended for more than " + timeouts.nodesDown().toMinutes() + " minutes:\n" + suspendedTooLong.asList().stream().map(node -> node.node().hostname().value()).collect(joining("\n")); } if (run.noNodesDownSince() .map(since -> since.isBefore(controller.clock().instant().minus(timeouts.noNodesDown()))) .orElse(false)) { if (summary.needPlatformUpgrade() > 0 || summary.needReboot() > 0 || summary.needRestart() > 0) failureReason = "No nodes allowed to suspend to progress installation for " + timeouts.noNodesDown().toMinutes() + " minutes."; else failureReason = "Nodes not able to start with new application package."; } Duration timeout = JobRunner.jobTimeout.minusHours(1); if (timedOut(id, deployment.get(), timeout)) { failureReason = "Installation failed to complete within " + timeout.toHours() + "hours!"; } if (failureReason != null) { logger.log(" logger.log(nodeList.asList().stream() .flatMap(node -> nodeDetails(node, true)) .collect(toList())); logger.log(" logger.log(nodeList.not().in(nodeList.not().needsNewConfig() .not().needsPlatformUpgrade() .not().needsReboot() .not().needsRestart() .not().needsFirmwareUpgrade() .not().needsOsUpgrade()) .asList().stream() .flatMap(node -> nodeDetails(node, true)) .collect(toList())); logger.log(INFO, failureReason); return Optional.of(installationFailed); } if ( ! firstTick) logger.log(nodeList.expectedDown().concat(nodeList.needsNewConfig()).asList().stream() .distinct() .flatMap(node -> nodeDetails(node, false)) .collect(toList())); controller.jobController().locked(id, lockedRun -> { Instant noNodesDownSince = nodeList.allowedDown().size() == 0 ?
lockedRun.noNodesDownSince().orElse(controller.clock().instant()) : null; return lockedRun.noNodesDownSince(noNodesDownSince).withSummary(summary); }); return Optional.empty(); } private Optional<RunStatus> installTester(RunId id, DualLogger logger) { Run run = controller.jobController().run(id).get(); Version platform = controller.systemVersion(); ZoneId zone = id.type().zone(controller.system()); ApplicationId testerId = id.tester().id(); Optional<ServiceConvergence> services = controller.serviceRegistry().configServer().serviceConvergence(new DeploymentId(testerId, zone), Optional.of(platform)); if (services.isEmpty()) { logger.log("Config status not currently available -- will retry."); return run.stepInfo(installTester).get().startTime().get().isBefore(controller.clock().instant().minus(Duration.ofMinutes(5))) ? Optional.of(error) : Optional.empty(); } List<Node> nodes = controller.serviceRegistry().configServer().nodeRepository().list(zone, testerId, ImmutableSet.of(active, reserved)); List<Node> parents = controller.serviceRegistry().configServer().nodeRepository().list(zone, nodes.stream().map(node -> node.parentHostname().get()).collect(toList())); NodeList nodeList = NodeList.of(nodes, parents, services.get()); logger.log(nodeList.asList().stream() .flatMap(node -> nodeDetails(node, false)) .collect(toList())); if (nodeList.summary().converged() && testerContainersAreUp(testerId, zone, logger)) { logger.log("Tester container successfully installed!"); return Optional.of(running); } if (run.stepInfo(installTester).get().startTime().get().plus(timeouts.tester()).isBefore(controller.clock().instant())) { logger.log(WARNING, "Installation of tester failed to complete within " + timeouts.tester().toMinutes() + " minutes!"); return Optional.of(error); } return Optional.empty(); } /** Returns true iff all containers in the deployment give 100 consecutive 200 OK responses on /status.html.
*/ private boolean containersAreUp(ApplicationId id, ZoneId zoneId, DualLogger logger) { var endpoints = controller.routing().zoneEndpointsOf(Set.of(new DeploymentId(id, zoneId))); if ( ! endpoints.containsKey(zoneId)) return false; for (var endpoint : endpoints.get(zoneId)) { boolean ready = controller.jobController().cloud().ready(endpoint.url()); if ( ! ready) { logger.log("Failed to get 100 consecutive OKs from " + endpoint); return false; } } return true; } /** Returns true iff all containers in the tester deployment give 100 consecutive 200 OK responses on /status.html. */ private boolean testerContainersAreUp(ApplicationId id, ZoneId zoneId, DualLogger logger) { DeploymentId deploymentId = new DeploymentId(id, zoneId); if (controller.jobController().cloud().testerReady(deploymentId)) { return true; } else { logger.log("Failed to get 100 consecutive OKs from tester container for " + deploymentId); return false; } } private boolean endpointsAvailable(ApplicationId id, ZoneId zone, DualLogger logger) { var endpoints = controller.routing().zoneEndpointsOf(Set.of(new DeploymentId(id, zone))); if ( ! endpoints.containsKey(zone)) { logger.log("Endpoints not yet ready."); return false; } var policies = controller.routing().policies().get(new DeploymentId(id, zone)); for (var endpoint : endpoints.get(zone)) { HostName endpointName = HostName.from(endpoint.dnsName()); var ipAddress = controller.jobController().cloud().resolveHostName(endpointName); if (ipAddress.isEmpty()) { logger.log(INFO, "DNS lookup yielded no IP address for '" + endpointName + "'."); return false; } if (endpoint.routingMethod() == RoutingMethod.exclusive) { var policy = policies.get(new RoutingPolicyId(id, ClusterSpec.Id.from(endpoint.name()), zone)); if (policy == null) throw new IllegalStateException(endpoint + " has no matching policy in " + policies); var cNameValue = controller.jobController().cloud().resolveCname(endpointName); if ( !
cNameValue.map(policy.canonicalName()::equals).orElse(false)) { logger.log(INFO, "CNAME '" + endpointName + "' points at " + cNameValue.map(name -> "'" + name + "'").orElse("nothing") + " but should point at load balancer '" + policy.canonicalName() + "'"); return false; } var loadBalancerAddress = controller.jobController().cloud().resolveHostName(policy.canonicalName()); if ( ! loadBalancerAddress.equals(ipAddress)) { logger.log(INFO, "IP address of CNAME '" + endpointName + "' (" + ipAddress.get() + ") and load balancer '" + policy.canonicalName() + "' (" + loadBalancerAddress.orElse("empty") + ") are not equal"); return false; } } } logEndpoints(endpoints, logger); return true; } private void logEndpoints(Map<ZoneId, List<Endpoint>> zoneEndpoints, DualLogger logger) { List<String> messages = new ArrayList<>(); messages.add("Found endpoints:"); zoneEndpoints.forEach((zone, endpoints) -> { messages.add("- " + zone); for (Endpoint endpoint : endpoints) messages.add(" |-- " + endpoint.url() + " (cluster '" + endpoint.name() + "')"); }); logger.log(messages); } private Stream<String> nodeDetails(NodeWithServices node, boolean printAllServices) { return Stream.concat(Stream.of(node.node().hostname() + ": " + humanize(node.node().serviceState()) + (node.node().suspendedSince().map(since -> " since " + since).orElse("")), "--- platform " + wantedPlatform(node.node()) + (node.needsPlatformUpgrade() ? " <-- " + currentPlatform(node.node()) : "") + (node.needsOsUpgrade() && node.isAllowedDown() ? ", upgrading OS (" + node.node().wantedOsVersion() + " <-- " + node.node().currentOsVersion() + ")" : "") + (node.needsFirmwareUpgrade() && node.isAllowedDown() ? ", upgrading firmware" : "") + (node.needsRestart() ? ", restart pending (" + node.node().wantedRestartGeneration() + " <-- " + node.node().restartGeneration() + ")" : "") + (node.needsReboot() ? 
", reboot pending (" + node.node().wantedRebootGeneration() + " <-- " + node.node().rebootGeneration() + ")" : "")), node.services().stream() .filter(service -> printAllServices || node.needsNewConfig()) .map(service -> "--- " + service.type() + " on port " + service.port() + (service.currentGeneration() == -1 ? " has not started " : " has config generation " + service.currentGeneration() + ", wanted is " + node.wantedConfigGeneration()))); } private String wantedPlatform(Node node) { return node.wantedDockerImage().repository() + ":" + node.wantedVersion(); } private String currentPlatform(Node node) { String currentRepo = node.currentDockerImage().repository(); String wantedRepo = node.wantedDockerImage().repository(); return (currentRepo.equals(wantedRepo) ? "" : currentRepo + ":") + node.currentVersion(); } private String humanize(Node.ServiceState state) { switch (state) { case allowedDown: return "allowed to be DOWN"; case expectedUp: return "expected to be UP"; case unorchestrated: return "unorchestrated"; default: return state.name(); } } private Optional<RunStatus> startTests(RunId id, boolean isSetup, DualLogger logger) { Optional<Deployment> deployment = deployment(id.application(), id.type()); if (deployment.isEmpty()) { logger.log(INFO, "Deployment expired before tests could start."); return Optional.of(error); } var deployments = controller.applications().requireInstance(id.application()) .productionDeployments().keySet().stream() .map(zone -> new DeploymentId(id.application(), zone)) .collect(Collectors.toSet()); ZoneId zoneId = id.type().zone(controller.system()); deployments.add(new DeploymentId(id.application(), zoneId)); logger.log("Attempting to find endpoints ..."); var endpoints = controller.routing().zoneEndpointsOf(deployments); if ( ! 
endpoints.containsKey(zoneId)) { logger.log(WARNING, "Endpoints for the deployment to test vanished again, while it was still active!"); return Optional.of(error); } logEndpoints(endpoints, logger); if (!controller.jobController().cloud().testerReady(getTesterDeploymentId(id))) { logger.log(WARNING, "Tester container went bad!"); return Optional.of(error); } logger.log("Starting tests ..."); TesterCloud.Suite suite = TesterCloud.Suite.of(id.type(), isSetup); byte[] config = testConfigSerializer.configJson(id.application(), id.type(), true, endpoints, controller.applications().contentClustersByZone(deployments)); controller.jobController().cloud().startTests(getTesterDeploymentId(id), suite, config); return Optional.of(running); } private Optional<RunStatus> endTests(RunId id, DualLogger logger) { if (deployment(id.application(), id.type()).isEmpty()) { logger.log(INFO, "Deployment expired before tests could complete."); return Optional.of(aborted); } Optional<X509Certificate> testerCertificate = controller.jobController().run(id).get().testerCertificate(); if (testerCertificate.isPresent()) { try { testerCertificate.get().checkValidity(Date.from(controller.clock().instant())); } catch (CertificateExpiredException | CertificateNotYetValidException e) { logger.log(INFO, "Tester certificate expired before tests could complete."); return Optional.of(aborted); } } controller.jobController().updateTestLog(id); TesterCloud.Status testStatus = controller.jobController().cloud().getStatus(getTesterDeploymentId(id)); switch (testStatus) { case NOT_STARTED: throw new IllegalStateException("Tester reports tests not started, even though they should have!"); case RUNNING: return Optional.empty(); case FAILURE: logger.log("Tests failed."); return Optional.of(testFailure); case ERROR: logger.log(INFO, "Tester failed running its tests!"); return Optional.of(error); case SUCCESS: logger.log("Tests completed successfully."); return Optional.of(running); default: throw new 
IllegalStateException("Unknown status '" + testStatus + "'!"); } } private Optional<RunStatus> copyVespaLogs(RunId id, DualLogger logger) { if (deployment(id.application(), id.type()).isPresent()) try { controller.jobController().updateVespaLog(id); } catch (Exception e) { logger.log(INFO, "Failure getting vespa logs for " + id, e); return Optional.of(error); } return Optional.of(running); } private Optional<RunStatus> deactivateReal(RunId id, DualLogger logger) { try { logger.log("Deactivating deployment of " + id.application() + " in " + id.type().zone(controller.system()) + " ..."); controller.applications().deactivate(id.application(), id.type().zone(controller.system())); return Optional.of(running); } catch (RuntimeException e) { logger.log(WARNING, "Failed deleting application " + id.application(), e); Instant startTime = controller.jobController().run(id).get().stepInfo(deactivateReal).get().startTime().get(); return startTime.isBefore(controller.clock().instant().minus(Duration.ofHours(1))) ? Optional.of(error) : Optional.empty(); } } private Optional<RunStatus> deactivateTester(RunId id, DualLogger logger) { try { logger.log("Deactivating tester of " + id.application() + " in " + id.type().zone(controller.system()) + " ..."); controller.jobController().deactivateTester(id.tester(), id.type()); return Optional.of(running); } catch (RuntimeException e) { logger.log(WARNING, "Failed deleting tester of " + id.application(), e); Instant startTime = controller.jobController().run(id).get().stepInfo(deactivateTester).get().startTime().get(); return startTime.isBefore(controller.clock().instant().minus(Duration.ofHours(1))) ? 
Optional.of(error) : Optional.empty(); } } private Optional<RunStatus> report(RunId id, DualLogger logger) { try { controller.jobController().active(id).ifPresent(run -> { if (run.hasFailed()) sendNotification(run, logger); }); } catch (IllegalStateException e) { logger.log(INFO, "Job '" + id.type() + "' no longer supposed to run?", e); return Optional.of(error); } return Optional.of(running); } /** Sends a mail with a notification of a failed run, if one should be sent. */ private void sendNotification(Run run, DualLogger logger) { Application application = controller.applications().requireApplication(TenantAndApplicationId.from(run.id().application())); Notifications notifications = application.deploymentSpec().requireInstance(run.id().application().instance()).notifications(); boolean newCommit = application.require(run.id().application().instance()).change().application() .map(run.versions().targetApplication()::equals) .orElse(false); When when = newCommit ? failingCommit : failing; List<String> recipients = new ArrayList<>(notifications.emailAddressesFor(when)); if (notifications.emailRolesFor(when).contains(author)) run.versions().targetApplication().authorEmail().ifPresent(recipients::add); if (recipients.isEmpty()) return; try { logger.log(INFO, "Sending failure notification to " + String.join(", ", recipients)); mailOf(run, recipients).ifPresent(controller.serviceRegistry().mailer()::send); } catch (RuntimeException e) { logger.log(INFO, "Exception trying to send mail for " + run.id(), e); } } private Optional<Mail> mailOf(Run run, List<String> recipients) { switch (run.status()) { case running: case aborted: case success: return Optional.empty(); case outOfCapacity: return run.id().type().isProduction() ? 
Optional.of(mails.outOfCapacity(run.id(), recipients)) : Optional.empty(); case deploymentFailed: return Optional.of(mails.deploymentFailure(run.id(), recipients)); case installationFailed: return Optional.of(mails.installationFailure(run.id(), recipients)); case testFailure: return Optional.of(mails.testFailure(run.id(), recipients)); case error: case endpointCertificateTimeout: return Optional.of(mails.systemError(run.id(), recipients)); default: logger.log(WARNING, "Don't know what mail to send for run status '" + run.status() + "'"); return Optional.of(mails.systemError(run.id(), recipients)); } } /** Returns the deployment of the real application in the zone of the given job, if it exists. */ private Optional<Deployment> deployment(ApplicationId id, JobType type) { return Optional.ofNullable(application(id).deployments().get(type.zone(controller.system()))); } /** Returns the real application with the given id. */ private Instance application(ApplicationId id) { controller.applications().lockApplicationOrThrow(TenantAndApplicationId.from(id), __ -> { }); return controller.applications().requireInstance(id); } /** * Returns whether the time since deployment is more than the zone deployment expiry, or the given timeout. * * We time out the job before the deployment expires, for zones where deployments are not persistent, * to be able to collect the Vespa log from the deployment. Thus, the lower of the zone's deployment expiry, * and the given default installation timeout, minus one minute, is used as a timeout threshold. */ private boolean timedOut(RunId id, Deployment deployment, Duration defaultTimeout) { Run run = controller.jobController().run(id).get(); if ( ! 
controller.system().isCd() && run.start().isAfter(deployment.at())) return false; Duration timeout = controller.zoneRegistry().getDeploymentTimeToLive(deployment.zone()) .filter(zoneTimeout -> zoneTimeout.compareTo(defaultTimeout) < 0) .orElse(defaultTimeout); return deployment.at().isBefore(controller.clock().instant().minus(timeout.minus(Duration.ofMinutes(1)))); } /** Returns the application package for the tester application, assembled from a generated config, fat-jar and services.xml. */ private ApplicationPackage testerPackage(RunId id) { ApplicationVersion version = controller.jobController().run(id).get().versions().targetApplication(); DeploymentSpec spec = controller.applications().requireApplication(TenantAndApplicationId.from(id.application())).deploymentSpec(); ZoneId zone = id.type().zone(controller.system()); boolean useTesterCertificate = controller.system().isPublic() && id.type().environment().isTest(); byte[] servicesXml = servicesXml(! controller.system().isPublic(), useTesterCertificate, testerResourcesFor(zone, spec.requireInstance(id.application().instance()))); byte[] testPackage = controller.applications().applicationStore().getTester(id.application().tenant(), id.application().application(), version); byte[] deploymentXml = deploymentXml(id.tester(), spec.athenzDomain(), spec.requireInstance(id.application().instance()).athenzService(zone.environment(), zone.region())); try (ZipBuilder zipBuilder = new ZipBuilder(testPackage.length + servicesXml.length + 1000)) { zipBuilder.add(testPackage); zipBuilder.add("services.xml", servicesXml); zipBuilder.add("deployment.xml", deploymentXml); if (useTesterCertificate) appendAndStoreCertificate(zipBuilder, id); zipBuilder.close(); return new ApplicationPackage(zipBuilder.toByteArray()); } } private void appendAndStoreCertificate(ZipBuilder zipBuilder, RunId id) { KeyPair keyPair = KeyUtils.generateKeypair(KeyAlgorithm.RSA, 2048); X500Principal subject = new X500Principal("CN=" + 
id.tester().id().toFullString() + "." + id.type() + "." + id.number()); X509Certificate certificate = X509CertificateBuilder.fromKeypair(keyPair, subject, controller.clock().instant(), controller.clock().instant().plus(timeouts.testerCertificate()), SignatureAlgorithm.SHA512_WITH_RSA, BigInteger.valueOf(1)) .build(); controller.jobController().storeTesterCertificate(id, certificate); zipBuilder.add("artifacts/key", KeyUtils.toPem(keyPair.getPrivate()).getBytes(UTF_8)); zipBuilder.add("artifacts/cert", X509CertificateUtils.toPem(certificate).getBytes(UTF_8)); } private DeploymentId getTesterDeploymentId(RunId runId) { ZoneId zoneId = runId.type().zone(controller.system()); return new DeploymentId(runId.tester().id(), zoneId); } static NodeResources testerResourcesFor(ZoneId zone, DeploymentInstanceSpec spec) { return spec.steps().stream() .filter(step -> step.concerns(zone.environment())) .findFirst() .flatMap(step -> step.zones().get(0).testerFlavor()) .map(NodeResources::fromLegacyName) .orElse(zone.region().value().contains("aws-") ? DEFAULT_TESTER_RESOURCES_AWS : DEFAULT_TESTER_RESOURCES); } /** Returns the generated services.xml content for the tester application. 
*/ static byte[] servicesXml(boolean systemUsesAthenz, boolean useTesterCertificate, NodeResources resources) { int jdiscMemoryGb = 2; int jdiscMemoryPct = (int) Math.ceil(100 * jdiscMemoryGb / resources.memoryGb()); int testMemoryMb = (int) (1024 * (resources.memoryGb() - jdiscMemoryGb) / 2); String resourceString = String.format(Locale.ENGLISH, "<resources vcpu=\"%.2f\" memory=\"%.2fGb\" disk=\"%.2fGb\" disk-speed=\"%s\" storage-type=\"%s\"/>", resources.vcpu(), resources.memoryGb(), resources.diskGb(), resources.diskSpeed().name(), resources.storageType().name()); String servicesXml = "<?xml version='1.0' encoding='UTF-8'?>\n" + "<services xmlns:deploy='vespa' version='1.0'>\n" + " <container version='1.0' id='tester'>\n" + "\n" + " <component id=\"com.yahoo.vespa.hosted.testrunner.TestRunner\" bundle=\"vespa-testrunner-components\">\n" + " <config name=\"com.yahoo.vespa.hosted.testrunner.test-runner\">\n" + " <artifactsPath>artifacts</artifactsPath>\n" + " <surefireMemoryMb>" + testMemoryMb + "</surefireMemoryMb>\n" + " <useAthenzCredentials>" + systemUsesAthenz + "</useAthenzCredentials>\n" + " <useTesterCertificate>" + useTesterCertificate + "</useTesterCertificate>\n" + " </config>\n" + " </component>\n" + "\n" + " <handler id=\"com.yahoo.vespa.hosted.testrunner.TestRunnerHandler\" bundle=\"vespa-testrunner-components\">\n" + " <binding>http: " </handler>\n" + "\n" + " <nodes count=\"1\" allocated-memory=\"" + jdiscMemoryPct + "%\">\n" + " " + resourceString + "\n" + " </nodes>\n" + " </container>\n" + "</services>\n"; return servicesXml.getBytes(UTF_8); } /** Returns a dummy deployment xml which sets up the service identity for the tester, if present. 
*/ private static byte[] deploymentXml(TesterId id, Optional<AthenzDomain> athenzDomain, Optional<AthenzService> athenzService) { String deploymentSpec = "<?xml version='1.0' encoding='UTF-8'?>\n" + "<deployment version=\"1.0\" " + athenzDomain.map(domain -> "athenz-domain=\"" + domain.value() + "\" ").orElse("") + athenzService.map(service -> "athenz-service=\"" + service.value() + "\" ").orElse("") + ">" + " <instance id=\"" + id.id().instance().value() + "\" />" + "</deployment>"; return deploymentSpec.getBytes(UTF_8); } /** Logger which logs to a {@link JobController}, as well as to the parent class' {@link Logger}. */ private class DualLogger { private final RunId id; private final Step step; private DualLogger(RunId id, Step step) { this.id = id; this.step = step; } private void log(String... messages) { log(List.of(messages)); } private void logAll(List<LogEntry> messages) { controller.jobController().log(id, step, messages); } private void log(List<String> messages) { controller.jobController().log(id, step, INFO, messages); } private void log(Level level, String message) { log(level, message, null); } private void logWithInternalException(Level level, String message, Throwable thrown) { logger.log(level, id + " at " + step + ": " + message, thrown); controller.jobController().log(id, step, level, message); } private void log(Level level, String message, Throwable thrown) { logger.log(level, id + " at " + step + ": " + message, thrown); if (thrown != null) { ByteArrayOutputStream traceBuffer = new ByteArrayOutputStream(); thrown.printStackTrace(new PrintStream(traceBuffer)); message += "\n" + traceBuffer; } controller.jobController().log(id, step, level, message); } } static class Timeouts { private final SystemName system; private Timeouts(SystemName system) { this.system = requireNonNull(system); } public static Timeouts of(SystemName system) { return new Timeouts(system); } Duration capacity() { return Duration.ofMinutes(system.isCd() ? 
5 : 0); } Duration endpoint() { return Duration.ofMinutes(15); } Duration endpointCertificate() { return Duration.ofMinutes(20); } Duration tester() { return Duration.ofMinutes(30); } Duration nodesDown() { return Duration.ofMinutes(system.isCd() ? 30 : 60); } Duration noNodesDown() { return Duration.ofMinutes(system.isCd() ? 30 : 120); } Duration testerCertificate() { return Duration.ofMinutes(300); } } }
Why sleep here or how can be know how long to sleep?
private TaskDeploymentDescriptor createReceiver(NettyShuffleDescriptor shuffleDescriptor) throws IOException { InputGateDeploymentDescriptor inputGateDeploymentDescriptor = new InputGateDeploymentDescriptor( new IntermediateDataSetID(), ResultPartitionType.PIPELINED, 0, new ShuffleDescriptor[] {shuffleDescriptor}); return createTestTaskDeploymentDescriptor( "Receiver", new ExecutionAttemptID(), TestingAbstractInvokables.Receiver.class, 1, Collections.emptyList(), Collections.singletonList(inputGateDeploymentDescriptor)); }
ResultPartitionType.PIPELINED,
private TaskDeploymentDescriptor createReceiver(NettyShuffleDescriptor shuffleDescriptor) throws IOException { InputGateDeploymentDescriptor inputGateDeploymentDescriptor = new InputGateDeploymentDescriptor( new IntermediateDataSetID(), ResultPartitionType.PIPELINED, 0, new ShuffleDescriptor[] {shuffleDescriptor}); return createTestTaskDeploymentDescriptor( "Receiver", new ExecutionAttemptID(), TestingAbstractInvokables.Receiver.class, 1, Collections.emptyList(), Collections.singletonList(inputGateDeploymentDescriptor)); }
class TaskExecutorSubmissionTest extends TestLogger { @Rule public final TestName testName = new TestName(); private static final Time timeout = Time.milliseconds(10000L); private JobID jobId = new JobID(); /** * Tests that we can submit a task to the TaskManager given that we've allocated a slot there. */ @Test(timeout = 10000L) public void testTaskSubmission() throws Exception { final ExecutionAttemptID eid = new ExecutionAttemptID(); final TaskDeploymentDescriptor tdd = createTestTaskDeploymentDescriptor("test task", eid, TaskExecutorTest.TestInvokable.class); final CompletableFuture<Void> taskRunningFuture = new CompletableFuture<>(); try (TaskSubmissionTestEnvironment env = new TaskSubmissionTestEnvironment.Builder(jobId) .setSlotSize(1) .addTaskManagerActionListener(eid, ExecutionState.RUNNING, taskRunningFuture) .build()) { TaskExecutorGateway tmGateway = env.getTaskExecutorGateway(); TaskSlotTable taskSlotTable = env.getTaskSlotTable(); taskSlotTable.allocateSlot(0, jobId, tdd.getAllocationId(), Time.seconds(60)); tmGateway.submitTask(tdd, env.getJobMasterId(), timeout).get(); taskRunningFuture.get(); } } /** * Tests that the TaskManager sends a proper exception back to the sender if the submit task * message fails. 
*/ @Test(timeout = 10000L) public void testSubmitTaskFailure() throws Exception { final ExecutionAttemptID eid = new ExecutionAttemptID(); final TaskDeploymentDescriptor tdd = createTestTaskDeploymentDescriptor( "test task", eid, BlockingNoOpInvokable.class, 0); try (TaskSubmissionTestEnvironment env = new TaskSubmissionTestEnvironment.Builder(jobId) .build()) { TaskExecutorGateway tmGateway = env.getTaskExecutorGateway(); TaskSlotTable taskSlotTable = env.getTaskSlotTable(); taskSlotTable.allocateSlot(0, jobId, tdd.getAllocationId(), Time.seconds(60)); tmGateway.submitTask(tdd, env.getJobMasterId(), timeout).get(); } catch (Exception e) { assertThat(e.getCause(), instanceOf(IllegalArgumentException.class)); } } /** * Tests that we can cancel the task of the TaskManager given that we've submitted it. */ @Test(timeout = 10000L) public void testTaskSubmissionAndCancelling() throws Exception { final ExecutionAttemptID eid1 = new ExecutionAttemptID(); final ExecutionAttemptID eid2 = new ExecutionAttemptID(); final TaskDeploymentDescriptor tdd1 = createTestTaskDeploymentDescriptor("test task", eid1, BlockingNoOpInvokable.class); final TaskDeploymentDescriptor tdd2 = createTestTaskDeploymentDescriptor("test task", eid2, BlockingNoOpInvokable.class); final CompletableFuture<Void> task1RunningFuture = new CompletableFuture<>(); final CompletableFuture<Void> task2RunningFuture = new CompletableFuture<>(); final CompletableFuture<Void> task1CanceledFuture = new CompletableFuture<>(); try (TaskSubmissionTestEnvironment env = new TaskSubmissionTestEnvironment.Builder(jobId) .setSlotSize(2) .addTaskManagerActionListener(eid1, ExecutionState.RUNNING, task1RunningFuture) .addTaskManagerActionListener(eid2, ExecutionState.RUNNING, task2RunningFuture) .addTaskManagerActionListener(eid1, ExecutionState.CANCELED, task1CanceledFuture) .build()) { TaskExecutorGateway tmGateway = env.getTaskExecutorGateway(); TaskSlotTable taskSlotTable = env.getTaskSlotTable(); 
taskSlotTable.allocateSlot(0, jobId, tdd1.getAllocationId(), Time.seconds(60)); tmGateway.submitTask(tdd1, env.getJobMasterId(), timeout).get(); task1RunningFuture.get(); taskSlotTable.allocateSlot(1, jobId, tdd2.getAllocationId(), Time.seconds(60)); tmGateway.submitTask(tdd2, env.getJobMasterId(), timeout).get(); task2RunningFuture.get(); assertSame(taskSlotTable.getTask(eid1).getExecutionState(), ExecutionState.RUNNING); assertSame(taskSlotTable.getTask(eid2).getExecutionState(), ExecutionState.RUNNING); tmGateway.cancelTask(eid1, timeout); task1CanceledFuture.get(); assertSame(taskSlotTable.getTask(eid1).getExecutionState(), ExecutionState.CANCELED); assertSame(taskSlotTable.getTask(eid2).getExecutionState(), ExecutionState.RUNNING); } } /** * Tests that submitted tasks will fail when attempting to send/receive data if no * ResultPartitions/InputGates are set up. */ @Test(timeout = 10000L) public void testGateChannelEdgeMismatch() throws Exception { final ExecutionAttemptID eid1 = new ExecutionAttemptID(); final ExecutionAttemptID eid2 = new ExecutionAttemptID(); final TaskDeploymentDescriptor tdd1 = createTestTaskDeploymentDescriptor("Sender", eid1, TestingAbstractInvokables.Sender.class); final TaskDeploymentDescriptor tdd2 = createTestTaskDeploymentDescriptor("Receiver", eid2, TestingAbstractInvokables.Receiver.class); final CompletableFuture<Void> task1RunningFuture = new CompletableFuture<>(); final CompletableFuture<Void> task2RunningFuture = new CompletableFuture<>(); final CompletableFuture<Void> task1FailedFuture = new CompletableFuture<>(); final CompletableFuture<Void> task2FailedFuture = new CompletableFuture<>(); try (TaskSubmissionTestEnvironment env = new TaskSubmissionTestEnvironment.Builder(jobId) .addTaskManagerActionListener(eid1, ExecutionState.RUNNING, task1RunningFuture) .addTaskManagerActionListener(eid2, ExecutionState.RUNNING, task2RunningFuture) .addTaskManagerActionListener(eid1, ExecutionState.FAILED, task1FailedFuture) 
.addTaskManagerActionListener(eid2, ExecutionState.FAILED, task2FailedFuture) .setSlotSize(2) .build()) { TaskExecutorGateway tmGateway = env.getTaskExecutorGateway(); TaskSlotTable taskSlotTable = env.getTaskSlotTable(); taskSlotTable.allocateSlot(0, jobId, tdd1.getAllocationId(), Time.seconds(60)); tmGateway.submitTask(tdd1, env.getJobMasterId(), timeout).get(); task1RunningFuture.get(); taskSlotTable.allocateSlot(1, jobId, tdd2.getAllocationId(), Time.seconds(60)); tmGateway.submitTask(tdd2, env.getJobMasterId(), timeout).get(); task2RunningFuture.get(); task1FailedFuture.get(); task2FailedFuture.get(); assertSame(taskSlotTable.getTask(eid1).getExecutionState(), ExecutionState.FAILED); assertSame(taskSlotTable.getTask(eid2).getExecutionState(), ExecutionState.FAILED); } } @Test(timeout = 10000L) public void testRunJobWithForwardChannel() throws Exception { ResourceID producerLocation = ResourceID.generate(); NettyShuffleDescriptor sdd = createRemoteWithIdAndLocation(new IntermediateResultPartitionID(), producerLocation); TaskDeploymentDescriptor tdd1 = createSender(sdd); TaskDeploymentDescriptor tdd2 = createReceiver(sdd); ExecutionAttemptID eid1 = tdd1.getExecutionAttemptId(); ExecutionAttemptID eid2 = tdd2.getExecutionAttemptId(); final CompletableFuture<Void> task1RunningFuture = new CompletableFuture<>(); final CompletableFuture<Void> task2RunningFuture = new CompletableFuture<>(); final CompletableFuture<Void> task1FinishedFuture = new CompletableFuture<>(); final CompletableFuture<Void> task2FinishedFuture = new CompletableFuture<>(); final JobMasterId jobMasterId = JobMasterId.generate(); TestingJobMasterGateway testingJobMasterGateway = new TestingJobMasterGatewayBuilder() .setFencingTokenSupplier(() -> jobMasterId) .setScheduleOrUpdateConsumersFunction( resultPartitionID -> CompletableFuture.completedFuture(Acknowledge.get())) .build(); try (TaskSubmissionTestEnvironment env = new TaskSubmissionTestEnvironment.Builder(jobId) 
.setResourceID(producerLocation) .setSlotSize(2) .addTaskManagerActionListener(eid1, ExecutionState.RUNNING, task1RunningFuture) .addTaskManagerActionListener(eid2, ExecutionState.RUNNING, task2RunningFuture) .addTaskManagerActionListener(eid1, ExecutionState.FINISHED, task1FinishedFuture) .addTaskManagerActionListener(eid2, ExecutionState.FINISHED, task2FinishedFuture) .setJobMasterId(jobMasterId) .setJobMasterGateway(testingJobMasterGateway) .useRealNonMockShuffleEnvironment() .build()) { TaskExecutorGateway tmGateway = env.getTaskExecutorGateway(); TaskSlotTable taskSlotTable = env.getTaskSlotTable(); taskSlotTable.allocateSlot(0, jobId, tdd1.getAllocationId(), Time.seconds(60)); tmGateway.submitTask(tdd1, jobMasterId, timeout).get(); task1RunningFuture.get(); taskSlotTable.allocateSlot(1, jobId, tdd2.getAllocationId(), Time.seconds(60)); tmGateway.submitTask(tdd2, jobMasterId, timeout).get(); task2RunningFuture.get(); task1FinishedFuture.get(); task2FinishedFuture.get(); assertSame(taskSlotTable.getTask(eid1).getExecutionState(), ExecutionState.FINISHED); assertSame(taskSlotTable.getTask(eid2).getExecutionState(), ExecutionState.FINISHED); } } /** * This tests creates two tasks. The sender sends data but fails to send the * state update back to the job manager. 
* the second one blocks to be canceled */ @Test(timeout = 10000L) public void testCancellingDependentAndStateUpdateFails() throws Exception { ResourceID producerLocation = ResourceID.generate(); NettyShuffleDescriptor sdd = createRemoteWithIdAndLocation(new IntermediateResultPartitionID(), producerLocation); TaskDeploymentDescriptor tdd1 = createSender(sdd); TaskDeploymentDescriptor tdd2 = createReceiver(sdd); ExecutionAttemptID eid1 = tdd1.getExecutionAttemptId(); ExecutionAttemptID eid2 = tdd2.getExecutionAttemptId(); final CompletableFuture<Void> task1RunningFuture = new CompletableFuture<>(); final CompletableFuture<Void> task2RunningFuture = new CompletableFuture<>(); final CompletableFuture<Void> task1FailedFuture = new CompletableFuture<>(); final CompletableFuture<Void> task2CanceledFuture = new CompletableFuture<>(); final JobMasterId jobMasterId = JobMasterId.generate(); TestingJobMasterGateway testingJobMasterGateway = new TestingJobMasterGatewayBuilder() .setFencingTokenSupplier(() -> jobMasterId) .setUpdateTaskExecutionStateFunction(taskExecutionState -> { if (taskExecutionState != null && taskExecutionState.getID().equals(eid1)) { return FutureUtils.completedExceptionally( new ExecutionGraphException("The execution attempt " + eid2 + " was not found.")); } else { return CompletableFuture.completedFuture(Acknowledge.get()); } }) .build(); try (TaskSubmissionTestEnvironment env = new TaskSubmissionTestEnvironment.Builder(jobId) .setResourceID(producerLocation) .setSlotSize(2) .addTaskManagerActionListener(eid1, ExecutionState.RUNNING, task1RunningFuture) .addTaskManagerActionListener(eid2, ExecutionState.RUNNING, task2RunningFuture) .addTaskManagerActionListener(eid1, ExecutionState.FAILED, task1FailedFuture) .addTaskManagerActionListener(eid2, ExecutionState.CANCELED, task2CanceledFuture) .setJobMasterId(jobMasterId) .setJobMasterGateway(testingJobMasterGateway) .useRealNonMockShuffleEnvironment() .build()) { TaskExecutorGateway tmGateway = 
env.getTaskExecutorGateway(); TaskSlotTable taskSlotTable = env.getTaskSlotTable(); taskSlotTable.allocateSlot(0, jobId, tdd1.getAllocationId(), Time.seconds(60)); tmGateway.submitTask(tdd1, jobMasterId, timeout).get(); task1RunningFuture.get(); taskSlotTable.allocateSlot(1, jobId, tdd2.getAllocationId(), Time.seconds(60)); tmGateway.submitTask(tdd2, jobMasterId, timeout).get(); task2RunningFuture.get(); task1FailedFuture.get(); assertSame(taskSlotTable.getTask(eid1).getExecutionState(), ExecutionState.FAILED); tmGateway.cancelTask(eid2, timeout); task2CanceledFuture.get(); assertSame(taskSlotTable.getTask(eid2).getExecutionState(), ExecutionState.CANCELED); } } /** * Tests that repeated remote {@link PartitionNotFoundException}s ultimately fail the receiver. */ @Test(timeout = 10000L) public void testRemotePartitionNotFound() throws Exception { final int dataPort = NetUtils.getAvailablePort(); Configuration config = new Configuration(); config.setInteger(NettyShuffleEnvironmentOptions.DATA_PORT, dataPort); config.setInteger(NettyShuffleEnvironmentOptions.NETWORK_REQUEST_BACKOFF_INITIAL, 100); config.setInteger(NettyShuffleEnvironmentOptions.NETWORK_REQUEST_BACKOFF_MAX, 200); NettyShuffleDescriptor sdd = NettyShuffleDescriptorBuilder.newBuilder().setDataPort(dataPort).buildRemote(); TaskDeploymentDescriptor tdd = createReceiver(sdd); ExecutionAttemptID eid = tdd.getExecutionAttemptId(); final CompletableFuture<Void> taskRunningFuture = new CompletableFuture<>(); final CompletableFuture<Void> taskFailedFuture = new CompletableFuture<>(); try (TaskSubmissionTestEnvironment env = new TaskSubmissionTestEnvironment.Builder(jobId) .setSlotSize(2) .addTaskManagerActionListener(eid, ExecutionState.RUNNING, taskRunningFuture) .addTaskManagerActionListener(eid, ExecutionState.FAILED, taskFailedFuture) .setConfiguration(config) .setLocalCommunication(false) .useRealNonMockShuffleEnvironment() .build()) { TaskExecutorGateway tmGateway = env.getTaskExecutorGateway(); 
TaskSlotTable taskSlotTable = env.getTaskSlotTable(); taskSlotTable.allocateSlot(0, jobId, tdd.getAllocationId(), Time.seconds(60)); tmGateway.submitTask(tdd, env.getJobMasterId(), timeout).get(); taskRunningFuture.get(); taskFailedFuture.get(); assertThat(taskSlotTable.getTask(eid).getFailureCause(), instanceOf(PartitionNotFoundException.class)); } } /** * Tests that the TaskManager fails the task if the partition update fails. */ @Test public void testUpdateTaskInputPartitionsFailure() throws Exception { final ExecutionAttemptID eid = new ExecutionAttemptID(); final TaskDeploymentDescriptor tdd = createTestTaskDeploymentDescriptor("test task", eid, BlockingNoOpInvokable.class); final CompletableFuture<Void> taskRunningFuture = new CompletableFuture<>(); final CompletableFuture<Void> taskFailedFuture = new CompletableFuture<>(); final ShuffleEnvironment<?, ?> shuffleEnvironment = mock(ShuffleEnvironment.class, Mockito.RETURNS_MOCKS); try (TaskSubmissionTestEnvironment env = new TaskSubmissionTestEnvironment.Builder(jobId) .setShuffleEnvironment(shuffleEnvironment) .setSlotSize(1) .addTaskManagerActionListener(eid, ExecutionState.RUNNING, taskRunningFuture) .addTaskManagerActionListener(eid, ExecutionState.FAILED, taskFailedFuture) .build()) { TaskExecutorGateway tmGateway = env.getTaskExecutorGateway(); TaskSlotTable taskSlotTable = env.getTaskSlotTable(); taskSlotTable.allocateSlot(0, jobId, tdd.getAllocationId(), Time.seconds(60)); tmGateway.submitTask(tdd, env.getJobMasterId(), timeout).get(); taskRunningFuture.get(); final ResourceID producerLocation = env.getTaskExecutor().getResourceID(); NettyShuffleDescriptor shuffleDescriptor = createRemoteWithIdAndLocation(new IntermediateResultPartitionID(), producerLocation); final PartitionInfo partitionUpdate = new PartitionInfo(new IntermediateDataSetID(), shuffleDescriptor); doThrow(new IOException()).when(shuffleEnvironment).updatePartitionInfo(eid, partitionUpdate); final CompletableFuture<Acknowledge> 
updateFuture = tmGateway.updatePartitions( eid, Collections.singletonList(partitionUpdate), timeout); updateFuture.get(); taskFailedFuture.get(); Task task = taskSlotTable.getTask(tdd.getExecutionAttemptId()); assertThat(task.getExecutionState(), is(ExecutionState.FAILED)); assertThat(task.getFailureCause(), instanceOf(IOException.class)); } } /** * Tests that repeated local {@link PartitionNotFoundException}s ultimately fail the receiver. */ @Test(timeout = 10000L) public void testLocalPartitionNotFound() throws Exception { ResourceID producerLocation = ResourceID.generate(); NettyShuffleDescriptor shuffleDescriptor = createRemoteWithIdAndLocation(new IntermediateResultPartitionID(), producerLocation); TaskDeploymentDescriptor tdd = createReceiver(shuffleDescriptor); ExecutionAttemptID eid = tdd.getExecutionAttemptId(); Configuration config = new Configuration(); config.setInteger(NettyShuffleEnvironmentOptions.NETWORK_REQUEST_BACKOFF_INITIAL, 100); config.setInteger(NettyShuffleEnvironmentOptions.NETWORK_REQUEST_BACKOFF_MAX, 200); final CompletableFuture<Void> taskRunningFuture = new CompletableFuture<>(); final CompletableFuture<Void> taskFailedFuture = new CompletableFuture<>(); try (TaskSubmissionTestEnvironment env = new TaskSubmissionTestEnvironment.Builder(jobId) .setResourceID(producerLocation) .setSlotSize(1) .addTaskManagerActionListener(eid, ExecutionState.RUNNING, taskRunningFuture) .addTaskManagerActionListener(eid, ExecutionState.FAILED, taskFailedFuture) .setConfiguration(config) .useRealNonMockShuffleEnvironment() .build()) { TaskExecutorGateway tmGateway = env.getTaskExecutorGateway(); TaskSlotTable taskSlotTable = env.getTaskSlotTable(); taskSlotTable.allocateSlot(0, jobId, tdd.getAllocationId(), Time.seconds(60)); tmGateway.submitTask(tdd, env.getJobMasterId(), timeout).get(); taskRunningFuture.get(); taskFailedFuture.get(); assertSame(taskSlotTable.getTask(eid).getExecutionState(), ExecutionState.FAILED); 
assertThat(taskSlotTable.getTask(eid).getFailureCause(), instanceOf(PartitionNotFoundException.class)); } } /** * Test that a failing schedule or update consumers call leads to the failing of the respective * task. * * <p>IMPORTANT: We have to make sure that the invokable's cancel method is called, because only * then the future is completed. We do this by not eagerly deploying consumer tasks and requiring * the invokable to fill one memory segment. The completed memory segment will trigger the * scheduling of the downstream operator since it is in pipeline mode. After we've filled the * memory segment, we'll block the invokable and wait for the task failure due to the failed * schedule or update consumers call. */ @Test(timeout = 10000L) public void testFailingScheduleOrUpdateConsumers() throws Exception { final Configuration configuration = new Configuration(); configuration.setString(TaskManagerOptions.MEMORY_SEGMENT_SIZE, "4096"); NettyShuffleDescriptor sdd = createRemoteWithIdAndLocation(new IntermediateResultPartitionID(), ResourceID.generate()); TaskDeploymentDescriptor tdd = createSender(sdd, TestingAbstractInvokables.TestInvokableRecordCancel.class); ExecutionAttemptID eid = tdd.getExecutionAttemptId(); final CompletableFuture<Void> taskRunningFuture = new CompletableFuture<>(); final Exception exception = new Exception("Failed schedule or update consumers"); final JobMasterId jobMasterId = JobMasterId.generate(); TestingJobMasterGateway testingJobMasterGateway = new TestingJobMasterGatewayBuilder() .setFencingTokenSupplier(() -> jobMasterId) .setUpdateTaskExecutionStateFunction(resultPartitionID -> FutureUtils.completedExceptionally(exception)) .build(); try (TaskSubmissionTestEnvironment env = new TaskSubmissionTestEnvironment.Builder(jobId) .setSlotSize(1) .setConfiguration(configuration) .addTaskManagerActionListener(eid, ExecutionState.RUNNING, taskRunningFuture) .setJobMasterId(jobMasterId) .setJobMasterGateway(testingJobMasterGateway) 
.useRealNonMockShuffleEnvironment() .build()) { TaskExecutorGateway tmGateway = env.getTaskExecutorGateway(); TaskSlotTable taskSlotTable = env.getTaskSlotTable(); TestingAbstractInvokables.TestInvokableRecordCancel.resetGotCanceledFuture(); taskSlotTable.allocateSlot(0, jobId, tdd.getAllocationId(), Time.seconds(60)); tmGateway.submitTask(tdd, jobMasterId, timeout).get(); taskRunningFuture.get(); CompletableFuture<Boolean> cancelFuture = TestingAbstractInvokables.TestInvokableRecordCancel.gotCanceled(); assertTrue(cancelFuture.get()); assertTrue(ExceptionUtils.findThrowableWithMessage(taskSlotTable.getTask(eid).getFailureCause(), exception.getMessage()).isPresent()); } } /** * Tests request of task back pressure. */ @Test(timeout = 20000L) public void testRequestTaskBackPressure() throws Exception { final NettyShuffleDescriptor shuffleDescriptor = createRemoteWithIdAndLocation( new IntermediateResultPartitionID(), ResourceID.generate()); final TaskDeploymentDescriptor tdd = createSender(shuffleDescriptor, OutputBlockedInvokable.class); final ExecutionAttemptID executionAttemptID = tdd.getExecutionAttemptId(); final CompletableFuture<Void> taskRunningFuture = new CompletableFuture<>(); final CompletableFuture<Void> taskCanceledFuture = new CompletableFuture<>(); final Configuration configuration = new Configuration(); configuration.set(WebOptions.BACKPRESSURE_NUM_SAMPLES, 10); configuration.set(WebOptions.BACKPRESSURE_DELAY, 200); try (final TaskSubmissionTestEnvironment env = new TaskSubmissionTestEnvironment.Builder(jobId) .setSlotSize(1) .setConfiguration(configuration) .useRealNonMockShuffleEnvironment() .addTaskManagerActionListener(executionAttemptID, ExecutionState.RUNNING, taskRunningFuture) .addTaskManagerActionListener(executionAttemptID, ExecutionState.CANCELED, taskCanceledFuture) .build()) { final TaskExecutorGateway tmGateway = env.getTaskExecutorGateway(); final TaskSlotTable taskSlotTable = env.getTaskSlotTable(); taskSlotTable.allocateSlot(0, 
jobId, tdd.getAllocationId(), Time.seconds(60)); tmGateway.submitTask(tdd, env.getJobMasterId(), timeout).get(); taskRunningFuture.get(); final int requestId = 1234; final ExecutionAttemptID nonExistTaskEid = new ExecutionAttemptID(); final CompletableFuture<TaskBackPressureResponse> failedRequestFuture = tmGateway.requestTaskBackPressure(nonExistTaskEid, requestId, timeout); try { failedRequestFuture.get(); } catch (Exception e) { assertThat(e.getCause(), instanceOf(IllegalStateException.class)); assertThat(e.getCause().getMessage(), startsWith("Cannot request back pressure")); } double backPressureRatio = 0; for (int i = 0; i < 5; ++i) { CompletableFuture<TaskBackPressureResponse> successfulRequestFuture = tmGateway.requestTaskBackPressure(executionAttemptID, i, timeout); TaskBackPressureResponse response = successfulRequestFuture.get(); assertEquals(response.getRequestId(), i); assertEquals(response.getExecutionAttemptID(), executionAttemptID); if ((backPressureRatio = response.getBackPressureRatio()) >= 1.0) { break; } } assertEquals("Task was not back pressured in given time.", 1.0, backPressureRatio, 0.0); final int sleepTime = 1000; CompletableFuture<TaskBackPressureResponse> canceledRequestFuture = tmGateway.requestTaskBackPressure(executionAttemptID, requestId, timeout); Thread.sleep(sleepTime); tmGateway.cancelTask(executionAttemptID, timeout); taskCanceledFuture.get(); TaskBackPressureResponse responseAfterCancel = canceledRequestFuture.get(); assertEquals(executionAttemptID, responseAfterCancel.getExecutionAttemptID()); assertEquals(requestId, responseAfterCancel.getRequestId()); assertTrue(responseAfterCancel.getBackPressureRatio() > 0); } } private TaskDeploymentDescriptor createSender(NettyShuffleDescriptor shuffleDescriptor) throws IOException { return createSender(shuffleDescriptor, TestingAbstractInvokables.Sender.class); } private TaskDeploymentDescriptor createSender( NettyShuffleDescriptor shuffleDescriptor, Class<? 
extends AbstractInvokable> abstractInvokable) throws IOException { PartitionDescriptor partitionDescriptor = new PartitionDescriptor( new IntermediateDataSetID(), shuffleDescriptor.getResultPartitionID().getPartitionId(), ResultPartitionType.PIPELINED, 1, 0); ResultPartitionDeploymentDescriptor resultPartitionDeploymentDescriptor = new ResultPartitionDeploymentDescriptor( partitionDescriptor, shuffleDescriptor, 1, true); return createTestTaskDeploymentDescriptor( "Sender", shuffleDescriptor.getResultPartitionID().getProducerId(), abstractInvokable, 1, Collections.singletonList(resultPartitionDeploymentDescriptor), Collections.emptyList()); } private TaskDeploymentDescriptor createTestTaskDeploymentDescriptor( String taskName, ExecutionAttemptID eid, Class<? extends AbstractInvokable> abstractInvokable ) throws IOException { return createTestTaskDeploymentDescriptor(taskName, eid, abstractInvokable, 1); } private TaskDeploymentDescriptor createTestTaskDeploymentDescriptor( String taskName, ExecutionAttemptID eid, Class<? extends AbstractInvokable> abstractInvokable, int maxNumberOfSubtasks ) throws IOException { return createTestTaskDeploymentDescriptor(taskName, eid, abstractInvokable, maxNumberOfSubtasks, Collections.emptyList(), Collections.emptyList()); } private TaskDeploymentDescriptor createTestTaskDeploymentDescriptor( String taskName, ExecutionAttemptID eid, Class<? 
extends AbstractInvokable> abstractInvokable, int maxNumberOfSubtasks, Collection<ResultPartitionDeploymentDescriptor> producedPartitions, Collection<InputGateDeploymentDescriptor> inputGates ) throws IOException { Preconditions.checkNotNull(producedPartitions); Preconditions.checkNotNull(inputGates); return createTaskDeploymentDescriptor( jobId, testName.getMethodName(), eid, new SerializedValue<>(new ExecutionConfig()), taskName, maxNumberOfSubtasks, 0, 1, 0, new Configuration(), new Configuration(), abstractInvokable.getName(), producedPartitions, inputGates, Collections.emptyList(), Collections.emptyList(), 0); } static TaskDeploymentDescriptor createTaskDeploymentDescriptor( JobID jobId, String jobName, ExecutionAttemptID executionAttemptId, SerializedValue<ExecutionConfig> serializedExecutionConfig, String taskName, int maxNumberOfSubtasks, int subtaskIndex, int numberOfSubtasks, int attemptNumber, Configuration jobConfiguration, Configuration taskConfiguration, String invokableClassName, Collection<ResultPartitionDeploymentDescriptor> producedPartitions, Collection<InputGateDeploymentDescriptor> inputGates, Collection<PermanentBlobKey> requiredJarFiles, Collection<URL> requiredClasspaths, int targetSlotNumber) throws IOException { JobInformation jobInformation = new JobInformation( jobId, jobName, serializedExecutionConfig, jobConfiguration, requiredJarFiles, requiredClasspaths); TaskInformation taskInformation = new TaskInformation( new JobVertexID(), taskName, numberOfSubtasks, maxNumberOfSubtasks, invokableClassName, taskConfiguration); SerializedValue<JobInformation> serializedJobInformation = new SerializedValue<>(jobInformation); SerializedValue<TaskInformation> serializedJobVertexInformation = new SerializedValue<>(taskInformation); return new TaskDeploymentDescriptor( jobId, new TaskDeploymentDescriptor.NonOffloaded<>(serializedJobInformation), new TaskDeploymentDescriptor.NonOffloaded<>(serializedJobVertexInformation), executionAttemptId, new 
AllocationID(), subtaskIndex, attemptNumber, targetSlotNumber, null, producedPartitions, inputGates); } }
class TaskExecutorSubmissionTest extends TestLogger { @Rule public final TestName testName = new TestName(); private static final Time timeout = Time.milliseconds(10000L); private JobID jobId = new JobID(); private MetricRegistryImpl metricRegistry; private TestingRpcService rpcService; private String metricQueryServiceAddress; @Before public void setup() { rpcService = new TestingRpcService(); metricRegistry = new MetricRegistryImpl(MetricRegistryConfiguration.defaultMetricRegistryConfiguration()); metricRegistry.startQueryService(rpcService, new ResourceID("mqs")); metricQueryServiceAddress = metricRegistry.getMetricQueryServiceGatewayRpcAddress(); } @After public void teardown() throws ExecutionException, InterruptedException { if (rpcService != null) { rpcService.stopService().get(); } if (metricRegistry != null) { metricRegistry.shutdown().get(); } } /** * Tests that we can submit a task to the TaskManager given that we've allocated a slot there. */ @Test(timeout = 10000L) public void testTaskSubmission() throws Exception { final ExecutionAttemptID eid = new ExecutionAttemptID(); final TaskDeploymentDescriptor tdd = createTestTaskDeploymentDescriptor("test task", eid, TaskExecutorTest.TestInvokable.class); final CompletableFuture<Void> taskRunningFuture = new CompletableFuture<>(); try (TaskSubmissionTestEnvironment env = new TaskSubmissionTestEnvironment.Builder(jobId) .setSlotSize(1) .addTaskManagerActionListener(eid, ExecutionState.RUNNING, taskRunningFuture) .setMetricQueryServiceAddress(metricQueryServiceAddress) .build()) { TaskExecutorGateway tmGateway = env.getTaskExecutorGateway(); TaskSlotTable taskSlotTable = env.getTaskSlotTable(); taskSlotTable.allocateSlot(0, jobId, tdd.getAllocationId(), Time.seconds(60)); tmGateway.submitTask(tdd, env.getJobMasterId(), timeout).get(); taskRunningFuture.get(); } } /** * Tests that the TaskManager sends a proper exception back to the sender if the submit task * message fails. 
*/ @Test(timeout = 10000L) public void testSubmitTaskFailure() throws Exception { final ExecutionAttemptID eid = new ExecutionAttemptID(); final TaskDeploymentDescriptor tdd = createTestTaskDeploymentDescriptor( "test task", eid, BlockingNoOpInvokable.class, 0); try (TaskSubmissionTestEnvironment env = new TaskSubmissionTestEnvironment.Builder(jobId) .setMetricQueryServiceAddress(metricQueryServiceAddress) .build()) { TaskExecutorGateway tmGateway = env.getTaskExecutorGateway(); TaskSlotTable taskSlotTable = env.getTaskSlotTable(); taskSlotTable.allocateSlot(0, jobId, tdd.getAllocationId(), Time.seconds(60)); tmGateway.submitTask(tdd, env.getJobMasterId(), timeout).get(); } catch (Exception e) { assertThat(e.getCause(), instanceOf(IllegalArgumentException.class)); } } /** * Tests that we can cancel the task of the TaskManager given that we've submitted it. */ @Test(timeout = 10000L) public void testTaskSubmissionAndCancelling() throws Exception { final ExecutionAttemptID eid1 = new ExecutionAttemptID(); final ExecutionAttemptID eid2 = new ExecutionAttemptID(); final TaskDeploymentDescriptor tdd1 = createTestTaskDeploymentDescriptor("test task", eid1, BlockingNoOpInvokable.class); final TaskDeploymentDescriptor tdd2 = createTestTaskDeploymentDescriptor("test task", eid2, BlockingNoOpInvokable.class); final CompletableFuture<Void> task1RunningFuture = new CompletableFuture<>(); final CompletableFuture<Void> task2RunningFuture = new CompletableFuture<>(); final CompletableFuture<Void> task1CanceledFuture = new CompletableFuture<>(); try (TaskSubmissionTestEnvironment env = new TaskSubmissionTestEnvironment.Builder(jobId) .setSlotSize(2) .addTaskManagerActionListener(eid1, ExecutionState.RUNNING, task1RunningFuture) .addTaskManagerActionListener(eid2, ExecutionState.RUNNING, task2RunningFuture) .addTaskManagerActionListener(eid1, ExecutionState.CANCELED, task1CanceledFuture) .setMetricQueryServiceAddress(metricQueryServiceAddress) .build()) { TaskExecutorGateway 
tmGateway = env.getTaskExecutorGateway(); TaskSlotTable taskSlotTable = env.getTaskSlotTable(); taskSlotTable.allocateSlot(0, jobId, tdd1.getAllocationId(), Time.seconds(60)); tmGateway.submitTask(tdd1, env.getJobMasterId(), timeout).get(); task1RunningFuture.get(); taskSlotTable.allocateSlot(1, jobId, tdd2.getAllocationId(), Time.seconds(60)); tmGateway.submitTask(tdd2, env.getJobMasterId(), timeout).get(); task2RunningFuture.get(); assertSame(taskSlotTable.getTask(eid1).getExecutionState(), ExecutionState.RUNNING); assertSame(taskSlotTable.getTask(eid2).getExecutionState(), ExecutionState.RUNNING); tmGateway.cancelTask(eid1, timeout); task1CanceledFuture.get(); assertSame(taskSlotTable.getTask(eid1).getExecutionState(), ExecutionState.CANCELED); assertSame(taskSlotTable.getTask(eid2).getExecutionState(), ExecutionState.RUNNING); } } /** * Tests that submitted tasks will fail when attempting to send/receive data if no * ResultPartitions/InputGates are set up. */ @Test(timeout = 10000L) public void testGateChannelEdgeMismatch() throws Exception { final ExecutionAttemptID eid1 = new ExecutionAttemptID(); final ExecutionAttemptID eid2 = new ExecutionAttemptID(); final TaskDeploymentDescriptor tdd1 = createTestTaskDeploymentDescriptor("Sender", eid1, TestingAbstractInvokables.Sender.class); final TaskDeploymentDescriptor tdd2 = createTestTaskDeploymentDescriptor("Receiver", eid2, TestingAbstractInvokables.Receiver.class); final CompletableFuture<Void> task1RunningFuture = new CompletableFuture<>(); final CompletableFuture<Void> task2RunningFuture = new CompletableFuture<>(); final CompletableFuture<Void> task1FailedFuture = new CompletableFuture<>(); final CompletableFuture<Void> task2FailedFuture = new CompletableFuture<>(); try (TaskSubmissionTestEnvironment env = new TaskSubmissionTestEnvironment.Builder(jobId) .addTaskManagerActionListener(eid1, ExecutionState.RUNNING, task1RunningFuture) .addTaskManagerActionListener(eid2, ExecutionState.RUNNING, 
task2RunningFuture) .addTaskManagerActionListener(eid1, ExecutionState.FAILED, task1FailedFuture) .addTaskManagerActionListener(eid2, ExecutionState.FAILED, task2FailedFuture) .setMetricQueryServiceAddress(metricQueryServiceAddress) .setSlotSize(2) .build()) { TaskExecutorGateway tmGateway = env.getTaskExecutorGateway(); TaskSlotTable taskSlotTable = env.getTaskSlotTable(); taskSlotTable.allocateSlot(0, jobId, tdd1.getAllocationId(), Time.seconds(60)); tmGateway.submitTask(tdd1, env.getJobMasterId(), timeout).get(); task1RunningFuture.get(); taskSlotTable.allocateSlot(1, jobId, tdd2.getAllocationId(), Time.seconds(60)); tmGateway.submitTask(tdd2, env.getJobMasterId(), timeout).get(); task2RunningFuture.get(); task1FailedFuture.get(); task2FailedFuture.get(); assertSame(taskSlotTable.getTask(eid1).getExecutionState(), ExecutionState.FAILED); assertSame(taskSlotTable.getTask(eid2).getExecutionState(), ExecutionState.FAILED); } } @Test(timeout = 10000L) public void testRunJobWithForwardChannel() throws Exception { ResourceID producerLocation = ResourceID.generate(); NettyShuffleDescriptor sdd = createRemoteWithIdAndLocation(new IntermediateResultPartitionID(), producerLocation); TaskDeploymentDescriptor tdd1 = createSender(sdd); TaskDeploymentDescriptor tdd2 = createReceiver(sdd); ExecutionAttemptID eid1 = tdd1.getExecutionAttemptId(); ExecutionAttemptID eid2 = tdd2.getExecutionAttemptId(); final CompletableFuture<Void> task1RunningFuture = new CompletableFuture<>(); final CompletableFuture<Void> task2RunningFuture = new CompletableFuture<>(); final CompletableFuture<Void> task1FinishedFuture = new CompletableFuture<>(); final CompletableFuture<Void> task2FinishedFuture = new CompletableFuture<>(); final JobMasterId jobMasterId = JobMasterId.generate(); TestingJobMasterGateway testingJobMasterGateway = new TestingJobMasterGatewayBuilder() .setFencingTokenSupplier(() -> jobMasterId) .setScheduleOrUpdateConsumersFunction( resultPartitionID -> 
CompletableFuture.completedFuture(Acknowledge.get())) .build(); try (TaskSubmissionTestEnvironment env = new TaskSubmissionTestEnvironment.Builder(jobId) .setResourceID(producerLocation) .setSlotSize(2) .addTaskManagerActionListener(eid1, ExecutionState.RUNNING, task1RunningFuture) .addTaskManagerActionListener(eid2, ExecutionState.RUNNING, task2RunningFuture) .addTaskManagerActionListener(eid1, ExecutionState.FINISHED, task1FinishedFuture) .addTaskManagerActionListener(eid2, ExecutionState.FINISHED, task2FinishedFuture) .setMetricQueryServiceAddress(metricQueryServiceAddress) .setJobMasterId(jobMasterId) .setJobMasterGateway(testingJobMasterGateway) .useRealNonMockShuffleEnvironment() .build()) { TaskExecutorGateway tmGateway = env.getTaskExecutorGateway(); TaskSlotTable taskSlotTable = env.getTaskSlotTable(); taskSlotTable.allocateSlot(0, jobId, tdd1.getAllocationId(), Time.seconds(60)); tmGateway.submitTask(tdd1, jobMasterId, timeout).get(); task1RunningFuture.get(); taskSlotTable.allocateSlot(1, jobId, tdd2.getAllocationId(), Time.seconds(60)); tmGateway.submitTask(tdd2, jobMasterId, timeout).get(); task2RunningFuture.get(); task1FinishedFuture.get(); task2FinishedFuture.get(); assertSame(taskSlotTable.getTask(eid1).getExecutionState(), ExecutionState.FINISHED); assertSame(taskSlotTable.getTask(eid2).getExecutionState(), ExecutionState.FINISHED); } } /** * This tests creates two tasks. The sender sends data but fails to send the * state update back to the job manager. 
* the second one blocks to be canceled */ @Test(timeout = 10000L) public void testCancellingDependentAndStateUpdateFails() throws Exception { ResourceID producerLocation = ResourceID.generate(); NettyShuffleDescriptor sdd = createRemoteWithIdAndLocation(new IntermediateResultPartitionID(), producerLocation); TaskDeploymentDescriptor tdd1 = createSender(sdd); TaskDeploymentDescriptor tdd2 = createReceiver(sdd); ExecutionAttemptID eid1 = tdd1.getExecutionAttemptId(); ExecutionAttemptID eid2 = tdd2.getExecutionAttemptId(); final CompletableFuture<Void> task1RunningFuture = new CompletableFuture<>(); final CompletableFuture<Void> task2RunningFuture = new CompletableFuture<>(); final CompletableFuture<Void> task1FailedFuture = new CompletableFuture<>(); final CompletableFuture<Void> task2CanceledFuture = new CompletableFuture<>(); final JobMasterId jobMasterId = JobMasterId.generate(); TestingJobMasterGateway testingJobMasterGateway = new TestingJobMasterGatewayBuilder() .setFencingTokenSupplier(() -> jobMasterId) .setUpdateTaskExecutionStateFunction(taskExecutionState -> { if (taskExecutionState != null && taskExecutionState.getID().equals(eid1)) { return FutureUtils.completedExceptionally( new ExecutionGraphException("The execution attempt " + eid2 + " was not found.")); } else { return CompletableFuture.completedFuture(Acknowledge.get()); } }) .build(); try (TaskSubmissionTestEnvironment env = new TaskSubmissionTestEnvironment.Builder(jobId) .setResourceID(producerLocation) .setSlotSize(2) .addTaskManagerActionListener(eid1, ExecutionState.RUNNING, task1RunningFuture) .addTaskManagerActionListener(eid2, ExecutionState.RUNNING, task2RunningFuture) .addTaskManagerActionListener(eid1, ExecutionState.FAILED, task1FailedFuture) .addTaskManagerActionListener(eid2, ExecutionState.CANCELED, task2CanceledFuture) .setMetricQueryServiceAddress(metricQueryServiceAddress) .setJobMasterId(jobMasterId) .setJobMasterGateway(testingJobMasterGateway) 
.useRealNonMockShuffleEnvironment() .build()) { TaskExecutorGateway tmGateway = env.getTaskExecutorGateway(); TaskSlotTable taskSlotTable = env.getTaskSlotTable(); taskSlotTable.allocateSlot(0, jobId, tdd1.getAllocationId(), Time.seconds(60)); tmGateway.submitTask(tdd1, jobMasterId, timeout).get(); task1RunningFuture.get(); taskSlotTable.allocateSlot(1, jobId, tdd2.getAllocationId(), Time.seconds(60)); tmGateway.submitTask(tdd2, jobMasterId, timeout).get(); task2RunningFuture.get(); task1FailedFuture.get(); assertSame(taskSlotTable.getTask(eid1).getExecutionState(), ExecutionState.FAILED); tmGateway.cancelTask(eid2, timeout); task2CanceledFuture.get(); assertSame(taskSlotTable.getTask(eid2).getExecutionState(), ExecutionState.CANCELED); } } /** * Tests that repeated remote {@link PartitionNotFoundException}s ultimately fail the receiver. */ @Test(timeout = 10000L) public void testRemotePartitionNotFound() throws Exception { final int dataPort = NetUtils.getAvailablePort(); Configuration config = new Configuration(); config.setInteger(NettyShuffleEnvironmentOptions.DATA_PORT, dataPort); config.setInteger(NettyShuffleEnvironmentOptions.NETWORK_REQUEST_BACKOFF_INITIAL, 100); config.setInteger(NettyShuffleEnvironmentOptions.NETWORK_REQUEST_BACKOFF_MAX, 200); NettyShuffleDescriptor sdd = NettyShuffleDescriptorBuilder.newBuilder().setDataPort(dataPort).buildRemote(); TaskDeploymentDescriptor tdd = createReceiver(sdd); ExecutionAttemptID eid = tdd.getExecutionAttemptId(); final CompletableFuture<Void> taskRunningFuture = new CompletableFuture<>(); final CompletableFuture<Void> taskFailedFuture = new CompletableFuture<>(); try (TaskSubmissionTestEnvironment env = new TaskSubmissionTestEnvironment.Builder(jobId) .setSlotSize(2) .addTaskManagerActionListener(eid, ExecutionState.RUNNING, taskRunningFuture) .addTaskManagerActionListener(eid, ExecutionState.FAILED, taskFailedFuture) .setMetricQueryServiceAddress(metricQueryServiceAddress) .setConfiguration(config) 
.setLocalCommunication(false) .useRealNonMockShuffleEnvironment() .build()) { TaskExecutorGateway tmGateway = env.getTaskExecutorGateway(); TaskSlotTable taskSlotTable = env.getTaskSlotTable(); taskSlotTable.allocateSlot(0, jobId, tdd.getAllocationId(), Time.seconds(60)); tmGateway.submitTask(tdd, env.getJobMasterId(), timeout).get(); taskRunningFuture.get(); taskFailedFuture.get(); assertThat(taskSlotTable.getTask(eid).getFailureCause(), instanceOf(PartitionNotFoundException.class)); } } /** * Tests that the TaskManager fails the task if the partition update fails. */ @Test public void testUpdateTaskInputPartitionsFailure() throws Exception { final ExecutionAttemptID eid = new ExecutionAttemptID(); final TaskDeploymentDescriptor tdd = createTestTaskDeploymentDescriptor("test task", eid, BlockingNoOpInvokable.class); final CompletableFuture<Void> taskRunningFuture = new CompletableFuture<>(); final CompletableFuture<Void> taskFailedFuture = new CompletableFuture<>(); final ShuffleEnvironment<?, ?> shuffleEnvironment = mock(ShuffleEnvironment.class, Mockito.RETURNS_MOCKS); try (TaskSubmissionTestEnvironment env = new TaskSubmissionTestEnvironment.Builder(jobId) .setShuffleEnvironment(shuffleEnvironment) .setSlotSize(1) .setMetricQueryServiceAddress(metricQueryServiceAddress) .addTaskManagerActionListener(eid, ExecutionState.RUNNING, taskRunningFuture) .addTaskManagerActionListener(eid, ExecutionState.FAILED, taskFailedFuture) .build()) { TaskExecutorGateway tmGateway = env.getTaskExecutorGateway(); TaskSlotTable taskSlotTable = env.getTaskSlotTable(); taskSlotTable.allocateSlot(0, jobId, tdd.getAllocationId(), Time.seconds(60)); tmGateway.submitTask(tdd, env.getJobMasterId(), timeout).get(); taskRunningFuture.get(); final ResourceID producerLocation = env.getTaskExecutor().getResourceID(); NettyShuffleDescriptor shuffleDescriptor = createRemoteWithIdAndLocation(new IntermediateResultPartitionID(), producerLocation); final PartitionInfo partitionUpdate = new 
PartitionInfo(new IntermediateDataSetID(), shuffleDescriptor); doThrow(new IOException()).when(shuffleEnvironment).updatePartitionInfo(eid, partitionUpdate); final CompletableFuture<Acknowledge> updateFuture = tmGateway.updatePartitions( eid, Collections.singletonList(partitionUpdate), timeout); updateFuture.get(); taskFailedFuture.get(); Task task = taskSlotTable.getTask(tdd.getExecutionAttemptId()); assertThat(task.getExecutionState(), is(ExecutionState.FAILED)); assertThat(task.getFailureCause(), instanceOf(IOException.class)); } } /** * Tests that repeated local {@link PartitionNotFoundException}s ultimately fail the receiver. */ @Test(timeout = 10000L) public void testLocalPartitionNotFound() throws Exception { ResourceID producerLocation = ResourceID.generate(); NettyShuffleDescriptor shuffleDescriptor = createRemoteWithIdAndLocation(new IntermediateResultPartitionID(), producerLocation); TaskDeploymentDescriptor tdd = createReceiver(shuffleDescriptor); ExecutionAttemptID eid = tdd.getExecutionAttemptId(); Configuration config = new Configuration(); config.setInteger(NettyShuffleEnvironmentOptions.NETWORK_REQUEST_BACKOFF_INITIAL, 100); config.setInteger(NettyShuffleEnvironmentOptions.NETWORK_REQUEST_BACKOFF_MAX, 200); final CompletableFuture<Void> taskRunningFuture = new CompletableFuture<>(); final CompletableFuture<Void> taskFailedFuture = new CompletableFuture<>(); try (TaskSubmissionTestEnvironment env = new TaskSubmissionTestEnvironment.Builder(jobId) .setResourceID(producerLocation) .setSlotSize(1) .setMetricQueryServiceAddress(metricQueryServiceAddress) .addTaskManagerActionListener(eid, ExecutionState.RUNNING, taskRunningFuture) .addTaskManagerActionListener(eid, ExecutionState.FAILED, taskFailedFuture) .setConfiguration(config) .useRealNonMockShuffleEnvironment() .build()) { TaskExecutorGateway tmGateway = env.getTaskExecutorGateway(); TaskSlotTable taskSlotTable = env.getTaskSlotTable(); taskSlotTable.allocateSlot(0, jobId, tdd.getAllocationId(), 
Time.seconds(60)); tmGateway.submitTask(tdd, env.getJobMasterId(), timeout).get(); taskRunningFuture.get(); taskFailedFuture.get(); assertSame(taskSlotTable.getTask(eid).getExecutionState(), ExecutionState.FAILED); assertThat(taskSlotTable.getTask(eid).getFailureCause(), instanceOf(PartitionNotFoundException.class)); } } /** * Test that a failing schedule or update consumers call leads to the failing of the respective * task. * * <p>IMPORTANT: We have to make sure that the invokable's cancel method is called, because only * then the future is completed. We do this by not eagerly deploying consumer tasks and requiring * the invokable to fill one memory segment. The completed memory segment will trigger the * scheduling of the downstream operator since it is in pipeline mode. After we've filled the * memory segment, we'll block the invokable and wait for the task failure due to the failed * schedule or update consumers call. */ @Test(timeout = 10000L) public void testFailingScheduleOrUpdateConsumers() throws Exception { final Configuration configuration = new Configuration(); configuration.setString(TaskManagerOptions.MEMORY_SEGMENT_SIZE, "4096"); NettyShuffleDescriptor sdd = createRemoteWithIdAndLocation(new IntermediateResultPartitionID(), ResourceID.generate()); TaskDeploymentDescriptor tdd = createSender(sdd, TestingAbstractInvokables.TestInvokableRecordCancel.class); ExecutionAttemptID eid = tdd.getExecutionAttemptId(); final CompletableFuture<Void> taskRunningFuture = new CompletableFuture<>(); final Exception exception = new Exception("Failed schedule or update consumers"); final JobMasterId jobMasterId = JobMasterId.generate(); TestingJobMasterGateway testingJobMasterGateway = new TestingJobMasterGatewayBuilder() .setFencingTokenSupplier(() -> jobMasterId) .setUpdateTaskExecutionStateFunction(resultPartitionID -> FutureUtils.completedExceptionally(exception)) .build(); try (TaskSubmissionTestEnvironment env = new TaskSubmissionTestEnvironment.Builder(jobId) 
.setSlotSize(1) .setConfiguration(configuration) .setMetricQueryServiceAddress(metricQueryServiceAddress) .addTaskManagerActionListener(eid, ExecutionState.RUNNING, taskRunningFuture) .setJobMasterId(jobMasterId) .setJobMasterGateway(testingJobMasterGateway) .useRealNonMockShuffleEnvironment() .build()) { TaskExecutorGateway tmGateway = env.getTaskExecutorGateway(); TaskSlotTable taskSlotTable = env.getTaskSlotTable(); TestingAbstractInvokables.TestInvokableRecordCancel.resetGotCanceledFuture(); taskSlotTable.allocateSlot(0, jobId, tdd.getAllocationId(), Time.seconds(60)); tmGateway.submitTask(tdd, jobMasterId, timeout).get(); taskRunningFuture.get(); CompletableFuture<Boolean> cancelFuture = TestingAbstractInvokables.TestInvokableRecordCancel.gotCanceled(); assertTrue(cancelFuture.get()); assertTrue(ExceptionUtils.findThrowableWithMessage(taskSlotTable.getTask(eid).getFailureCause(), exception.getMessage()).isPresent()); } } /** * Tests request of task back pressure. */ @Test(timeout = 20000L) public void testRequestTaskBackPressure() throws Exception { final NettyShuffleDescriptor shuffleDescriptor = newBuilder().buildLocal(); final TaskDeploymentDescriptor tdd = createSender(shuffleDescriptor, OutputBlockedInvokable.class); final ExecutionAttemptID executionAttemptID = tdd.getExecutionAttemptId(); final CompletableFuture<Void> taskRunningFuture = new CompletableFuture<>(); final CompletableFuture<Void> taskCanceledFuture = new CompletableFuture<>(); final Configuration configuration = new Configuration(); configuration.set(WebOptions.BACKPRESSURE_NUM_SAMPLES, 40); configuration.setString(TaskManagerOptions.MEMORY_SEGMENT_SIZE, "4096"); try (final TaskSubmissionTestEnvironment env = new TaskSubmissionTestEnvironment.Builder(jobId) .setSlotSize(1) .setMetricQueryServiceAddress(metricQueryServiceAddress) .setConfiguration(configuration) .useRealNonMockShuffleEnvironment() .addTaskManagerActionListener(executionAttemptID, ExecutionState.RUNNING, taskRunningFuture) 
.addTaskManagerActionListener(executionAttemptID, ExecutionState.CANCELED, taskCanceledFuture) .build()) { final TaskExecutorGateway tmGateway = env.getTaskExecutorGateway(); final TaskSlotTable taskSlotTable = env.getTaskSlotTable(); taskSlotTable.allocateSlot(0, jobId, tdd.getAllocationId(), Time.seconds(60)); tmGateway.submitTask(tdd, env.getJobMasterId(), timeout).get(); taskRunningFuture.get(); final int requestId = 1234; final ExecutionAttemptID nonExistTaskEid = new ExecutionAttemptID(); final CompletableFuture<TaskBackPressureResponse> failedRequestFuture = tmGateway.requestTaskBackPressure(nonExistTaskEid, requestId, timeout); try { failedRequestFuture.get(); } catch (Exception e) { assertThat(e.getCause(), instanceOf(IllegalStateException.class)); assertThat(e.getCause().getMessage(), startsWith("Cannot request back pressure")); } double backPressureRatio = 0; for (int i = 0; i < 5; ++i) { CompletableFuture<TaskBackPressureResponse> successfulRequestFuture = tmGateway.requestTaskBackPressure(executionAttemptID, i, timeout); TaskBackPressureResponse response = successfulRequestFuture.get(); assertEquals(response.getRequestId(), i); assertEquals(response.getExecutionAttemptID(), executionAttemptID); if ((backPressureRatio = response.getBackPressureRatio()) >= 1.0) { break; } } assertEquals("Task was not back pressured in given time.", 1.0, backPressureRatio, 0.0); final int sleepTime = 1000; CompletableFuture<TaskBackPressureResponse> canceledRequestFuture = tmGateway.requestTaskBackPressure(executionAttemptID, requestId, timeout); Thread.sleep(sleepTime); tmGateway.cancelTask(executionAttemptID, timeout); taskCanceledFuture.get(); TaskBackPressureResponse responseAfterCancel = canceledRequestFuture.get(); assertEquals(executionAttemptID, responseAfterCancel.getExecutionAttemptID()); assertEquals(requestId, responseAfterCancel.getRequestId()); assertTrue(responseAfterCancel.getBackPressureRatio() > 0); } } private TaskDeploymentDescriptor 
createSender(NettyShuffleDescriptor shuffleDescriptor) throws IOException { return createSender(shuffleDescriptor, TestingAbstractInvokables.Sender.class); } private TaskDeploymentDescriptor createSender( NettyShuffleDescriptor shuffleDescriptor, Class<? extends AbstractInvokable> abstractInvokable) throws IOException { PartitionDescriptor partitionDescriptor = new PartitionDescriptor( new IntermediateDataSetID(), shuffleDescriptor.getResultPartitionID().getPartitionId(), ResultPartitionType.PIPELINED, 1, 0); ResultPartitionDeploymentDescriptor resultPartitionDeploymentDescriptor = new ResultPartitionDeploymentDescriptor( partitionDescriptor, shuffleDescriptor, 1, true); return createTestTaskDeploymentDescriptor( "Sender", shuffleDescriptor.getResultPartitionID().getProducerId(), abstractInvokable, 1, Collections.singletonList(resultPartitionDeploymentDescriptor), Collections.emptyList()); } private TaskDeploymentDescriptor createTestTaskDeploymentDescriptor( String taskName, ExecutionAttemptID eid, Class<? extends AbstractInvokable> abstractInvokable ) throws IOException { return createTestTaskDeploymentDescriptor(taskName, eid, abstractInvokable, 1); } private TaskDeploymentDescriptor createTestTaskDeploymentDescriptor( String taskName, ExecutionAttemptID eid, Class<? extends AbstractInvokable> abstractInvokable, int maxNumberOfSubtasks ) throws IOException { return createTestTaskDeploymentDescriptor(taskName, eid, abstractInvokable, maxNumberOfSubtasks, Collections.emptyList(), Collections.emptyList()); } private TaskDeploymentDescriptor createTestTaskDeploymentDescriptor( String taskName, ExecutionAttemptID eid, Class<? 
extends AbstractInvokable> abstractInvokable, int maxNumberOfSubtasks, Collection<ResultPartitionDeploymentDescriptor> producedPartitions, Collection<InputGateDeploymentDescriptor> inputGates ) throws IOException { Preconditions.checkNotNull(producedPartitions); Preconditions.checkNotNull(inputGates); return createTaskDeploymentDescriptor( jobId, testName.getMethodName(), eid, new SerializedValue<>(new ExecutionConfig()), taskName, maxNumberOfSubtasks, 0, 1, 0, new Configuration(), new Configuration(), abstractInvokable.getName(), producedPartitions, inputGates, Collections.emptyList(), Collections.emptyList(), 0); } static TaskDeploymentDescriptor createTaskDeploymentDescriptor( JobID jobId, String jobName, ExecutionAttemptID executionAttemptId, SerializedValue<ExecutionConfig> serializedExecutionConfig, String taskName, int maxNumberOfSubtasks, int subtaskIndex, int numberOfSubtasks, int attemptNumber, Configuration jobConfiguration, Configuration taskConfiguration, String invokableClassName, Collection<ResultPartitionDeploymentDescriptor> producedPartitions, Collection<InputGateDeploymentDescriptor> inputGates, Collection<PermanentBlobKey> requiredJarFiles, Collection<URL> requiredClasspaths, int targetSlotNumber) throws IOException { JobInformation jobInformation = new JobInformation( jobId, jobName, serializedExecutionConfig, jobConfiguration, requiredJarFiles, requiredClasspaths); TaskInformation taskInformation = new TaskInformation( new JobVertexID(), taskName, numberOfSubtasks, maxNumberOfSubtasks, invokableClassName, taskConfiguration); SerializedValue<JobInformation> serializedJobInformation = new SerializedValue<>(jobInformation); SerializedValue<TaskInformation> serializedJobVertexInformation = new SerializedValue<>(taskInformation); return new TaskDeploymentDescriptor( jobId, new TaskDeploymentDescriptor.NonOffloaded<>(serializedJobInformation), new TaskDeploymentDescriptor.NonOffloaded<>(serializedJobVertexInformation), executionAttemptId, new 
AllocationID(), subtaskIndex, attemptNumber, targetSlotNumber, null, producedPartitions, inputGates); } }
Removes the use of a Supplier function to store the TableSchema and simply uses a class property to hold the TableSchema.
public T read(T reuse, Decoder in) throws IOException { GenericRecord record = (GenericRecord) this.reader.read(reuse, in); return parseFn.apply(new SchemaAndRecord(record, this.tableSchema)); }
return parseFn.apply(new SchemaAndRecord(record, this.tableSchema));
public T read(T reuse, Decoder in) throws IOException { GenericRecord record = (GenericRecord) this.reader.read(reuse, in); return parseFn.apply(new SchemaAndRecord(record, this.tableSchema)); }
class GenericDatumTransformer<T> implements DatumReader<T> { private final SerializableFunction<SchemaAndRecord, T> parseFn; private final TableSchema tableSchema; private GenericDatumReader<T> reader; private org.apache.avro.Schema writerSchema; public GenericDatumTransformer( SerializableFunction<SchemaAndRecord, T> parseFn, String tableSchema, org.apache.avro.Schema writer) { this.parseFn = parseFn; this.tableSchema = new TableSchemaFunction().apply(tableSchema); this.writerSchema = writer; this.reader = new GenericDatumReader<>(this.writerSchema); } public GenericDatumTransformer( SerializableFunction<SchemaAndRecord, T> parseFn, TableSchema tableSchema, org.apache.avro.Schema writer) { this.parseFn = parseFn; this.tableSchema = tableSchema; this.writerSchema = writer; this.reader = new GenericDatumReader<>(this.writerSchema); } @Override public void setSchema(org.apache.avro.Schema schema) { if (this.writerSchema.equals(schema)) { return; } this.writerSchema = schema; this.reader = new GenericDatumReader<>(this.writerSchema); } @Override }
class GenericDatumTransformer<T> implements DatumReader<T> { private final SerializableFunction<SchemaAndRecord, T> parseFn; private final TableSchema tableSchema; private GenericDatumReader<T> reader; private org.apache.avro.Schema writerSchema; public GenericDatumTransformer( SerializableFunction<SchemaAndRecord, T> parseFn, TableSchema tableSchema, org.apache.avro.Schema writer) { this.parseFn = parseFn; this.tableSchema = tableSchema; this.writerSchema = writer; this.reader = new GenericDatumReader<>(this.writerSchema); } @Override public void setSchema(org.apache.avro.Schema schema) { if (this.writerSchema.equals(schema)) { return; } this.writerSchema = schema; this.reader = new GenericDatumReader<>(this.writerSchema); } @Override }
This is just a comment, it won't hurt to change it now.
public A build(ClassOutput classOutput) { generatedLiterals.computeIfAbsent(annotationLiteral, generatedName -> { String name = annotationInstance.name().toString(); String signature = String.format("L%1$s<L%2$s;>;L%2$s;", AnnotationLiteral.class.getName().replace('.', '/'), name.replace('.', '/')); ClassCreator literal = ClassCreator.builder().classOutput(classOutput).className(generatedName) .superClass(AnnotationLiteral.class) .interfaces(name).signature(signature).build(); List<MethodInfo> constructorParams = annotationClass.methods().stream() .filter(m -> !m.name().equals("<clinit>") && !m.name().equals("<init>")) .collect(Collectors.toList()); MethodCreator constructor = literal.getMethodCreator("<init>", "V", constructorParams.stream().map(m -> m.returnType().name().toString()).toArray()); constructor.invokeSpecialMethod(MethodDescriptor.ofConstructor(AnnotationLiteral.class), constructor.getThis()); for (ListIterator<MethodInfo> iterator = constructorParams.listIterator(); iterator.hasNext();) { MethodInfo param = iterator.next(); String returnType = param.returnType().name().toString(); literal.getFieldCreator(param.name(), returnType).setModifiers(ACC_PRIVATE | ACC_FINAL); constructor.writeInstanceField(FieldDescriptor.of(literal.getClassName(), param.name(), returnType), constructor.getThis(), constructor.getMethodParam(iterator.previousIndex())); MethodCreator value = literal.getMethodCreator(param.name(), returnType).setModifiers(ACC_PUBLIC); value.returnValue(value.readInstanceField( FieldDescriptor.of(literal.getClassName(), param.name(), returnType), value.getThis())); } constructor.returnValue(null); literal.close(); return Boolean.TRUE; }); ClassLoader classLoader = Thread.currentThread().getContextClassLoader(); if (classLoader == null) { classLoader = AnnotationProxy.class.getClassLoader(); } return (A) Proxy.newProxyInstance(classLoader, new Class[] { annotationType, AnnotationProxy.class }, new InvocationHandler() { @Override public Object 
invoke(Object proxy, Method method, Object[] args) throws Throwable { switch (method.getName()) { case "getAnnotationLiteralType": return annotationLiteral; case "getAnnotationClass": return annotationClass; case "getAnnotationInstance": return annotationInstance; case "getDefaultValues": return defaultValues; case "getValues": return values; default: break; } throw new UnsupportedOperationException("Method " + method + " not implemented"); } }); }
public A build(ClassOutput classOutput) { generatedLiterals.computeIfAbsent(annotationLiteral, generatedName -> { String name = annotationInstance.name().toString(); String signature = String.format("L%1$s<L%2$s;>;L%2$s;", AnnotationLiteral.class.getName().replace('.', '/'), name.replace('.', '/')); ClassCreator literal = ClassCreator.builder().classOutput(classOutput).className(generatedName) .superClass(AnnotationLiteral.class) .interfaces(name).signature(signature).build(); List<MethodInfo> constructorParams = annotationClass.methods().stream() .filter(m -> !m.name().equals("<clinit>") && !m.name().equals("<init>")) .collect(Collectors.toList()); MethodCreator constructor = literal.getMethodCreator("<init>", "V", constructorParams.stream().map(m -> m.returnType().name().toString()).toArray()); constructor.invokeSpecialMethod(MethodDescriptor.ofConstructor(AnnotationLiteral.class), constructor.getThis()); for (ListIterator<MethodInfo> iterator = constructorParams.listIterator(); iterator.hasNext();) { MethodInfo param = iterator.next(); String returnType = param.returnType().name().toString(); literal.getFieldCreator(param.name(), returnType).setModifiers(ACC_PRIVATE | ACC_FINAL); constructor.writeInstanceField(FieldDescriptor.of(literal.getClassName(), param.name(), returnType), constructor.getThis(), constructor.getMethodParam(iterator.previousIndex())); MethodCreator value = literal.getMethodCreator(param.name(), returnType).setModifiers(ACC_PUBLIC); value.returnValue(value.readInstanceField( FieldDescriptor.of(literal.getClassName(), param.name(), returnType), value.getThis())); } constructor.returnValue(null); literal.close(); return Boolean.TRUE; }); ClassLoader classLoader = Thread.currentThread().getContextClassLoader(); if (classLoader == null) { classLoader = AnnotationProxy.class.getClassLoader(); } return (A) Proxy.newProxyInstance(classLoader, new Class[] { annotationType, AnnotationProxy.class }, new InvocationHandler() { @Override public Object 
invoke(Object proxy, Method method, Object[] args) throws Throwable { switch (method.getName()) { case "getAnnotationLiteralType": return annotationLiteral; case "getAnnotationClass": return annotationClass; case "getAnnotationInstance": return annotationInstance; case "getDefaultValues": return defaultValues; case "getValues": return values; default: break; } throw new UnsupportedOperationException("Method " + method + " not implemented"); } }); }
class AnnotationProxyBuilder<A> { private final ClassInfo annotationClass; private final String annotationLiteral; private final AnnotationInstance annotationInstance; private final Class<A> annotationType; private final Map<String, Object> defaultValues = new HashMap<>(); private final Map<String, Object> values = new HashMap<>(); AnnotationProxyBuilder(AnnotationInstance annotationInstance, Class<A> annotationType, String annotationLiteral, ClassInfo annotationClass) { this.annotationInstance = annotationInstance; this.annotationType = annotationType; this.annotationLiteral = annotationLiteral; this.annotationClass = annotationClass; } /** * Explicit values override the default values from the annotation class. * * @param name * @param value * @return self */ public AnnotationProxyBuilder<A> withValue(String name, Object value) { values.put(name, value); return this; } /** * Explicit default values override the default values from the annotation class. * * @param name * @param value * @return self */ public AnnotationProxyBuilder<A> withDefaultValue(String name, Object value) { if (annotationInstance.value(name) == null) { defaultValues.put(name, value); } return this; } @SuppressWarnings("unchecked") }
class AnnotationProxyBuilder<A> { private final ClassInfo annotationClass; private final String annotationLiteral; private final AnnotationInstance annotationInstance; private final Class<A> annotationType; private final Map<String, Object> defaultValues = new HashMap<>(); private final Map<String, Object> values = new HashMap<>(); AnnotationProxyBuilder(AnnotationInstance annotationInstance, Class<A> annotationType, String annotationLiteral, ClassInfo annotationClass) { this.annotationInstance = annotationInstance; this.annotationType = annotationType; this.annotationLiteral = annotationLiteral; this.annotationClass = annotationClass; } /** * Explicit values override the default values from the annotation class. * * @param name * @param value * @return self */ public AnnotationProxyBuilder<A> withValue(String name, Object value) { values.put(name, value); return this; } /** * Explicit default values override the default values from the annotation class. * * @param name * @param value * @return self */ public AnnotationProxyBuilder<A> withDefaultValue(String name, Object value) { if (annotationInstance.value(name) == null) { defaultValues.put(name, value); } return this; } @SuppressWarnings("unchecked") }
I didn't get it... for the case of one array `arrays.length == 1` and it will not enter this `if`. Also having just one array is ok, in that case we can just return it back. Why are we talking about `"need at least two arrays"` ?
private InType[] convertToArrays(InType[] arrays) { if (arrays == null || arrays.length < 1) { throw new ValidationException("need at least two arrays"); } int numberOfNull = 0; InType notNullArray = null; for (int i = 0; i < arrays.length; ++i) { if (arrays[i] == null) { numberOfNull++; } else { notNullArray = arrays[i]; } } if (numberOfNull == arrays.length) { return (InType[]) new Object[] {arrays}; } if (!(notNullArray instanceof Object[])) { return (InType[]) new Object[] {arrays}; } else { return arrays; } }
throw new ValidationException("need at least two arrays");
private InType[] convertToArrays(InType[] arrays) { if (arrays == null || arrays.length == 0) { return arrays; } InType notNullArray = null; for (int i = 0; i < arrays.length; ++i) { if (arrays[i] != null) { notNullArray = arrays[i]; } } if (!(notNullArray instanceof Object[])) { return (InType[]) new Object[] {arrays}; } else { return arrays; } }
class BaseExpressions<InType, OutType> { protected abstract Expression toExpr(); protected abstract OutType toApiSpecificExpression(Expression expression); /** * Specifies a name for an expression i.e. a field. * * @param name name for one field * @param extraNames additional names if the expression expands to multiple fields */ public OutType as(String name, String... extraNames) { return toApiSpecificExpression( ApiExpressionUtils.unresolvedCall( BuiltInFunctionDefinitions.AS, Stream.concat( Stream.of(toExpr(), ApiExpressionUtils.valueLiteral(name)), Stream.of(extraNames).map(ApiExpressionUtils::valueLiteral)) .toArray(Expression[]::new))); } /** * Boolean AND in three-valued logic. This is an infix notation. See also {@link * Expressions * * @see Expressions */ public OutType and(InType other) { return toApiSpecificExpression(unresolvedCall(AND, toExpr(), objectToExpression(other))); } /** * Boolean OR in three-valued logic. This is an infix notation. See also {@link * Expressions * * @see Expressions */ public OutType or(InType other) { return toApiSpecificExpression(unresolvedCall(OR, toExpr(), objectToExpression(other))); } /** * Inverts a given boolean expression. * * <p>This method supports a three-valued logic by preserving {@code NULL}. This means if the * input expression is {@code NULL}, the result will also be {@code NULL}. * * <p>The resulting type is nullable if and only if the input type is nullable. * * <p>Examples: * * <pre>{@code * lit(true).not() * lit(false).not() * lit(null, DataTypes.BOOLEAN()).not() * }</pre> */ public OutType not() { return toApiSpecificExpression(unresolvedCall(NOT, toExpr())); } /** Greater than. */ public OutType isGreater(InType other) { return toApiSpecificExpression( unresolvedCall(GREATER_THAN, toExpr(), objectToExpression(other))); } /** Greater than or equal. 
*/ public OutType isGreaterOrEqual(InType other) { return toApiSpecificExpression( unresolvedCall(GREATER_THAN_OR_EQUAL, toExpr(), objectToExpression(other))); } /** Less than. */ public OutType isLess(InType other) { return toApiSpecificExpression( unresolvedCall(LESS_THAN, toExpr(), objectToExpression(other))); } /** Less than or equal. */ public OutType isLessOrEqual(InType other) { return toApiSpecificExpression( unresolvedCall(LESS_THAN_OR_EQUAL, toExpr(), objectToExpression(other))); } /** Equals. */ public OutType isEqual(InType other) { return toApiSpecificExpression(unresolvedCall(EQUALS, toExpr(), objectToExpression(other))); } /** Not equal. */ public OutType isNotEqual(InType other) { return toApiSpecificExpression( unresolvedCall(NOT_EQUALS, toExpr(), objectToExpression(other))); } /** Returns left plus right. */ public OutType plus(InType other) { return toApiSpecificExpression(unresolvedCall(PLUS, toExpr(), objectToExpression(other))); } /** Returns left minus right. */ public OutType minus(InType other) { return toApiSpecificExpression(unresolvedCall(MINUS, toExpr(), objectToExpression(other))); } /** Returns left divided by right. */ public OutType dividedBy(InType other) { return toApiSpecificExpression(unresolvedCall(DIVIDE, toExpr(), objectToExpression(other))); } /** Returns left multiplied by right. */ public OutType times(InType other) { return toApiSpecificExpression(unresolvedCall(TIMES, toExpr(), objectToExpression(other))); } /** * Returns true if the given expression is between lowerBound and upperBound (both inclusive). * False otherwise. The parameters must be numeric types or identical comparable types. 
* * @param lowerBound numeric or comparable expression * @param upperBound numeric or comparable expression */ public OutType between(InType lowerBound, InType upperBound) { return toApiSpecificExpression( unresolvedCall( BETWEEN, toExpr(), objectToExpression(lowerBound), objectToExpression(upperBound))); } /** * Returns true if the given expression is not between lowerBound and upperBound (both * inclusive). False otherwise. The parameters must be numeric types or identical comparable * types. * * @param lowerBound numeric or comparable expression * @param upperBound numeric or comparable expression */ public OutType notBetween(InType lowerBound, InType upperBound) { return toApiSpecificExpression( unresolvedCall( NOT_BETWEEN, toExpr(), objectToExpression(lowerBound), objectToExpression(upperBound))); } /** * Ternary conditional operator that decides which of two other expressions should be evaluated * based on a evaluated boolean condition. * * <p>e.g. lit(42).isGreater(5).then("A", "B") leads to "A" * * @param ifTrue expression to be evaluated if condition holds * @param ifFalse expression to be evaluated if condition does not hold */ public OutType then(InType ifTrue, InType ifFalse) { return toApiSpecificExpression( unresolvedCall( IF, toExpr(), objectToExpression(ifTrue), objectToExpression(ifFalse))); } /** * Returns {@code nullReplacement} if the given expression is NULL; otherwise the expression is * returned. * * <p>This function returns a data type that is very specific in terms of nullability. The * returned type is the common type of both arguments but only nullable if the {@code * nullReplacement} is nullable. * * <p>The function allows to pass nullable columns into a function or table that is declared * with a NOT NULL constraint. * * <p>E.g., <code>$('nullable_column').ifNull(5)</code> returns never NULL. 
*/ public OutType ifNull(InType nullReplacement) { return toApiSpecificExpression( unresolvedCall(IF_NULL, toExpr(), objectToExpression(nullReplacement))); } /** Returns true if the given expression is null. */ public OutType isNull() { return toApiSpecificExpression(unresolvedCall(IS_NULL, toExpr())); } /** Returns true if the given expression is not null. */ public OutType isNotNull() { return toApiSpecificExpression(unresolvedCall(IS_NOT_NULL, toExpr())); } /** Returns true if given boolean expression is true. False otherwise (for null and false). */ public OutType isTrue() { return toApiSpecificExpression(unresolvedCall(IS_TRUE, toExpr())); } /** Returns true if given boolean expression is false. False otherwise (for null and true). */ public OutType isFalse() { return toApiSpecificExpression(unresolvedCall(IS_FALSE, toExpr())); } /** * Returns true if given boolean expression is not true (for null and false). False otherwise. */ public OutType isNotTrue() { return toApiSpecificExpression(unresolvedCall(IS_NOT_TRUE, toExpr())); } /** * Returns true if given boolean expression is not false (for null and true). False otherwise. */ public OutType isNotFalse() { return toApiSpecificExpression(unresolvedCall(IS_NOT_FALSE, toExpr())); } /** * Similar to a SQL distinct aggregation clause such as COUNT(DISTINCT a), declares that an * aggregation function is only applied on distinct input values. * * <p>For example: * * <pre>{@code * orders * .groupBy($("a")) * .select($("a"), $("b").sum().distinct().as("d")) * }</pre> */ public OutType distinct() { return toApiSpecificExpression(unresolvedCall(DISTINCT, toExpr())); } /** * Returns the sum of the numeric field across all input values. If all values are null, null is * returned. */ public OutType sum() { return toApiSpecificExpression(unresolvedCall(SUM, toExpr())); } /** * Returns the sum of the numeric field across all input values. If all values are null, 0 is * returned. 
*/ public OutType sum0() { return toApiSpecificExpression(unresolvedCall(SUM0, toExpr())); } /** Returns the minimum value of field across all input values. */ public OutType min() { return toApiSpecificExpression(unresolvedCall(MIN, toExpr())); } /** Returns the maximum value of field across all input values. */ public OutType max() { return toApiSpecificExpression(unresolvedCall(MAX, toExpr())); } /** Returns the number of input rows for which the field is not null. */ public OutType count() { return toApiSpecificExpression(unresolvedCall(COUNT, toExpr())); } /** Returns the average (arithmetic mean) of the numeric field across all input values. */ public OutType avg() { return toApiSpecificExpression(unresolvedCall(AVG, toExpr())); } /** Returns the first value of field across all input values. */ public OutType firstValue() { return toApiSpecificExpression(unresolvedCall(FIRST_VALUE, toExpr())); } /** Returns the last value of field across all input values. */ public OutType lastValue() { return toApiSpecificExpression(unresolvedCall(LAST_VALUE, toExpr())); } /** * Concatenates the values of string expressions and places separator(,) values between them. * The separator is not added at the end of string. */ public OutType listAgg() { return toApiSpecificExpression(unresolvedCall(LISTAGG, toExpr(), valueLiteral(","))); } /** * Concatenates the values of string expressions and places separator values between them. The * separator is not added at the end of string. The default value of separator is ‘,’. * * @param separator string containing the character */ public OutType listAgg(String separator) { return toApiSpecificExpression(unresolvedCall(LISTAGG, toExpr(), valueLiteral(separator))); } /** Returns the population standard deviation of an expression (the square root of varPop()). 
*/ public OutType stddevPop() { return toApiSpecificExpression(unresolvedCall(STDDEV_POP, toExpr())); } /** Returns the sample standard deviation of an expression (the square root of varSamp()). */ public OutType stddevSamp() { return toApiSpecificExpression(unresolvedCall(STDDEV_SAMP, toExpr())); } /** Returns the population standard variance of an expression. */ public OutType varPop() { return toApiSpecificExpression(unresolvedCall(VAR_POP, toExpr())); } /** Returns the sample variance of a given expression. */ public OutType varSamp() { return toApiSpecificExpression(unresolvedCall(VAR_SAMP, toExpr())); } /** Returns multiset aggregate of a given expression. */ public OutType collect() { return toApiSpecificExpression(unresolvedCall(COLLECT, toExpr())); } /** * Returns a new value being cast to {@code toType}. A cast error throws an exception and fails * the job. When performing a cast operation that may fail, like {@link DataTypes * {@link DataTypes * errors. If {@link ExecutionConfigOptions * function behaves like {@link * * <p>E.g. {@code "42".cast(DataTypes.INT())} returns {@code 42}; {@code * null.cast(DataTypes.STRING())} returns {@code null} of type {@link DataTypes * {@code "non-number".cast(DataTypes.INT())} throws an exception and fails the job. */ public OutType cast(DataType toType) { return toApiSpecificExpression(unresolvedCall(CAST, toExpr(), typeLiteral(toType))); } /** * Like {@link * the job. * * <p>E.g. {@code "42".tryCast(DataTypes.INT())} returns {@code 42}; {@code * null.tryCast(DataTypes.STRING())} returns {@code null} of type {@link DataTypes * {@code "non-number".tryCast(DataTypes.INT())} returns {@code null} of type {@link * DataTypes * 0} of type {@link DataTypes */ public OutType tryCast(DataType toType) { return toApiSpecificExpression(unresolvedCall(TRY_CAST, toExpr(), typeLiteral(toType))); } /** * @deprecated This method will be removed in future versions as it uses the old type system. 
It * is recommended to use {@link * based on {@link org.apache.flink.table.api.DataTypes}. Please make sure to use either the * old or the new type system consistently to avoid unintended behavior. See the website * documentation for more information. */ @Deprecated public OutType cast(TypeInformation<?> toType) { return toApiSpecificExpression( unresolvedCall(CAST, toExpr(), typeLiteral(fromLegacyInfoToDataType(toType)))); } /** Specifies ascending order of an expression i.e. a field for orderBy unresolvedCall. */ public OutType asc() { return toApiSpecificExpression(unresolvedCall(ORDER_ASC, toExpr())); } /** Specifies descending order of an expression i.e. a field for orderBy unresolvedCall. */ public OutType desc() { return toApiSpecificExpression(unresolvedCall(ORDER_DESC, toExpr())); } /** * Returns true if an expression exists in a given list of expressions. This is a shorthand for * multiple OR conditions. * * <p>If the testing set contains null, the result will be null if the element can not be found * and true if it can be found. If the element is null, the result is always null. * * <p>e.g. lit("42").in(1, 2, 3) leads to false. */ @SafeVarargs public final OutType in(InType... elements) { Expression[] args = Stream.concat( Stream.of(toExpr()), Arrays.stream(elements).map(ApiExpressionUtils::objectToExpression)) .toArray(Expression[]::new); return toApiSpecificExpression(unresolvedCall(IN, args)); } /** * Returns true if an expression exists in a given table sub-query. The sub-query table must * consist of one column. This column must have the same data type as the expression. * * <p>Note: This operation is not supported in a streaming environment yet. */ public OutType in(Table table) { return toApiSpecificExpression( unresolvedCall(IN, toExpr(), tableRef(table.toString(), table))); } /** Returns the start time (inclusive) of a window when applied on a window reference. 
*/ public OutType start() { return toApiSpecificExpression(unresolvedCall(WINDOW_START, toExpr())); } /** * Returns the end time (exclusive) of a window when applied on a window reference. * * <p>e.g. if a window ends at 10:59:59.999 this property will return 11:00:00.000. */ public OutType end() { return toApiSpecificExpression(unresolvedCall(WINDOW_END, toExpr())); } /** Calculates the remainder of division the given number by another one. */ public OutType mod(InType other) { return toApiSpecificExpression(unresolvedCall(MOD, toExpr(), objectToExpression(other))); } /** Calculates the Euler's number raised to the given power. */ public OutType exp() { return toApiSpecificExpression(unresolvedCall(EXP, toExpr())); } /** Calculates the base 10 logarithm of the given value. */ public OutType log10() { return toApiSpecificExpression(unresolvedCall(LOG10, toExpr())); } /** Calculates the base 2 logarithm of the given value. */ public OutType log2() { return toApiSpecificExpression(unresolvedCall(LOG2, toExpr())); } /** Calculates the natural logarithm of the given value. */ public OutType ln() { return toApiSpecificExpression(unresolvedCall(LN, toExpr())); } /** Calculates the natural logarithm of the given value. */ public OutType log() { return toApiSpecificExpression(unresolvedCall(LOG, toExpr())); } /** Calculates the logarithm of the given value to the given base. */ public OutType log(InType base) { return toApiSpecificExpression(unresolvedCall(LOG, objectToExpression(base), toExpr())); } /** Calculates the given number raised to the power of the other value. */ public OutType power(InType other) { return toApiSpecificExpression(unresolvedCall(POWER, toExpr(), objectToExpression(other))); } /** Calculates the hyperbolic cosine of a given value. */ public OutType cosh() { return toApiSpecificExpression(unresolvedCall(COSH, toExpr())); } /** Calculates the square root of a given value. 
*/ public OutType sqrt() { return toApiSpecificExpression(unresolvedCall(SQRT, toExpr())); } /** Calculates the absolute value of given value. */ public OutType abs() { return toApiSpecificExpression(unresolvedCall(ABS, toExpr())); } /** Calculates the largest integer less than or equal to a given number. */ public OutType floor() { return toApiSpecificExpression(unresolvedCall(FLOOR, toExpr())); } /** Calculates the hyperbolic sine of a given value. */ public OutType sinh() { return toApiSpecificExpression(unresolvedCall(SINH, toExpr())); } /** Calculates the smallest integer greater than or equal to a given number. */ public OutType ceil() { return toApiSpecificExpression(unresolvedCall(CEIL, toExpr())); } /** Calculates the sine of a given number. */ public OutType sin() { return toApiSpecificExpression(unresolvedCall(SIN, toExpr())); } /** Calculates the cosine of a given number. */ public OutType cos() { return toApiSpecificExpression(unresolvedCall(COS, toExpr())); } /** Calculates the tangent of a given number. */ public OutType tan() { return toApiSpecificExpression(unresolvedCall(TAN, toExpr())); } /** Calculates the cotangent of a given number. */ public OutType cot() { return toApiSpecificExpression(unresolvedCall(COT, toExpr())); } /** Calculates the arc sine of a given number. */ public OutType asin() { return toApiSpecificExpression(unresolvedCall(ASIN, toExpr())); } /** Calculates the arc cosine of a given number. */ public OutType acos() { return toApiSpecificExpression(unresolvedCall(ACOS, toExpr())); } /** Calculates the arc tangent of a given number. */ public OutType atan() { return toApiSpecificExpression(unresolvedCall(ATAN, toExpr())); } /** Calculates the hyperbolic tangent of a given number. */ public OutType tanh() { return toApiSpecificExpression(unresolvedCall(TANH, toExpr())); } /** Converts numeric from radians to degrees. 
*/ public OutType degrees() { return toApiSpecificExpression(unresolvedCall(DEGREES, toExpr())); } /** Converts numeric from degrees to radians. */ public OutType radians() { return toApiSpecificExpression(unresolvedCall(RADIANS, toExpr())); } /** Calculates the signum of a given number. */ public OutType sign() { return toApiSpecificExpression(unresolvedCall(SIGN, toExpr())); } /** Rounds the given number to integer places right to the decimal point. */ public OutType round(InType places) { return toApiSpecificExpression(unresolvedCall(ROUND, toExpr(), objectToExpression(places))); } /** * Returns a string representation of an integer numeric value in binary format. Returns null if * numeric is null. E.g. "4" leads to "100", "12" leads to "1100". */ public OutType bin() { return toApiSpecificExpression(unresolvedCall(BIN, toExpr())); } /** * Returns a string representation of an integer numeric value or a string in hex format. * Returns null if numeric or string is null. * * <p>E.g. a numeric 20 leads to "14", a numeric 100 leads to "64", and a string "hello,world" * leads to "68656c6c6f2c776f726c64". */ public OutType hex() { return toApiSpecificExpression(unresolvedCall(HEX, toExpr())); } /** * Returns a number of truncated to n decimal places. If n is 0,the result has no decimal point * or fractional part. n can be negative to cause n digits left of the decimal point of the * value to become zero. E.g. truncate(42.345, 2) to 42.34. */ public OutType truncate(InType n) { return toApiSpecificExpression(unresolvedCall(TRUNCATE, toExpr(), objectToExpression(n))); } /** Returns a number of truncated to 0 decimal places. E.g. truncate(42.345) to 42.0. */ public OutType truncate() { return toApiSpecificExpression(unresolvedCall(TRUNCATE, toExpr())); } /** * Creates a substring of the given string at given index for a given length. 
* * @param beginIndex first character of the substring (starting at 1, inclusive) * @param length number of characters of the substring */ public OutType substring(InType beginIndex, InType length) { return toApiSpecificExpression( unresolvedCall( SUBSTRING, toExpr(), objectToExpression(beginIndex), objectToExpression(length))); } /** * Creates a substring of the given string beginning at the given index to the end. * * @param beginIndex first character of the substring (starting at 1, inclusive) */ public OutType substring(InType beginIndex) { return toApiSpecificExpression( unresolvedCall(SUBSTRING, toExpr(), objectToExpression(beginIndex))); } /** * Creates a substring of the given string at given index for a given length. * * @param beginIndex first character of the substring (starting at 1, inclusive) * @param length number of characters of the substring */ public OutType substr(InType beginIndex, InType length) { return toApiSpecificExpression( unresolvedCall( SUBSTR, toExpr(), objectToExpression(beginIndex), objectToExpression(length))); } /** * Creates a substring of the given string beginning at the given index to the end. * * @param beginIndex first character of the substring (starting at 1, inclusive) */ public OutType substr(InType beginIndex) { return toApiSpecificExpression( unresolvedCall(SUBSTR, toExpr(), objectToExpression(beginIndex))); } /** Removes leading space characters from the given string. */ public OutType trimLeading() { return toApiSpecificExpression( unresolvedCall( TRIM, valueLiteral(true), valueLiteral(false), valueLiteral(" "), toExpr())); } /** * Removes leading characters from the given string. * * @param character string containing the character */ public OutType trimLeading(InType character) { return toApiSpecificExpression( unresolvedCall( TRIM, valueLiteral(true), valueLiteral(false), objectToExpression(character), toExpr())); } /** Removes trailing space characters from the given string. 
*/ public OutType trimTrailing() { return toApiSpecificExpression( unresolvedCall( TRIM, valueLiteral(false), valueLiteral(true), valueLiteral(" "), toExpr())); } /** * Removes trailing characters from the given string. * * @param character string containing the character */ public OutType trimTrailing(InType character) { return toApiSpecificExpression( unresolvedCall( TRIM, valueLiteral(false), valueLiteral(true), objectToExpression(character), toExpr())); } /** Removes leading and trailing space characters from the given string. */ public OutType trim() { return toApiSpecificExpression( unresolvedCall( TRIM, valueLiteral(true), valueLiteral(true), valueLiteral(" "), toExpr())); } /** * Removes leading and trailing characters from the given string. * * @param character string containing the character */ public OutType trim(InType character) { return toApiSpecificExpression( unresolvedCall( TRIM, valueLiteral(true), valueLiteral(true), objectToExpression(character), toExpr())); } /** * Returns a new string which replaces all the occurrences of the search target with the * replacement string (non-overlapping). */ public OutType replace(InType search, InType replacement) { return toApiSpecificExpression( unresolvedCall( REPLACE, toExpr(), objectToExpression(search), objectToExpression(replacement))); } /** Returns the length of a string. */ public OutType charLength() { return toApiSpecificExpression(unresolvedCall(CHAR_LENGTH, toExpr())); } /** * Returns all of the characters in a string in upper case using the rules of the default * locale. */ public OutType upperCase() { return toApiSpecificExpression(unresolvedCall(UPPER, toExpr())); } /** * Returns all of the characters in a string in lower case using the rules of the default * locale. */ public OutType lowerCase() { return toApiSpecificExpression(unresolvedCall(LOWER, toExpr())); } /** * Converts the initial letter of each word in a string to uppercase. 
Assumes a string * containing only [A-Za-z0-9], everything else is treated as whitespace. */ public OutType initCap() { return toApiSpecificExpression(unresolvedCall(INIT_CAP, toExpr())); } /** * Returns true, if a string matches the specified LIKE pattern. * * <p>e.g. "Jo_n%" matches all strings that start with "Jo(arbitrary letter)n" */ public OutType like(InType pattern) { return toApiSpecificExpression(unresolvedCall(LIKE, toExpr(), objectToExpression(pattern))); } /** * Returns true, if a string matches the specified SQL regex pattern. * * <p>e.g. "A+" matches all strings that consist of at least one A */ public OutType similar(InType pattern) { return toApiSpecificExpression( unresolvedCall(SIMILAR, toExpr(), objectToExpression(pattern))); } /** * Returns the position of string in an other string starting at 1. Returns 0 if string could * not be found. * * <p>e.g. lit("a").position("bbbbba") leads to 6 */ public OutType position(InType haystack) { return toApiSpecificExpression( unresolvedCall(POSITION, toExpr(), objectToExpression(haystack))); } /** * Returns a string left-padded with the given pad string to a length of len characters. If the * string is longer than len, the return value is shortened to len characters. * * <p>e.g. lit("hi").lpad(4, "??") returns "??hi", lit("hi").lpad(1, '??') returns "h" */ public OutType lpad(InType len, InType pad) { return toApiSpecificExpression( unresolvedCall(LPAD, toExpr(), objectToExpression(len), objectToExpression(pad))); } /** * Returns a string right-padded with the given pad string to a length of len characters. If the * string is longer than len, the return value is shortened to len characters. * * <p>e.g. 
lit("hi").rpad(4, "??") returns "hi??", lit("hi").rpad(1, '??') returns "h" */ public OutType rpad(InType len, InType pad) { return toApiSpecificExpression( unresolvedCall(RPAD, toExpr(), objectToExpression(len), objectToExpression(pad))); } /** * Defines an aggregation to be used for a previously specified over window. * * <p>For example: * * <pre>{@code * table * .window(Over partitionBy 'c orderBy 'rowtime preceding 2.rows following CURRENT_ROW as 'w) * .select('c, 'a, 'a.count over 'w, 'a.sum over 'w) * }</pre> */ public OutType over(InType alias) { return toApiSpecificExpression(unresolvedCall(OVER, toExpr(), objectToExpression(alias))); } /** * Replaces a substring of string with a string starting at a position (starting at 1). * * <p>e.g. lit("xxxxxtest").overlay("xxxx", 6) leads to "xxxxxxxxx" */ public OutType overlay(InType newString, InType starting) { return toApiSpecificExpression( unresolvedCall( OVERLAY, toExpr(), objectToExpression(newString), objectToExpression(starting))); } /** * Replaces a substring of string with a string starting at a position (starting at 1). The * length specifies how many characters should be removed. * * <p>e.g. lit("xxxxxtest").overlay("xxxx", 6, 2) leads to "xxxxxxxxxst" */ public OutType overlay(InType newString, InType starting, InType length) { return toApiSpecificExpression( unresolvedCall( OVERLAY, toExpr(), objectToExpression(newString), objectToExpression(starting), objectToExpression(length))); } /** * Returns TRUE if any (possibly empty) substring matches the Java regular expression, otherwise * FALSE. Returns NULL if any of arguments is NULL. */ public OutType regexp(InType regex) { return toApiSpecificExpression(unresolvedCall(REGEXP, toExpr(), objectToExpression(regex))); } /** * Returns a string with all substrings that match the regular expression consecutively being * replaced. 
*/ public OutType regexpReplace(InType regex, InType replacement) { return toApiSpecificExpression( unresolvedCall( REGEXP_REPLACE, toExpr(), objectToExpression(regex), objectToExpression(replacement))); } /** * Returns a string extracted with a specified regular expression and a regex match group index. */ public OutType regexpExtract(InType regex, InType extractIndex) { return toApiSpecificExpression( unresolvedCall( REGEXP_EXTRACT, toExpr(), objectToExpression(regex), objectToExpression(extractIndex))); } /** Returns a string extracted with a specified regular expression. */ public OutType regexpExtract(InType regex) { return toApiSpecificExpression( unresolvedCall(REGEXP_EXTRACT, toExpr(), objectToExpression(regex))); } /** Returns the base string decoded with base64. */ public OutType fromBase64() { return toApiSpecificExpression(unresolvedCall(FROM_BASE64, toExpr())); } /** Returns the base64-encoded result of the input string. */ public OutType toBase64() { return toApiSpecificExpression(unresolvedCall(TO_BASE64, toExpr())); } /** Returns the numeric value of the first character of the input string. */ public OutType ascii() { return toApiSpecificExpression(unresolvedCall(ASCII, toExpr())); } /** Returns the ASCII character result of the input integer. */ public OutType chr() { return toApiSpecificExpression(unresolvedCall(CHR, toExpr())); } /** Decodes the first argument into a String using the provided character set. */ public OutType decode(InType charset) { return toApiSpecificExpression( unresolvedCall(DECODE, toExpr(), objectToExpression(charset))); } /** Encodes the string into a BINARY using the provided character set. */ public OutType encode(InType charset) { return toApiSpecificExpression( unresolvedCall(ENCODE, toExpr(), objectToExpression(charset))); } /** Returns the leftmost integer characters from the input string. 
*/ public OutType left(InType len) { return toApiSpecificExpression(unresolvedCall(LEFT, toExpr(), objectToExpression(len))); } /** Returns the rightmost integer characters from the input string. */ public OutType right(InType len) { return toApiSpecificExpression(unresolvedCall(RIGHT, toExpr(), objectToExpression(len))); } /** Returns the position of the first occurrence of the input string. */ public OutType instr(InType str) { return toApiSpecificExpression(unresolvedCall(INSTR, toExpr(), objectToExpression(str))); } /** Returns the position of the first occurrence in the input string. */ public OutType locate(InType str) { return toApiSpecificExpression(unresolvedCall(LOCATE, toExpr(), objectToExpression(str))); } /** Returns the position of the first occurrence in the input string after position integer. */ public OutType locate(InType str, InType pos) { return toApiSpecificExpression( unresolvedCall(LOCATE, toExpr(), objectToExpression(str), objectToExpression(pos))); } /** * Parse url and return various parameter of the URL. If accept any null arguments, return null. */ public OutType parseUrl(InType partToExtract) { return toApiSpecificExpression( unresolvedCall(PARSE_URL, toExpr(), objectToExpression(partToExtract))); } /** * Parse url and return various parameter of the URL. If accept any null arguments, return null. */ public OutType parseUrl(InType partToExtract, InType key) { return toApiSpecificExpression( unresolvedCall( PARSE_URL, toExpr(), objectToExpression(partToExtract), objectToExpression(key))); } /** Returns a string that removes the left whitespaces from the given string. */ public OutType ltrim() { return toApiSpecificExpression(unresolvedCall(LTRIM, toExpr())); } /** Returns a string that removes the right whitespaces from the given string. */ public OutType rtrim() { return toApiSpecificExpression(unresolvedCall(RTRIM, toExpr())); } /** Returns a string that repeats the base string n times. 
*/ public OutType repeat(InType n) { return toApiSpecificExpression(unresolvedCall(REPEAT, toExpr(), objectToExpression(n))); } /** * Reverse each character in current string. * * @return a new string which character order is reverse to current string. */ public OutType reverse() { return toApiSpecificExpression(unresolvedCall(REVERSE, toExpr())); } /** * Split target string with custom separator and pick the index-th(start with 0) result. * * @param separator custom separator. * @param index index of the result which you want. * @return the string at the index of split results. */ public OutType splitIndex(InType separator, InType index) { return toApiSpecificExpression( unresolvedCall( SPLIT_INDEX, toExpr(), objectToExpression(separator), objectToExpression(index))); } /** * Creates a map by parsing text. Split text into key-value pairs using two delimiters. The * first delimiter separates pairs, and the second delimiter separates key and value. If only * one parameter is given, default delimiters are used: ',' as delimiter1 and '=' as delimiter2. * Both delimiters are treated as regular expressions. * * @return the map */ public OutType strToMap() { return toApiSpecificExpression(unresolvedCall(STR_TO_MAP, toExpr())); } /** * Creates a map by parsing text. Split text into key-value pairs using two delimiters. The * first delimiter separates pairs, and the second delimiter separates key and value. Both * {@code listDelimiter} and {@code keyValueDelimiter} are treated as regular expressions. * * @param listDelimiter the delimiter to separates pairs * @param keyValueDelimiter the delimiter to separates key and value * @return the map */ public OutType strToMap(InType listDelimiter, InType keyValueDelimiter) { return toApiSpecificExpression( unresolvedCall( STR_TO_MAP, toExpr(), objectToExpression(listDelimiter), objectToExpression(keyValueDelimiter))); } /** Parses a date string in the form "yyyy-MM-dd" to a SQL Date. 
*/ public OutType toDate() { return toApiSpecificExpression( unresolvedCall( CAST, toExpr(), typeLiteral(fromLegacyInfoToDataType(SqlTimeTypeInfo.DATE)))); } /** Parses a time string in the form "HH:mm:ss" to a SQL Time. */ public OutType toTime() { return toApiSpecificExpression( unresolvedCall( CAST, toExpr(), typeLiteral(fromLegacyInfoToDataType(SqlTimeTypeInfo.TIME)))); } /** Parses a timestamp string in the form "yyyy-MM-dd HH:mm:ss[.SSS]" to a SQL Timestamp. */ public OutType toTimestamp() { return toApiSpecificExpression( unresolvedCall( CAST, toExpr(), typeLiteral(fromLegacyInfoToDataType(SqlTimeTypeInfo.TIMESTAMP)))); } /** * Extracts parts of a time point or time interval. Returns the part as a long value. * * <p>e.g. lit("2006-06-05").toDate().extract(DAY) leads to 5 */ public OutType extract(TimeIntervalUnit timeIntervalUnit) { return toApiSpecificExpression( unresolvedCall(EXTRACT, valueLiteral(timeIntervalUnit), toExpr())); } /** * Rounds down a time point to the given unit. * * <p>e.g. lit("12:44:31").toDate().floor(MINUTE) leads to 12:44:00 */ public OutType floor(TimeIntervalUnit timeIntervalUnit) { return toApiSpecificExpression( unresolvedCall(FLOOR, toExpr(), valueLiteral(timeIntervalUnit))); } /** * Rounds up a time point to the given unit. * * <p>e.g. lit("12:44:31").toDate().ceil(MINUTE) leads to 12:45:00 */ public OutType ceil(TimeIntervalUnit timeIntervalUnit) { return toApiSpecificExpression( unresolvedCall(CEIL, toExpr(), valueLiteral(timeIntervalUnit))); } /** * Accesses the field of a Flink composite type (such as Tuple, POJO, etc.) by name and returns * it's value. * * @param name name of the field (similar to Flink's field expressions) */ public OutType get(String name) { return toApiSpecificExpression(unresolvedCall(GET, toExpr(), valueLiteral(name))); } /** * Accesses the field of a Flink composite type (such as Tuple, POJO, etc.) by index and returns * it's value. 
* * @param index position of the field */ public OutType get(int index) { return toApiSpecificExpression(unresolvedCall(GET, toExpr(), valueLiteral(index))); } /** * Converts a Flink composite type (such as Tuple, POJO, etc.) and all of its direct subtypes * into a flat representation where every subtype is a separate field. */ public OutType flatten() { return toApiSpecificExpression(unresolvedCall(FLATTEN, toExpr())); } /** * Accesses the element of an array or map based on a key or an index (starting at 1). * * @param index key or position of the element (array index starting at 1) */ public OutType at(InType index) { return toApiSpecificExpression(unresolvedCall(AT, toExpr(), objectToExpression(index))); } /** Returns the number of elements of an array or number of entries of a map. */ public OutType cardinality() { return toApiSpecificExpression(unresolvedCall(CARDINALITY, toExpr())); } /** * Returns the sole element of an array with a single element. Returns null if the array is * empty. Throws an exception if the array has more than one element. */ public OutType element() { return toApiSpecificExpression(unresolvedCall(ARRAY_ELEMENT, toExpr())); } /** * Returns whether the given element exists in an array. * * <p>Checking for null elements in the array is supported. If the array itself is null, the * function will return null. The given element is cast implicitly to the array's element type * if necessary. */ public OutType arrayContains(InType needle) { return toApiSpecificExpression( unresolvedCall(ARRAY_CONTAINS, toExpr(), objectToExpression(needle))); } /** * Returns an array with unique elements. * * <p>If the array itself is null, the function will return null. Keeps ordering of elements. */ public OutType arrayDistinct() { return toApiSpecificExpression(unresolvedCall(ARRAY_DISTINCT, toExpr())); } /** * Returns the position of the first occurrence of element in the given array as int. Returns 0 * if the given value could not be found in the array. 
Returns null if either of the arguments
     * are null.
     *
     * <p>NOTE: that this is not zero based, but 1-based index. The first element in the array has
     * index 1.
     */
    public OutType arrayPosition(InType needle) {
        return toApiSpecificExpression(
                unresolvedCall(ARRAY_POSITION, toExpr(), objectToExpression(needle)));
    }

    /**
     * Removes all elements that are equal to {@code needle} from the array.
     *
     * <p>If the array itself is null, the function will return null. Keeps ordering of elements.
     */
    public OutType arrayRemove(InType needle) {
        return toApiSpecificExpression(
                unresolvedCall(ARRAY_REMOVE, toExpr(), objectToExpression(needle)));
    }

    /**
     * Returns an array in reverse order.
     *
     * <p>If the array itself is null, the function will return null.
     */
    public OutType arrayReverse() {
        return toApiSpecificExpression(unresolvedCall(ARRAY_REVERSE, toExpr()));
    }

    /**
     * Returns an array of the elements in the union of array1 and array2, without duplicates.
     *
     * <p>If any of the arrays is null, the function will return null.
     */
    public OutType arrayUnion(InType array) {
        return toApiSpecificExpression(
                unresolvedCall(ARRAY_UNION, toExpr(), objectToExpression(array)));
    }

    /**
     * Returns an array that is the concatenation of this array and the given arrays.
     *
     * <p>If both of the arrays are null, the function will return null.
     */
    public OutType arrayConcat(InType... arrays) {
        // NOTE(review): convertToArrays is a project-local helper; presumably it normalizes the
        // varargs before each element is turned into an expression — confirm its contract.
        arrays = convertToArrays(arrays);
        // Prepend this expression to the converted varargs so ARRAY_CONCAT sees all operands.
        Expression[] args =
                Stream.concat(
                                Stream.of(toExpr()),
                                Arrays.stream(arrays).map(ApiExpressionUtils::objectToExpression))
                        .toArray(Expression[]::new);
        return toApiSpecificExpression(unresolvedCall(ARRAY_CONCAT, args));
    }

    /** Returns the keys of the map as an array. */
    public OutType mapKeys() {
        return toApiSpecificExpression(unresolvedCall(MAP_KEYS, toExpr()));
    }

    /** Returns the values of the map as an array.
*/ public OutType mapValues() { return toApiSpecificExpression(unresolvedCall(MAP_VALUES, toExpr())); } /** * Declares a field as the rowtime attribute for indicating, accessing, and working in Flink's * event time. */ public OutType rowtime() { return toApiSpecificExpression(unresolvedCall(ROWTIME, toExpr())); } /** * Declares a field as the proctime attribute for indicating, accessing, and working in Flink's * processing time. */ public OutType proctime() { return toApiSpecificExpression(unresolvedCall(PROCTIME, toExpr())); } /** * Creates an interval of the given number of years. * * <p>The produced expression is of type {@code DataTypes.INTERVAL} */ public OutType year() { return toApiSpecificExpression(toMonthInterval(toExpr(), 12)); } /** Creates an interval of the given number of years. */ public OutType years() { return year(); } /** Creates an interval of the given number of quarters. */ public OutType quarter() { return toApiSpecificExpression(toMonthInterval(toExpr(), 3)); } /** Creates an interval of the given number of quarters. */ public OutType quarters() { return quarter(); } /** Creates an interval of the given number of months. */ public OutType month() { return toApiSpecificExpression(toMonthInterval(toExpr(), 1)); } /** Creates an interval of the given number of months. */ public OutType months() { return month(); } /** Creates an interval of the given number of weeks. */ public OutType week() { return toApiSpecificExpression(toMilliInterval(toExpr(), 7 * MILLIS_PER_DAY)); } /** Creates an interval of the given number of weeks. */ public OutType weeks() { return week(); } /** Creates an interval of the given number of days. */ public OutType day() { return toApiSpecificExpression(toMilliInterval(toExpr(), MILLIS_PER_DAY)); } /** Creates an interval of the given number of days. */ public OutType days() { return day(); } /** Creates an interval of the given number of hours. 
*/ public OutType hour() { return toApiSpecificExpression(toMilliInterval(toExpr(), MILLIS_PER_HOUR)); } /** Creates an interval of the given number of hours. */ public OutType hours() { return hour(); } /** Creates an interval of the given number of minutes. */ public OutType minute() { return toApiSpecificExpression(toMilliInterval(toExpr(), MILLIS_PER_MINUTE)); } /** Creates an interval of the given number of minutes. */ public OutType minutes() { return minute(); } /** Creates an interval of the given number of seconds. */ public OutType second() { return toApiSpecificExpression(toMilliInterval(toExpr(), MILLIS_PER_SECOND)); } /** Creates an interval of the given number of seconds. */ public OutType seconds() { return second(); } /** Creates an interval of the given number of milliseconds. */ public OutType milli() { return toApiSpecificExpression(toMilliInterval(toExpr(), 1)); } /** Creates an interval of the given number of milliseconds. */ public OutType millis() { return milli(); } /** * Returns the MD5 hash of the string argument; null if string is null. * * @return string of 32 hexadecimal digits or null */ public OutType md5() { return toApiSpecificExpression(unresolvedCall(MD5, toExpr())); } /** * Returns the SHA-1 hash of the string argument; null if string is null. * * @return string of 40 hexadecimal digits or null */ public OutType sha1() { return toApiSpecificExpression(unresolvedCall(SHA1, toExpr())); } /** * Returns the SHA-224 hash of the string argument; null if string is null. * * @return string of 56 hexadecimal digits or null */ public OutType sha224() { return toApiSpecificExpression(unresolvedCall(SHA224, toExpr())); } /** * Returns the SHA-256 hash of the string argument; null if string is null. * * @return string of 64 hexadecimal digits or null */ public OutType sha256() { return toApiSpecificExpression(unresolvedCall(SHA256, toExpr())); } /** * Returns the SHA-384 hash of the string argument; null if string is null. 
*
     * @return string of 96 hexadecimal digits or null
     */
    public OutType sha384() {
        return toApiSpecificExpression(unresolvedCall(SHA384, toExpr()));
    }

    /**
     * Returns the SHA-512 hash of the string argument; null if string is null.
     *
     * @return string of 128 hexadecimal digits or null
     */
    public OutType sha512() {
        return toApiSpecificExpression(unresolvedCall(SHA512, toExpr()));
    }

    /**
     * Returns the hash for the given string expression using the SHA-2 family of hash functions
     * (SHA-224, SHA-256, SHA-384, or SHA-512).
     *
     * @param hashLength bit length of the result (either 224, 256, 384, or 512)
     * @return string or null if one of the arguments is null.
     */
    public OutType sha2(InType hashLength) {
        return toApiSpecificExpression(
                unresolvedCall(SHA2, toExpr(), objectToExpression(hashLength)));
    }

    /**
     * Determine whether a given string is valid JSON.
     *
     * <p>Specifying the optional {@code type} argument puts a constraint on which type of JSON
     * object is allowed. If the string is valid JSON, but not that type, {@code false} is
     * returned. The default is {@link JsonType#VALUE}.
     *
     * <p>Examples:
     *
     * <pre>{@code
     * lit("1").isJson()
     * lit("[]").isJson()
     * lit("{}").isJson()
     *
     * lit("\"abc\"").isJson()
     * lit("abc").isJson()
     * nullOf(DataTypes.STRING()).isJson()
     *
     * lit("1").isJson(JsonType.SCALAR)
     * lit("1").isJson(JsonType.ARRAY)
     * lit("1").isJson(JsonType.OBJECT)
     *
     * lit("{}").isJson(JsonType.SCALAR)
     * lit("{}").isJson(JsonType.ARRAY)
     * lit("{}").isJson(JsonType.OBJECT)
     * }</pre>
     *
     * @param type The type of JSON object to validate against.
     * @return {@code true} if the string is a valid JSON of the given {@code type}, {@code false}
     *     otherwise.
     */
    public OutType isJson(JsonType type) {
        return toApiSpecificExpression(unresolvedCall(IS_JSON, toExpr(), valueLiteral(type)));
    }

    /**
     * Determine whether a given string is valid JSON.
     *
     * <p>This is a shortcut for {@code isJson(JsonType.VALUE)}. See {@link #isJson(JsonType)}.
     *
     * @return {@code true} if the string is a valid JSON value, {@code false} otherwise.
     */
    public OutType isJson() {
        return toApiSpecificExpression(unresolvedCall(IS_JSON, toExpr()));
    }

    /**
     * Returns whether a JSON string satisfies a given search criterion.
     *
     * <p>This follows the ISO/IEC TR 19075-6 specification for JSON support in SQL.
     *
     * <p>Examples:
     *
     * <pre>{@code
     * lit("{\"a\": true}").jsonExists("$.a")
     * lit("{\"a\": true}").jsonExists("$.b")
     * lit("{\"a\": [{ \"b\": 1 }]}").jsonExists("$.a[0].b")
     *
     * lit("{\"a\": true}").jsonExists("strict $.b", JsonExistsOnError.TRUE)
     * lit("{\"a\": true}").jsonExists("strict $.b", JsonExistsOnError.FALSE)
     * }</pre>
     *
     * @param path JSON path to search for.
     * @param onError Behavior in case of an error.
     * @return {@code true} if the JSON string satisfies the search criterion.
     */
    public OutType jsonExists(String path, JsonExistsOnError onError) {
        return toApiSpecificExpression(
                unresolvedCall(JSON_EXISTS, toExpr(), valueLiteral(path), valueLiteral(onError)));
    }

    /**
     * Determines whether a JSON string satisfies a given search criterion.
     *
     * <p>This follows the ISO/IEC TR 19075-6 specification for JSON support in SQL. Uses the
     * default error behavior of the underlying JSON_EXISTS call.
     *
     * @param path JSON path to search for.
     * @return {@code true} if the JSON string satisfies the search criterion.
     */
    public OutType jsonExists(String path) {
        return toApiSpecificExpression(unresolvedCall(JSON_EXISTS, toExpr(), valueLiteral(path)));
    }

    /**
     * Extracts a scalar from a JSON string.
     *
     * <p>This method searches a JSON string for a given path expression and returns the value if
     * the value at that path is scalar. Non-scalar values cannot be returned.
By default, the value * is returned as {@link DataTypes * be chosen, with the following types being supported: * * <ul> * <li>{@link DataTypes * <li>{@link DataTypes * <li>{@link DataTypes * <li>{@link DataTypes * </ul> * * <p>For empty path expressions or errors a behavior can be defined to either return {@code * null}, raise an error or return a defined default value instead. * * <p>See {@link * JsonQueryOnEmptyOrError)} for extracting non-scalar values from a JSON string. * * <p>Examples: * * <pre>{@code * * lit("{\"a\": true}").jsonValue("$.a") * * * lit("{\"a.b\": [0.998,0.996]}").jsonValue("$.['a.b'][0]", DataTypes.DOUBLE()) * * * lit("{\"a\": true}").jsonValue("$.a", DataTypes.BOOLEAN()) * * * lit("{\"a\": true}").jsonValue("lax $.b", * JsonValueOnEmptyOrError.DEFAULT, false, JsonValueOnEmptyOrError.NULL, null) * * * lit("{\"a\": true}").jsonValue("strict $.b", * JsonValueOnEmptyOrError.NULL, null, JsonValueOnEmptyOrError.DEFAULT, false) * }</pre> * * @param path JSON path to extract. * @param returningType Type to convert the extracted scalar to, otherwise defaults to {@link * DataTypes * @param onEmpty Behavior in case the path expression is empty. * @param defaultOnEmpty Default value to return if the path expression is empty and {@param * onEmpty} is set to {@link JsonValueOnEmptyOrError * @param onError Behavior in case of an error. * @param defaultOnError Default value to return if there is an error and {@param onError} is * set to {@link JsonValueOnEmptyOrError * @return The extracted scalar value. 
*/ public OutType jsonValue( String path, DataType returningType, JsonValueOnEmptyOrError onEmpty, InType defaultOnEmpty, JsonValueOnEmptyOrError onError, InType defaultOnError) { return toApiSpecificExpression( unresolvedCall( JSON_VALUE, toExpr(), valueLiteral(path), typeLiteral(returningType), valueLiteral(onEmpty), objectToExpression(defaultOnEmpty), valueLiteral(onError), objectToExpression(defaultOnError))); } /** * Extracts a scalar from a JSON string. * * <p>This method searches a JSON string for a given path expression and returns the value if * the value at that path is scalar. Non-scalar values cannot be returned. By default, the value * is returned as {@link DataTypes * * <p>See also {@link * JsonValueOnEmptyOrError, Object)}. * * @param path JSON path to extract. * @param returningType Type to convert the extracted scalar to, otherwise defaults to {@link * DataTypes * @return The extracted scalar value. */ public OutType jsonValue(String path, DataType returningType) { return jsonValue( path, returningType, JsonValueOnEmptyOrError.NULL, null, JsonValueOnEmptyOrError.NULL, null); } /** * Extracts a scalar from a JSON string. * * <p>This method searches a JSON string for a given path expression and returns the value if * the value at that path is scalar. Non-scalar values cannot be returned. By default, the value * is returned as {@link DataTypes * * <p>See also {@link * JsonValueOnEmptyOrError, Object)}. * * <p>This is a convenience method using {@link JsonValueOnEmptyOrError * and error cases with the same default value. * * @param path JSON path to extract. * @param returningType Type to convert the extracted scalar to, otherwise defaults to {@link * DataTypes * @return The extracted scalar value. 
*/ public OutType jsonValue(String path, DataType returningType, InType defaultOnEmptyOrError) { return jsonValue( path, returningType, JsonValueOnEmptyOrError.DEFAULT, defaultOnEmptyOrError, JsonValueOnEmptyOrError.DEFAULT, defaultOnEmptyOrError); } /** * Extracts a scalar from a JSON string. * * <p>This method searches a JSON string for a given path expression and returns the value if * the value at that path is scalar. Non-scalar values cannot be returned. By default, the value * is returned as {@link DataTypes * * <p>See also {@link * JsonValueOnEmptyOrError, Object)}. * * @param path JSON path to extract. * @return The extracted scalar value. */ public OutType jsonValue(String path) { return jsonValue(path, DataTypes.STRING()); } /** * Extracts JSON values from a JSON string. * * <p>This follows the ISO/IEC TR 19075-6 specification for JSON support in SQL. The result is * always returned as a {@link DataTypes * * <p>The {@param wrappingBehavior} determines whether the extracted value should be wrapped * into an array, and whether to do so unconditionally or only if the value itself isn't an * array already. * * <p>{@param onEmpty} and {@param onError} determine the behavior in case the path expression * is empty, or in case an error was raised, respectively. By default, in both cases {@code * null} is returned. Other choices are to use an empty array, an empty object, or to raise an * error. * * <p>See {@link * JsonValueOnEmptyOrError, Object)} for extracting scalars from a JSON string. 
*
     * <p>Examples:
     *
     * <pre>{@code
     * lit("{ \"a\": { \"b\": 1 } }").jsonQuery("$.a")
     * lit("[1, 2]").jsonQuery("$")
     * nullOf(DataTypes.STRING()).jsonQuery("$")
     *
     * lit("{}").jsonQuery("$", JsonQueryWrapper.CONDITIONAL_ARRAY)
     * lit("[1, 2]").jsonQuery("$", JsonQueryWrapper.CONDITIONAL_ARRAY)
     * lit("[1, 2]").jsonQuery("$", JsonQueryWrapper.UNCONDITIONAL_ARRAY)
     *
     * lit(1).jsonQuery("$")
     * lit(1).jsonQuery("$", JsonQueryWrapper.CONDITIONAL_ARRAY)
     *
     * lit("{}").jsonQuery("lax $.invalid", JsonQueryWrapper.WITHOUT_ARRAY,
     *     JsonQueryOnEmptyOrError.EMPTY_OBJECT, JsonQueryOnEmptyOrError.NULL)
     * lit("{}").jsonQuery("strict $.invalid", JsonQueryWrapper.WITHOUT_ARRAY,
     *     JsonQueryOnEmptyOrError.NULL, JsonQueryOnEmptyOrError.EMPTY_ARRAY)
     * }</pre>
     *
     * @param path JSON path to search for.
     * @param wrappingBehavior Determine if and when to wrap the resulting value into an array.
     * @param onEmpty Behavior in case the path expression is empty.
     * @param onError Behavior in case of an error.
     * @return The extracted JSON value.
     */
    public OutType jsonQuery(
            String path,
            JsonQueryWrapper wrappingBehavior,
            JsonQueryOnEmptyOrError onEmpty,
            JsonQueryOnEmptyOrError onError) {
        return toApiSpecificExpression(
                unresolvedCall(
                        JSON_QUERY,
                        toExpr(),
                        valueLiteral(path),
                        valueLiteral(wrappingBehavior),
                        valueLiteral(onEmpty),
                        valueLiteral(onError)));
    }

    /**
     * Extracts JSON values from a JSON string.
     *
     * <p>The {@code wrappingBehavior} determines whether the extracted value should be wrapped
     * into an array, and whether to do so unconditionally or only if the value itself isn't an
     * array already. Empty and error cases both default to {@link JsonQueryOnEmptyOrError#NULL}.
     *
     * <p>See also {@link #jsonQuery(String, JsonQueryWrapper, JsonQueryOnEmptyOrError,
     * JsonQueryOnEmptyOrError)}.
     *
     * @param path JSON path to search for.
     * @param wrappingBehavior Determine if and when to wrap the resulting value into an array.
     * @return The extracted JSON value.
     */
    public OutType jsonQuery(String path, JsonQueryWrapper wrappingBehavior) {
        return jsonQuery(
                path, wrappingBehavior, JsonQueryOnEmptyOrError.NULL, JsonQueryOnEmptyOrError.NULL);
    }

    /**
     * Extracts JSON values from a JSON string.
     *
     * <p>Wrapping defaults to {@link JsonQueryWrapper#WITHOUT_ARRAY}.
     *
     * <p>See also {@link #jsonQuery(String, JsonQueryWrapper, JsonQueryOnEmptyOrError,
     * JsonQueryOnEmptyOrError)}.
     *
     * @param path JSON path to search for.
     * @return The extracted JSON value.
     */
    public OutType jsonQuery(String path) {
        return jsonQuery(path, JsonQueryWrapper.WITHOUT_ARRAY);
    }
}
class BaseExpressions<InType, OutType> { protected abstract Expression toExpr(); protected abstract OutType toApiSpecificExpression(Expression expression); /** * Specifies a name for an expression i.e. a field. * * @param name name for one field * @param extraNames additional names if the expression expands to multiple fields */ public OutType as(String name, String... extraNames) { return toApiSpecificExpression( ApiExpressionUtils.unresolvedCall( BuiltInFunctionDefinitions.AS, Stream.concat( Stream.of(toExpr(), ApiExpressionUtils.valueLiteral(name)), Stream.of(extraNames).map(ApiExpressionUtils::valueLiteral)) .toArray(Expression[]::new))); } /** * Boolean AND in three-valued logic. This is an infix notation. See also {@link * Expressions * * @see Expressions */ public OutType and(InType other) { return toApiSpecificExpression(unresolvedCall(AND, toExpr(), objectToExpression(other))); } /** * Boolean OR in three-valued logic. This is an infix notation. See also {@link * Expressions * * @see Expressions */ public OutType or(InType other) { return toApiSpecificExpression(unresolvedCall(OR, toExpr(), objectToExpression(other))); } /** * Inverts a given boolean expression. * * <p>This method supports a three-valued logic by preserving {@code NULL}. This means if the * input expression is {@code NULL}, the result will also be {@code NULL}. * * <p>The resulting type is nullable if and only if the input type is nullable. * * <p>Examples: * * <pre>{@code * lit(true).not() * lit(false).not() * lit(null, DataTypes.BOOLEAN()).not() * }</pre> */ public OutType not() { return toApiSpecificExpression(unresolvedCall(NOT, toExpr())); } /** Greater than. */ public OutType isGreater(InType other) { return toApiSpecificExpression( unresolvedCall(GREATER_THAN, toExpr(), objectToExpression(other))); } /** Greater than or equal. 
     */
    public OutType isGreaterOrEqual(InType other) {
        return toApiSpecificExpression(
                unresolvedCall(GREATER_THAN_OR_EQUAL, toExpr(), objectToExpression(other)));
    }

    /** Less than. */
    public OutType isLess(InType other) {
        return toApiSpecificExpression(
                unresolvedCall(LESS_THAN, toExpr(), objectToExpression(other)));
    }

    /** Less than or equal. */
    public OutType isLessOrEqual(InType other) {
        return toApiSpecificExpression(
                unresolvedCall(LESS_THAN_OR_EQUAL, toExpr(), objectToExpression(other)));
    }

    /** Equals. */
    public OutType isEqual(InType other) {
        return toApiSpecificExpression(unresolvedCall(EQUALS, toExpr(), objectToExpression(other)));
    }

    /** Not equal. */
    public OutType isNotEqual(InType other) {
        return toApiSpecificExpression(
                unresolvedCall(NOT_EQUALS, toExpr(), objectToExpression(other)));
    }

    /** Returns left plus right. */
    public OutType plus(InType other) {
        return toApiSpecificExpression(unresolvedCall(PLUS, toExpr(), objectToExpression(other)));
    }

    /** Returns left minus right. */
    public OutType minus(InType other) {
        return toApiSpecificExpression(unresolvedCall(MINUS, toExpr(), objectToExpression(other)));
    }

    /** Returns left divided by right. */
    public OutType dividedBy(InType other) {
        return toApiSpecificExpression(unresolvedCall(DIVIDE, toExpr(), objectToExpression(other)));
    }

    /** Returns left multiplied by right. */
    public OutType times(InType other) {
        return toApiSpecificExpression(unresolvedCall(TIMES, toExpr(), objectToExpression(other)));
    }

    /**
     * Returns true if the given expression is between lowerBound and upperBound (both inclusive).
     * False otherwise. The parameters must be numeric types or identical comparable types.
     *
     * @param lowerBound numeric or comparable expression
     * @param upperBound numeric or comparable expression
     */
    public OutType between(InType lowerBound, InType upperBound) {
        return toApiSpecificExpression(
                unresolvedCall(
                        BETWEEN,
                        toExpr(),
                        objectToExpression(lowerBound),
                        objectToExpression(upperBound)));
    }

    /**
     * Returns true if the given expression is not between lowerBound and upperBound (both
     * inclusive). False otherwise. The parameters must be numeric types or identical comparable
     * types.
     *
     * @param lowerBound numeric or comparable expression
     * @param upperBound numeric or comparable expression
     */
    public OutType notBetween(InType lowerBound, InType upperBound) {
        return toApiSpecificExpression(
                unresolvedCall(
                        NOT_BETWEEN,
                        toExpr(),
                        objectToExpression(lowerBound),
                        objectToExpression(upperBound)));
    }

    /**
     * Ternary conditional operator that decides which of two other expressions should be evaluated
     * based on a evaluated boolean condition.
     *
     * <p>e.g. lit(42).isGreater(5).then("A", "B") leads to "A"
     *
     * @param ifTrue expression to be evaluated if condition holds
     * @param ifFalse expression to be evaluated if condition does not hold
     */
    public OutType then(InType ifTrue, InType ifFalse) {
        return toApiSpecificExpression(
                unresolvedCall(
                        IF, toExpr(), objectToExpression(ifTrue), objectToExpression(ifFalse)));
    }

    /**
     * Returns {@code nullReplacement} if the given expression is NULL; otherwise the expression is
     * returned.
     *
     * <p>This function returns a data type that is very specific in terms of nullability. The
     * returned type is the common type of both arguments but only nullable if the {@code
     * nullReplacement} is nullable.
     *
     * <p>The function allows to pass nullable columns into a function or table that is declared
     * with a NOT NULL constraint.
     *
     * <p>E.g., <code>$('nullable_column').ifNull(5)</code> returns never NULL.
     */
    public OutType ifNull(InType nullReplacement) {
        return toApiSpecificExpression(
                unresolvedCall(IF_NULL, toExpr(), objectToExpression(nullReplacement)));
    }

    /** Returns true if the given expression is null. */
    public OutType isNull() {
        return toApiSpecificExpression(unresolvedCall(IS_NULL, toExpr()));
    }

    /** Returns true if the given expression is not null. */
    public OutType isNotNull() {
        return toApiSpecificExpression(unresolvedCall(IS_NOT_NULL, toExpr()));
    }

    /** Returns true if given boolean expression is true. False otherwise (for null and false). */
    public OutType isTrue() {
        return toApiSpecificExpression(unresolvedCall(IS_TRUE, toExpr()));
    }

    /** Returns true if given boolean expression is false. False otherwise (for null and true). */
    public OutType isFalse() {
        return toApiSpecificExpression(unresolvedCall(IS_FALSE, toExpr()));
    }

    /**
     * Returns true if given boolean expression is not true (for null and false). False otherwise.
     */
    public OutType isNotTrue() {
        return toApiSpecificExpression(unresolvedCall(IS_NOT_TRUE, toExpr()));
    }

    /**
     * Returns true if given boolean expression is not false (for null and true). False otherwise.
     */
    public OutType isNotFalse() {
        return toApiSpecificExpression(unresolvedCall(IS_NOT_FALSE, toExpr()));
    }

    /**
     * Similar to a SQL distinct aggregation clause such as COUNT(DISTINCT a), declares that an
     * aggregation function is only applied on distinct input values.
     *
     * <p>For example:
     *
     * <pre>{@code
     * orders
     *     .groupBy($("a"))
     *     .select($("a"), $("b").sum().distinct().as("d"))
     * }</pre>
     */
    public OutType distinct() {
        return toApiSpecificExpression(unresolvedCall(DISTINCT, toExpr()));
    }

    /**
     * Returns the sum of the numeric field across all input values. If all values are null, null is
     * returned.
     */
    public OutType sum() {
        return toApiSpecificExpression(unresolvedCall(SUM, toExpr()));
    }

    /**
     * Returns the sum of the numeric field across all input values. If all values are null, 0 is
     * returned.
     */
    public OutType sum0() {
        return toApiSpecificExpression(unresolvedCall(SUM0, toExpr()));
    }

    /** Returns the minimum value of field across all input values. */
    public OutType min() {
        return toApiSpecificExpression(unresolvedCall(MIN, toExpr()));
    }

    /** Returns the maximum value of field across all input values. */
    public OutType max() {
        return toApiSpecificExpression(unresolvedCall(MAX, toExpr()));
    }

    /** Returns the number of input rows for which the field is not null. */
    public OutType count() {
        return toApiSpecificExpression(unresolvedCall(COUNT, toExpr()));
    }

    /** Returns the average (arithmetic mean) of the numeric field across all input values. */
    public OutType avg() {
        return toApiSpecificExpression(unresolvedCall(AVG, toExpr()));
    }

    /** Returns the first value of field across all input values. */
    public OutType firstValue() {
        return toApiSpecificExpression(unresolvedCall(FIRST_VALUE, toExpr()));
    }

    /** Returns the last value of field across all input values. */
    public OutType lastValue() {
        return toApiSpecificExpression(unresolvedCall(LAST_VALUE, toExpr()));
    }

    /**
     * Concatenates the values of string expressions and places separator(,) values between them.
     * The separator is not added at the end of string.
     */
    public OutType listAgg() {
        // default separator is a comma
        return toApiSpecificExpression(unresolvedCall(LISTAGG, toExpr(), valueLiteral(",")));
    }

    /**
     * Concatenates the values of string expressions and places separator values between them. The
     * separator is not added at the end of string. The default value of separator is ‘,’.
     *
     * @param separator string containing the character
     */
    public OutType listAgg(String separator) {
        return toApiSpecificExpression(unresolvedCall(LISTAGG, toExpr(), valueLiteral(separator)));
    }

    /** Returns the population standard deviation of an expression (the square root of varPop()). */
    public OutType stddevPop() {
        return toApiSpecificExpression(unresolvedCall(STDDEV_POP, toExpr()));
    }

    /** Returns the sample standard deviation of an expression (the square root of varSamp()). */
    public OutType stddevSamp() {
        return toApiSpecificExpression(unresolvedCall(STDDEV_SAMP, toExpr()));
    }

    /** Returns the population standard variance of an expression. */
    public OutType varPop() {
        return toApiSpecificExpression(unresolvedCall(VAR_POP, toExpr()));
    }

    /** Returns the sample variance of a given expression. */
    public OutType varSamp() {
        return toApiSpecificExpression(unresolvedCall(VAR_SAMP, toExpr()));
    }

    /** Returns multiset aggregate of a given expression. */
    public OutType collect() {
        return toApiSpecificExpression(unresolvedCall(COLLECT, toExpr()));
    }

    /**
     * Returns a new value being cast to {@code toType}. A cast error throws an exception and fails
     * the job. When performing a cast operation that may fail, like casting a non-numeric string
     * to a numeric type, prefer {@link #tryCast(DataType)} to handle errors. If the legacy cast
     * behaviour is enabled via the corresponding {@code ExecutionConfigOptions} option, this
     * function behaves like {@link #tryCast(DataType)} instead.
     *
     * <p>E.g. {@code "42".cast(DataTypes.INT())} returns {@code 42}; {@code
     * null.cast(DataTypes.STRING())} returns {@code null} of type STRING; {@code
     * "non-number".cast(DataTypes.INT())} throws an exception and fails the job.
     */
    public OutType cast(DataType toType) {
        return toApiSpecificExpression(unresolvedCall(CAST, toExpr(), typeLiteral(toType)));
    }

    /**
     * Like {@link #cast(DataType)}, but in case of error returns {@code null} instead of failing
     * the job.
     *
     * <p>E.g. {@code "42".tryCast(DataTypes.INT())} returns {@code 42}; {@code
     * null.tryCast(DataTypes.STRING())} returns {@code null} of type STRING; {@code
     * "non-number".tryCast(DataTypes.INT())} returns {@code null} of type INT.
     */
    public OutType tryCast(DataType toType) {
        return toApiSpecificExpression(unresolvedCall(TRY_CAST, toExpr(), typeLiteral(toType)));
    }

    /**
     * @deprecated This method will be removed in future versions as it uses the old type system.
     *     It is recommended to use {@link #cast(DataType)} instead, which is based on {@link
     *     org.apache.flink.table.api.DataTypes}. Please make sure to use either the old or the new
     *     type system consistently to avoid unintended behavior. See the website documentation for
     *     more information.
     */
    @Deprecated
    public OutType cast(TypeInformation<?> toType) {
        return toApiSpecificExpression(
                unresolvedCall(CAST, toExpr(), typeLiteral(fromLegacyInfoToDataType(toType))));
    }

    /** Specifies ascending order of an expression i.e. a field for orderBy unresolvedCall. */
    public OutType asc() {
        return toApiSpecificExpression(unresolvedCall(ORDER_ASC, toExpr()));
    }

    /** Specifies descending order of an expression i.e. a field for orderBy unresolvedCall. */
    public OutType desc() {
        return toApiSpecificExpression(unresolvedCall(ORDER_DESC, toExpr()));
    }

    /**
     * Returns true if an expression exists in a given list of expressions. This is a shorthand for
     * multiple OR conditions.
     *
     * <p>If the testing set contains null, the result will be null if the element can not be found
     * and true if it can be found. If the element is null, the result is always null.
     *
     * <p>e.g. lit("42").in(1, 2, 3) leads to false.
     */
    @SafeVarargs
    public final OutType in(InType... elements) {
        // the tested expression comes first, followed by all candidate elements
        Expression[] args =
                Stream.concat(
                                Stream.of(toExpr()),
                                Arrays.stream(elements).map(ApiExpressionUtils::objectToExpression))
                        .toArray(Expression[]::new);
        return toApiSpecificExpression(unresolvedCall(IN, args));
    }

    /**
     * Returns true if an expression exists in a given table sub-query. The sub-query table must
     * consist of one column. This column must have the same data type as the expression.
     *
     * <p>Note: This operation is not supported in a streaming environment yet.
     */
    public OutType in(Table table) {
        return toApiSpecificExpression(
                unresolvedCall(IN, toExpr(), tableRef(table.toString(), table)));
    }

    /** Returns the start time (inclusive) of a window when applied on a window reference. */
    public OutType start() {
        return toApiSpecificExpression(unresolvedCall(WINDOW_START, toExpr()));
    }

    /**
     * Returns the end time (exclusive) of a window when applied on a window reference.
     *
     * <p>e.g. if a window ends at 10:59:59.999 this property will return 11:00:00.000.
     */
    public OutType end() {
        return toApiSpecificExpression(unresolvedCall(WINDOW_END, toExpr()));
    }

    /** Calculates the remainder of division the given number by another one. */
    public OutType mod(InType other) {
        return toApiSpecificExpression(unresolvedCall(MOD, toExpr(), objectToExpression(other)));
    }

    /** Calculates the Euler's number raised to the given power. */
    public OutType exp() {
        return toApiSpecificExpression(unresolvedCall(EXP, toExpr()));
    }

    /** Calculates the base 10 logarithm of the given value. */
    public OutType log10() {
        return toApiSpecificExpression(unresolvedCall(LOG10, toExpr()));
    }

    /** Calculates the base 2 logarithm of the given value. */
    public OutType log2() {
        return toApiSpecificExpression(unresolvedCall(LOG2, toExpr()));
    }

    /** Calculates the natural logarithm of the given value. */
    public OutType ln() {
        return toApiSpecificExpression(unresolvedCall(LN, toExpr()));
    }

    /** Calculates the natural logarithm of the given value. */
    public OutType log() {
        return toApiSpecificExpression(unresolvedCall(LOG, toExpr()));
    }

    /** Calculates the logarithm of the given value to the given base. */
    public OutType log(InType base) {
        // note: LOG takes the base as its first argument, the value as its second
        return toApiSpecificExpression(unresolvedCall(LOG, objectToExpression(base), toExpr()));
    }

    /** Calculates the given number raised to the power of the other value. */
    public OutType power(InType other) {
        return toApiSpecificExpression(unresolvedCall(POWER, toExpr(), objectToExpression(other)));
    }

    /** Calculates the hyperbolic cosine of a given value. */
    public OutType cosh() {
        return toApiSpecificExpression(unresolvedCall(COSH, toExpr()));
    }

    /** Calculates the square root of a given value. */
    public OutType sqrt() {
        return toApiSpecificExpression(unresolvedCall(SQRT, toExpr()));
    }

    /** Calculates the absolute value of given value. */
    public OutType abs() {
        return toApiSpecificExpression(unresolvedCall(ABS, toExpr()));
    }

    /** Calculates the largest integer less than or equal to a given number. */
    public OutType floor() {
        return toApiSpecificExpression(unresolvedCall(FLOOR, toExpr()));
    }

    /** Calculates the hyperbolic sine of a given value. */
    public OutType sinh() {
        return toApiSpecificExpression(unresolvedCall(SINH, toExpr()));
    }

    /** Calculates the smallest integer greater than or equal to a given number. */
    public OutType ceil() {
        return toApiSpecificExpression(unresolvedCall(CEIL, toExpr()));
    }

    /** Calculates the sine of a given number. */
    public OutType sin() {
        return toApiSpecificExpression(unresolvedCall(SIN, toExpr()));
    }

    /** Calculates the cosine of a given number. */
    public OutType cos() {
        return toApiSpecificExpression(unresolvedCall(COS, toExpr()));
    }

    /** Calculates the tangent of a given number. */
    public OutType tan() {
        return toApiSpecificExpression(unresolvedCall(TAN, toExpr()));
    }

    /** Calculates the cotangent of a given number. */
    public OutType cot() {
        return toApiSpecificExpression(unresolvedCall(COT, toExpr()));
    }

    /** Calculates the arc sine of a given number. */
    public OutType asin() {
        return toApiSpecificExpression(unresolvedCall(ASIN, toExpr()));
    }

    /** Calculates the arc cosine of a given number. */
    public OutType acos() {
        return toApiSpecificExpression(unresolvedCall(ACOS, toExpr()));
    }

    /** Calculates the arc tangent of a given number. */
    public OutType atan() {
        return toApiSpecificExpression(unresolvedCall(ATAN, toExpr()));
    }

    /** Calculates the hyperbolic tangent of a given number. */
    public OutType tanh() {
        return toApiSpecificExpression(unresolvedCall(TANH, toExpr()));
    }

    /** Converts numeric from radians to degrees.
*/ public OutType degrees() { return toApiSpecificExpression(unresolvedCall(DEGREES, toExpr())); } /** Converts numeric from degrees to radians. */ public OutType radians() { return toApiSpecificExpression(unresolvedCall(RADIANS, toExpr())); } /** Calculates the signum of a given number. */ public OutType sign() { return toApiSpecificExpression(unresolvedCall(SIGN, toExpr())); } /** Rounds the given number to integer places right to the decimal point. */ public OutType round(InType places) { return toApiSpecificExpression(unresolvedCall(ROUND, toExpr(), objectToExpression(places))); } /** * Returns a string representation of an integer numeric value in binary format. Returns null if * numeric is null. E.g. "4" leads to "100", "12" leads to "1100". */ public OutType bin() { return toApiSpecificExpression(unresolvedCall(BIN, toExpr())); } /** * Returns a string representation of an integer numeric value or a string in hex format. * Returns null if numeric or string is null. * * <p>E.g. a numeric 20 leads to "14", a numeric 100 leads to "64", and a string "hello,world" * leads to "68656c6c6f2c776f726c64". */ public OutType hex() { return toApiSpecificExpression(unresolvedCall(HEX, toExpr())); } /** * Returns a number of truncated to n decimal places. If n is 0,the result has no decimal point * or fractional part. n can be negative to cause n digits left of the decimal point of the * value to become zero. E.g. truncate(42.345, 2) to 42.34. */ public OutType truncate(InType n) { return toApiSpecificExpression(unresolvedCall(TRUNCATE, toExpr(), objectToExpression(n))); } /** Returns a number of truncated to 0 decimal places. E.g. truncate(42.345) to 42.0. */ public OutType truncate() { return toApiSpecificExpression(unresolvedCall(TRUNCATE, toExpr())); } /** * Creates a substring of the given string at given index for a given length. 
* * @param beginIndex first character of the substring (starting at 1, inclusive) * @param length number of characters of the substring */ public OutType substring(InType beginIndex, InType length) { return toApiSpecificExpression( unresolvedCall( SUBSTRING, toExpr(), objectToExpression(beginIndex), objectToExpression(length))); } /** * Creates a substring of the given string beginning at the given index to the end. * * @param beginIndex first character of the substring (starting at 1, inclusive) */ public OutType substring(InType beginIndex) { return toApiSpecificExpression( unresolvedCall(SUBSTRING, toExpr(), objectToExpression(beginIndex))); } /** * Creates a substring of the given string at given index for a given length. * * @param beginIndex first character of the substring (starting at 1, inclusive) * @param length number of characters of the substring */ public OutType substr(InType beginIndex, InType length) { return toApiSpecificExpression( unresolvedCall( SUBSTR, toExpr(), objectToExpression(beginIndex), objectToExpression(length))); } /** * Creates a substring of the given string beginning at the given index to the end. * * @param beginIndex first character of the substring (starting at 1, inclusive) */ public OutType substr(InType beginIndex) { return toApiSpecificExpression( unresolvedCall(SUBSTR, toExpr(), objectToExpression(beginIndex))); } /** Removes leading space characters from the given string. */ public OutType trimLeading() { return toApiSpecificExpression( unresolvedCall( TRIM, valueLiteral(true), valueLiteral(false), valueLiteral(" "), toExpr())); } /** * Removes leading characters from the given string. * * @param character string containing the character */ public OutType trimLeading(InType character) { return toApiSpecificExpression( unresolvedCall( TRIM, valueLiteral(true), valueLiteral(false), objectToExpression(character), toExpr())); } /** Removes trailing space characters from the given string. 
     */
    public OutType trimTrailing() {
        // TRIM flag order: (stripLeading, stripTrailing, trimCharacter, input)
        return toApiSpecificExpression(
                unresolvedCall(
                        TRIM, valueLiteral(false), valueLiteral(true), valueLiteral(" "), toExpr()));
    }

    /**
     * Removes trailing characters from the given string.
     *
     * @param character string containing the character
     */
    public OutType trimTrailing(InType character) {
        return toApiSpecificExpression(
                unresolvedCall(
                        TRIM,
                        valueLiteral(false),
                        valueLiteral(true),
                        objectToExpression(character),
                        toExpr()));
    }

    /** Removes leading and trailing space characters from the given string. */
    public OutType trim() {
        return toApiSpecificExpression(
                unresolvedCall(
                        TRIM, valueLiteral(true), valueLiteral(true), valueLiteral(" "), toExpr()));
    }

    /**
     * Removes leading and trailing characters from the given string.
     *
     * @param character string containing the character
     */
    public OutType trim(InType character) {
        return toApiSpecificExpression(
                unresolvedCall(
                        TRIM,
                        valueLiteral(true),
                        valueLiteral(true),
                        objectToExpression(character),
                        toExpr()));
    }

    /**
     * Returns a new string which replaces all the occurrences of the search target with the
     * replacement string (non-overlapping).
     */
    public OutType replace(InType search, InType replacement) {
        return toApiSpecificExpression(
                unresolvedCall(
                        REPLACE,
                        toExpr(),
                        objectToExpression(search),
                        objectToExpression(replacement)));
    }

    /** Returns the length of a string. */
    public OutType charLength() {
        return toApiSpecificExpression(unresolvedCall(CHAR_LENGTH, toExpr()));
    }

    /**
     * Returns all of the characters in a string in upper case using the rules of the default
     * locale.
     */
    public OutType upperCase() {
        return toApiSpecificExpression(unresolvedCall(UPPER, toExpr()));
    }

    /**
     * Returns all of the characters in a string in lower case using the rules of the default
     * locale.
     */
    public OutType lowerCase() {
        return toApiSpecificExpression(unresolvedCall(LOWER, toExpr()));
    }

    /**
     * Converts the initial letter of each word in a string to uppercase. Assumes a string
     * containing only [A-Za-z0-9], everything else is treated as whitespace.
     */
    public OutType initCap() {
        return toApiSpecificExpression(unresolvedCall(INIT_CAP, toExpr()));
    }

    /**
     * Returns true, if a string matches the specified LIKE pattern.
     *
     * <p>e.g. "Jo_n%" matches all strings that start with "Jo(arbitrary letter)n"
     */
    public OutType like(InType pattern) {
        return toApiSpecificExpression(unresolvedCall(LIKE, toExpr(), objectToExpression(pattern)));
    }

    /**
     * Returns true, if a string matches the specified SQL regex pattern.
     *
     * <p>e.g. "A+" matches all strings that consist of at least one A
     */
    public OutType similar(InType pattern) {
        return toApiSpecificExpression(
                unresolvedCall(SIMILAR, toExpr(), objectToExpression(pattern)));
    }

    /**
     * Returns the position of string in an other string starting at 1. Returns 0 if string could
     * not be found.
     *
     * <p>e.g. lit("a").position("bbbbba") leads to 6
     */
    public OutType position(InType haystack) {
        return toApiSpecificExpression(
                unresolvedCall(POSITION, toExpr(), objectToExpression(haystack)));
    }

    /**
     * Returns a string left-padded with the given pad string to a length of len characters. If the
     * string is longer than len, the return value is shortened to len characters.
     *
     * <p>e.g. lit("hi").lpad(4, "??") returns "??hi", lit("hi").lpad(1, '??') returns "h"
     */
    public OutType lpad(InType len, InType pad) {
        return toApiSpecificExpression(
                unresolvedCall(LPAD, toExpr(), objectToExpression(len), objectToExpression(pad)));
    }

    /**
     * Returns a string right-padded with the given pad string to a length of len characters. If the
     * string is longer than len, the return value is shortened to len characters.
     *
     * <p>e.g. lit("hi").rpad(4, "??") returns "hi??", lit("hi").rpad(1, '??') returns "h"
     */
    public OutType rpad(InType len, InType pad) {
        return toApiSpecificExpression(
                unresolvedCall(RPAD, toExpr(), objectToExpression(len), objectToExpression(pad)));
    }

    /**
     * Defines an aggregation to be used for a previously specified over window.
     *
     * <p>For example:
     *
     * <pre>{@code
     * table
     *     .window(Over partitionBy 'c orderBy 'rowtime preceding 2.rows following CURRENT_ROW as 'w)
     *     .select('c, 'a, 'a.count over 'w, 'a.sum over 'w)
     * }</pre>
     */
    public OutType over(InType alias) {
        return toApiSpecificExpression(unresolvedCall(OVER, toExpr(), objectToExpression(alias)));
    }

    /**
     * Replaces a substring of string with a string starting at a position (starting at 1).
     *
     * <p>e.g. lit("xxxxxtest").overlay("xxxx", 6) leads to "xxxxxxxxx"
     */
    public OutType overlay(InType newString, InType starting) {
        return toApiSpecificExpression(
                unresolvedCall(
                        OVERLAY,
                        toExpr(),
                        objectToExpression(newString),
                        objectToExpression(starting)));
    }

    /**
     * Replaces a substring of string with a string starting at a position (starting at 1). The
     * length specifies how many characters should be removed.
     *
     * <p>e.g. lit("xxxxxtest").overlay("xxxx", 6, 2) leads to "xxxxxxxxxst"
     */
    public OutType overlay(InType newString, InType starting, InType length) {
        return toApiSpecificExpression(
                unresolvedCall(
                        OVERLAY,
                        toExpr(),
                        objectToExpression(newString),
                        objectToExpression(starting),
                        objectToExpression(length)));
    }

    /**
     * Returns TRUE if any (possibly empty) substring matches the Java regular expression, otherwise
     * FALSE. Returns NULL if any of arguments is NULL.
     */
    public OutType regexp(InType regex) {
        return toApiSpecificExpression(unresolvedCall(REGEXP, toExpr(), objectToExpression(regex)));
    }

    /**
     * Returns a string with all substrings that match the regular expression consecutively being
     * replaced.
     */
    public OutType regexpReplace(InType regex, InType replacement) {
        return toApiSpecificExpression(
                unresolvedCall(
                        REGEXP_REPLACE,
                        toExpr(),
                        objectToExpression(regex),
                        objectToExpression(replacement)));
    }

    /**
     * Returns a string extracted with a specified regular expression and a regex match group index.
     */
    public OutType regexpExtract(InType regex, InType extractIndex) {
        return toApiSpecificExpression(
                unresolvedCall(
                        REGEXP_EXTRACT,
                        toExpr(),
                        objectToExpression(regex),
                        objectToExpression(extractIndex)));
    }

    /** Returns a string extracted with a specified regular expression. */
    public OutType regexpExtract(InType regex) {
        return toApiSpecificExpression(
                unresolvedCall(REGEXP_EXTRACT, toExpr(), objectToExpression(regex)));
    }

    /** Returns the base string decoded with base64. */
    public OutType fromBase64() {
        return toApiSpecificExpression(unresolvedCall(FROM_BASE64, toExpr()));
    }

    /** Returns the base64-encoded result of the input string. */
    public OutType toBase64() {
        return toApiSpecificExpression(unresolvedCall(TO_BASE64, toExpr()));
    }

    /** Returns the numeric value of the first character of the input string. */
    public OutType ascii() {
        return toApiSpecificExpression(unresolvedCall(ASCII, toExpr()));
    }

    /** Returns the ASCII character result of the input integer. */
    public OutType chr() {
        return toApiSpecificExpression(unresolvedCall(CHR, toExpr()));
    }

    /** Decodes the first argument into a String using the provided character set. */
    public OutType decode(InType charset) {
        return toApiSpecificExpression(
                unresolvedCall(DECODE, toExpr(), objectToExpression(charset)));
    }

    /** Encodes the string into a BINARY using the provided character set. */
    public OutType encode(InType charset) {
        return toApiSpecificExpression(
                unresolvedCall(ENCODE, toExpr(), objectToExpression(charset)));
    }

    /** Returns the leftmost integer characters from the input string. */
    public OutType left(InType len) {
        return toApiSpecificExpression(unresolvedCall(LEFT, toExpr(), objectToExpression(len)));
    }

    /** Returns the rightmost integer characters from the input string. */
    public OutType right(InType len) {
        return toApiSpecificExpression(unresolvedCall(RIGHT, toExpr(), objectToExpression(len)));
    }

    /** Returns the position of the first occurrence of the input string. */
    public OutType instr(InType str) {
        return toApiSpecificExpression(unresolvedCall(INSTR, toExpr(), objectToExpression(str)));
    }

    /** Returns the position of the first occurrence in the input string. */
    public OutType locate(InType str) {
        return toApiSpecificExpression(unresolvedCall(LOCATE, toExpr(), objectToExpression(str)));
    }

    /** Returns the position of the first occurrence in the input string after position integer. */
    public OutType locate(InType str, InType pos) {
        return toApiSpecificExpression(
                unresolvedCall(LOCATE, toExpr(), objectToExpression(str), objectToExpression(pos)));
    }

    /**
     * Parse url and return various parameter of the URL. If accept any null arguments, return null.
     */
    public OutType parseUrl(InType partToExtract) {
        return toApiSpecificExpression(
                unresolvedCall(PARSE_URL, toExpr(), objectToExpression(partToExtract)));
    }

    /**
     * Parse url and return various parameter of the URL. If accept any null arguments, return null.
     */
    public OutType parseUrl(InType partToExtract, InType key) {
        return toApiSpecificExpression(
                unresolvedCall(
                        PARSE_URL,
                        toExpr(),
                        objectToExpression(partToExtract),
                        objectToExpression(key)));
    }

    /** Returns a string that removes the left whitespaces from the given string. */
    public OutType ltrim() {
        return toApiSpecificExpression(unresolvedCall(LTRIM, toExpr()));
    }

    /** Returns a string that removes the right whitespaces from the given string. */
    public OutType rtrim() {
        return toApiSpecificExpression(unresolvedCall(RTRIM, toExpr()));
    }

    /** Returns a string that repeats the base string n times. */
    public OutType repeat(InType n) {
        return toApiSpecificExpression(unresolvedCall(REPEAT, toExpr(), objectToExpression(n)));
    }

    /**
     * Reverse each character in current string.
     *
     * @return a new string which character order is reverse to current string.
     */
    public OutType reverse() {
        return toApiSpecificExpression(unresolvedCall(REVERSE, toExpr()));
    }

    /**
     * Split target string with custom separator and pick the index-th(start with 0) result.
     *
     * @param separator custom separator.
     * @param index index of the result which you want.
     * @return the string at the index of split results.
     */
    public OutType splitIndex(InType separator, InType index) {
        return toApiSpecificExpression(
                unresolvedCall(
                        SPLIT_INDEX,
                        toExpr(),
                        objectToExpression(separator),
                        objectToExpression(index)));
    }

    /**
     * Creates a map by parsing text. Split text into key-value pairs using two delimiters. The
     * first delimiter separates pairs, and the second delimiter separates key and value. If only
     * one parameter is given, default delimiters are used: ',' as delimiter1 and '=' as delimiter2.
     * Both delimiters are treated as regular expressions.
     *
     * @return the map
     */
    public OutType strToMap() {
        return toApiSpecificExpression(unresolvedCall(STR_TO_MAP, toExpr()));
    }

    /**
     * Creates a map by parsing text. Split text into key-value pairs using two delimiters. The
     * first delimiter separates pairs, and the second delimiter separates key and value. Both
     * {@code listDelimiter} and {@code keyValueDelimiter} are treated as regular expressions.
     *
     * @param listDelimiter the delimiter to separates pairs
     * @param keyValueDelimiter the delimiter to separates key and value
     * @return the map
     */
    public OutType strToMap(InType listDelimiter, InType keyValueDelimiter) {
        return toApiSpecificExpression(
                unresolvedCall(
                        STR_TO_MAP,
                        toExpr(),
                        objectToExpression(listDelimiter),
                        objectToExpression(keyValueDelimiter)));
    }

    /** Parses a date string in the form "yyyy-MM-dd" to a SQL Date.
     */
    public OutType toDate() {
        // implemented as a CAST to the legacy SQL DATE type
        return toApiSpecificExpression(
                unresolvedCall(
                        CAST, toExpr(), typeLiteral(fromLegacyInfoToDataType(SqlTimeTypeInfo.DATE))));
    }

    /** Parses a time string in the form "HH:mm:ss" to a SQL Time. */
    public OutType toTime() {
        return toApiSpecificExpression(
                unresolvedCall(
                        CAST, toExpr(), typeLiteral(fromLegacyInfoToDataType(SqlTimeTypeInfo.TIME))));
    }

    /** Parses a timestamp string in the form "yyyy-MM-dd HH:mm:ss[.SSS]" to a SQL Timestamp. */
    public OutType toTimestamp() {
        return toApiSpecificExpression(
                unresolvedCall(
                        CAST,
                        toExpr(),
                        typeLiteral(fromLegacyInfoToDataType(SqlTimeTypeInfo.TIMESTAMP))));
    }

    /**
     * Extracts parts of a time point or time interval. Returns the part as a long value.
     *
     * <p>e.g. lit("2006-06-05").toDate().extract(DAY) leads to 5
     */
    public OutType extract(TimeIntervalUnit timeIntervalUnit) {
        // note: EXTRACT takes the unit literal first, then the expression
        return toApiSpecificExpression(
                unresolvedCall(EXTRACT, valueLiteral(timeIntervalUnit), toExpr()));
    }

    /**
     * Rounds down a time point to the given unit.
     *
     * <p>e.g. lit("12:44:31").toDate().floor(MINUTE) leads to 12:44:00
     */
    public OutType floor(TimeIntervalUnit timeIntervalUnit) {
        return toApiSpecificExpression(
                unresolvedCall(FLOOR, toExpr(), valueLiteral(timeIntervalUnit)));
    }

    /**
     * Rounds up a time point to the given unit.
     *
     * <p>e.g. lit("12:44:31").toDate().ceil(MINUTE) leads to 12:45:00
     */
    public OutType ceil(TimeIntervalUnit timeIntervalUnit) {
        return toApiSpecificExpression(
                unresolvedCall(CEIL, toExpr(), valueLiteral(timeIntervalUnit)));
    }

    /**
     * Accesses the field of a Flink composite type (such as Tuple, POJO, etc.) by name and returns
     * it's value.
     *
     * @param name name of the field (similar to Flink's field expressions)
     */
    public OutType get(String name) {
        return toApiSpecificExpression(unresolvedCall(GET, toExpr(), valueLiteral(name)));
    }

    /**
     * Accesses the field of a Flink composite type (such as Tuple, POJO, etc.) by index and returns
     * it's value.
     *
     * @param index position of the field
     */
    public OutType get(int index) {
        return toApiSpecificExpression(unresolvedCall(GET, toExpr(), valueLiteral(index)));
    }

    /**
     * Converts a Flink composite type (such as Tuple, POJO, etc.) and all of its direct subtypes
     * into a flat representation where every subtype is a separate field.
     */
    public OutType flatten() {
        return toApiSpecificExpression(unresolvedCall(FLATTEN, toExpr()));
    }

    /**
     * Accesses the element of an array or map based on a key or an index (starting at 1).
     *
     * @param index key or position of the element (array index starting at 1)
     */
    public OutType at(InType index) {
        return toApiSpecificExpression(unresolvedCall(AT, toExpr(), objectToExpression(index)));
    }

    /** Returns the number of elements of an array or number of entries of a map. */
    public OutType cardinality() {
        return toApiSpecificExpression(unresolvedCall(CARDINALITY, toExpr()));
    }

    /**
     * Returns the sole element of an array with a single element. Returns null if the array is
     * empty. Throws an exception if the array has more than one element.
     */
    public OutType element() {
        return toApiSpecificExpression(unresolvedCall(ARRAY_ELEMENT, toExpr()));
    }

    /**
     * Returns whether the given element exists in an array.
     *
     * <p>Checking for null elements in the array is supported. If the array itself is null, the
     * function will return null. The given element is cast implicitly to the array's element type
     * if necessary.
     */
    public OutType arrayContains(InType needle) {
        return toApiSpecificExpression(
                unresolvedCall(ARRAY_CONTAINS, toExpr(), objectToExpression(needle)));
    }

    /**
     * Returns an array with unique elements.
     *
     * <p>If the array itself is null, the function will return null. Keeps ordering of elements.
     */
    public OutType arrayDistinct() {
        return toApiSpecificExpression(unresolvedCall(ARRAY_DISTINCT, toExpr()));
    }

    /**
     * Returns the position of the first occurrence of element in the given array as int. Returns 0
     * if the given value could not be found in the array. Returns null if either of the arguments
     * are null
     *
     * <p>NOTE: that this is not zero based, but 1-based index. The first element in the array has
     * index 1.
     */
    public OutType arrayPosition(InType needle) {
        return toApiSpecificExpression(
                unresolvedCall(ARRAY_POSITION, toExpr(), objectToExpression(needle)));
    }

    /**
     * Removes all elements that equal to element from array.
     *
     * <p>If the array itself is null, the function will return null. Keeps ordering of elements.
     */
    public OutType arrayRemove(InType needle) {
        return toApiSpecificExpression(
                unresolvedCall(ARRAY_REMOVE, toExpr(), objectToExpression(needle)));
    }

    /**
     * Returns an array in reverse order.
     *
     * <p>If the array itself is null, the function will return null.
     */
    public OutType arrayReverse() {
        return toApiSpecificExpression(unresolvedCall(ARRAY_REVERSE, toExpr()));
    }

    /**
     * Returns an array of the elements in the union of array1 and array2, without duplicates.
     *
     * <p>If any of the array is null, the function will return null.
     */
    public OutType arrayUnion(InType array) {
        return toApiSpecificExpression(
                unresolvedCall(ARRAY_UNION, toExpr(), objectToExpression(array)));
    }

    /**
     * Returns an array that is the result of concatenating at least one array. This array contains
     * all the elements in the first array, followed by all the elements in the second array, and so
     * forth, up to the Nth array.
     *
     * <p>If any input array is NULL, the function returns NULL.
     */
    public OutType arrayConcat(InType... arrays) {
        arrays = convertToArrays(arrays);
        // this expression comes first, followed by all additional arrays
        Expression[] args =
                Stream.concat(
                                Stream.of(toExpr()),
                                Arrays.stream(arrays).map(ApiExpressionUtils::objectToExpression))
                        .toArray(Expression[]::new);
        return toApiSpecificExpression(unresolvedCall(ARRAY_CONCAT, args));
    }

    /** Returns the keys of the map as an array. */
    public OutType mapKeys() {
        return toApiSpecificExpression(unresolvedCall(MAP_KEYS, toExpr()));
    }

    /** Returns the values of the map as an array.
*/ public OutType mapValues() { return toApiSpecificExpression(unresolvedCall(MAP_VALUES, toExpr())); } /** Returns an array of all entries in the given map. */ public OutType mapEntries() { return toApiSpecificExpression(unresolvedCall(MAP_ENTRIES, toExpr())); } /** * Declares a field as the rowtime attribute for indicating, accessing, and working in Flink's * event time. */ public OutType rowtime() { return toApiSpecificExpression(unresolvedCall(ROWTIME, toExpr())); } /** * Declares a field as the proctime attribute for indicating, accessing, and working in Flink's * processing time. */ public OutType proctime() { return toApiSpecificExpression(unresolvedCall(PROCTIME, toExpr())); } /** * Creates an interval of the given number of years. * * <p>The produced expression is of type {@code DataTypes.INTERVAL} */ public OutType year() { return toApiSpecificExpression(toMonthInterval(toExpr(), 12)); } /** Creates an interval of the given number of years. */ public OutType years() { return year(); } /** Creates an interval of the given number of quarters. */ public OutType quarter() { return toApiSpecificExpression(toMonthInterval(toExpr(), 3)); } /** Creates an interval of the given number of quarters. */ public OutType quarters() { return quarter(); } /** Creates an interval of the given number of months. */ public OutType month() { return toApiSpecificExpression(toMonthInterval(toExpr(), 1)); } /** Creates an interval of the given number of months. */ public OutType months() { return month(); } /** Creates an interval of the given number of weeks. */ public OutType week() { return toApiSpecificExpression(toMilliInterval(toExpr(), 7 * MILLIS_PER_DAY)); } /** Creates an interval of the given number of weeks. */ public OutType weeks() { return week(); } /** Creates an interval of the given number of days. */ public OutType day() { return toApiSpecificExpression(toMilliInterval(toExpr(), MILLIS_PER_DAY)); } /** Creates an interval of the given number of days. 
*/ public OutType days() { return day(); } /** Creates an interval of the given number of hours. */ public OutType hour() { return toApiSpecificExpression(toMilliInterval(toExpr(), MILLIS_PER_HOUR)); } /** Creates an interval of the given number of hours. */ public OutType hours() { return hour(); } /** Creates an interval of the given number of minutes. */ public OutType minute() { return toApiSpecificExpression(toMilliInterval(toExpr(), MILLIS_PER_MINUTE)); } /** Creates an interval of the given number of minutes. */ public OutType minutes() { return minute(); } /** Creates an interval of the given number of seconds. */ public OutType second() { return toApiSpecificExpression(toMilliInterval(toExpr(), MILLIS_PER_SECOND)); } /** Creates an interval of the given number of seconds. */ public OutType seconds() { return second(); } /** Creates an interval of the given number of milliseconds. */ public OutType milli() { return toApiSpecificExpression(toMilliInterval(toExpr(), 1)); } /** Creates an interval of the given number of milliseconds. */ public OutType millis() { return milli(); } /** * Returns the MD5 hash of the string argument; null if string is null. * * @return string of 32 hexadecimal digits or null */ public OutType md5() { return toApiSpecificExpression(unresolvedCall(MD5, toExpr())); } /** * Returns the SHA-1 hash of the string argument; null if string is null. * * @return string of 40 hexadecimal digits or null */ public OutType sha1() { return toApiSpecificExpression(unresolvedCall(SHA1, toExpr())); } /** * Returns the SHA-224 hash of the string argument; null if string is null. * * @return string of 56 hexadecimal digits or null */ public OutType sha224() { return toApiSpecificExpression(unresolvedCall(SHA224, toExpr())); } /** * Returns the SHA-256 hash of the string argument; null if string is null. 
* * @return string of 64 hexadecimal digits or null */ public OutType sha256() { return toApiSpecificExpression(unresolvedCall(SHA256, toExpr())); } /** * Returns the SHA-384 hash of the string argument; null if string is null. * * @return string of 96 hexadecimal digits or null */ public OutType sha384() { return toApiSpecificExpression(unresolvedCall(SHA384, toExpr())); } /** * Returns the SHA-512 hash of the string argument; null if string is null. * * @return string of 128 hexadecimal digits or null */ public OutType sha512() { return toApiSpecificExpression(unresolvedCall(SHA512, toExpr())); } /** * Returns the hash for the given string expression using the SHA-2 family of hash functions * (SHA-224, SHA-256, SHA-384, or SHA-512). * * @param hashLength bit length of the result (either 224, 256, 384, or 512) * @return string or null if one of the arguments is null. */ public OutType sha2(InType hashLength) { return toApiSpecificExpression( unresolvedCall(SHA2, toExpr(), objectToExpression(hashLength))); } /** * Determine whether a given string is valid JSON. * * <p>Specifying the optional {@param type} argument puts a constraint on which type of JSON * object is allowed. If the string is valid JSON, but not that type, {@code false} is returned. * The default is {@link JsonType * * <p>Examples: * * <pre>{@code * lit("1").isJson() * lit("[]").isJson() * lit("{}").isJson() * * lit("\"abc\"").isJson() * lit("abc").isJson() * nullOf(DataTypes.STRING()).isJson() * * lit("1").isJson(JsonType.SCALAR) * lit("1").isJson(JsonType.ARRAY) * lit("1").isJson(JsonType.OBJECT) * * lit("{}").isJson(JsonType.SCALAR) * lit("{}").isJson(JsonType.ARRAY) * lit("{}").isJson(JsonType.OBJECT) * }</pre> * * @param type The type of JSON object to validate against. * @return {@code true} if the string is a valid JSON of the given {@param type}, {@code false} * otherwise. 
*/ public OutType isJson(JsonType type) { return toApiSpecificExpression(unresolvedCall(IS_JSON, toExpr(), valueLiteral(type))); } /** * Determine whether a given string is valid JSON. * * <p>This is a shortcut for {@code isJson(JsonType.VALUE)}. See {@link * * @return {@code true} if the string is a valid JSON value, {@code false} otherwise. */ public OutType isJson() { return toApiSpecificExpression(unresolvedCall(IS_JSON, toExpr())); } /** * Returns whether a JSON string satisfies a given search criterion. * * <p>This follows the ISO/IEC TR 19075-6 specification for JSON support in SQL. * * <p>Examples: * * <pre>{@code * * lit("{\"a\": true}").jsonExists("$.a") * * lit("{\"a\": true}").jsonExists("$.b") * * lit("{\"a\": [{ \"b\": 1 }]}").jsonExists("$.a[0].b") * * * lit("{\"a\": true}").jsonExists("strict $.b", JsonExistsOnError.TRUE) * * lit("{\"a\": true}").jsonExists("strict $.b", JsonExistsOnError.FALSE) * }</pre> * * @param path JSON path to search for. * @param onError Behavior in case of an error. * @return {@code true} if the JSON string satisfies the search criterion. */ public OutType jsonExists(String path, JsonExistsOnError onError) { return toApiSpecificExpression( unresolvedCall(JSON_EXISTS, toExpr(), valueLiteral(path), valueLiteral(onError))); } /** * Determines whether a JSON string satisfies a given search criterion. * * <p>This follows the ISO/IEC TR 19075-6 specification for JSON support in SQL. * * <p>Examples: * * <pre>{@code * * lit("{\"a\": true}").jsonExists("$.a") * * lit("{\"a\": true}").jsonExists("$.b") * * lit("{\"a\": [{ \"b\": 1 }]}").jsonExists("$.a[0].b") * * * lit("{\"a\": true}").jsonExists("strict $.b", JsonExistsOnError.TRUE) * * lit("{\"a\": true}").jsonExists("strict $.b", JsonExistsOnError.FALSE) * }</pre> * * @param path JSON path to search for. * @return {@code true} if the JSON string satisfies the search criterion. 
*/ public OutType jsonExists(String path) { return toApiSpecificExpression(unresolvedCall(JSON_EXISTS, toExpr(), valueLiteral(path))); } /** * Extracts a scalar from a JSON string. * * <p>This method searches a JSON string for a given path expression and returns the value if * the value at that path is scalar. Non-scalar values cannot be returned. By default, the value * is returned as {@link DataTypes * be chosen, with the following types being supported: * * <ul> * <li>{@link DataTypes * <li>{@link DataTypes * <li>{@link DataTypes * <li>{@link DataTypes * </ul> * * <p>For empty path expressions or errors a behavior can be defined to either return {@code * null}, raise an error or return a defined default value instead. * * <p>See {@link * JsonQueryOnEmptyOrError)} for extracting non-scalar values from a JSON string. * * <p>Examples: * * <pre>{@code * * lit("{\"a\": true}").jsonValue("$.a") * * * lit("{\"a.b\": [0.998,0.996]}").jsonValue("$.['a.b'][0]", DataTypes.DOUBLE()) * * * lit("{\"a\": true}").jsonValue("$.a", DataTypes.BOOLEAN()) * * * lit("{\"a\": true}").jsonValue("lax $.b", * JsonValueOnEmptyOrError.DEFAULT, false, JsonValueOnEmptyOrError.NULL, null) * * * lit("{\"a\": true}").jsonValue("strict $.b", * JsonValueOnEmptyOrError.NULL, null, JsonValueOnEmptyOrError.DEFAULT, false) * }</pre> * * @param path JSON path to extract. * @param returningType Type to convert the extracted scalar to, otherwise defaults to {@link * DataTypes * @param onEmpty Behavior in case the path expression is empty. * @param defaultOnEmpty Default value to return if the path expression is empty and {@param * onEmpty} is set to {@link JsonValueOnEmptyOrError * @param onError Behavior in case of an error. * @param defaultOnError Default value to return if there is an error and {@param onError} is * set to {@link JsonValueOnEmptyOrError * @return The extracted scalar value. 
*/ public OutType jsonValue( String path, DataType returningType, JsonValueOnEmptyOrError onEmpty, InType defaultOnEmpty, JsonValueOnEmptyOrError onError, InType defaultOnError) { return toApiSpecificExpression( unresolvedCall( JSON_VALUE, toExpr(), valueLiteral(path), typeLiteral(returningType), valueLiteral(onEmpty), objectToExpression(defaultOnEmpty), valueLiteral(onError), objectToExpression(defaultOnError))); } /** * Extracts a scalar from a JSON string. * * <p>This method searches a JSON string for a given path expression and returns the value if * the value at that path is scalar. Non-scalar values cannot be returned. By default, the value * is returned as {@link DataTypes * * <p>See also {@link * JsonValueOnEmptyOrError, Object)}. * * @param path JSON path to extract. * @param returningType Type to convert the extracted scalar to, otherwise defaults to {@link * DataTypes * @return The extracted scalar value. */ public OutType jsonValue(String path, DataType returningType) { return jsonValue( path, returningType, JsonValueOnEmptyOrError.NULL, null, JsonValueOnEmptyOrError.NULL, null); } /** * Extracts a scalar from a JSON string. * * <p>This method searches a JSON string for a given path expression and returns the value if * the value at that path is scalar. Non-scalar values cannot be returned. By default, the value * is returned as {@link DataTypes * * <p>See also {@link * JsonValueOnEmptyOrError, Object)}. * * <p>This is a convenience method using {@link JsonValueOnEmptyOrError * and error cases with the same default value. * * @param path JSON path to extract. * @param returningType Type to convert the extracted scalar to, otherwise defaults to {@link * DataTypes * @return The extracted scalar value. 
*/ public OutType jsonValue(String path, DataType returningType, InType defaultOnEmptyOrError) { return jsonValue( path, returningType, JsonValueOnEmptyOrError.DEFAULT, defaultOnEmptyOrError, JsonValueOnEmptyOrError.DEFAULT, defaultOnEmptyOrError); } /** * Extracts a scalar from a JSON string. * * <p>This method searches a JSON string for a given path expression and returns the value if * the value at that path is scalar. Non-scalar values cannot be returned. By default, the value * is returned as {@link DataTypes * * <p>See also {@link * JsonValueOnEmptyOrError, Object)}. * * @param path JSON path to extract. * @return The extracted scalar value. */ public OutType jsonValue(String path) { return jsonValue(path, DataTypes.STRING()); } /** * Extracts JSON values from a JSON string. * * <p>This follows the ISO/IEC TR 19075-6 specification for JSON support in SQL. The result is * always returned as a {@link DataTypes * * <p>The {@param wrappingBehavior} determines whether the extracted value should be wrapped * into an array, and whether to do so unconditionally or only if the value itself isn't an * array already. * * <p>{@param onEmpty} and {@param onError} determine the behavior in case the path expression * is empty, or in case an error was raised, respectively. By default, in both cases {@code * null} is returned. Other choices are to use an empty array, an empty object, or to raise an * error. * * <p>See {@link * JsonValueOnEmptyOrError, Object)} for extracting scalars from a JSON string. 
* * <p>Examples: * * <pre>{@code * lit("{ \"a\": { \"b\": 1 } }").jsonQuery("$.a") * lit("[1, 2]").jsonQuery("$") * nullOf(DataTypes.STRING()).jsonQuery("$") * * * lit("{}").jsonQuery("$", JsonQueryWrapper.CONDITIONAL_ARRAY) * lit("[1, 2]").jsonQuery("$", JsonQueryWrapper.CONDITIONAL_ARRAY) * lit("[1, 2]").jsonQuery("$", JsonQueryWrapper.UNCONDITIONAL_ARRAY) * * * lit(1).jsonQuery("$") * lit(1).jsonQuery("$", JsonQueryWrapper.CONDITIONAL_ARRAY) * * * * lit("{}").jsonQuery("lax $.invalid", JsonQueryWrapper.WITHOUT_ARRAY, * JsonQueryOnEmptyOrError.EMPTY_OBJECT, JsonQueryOnEmptyOrError.NULL) * * lit("{}").jsonQuery("strict $.invalid", JsonQueryWrapper.WITHOUT_ARRAY, * JsonQueryOnEmptyOrError.NULL, JsonQueryOnEmptyOrError.EMPTY_ARRAY) * }</pre> * * @param path JSON path to search for. * @param wrappingBehavior Determine if and when to wrap the resulting value into an array. * @param onEmpty Behavior in case the path expression is empty. * @param onError Behavior in case of an error. * @return The extracted JSON value. */ public OutType jsonQuery( String path, JsonQueryWrapper wrappingBehavior, JsonQueryOnEmptyOrError onEmpty, JsonQueryOnEmptyOrError onError) { return toApiSpecificExpression( unresolvedCall( JSON_QUERY, toExpr(), valueLiteral(path), valueLiteral(wrappingBehavior), valueLiteral(onEmpty), valueLiteral(onError))); } /** * Extracts JSON values from a JSON string. * * <p>The {@param wrappingBehavior} determines whether the extracted value should be wrapped * into an array, and whether to do so unconditionally or only if the value itself isn't an * array already. * * <p>See also {@link * JsonQueryOnEmptyOrError)}. * * @param path JSON path to search for. * @param wrappingBehavior Determine if and when to wrap the resulting value into an array. * @return The extracted JSON value. 
*/ public OutType jsonQuery(String path, JsonQueryWrapper wrappingBehavior) { return jsonQuery( path, wrappingBehavior, JsonQueryOnEmptyOrError.NULL, JsonQueryOnEmptyOrError.NULL); } /** * Extracts JSON values from a JSON string. * * <p>See also {@link * JsonQueryOnEmptyOrError)}. * * @param path JSON path to search for. * @return The extracted JSON value. */ public OutType jsonQuery(String path) { return jsonQuery(path, JsonQueryWrapper.WITHOUT_ARRAY); } }
Also make sure to rename the `Build` variable to `build` to follow the Java coding guidelines for variables :wink:
public void testAddAndRemoveExtension() throws IOException, URISyntaxException, InterruptedException { final File projectDir = getProjectDir("add-remove-extension-single-module"); runGradleWrapper(projectDir, ":addExtension", "--extensions=hibernate-orm"); final Path Build = projectDir.toPath().resolve("build.gradle"); assertThat(Build).exists(); assertThat(Files.readString(Build)).contains("implementation 'io.quarkus:quarkus-hibernate-orm'"); runGradleWrapper(projectDir, ":removeExtension", "--extensions=hibernate-orm"); assertThat(Files.readString(Build)).doesNotContain("implementation 'io.quarkus:quarkus-hibernate-orm'"); }
assertThat(Files.readString(Build)).contains("implementation 'io.quarkus:quarkus-hibernate-orm'");
public void testAddAndRemoveExtension() throws IOException, URISyntaxException, InterruptedException { final File projectDir = getProjectDir("add-remove-extension-single-module"); runGradleWrapper(projectDir, ":addExtension", "--extensions=hibernate-orm"); final Path build = projectDir.toPath().resolve("build.gradle"); assertThat(build).exists(); assertThat(new String(Files.readAllBytes(build))).contains("implementation 'io.quarkus:quarkus-hibernate-orm'"); runGradleWrapper(projectDir, ":removeExtension", "--extensions=hibernate-orm"); assertThat(new String(Files.readAllBytes(build))).doesNotContain("implementation 'io.quarkus:quarkus-hibernate-orm'"); }
class AddExtensionToSingleModuleProjectTest extends QuarkusGradleWrapperTestBase { @Test @Test public void testRemoveNonExistentExtension() throws IOException, URISyntaxException, InterruptedException { final File projectDir = getProjectDir("add-remove-extension-single-module"); runGradleWrapper(projectDir, "clean", "build"); final Path Build = projectDir.toPath().resolve("build.gradle"); assertThat(Build).exists(); assertThat(Files.readString(Build)).doesNotContain("implementation 'io.quarkus:quarkus-hibernate-orm'"); runGradleWrapper(projectDir, ":removeExtension", "--extensions=hibernate-orm"); assertThat(Files.readString(Build)).doesNotContain("implementation 'io.quarkus:quarkus-hibernate-orm'"); } }
class AddExtensionToSingleModuleProjectTest extends QuarkusGradleWrapperTestBase { @Test @Test public void testRemoveNonExistentExtension() throws IOException, URISyntaxException, InterruptedException { final File projectDir = getProjectDir("add-remove-extension-single-module"); runGradleWrapper(projectDir, "clean", "build"); final Path build = projectDir.toPath().resolve("build.gradle"); assertThat(build).exists(); assertThat(new String(Files.readAllBytes(build))).doesNotContain("implementation 'io.quarkus:quarkus-hibernate-orm'"); runGradleWrapper(projectDir, ":removeExtension", "--extensions=hibernate-orm"); assertThat(new String(Files.readAllBytes(build))).doesNotContain("implementation 'io.quarkus:quarkus-hibernate-orm'"); } }
Actually, that's not the same thing due to the behavior of `NodeFilter#nextMatches`. Your suggestion would be fine if `NodeFilter#nextMatches` method didn't return `true` for the last filter in the chain. I think the intention is that filters should match a given node if no filters in the chain actively reject it (a bit strange, but other code depends on this behavior).
public boolean matches(Node node) { if (!version.isEmpty() && !node.status().osVersion().filter(v -> v.equals(version)).isPresent()) { return false; } return nextMatches(node); }
if (!version.isEmpty() && !node.status().osVersion().filter(v -> v.equals(version)).isPresent()) {
public boolean matches(Node node) { if (!version.isEmpty() && !node.status().osVersion().filter(v -> v.equals(version)).isPresent()) { return false; } return nextMatches(node); }
class NodeOsVersionFilter extends NodeFilter { private final Version version; private NodeOsVersionFilter(Version version, NodeFilter next) { super(next); this.version = Objects.requireNonNull(version, "version cannot be null"); } @Override public static NodeOsVersionFilter from(String version, NodeFilter filter) { return new NodeOsVersionFilter(Version.fromString(version), filter); } }
class NodeOsVersionFilter extends NodeFilter { private final Version version; private NodeOsVersionFilter(Version version, NodeFilter next) { super(next); this.version = Objects.requireNonNull(version, "version cannot be null"); } @Override public static NodeOsVersionFilter from(String version, NodeFilter filter) { return new NodeOsVersionFilter(Version.fromString(version), filter); } }
@srnagar `String eventHubName()` is not renamed.
private static String getExpression(EventPosition eventPosition) { final String isInclusiveFlag = eventPosition.isInclusive() ? "=" : ""; if (eventPosition.getOffset() != null) { return String.format( AmqpConstants.AMQP_ANNOTATION_FORMAT, OFFSET_ANNOTATION_NAME.getValue(), isInclusiveFlag, eventPosition.getOffset()); } if (eventPosition.getSequenceNumber() != null) { return String.format( AmqpConstants.AMQP_ANNOTATION_FORMAT, SEQUENCE_NUMBER_ANNOTATION_NAME.getValue(), isInclusiveFlag, eventPosition.getSequenceNumber()); } if (eventPosition.getEnqueuedDateTime() != null) { String ms; try { ms = Long.toString(eventPosition.getEnqueuedDateTime().toEpochMilli()); } catch (ArithmeticException ex) { ms = Long.toString(Long.MAX_VALUE); } return String.format( AmqpConstants.AMQP_ANNOTATION_FORMAT, ENQUEUED_TIME_UTC_ANNOTATION_NAME.getValue(), isInclusiveFlag, ms); } throw new IllegalArgumentException("No starting position was set."); }
ms = Long.toString(Long.MAX_VALUE);
private static String getExpression(EventPosition eventPosition) { final String isInclusiveFlag = eventPosition.isInclusive() ? "=" : ""; if (eventPosition.getOffset() != null) { return String.format( AmqpConstants.AMQP_ANNOTATION_FORMAT, OFFSET_ANNOTATION_NAME.getValue(), isInclusiveFlag, eventPosition.getOffset()); } if (eventPosition.getSequenceNumber() != null) { return String.format( AmqpConstants.AMQP_ANNOTATION_FORMAT, SEQUENCE_NUMBER_ANNOTATION_NAME.getValue(), isInclusiveFlag, eventPosition.getSequenceNumber()); } if (eventPosition.getEnqueuedDateTime() != null) { String ms; try { ms = Long.toString(eventPosition.getEnqueuedDateTime().toEpochMilli()); } catch (ArithmeticException ex) { ms = Long.toString(Long.MAX_VALUE); } return String.format( AmqpConstants.AMQP_ANNOTATION_FORMAT, ENQUEUED_TIME_UTC_ANNOTATION_NAME.getValue(), isInclusiveFlag, ms); } throw new IllegalArgumentException("No starting position was set."); }
class EventHubAsyncClient implements Closeable { /** * The name of the default consumer group in the Event Hubs service. */ public static final String DEFAULT_CONSUMER_GROUP_NAME = "$Default"; private static final String RECEIVER_ENTITY_PATH_FORMAT = "%s/ConsumerGroups/%s/Partitions/%s"; private static final String SENDER_ENTITY_PATH_FORMAT = "%s/Partitions/%s"; private final ClientLogger logger = new ClientLogger(EventHubAsyncClient.class); private final String connectionId; private final Mono<EventHubConnection> connectionMono; private final AtomicBoolean hasConnection = new AtomicBoolean(false); private final ConnectionOptions connectionOptions; private final String eventHubName; private final EventHubProducerOptions defaultProducerOptions; private final EventHubConsumerOptions defaultConsumerOptions; private final TracerProvider tracerProvider; EventHubAsyncClient(ConnectionOptions connectionOptions, ReactorProvider provider, ReactorHandlerProvider handlerProvider, TracerProvider tracerProvider) { Objects.requireNonNull(connectionOptions, "'connectionOptions' cannot be null."); Objects.requireNonNull(provider, "'provider' cannot be null."); Objects.requireNonNull(handlerProvider, "'handlerProvider' cannot be null."); Objects.requireNonNull(tracerProvider, "'tracerProvider' cannot be null."); this.connectionOptions = connectionOptions; this.tracerProvider = tracerProvider; this.eventHubName = connectionOptions.getEventHubName(); this.connectionId = StringUtil.getRandomString("MF"); this.connectionMono = Mono.fromCallable(() -> { return (EventHubConnection) new ReactorConnection(connectionId, connectionOptions, provider, handlerProvider, new ResponseMapper()); }).doOnSubscribe(c -> hasConnection.set(true)) .cache(); this.defaultProducerOptions = new EventHubProducerOptions() .setRetry(connectionOptions.getRetry()); this.defaultConsumerOptions = new EventHubConsumerOptions() .setRetry(connectionOptions.getRetry()) .setScheduler(connectionOptions.getScheduler()); } 
/** * Retrieves information about an Event Hub, including the number of partitions present and their identifiers. * * @return The set of information for the Event Hub that this client is associated with. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<EventHubProperties> getProperties() { return connectionMono .flatMap(connection -> connection .getManagementNode().flatMap(EventHubManagementNode::getEventHubProperties)); } /** * Retrieves the identifiers for the partitions of an Event Hub. * * @return A Flux of identifiers for the partitions of an Event Hub. */ @ServiceMethod(returns = ReturnType.COLLECTION) public Flux<String> getPartitionIds() { return getProperties().flatMapMany(properties -> Flux.fromArray(properties.getPartitionIds())); } /** * Retrieves information about a specific partition for an Event Hub, including elements that describe the available * events in the partition event stream. * * @param partitionId The unique identifier of a partition associated with the Event Hub. * @return The set of information for the requested partition under the Event Hub this client is associated with. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<PartitionProperties> getPartitionProperties(String partitionId) { return connectionMono.flatMap( connection -> connection.getManagementNode().flatMap(node -> { return node.getPartitionProperties(partitionId); })); } /** * Creates an Event Hub producer responsible for transmitting {@link EventData} to the Event Hub, grouped together * in batches. Event data is automatically routed to an available partition. * * @return A new {@link EventHubAsyncProducer}. */ public EventHubAsyncProducer createProducer() { return createProducer(defaultProducerOptions); } /** * Creates an Event Hub producer responsible for transmitting {@link EventData} to the Event Hub, grouped together * in batches. If {@link EventHubProducerOptions * events are routed to that specific partition. 
Otherwise, events are automatically routed to an available * partition. * * @param options The set of options to apply when creating the producer. * @return A new {@link EventHubAsyncProducer}. * @throws NullPointerException if {@code options} is {@code null}. */ public EventHubAsyncProducer createProducer(EventHubProducerOptions options) { Objects.requireNonNull(options, "'options' cannot be null."); final EventHubProducerOptions clonedOptions = options.clone(); if (clonedOptions.getRetry() == null) { clonedOptions.setRetry(connectionOptions.getRetry()); } final String entityPath; final String linkName; if (ImplUtils.isNullOrEmpty(options.getPartitionId())) { entityPath = eventHubName; linkName = StringUtil.getRandomString("EC"); } else { entityPath = String.format(Locale.US, SENDER_ENTITY_PATH_FORMAT, eventHubName, options.getPartitionId()); linkName = StringUtil.getRandomString("PS"); } final Mono<AmqpSendLink> amqpLinkMono = connectionMono .flatMap(connection -> connection.createSession(entityPath)) .flatMap(session -> { logger.verbose("Creating producer for {}", entityPath); final RetryPolicy retryPolicy = RetryUtil.getRetryPolicy(clonedOptions.getRetry()); return session.createProducer(linkName, entityPath, clonedOptions.getRetry().getTryTimeout(), retryPolicy) .cast(AmqpSendLink.class); }); return new EventHubAsyncProducer(amqpLinkMono, clonedOptions, tracerProvider); } /** * Creates an Event Hub consumer responsible for reading {@link EventData} from a specific Event Hub partition, as a * member of the specified consumer group, and begins reading events from the {@code eventPosition}. * * The consumer created is non-exclusive, allowing multiple consumers from the same consumer group to be actively * reading events from the partition. These non-exclusive consumers are sometimes referred to as "Non-epoch * Consumers". * * @param consumerGroup The name of the consumer group this consumer is associated with. Events are read in * the context of this group. 
The name of the consumer group that is created by default is {@link * * @param partitionId The identifier of the Event Hub partition. * @param eventPosition The position within the partition where the consumer should begin reading events. * @return A new {@link EventHubAsyncConsumer} that receives events from the partition at the given position. * @throws NullPointerException If {@code eventPosition}, or {@code options} is {@code null}. * @throws IllegalArgumentException If {@code consumerGroup} or {@code partitionId} is {@code null} or an * empty string. */ public EventHubAsyncConsumer createConsumer(String consumerGroup, String partitionId, EventPosition eventPosition) { return createConsumer(consumerGroup, partitionId, eventPosition, defaultConsumerOptions); } /** * Creates an Event Hub consumer responsible for reading {@link EventData} from a specific Event Hub partition, as a * member of the configured consumer group, and begins reading events from the specified {@code eventPosition}. * * <p> * A consumer may be exclusive, which asserts ownership over the partition for the consumer group to ensure that * only one consumer from that group is reading the from the partition. These exclusive consumers are sometimes * referred to as "Epoch Consumers." * * A consumer may also be non-exclusive, allowing multiple consumers from the same consumer group to be actively * reading events from the partition. These non-exclusive consumers are sometimes referred to as "Non-epoch * Consumers." * * Designating a consumer as exclusive may be specified in the {@code options}, by setting {@link * EventHubConsumerOptions * non-exclusive. * </p> * * @param consumerGroup The name of the consumer group this consumer is associated with. Events are read in * the context of this group. The name of the consumer group that is created by default is {@link * * @param partitionId The identifier of the Event Hub partition from which events will be received. 
* @param eventPosition The position within the partition where the consumer should begin reading events. * @param options The set of options to apply when creating the consumer. * @return An new {@link EventHubAsyncConsumer} that receives events from the partition with all configured {@link * EventHubConsumerOptions}. * @throws NullPointerException If {@code eventPosition}, {@code consumerGroup}, {@code partitionId}, or * {@code options} is {@code null}. * @throws IllegalArgumentException If {@code consumerGroup} or {@code partitionId} is an empty string. */ public EventHubAsyncConsumer createConsumer(String consumerGroup, String partitionId, EventPosition eventPosition, EventHubConsumerOptions options) { Objects.requireNonNull(eventPosition, "'eventPosition' cannot be null."); Objects.requireNonNull(options, "'options' cannot be null."); Objects.requireNonNull(consumerGroup, "'consumerGroup' cannot be null."); Objects.requireNonNull(partitionId, "'partitionId' cannot be null."); if (consumerGroup.isEmpty()) { throw logger.logExceptionAsError( new IllegalArgumentException("'consumerGroup' cannot be an empty string.")); } else if (partitionId.isEmpty()) { throw logger.logExceptionAsError( new IllegalArgumentException("'partitionId' cannot be an empty string.")); } final EventHubConsumerOptions clonedOptions = options.clone(); if (clonedOptions.getScheduler() == null) { clonedOptions.setScheduler(connectionOptions.getScheduler()); } if (clonedOptions.getRetry() == null) { clonedOptions.setRetry(connectionOptions.getRetry()); } final String linkName = StringUtil.getRandomString("PR"); final String entityPath = String.format(Locale.US, RECEIVER_ENTITY_PATH_FORMAT, eventHubName, consumerGroup, partitionId); final Mono<AmqpReceiveLink> receiveLinkMono = connectionMono.flatMap(connection -> { return connection.createSession(entityPath).cast(EventHubSession.class); }).flatMap(session -> { logger.verbose("Creating consumer for path: {}", entityPath); final RetryPolicy 
retryPolicy = RetryUtil.getRetryPolicy(clonedOptions.getRetry()); return session.createConsumer(linkName, entityPath, getExpression(eventPosition), clonedOptions.getRetry().getTryTimeout(), retryPolicy, options.getOwnerLevel(), options.getIdentifier()) .cast(AmqpReceiveLink.class); }); return new EventHubAsyncConsumer(receiveLinkMono, clonedOptions); } /** * Closes and disposes of connection to service. Any {@link EventHubAsyncConsumer EventHubConsumers} and {@link * EventHubAsyncProducer EventHubProducers} created with this instance will have their connections closed. */ @Override public void close() { if (hasConnection.getAndSet(false)) { try { final AmqpConnection connection = connectionMono.block(connectionOptions.getRetry().getTryTimeout()); if (connection != null) { connection.close(); } } catch (IOException exception) { throw logger.logExceptionAsError( new AmqpException(false, "Unable to close connection to service", exception, new ErrorContext(connectionOptions.getHost()))); } } } String eventHubName() { return this.eventHubName; } private static class ResponseMapper implements AmqpResponseMapper { @Override public EventHubProperties toEventHubProperties(Map<?, ?> amqpBody) { return new EventHubProperties( (String) amqpBody.get(ManagementChannel.MANAGEMENT_ENTITY_NAME_KEY), ((Date) amqpBody.get(ManagementChannel.MANAGEMENT_RESULT_CREATED_AT)).toInstant(), (String[]) amqpBody.get(ManagementChannel.MANAGEMENT_RESULT_PARTITION_IDS)); } @Override public PartitionProperties toPartitionProperties(Map<?, ?> amqpBody) { return new PartitionProperties( (String) amqpBody.get(ManagementChannel.MANAGEMENT_ENTITY_NAME_KEY), (String) amqpBody.get(ManagementChannel.MANAGEMENT_PARTITION_NAME_KEY), (Long) amqpBody.get(ManagementChannel.MANAGEMENT_RESULT_BEGIN_SEQUENCE_NUMBER), (Long) amqpBody.get(ManagementChannel.MANAGEMENT_RESULT_LAST_ENQUEUED_SEQUENCE_NUMBER), (String) amqpBody.get(ManagementChannel.MANAGEMENT_RESULT_LAST_ENQUEUED_OFFSET), ((Date) 
amqpBody.get(ManagementChannel.MANAGEMENT_RESULT_LAST_ENQUEUED_TIME_UTC)).toInstant(), (Boolean) amqpBody.get(ManagementChannel.MANAGEMENT_RESULT_PARTITION_IS_EMPTY)); } } }
class EventHubAsyncClient implements Closeable { /** * The name of the default consumer group in the Event Hubs service. */ public static final String DEFAULT_CONSUMER_GROUP_NAME = "$Default"; private static final String RECEIVER_ENTITY_PATH_FORMAT = "%s/ConsumerGroups/%s/Partitions/%s"; private static final String SENDER_ENTITY_PATH_FORMAT = "%s/Partitions/%s"; private final ClientLogger logger = new ClientLogger(EventHubAsyncClient.class); private final String connectionId; private final Mono<EventHubConnection> connectionMono; private final AtomicBoolean hasConnection = new AtomicBoolean(false); private final ConnectionOptions connectionOptions; private final String eventHubName; private final EventHubProducerOptions defaultProducerOptions; private final EventHubConsumerOptions defaultConsumerOptions; private final TracerProvider tracerProvider; EventHubAsyncClient(ConnectionOptions connectionOptions, ReactorProvider provider, ReactorHandlerProvider handlerProvider, TracerProvider tracerProvider) { Objects.requireNonNull(connectionOptions, "'connectionOptions' cannot be null."); Objects.requireNonNull(provider, "'provider' cannot be null."); Objects.requireNonNull(handlerProvider, "'handlerProvider' cannot be null."); Objects.requireNonNull(tracerProvider, "'tracerProvider' cannot be null."); this.connectionOptions = connectionOptions; this.tracerProvider = tracerProvider; this.eventHubName = connectionOptions.getEventHubName(); this.connectionId = StringUtil.getRandomString("MF"); this.connectionMono = Mono.fromCallable(() -> { return (EventHubConnection) new ReactorConnection(connectionId, connectionOptions, provider, handlerProvider, new ResponseMapper()); }).doOnSubscribe(c -> hasConnection.set(true)) .cache(); this.defaultProducerOptions = new EventHubProducerOptions() .setRetry(connectionOptions.getRetry()); this.defaultConsumerOptions = new EventHubConsumerOptions() .setRetry(connectionOptions.getRetry()) .setScheduler(connectionOptions.getScheduler()); } 
/** * Retrieves information about an Event Hub, including the number of partitions present and their identifiers. * * @return The set of information for the Event Hub that this client is associated with. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<EventHubProperties> getProperties() { return connectionMono .flatMap(connection -> connection .getManagementNode().flatMap(EventHubManagementNode::getEventHubProperties)); } /** * Retrieves the identifiers for the partitions of an Event Hub. * * @return A Flux of identifiers for the partitions of an Event Hub. */ @ServiceMethod(returns = ReturnType.COLLECTION) public Flux<String> getPartitionIds() { return getProperties().flatMapMany(properties -> Flux.fromArray(properties.getPartitionIds())); } /** * Retrieves information about a specific partition for an Event Hub, including elements that describe the available * events in the partition event stream. * * @param partitionId The unique identifier of a partition associated with the Event Hub. * @return The set of information for the requested partition under the Event Hub this client is associated with. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<PartitionProperties> getPartitionProperties(String partitionId) { return connectionMono.flatMap( connection -> connection.getManagementNode().flatMap(node -> { return node.getPartitionProperties(partitionId); })); } /** * Creates an Event Hub producer responsible for transmitting {@link EventData} to the Event Hub, grouped together * in batches. Event data is automatically routed to an available partition. * * @return A new {@link EventHubAsyncProducer}. */ public EventHubAsyncProducer createProducer() { return createProducer(defaultProducerOptions); } /** * Creates an Event Hub producer responsible for transmitting {@link EventData} to the Event Hub, grouped together * in batches. If {@link EventHubProducerOptions * events are routed to that specific partition. 
Otherwise, events are automatically routed to an available * partition. * * @param options The set of options to apply when creating the producer. * @return A new {@link EventHubAsyncProducer}. * @throws NullPointerException if {@code options} is {@code null}. */ public EventHubAsyncProducer createProducer(EventHubProducerOptions options) { Objects.requireNonNull(options, "'options' cannot be null."); final EventHubProducerOptions clonedOptions = options.clone(); if (clonedOptions.getRetry() == null) { clonedOptions.setRetry(connectionOptions.getRetry()); } final String entityPath; final String linkName; if (ImplUtils.isNullOrEmpty(options.getPartitionId())) { entityPath = eventHubName; linkName = StringUtil.getRandomString("EC"); } else { entityPath = String.format(Locale.US, SENDER_ENTITY_PATH_FORMAT, eventHubName, options.getPartitionId()); linkName = StringUtil.getRandomString("PS"); } final Mono<AmqpSendLink> amqpLinkMono = connectionMono .flatMap(connection -> connection.createSession(entityPath)) .flatMap(session -> { logger.verbose("Creating producer for {}", entityPath); final RetryPolicy retryPolicy = RetryUtil.getRetryPolicy(clonedOptions.getRetry()); return session.createProducer(linkName, entityPath, clonedOptions.getRetry().getTryTimeout(), retryPolicy).cast(AmqpSendLink.class); }); return new EventHubAsyncProducer(amqpLinkMono, clonedOptions, tracerProvider); } /** * Creates an Event Hub consumer responsible for reading {@link EventData} from a specific Event Hub partition, as a * member of the specified consumer group, and begins reading events from the {@code eventPosition}. * * The consumer created is non-exclusive, allowing multiple consumers from the same consumer group to be actively * reading events from the partition. These non-exclusive consumers are sometimes referred to as "Non-epoch * Consumers". * * @param consumerGroup The name of the consumer group this consumer is associated with. Events are read in the * context of this group. 
The name of the consumer group that is created by default is {@link * * @param partitionId The identifier of the Event Hub partition. * @param eventPosition The position within the partition where the consumer should begin reading events. * @return A new {@link EventHubAsyncConsumer} that receives events from the partition at the given position. * @throws NullPointerException If {@code eventPosition}, or {@code options} is {@code null}. * @throws IllegalArgumentException If {@code consumerGroup} or {@code partitionId} is {@code null} or an empty * string. */ public EventHubAsyncConsumer createConsumer(String consumerGroup, String partitionId, EventPosition eventPosition) { return createConsumer(consumerGroup, partitionId, eventPosition, defaultConsumerOptions); } /** * Creates an Event Hub consumer responsible for reading {@link EventData} from a specific Event Hub partition, as a * member of the configured consumer group, and begins reading events from the specified {@code eventPosition}. * * <p> * A consumer may be exclusive, which asserts ownership over the partition for the consumer group to ensure that * only one consumer from that group is reading the from the partition. These exclusive consumers are sometimes * referred to as "Epoch Consumers." * * A consumer may also be non-exclusive, allowing multiple consumers from the same consumer group to be actively * reading events from the partition. These non-exclusive consumers are sometimes referred to as "Non-epoch * Consumers." * * Designating a consumer as exclusive may be specified in the {@code options}, by setting {@link * EventHubConsumerOptions * non-exclusive. * </p> * * @param consumerGroup The name of the consumer group this consumer is associated with. Events are read in the * context of this group. The name of the consumer group that is created by default is {@link * * @param partitionId The identifier of the Event Hub partition from which events will be received. 
* @param eventPosition The position within the partition where the consumer should begin reading events. * @param options The set of options to apply when creating the consumer. * @return An new {@link EventHubAsyncConsumer} that receives events from the partition with all configured {@link * EventHubConsumerOptions}. * @throws NullPointerException If {@code eventPosition}, {@code consumerGroup}, {@code partitionId}, or * {@code options} is {@code null}. * @throws IllegalArgumentException If {@code consumerGroup} or {@code partitionId} is an empty string. */ public EventHubAsyncConsumer createConsumer(String consumerGroup, String partitionId, EventPosition eventPosition, EventHubConsumerOptions options) { Objects.requireNonNull(eventPosition, "'eventPosition' cannot be null."); Objects.requireNonNull(options, "'options' cannot be null."); Objects.requireNonNull(consumerGroup, "'consumerGroup' cannot be null."); Objects.requireNonNull(partitionId, "'partitionId' cannot be null."); if (consumerGroup.isEmpty()) { throw logger.logExceptionAsError( new IllegalArgumentException("'consumerGroup' cannot be an empty string.")); } else if (partitionId.isEmpty()) { throw logger.logExceptionAsError( new IllegalArgumentException("'partitionId' cannot be an empty string.")); } final EventHubConsumerOptions clonedOptions = options.clone(); if (clonedOptions.getScheduler() == null) { clonedOptions.setScheduler(connectionOptions.getScheduler()); } if (clonedOptions.getRetry() == null) { clonedOptions.setRetry(connectionOptions.getRetry()); } final String linkName = StringUtil.getRandomString("PR"); final String entityPath = String.format(Locale.US, RECEIVER_ENTITY_PATH_FORMAT, eventHubName, consumerGroup, partitionId); final Mono<AmqpReceiveLink> receiveLinkMono = connectionMono.flatMap(connection -> connection.createSession(entityPath).cast(EventHubSession.class)).flatMap(session -> { logger.verbose("Creating consumer for path: {}", entityPath); final RetryPolicy retryPolicy = 
RetryUtil.getRetryPolicy(clonedOptions.getRetry()); return session.createConsumer(linkName, entityPath, getExpression(eventPosition), clonedOptions.getRetry().getTryTimeout(), retryPolicy, options.getOwnerLevel(), options.getIdentifier()).cast(AmqpReceiveLink.class); }); return new EventHubAsyncConsumer(receiveLinkMono, clonedOptions); } /** * Closes and disposes of connection to service. Any {@link EventHubAsyncConsumer EventHubConsumers} and {@link * EventHubAsyncProducer EventHubProducers} created with this instance will have their connections closed. */ @Override public void close() { if (hasConnection.getAndSet(false)) { try { final AmqpConnection connection = connectionMono.block(connectionOptions.getRetry().getTryTimeout()); if (connection != null) { connection.close(); } } catch (IOException exception) { throw logger.logExceptionAsError( new AmqpException(false, "Unable to close connection to service", exception, new ErrorContext(connectionOptions.getHost()))); } } } String getEventHubName() { return this.eventHubName; } private static class ResponseMapper implements AmqpResponseMapper { @Override public EventHubProperties toEventHubProperties(Map<?, ?> amqpBody) { return new EventHubProperties( (String) amqpBody.get(ManagementChannel.MANAGEMENT_ENTITY_NAME_KEY), ((Date) amqpBody.get(ManagementChannel.MANAGEMENT_RESULT_CREATED_AT)).toInstant(), (String[]) amqpBody.get(ManagementChannel.MANAGEMENT_RESULT_PARTITION_IDS)); } @Override public PartitionProperties toPartitionProperties(Map<?, ?> amqpBody) { return new PartitionProperties( (String) amqpBody.get(ManagementChannel.MANAGEMENT_ENTITY_NAME_KEY), (String) amqpBody.get(ManagementChannel.MANAGEMENT_PARTITION_NAME_KEY), (Long) amqpBody.get(ManagementChannel.MANAGEMENT_RESULT_BEGIN_SEQUENCE_NUMBER), (Long) amqpBody.get(ManagementChannel.MANAGEMENT_RESULT_LAST_ENQUEUED_SEQUENCE_NUMBER), (String) amqpBody.get(ManagementChannel.MANAGEMENT_RESULT_LAST_ENQUEUED_OFFSET), ((Date) 
amqpBody.get(ManagementChannel.MANAGEMENT_RESULT_LAST_ENQUEUED_TIME_UTC)).toInstant(), (Boolean) amqpBody.get(ManagementChannel.MANAGEMENT_RESULT_PARTITION_IS_EMPTY)); } } }
Need to keep jvmTarget option since we have some tool-integration test on running a jar which by default use ballerina-tools distribution. We can do some modifications in integration test utils to run jvm integration tests on jballerina distribution. hence keep this for moment.
public void execute() { if (helpFlag) { String commandUsageInfo = BLauncherCmd.getCommandUsageInfo(BUILD_COMMAND); outStream.println(commandUsageInfo); return; } if (argList != null && argList.size() > 1) { throw LauncherUtils.createUsageExceptionWithHelp("too many arguments"); } Path sourceRootPath = Paths.get(System.getProperty(USER_DIR)); if (nativeBinary) { genNativeBinary(sourceRootPath, argList); } else if (argList == null || argList.size() == 0) { BuilderUtils.compileWithTestsAndWrite(sourceRootPath, offline, lockEnabled, skiptests, experimentalFlag, siddhiRuntimeFlag); } else { String targetFileName; String pkgName = argList.get(0); if (pkgName.endsWith("/")) { pkgName = pkgName.substring(0, pkgName.length() - 1); } Path sourcePath = Paths.get(pkgName); pkgName = sourcePath.normalize().toString(); if (outputFileName != null && !outputFileName.isEmpty()) { targetFileName = outputFileName; } else { targetFileName = pkgName; } Path resolvedFullPath = sourceRootPath.resolve(sourcePath); if (Files.isRegularFile(resolvedFullPath) && sourcePath.toString().endsWith(BLangConstants.BLANG_SRC_FILE_SUFFIX) && !RepoUtils.hasProjectRepo(sourceRootPath)) { targetFileName = getTargetFileName(Paths.get(targetFileName)); Path parent = resolvedFullPath.getParent(); sourceRootPath = parent != null ? parent : sourceRootPath; Path resolvedFileName = resolvedFullPath.getFileName(); pkgName = resolvedFileName != null ? resolvedFileName.toString() : pkgName; } else if (Files.isDirectory(sourceRootPath)) { if (Files.isDirectory(resolvedFullPath) && !RepoUtils.hasProjectRepo(sourceRootPath)) { throw LauncherUtils.createLauncherException("you are trying to build a module that is not inside " + "a project. 
Run `ballerina init` from " + sourceRootPath + " to initialize it as a " + "project and then build the module."); } if (Files.isRegularFile(resolvedFullPath) && !sourcePath.toString().endsWith(BLANG_SRC_FILE_SUFFIX)) { throw LauncherUtils.createLauncherException("only modules and " + BLANG_SRC_FILE_SUFFIX + " " + "files can be used with the 'ballerina build' " + "command."); } if (Files.exists(resolvedFullPath)) { if (Files.isRegularFile(resolvedFullPath) && !sourcePath.toString() .endsWith(BLANG_SRC_FILE_SUFFIX)) { throw LauncherUtils.createLauncherException("only modules and " + BLANG_SRC_FILE_SUFFIX + " " + "files can be used with the 'ballerina build' " + "command."); } } else { throw LauncherUtils.createLauncherException("ballerina source does not exist '" + sourcePath + "'"); } Path parentPath = sourcePath.getParent(); if (Files.isRegularFile(resolvedFullPath) && sourcePath.toString().endsWith(BLANG_SRC_FILE_SUFFIX) && parentPath != null) { throw LauncherUtils.createLauncherException("you are trying to build a ballerina file inside a " + "module within a project. Try running " + "'ballerina build <module-name>'"); } } else { throw LauncherUtils.createLauncherException("invalid ballerina source path, it should either be a " + "directory or a file with a \'" + BLangConstants.BLANG_SRC_FILE_SUFFIX + "\' extension"); } LauncherUtils.loadConfigurations(sourceRootPath, configFilePath); if (jvmTarget || JVM_TARGET.equals(System.getProperty(BALLERINA_TARGET))) { BuilderUtils.compileAndWriteJar(sourceRootPath, pkgName, targetFileName, buildCompiledPkg, offline, lockEnabled, skiptests, experimentalFlag, dumpBIR); } else { BuilderUtils.compileWithTestsAndWrite(sourceRootPath, pkgName, targetFileName, buildCompiledPkg, offline, lockEnabled, skiptests, experimentalFlag, siddhiRuntimeFlag); } } Runtime.getRuntime().exit(0); }
if (jvmTarget || JVM_TARGET.equals(System.getProperty(BALLERINA_TARGET))) {
public void execute() { if (helpFlag) { String commandUsageInfo = BLauncherCmd.getCommandUsageInfo(BUILD_COMMAND); outStream.println(commandUsageInfo); return; } if (argList != null && argList.size() > 1) { throw LauncherUtils.createUsageExceptionWithHelp("too many arguments"); } Path sourceRootPath = Paths.get(System.getProperty(USER_DIR)); if (nativeBinary) { genNativeBinary(sourceRootPath, argList); } else if (argList == null || argList.size() == 0) { BuilderUtils.compileWithTestsAndWrite(sourceRootPath, offline, lockEnabled, skiptests, experimentalFlag, siddhiRuntimeFlag); } else { String targetFileName; String pkgName = argList.get(0); if (pkgName.endsWith("/")) { pkgName = pkgName.substring(0, pkgName.length() - 1); } Path sourcePath = Paths.get(pkgName); pkgName = sourcePath.normalize().toString(); if (outputFileName != null && !outputFileName.isEmpty()) { targetFileName = outputFileName; } else { targetFileName = pkgName; } Path resolvedFullPath = sourceRootPath.resolve(sourcePath); if (Files.isRegularFile(resolvedFullPath) && sourcePath.toString().endsWith(BLangConstants.BLANG_SRC_FILE_SUFFIX) && !RepoUtils.hasProjectRepo(sourceRootPath)) { targetFileName = getTargetFileName(Paths.get(targetFileName)); Path parent = resolvedFullPath.getParent(); sourceRootPath = parent != null ? parent : sourceRootPath; Path resolvedFileName = resolvedFullPath.getFileName(); pkgName = resolvedFileName != null ? resolvedFileName.toString() : pkgName; } else if (Files.isDirectory(sourceRootPath)) { if (Files.isDirectory(resolvedFullPath) && !RepoUtils.hasProjectRepo(sourceRootPath)) { throw LauncherUtils.createLauncherException("you are trying to build a module that is not inside " + "a project. 
Run `ballerina init` from " + sourceRootPath + " to initialize it as a " + "project and then build the module."); } if (Files.isRegularFile(resolvedFullPath) && !sourcePath.toString().endsWith(BLANG_SRC_FILE_SUFFIX)) { throw LauncherUtils.createLauncherException("only modules and " + BLANG_SRC_FILE_SUFFIX + " " + "files can be used with the 'ballerina build' " + "command."); } if (Files.exists(resolvedFullPath)) { if (Files.isRegularFile(resolvedFullPath) && !sourcePath.toString() .endsWith(BLANG_SRC_FILE_SUFFIX)) { throw LauncherUtils.createLauncherException("only modules and " + BLANG_SRC_FILE_SUFFIX + " " + "files can be used with the 'ballerina build' " + "command."); } } else { throw LauncherUtils.createLauncherException("ballerina source does not exist '" + sourcePath + "'"); } Path parentPath = sourcePath.getParent(); if (Files.isRegularFile(resolvedFullPath) && sourcePath.toString().endsWith(BLANG_SRC_FILE_SUFFIX) && parentPath != null) { throw LauncherUtils.createLauncherException("you are trying to build a ballerina file inside a " + "module within a project. Try running " + "'ballerina build <module-name>'"); } } else { throw LauncherUtils.createLauncherException("invalid ballerina source path, it should either be a " + "directory or a file with a \'" + BLangConstants.BLANG_SRC_FILE_SUFFIX + "\' extension"); } LauncherUtils.loadConfigurations(sourceRootPath, configFilePath); if (jvmTarget || JVM_TARGET.equals(System.getProperty(BALLERINA_TARGET))) { BuilderUtils.compileAndWriteJar(sourceRootPath, pkgName, targetFileName, buildCompiledPkg, offline, lockEnabled, skiptests, experimentalFlag, dumpBIR); } else { BuilderUtils.compileWithTestsAndWrite(sourceRootPath, pkgName, targetFileName, buildCompiledPkg, offline, lockEnabled, skiptests, experimentalFlag, siddhiRuntimeFlag); } } Runtime.getRuntime().exit(0); }
class BuildCommand implements BLauncherCmd { private static final String USER_DIR = "user.dir"; private static PrintStream outStream = System.err; @CommandLine.Option(names = {"-c"}, description = "build a compiled module") private boolean buildCompiledPkg; @CommandLine.Option(names = {"-o"}, description = "write output to the given file") private String outputFileName; @CommandLine.Option(names = {"--offline"}) private boolean offline; @CommandLine.Option(names = {"--lockEnabled"}) private boolean lockEnabled; @CommandLine.Option(names = {"--skiptests"}) private boolean skiptests; @CommandLine.Parameters private List<String> argList; @CommandLine.Option(names = {"--native"}, hidden = true, description = "compile Ballerina program to a native binary") private boolean nativeBinary; @CommandLine.Option(names = "--dump-bir", hidden = true) private boolean dumpBIR; @CommandLine.Option(names = "--dump-llvm-ir", hidden = true) private boolean dumpLLVMIR; @CommandLine.Option(names = {"--jvmTarget"}, hidden = true, description = "compile Ballerina program to a jvm class") private boolean jvmTarget; @CommandLine.Option(names = {"--help", "-h"}, hidden = true) private boolean helpFlag; @CommandLine.Option(names = "--experimental", description = "enable experimental language features") private boolean experimentalFlag; @CommandLine.Option(names = {"--config"}, description = "path to the configuration file") private String configFilePath; @CommandLine.Option(names = "--siddhiruntime", description = "enable siddhi runtime for stream processing") private boolean siddhiRuntimeFlag; /** * Get the target file path for a single bal file. 
* * @param targetPath target path given * @return actual target path */ private String getTargetFileName(Path targetPath) { if (outputFileName == null && targetPath.getParent() != null) { Path targetFileName = targetPath.getFileName(); if (targetFileName != null) { return targetFileName.toString(); } } return targetPath.toString(); } @Override public String getName() { return BUILD_COMMAND; } @Override public void printLongDesc(StringBuilder out) { out.append("Compiles Ballerina sources and writes the output to a file. \n"); out.append("\n"); out.append("By default, output filename is the last part of module name \n"); out.append("or the filename (minus the extension) with the extension \".balx\". \n"); out.append("\n"); out.append("If the output file is specified with the -o flag, the output \n"); out.append("will be written to that file. \n"); } @Override public void printUsage(StringBuilder out) { out.append(" ballerina build <balfile | module-name> [-o output] \n"); } @Override public void setParentCmdParser(CommandLine parentCmdParser) { } private void genNativeBinary(Path projectDirPath, List<String> argList) { throw LauncherUtils.createLauncherException("llvm native generation is not supported"); } }
class BuildCommand implements BLauncherCmd { private static final String USER_DIR = "user.dir"; private static PrintStream outStream = System.err; @CommandLine.Option(names = {"-c"}, description = "build a compiled module") private boolean buildCompiledPkg; @CommandLine.Option(names = {"-o"}, description = "write output to the given file") private String outputFileName; @CommandLine.Option(names = {"--offline"}) private boolean offline; @CommandLine.Option(names = {"--lockEnabled"}) private boolean lockEnabled; @CommandLine.Option(names = {"--skiptests"}) private boolean skiptests; @CommandLine.Parameters private List<String> argList; @CommandLine.Option(names = {"--native"}, hidden = true, description = "compile Ballerina program to a native binary") private boolean nativeBinary; @CommandLine.Option(names = "--dump-bir", hidden = true) private boolean dumpBIR; @CommandLine.Option(names = "--dump-llvm-ir", hidden = true) private boolean dumpLLVMIR; @CommandLine.Option(names = {"--jvmTarget"}, hidden = true, description = "compile Ballerina program to a jvm class") private boolean jvmTarget; @CommandLine.Option(names = {"--help", "-h"}, hidden = true) private boolean helpFlag; @CommandLine.Option(names = "--experimental", description = "enable experimental language features") private boolean experimentalFlag; @CommandLine.Option(names = {"--config"}, description = "path to the configuration file") private String configFilePath; @CommandLine.Option(names = "--siddhiruntime", description = "enable siddhi runtime for stream processing") private boolean siddhiRuntimeFlag; /** * Get the target file path for a single bal file. 
* * @param targetPath target path given * @return actual target path */ private String getTargetFileName(Path targetPath) { if (outputFileName == null && targetPath.getParent() != null) { Path targetFileName = targetPath.getFileName(); if (targetFileName != null) { return targetFileName.toString(); } } return targetPath.toString(); } @Override public String getName() { return BUILD_COMMAND; } @Override public void printLongDesc(StringBuilder out) { out.append("Compiles Ballerina sources and writes the output to a file. \n"); out.append("\n"); out.append("By default, output filename is the last part of module name \n"); out.append("or the filename (minus the extension) with the extension \".balx\". \n"); out.append("\n"); out.append("If the output file is specified with the -o flag, the output \n"); out.append("will be written to that file. \n"); } @Override public void printUsage(StringBuilder out) { out.append(" ballerina build <balfile | module-name> [-o output] \n"); } @Override public void setParentCmdParser(CommandLine parentCmdParser) { } private void genNativeBinary(Path projectDirPath, List<String> argList) { throw LauncherUtils.createLauncherException("llvm native generation is not supported"); } }
Why do you close again if the session is already closed? Shouldn't the condition be negation?
public static void closeIfAnonymousSession(BMap<String, BValue> obj) throws ActiveMQException { boolean anonymousSession = ((BBoolean) obj.get("anonymousSession")).booleanValue(); if (anonymousSession) { ClientSession session = ArtemisUtils.getClientSessionFromBMap(obj); if (session.isClosed()) { session.close(); } } }
if (session.isClosed()) {
public static void closeIfAnonymousSession(BMap<String, BValue> obj) throws ActiveMQException { boolean anonymousSession = ((BBoolean) obj.get("anonymousSession")).booleanValue(); if (anonymousSession) { ClientSession session = ArtemisUtils.getClientSessionFromBMap(obj); if (!session.isClosed()) { session.close(); } } }
class ArtemisUtils { private static final Logger LOGGER = LoggerFactory.getLogger(ArtemisUtils.class); public static void throwBallerinaException(String message, Context context, Throwable throwable) { LOGGER.error(message, throwable); throw new BallerinaException(message, throwable, context); } /** * Get error struct. * * @param context Represent ballerina context * @param errMsg Error message * @return Error struct */ public static BError getError(Context context, String errMsg) { BMap<String, BValue> artemisErrorRecord = createArtemisErrorRecord(context); artemisErrorRecord.put(ArtemisConstants.ARTEMIS_ERROR_MESSAGE, new BString(errMsg)); return BLangVMErrors.createError(context, true, BTypes.typeError, ArtemisConstants.ARTEMIS_ERROR_CODE, artemisErrorRecord); } private static BMap<String, BValue> createArtemisErrorRecord(Context context) { return BLangConnectorSPIUtil.createBStruct(context, ArtemisConstants.PROTOCOL_PACKAGE_ARTEMIS, ArtemisConstants.ARTEMIS_ERROR_RECORD); } /** * Get error struct from throwable. * * @param context Represent ballerina context * @param throwable Throwable representing the error. * @return Error struct */ public static BError getError(Context context, Throwable throwable) { if (throwable.getMessage() == null) { return getError(context, "Artemis connector error"); } else { return getError(context, throwable.getMessage()); } } public static int getIntFromConfig(BMap<String, BValue> config, String key, Logger logger) { return getIntFromLong(((BInteger) config.get(key)).intValue(), key, logger); } public static int getIntFromLong(long longVal, String key, Logger logger) { if (longVal <= 0) { return -1; } try { return Math.toIntExact(longVal); } catch (ArithmeticException e) { logger.warn("The value set for {} needs to be less than {}. 
The {} value is set to {}", key, Integer.MAX_VALUE, key, Integer.MAX_VALUE); return Integer.MAX_VALUE; } } public static RoutingType getRoutingTypeFromConfig(String type) { if ("MULTICAST".equals(type)) { return RoutingType.MULTICAST; } return RoutingType.ANYCAST; } public static BValue getBValFromObj(Object obj, Context context) { if (obj instanceof String) { return new BString((String) obj); } else if (obj instanceof Integer) { return new BInteger((int) obj); } else if (obj instanceof Long) { return new BInteger((long) obj); } else if (obj instanceof Short) { return new BInteger((short) obj); } else if (obj instanceof Float) { return new BFloat((float) obj); } else if (obj instanceof Double) { return new BFloat((double) obj); } else if (obj instanceof Boolean) { return new BBoolean((boolean) obj); } else if (obj instanceof Byte) { return new BByte((byte) obj); } else if (obj instanceof byte[]) { return new BValueArray((byte[]) obj); } else { return ArtemisUtils.getError(context, new BallerinaException("Unsupported type")); } } public static RoutingType getRoutingTypeFromString(String routingType) { return ArtemisConstants.ANYCAST.equals(routingType) ? RoutingType.ANYCAST : RoutingType.MULTICAST; } public static ClientSession getClientSessionFromBMap(BMap<String, BValue> obj) { @SuppressWarnings(ArtemisConstants.UNCHECKED) BMap<String, BValue> sessionObj = (BMap<String, BValue>) obj.get("session"); return (ClientSession) sessionObj.getNativeData(ArtemisConstants.ARTEMIS_SESSION); } private ArtemisUtils() { } }
class ArtemisUtils { /** * Util function to throw a {@link BallerinaException}. * * @param message the error message * @param context the Ballerina context * @param exception the exception to be propagated * @param logger the logger to log errors */ public static void throwBallerinaException(String message, Context context, Exception exception, Logger logger) { logger.error(message, exception); throw new BallerinaException(message, exception, context); } /** * Get error struct. * * @param context Represent ballerina context * @param errMsg Error message * @return Error struct */ public static BError getError(Context context, String errMsg) { BMap<String, BValue> artemisErrorRecord = createArtemisErrorRecord(context); artemisErrorRecord.put(ArtemisConstants.ARTEMIS_ERROR_MESSAGE, new BString(errMsg)); return BLangVMErrors.createError(context, true, BTypes.typeError, ArtemisConstants.ARTEMIS_ERROR_CODE, artemisErrorRecord); } private static BMap<String, BValue> createArtemisErrorRecord(Context context) { return BLangConnectorSPIUtil.createBStruct(context, ArtemisConstants.PROTOCOL_PACKAGE_ARTEMIS, ArtemisConstants.ARTEMIS_ERROR_RECORD); } /** * Get error struct from throwable. * * @param context Represent ballerina context * @param exception Throwable representing the error. * @return Error struct */ public static BError getError(Context context, Exception exception) { if (exception.getMessage() == null) { return getError(context, "Artemis connector error"); } else { return getError(context, exception.getMessage()); } } /** * Gets an int from the {@link BMap} config. * * @param config the BMap config * @param key the key that has an integer value * @param logger the logger to log errors * @return the relevant int value from the config */ public static int getIntFromConfig(BMap<String, BValue> config, String key, Logger logger) { return getIntFromLong(((BInteger) config.get(key)).intValue(), key, logger); } /** * Gets an integer from a long value. 
Handles errors appropriately. * * @param longVal the long value. * @param name the name of the long value: useful for logging the error. * @param logger the logger to log errors * @return the int value from the given long value */ public static int getIntFromLong(long longVal, String name, Logger logger) { if (longVal <= 0) { return -1; } try { return Math.toIntExact(longVal); } catch (ArithmeticException e) { logger.warn("The value set for {} needs to be less than {}. The {} value is set to {}", name, Integer.MAX_VALUE, name, Integer.MAX_VALUE); return Integer.MAX_VALUE; } } /** * Get the relevant BValure for an Object. * * @param obj the Object * @param context the Ballerina context to to be used in case of errors * @return the relevant BValue for the object or error */ public static BValue getBValueFromObj(Object obj, Context context) { if (obj instanceof String) { return new BString((String) obj); } else if (obj instanceof SimpleString) { return new BString(((SimpleString) obj).toString()); } else if (obj instanceof Integer) { return new BInteger((int) obj); } else if (obj instanceof Long) { return new BInteger((long) obj); } else if (obj instanceof Short) { return new BInteger((short) obj); } else if (obj instanceof Float) { return new BFloat((float) obj); } else if (obj instanceof Double) { return new BFloat((double) obj); } else if (obj instanceof Boolean) { return new BBoolean((boolean) obj); } else if (obj instanceof Byte) { return new BByte((byte) obj); } else if (obj instanceof byte[]) { return new BValueArray((byte[]) obj); } else { return ArtemisUtils.getError(context, "Unsupported type"); } } /** * Gets the {@link RoutingType} from the String type. * * @param routingType the string routing type * @return the relevant {@link RoutingType} */ public static RoutingType getRoutingTypeFromString(String routingType) { return ArtemisConstants.MULTICAST.equals(routingType) ? 
RoutingType.ANYCAST : RoutingType.MULTICAST; } /** * Get the natively stored {@link ClientSession} from the BMap. * * @param obj the Ballerina object as a BMap * @return the natively stored {@link ClientSession} */ public static ClientSession getClientSessionFromBMap(BMap<String, BValue> obj) { @SuppressWarnings(ArtemisConstants.UNCHECKED) BMap<String, BValue> sessionObj = (BMap<String, BValue>) obj.get("session"); return (ClientSession) sessionObj.getNativeData(ArtemisConstants.ARTEMIS_SESSION); } /** * Close the session if it has been created implicitly identified by the anonymousSession field in the Ballerina * object. * * @param obj the Ballerina object as a BMap * @throws ActiveMQException on session closure failure */ private ArtemisUtils() { } }
Seems `tryLock`'s parameter `lockName` is still handled as `schemaName` in `ShardingSphereDistributeGlobalLock.innerTryLock`. `LockContext` is just designed for schema lock for now. Could we use `PipelineSimpleLock` to implement it?
private void prepareTarget(final JobConfiguration jobConfig, final PipelineDataSourceManager dataSourceManager) { DataSourcePreparer dataSourcePreparer = EnvironmentCheckerFactory.getDataSourcePreparer(jobConfig.getHandleConfig().getTargetDatabaseType()); if (null == dataSourcePreparer) { log.info("dataSourcePreparer null, ignore prepare target"); return; } LockContext lockContext = PipelineContext.getContextManager().getInstanceContext().getLockContext(); ShardingSphereLock lock = lockContext.getOrCreateSchemaLock(jobConfig.getWorkflowConfig().getSchemaName()); boolean skipPrepare = !lock.tryLock("prepareTargetTablesLock", 100); if (skipPrepare) { while (lock.isLocked("prepareTargetTablesLock")) { try { TimeUnit.SECONDS.sleep(1); } catch (InterruptedException e) { lock.releaseLock("prepareTargetTablesLock"); } } lock.releaseLock("prepareTargetTablesLock"); return; } try { JobDataNodeLine tablesFirstDataNodes = JobDataNodeLine.unmarshal(jobConfig.getHandleConfig().getTablesFirstDataNodes()); PrepareTargetTablesParameter prepareTargetTablesParameter = new PrepareTargetTablesParameter(tablesFirstDataNodes, jobConfig.getPipelineConfig(), dataSourceManager); dataSourcePreparer.prepareTargetTables(prepareTargetTablesParameter); } finally { lock.releaseLock("prepareTargetTablesLock"); } }
boolean skipPrepare = !lock.tryLock("prepareTargetTablesLock", 100);
private void prepareTarget(final JobConfiguration jobConfig, final PipelineDataSourceManager dataSourceManager) { DataSourcePreparer dataSourcePreparer = EnvironmentCheckerFactory.getDataSourcePreparer(jobConfig.getHandleConfig().getTargetDatabaseType()); if (null == dataSourcePreparer) { log.info("dataSourcePreparer null, ignore prepare target"); return; } final PipelineSimpleLock lock = PipelineSimpleLock.getInstance(); boolean skipPrepare = !lock.tryLock(getPrepareLockName(jobConfig.getHandleConfig().getJobId()), 100); if (skipPrepare) { int loopCount = 0; int maxLoopCount = 30; while (loopCount < maxLoopCount) { try { TimeUnit.SECONDS.sleep(1); } catch (InterruptedException e) { lock.releaseLock(getPrepareLockName(jobConfig.getHandleConfig().getJobId())); } if (lock.tryLock(getPrepareLockName(jobConfig.getHandleConfig().getJobId()), 100)) { lock.releaseLock(getPrepareLockName(jobConfig.getHandleConfig().getJobId())); return; } loopCount++; } } try { JobDataNodeLine tablesFirstDataNodes = JobDataNodeLine.unmarshal(jobConfig.getHandleConfig().getTablesFirstDataNodes()); PrepareTargetTablesParameter prepareTargetTablesParameter = new PrepareTargetTablesParameter(tablesFirstDataNodes, jobConfig.getPipelineConfig(), dataSourceManager); dataSourcePreparer.prepareTargetTables(prepareTargetTablesParameter); } finally { lock.releaseLock(getPrepareLockName(jobConfig.getHandleConfig().getJobId())); } }
class RuleAlteredJobPreparer { static { ShardingSphereServiceLoader.register(DataSourceChecker.class); } private final InventoryTaskSplitter inventoryTaskSplitter = new InventoryTaskSplitter(); /** * Do prepare work for scaling job. * * @param jobContext job context */ public void prepare(final RuleAlteredJobContext jobContext) { PipelineDataSourceManager dataSourceManager = jobContext.getDataSourceManager(); prepareTarget(jobContext.getJobConfig(), dataSourceManager); initAndCheckDataSource(jobContext); try { initIncrementalTasks(jobContext); initInventoryTasks(jobContext); log.info("prepare, jobId={}, shardingItem={}, inventoryTasks={}, incrementalTasks={}", jobContext.getJobId(), jobContext.getShardingItem(), jobContext.getInventoryTasks(), jobContext.getIncrementalTasks()); } catch (final SQLException ex) { log.error("Scaling job preparing failed, jobId={}", jobContext.getJobId()); throw new PipelineJobPrepareFailedException("Scaling job preparing failed, jobId=" + jobContext.getJobId(), ex); } } private void initAndCheckDataSource(final RuleAlteredJobContext jobContext) { PipelineDataSourceManager dataSourceManager = jobContext.getDataSourceManager(); TaskConfiguration taskConfig = jobContext.getTaskConfig(); PipelineDataSourceWrapper sourceDataSource = dataSourceManager.getDataSource(taskConfig.getDumperConfig().getDataSourceConfig()); PipelineDataSourceWrapper targetDataSource = dataSourceManager.getDataSource(taskConfig.getImporterConfig().getDataSourceConfig()); checkSourceDataSource(jobContext, sourceDataSource); JobProgress initProgress = jobContext.getInitProgress(); if (null == initProgress || initProgress.getStatus() == JobStatus.PREPARING_FAILURE) { checkTargetDataSource(jobContext, targetDataSource); } } private void checkSourceDataSource(final RuleAlteredJobContext jobContext, final PipelineDataSourceWrapper sourceDataSource) { DataSourceChecker dataSourceChecker = TypedSPIRegistry.getRegisteredService(DataSourceChecker.class, 
jobContext.getJobConfig().getHandleConfig().getSourceDatabaseType()); Collection<PipelineDataSourceWrapper> sourceDataSources = Collections.singleton(sourceDataSource); dataSourceChecker.checkConnection(sourceDataSources); dataSourceChecker.checkPrivilege(sourceDataSources); dataSourceChecker.checkVariable(sourceDataSources); } private void checkTargetDataSource(final RuleAlteredJobContext jobContext, final PipelineDataSourceWrapper targetDataSource) { DataSourceChecker dataSourceChecker = TypedSPIRegistry.getRegisteredService(DataSourceChecker.class, jobContext.getJobConfig().getHandleConfig().getTargetDatabaseType()); Collection<PipelineDataSourceWrapper> targetDataSources = Collections.singletonList(targetDataSource); dataSourceChecker.checkConnection(targetDataSources); dataSourceChecker.checkTargetTable(targetDataSources, jobContext.getTaskConfig().getImporterConfig().getShardingColumnsMap().keySet()); } private void initInventoryTasks(final RuleAlteredJobContext jobContext) { List<InventoryTask> allInventoryTasks = inventoryTaskSplitter.splitInventoryData(jobContext); jobContext.getInventoryTasks().addAll(allInventoryTasks); } private void initIncrementalTasks(final RuleAlteredJobContext jobContext) throws SQLException { PipelineChannelFactory pipelineChannelFactory = jobContext.getRuleAlteredContext().getPipelineChannelFactory(); ExecuteEngine incrementalDumperExecuteEngine = jobContext.getRuleAlteredContext().getIncrementalDumperExecuteEngine(); TaskConfiguration taskConfig = jobContext.getTaskConfig(); PipelineDataSourceManager dataSourceManager = jobContext.getDataSourceManager(); taskConfig.getDumperConfig().setPosition(getIncrementalPosition(jobContext, taskConfig, dataSourceManager)); PipelineTableMetaDataLoader sourceMetaDataLoader = jobContext.getSourceMetaDataLoader(); IncrementalTask incrementalTask = new IncrementalTask(taskConfig.getHandleConfig().getConcurrency(), taskConfig.getDumperConfig(), taskConfig.getImporterConfig(), 
pipelineChannelFactory, dataSourceManager, sourceMetaDataLoader, incrementalDumperExecuteEngine); jobContext.getIncrementalTasks().add(incrementalTask); } private IngestPosition<?> getIncrementalPosition( final RuleAlteredJobContext jobContext, final TaskConfiguration taskConfig, final PipelineDataSourceManager dataSourceManager) throws SQLException { if (null != jobContext.getInitProgress()) { Optional<IngestPosition<?>> positionOptional = jobContext.getInitProgress().getIncrementalPosition(taskConfig.getDumperConfig().getDataSourceName()); if (positionOptional.isPresent()) { return positionOptional.get(); } } String databaseType = taskConfig.getHandleConfig().getSourceDatabaseType(); DataSource dataSource = dataSourceManager.getDataSource(taskConfig.getDumperConfig().getDataSourceConfig()); return PositionInitializerFactory.getPositionInitializer(databaseType).init(dataSource); } /** * Do cleanup work for scaling job. * * @param jobContext job context */ public void cleanup(final RuleAlteredJobContext jobContext) { PipelineDataSourceManager dataSourceManager = jobContext.getDataSourceManager(); try { TaskConfiguration taskConfig = jobContext.getTaskConfig(); PositionInitializer positionInitializer = PositionInitializerFactory.getPositionInitializer(taskConfig.getHandleConfig().getSourceDatabaseType()); positionInitializer.destroy(dataSourceManager.getDataSource(taskConfig.getDumperConfig().getDataSourceConfig())); } catch (final SQLException ex) { log.warn("Scaling job destroying failed", ex); } } }
class RuleAlteredJobPreparer { static { ShardingSphereServiceLoader.register(DataSourceChecker.class); } private final InventoryTaskSplitter inventoryTaskSplitter = new InventoryTaskSplitter(); /** * Do prepare work for scaling job. * * @param jobContext job context */ public void prepare(final RuleAlteredJobContext jobContext) { PipelineDataSourceManager dataSourceManager = jobContext.getDataSourceManager(); prepareTarget(jobContext.getJobConfig(), dataSourceManager); initAndCheckDataSource(jobContext); try { initIncrementalTasks(jobContext); initInventoryTasks(jobContext); log.info("prepare, jobId={}, shardingItem={}, inventoryTasks={}, incrementalTasks={}", jobContext.getJobId(), jobContext.getShardingItem(), jobContext.getInventoryTasks(), jobContext.getIncrementalTasks()); } catch (final SQLException ex) { log.error("Scaling job preparing failed, jobId={}", jobContext.getJobId()); throw new PipelineJobPrepareFailedException("Scaling job preparing failed, jobId=" + jobContext.getJobId(), ex); } } private String getPrepareLockName(final String jobId) { return "prepare-" + jobId; } private void initAndCheckDataSource(final RuleAlteredJobContext jobContext) { PipelineDataSourceManager dataSourceManager = jobContext.getDataSourceManager(); TaskConfiguration taskConfig = jobContext.getTaskConfig(); PipelineDataSourceWrapper sourceDataSource = dataSourceManager.getDataSource(taskConfig.getDumperConfig().getDataSourceConfig()); PipelineDataSourceWrapper targetDataSource = dataSourceManager.getDataSource(taskConfig.getImporterConfig().getDataSourceConfig()); checkSourceDataSource(jobContext, sourceDataSource); JobProgress initProgress = jobContext.getInitProgress(); if (null == initProgress || initProgress.getStatus() == JobStatus.PREPARING_FAILURE) { checkTargetDataSource(jobContext, targetDataSource); } } private void checkSourceDataSource(final RuleAlteredJobContext jobContext, final PipelineDataSourceWrapper sourceDataSource) { DataSourceChecker dataSourceChecker = 
TypedSPIRegistry.getRegisteredService(DataSourceChecker.class, jobContext.getJobConfig().getHandleConfig().getSourceDatabaseType()); Collection<PipelineDataSourceWrapper> sourceDataSources = Collections.singleton(sourceDataSource); dataSourceChecker.checkConnection(sourceDataSources); dataSourceChecker.checkPrivilege(sourceDataSources); dataSourceChecker.checkVariable(sourceDataSources); } private void checkTargetDataSource(final RuleAlteredJobContext jobContext, final PipelineDataSourceWrapper targetDataSource) { DataSourceChecker dataSourceChecker = TypedSPIRegistry.getRegisteredService(DataSourceChecker.class, jobContext.getJobConfig().getHandleConfig().getTargetDatabaseType()); Collection<PipelineDataSourceWrapper> targetDataSources = Collections.singletonList(targetDataSource); dataSourceChecker.checkConnection(targetDataSources); dataSourceChecker.checkTargetTable(targetDataSources, jobContext.getTaskConfig().getImporterConfig().getShardingColumnsMap().keySet()); } private void initInventoryTasks(final RuleAlteredJobContext jobContext) { List<InventoryTask> allInventoryTasks = inventoryTaskSplitter.splitInventoryData(jobContext); jobContext.getInventoryTasks().addAll(allInventoryTasks); } private void initIncrementalTasks(final RuleAlteredJobContext jobContext) throws SQLException { PipelineChannelFactory pipelineChannelFactory = jobContext.getRuleAlteredContext().getPipelineChannelFactory(); ExecuteEngine incrementalDumperExecuteEngine = jobContext.getRuleAlteredContext().getIncrementalDumperExecuteEngine(); TaskConfiguration taskConfig = jobContext.getTaskConfig(); PipelineDataSourceManager dataSourceManager = jobContext.getDataSourceManager(); taskConfig.getDumperConfig().setPosition(getIncrementalPosition(jobContext, taskConfig, dataSourceManager)); PipelineTableMetaDataLoader sourceMetaDataLoader = jobContext.getSourceMetaDataLoader(); IncrementalTask incrementalTask = new IncrementalTask(taskConfig.getHandleConfig().getConcurrency(), 
taskConfig.getDumperConfig(), taskConfig.getImporterConfig(), pipelineChannelFactory, dataSourceManager, sourceMetaDataLoader, incrementalDumperExecuteEngine); jobContext.getIncrementalTasks().add(incrementalTask); } private IngestPosition<?> getIncrementalPosition( final RuleAlteredJobContext jobContext, final TaskConfiguration taskConfig, final PipelineDataSourceManager dataSourceManager) throws SQLException { if (null != jobContext.getInitProgress()) { Optional<IngestPosition<?>> positionOptional = jobContext.getInitProgress().getIncrementalPosition(taskConfig.getDumperConfig().getDataSourceName()); if (positionOptional.isPresent()) { return positionOptional.get(); } } String databaseType = taskConfig.getHandleConfig().getSourceDatabaseType(); DataSource dataSource = dataSourceManager.getDataSource(taskConfig.getDumperConfig().getDataSourceConfig()); return PositionInitializerFactory.getPositionInitializer(databaseType).init(dataSource); } /** * Do cleanup work for scaling job. * * @param jobContext job context */ public void cleanup(final RuleAlteredJobContext jobContext) { PipelineDataSourceManager dataSourceManager = jobContext.getDataSourceManager(); try { TaskConfiguration taskConfig = jobContext.getTaskConfig(); PositionInitializer positionInitializer = PositionInitializerFactory.getPositionInitializer(taskConfig.getHandleConfig().getSourceDatabaseType()); positionInitializer.destroy(dataSourceManager.getDataSource(taskConfig.getDumperConfig().getDataSourceConfig())); } catch (final SQLException ex) { log.warn("Scaling job destroying failed", ex); } } }
Please remove this useless blank line.
public void assertNewInstanceForSQLServer() { SQLStatement statement = new SQLServerDropSchemaStatement(); Optional<SingleTableMetadataValidator> actual = SingleTableMetadataValidatorFactory.newInstance(statement); assertTrue(actual.isPresent()); }
public void assertNewInstanceForSQLServer() { SQLStatement statement = new SQLServerDropSchemaStatement(); Optional<SingleTableMetadataValidator> actual = SingleTableMetadataValidatorFactory.newInstance(statement); assertTrue(actual.isPresent()); }
class SingleTableMetadataValidatorFactoryTest { @Test @SuppressWarnings("rawtypes") public void assertNewInstanceForPostgreSQL() { SQLStatement statement = new PostgreSQLDropSchemaStatement(); Optional<SingleTableMetadataValidator> actual = SingleTableMetadataValidatorFactory.newInstance(statement); assertTrue(actual.isPresent()); } @Test @SuppressWarnings("rawtypes") @Test @SuppressWarnings("rawtypes") public void assertNewInstanceForOpenGauss() { SQLStatement statement = new OpenGaussDropSchemaStatement(); Optional<SingleTableMetadataValidator> actual = SingleTableMetadataValidatorFactory.newInstance(statement); assertTrue(actual.isPresent()); } }
class SingleTableMetadataValidatorFactoryTest { @Test @SuppressWarnings("rawtypes") public void assertNewInstanceForPostgreSQL() { SQLStatement statement = new PostgreSQLDropSchemaStatement(); Optional<SingleTableMetadataValidator> actual = SingleTableMetadataValidatorFactory.newInstance(statement); assertTrue(actual.isPresent()); } @Test @SuppressWarnings("rawtypes") @Test @SuppressWarnings("rawtypes") public void assertNewInstanceForOpenGauss() { SQLStatement statement = new OpenGaussDropSchemaStatement(); Optional<SingleTableMetadataValidator> actual = SingleTableMetadataValidatorFactory.newInstance(statement); assertTrue(actual.isPresent()); } }
Do we need a separate param for useSeparateMetadataDb? We could just check if (isRbac) here. Right?
protected void before() throws Throwable { final ChangeStreamTestPipelineOptions options = IOITHelper.readIOTestPipelineOptions(ChangeStreamTestPipelineOptions.class); projectId = Optional.ofNullable(options.getProjectId()) .orElseGet(() -> options.as(GcpOptions.class).getProject()); instanceId = options.getInstanceId(); generateDatabaseIds(options); spanner = SpannerOptions.newBuilder().setProjectId(projectId).setHost(host).build().getService(); databaseAdminClient = spanner.getDatabaseAdminClient(); metadataTableName = generateTableName(METADATA_TABLE_NAME_PREFIX); databases = new ArrayList<>(); recreateDatabase(databaseAdminClient, instanceId, databaseId, isPostgres); databaseClient = spanner.getDatabaseClient(DatabaseId.of(projectId, instanceId, databaseId)); databases.add(databaseId); if (createRbacDatabase) { recreateDatabase(databaseAdminClient, instanceId, rbacDatabaseId, isPostgres); rbacDatabaseClient = spanner.getDatabaseClient(DatabaseId.of(projectId, instanceId, rbacDatabaseId)); databases.add(rbacDatabaseId); } if (useSeparateMetadataDb) { recreateDatabase(databaseAdminClient, instanceId, metadataDatabaseId, isPostgres); } changeStreams = new ArrayList<>(); tables = new ArrayList<>(); }
if (useSeparateMetadataDb) {
protected void before() throws Throwable { final ChangeStreamTestPipelineOptions options = IOITHelper.readIOTestPipelineOptions(ChangeStreamTestPipelineOptions.class); projectId = Optional.ofNullable(options.getProjectId()) .orElseGet(() -> options.as(GcpOptions.class).getProject()); instanceId = options.getInstanceId(); generateDatabaseIds(options); spanner = SpannerOptions.newBuilder().setProjectId(projectId).setHost(host).build().getService(); databaseAdminClient = spanner.getDatabaseAdminClient(); metadataTableName = generateTableName(METADATA_TABLE_NAME_PREFIX); recreateDatabase(databaseAdminClient, instanceId, databaseId, isPostgres); databaseClient = spanner.getDatabaseClient(DatabaseId.of(projectId, instanceId, databaseId)); changeStreams = new ArrayList<>(); tables = new ArrayList<>(); }
class IntegrationTestEnv extends ExternalResource { private static final Logger LOG = LoggerFactory.getLogger(IntegrationTestEnv.class); private static final int TIMEOUT_MINUTES = 10; private static final int MAX_POSTGRES_TABLE_NAME_LENGTH = 63; private static final int MAX_CHANGE_STREAM_NAME_LENGTH = 30; private static final int MAX_DATABASE_NAME_LENGTH = 30; private static final String METADATA_TABLE_NAME_PREFIX = "TestMetadata"; private static final String SINGERS_TABLE_NAME_PREFIX = "Singers"; private static final String CHANGE_STREAM_NAME_PREFIX = "SingersStream"; private static final String DATABASE_ROLE = "test_role"; private List<String> databases; private List<String> changeStreams; private List<String> tables; private String projectId; private String instanceId; private String databaseId; private String rbacDatabaseId; private String metadataDatabaseId; private String metadataTableName; private Spanner spanner; private final String host = "https: private DatabaseAdminClient databaseAdminClient; private DatabaseClient databaseClient; private DatabaseClient rbacDatabaseClient; private boolean isPostgres; public boolean createRbacDatabase; public boolean useSeparateMetadataDb; public IntegrationTestEnv withRbacDatabase() { this.createRbacDatabase = true; return this; } public IntegrationTestEnv withPostgres() { this.isPostgres = true; return this; } public IntegrationTestEnv withSeparateMetadataDb() { this.useSeparateMetadataDb = true; return this; } public static IntegrationTestEnv create() { return new IntegrationTestEnv(); } @Override @Override protected void after() { for (String database : databases) { for (String changeStream : changeStreams) { try { if (this.isPostgres) { databaseAdminClient .updateDatabaseDdl( instanceId, database, Collections.singletonList("DROP CHANGE STREAM \"" + changeStream + "\""), null) .get(TIMEOUT_MINUTES, TimeUnit.MINUTES); } else { databaseAdminClient .updateDatabaseDdl( instanceId, database, 
Collections.singletonList("DROP CHANGE STREAM " + changeStream), null) .get(TIMEOUT_MINUTES, TimeUnit.MINUTES); } } catch (Exception e) { LOG.error("Failed to drop change stream " + changeStream + ". Skipping...", e); } } for (String table : tables) { try { if (this.isPostgres) { databaseAdminClient .updateDatabaseDdl( instanceId, database, Collections.singletonList("DROP TABLE \"" + table + "\""), null) .get(TIMEOUT_MINUTES, TimeUnit.MINUTES); } else { databaseAdminClient .updateDatabaseDdl( instanceId, database, Collections.singletonList("DROP TABLE " + table), null) .get(TIMEOUT_MINUTES, TimeUnit.MINUTES); } } catch (Exception e) { LOG.error("Failed to drop table " + table + ". Skipping...", e); } } try { databaseAdminClient.dropDatabase(instanceId, database); } catch (Exception e) { LOG.error("Failed to drop database " + database + ". Skipping...", e); } } if (useSeparateMetadataDb) { databaseAdminClient.dropDatabase(instanceId, metadataDatabaseId); } spanner.close(); } String createSingersTable() throws InterruptedException, ExecutionException, TimeoutException { final String tableName = generateTableName(SINGERS_TABLE_NAME_PREFIX); LOG.info("Creating table " + tableName); for (String database : databases) { if (this.isPostgres) { databaseAdminClient .updateDatabaseDdl( instanceId, database, Collections.singletonList( "CREATE TABLE \"" + tableName + "\" (" + " \"SingerId\" BIGINT NOT NULL," + " \"FirstName\" text," + " \"LastName\" text," + " \"SingerInfo\" bytea," + " PRIMARY KEY (\"SingerId\")" + ")"), null) .get(TIMEOUT_MINUTES, TimeUnit.MINUTES); } else { databaseAdminClient .updateDatabaseDdl( instanceId, database, Collections.singletonList( "CREATE TABLE " + tableName + " (" + " SingerId INT64 NOT NULL," + " FirstName STRING(1024)," + " LastName STRING(1024)," + " SingerInfo BYTES(MAX)" + " ) PRIMARY KEY (SingerId)"), null) .get(TIMEOUT_MINUTES, TimeUnit.MINUTES); } } tables.add(tableName); return tableName; } String createChangeStreamFor(String 
tableName) throws InterruptedException, ExecutionException, TimeoutException { final String changeStreamName = generateChangeStreamName(); LOG.info("CREATE CHANGE STREAM \"" + changeStreamName + "\" FOR \"" + tableName + "\""); for (String database : databases) { if (this.isPostgres) { databaseAdminClient .updateDatabaseDdl( instanceId, database, Collections.singletonList( "CREATE CHANGE STREAM \"" + changeStreamName + "\" FOR \"" + tableName + "\""), null) .get(TIMEOUT_MINUTES, TimeUnit.MINUTES); } else { databaseAdminClient .updateDatabaseDdl( instanceId, database, Collections.singletonList( "CREATE CHANGE STREAM " + changeStreamName + " FOR " + tableName), null) .get(TIMEOUT_MINUTES, TimeUnit.MINUTES); } } changeStreams.add(changeStreamName); return changeStreamName; } void createRoleAndGrantPrivileges(String table, String changeStream) throws InterruptedException, ExecutionException, TimeoutException { if (this.isPostgres) { LOG.error("Database roles not supported with Postgres dialect."); return; } databaseAdminClient .updateDatabaseDdl( instanceId, rbacDatabaseId, Arrays.asList( "CREATE ROLE " + DATABASE_ROLE, "GRANT INSERT, UPDATE, DELETE ON TABLE " + table + " TO ROLE " + DATABASE_ROLE, "GRANT SELECT ON CHANGE STREAM " + changeStream + " TO ROLE " + DATABASE_ROLE, "GRANT EXECUTE ON TABLE FUNCTION READ_" + changeStream + " TO ROLE " + DATABASE_ROLE), null) .get(TIMEOUT_MINUTES, TimeUnit.MINUTES); return; } String getProjectId() { return projectId; } String getInstanceId() { return instanceId; } String getDatabaseId() { return databaseId; } String getRbacDatabaseId() { return rbacDatabaseId; } String getMetadataDatabaseId() { return metadataDatabaseId; } String getDatabaseRole() { return DATABASE_ROLE; } String getMetadataTableName() { return metadataTableName; } DatabaseClient getDatabaseClient() { return databaseClient; } DatabaseClient getRbacDatabaseClient() { return rbacDatabaseClient; } private void recreateDatabase( DatabaseAdminClient 
databaseAdminClient, String instanceId, String databaseId, boolean isPostgres) throws ExecutionException, InterruptedException, TimeoutException { databaseAdminClient.dropDatabase(instanceId, databaseId); LOG.info("Creating database " + databaseId + ", isPostgres=" + isPostgres); if (isPostgres) { databaseAdminClient .createDatabase( databaseAdminClient .newDatabaseBuilder(DatabaseId.of(this.projectId, instanceId, databaseId)) .setDialect(Dialect.POSTGRESQL) .build(), Collections.emptyList()) .get(TIMEOUT_MINUTES, TimeUnit.MINUTES); } else { databaseAdminClient .createDatabase( databaseAdminClient .newDatabaseBuilder(DatabaseId.of(this.projectId, instanceId, databaseId)) .build(), Collections.emptyList()) .get(TIMEOUT_MINUTES, TimeUnit.MINUTES); } } private String generateTableName(String prefix) { int maxTableNameLength = MAX_POSTGRES_TABLE_NAME_LENGTH; LOG.info("Max table length: " + maxTableNameLength); return prefix + "_" + RandomStringUtils.randomAlphanumeric(maxTableNameLength - 1 - prefix.length()); } private String generateChangeStreamName() { return CHANGE_STREAM_NAME_PREFIX + "_" + RandomStringUtils.randomAlphanumeric( MAX_CHANGE_STREAM_NAME_LENGTH - 1 - CHANGE_STREAM_NAME_PREFIX.length()); } private void generateDatabaseIds(ChangeStreamTestPipelineOptions options) { int prefixLength = Math.max(options.getDatabaseId().length(), options.getMetadataDatabaseId().length()); prefixLength = Math.max(prefixLength, options.getRbacDatabaseId().length()); String suffix = RandomStringUtils.randomAlphanumeric(MAX_DATABASE_NAME_LENGTH - 1 - prefixLength) .toLowerCase(Locale.ROOT); databaseId = options.getDatabaseId() + "_" + suffix; rbacDatabaseId = options.getRbacDatabaseId() + "_" + suffix; metadataDatabaseId = options.getMetadataDatabaseId() + "_" + suffix; } }
class IntegrationTestEnv extends ExternalResource { private static final Logger LOG = LoggerFactory.getLogger(IntegrationTestEnv.class); private static final int TIMEOUT_MINUTES = 10; private static final int MAX_POSTGRES_TABLE_NAME_LENGTH = 63; private static final int MAX_CHANGE_STREAM_NAME_LENGTH = 30; private static final int MAX_DATABASE_NAME_LENGTH = 30; private static final String METADATA_TABLE_NAME_PREFIX = "TestMetadata"; private static final String SINGERS_TABLE_NAME_PREFIX = "Singers"; private static final String CHANGE_STREAM_NAME_PREFIX = "SingersStream"; private static final String DATABASE_ROLE = "test_role"; private List<String> changeStreams; private List<String> tables; private String projectId; private String instanceId; private String databaseId; private String metadataDatabaseId; private String metadataTableName; private Spanner spanner; private final String host = "https: private DatabaseAdminClient databaseAdminClient; private DatabaseClient databaseClient; private boolean isPostgres; public boolean useSeparateMetadataDb; @Override IntegrationTestEnv() { this.isPostgres = false; } IntegrationTestEnv(boolean isPostgres) { this.isPostgres = true; } @Override protected void after() { for (String changeStream : changeStreams) { try { if (this.isPostgres) { databaseAdminClient .updateDatabaseDdl( instanceId, databaseId, Collections.singletonList("DROP CHANGE STREAM \"" + changeStream + "\""), null) .get(TIMEOUT_MINUTES, TimeUnit.MINUTES); } else { databaseAdminClient .updateDatabaseDdl( instanceId, databaseId, Collections.singletonList("DROP CHANGE STREAM " + changeStream), null) .get(TIMEOUT_MINUTES, TimeUnit.MINUTES); } } catch (Exception e) { LOG.error("Failed to drop change stream " + changeStream + ". 
Skipping...", e); } } for (String table : tables) { try { if (this.isPostgres) { databaseAdminClient .updateDatabaseDdl( instanceId, databaseId, Collections.singletonList("DROP TABLE \"" + table + "\""), null) .get(TIMEOUT_MINUTES, TimeUnit.MINUTES); } else { databaseAdminClient .updateDatabaseDdl( instanceId, databaseId, Collections.singletonList("DROP TABLE " + table), null) .get(TIMEOUT_MINUTES, TimeUnit.MINUTES); } } catch (Exception e) { LOG.error("Failed to drop table " + table + ". Skipping...", e); } } try { databaseAdminClient.dropDatabase(instanceId, databaseId); } catch (Exception e) { LOG.error("Failed to drop database " + databaseId + ". Skipping...", e); } if (useSeparateMetadataDb) { databaseAdminClient.dropDatabase(instanceId, metadataDatabaseId); } spanner.close(); } void createMetadataDatabase() throws ExecutionException, InterruptedException, TimeoutException { recreateDatabase(databaseAdminClient, instanceId, metadataDatabaseId, isPostgres); useSeparateMetadataDb = true; } String createSingersTable() throws InterruptedException, ExecutionException, TimeoutException { final String tableName = generateTableName(SINGERS_TABLE_NAME_PREFIX); LOG.info("Creating table " + tableName); if (this.isPostgres) { databaseAdminClient .updateDatabaseDdl( instanceId, databaseId, Collections.singletonList( "CREATE TABLE \"" + tableName + "\" (" + " \"SingerId\" BIGINT NOT NULL," + " \"FirstName\" text," + " \"LastName\" text," + " \"SingerInfo\" bytea," + " PRIMARY KEY (\"SingerId\")" + ")"), null) .get(TIMEOUT_MINUTES, TimeUnit.MINUTES); } else { databaseAdminClient .updateDatabaseDdl( instanceId, databaseId, Collections.singletonList( "CREATE TABLE " + tableName + " (" + " SingerId INT64 NOT NULL," + " FirstName STRING(1024)," + " LastName STRING(1024)," + " SingerInfo BYTES(MAX)" + " ) PRIMARY KEY (SingerId)"), null) .get(TIMEOUT_MINUTES, TimeUnit.MINUTES); } tables.add(tableName); return tableName; } String createChangeStreamFor(String tableName) throws 
InterruptedException, ExecutionException, TimeoutException { final String changeStreamName = generateChangeStreamName(); if (this.isPostgres) { LOG.info("CREATE CHANGE STREAM \"" + changeStreamName + "\" FOR \"" + tableName + "\""); databaseAdminClient .updateDatabaseDdl( instanceId, databaseId, Collections.singletonList( "CREATE CHANGE STREAM \"" + changeStreamName + "\" FOR \"" + tableName + "\""), null) .get(TIMEOUT_MINUTES, TimeUnit.MINUTES); } else { databaseAdminClient .updateDatabaseDdl( instanceId, databaseId, Collections.singletonList( "CREATE CHANGE STREAM " + changeStreamName + " FOR " + tableName), null) .get(TIMEOUT_MINUTES, TimeUnit.MINUTES); } changeStreams.add(changeStreamName); return changeStreamName; } void createRoleAndGrantPrivileges(String table, String changeStream) throws InterruptedException, ExecutionException, TimeoutException { if (this.isPostgres) { LOG.error("Database roles not supported with Postgres dialect."); return; } databaseAdminClient .updateDatabaseDdl( instanceId, databaseId, Arrays.asList( "CREATE ROLE " + DATABASE_ROLE, "GRANT INSERT, UPDATE, DELETE ON TABLE " + table + " TO ROLE " + DATABASE_ROLE, "GRANT SELECT ON CHANGE STREAM " + changeStream + " TO ROLE " + DATABASE_ROLE, "GRANT EXECUTE ON TABLE FUNCTION READ_" + changeStream + " TO ROLE " + DATABASE_ROLE), null) .get(TIMEOUT_MINUTES, TimeUnit.MINUTES); return; } String getProjectId() { return projectId; } String getInstanceId() { return instanceId; } String getDatabaseId() { return databaseId; } String getMetadataDatabaseId() { return metadataDatabaseId; } String getDatabaseRole() { return DATABASE_ROLE; } String getMetadataTableName() { return metadataTableName; } DatabaseClient getDatabaseClient() { return databaseClient; } private void recreateDatabase( DatabaseAdminClient databaseAdminClient, String instanceId, String databaseId, boolean isPostgres) throws ExecutionException, InterruptedException, TimeoutException { databaseAdminClient.dropDatabase(instanceId, 
databaseId); LOG.info("Creating database " + databaseId + ", isPostgres=" + isPostgres); if (isPostgres) { databaseAdminClient .createDatabase( databaseAdminClient .newDatabaseBuilder(DatabaseId.of(this.projectId, instanceId, databaseId)) .setDialect(Dialect.POSTGRESQL) .build(), Collections.emptyList()) .get(TIMEOUT_MINUTES, TimeUnit.MINUTES); } else { databaseAdminClient .createDatabase( databaseAdminClient .newDatabaseBuilder(DatabaseId.of(this.projectId, instanceId, databaseId)) .build(), Collections.emptyList()) .get(TIMEOUT_MINUTES, TimeUnit.MINUTES); } } private String generateTableName(String prefix) { int maxTableNameLength = MAX_POSTGRES_TABLE_NAME_LENGTH; LOG.info("Max table length: " + maxTableNameLength); return prefix + "_" + RandomStringUtils.randomAlphanumeric(maxTableNameLength - 1 - prefix.length()); } private String generateChangeStreamName() { return CHANGE_STREAM_NAME_PREFIX + "_" + RandomStringUtils.randomAlphanumeric( MAX_CHANGE_STREAM_NAME_LENGTH - 1 - CHANGE_STREAM_NAME_PREFIX.length()); } private void generateDatabaseIds(ChangeStreamTestPipelineOptions options) { int prefixLength = Math.max(options.getDatabaseId().length(), options.getMetadataDatabaseId().length()); String suffix = RandomStringUtils.randomAlphanumeric(MAX_DATABASE_NAME_LENGTH - 1 - prefixLength) .toLowerCase(Locale.ROOT); databaseId = options.getDatabaseId() + "_" + suffix; metadataDatabaseId = options.getMetadataDatabaseId() + "_" + suffix; } }
I have created the retryClient as per the offline discussion.
public static void initEndpoint(ObjectValue webSocketClient) { @SuppressWarnings(WebSocketConstants.UNCHECKED) MapValue<String, Object> clientEndpointConfig = (MapValue<String, Object>) webSocketClient.getMapValue( HttpConstants.CLIENT_ENDPOINT_CONFIG); Strand strand = Scheduler.getStrand(); String remoteUrl = webSocketClient.getStringValue(WebSocketConstants.CLIENT_URL_CONFIG); WebSocketService wsService = WebSocketUtil.validateAndCreateWebSocketService(strand, clientEndpointConfig); HttpWsConnectorFactory connectorFactory = HttpUtil.createHttpWsConnectionFactory(); WebSocketClientConnectorConfig clientConnectorConfig = new WebSocketClientConnectorConfig(remoteUrl); String scheme = URI.create(remoteUrl).getScheme(); WebSocketUtil.populateClientConnectorConfig(clientEndpointConfig, clientConnectorConfig, scheme); WebSocketClientConnector clientConnector = connectorFactory.createWsClientConnector(clientConnectorConfig); webSocketClient.addNativeData(WebSocketConstants.CONNECTOR_FACTORY, connectorFactory); webSocketClient.addNativeData(WebSocketConstants.CLIENT_CONNECTOR, clientConnector); if (webSocketClient.getType().getName().equalsIgnoreCase(WebSocketConstants.WEBSOCKET_CLIENT)) { if (WebSocketUtil.hasRetryContext(webSocketClient)) { @SuppressWarnings(WebSocketConstants.UNCHECKED) MapValue<String, Object> retryConfig = (MapValue<String, Object>) clientEndpointConfig.getMapValue( WebSocketConstants.RETRY_CONTEXT); RetryContext retryConnectorConfig = new RetryContext(); populateRetryConnectorConfig(retryConfig, retryConnectorConfig); webSocketClient.addNativeData(WebSocketConstants.RETRY_CONTEXT, retryConnectorConfig); webSocketClient.addNativeData(WebSocketConstants.CLIENT_LISTENER, new RetryConnectorListener( new ClientConnectorListener())); } else { webSocketClient.addNativeData(WebSocketConstants.CLIENT_LISTENER, new ClientConnectorListener()); } } CountDownLatch countDownLatch = new CountDownLatch(1); 
webSocketClient.addNativeData(WebSocketConstants.COUNT_DOWN_LATCH, countDownLatch); WebSocketUtil.establishWebSocketConnection(clientConnector, webSocketClient, wsService); WebSocketUtil.waitForHandshake(countDownLatch); }
new ClientConnectorListener()));
public static void initEndpoint(ObjectValue webSocketClient) { @SuppressWarnings(WebSocketConstants.UNCHECKED) MapValue<String, Object> clientEndpointConfig = (MapValue<String, Object>) webSocketClient.getMapValue( WebSocketConstants.CLIENT_ENDPOINT_CONFIG); Strand strand = Scheduler.getStrand(); String remoteUrl = webSocketClient.getStringValue(WebSocketConstants.CLIENT_URL_CONFIG); WebSocketService wsService = WebSocketUtil.validateAndCreateWebSocketService(strand, clientEndpointConfig); HttpWsConnectorFactory connectorFactory = HttpUtil.createHttpWsConnectionFactory(); WebSocketClientConnectorConfig clientConnectorConfig = new WebSocketClientConnectorConfig(remoteUrl); String scheme = URI.create(remoteUrl).getScheme(); WebSocketUtil.populateClientConnectorConfig(clientEndpointConfig, clientConnectorConfig, scheme); WebSocketClientConnector clientConnector = connectorFactory.createWsClientConnector(clientConnectorConfig); webSocketClient.addNativeData(WebSocketConstants.CONNECTOR_FACTORY, connectorFactory); webSocketClient.addNativeData(WebSocketConstants.CLIENT_CONNECTOR, clientConnector); if (webSocketClient.getNativeData(WebSocketConstants.CLIENT_LISTENER) == null) { webSocketClient.addNativeData(WebSocketConstants.CLIENT_LISTENER, new ClientConnectorListener()); } CountDownLatch countDownLatch = new CountDownLatch(1); webSocketClient.addNativeData(WebSocketConstants.COUNT_DOWN_LATCH, countDownLatch); WebSocketUtil.establishWebSocketConnection(clientConnector, webSocketClient, wsService); WebSocketUtil.waitForHandshake(countDownLatch); }
class InitEndpoint { private static final Logger logger = LoggerFactory.getLogger(InitEndpoint.class); private static final String INTERVAL_IN_MILLIS = "intervalInMillis"; private static final String MAX_WAIT_INTERVAL = "maxWaitIntervalInMillis"; private static final String MAX_COUNT = "maxCount"; private static final String BACK_OF_FACTOR = "backOffFactor"; /** * Populate the retry config. * * @param retryConfig - the retry config * @param retryConnectorConfig - the retry connector config */ private static void populateRetryConnectorConfig(MapValue<String, Object> retryConfig, RetryContext retryConnectorConfig) { retryConnectorConfig.setInterval(WebSocketUtil.getIntValue(retryConfig, INTERVAL_IN_MILLIS, 1000)); retryConnectorConfig.setBackOfFactor(getDoubleValue(retryConfig)); retryConnectorConfig.setMaxInterval(WebSocketUtil.getIntValue(retryConfig, MAX_WAIT_INTERVAL, 30000)); retryConnectorConfig.setMaxAttempts(WebSocketUtil.getIntValue(retryConfig, MAX_COUNT, 0)); } private static Double getDoubleValue(MapValue<String, Object> configs) { double value = Math.toRadians(configs.getFloatValue(BACK_OF_FACTOR)); if (value < 1) { logger.warn("The value set for `backOffFactor` needs to be great than than 1. The `backOffFactor`" + " value is set to {}", 1.0); value = 1.0; } return value; } private InitEndpoint() { } }
class InitEndpoint { private InitEndpoint() { } }
try best to cancel sc job even there is a RPC error
protected void onCancel() { List<Long> rollupIndexList = new ArrayList<Long>(); rollupIndexList.add(rollupIndexId); long tryTimes = 1; while (true) { try { ((CloudInternalCatalog) Env.getCurrentInternalCatalog()) .dropMaterializedIndex(tableId, rollupIndexList, false); for (Map.Entry<Long, Map<Long, Long>> partitionEntry : partitionIdToBaseRollupTabletIdMap.entrySet()) { Long partitionId = partitionEntry.getKey(); Map<Long, Long> rollupTabletIdToBaseTabletId = partitionEntry.getValue(); for (Map.Entry<Long, Long> tabletEntry : rollupTabletIdToBaseTabletId.entrySet()) { Long rollupTabletId = tabletEntry.getKey(); Long baseTabletId = tabletEntry.getValue(); ((CloudInternalCatalog) Env.getCurrentInternalCatalog()) .removeSchemaChangeJob(dbId, tableId, baseIndexId, rollupIndexId, partitionId, baseTabletId, rollupTabletId); } LOG.info("Cancel RollupJob. Remove SchemaChangeJob in ms." + "dbId:{}, tableId:{}, rollupIndexId: {} partitionId:{}. tabletSize:{}", dbId, tableId, rollupIndexId, partitionId, rollupTabletIdToBaseTabletId.size()); } break; } catch (Exception e) { LOG.warn("tryTimes:{}, onCancel exception:", tryTimes, e); } sleepSeveralSeconds(); tryTimes++; } LOG.info("onCancel finished, dbId:{}, tableId:{}, jobId:{}, rollupIndexList:{}", dbId, tableId, jobId, rollupIndexList); }
partitionId, baseTabletId, rollupTabletId);
protected void onCancel() { List<Long> rollupIndexList = new ArrayList<Long>(); rollupIndexList.add(rollupIndexId); long tryTimes = 1; while (true) { try { ((CloudInternalCatalog) Env.getCurrentInternalCatalog()) .dropMaterializedIndex(tableId, rollupIndexList, false); for (Map.Entry<Long, Map<Long, Long>> partitionEntry : partitionIdToBaseRollupTabletIdMap.entrySet()) { Long partitionId = partitionEntry.getKey(); Map<Long, Long> rollupTabletIdToBaseTabletId = partitionEntry.getValue(); for (Map.Entry<Long, Long> tabletEntry : rollupTabletIdToBaseTabletId.entrySet()) { Long rollupTabletId = tabletEntry.getKey(); Long baseTabletId = tabletEntry.getValue(); ((CloudInternalCatalog) Env.getCurrentInternalCatalog()) .removeSchemaChangeJob(dbId, tableId, baseIndexId, rollupIndexId, partitionId, baseTabletId, rollupTabletId); } LOG.info("Cancel RollupJob. Remove SchemaChangeJob in ms." + "dbId:{}, tableId:{}, rollupIndexId: {} partitionId:{}. tabletSize:{}", dbId, tableId, rollupIndexId, partitionId, rollupTabletIdToBaseTabletId.size()); } break; } catch (Exception e) { LOG.warn("tryTimes:{}, onCancel exception:", tryTimes, e); } sleepSeveralSeconds(); tryTimes++; } LOG.info("onCancel finished, dbId:{}, tableId:{}, jobId:{}, rollupIndexList:{}", dbId, tableId, jobId, rollupIndexList); }
class CloudRollupJobV2 extends RollupJobV2 { private static final Logger LOG = LogManager.getLogger(CloudRollupJobV2.class); public static AlterJobV2 buildCloudRollupJobV2(RollupJobV2 job) throws IllegalAccessException, AnalysisException { CloudRollupJobV2 ret = new CloudRollupJobV2(); List<Field> allFields = new ArrayList<>(); Class tmpClass = RollupJobV2.class; while (tmpClass != null) { allFields.addAll(Arrays.asList(tmpClass.getDeclaredFields())); tmpClass = tmpClass.getSuperclass(); } for (Field field : allFields) { field.setAccessible(true); Annotation annotation = field.getAnnotation(SerializedName.class); if (annotation != null) { field.set(ret, field.get(job)); } } ret.initAnalyzer(); return ret; } private CloudRollupJobV2() {} public CloudRollupJobV2(String rawSql, long jobId, long dbId, long tableId, String tableName, long timeoutMs, long baseIndexId, long rollupIndexId, String baseIndexName, String rollupIndexName, List<Column> rollupSchema, Column whereColumn, int baseSchemaHash, int rollupSchemaHash, KeysType rollupKeysType, short rollupShortKeyColumnCount, OriginStatement origStmt) throws AnalysisException { super(rawSql, jobId, dbId, tableId, tableName, timeoutMs, baseIndexId, rollupIndexId, baseIndexName, rollupIndexName, rollupSchema, whereColumn, baseSchemaHash, rollupSchemaHash, rollupKeysType, rollupShortKeyColumnCount, origStmt); ConnectContext context = ConnectContext.get(); if (context != null) { String clusterName = context.getCloudCluster(); LOG.debug("rollup job add cloud cluster, context not null, cluster: {}", clusterName); if (!Strings.isNullOrEmpty(clusterName)) { setCloudClusterName(clusterName); } } LOG.debug("rollup job add cloud cluster, context {}", context); } @Override protected void onCreateRollupReplicaDone() throws AlterCancelException { List<Long> rollupIndexList = new ArrayList<Long>(); rollupIndexList.add(rollupIndexId); try { ((CloudInternalCatalog) Env.getCurrentInternalCatalog()) .commitMaterializedIndex(dbId, tableId, 
rollupIndexList, false); } catch (Exception e) { LOG.warn("commitMaterializedIndex Exception:{}", e); throw new AlterCancelException(e.getMessage()); } LOG.info("onCreateRollupReplicaDone finished, dbId:{}, tableId:{}, jobId:{}, rollupIndexList:{}", dbId, tableId, jobId, rollupIndexList); } @Override @Override protected void createRollupReplica() throws AlterCancelException { Database db = Env.getCurrentInternalCatalog() .getDbOrException(dbId, s -> new AlterCancelException("Database " + s + " does not exist")); OlapTable tbl; try { tbl = (OlapTable) db.getTableOrMetaException(tableId, Table.TableType.OLAP); } catch (MetaNotFoundException e) { throw new AlterCancelException(e.getMessage()); } long expiration = (createTimeMs + timeoutMs) / 1000; tbl.readLock(); try { Preconditions.checkState(tbl.getState() == OlapTableState.ROLLUP); try { List<Long> rollupIndexList = new ArrayList<Long>(); rollupIndexList.add(rollupIndexId); ((CloudInternalCatalog) Env.getCurrentInternalCatalog()) .prepareMaterializedIndex(tbl.getId(), rollupIndexList, expiration); createRollupReplicaForPartition(tbl); } catch (Exception e) { LOG.warn("createCloudShadowIndexReplica Exception:{}", e); throw new AlterCancelException(e.getMessage()); } } finally { tbl.readUnlock(); } tbl.writeLockOrAlterCancelException(); try { Preconditions.checkState(tbl.getState() == OlapTableState.ROLLUP); addRollupIndexToCatalog(tbl); } finally { tbl.writeUnlock(); } } private void createRollupReplicaForPartition(OlapTable tbl) throws Exception { for (Map.Entry<Long, MaterializedIndex> entry : this.partitionIdToRollupIndex.entrySet()) { long partitionId = entry.getKey(); Partition partition = tbl.getPartition(partitionId); if (partition == null) { continue; } TTabletType tabletType = tbl.getPartitionInfo().getTabletType(partitionId); MaterializedIndex rollupIndex = entry.getValue(); Cloud.CreateTabletsRequest.Builder requestBuilder = Cloud.CreateTabletsRequest.newBuilder(); List<String> rowStoreColumns = 
tbl.getTableProperty().getCopiedRowStoreColumns(); for (Tablet rollupTablet : rollupIndex.getTablets()) { OlapFile.TabletMetaCloudPB.Builder builder = ((CloudInternalCatalog) Env.getCurrentInternalCatalog()) .createTabletMetaBuilder(tableId, rollupIndexId, partitionId, rollupTablet, tabletType, rollupSchemaHash, rollupKeysType, rollupShortKeyColumnCount, tbl.getCopiedBfColumns(), tbl.getBfFpp(), null, rollupSchema, tbl.getDataSortInfo(), tbl.getCompressionType(), tbl.getStoragePolicy(), tbl.isInMemory(), true, tbl.getName(), tbl.getTTLSeconds(), tbl.getEnableUniqueKeyMergeOnWrite(), tbl.storeRowColumn(), tbl.getBaseSchemaVersion(), tbl.getCompactionPolicy(), tbl.getTimeSeriesCompactionGoalSizeMbytes(), tbl.getTimeSeriesCompactionFileCountThreshold(), tbl.getTimeSeriesCompactionTimeThresholdSeconds(), tbl.getTimeSeriesCompactionEmptyRowsetsThreshold(), tbl.getTimeSeriesCompactionLevelThreshold(), tbl.disableAutoCompaction(), tbl.getRowStoreColumnsUniqueIds(rowStoreColumns), tbl.getEnableMowLightDelete(), null, tbl.rowStorePageSize()); requestBuilder.addTabletMetas(builder); } ((CloudInternalCatalog) Env.getCurrentInternalCatalog()) .sendCreateTabletsRpc(requestBuilder); } } @Override protected void ensureCloudClusterExist(List<AgentTask> tasks) throws AlterCancelException { if (((CloudSystemInfoService) Env.getCurrentSystemInfo()) .getCloudClusterIdByName(cloudClusterName) == null) { for (AgentTask task : tasks) { task.setFinished(true); AgentTaskQueue.removeTask(task.getBackendId(), TTaskType.ALTER, task.getSignature()); } StringBuilder sb = new StringBuilder("cloud cluster("); sb.append(cloudClusterName); sb.append(") has been removed, jobId="); sb.append(jobId); String msg = sb.toString(); LOG.warn(msg); throw new AlterCancelException(msg); } } @Override protected boolean checkTableStable(Database db) throws AlterCancelException { return true; } }
class CloudRollupJobV2 extends RollupJobV2 { private static final Logger LOG = LogManager.getLogger(CloudRollupJobV2.class); public static AlterJobV2 buildCloudRollupJobV2(RollupJobV2 job) throws IllegalAccessException, AnalysisException { CloudRollupJobV2 ret = new CloudRollupJobV2(); List<Field> allFields = new ArrayList<>(); Class tmpClass = RollupJobV2.class; while (tmpClass != null) { allFields.addAll(Arrays.asList(tmpClass.getDeclaredFields())); tmpClass = tmpClass.getSuperclass(); } for (Field field : allFields) { field.setAccessible(true); Annotation annotation = field.getAnnotation(SerializedName.class); if (annotation != null) { field.set(ret, field.get(job)); } } ret.initAnalyzer(); return ret; } private CloudRollupJobV2() {} public CloudRollupJobV2(String rawSql, long jobId, long dbId, long tableId, String tableName, long timeoutMs, long baseIndexId, long rollupIndexId, String baseIndexName, String rollupIndexName, List<Column> rollupSchema, Column whereColumn, int baseSchemaHash, int rollupSchemaHash, KeysType rollupKeysType, short rollupShortKeyColumnCount, OriginStatement origStmt) throws AnalysisException { super(rawSql, jobId, dbId, tableId, tableName, timeoutMs, baseIndexId, rollupIndexId, baseIndexName, rollupIndexName, rollupSchema, whereColumn, baseSchemaHash, rollupSchemaHash, rollupKeysType, rollupShortKeyColumnCount, origStmt); ConnectContext context = ConnectContext.get(); if (context != null) { String clusterName = context.getCloudCluster(); LOG.debug("rollup job add cloud cluster, context not null, cluster: {}", clusterName); if (!Strings.isNullOrEmpty(clusterName)) { setCloudClusterName(clusterName); } } LOG.debug("rollup job add cloud cluster, context {}", context); } @Override protected void onCreateRollupReplicaDone() throws AlterCancelException { List<Long> rollupIndexList = new ArrayList<Long>(); rollupIndexList.add(rollupIndexId); try { ((CloudInternalCatalog) Env.getCurrentInternalCatalog()) .commitMaterializedIndex(dbId, tableId, 
rollupIndexList, false); } catch (Exception e) { LOG.warn("commitMaterializedIndex Exception:{}", e); throw new AlterCancelException(e.getMessage()); } LOG.info("onCreateRollupReplicaDone finished, dbId:{}, tableId:{}, jobId:{}, rollupIndexList:{}", dbId, tableId, jobId, rollupIndexList); } @Override @Override protected void createRollupReplica() throws AlterCancelException { Database db = Env.getCurrentInternalCatalog() .getDbOrException(dbId, s -> new AlterCancelException("Database " + s + " does not exist")); OlapTable tbl; try { tbl = (OlapTable) db.getTableOrMetaException(tableId, Table.TableType.OLAP); } catch (MetaNotFoundException e) { throw new AlterCancelException(e.getMessage()); } long expiration = (createTimeMs + timeoutMs) / 1000; tbl.readLock(); try { Preconditions.checkState(tbl.getState() == OlapTableState.ROLLUP); try { List<Long> rollupIndexList = new ArrayList<Long>(); rollupIndexList.add(rollupIndexId); ((CloudInternalCatalog) Env.getCurrentInternalCatalog()) .prepareMaterializedIndex(tbl.getId(), rollupIndexList, expiration); createRollupReplicaForPartition(tbl); } catch (Exception e) { LOG.warn("createCloudShadowIndexReplica Exception:{}", e); throw new AlterCancelException(e.getMessage()); } } finally { tbl.readUnlock(); } tbl.writeLockOrAlterCancelException(); try { Preconditions.checkState(tbl.getState() == OlapTableState.ROLLUP); addRollupIndexToCatalog(tbl); } finally { tbl.writeUnlock(); } } private void createRollupReplicaForPartition(OlapTable tbl) throws Exception { for (Map.Entry<Long, MaterializedIndex> entry : this.partitionIdToRollupIndex.entrySet()) { long partitionId = entry.getKey(); Partition partition = tbl.getPartition(partitionId); if (partition == null) { continue; } TTabletType tabletType = tbl.getPartitionInfo().getTabletType(partitionId); MaterializedIndex rollupIndex = entry.getValue(); Cloud.CreateTabletsRequest.Builder requestBuilder = Cloud.CreateTabletsRequest.newBuilder(); List<String> rowStoreColumns = 
tbl.getTableProperty().getCopiedRowStoreColumns(); for (Tablet rollupTablet : rollupIndex.getTablets()) { OlapFile.TabletMetaCloudPB.Builder builder = ((CloudInternalCatalog) Env.getCurrentInternalCatalog()) .createTabletMetaBuilder(tableId, rollupIndexId, partitionId, rollupTablet, tabletType, rollupSchemaHash, rollupKeysType, rollupShortKeyColumnCount, tbl.getCopiedBfColumns(), tbl.getBfFpp(), null, rollupSchema, tbl.getDataSortInfo(), tbl.getCompressionType(), tbl.getStoragePolicy(), tbl.isInMemory(), true, tbl.getName(), tbl.getTTLSeconds(), tbl.getEnableUniqueKeyMergeOnWrite(), tbl.storeRowColumn(), tbl.getBaseSchemaVersion(), tbl.getCompactionPolicy(), tbl.getTimeSeriesCompactionGoalSizeMbytes(), tbl.getTimeSeriesCompactionFileCountThreshold(), tbl.getTimeSeriesCompactionTimeThresholdSeconds(), tbl.getTimeSeriesCompactionEmptyRowsetsThreshold(), tbl.getTimeSeriesCompactionLevelThreshold(), tbl.disableAutoCompaction(), tbl.getRowStoreColumnsUniqueIds(rowStoreColumns), tbl.getEnableMowLightDelete(), null, tbl.rowStorePageSize()); requestBuilder.addTabletMetas(builder); } ((CloudInternalCatalog) Env.getCurrentInternalCatalog()) .sendCreateTabletsRpc(requestBuilder); } } @Override protected void ensureCloudClusterExist(List<AgentTask> tasks) throws AlterCancelException { if (((CloudSystemInfoService) Env.getCurrentSystemInfo()) .getCloudClusterIdByName(cloudClusterName) == null) { for (AgentTask task : tasks) { task.setFinished(true); AgentTaskQueue.removeTask(task.getBackendId(), TTaskType.ALTER, task.getSignature()); } StringBuilder sb = new StringBuilder("cloud cluster("); sb.append(cloudClusterName); sb.append(") has been removed, jobId="); sb.append(jobId); String msg = sb.toString(); LOG.warn(msg); throw new AlterCancelException(msg); } } @Override protected boolean checkTableStable(Database db) throws AlterCancelException { return true; } }
Fighting against the IDE, sure I will!
public AgroalDataSource doCreateDataSource(String dataSourceName) { if (!dataSourceSupport.entries.containsKey(dataSourceName)) { throw new IllegalArgumentException("No datasource named '" + dataSourceName + "' exists"); } DataSourceJdbcBuildTimeConfig dataSourceJdbcBuildTimeConfig = getDataSourceJdbcBuildTimeConfig(dataSourceName); DataSourceRuntimeConfig dataSourceRuntimeConfig = getDataSourceRuntimeConfig(dataSourceName); DataSourceJdbcRuntimeConfig dataSourceJdbcRuntimeConfig = getDataSourceJdbcRuntimeConfig(dataSourceName); LegacyDataSourceJdbcBuildTimeConfig legacyDataSourceJdbcBuildTimeConfig = getLegacyDataSourceJdbcBuildTimeConfig( dataSourceName); LegacyDataSourceRuntimeConfig legacyDataSourceRuntimeConfig = getLegacyDataSourceRuntimeConfig(dataSourceName); LegacyDataSourceJdbcRuntimeConfig legacyDataSourceJdbcRuntimeConfig = getLegacyDataSourceJdbcRuntimeConfig( dataSourceName); DataSourceSupport.Entry matchingSupportEntry = dataSourceSupport.entries.get(dataSourceName); boolean isLegacy = matchingSupportEntry.isLegacy; if (!isLegacy) { if (!dataSourceJdbcRuntimeConfig.url.isPresent() && legacyDataSourceRuntimeConfig.url.isPresent()) { String errorMessage; if (DataSourceUtil.isDefault(dataSourceName)) { errorMessage = "`quarkus.datasource.url` is deprecated and will be removed in a future version - it is " + "recommended to switch to `quarkus.datasource.jdbc.url`. See https: } else { errorMessage = "`quarkus.datasource." + dataSourceName + ".url` is deprecated and will be removed in a future version - it is " + "recommended to switch to `quarkus.datasource." + dataSourceName + ".jdbc.url`. See https: } throw new ConfigurationException(errorMessage); } if (!dataSourceJdbcRuntimeConfig.url.isPresent()) { String errorMessage; if (DataSourceUtil.isDefault(dataSourceName)) { errorMessage = "`quarkus.datasource.jdbc.url` has not been defined"; } else { errorMessage = "`quarkus.datasource.jdbc." 
+ dataSourceName + ".url` has not been defined"; } throw new ConfigurationException(errorMessage); } } else { if (!legacyDataSourceRuntimeConfig.url.isPresent()) { String errorMessage; if (DataSourceUtil.isDefault(dataSourceName)) { errorMessage = "`quarkus.datasource.url` has not been defined"; } else { errorMessage = "`quarkus.datasource." + dataSourceName + ".url` has not been defined"; } throw new ConfigurationException(errorMessage); } } loadDriversInTCCL(); String resolvedDriverClass = matchingSupportEntry.resolvedDriverClass; Class<?> driver; try { driver = Class.forName(resolvedDriverClass, true, Thread.currentThread().getContextClassLoader()); } catch (ClassNotFoundException e) { throw new RuntimeException( "Unable to load the datasource driver " + resolvedDriverClass + " for datasource " + dataSourceName, e); } String resolvedDbKind = matchingSupportEntry.resolvedDbKind; InstanceHandle<AgroalConnectionConfigurer> agroalConnectionConfigurerHandle = Arc.container().instance( AgroalConnectionConfigurer.class, new JdbcDriverLiteral(resolvedDbKind)); AgroalDataSourceConfigurationSupplier dataSourceConfiguration = new AgroalDataSourceConfigurationSupplier(); if (!dataSourceJdbcRuntimeConfig.poolingEnabled) { dataSourceConfiguration.dataSourceImplementation(DataSourceImplementation.AGROAL_POOLLESS); } AgroalConnectionPoolConfigurationSupplier poolConfiguration = dataSourceConfiguration.connectionPoolConfiguration(); AgroalConnectionFactoryConfigurationSupplier connectionFactoryConfiguration = poolConfiguration .connectionFactoryConfiguration(); boolean mpMetricsPresent = dataSourceSupport.mpMetricsPresent; if (!isLegacy) { applyNewConfiguration(dataSourceConfiguration, poolConfiguration, connectionFactoryConfiguration, driver, dataSourceJdbcBuildTimeConfig, dataSourceRuntimeConfig, dataSourceJdbcRuntimeConfig, mpMetricsPresent); } else { applyLegacyConfiguration(dataSourceConfiguration, poolConfiguration, connectionFactoryConfiguration, driver, 
dataSourceRuntimeConfig, legacyDataSourceJdbcBuildTimeConfig, legacyDataSourceRuntimeConfig, legacyDataSourceJdbcRuntimeConfig, mpMetricsPresent); } if (dataSourceSupport.disableSslSupport) { if (agroalConnectionConfigurerHandle.isAvailable()) { agroalConnectionConfigurerHandle.get().disableSslSupport(resolvedDbKind, dataSourceConfiguration); } else { log.warnv("Agroal does not support disabling SSL for database kind: {0}", resolvedDbKind); } } AgroalDataSourceConfiguration agroalConfiguration = dataSourceConfiguration.get(); AgroalDataSource dataSource = new io.agroal.pool.DataSource(agroalConfiguration, new AgroalEventLoggingListener( dataSourceName)); log.debugv("Started datasource {0} connected to {1}", dataSourceName, agroalConfiguration.connectionPoolConfiguration().connectionFactoryConfiguration().jdbcUrl()); Collection<AgroalPoolInterceptor> interceptorList = agroalPoolInterceptors .select(dataSourceName == null || DataSourceUtil.isDefault(dataSourceName) ? Default.Literal.INSTANCE : new DataSource.DataSourceLiteral(dataSourceName)) .stream().collect(Collectors.toList()); if (!interceptorList.isEmpty()) { dataSource.setPoolInterceptors(interceptorList); } return dataSource; }
dataSourceName));
public AgroalDataSource doCreateDataSource(String dataSourceName) { if (!dataSourceSupport.entries.containsKey(dataSourceName)) { throw new IllegalArgumentException("No datasource named '" + dataSourceName + "' exists"); } DataSourceJdbcBuildTimeConfig dataSourceJdbcBuildTimeConfig = getDataSourceJdbcBuildTimeConfig(dataSourceName); DataSourceRuntimeConfig dataSourceRuntimeConfig = getDataSourceRuntimeConfig(dataSourceName); DataSourceJdbcRuntimeConfig dataSourceJdbcRuntimeConfig = getDataSourceJdbcRuntimeConfig(dataSourceName); LegacyDataSourceJdbcBuildTimeConfig legacyDataSourceJdbcBuildTimeConfig = getLegacyDataSourceJdbcBuildTimeConfig( dataSourceName); LegacyDataSourceRuntimeConfig legacyDataSourceRuntimeConfig = getLegacyDataSourceRuntimeConfig(dataSourceName); LegacyDataSourceJdbcRuntimeConfig legacyDataSourceJdbcRuntimeConfig = getLegacyDataSourceJdbcRuntimeConfig( dataSourceName); DataSourceSupport.Entry matchingSupportEntry = dataSourceSupport.entries.get(dataSourceName); boolean isLegacy = matchingSupportEntry.isLegacy; if (!isLegacy) { if (!dataSourceJdbcRuntimeConfig.url.isPresent()) { String errorMessage; if (!legacyDataSourceRuntimeConfig.url.isPresent()) { if (DataSourceUtil.isDefault(dataSourceName)) { errorMessage = "quarkus.datasource.jdbc.url has not been defined"; } else { errorMessage = "quarkus.datasource." + dataSourceName + ".jdbc.url has not been defined"; } } else { if (DataSourceUtil.isDefault(dataSourceName)) { errorMessage = "Using legacy quarkus.datasource.url with a db-kind is not supported, please use " + " quarkus.datasource.jdbc.url instead. See https: } else { errorMessage = "Using legacy quarkus.datasource." + dataSourceName + ".url with a db-kind is not supported, please use " + "quarkus.datasource." + dataSourceName + ".jdbc.url " + "instead. 
See https: } } throw new ConfigurationException(errorMessage); } } else { if (!legacyDataSourceRuntimeConfig.url.isPresent()) { String errorMessage; if (DataSourceUtil.isDefault(dataSourceName)) { errorMessage = "quarkus.datasource.url has not been defined"; } else { errorMessage = "quarkus.datasource." + dataSourceName + ".url has not been defined"; } throw new ConfigurationException(errorMessage); } } loadDriversInTCCL(); String resolvedDriverClass = matchingSupportEntry.resolvedDriverClass; Class<?> driver; try { driver = Class.forName(resolvedDriverClass, true, Thread.currentThread().getContextClassLoader()); } catch (ClassNotFoundException e) { throw new RuntimeException( "Unable to load the datasource driver " + resolvedDriverClass + " for datasource " + dataSourceName, e); } String resolvedDbKind = matchingSupportEntry.resolvedDbKind; InstanceHandle<AgroalConnectionConfigurer> agroalConnectionConfigurerHandle = Arc.container().instance( AgroalConnectionConfigurer.class, new JdbcDriverLiteral(resolvedDbKind)); AgroalDataSourceConfigurationSupplier dataSourceConfiguration = new AgroalDataSourceConfigurationSupplier(); if (!dataSourceJdbcRuntimeConfig.poolingEnabled) { dataSourceConfiguration.dataSourceImplementation(DataSourceImplementation.AGROAL_POOLLESS); } AgroalConnectionPoolConfigurationSupplier poolConfiguration = dataSourceConfiguration.connectionPoolConfiguration(); AgroalConnectionFactoryConfigurationSupplier connectionFactoryConfiguration = poolConfiguration .connectionFactoryConfiguration(); boolean mpMetricsPresent = dataSourceSupport.mpMetricsPresent; if (!isLegacy) { applyNewConfiguration(dataSourceConfiguration, poolConfiguration, connectionFactoryConfiguration, driver, dataSourceJdbcBuildTimeConfig, dataSourceRuntimeConfig, dataSourceJdbcRuntimeConfig, mpMetricsPresent); } else { applyLegacyConfiguration(dataSourceConfiguration, poolConfiguration, connectionFactoryConfiguration, driver, dataSourceRuntimeConfig, 
legacyDataSourceJdbcBuildTimeConfig, legacyDataSourceRuntimeConfig, legacyDataSourceJdbcRuntimeConfig, mpMetricsPresent); } if (dataSourceSupport.disableSslSupport) { if (agroalConnectionConfigurerHandle.isAvailable()) { agroalConnectionConfigurerHandle.get().disableSslSupport(resolvedDbKind, dataSourceConfiguration); } else { log.warnv("Agroal does not support disabling SSL for database kind: {0}", resolvedDbKind); } } AgroalDataSourceConfiguration agroalConfiguration = dataSourceConfiguration.get(); AgroalDataSource dataSource = new io.agroal.pool.DataSource(agroalConfiguration, new AgroalEventLoggingListener(dataSourceName)); log.debugv("Started datasource {0} connected to {1}", dataSourceName, agroalConfiguration.connectionPoolConfiguration().connectionFactoryConfiguration().jdbcUrl()); Collection<AgroalPoolInterceptor> interceptorList = agroalPoolInterceptors .select(dataSourceName == null || DataSourceUtil.isDefault(dataSourceName) ? Default.Literal.INSTANCE : new DataSource.DataSourceLiteral(dataSourceName)) .stream().collect(Collectors.toList()); if (!interceptorList.isEmpty()) { dataSource.setPoolInterceptors(interceptorList); } return dataSource; }
class DataSources { private static final Logger log = Logger.getLogger(DataSources.class.getName()); public static final String DEPRECATED_URL_PROPERTY_NAME_ERROR_MESSAGE_FORMAT = "`quarkus.datasource.%s.url` is deprecated and will be removed in a future version - it is " + "recommended to switch to `quarkus.datasource.%s.jdbc.url`. See https: public static final String DEPRECATED_URL_PROPERTY_ERROR_MESSAGE = "`quarkus.datasource.url` is deprecated and will be removed in a future version - it is " + "recommended to switch to `quarkus.datasource.jdbc.url`. See https: public static final String DATASOURCE_JDBC_URL_HAS_NOT_BEEN_DEFINED = "`quarkus.datasource.jdbc.url` has not been defined"; public static final String DATASOURCE_JDBC_S_URL_HAS_NOT_BEEN_DEFINED_MESSAGE_FORMAT = "`quarkus.datasource.jdbc.%s.url` has not been defined"; private final DataSourcesBuildTimeConfig dataSourcesBuildTimeConfig; private final DataSourcesRuntimeConfig dataSourcesRuntimeConfig; private final DataSourcesJdbcBuildTimeConfig dataSourcesJdbcBuildTimeConfig; private final DataSourcesJdbcRuntimeConfig dataSourcesJdbcRuntimeConfig; private final LegacyDataSourcesJdbcBuildTimeConfig legacyDataSourcesJdbcBuildTimeConfig; private final LegacyDataSourcesRuntimeConfig legacyDataSourcesRuntimeConfig; private final LegacyDataSourcesJdbcRuntimeConfig legacyDataSourcesJdbcRuntimeConfig; private final TransactionManager transactionManager; private final TransactionSynchronizationRegistry transactionSynchronizationRegistry; private final DataSourceSupport dataSourceSupport; private final Instance<AgroalPoolInterceptor> agroalPoolInterceptors; private final ConcurrentMap<String, AgroalDataSource> dataSources = new ConcurrentHashMap<>(); public DataSources(DataSourcesBuildTimeConfig dataSourcesBuildTimeConfig, DataSourcesRuntimeConfig dataSourcesRuntimeConfig, DataSourcesJdbcBuildTimeConfig dataSourcesJdbcBuildTimeConfig, DataSourcesJdbcRuntimeConfig dataSourcesJdbcRuntimeConfig, 
LegacyDataSourcesJdbcBuildTimeConfig legacyDataSourcesJdbcBuildTimeConfig, LegacyDataSourcesRuntimeConfig legacyDataSourcesRuntimeConfig, LegacyDataSourcesJdbcRuntimeConfig legacyDataSourcesJdbcRuntimeConfig, TransactionManager transactionManager, TransactionSynchronizationRegistry transactionSynchronizationRegistry, DataSourceSupport dataSourceSupport, @Any Instance<AgroalPoolInterceptor> agroalPoolInterceptors) { this.dataSourcesBuildTimeConfig = dataSourcesBuildTimeConfig; this.dataSourcesRuntimeConfig = dataSourcesRuntimeConfig; this.dataSourcesJdbcBuildTimeConfig = dataSourcesJdbcBuildTimeConfig; this.dataSourcesJdbcRuntimeConfig = dataSourcesJdbcRuntimeConfig; this.legacyDataSourcesJdbcBuildTimeConfig = legacyDataSourcesJdbcBuildTimeConfig; this.legacyDataSourcesRuntimeConfig = legacyDataSourcesRuntimeConfig; this.legacyDataSourcesJdbcRuntimeConfig = legacyDataSourcesJdbcRuntimeConfig; this.transactionManager = transactionManager; this.transactionSynchronizationRegistry = transactionSynchronizationRegistry; this.dataSourceSupport = dataSourceSupport; this.agroalPoolInterceptors = agroalPoolInterceptors; } /** * Meant to be used from recorders that create synthetic beans that need access to {@code Datasource}. * In such using {@code Arc.container.instance(DataSource.class)} is not possible because * {@code Datasource} is itself a synthetic bean. * <p> * This method relies on the fact that {@code DataSources} should - given the same input - * always return the same {@code AgroalDataSource} no matter how many times it is invoked * (which makes sense because {@code DataSource} is a {@code Singleton} bean). 
* <p> * This method is thread-safe */ public static AgroalDataSource fromName(String dataSourceName) { return Arc.container().instance(DataSources.class).get() .getDataSource(dataSourceName); } public AgroalDataSource getDataSource(String dataSourceName) { return dataSources.computeIfAbsent(dataSourceName, new Function<String, AgroalDataSource>() { @Override public AgroalDataSource apply(String s) { return doCreateDataSource(s); } }); } private void applyNewConfiguration(AgroalDataSourceConfigurationSupplier dataSourceConfiguration, AgroalConnectionPoolConfigurationSupplier poolConfiguration, AgroalConnectionFactoryConfigurationSupplier connectionFactoryConfiguration, Class<?> driver, DataSourceJdbcBuildTimeConfig dataSourceJdbcBuildTimeConfig, DataSourceRuntimeConfig dataSourceRuntimeConfig, DataSourceJdbcRuntimeConfig dataSourceJdbcRuntimeConfig, boolean mpMetricsPresent) { connectionFactoryConfiguration.jdbcUrl(dataSourceJdbcRuntimeConfig.url.get()); connectionFactoryConfiguration.connectionProviderClass(driver); connectionFactoryConfiguration.trackJdbcResources(dataSourceJdbcRuntimeConfig.detectStatementLeaks); if (dataSourceJdbcRuntimeConfig.transactionIsolationLevel.isPresent()) { connectionFactoryConfiguration .jdbcTransactionIsolation( dataSourceJdbcRuntimeConfig.transactionIsolationLevel.get()); } if (dataSourceJdbcBuildTimeConfig.transactions != io.quarkus.agroal.runtime.TransactionIntegration.DISABLED) { TransactionIntegration txIntegration = new NarayanaTransactionIntegration(transactionManager, transactionSynchronizationRegistry); poolConfiguration.transactionIntegration(txIntegration); } if (dataSourceJdbcRuntimeConfig.newConnectionSql.isPresent()) { connectionFactoryConfiguration.initialSql(dataSourceJdbcRuntimeConfig.newConnectionSql.get()); } if (dataSourceJdbcBuildTimeConfig.enableMetrics.isPresent()) { dataSourceConfiguration.metricsEnabled(dataSourceJdbcBuildTimeConfig.enableMetrics.get()); } else { 
dataSourceConfiguration.metricsEnabled(dataSourcesBuildTimeConfig.metricsEnabled && mpMetricsPresent); } if (dataSourceRuntimeConfig.username.isPresent()) { connectionFactoryConfiguration .principal(new NamePrincipal(dataSourceRuntimeConfig.username.get())); } if (dataSourceRuntimeConfig.password.isPresent()) { connectionFactoryConfiguration .credential(new SimplePassword(dataSourceRuntimeConfig.password.get())); } if (dataSourceRuntimeConfig.credentialsProvider.isPresent()) { String beanName = dataSourceRuntimeConfig.credentialsProviderName.orElse(null); CredentialsProvider credentialsProvider = CredentialsProviderFinder.find(beanName); String name = dataSourceRuntimeConfig.credentialsProvider.get(); connectionFactoryConfiguration .credential(new AgroalVaultCredentialsProviderPassword(name, credentialsProvider)); } poolConfiguration.minSize(dataSourceJdbcRuntimeConfig.minSize); poolConfiguration.maxSize(dataSourceJdbcRuntimeConfig.maxSize); if (dataSourceJdbcRuntimeConfig.initialSize.isPresent() && dataSourceJdbcRuntimeConfig.initialSize.getAsInt() > 0) { poolConfiguration.initialSize(dataSourceJdbcRuntimeConfig.initialSize.getAsInt()); } poolConfiguration.connectionValidator(ConnectionValidator.defaultValidator()); if (dataSourceJdbcRuntimeConfig.acquisitionTimeout.isPresent()) { poolConfiguration.acquisitionTimeout(dataSourceJdbcRuntimeConfig.acquisitionTimeout.get()); } if (dataSourceJdbcRuntimeConfig.backgroundValidationInterval.isPresent()) { poolConfiguration.validationTimeout(dataSourceJdbcRuntimeConfig.backgroundValidationInterval.get()); } if (dataSourceJdbcRuntimeConfig.validationQuerySql.isPresent()) { String validationQuery = dataSourceJdbcRuntimeConfig.validationQuerySql.get(); poolConfiguration.connectionValidator(new ConnectionValidator() { @Override public boolean isValid(Connection connection) { try (Statement stmt = connection.createStatement()) { stmt.execute(validationQuery); return true; } catch (Exception e) { log.warn("Connection validation 
failed", e); } return false; } }); } if (dataSourceJdbcRuntimeConfig.idleRemovalInterval.isPresent()) { poolConfiguration.reapTimeout(dataSourceJdbcRuntimeConfig.idleRemovalInterval.get()); } if (dataSourceJdbcRuntimeConfig.leakDetectionInterval.isPresent()) { poolConfiguration.leakTimeout(dataSourceJdbcRuntimeConfig.leakDetectionInterval.get()); } if (dataSourceJdbcRuntimeConfig.maxLifetime.isPresent()) { poolConfiguration.maxLifetime(dataSourceJdbcRuntimeConfig.maxLifetime.get()); } } private void applyLegacyConfiguration(AgroalDataSourceConfigurationSupplier dataSourceConfiguration, AgroalConnectionPoolConfigurationSupplier poolConfiguration, AgroalConnectionFactoryConfigurationSupplier connectionFactoryConfiguration, Class<?> driver, DataSourceRuntimeConfig dataSourceRuntimeConfig, LegacyDataSourceJdbcBuildTimeConfig legacyDataSourceJdbcBuildTimeConfig, LegacyDataSourceRuntimeConfig legacyDataSourceRuntimeConfig, LegacyDataSourceJdbcRuntimeConfig legacyDataSourceJdbcRuntimeConfig, boolean mpMetricsPresent) { connectionFactoryConfiguration.jdbcUrl(legacyDataSourceRuntimeConfig.url.get()); connectionFactoryConfiguration.connectionProviderClass(driver); connectionFactoryConfiguration.trackJdbcResources(legacyDataSourceJdbcRuntimeConfig.detectStatementLeaks); if (legacyDataSourceJdbcRuntimeConfig.transactionIsolationLevel.isPresent()) { connectionFactoryConfiguration .jdbcTransactionIsolation( legacyDataSourceJdbcRuntimeConfig.transactionIsolationLevel.get()); } if (legacyDataSourceJdbcBuildTimeConfig.transactions != io.quarkus.agroal.runtime.TransactionIntegration.DISABLED) { TransactionIntegration txIntegration = new NarayanaTransactionIntegration(transactionManager, transactionSynchronizationRegistry); poolConfiguration.transactionIntegration(txIntegration); } if (legacyDataSourceJdbcRuntimeConfig.newConnectionSql.isPresent()) { connectionFactoryConfiguration.initialSql(legacyDataSourceJdbcRuntimeConfig.newConnectionSql.get()); } if 
(legacyDataSourceJdbcBuildTimeConfig.enableMetrics.isPresent()) { dataSourceConfiguration.metricsEnabled(legacyDataSourceJdbcBuildTimeConfig.enableMetrics.get()); } else { dataSourceConfiguration.metricsEnabled(dataSourcesBuildTimeConfig.metricsEnabled && mpMetricsPresent); } if (dataSourceRuntimeConfig.username.isPresent()) { connectionFactoryConfiguration .principal(new NamePrincipal(dataSourceRuntimeConfig.username.get())); } if (dataSourceRuntimeConfig.password.isPresent()) { connectionFactoryConfiguration .credential(new SimplePassword(dataSourceRuntimeConfig.password.get())); } if (dataSourceRuntimeConfig.credentialsProvider.isPresent()) { String beanName = dataSourceRuntimeConfig.credentialsProviderName.orElse(null); CredentialsProvider credentialsProvider = CredentialsProviderFinder.find(beanName); String name = dataSourceRuntimeConfig.credentialsProvider.get(); connectionFactoryConfiguration .credential(new AgroalVaultCredentialsProviderPassword(name, credentialsProvider)); } poolConfiguration.minSize(legacyDataSourceJdbcRuntimeConfig.minSize); poolConfiguration.maxSize(legacyDataSourceRuntimeConfig.maxSize); if (legacyDataSourceJdbcRuntimeConfig.initialSize.isPresent() && legacyDataSourceJdbcRuntimeConfig.initialSize.get() > 0) { poolConfiguration.initialSize(legacyDataSourceJdbcRuntimeConfig.initialSize.get()); } poolConfiguration.connectionValidator(ConnectionValidator.defaultValidator()); if (legacyDataSourceJdbcRuntimeConfig.acquisitionTimeout.isPresent()) { poolConfiguration.acquisitionTimeout(legacyDataSourceJdbcRuntimeConfig.acquisitionTimeout.get()); } if (legacyDataSourceJdbcRuntimeConfig.backgroundValidationInterval.isPresent()) { poolConfiguration.validationTimeout(legacyDataSourceJdbcRuntimeConfig.backgroundValidationInterval.get()); } if (legacyDataSourceJdbcRuntimeConfig.validationQuerySql.isPresent()) { String validationQuery = legacyDataSourceJdbcRuntimeConfig.validationQuerySql.get(); poolConfiguration.connectionValidator(new 
ConnectionValidator() { @Override public boolean isValid(Connection connection) { try (Statement stmt = connection.createStatement()) { stmt.execute(validationQuery); return true; } catch (Exception e) { log.warn("Connection validation failed", e); } return false; } }); } if (legacyDataSourceJdbcRuntimeConfig.idleRemovalInterval.isPresent()) { poolConfiguration.reapTimeout(legacyDataSourceJdbcRuntimeConfig.idleRemovalInterval.get()); } if (legacyDataSourceJdbcRuntimeConfig.leakDetectionInterval.isPresent()) { poolConfiguration.leakTimeout(legacyDataSourceJdbcRuntimeConfig.leakDetectionInterval.get()); } if (legacyDataSourceJdbcRuntimeConfig.maxLifetime.isPresent()) { poolConfiguration.maxLifetime(legacyDataSourceJdbcRuntimeConfig.maxLifetime.get()); } } public DataSourceBuildTimeConfig getDataSourceBuildTimeConfig(String dataSourceName) { if (DataSourceUtil.isDefault(dataSourceName)) { return dataSourcesBuildTimeConfig.defaultDataSource; } DataSourceBuildTimeConfig namedConfig = dataSourcesBuildTimeConfig.namedDataSources.get(dataSourceName); return namedConfig != null ? namedConfig : new DataSourceBuildTimeConfig(); } public DataSourceJdbcBuildTimeConfig getDataSourceJdbcBuildTimeConfig(String dataSourceName) { if (DataSourceUtil.isDefault(dataSourceName)) { return dataSourcesJdbcBuildTimeConfig.jdbc; } DataSourceJdbcOuterNamedBuildTimeConfig namedOuterConfig = dataSourcesJdbcBuildTimeConfig.namedDataSources .get(dataSourceName); return namedOuterConfig != null ? namedOuterConfig.jdbc : new DataSourceJdbcBuildTimeConfig(); } public DataSourceRuntimeConfig getDataSourceRuntimeConfig(String dataSourceName) { if (DataSourceUtil.isDefault(dataSourceName)) { return dataSourcesRuntimeConfig.defaultDataSource; } DataSourceRuntimeConfig namedConfig = dataSourcesRuntimeConfig.namedDataSources.get(dataSourceName); return namedConfig != null ? 
namedConfig : new DataSourceRuntimeConfig(); } public DataSourceJdbcRuntimeConfig getDataSourceJdbcRuntimeConfig(String dataSourceName) { if (DataSourceUtil.isDefault(dataSourceName)) { return dataSourcesJdbcRuntimeConfig.jdbc; } DataSourceJdbcOuterNamedRuntimeConfig namedOuterConfig = dataSourcesJdbcRuntimeConfig.namedDataSources .get(dataSourceName); return namedOuterConfig != null ? namedOuterConfig.jdbc : new DataSourceJdbcRuntimeConfig(); } public LegacyDataSourceJdbcBuildTimeConfig getLegacyDataSourceJdbcBuildTimeConfig(String dataSourceName) { if (DataSourceUtil.isDefault(dataSourceName)) { return legacyDataSourcesJdbcBuildTimeConfig.defaultDataSource; } LegacyDataSourceJdbcBuildTimeConfig namedConfig = legacyDataSourcesJdbcBuildTimeConfig.namedDataSources .get(dataSourceName); return namedConfig != null ? namedConfig : new LegacyDataSourceJdbcBuildTimeConfig(); } public LegacyDataSourceRuntimeConfig getLegacyDataSourceRuntimeConfig(String dataSourceName) { if (DataSourceUtil.isDefault(dataSourceName)) { return legacyDataSourcesRuntimeConfig.defaultDataSource; } LegacyDataSourceRuntimeConfig namedConfig = legacyDataSourcesRuntimeConfig.namedDataSources.get(dataSourceName); return namedConfig != null ? namedConfig : new LegacyDataSourceRuntimeConfig(); } public LegacyDataSourceJdbcRuntimeConfig getLegacyDataSourceJdbcRuntimeConfig(String dataSourceName) { if (DataSourceUtil.isDefault(dataSourceName)) { return legacyDataSourcesJdbcRuntimeConfig.defaultDataSource; } LegacyDataSourceJdbcRuntimeConfig namedConfig = legacyDataSourcesJdbcRuntimeConfig.namedDataSources .get(dataSourceName); return namedConfig != null ? 
namedConfig : new LegacyDataSourceJdbcRuntimeConfig(); } /** * Uses the {@link ServiceLoader * of the current {@link Thread */ private static void loadDriversInTCCL() { final ServiceLoader<Driver> drivers = ServiceLoader.load(Driver.class); final Iterator<Driver> iterator = drivers.iterator(); while (iterator.hasNext()) { try { iterator.next(); } catch (Throwable t) { } } } @PreDestroy public void stop() { for (AgroalDataSource dataSource : dataSources.values()) { if (dataSource != null) { dataSource.close(); } } } }
class DataSources { private static final Logger log = Logger.getLogger(DataSources.class.getName()); private final DataSourcesBuildTimeConfig dataSourcesBuildTimeConfig; private final DataSourcesRuntimeConfig dataSourcesRuntimeConfig; private final DataSourcesJdbcBuildTimeConfig dataSourcesJdbcBuildTimeConfig; private final DataSourcesJdbcRuntimeConfig dataSourcesJdbcRuntimeConfig; private final LegacyDataSourcesJdbcBuildTimeConfig legacyDataSourcesJdbcBuildTimeConfig; private final LegacyDataSourcesRuntimeConfig legacyDataSourcesRuntimeConfig; private final LegacyDataSourcesJdbcRuntimeConfig legacyDataSourcesJdbcRuntimeConfig; private final TransactionManager transactionManager; private final TransactionSynchronizationRegistry transactionSynchronizationRegistry; private final DataSourceSupport dataSourceSupport; private final Instance<AgroalPoolInterceptor> agroalPoolInterceptors; private final ConcurrentMap<String, AgroalDataSource> dataSources = new ConcurrentHashMap<>(); public DataSources(DataSourcesBuildTimeConfig dataSourcesBuildTimeConfig, DataSourcesRuntimeConfig dataSourcesRuntimeConfig, DataSourcesJdbcBuildTimeConfig dataSourcesJdbcBuildTimeConfig, DataSourcesJdbcRuntimeConfig dataSourcesJdbcRuntimeConfig, LegacyDataSourcesJdbcBuildTimeConfig legacyDataSourcesJdbcBuildTimeConfig, LegacyDataSourcesRuntimeConfig legacyDataSourcesRuntimeConfig, LegacyDataSourcesJdbcRuntimeConfig legacyDataSourcesJdbcRuntimeConfig, TransactionManager transactionManager, TransactionSynchronizationRegistry transactionSynchronizationRegistry, DataSourceSupport dataSourceSupport, @Any Instance<AgroalPoolInterceptor> agroalPoolInterceptors) { this.dataSourcesBuildTimeConfig = dataSourcesBuildTimeConfig; this.dataSourcesRuntimeConfig = dataSourcesRuntimeConfig; this.dataSourcesJdbcBuildTimeConfig = dataSourcesJdbcBuildTimeConfig; this.dataSourcesJdbcRuntimeConfig = dataSourcesJdbcRuntimeConfig; this.legacyDataSourcesJdbcBuildTimeConfig = legacyDataSourcesJdbcBuildTimeConfig; 
this.legacyDataSourcesRuntimeConfig = legacyDataSourcesRuntimeConfig; this.legacyDataSourcesJdbcRuntimeConfig = legacyDataSourcesJdbcRuntimeConfig; this.transactionManager = transactionManager; this.transactionSynchronizationRegistry = transactionSynchronizationRegistry; this.dataSourceSupport = dataSourceSupport; this.agroalPoolInterceptors = agroalPoolInterceptors; } /** * Meant to be used from recorders that create synthetic beans that need access to {@code Datasource}. * In such using {@code Arc.container.instance(DataSource.class)} is not possible because * {@code Datasource} is itself a synthetic bean. * <p> * This method relies on the fact that {@code DataSources} should - given the same input - * always return the same {@code AgroalDataSource} no matter how many times it is invoked * (which makes sense because {@code DataSource} is a {@code Singleton} bean). * <p> * This method is thread-safe */ public static AgroalDataSource fromName(String dataSourceName) { return Arc.container().instance(DataSources.class).get() .getDataSource(dataSourceName); } public AgroalDataSource getDataSource(String dataSourceName) { return dataSources.computeIfAbsent(dataSourceName, new Function<String, AgroalDataSource>() { @Override public AgroalDataSource apply(String s) { return doCreateDataSource(s); } }); } private void applyNewConfiguration(AgroalDataSourceConfigurationSupplier dataSourceConfiguration, AgroalConnectionPoolConfigurationSupplier poolConfiguration, AgroalConnectionFactoryConfigurationSupplier connectionFactoryConfiguration, Class<?> driver, DataSourceJdbcBuildTimeConfig dataSourceJdbcBuildTimeConfig, DataSourceRuntimeConfig dataSourceRuntimeConfig, DataSourceJdbcRuntimeConfig dataSourceJdbcRuntimeConfig, boolean mpMetricsPresent) { connectionFactoryConfiguration.jdbcUrl(dataSourceJdbcRuntimeConfig.url.get()); connectionFactoryConfiguration.connectionProviderClass(driver); 
connectionFactoryConfiguration.trackJdbcResources(dataSourceJdbcRuntimeConfig.detectStatementLeaks); if (dataSourceJdbcRuntimeConfig.transactionIsolationLevel.isPresent()) { connectionFactoryConfiguration .jdbcTransactionIsolation( dataSourceJdbcRuntimeConfig.transactionIsolationLevel.get()); } if (dataSourceJdbcBuildTimeConfig.transactions != io.quarkus.agroal.runtime.TransactionIntegration.DISABLED) { TransactionIntegration txIntegration = new NarayanaTransactionIntegration(transactionManager, transactionSynchronizationRegistry); poolConfiguration.transactionIntegration(txIntegration); } if (dataSourceJdbcRuntimeConfig.newConnectionSql.isPresent()) { connectionFactoryConfiguration.initialSql(dataSourceJdbcRuntimeConfig.newConnectionSql.get()); } if (dataSourceJdbcBuildTimeConfig.enableMetrics.isPresent()) { dataSourceConfiguration.metricsEnabled(dataSourceJdbcBuildTimeConfig.enableMetrics.get()); } else { dataSourceConfiguration.metricsEnabled(dataSourcesBuildTimeConfig.metricsEnabled && mpMetricsPresent); } if (dataSourceRuntimeConfig.username.isPresent()) { connectionFactoryConfiguration .principal(new NamePrincipal(dataSourceRuntimeConfig.username.get())); } if (dataSourceRuntimeConfig.password.isPresent()) { connectionFactoryConfiguration .credential(new SimplePassword(dataSourceRuntimeConfig.password.get())); } if (dataSourceRuntimeConfig.credentialsProvider.isPresent()) { String beanName = dataSourceRuntimeConfig.credentialsProviderName.orElse(null); CredentialsProvider credentialsProvider = CredentialsProviderFinder.find(beanName); String name = dataSourceRuntimeConfig.credentialsProvider.get(); connectionFactoryConfiguration .credential(new AgroalVaultCredentialsProviderPassword(name, credentialsProvider)); } poolConfiguration.minSize(dataSourceJdbcRuntimeConfig.minSize); poolConfiguration.maxSize(dataSourceJdbcRuntimeConfig.maxSize); if (dataSourceJdbcRuntimeConfig.initialSize.isPresent() && dataSourceJdbcRuntimeConfig.initialSize.getAsInt() > 0) { 
poolConfiguration.initialSize(dataSourceJdbcRuntimeConfig.initialSize.getAsInt()); } poolConfiguration.connectionValidator(ConnectionValidator.defaultValidator()); if (dataSourceJdbcRuntimeConfig.acquisitionTimeout.isPresent()) { poolConfiguration.acquisitionTimeout(dataSourceJdbcRuntimeConfig.acquisitionTimeout.get()); } if (dataSourceJdbcRuntimeConfig.backgroundValidationInterval.isPresent()) { poolConfiguration.validationTimeout(dataSourceJdbcRuntimeConfig.backgroundValidationInterval.get()); } if (dataSourceJdbcRuntimeConfig.validationQuerySql.isPresent()) { String validationQuery = dataSourceJdbcRuntimeConfig.validationQuerySql.get(); poolConfiguration.connectionValidator(new ConnectionValidator() { @Override public boolean isValid(Connection connection) { try (Statement stmt = connection.createStatement()) { stmt.execute(validationQuery); return true; } catch (Exception e) { log.warn("Connection validation failed", e); } return false; } }); } if (dataSourceJdbcRuntimeConfig.idleRemovalInterval.isPresent()) { poolConfiguration.reapTimeout(dataSourceJdbcRuntimeConfig.idleRemovalInterval.get()); } if (dataSourceJdbcRuntimeConfig.leakDetectionInterval.isPresent()) { poolConfiguration.leakTimeout(dataSourceJdbcRuntimeConfig.leakDetectionInterval.get()); } if (dataSourceJdbcRuntimeConfig.maxLifetime.isPresent()) { poolConfiguration.maxLifetime(dataSourceJdbcRuntimeConfig.maxLifetime.get()); } } private void applyLegacyConfiguration(AgroalDataSourceConfigurationSupplier dataSourceConfiguration, AgroalConnectionPoolConfigurationSupplier poolConfiguration, AgroalConnectionFactoryConfigurationSupplier connectionFactoryConfiguration, Class<?> driver, DataSourceRuntimeConfig dataSourceRuntimeConfig, LegacyDataSourceJdbcBuildTimeConfig legacyDataSourceJdbcBuildTimeConfig, LegacyDataSourceRuntimeConfig legacyDataSourceRuntimeConfig, LegacyDataSourceJdbcRuntimeConfig legacyDataSourceJdbcRuntimeConfig, boolean mpMetricsPresent) { 
connectionFactoryConfiguration.jdbcUrl(legacyDataSourceRuntimeConfig.url.get()); connectionFactoryConfiguration.connectionProviderClass(driver); connectionFactoryConfiguration.trackJdbcResources(legacyDataSourceJdbcRuntimeConfig.detectStatementLeaks); if (legacyDataSourceJdbcRuntimeConfig.transactionIsolationLevel.isPresent()) { connectionFactoryConfiguration .jdbcTransactionIsolation( legacyDataSourceJdbcRuntimeConfig.transactionIsolationLevel.get()); } if (legacyDataSourceJdbcBuildTimeConfig.transactions != io.quarkus.agroal.runtime.TransactionIntegration.DISABLED) { TransactionIntegration txIntegration = new NarayanaTransactionIntegration(transactionManager, transactionSynchronizationRegistry); poolConfiguration.transactionIntegration(txIntegration); } if (legacyDataSourceJdbcRuntimeConfig.newConnectionSql.isPresent()) { connectionFactoryConfiguration.initialSql(legacyDataSourceJdbcRuntimeConfig.newConnectionSql.get()); } if (legacyDataSourceJdbcBuildTimeConfig.enableMetrics.isPresent()) { dataSourceConfiguration.metricsEnabled(legacyDataSourceJdbcBuildTimeConfig.enableMetrics.get()); } else { dataSourceConfiguration.metricsEnabled(dataSourcesBuildTimeConfig.metricsEnabled && mpMetricsPresent); } if (dataSourceRuntimeConfig.username.isPresent()) { connectionFactoryConfiguration .principal(new NamePrincipal(dataSourceRuntimeConfig.username.get())); } if (dataSourceRuntimeConfig.password.isPresent()) { connectionFactoryConfiguration .credential(new SimplePassword(dataSourceRuntimeConfig.password.get())); } if (dataSourceRuntimeConfig.credentialsProvider.isPresent()) { String beanName = dataSourceRuntimeConfig.credentialsProviderName.orElse(null); CredentialsProvider credentialsProvider = CredentialsProviderFinder.find(beanName); String name = dataSourceRuntimeConfig.credentialsProvider.get(); connectionFactoryConfiguration .credential(new AgroalVaultCredentialsProviderPassword(name, credentialsProvider)); } 
poolConfiguration.minSize(legacyDataSourceJdbcRuntimeConfig.minSize); poolConfiguration.maxSize(legacyDataSourceRuntimeConfig.maxSize); if (legacyDataSourceJdbcRuntimeConfig.initialSize.isPresent() && legacyDataSourceJdbcRuntimeConfig.initialSize.get() > 0) { poolConfiguration.initialSize(legacyDataSourceJdbcRuntimeConfig.initialSize.get()); } poolConfiguration.connectionValidator(ConnectionValidator.defaultValidator()); if (legacyDataSourceJdbcRuntimeConfig.acquisitionTimeout.isPresent()) { poolConfiguration.acquisitionTimeout(legacyDataSourceJdbcRuntimeConfig.acquisitionTimeout.get()); } if (legacyDataSourceJdbcRuntimeConfig.backgroundValidationInterval.isPresent()) { poolConfiguration.validationTimeout(legacyDataSourceJdbcRuntimeConfig.backgroundValidationInterval.get()); } if (legacyDataSourceJdbcRuntimeConfig.validationQuerySql.isPresent()) { String validationQuery = legacyDataSourceJdbcRuntimeConfig.validationQuerySql.get(); poolConfiguration.connectionValidator(new ConnectionValidator() { @Override public boolean isValid(Connection connection) { try (Statement stmt = connection.createStatement()) { stmt.execute(validationQuery); return true; } catch (Exception e) { log.warn("Connection validation failed", e); } return false; } }); } if (legacyDataSourceJdbcRuntimeConfig.idleRemovalInterval.isPresent()) { poolConfiguration.reapTimeout(legacyDataSourceJdbcRuntimeConfig.idleRemovalInterval.get()); } if (legacyDataSourceJdbcRuntimeConfig.leakDetectionInterval.isPresent()) { poolConfiguration.leakTimeout(legacyDataSourceJdbcRuntimeConfig.leakDetectionInterval.get()); } if (legacyDataSourceJdbcRuntimeConfig.maxLifetime.isPresent()) { poolConfiguration.maxLifetime(legacyDataSourceJdbcRuntimeConfig.maxLifetime.get()); } } public DataSourceBuildTimeConfig getDataSourceBuildTimeConfig(String dataSourceName) { if (DataSourceUtil.isDefault(dataSourceName)) { return dataSourcesBuildTimeConfig.defaultDataSource; } DataSourceBuildTimeConfig namedConfig = 
dataSourcesBuildTimeConfig.namedDataSources.get(dataSourceName); return namedConfig != null ? namedConfig : new DataSourceBuildTimeConfig(); } public DataSourceJdbcBuildTimeConfig getDataSourceJdbcBuildTimeConfig(String dataSourceName) { if (DataSourceUtil.isDefault(dataSourceName)) { return dataSourcesJdbcBuildTimeConfig.jdbc; } DataSourceJdbcOuterNamedBuildTimeConfig namedOuterConfig = dataSourcesJdbcBuildTimeConfig.namedDataSources .get(dataSourceName); return namedOuterConfig != null ? namedOuterConfig.jdbc : new DataSourceJdbcBuildTimeConfig(); } public DataSourceRuntimeConfig getDataSourceRuntimeConfig(String dataSourceName) { if (DataSourceUtil.isDefault(dataSourceName)) { return dataSourcesRuntimeConfig.defaultDataSource; } DataSourceRuntimeConfig namedConfig = dataSourcesRuntimeConfig.namedDataSources.get(dataSourceName); return namedConfig != null ? namedConfig : new DataSourceRuntimeConfig(); } public DataSourceJdbcRuntimeConfig getDataSourceJdbcRuntimeConfig(String dataSourceName) { if (DataSourceUtil.isDefault(dataSourceName)) { return dataSourcesJdbcRuntimeConfig.jdbc; } DataSourceJdbcOuterNamedRuntimeConfig namedOuterConfig = dataSourcesJdbcRuntimeConfig.namedDataSources .get(dataSourceName); return namedOuterConfig != null ? namedOuterConfig.jdbc : new DataSourceJdbcRuntimeConfig(); } public LegacyDataSourceJdbcBuildTimeConfig getLegacyDataSourceJdbcBuildTimeConfig(String dataSourceName) { if (DataSourceUtil.isDefault(dataSourceName)) { return legacyDataSourcesJdbcBuildTimeConfig.defaultDataSource; } LegacyDataSourceJdbcBuildTimeConfig namedConfig = legacyDataSourcesJdbcBuildTimeConfig.namedDataSources .get(dataSourceName); return namedConfig != null ? 
namedConfig : new LegacyDataSourceJdbcBuildTimeConfig(); } public LegacyDataSourceRuntimeConfig getLegacyDataSourceRuntimeConfig(String dataSourceName) { if (DataSourceUtil.isDefault(dataSourceName)) { return legacyDataSourcesRuntimeConfig.defaultDataSource; } LegacyDataSourceRuntimeConfig namedConfig = legacyDataSourcesRuntimeConfig.namedDataSources.get(dataSourceName); return namedConfig != null ? namedConfig : new LegacyDataSourceRuntimeConfig(); } public LegacyDataSourceJdbcRuntimeConfig getLegacyDataSourceJdbcRuntimeConfig(String dataSourceName) { if (DataSourceUtil.isDefault(dataSourceName)) { return legacyDataSourcesJdbcRuntimeConfig.defaultDataSource; } LegacyDataSourceJdbcRuntimeConfig namedConfig = legacyDataSourcesJdbcRuntimeConfig.namedDataSources .get(dataSourceName); return namedConfig != null ? namedConfig : new LegacyDataSourceJdbcRuntimeConfig(); } /** * Uses the {@link ServiceLoader * of the current {@link Thread */ private static void loadDriversInTCCL() { final ServiceLoader<Driver> drivers = ServiceLoader.load(Driver.class); final Iterator<Driver> iterator = drivers.iterator(); while (iterator.hasNext()) { try { iterator.next(); } catch (Throwable t) { } } } @PreDestroy public void stop() { for (AgroalDataSource dataSource : dataSources.values()) { if (dataSource != null) { dataSource.close(); } } } }
Probably not because for `Cache#get()` we only wrap the checked exceptions.
public CompletableFuture<Object> apply(Object key) { return (CompletableFuture<Object>) valueLoader.apply((K) key) .map(i -> NullValueConverter.toCacheValue(i)) .subscribeAsCompletionStage(); }
return (CompletableFuture<Object>) valueLoader.apply((K) key)
public CompletableFuture<Object> apply(Object key) { return valueLoader.apply((K) key) .map(TO_CACHE_VALUE) .subscribeAsCompletionStage(); }
class CaffeineCacheImpl extends AbstractCache implements CaffeineCache { private static final Logger LOGGER = Logger.getLogger(CaffeineCacheImpl.class); final AsyncCache<Object, Object> cache; private final CaffeineCacheInfo cacheInfo; private final StatsCounter statsCounter; public CaffeineCacheImpl(CaffeineCacheInfo cacheInfo, boolean recordStats) { this.cacheInfo = cacheInfo; Caffeine<Object, Object> builder = Caffeine.newBuilder(); if (cacheInfo.initialCapacity != null) { builder.initialCapacity(cacheInfo.initialCapacity); } if (cacheInfo.maximumSize != null) { builder.maximumSize(cacheInfo.maximumSize); } if (cacheInfo.expireAfterWrite != null) { builder.expireAfterWrite(cacheInfo.expireAfterWrite); } if (cacheInfo.expireAfterAccess != null) { builder.expireAfterAccess(cacheInfo.expireAfterAccess); } if (recordStats) { LOGGER.tracef("Recording Caffeine stats for cache [%s]", cacheInfo.name); statsCounter = new ConcurrentStatsCounter(); builder.recordStats(new Supplier<StatsCounter>() { @Override public StatsCounter get() { return statsCounter; } }); } else { LOGGER.tracef("Caffeine stats recording is disabled for cache [%s]", cacheInfo.name); statsCounter = StatsCounter.disabledStatsCounter(); } cache = builder.buildAsync(); } @Override public String getName() { return cacheInfo.name; } @Override public <K, V> Uni<V> get(K key, Function<K, V> valueLoader) { Objects.requireNonNull(key, NULL_KEYS_NOT_SUPPORTED_MSG); return Uni.createFrom().completionStage( /* * Even if CompletionStage is eager, the Supplier used below guarantees that the cache value computation will be * delayed until subscription time. In other words, the cache value computation is done lazily. 
*/ new Supplier<CompletionStage<V>>() { @Override public CompletionStage<V> get() { CompletionStage<Object> caffeineValue = getFromCaffeine(key, valueLoader); return cast(caffeineValue); } }); } @SuppressWarnings("unchecked") @Override public <K, V> Uni<V> getAsync(K key, Function<K, Uni<V>> valueLoader) { Objects.requireNonNull(key, NULL_KEYS_NOT_SUPPORTED_MSG); return (Uni<V>) Uni.createFrom() .completionStage(cache.asMap().computeIfAbsent(key, new Function<Object, CompletableFuture<Object>>() { @Override })); } @Override public <V> CompletableFuture<V> getIfPresent(Object key) { Objects.requireNonNull(key, NULL_KEYS_NOT_SUPPORTED_MSG); CompletableFuture<Object> existingCacheValue = cache.getIfPresent(key); if (existingCacheValue == null) { statsCounter.recordMisses(1); return null; } else { LOGGER.tracef("Key [%s] found in cache [%s]", key, cacheInfo.name); statsCounter.recordHits(1); return unwrapCacheValueOrThrowable(existingCacheValue) .thenApply(new Function<>() { @Override public V apply(Object value) { try { return (V) value; } catch (ClassCastException e) { throw new CacheException("An existing cached value type does not match the requested type", e); } } }); } } /** * Returns a {@link CompletableFuture} holding the cache value identified by {@code key}, obtaining that value from * {@code valueLoader} if necessary. The value computation is done synchronously on the calling thread and the * {@link CompletableFuture} is immediately completed before being returned. 
* * @param key cache key * @param valueLoader function used to compute the cache value if {@code key} is not already associated with a value * @return a {@link CompletableFuture} holding the cache value * @throws CacheException if an exception is thrown during the cache value computation */ private <K, V> CompletableFuture<Object> getFromCaffeine(K key, Function<K, V> valueLoader) { CompletableFuture<Object> newCacheValue = new CompletableFuture<>(); CompletableFuture<Object> existingCacheValue = cache.asMap().putIfAbsent(key, newCacheValue); if (existingCacheValue == null) { statsCounter.recordMisses(1); try { Object value = valueLoader.apply(key); newCacheValue.complete(NullValueConverter.toCacheValue(value)); } catch (Throwable t) { cache.asMap().remove(key, newCacheValue); newCacheValue.complete(new CaffeineComputationThrowable(t)); } return unwrapCacheValueOrThrowable(newCacheValue); } else { LOGGER.tracef("Key [%s] found in cache [%s]", key, cacheInfo.name); statsCounter.recordHits(1); return unwrapCacheValueOrThrowable(existingCacheValue); } } private CompletableFuture<Object> unwrapCacheValueOrThrowable(CompletableFuture<Object> cacheValue) { return cacheValue.thenApply(new Function<>() { @Override public Object apply(Object value) { if (value instanceof CaffeineComputationThrowable) { Throwable cause = ((CaffeineComputationThrowable) value).getCause(); if (cause instanceof RuntimeException) { throw (RuntimeException) cause; } else { throw new CacheException(cause); } } else { return NullValueConverter.fromCacheValue(value); } } }); } @Override public Uni<Void> invalidate(Object key) { Objects.requireNonNull(key, NULL_KEYS_NOT_SUPPORTED_MSG); return Uni.createFrom().item(new Supplier<Void>() { @Override public Void get() { cache.synchronous().invalidate(key); return null; } }); } @Override public Uni<Void> invalidateAll() { return Uni.createFrom().item(new Supplier<Void>() { @Override public Void get() { cache.synchronous().invalidateAll(); return null; } 
}); } @Override public Uni<Void> replaceUniValue(Object key, Object emittedValue) { return Uni.createFrom().item(new Supplier<Void>() { @Override public Void get() { cache.asMap().computeIfPresent(key, new BiFunction<Object, CompletableFuture<Object>, CompletableFuture<Object>>() { @Override public CompletableFuture<Object> apply(Object k, CompletableFuture<Object> currentValue) { LOGGER.debugf("Replacing Uni value entry with key [%s] into cache [%s]", key, cacheInfo.name); /* * The following computed value will always replace the current cache value (whether it is an * UnresolvedUniValue or not) if this method is called multiple times with the same key. */ return CompletableFuture.completedFuture(NullValueConverter.toCacheValue(emittedValue)); } }); return null; } }); } @Override public Set<Object> keySet() { return Collections.unmodifiableSet(new HashSet<>(cache.asMap().keySet())); } @Override public <V> void put(Object key, CompletableFuture<V> valueFuture) { cache.put(key, (CompletableFuture<Object>) valueFuture); } @Override public void setExpireAfterWrite(Duration duration) { Optional<FixedExpiration<Object, Object>> fixedExpiration = cache.synchronous().policy().expireAfterWrite(); if (fixedExpiration.isPresent()) { fixedExpiration.get().setExpiresAfter(duration); cacheInfo.expireAfterWrite = duration; } else { throw new IllegalStateException("The write-based expiration policy can only be changed if the cache was " + "constructed with an expire-after-write configuration value"); } } @Override public void setExpireAfterAccess(Duration duration) { Optional<FixedExpiration<Object, Object>> fixedExpiration = cache.synchronous().policy().expireAfterAccess(); if (fixedExpiration.isPresent()) { fixedExpiration.get().setExpiresAfter(duration); cacheInfo.expireAfterAccess = duration; } else { throw new IllegalStateException("The access-based expiration policy can only be changed if the cache was " + "constructed with an expire-after-access configuration value"); } } 
@Override public void setMaximumSize(long maximumSize) { Optional<Policy.Eviction<Object, Object>> eviction = cache.synchronous().policy().eviction(); if (eviction.isPresent()) { eviction.get().setMaximum(maximumSize); cacheInfo.maximumSize = maximumSize; } else { throw new IllegalStateException("The maximum size can only be changed if the cache was constructed with a " + "maximum-size configuration value"); } } public CaffeineCacheInfo getCacheInfo() { return cacheInfo; } public long getSize() { return cache.synchronous().estimatedSize(); } @SuppressWarnings("unchecked") private <T> T cast(Object value) { try { return (T) value; } catch (ClassCastException e) { throw new CacheException( "An existing cached value type does not match the type returned by the value loading function", e); } } }
class CaffeineCacheImpl extends AbstractCache implements CaffeineCache { private static final Logger LOGGER = Logger.getLogger(CaffeineCacheImpl.class); final AsyncCache<Object, Object> cache; private final CaffeineCacheInfo cacheInfo; private final StatsCounter statsCounter; private final boolean recordStats; public CaffeineCacheImpl(CaffeineCacheInfo cacheInfo, boolean recordStats) { this.cacheInfo = cacheInfo; Caffeine<Object, Object> builder = Caffeine.newBuilder(); if (cacheInfo.initialCapacity != null) { builder.initialCapacity(cacheInfo.initialCapacity); } if (cacheInfo.maximumSize != null) { builder.maximumSize(cacheInfo.maximumSize); } if (cacheInfo.expireAfterWrite != null) { builder.expireAfterWrite(cacheInfo.expireAfterWrite); } if (cacheInfo.expireAfterAccess != null) { builder.expireAfterAccess(cacheInfo.expireAfterAccess); } this.recordStats = recordStats; if (recordStats) { LOGGER.tracef("Recording Caffeine stats for cache [%s]", cacheInfo.name); statsCounter = new ConcurrentStatsCounter(); builder.recordStats(new Supplier<StatsCounter>() { @Override public StatsCounter get() { return statsCounter; } }); } else { LOGGER.tracef("Caffeine stats recording is disabled for cache [%s]", cacheInfo.name); statsCounter = StatsCounter.disabledStatsCounter(); } cache = builder.buildAsync(); } @Override public String getName() { return cacheInfo.name; } @Override public <K, V> Uni<V> get(K key, Function<K, V> valueLoader) { Objects.requireNonNull(key, NULL_KEYS_NOT_SUPPORTED_MSG); return Uni.createFrom().completionStage( /* * Even if CompletionStage is eager, the Supplier used below guarantees that the cache value computation will be * delayed until subscription time. In other words, the cache value computation is done lazily. 
*/ new Supplier<CompletionStage<V>>() { @Override public CompletionStage<V> get() { CompletionStage<Object> caffeineValue = getFromCaffeine(key, valueLoader); return cast(caffeineValue); } }); } @Override public <K, V> Uni<V> getAsync(K key, Function<K, Uni<V>> valueLoader) { Objects.requireNonNull(key, NULL_KEYS_NOT_SUPPORTED_MSG); return Uni.createFrom() .completionStage(new Supplier<CompletionStage<V>>() { @Override public CompletionStage<V> get() { return recordStats ? computeWithStats(key, valueLoader) : computeWithoutStats(key, valueLoader); } }).map(fromCacheValue()); } @Override public <V> CompletableFuture<V> getIfPresent(Object key) { Objects.requireNonNull(key, NULL_KEYS_NOT_SUPPORTED_MSG); CompletableFuture<Object> existingCacheValue = cache.getIfPresent(key); if (existingCacheValue == null) { statsCounter.recordMisses(1); return null; } else { LOGGER.tracef("Key [%s] found in cache [%s]", key, cacheInfo.name); statsCounter.recordHits(1); return unwrapCacheValueOrThrowable(existingCacheValue) .thenApply(new Function<>() { @Override public V apply(Object value) { try { return (V) value; } catch (ClassCastException e) { throw new CacheException("An existing cached value type does not match the requested type", e); } } }); } } /** * Returns a {@link CompletableFuture} holding the cache value identified by {@code key}, obtaining that value from * {@code valueLoader} if necessary. The value computation is done synchronously on the calling thread and the * {@link CompletableFuture} is immediately completed before being returned. 
* * @param key cache key * @param valueLoader function used to compute the cache value if {@code key} is not already associated with a value * @return a {@link CompletableFuture} holding the cache value * @throws CacheException if an exception is thrown during the cache value computation */ private <K, V> CompletableFuture<Object> getFromCaffeine(K key, Function<K, V> valueLoader) { CompletableFuture<Object> newCacheValue = new CompletableFuture<>(); CompletableFuture<Object> existingCacheValue = cache.asMap().putIfAbsent(key, newCacheValue); if (existingCacheValue == null) { statsCounter.recordMisses(1); try { Object value = valueLoader.apply(key); newCacheValue.complete(NullValueConverter.toCacheValue(value)); } catch (Throwable t) { cache.asMap().remove(key, newCacheValue); newCacheValue.complete(new CaffeineComputationThrowable(t)); } return unwrapCacheValueOrThrowable(newCacheValue); } else { LOGGER.tracef("Key [%s] found in cache [%s]", key, cacheInfo.name); statsCounter.recordHits(1); return unwrapCacheValueOrThrowable(existingCacheValue); } } private CompletableFuture<Object> unwrapCacheValueOrThrowable(CompletableFuture<Object> cacheValue) { return cacheValue.thenApply(new Function<>() { @Override public Object apply(Object value) { if (value instanceof CaffeineComputationThrowable) { Throwable cause = ((CaffeineComputationThrowable) value).getCause(); if (cause instanceof RuntimeException) { throw (RuntimeException) cause; } else { throw new CacheException(cause); } } else { return NullValueConverter.fromCacheValue(value); } } }); } @Override public Uni<Void> invalidate(Object key) { Objects.requireNonNull(key, NULL_KEYS_NOT_SUPPORTED_MSG); return Uni.createFrom().item(new Supplier<Void>() { @Override public Void get() { cache.synchronous().invalidate(key); return null; } }); } @Override public Uni<Void> invalidateAll() { return Uni.createFrom().item(new Supplier<Void>() { @Override public Void get() { cache.synchronous().invalidateAll(); return null; } 
}); } @Override public Uni<Void> invalidateIf(Predicate<Object> predicate) { return Uni.createFrom().item(new Supplier<Void>() { @Override public Void get() { cache.asMap().keySet().removeIf(predicate); return null; } }); } @Override public Set<Object> keySet() { return Collections.unmodifiableSet(new HashSet<>(cache.asMap().keySet())); } @Override public <V> void put(Object key, CompletableFuture<V> valueFuture) { cache.put(key, (CompletableFuture<Object>) valueFuture); } @Override public void setExpireAfterWrite(Duration duration) { Optional<FixedExpiration<Object, Object>> fixedExpiration = cache.synchronous().policy().expireAfterWrite(); if (fixedExpiration.isPresent()) { fixedExpiration.get().setExpiresAfter(duration); cacheInfo.expireAfterWrite = duration; } else { throw new IllegalStateException("The write-based expiration policy can only be changed if the cache was " + "constructed with an expire-after-write configuration value"); } } @Override public void setExpireAfterAccess(Duration duration) { Optional<FixedExpiration<Object, Object>> fixedExpiration = cache.synchronous().policy().expireAfterAccess(); if (fixedExpiration.isPresent()) { fixedExpiration.get().setExpiresAfter(duration); cacheInfo.expireAfterAccess = duration; } else { throw new IllegalStateException("The access-based expiration policy can only be changed if the cache was " + "constructed with an expire-after-access configuration value"); } } @Override public void setMaximumSize(long maximumSize) { Optional<Policy.Eviction<Object, Object>> eviction = cache.synchronous().policy().eviction(); if (eviction.isPresent()) { eviction.get().setMaximum(maximumSize); cacheInfo.maximumSize = maximumSize; } else { throw new IllegalStateException("The maximum size can only be changed if the cache was constructed with a " + "maximum-size configuration value"); } } public CaffeineCacheInfo getCacheInfo() { return cacheInfo; } public long getSize() { return cache.synchronous().estimatedSize(); } 
@SuppressWarnings("unchecked") private <T> T cast(Object value) { try { return (T) value; } catch (ClassCastException e) { throw new CacheException( "An existing cached value type does not match the type returned by the value loading function", e); } } @SuppressWarnings("unchecked") private <K, V> CompletionStage<V> computeWithStats(K key, Function<K, Uni<V>> valueLoader) { return (CompletionStage<V>) cache.asMap().compute(key, new BiFunction<Object, CompletableFuture<Object>, CompletableFuture<Object>>() { @Override public CompletableFuture<Object> apply(Object key, CompletableFuture<Object> value) { if (value == null) { statsCounter.recordMisses(1); return valueLoader.apply((K) key) .map(TO_CACHE_VALUE) .subscribeAsCompletionStage(); } else { LOGGER.tracef("Key [%s] found in cache [%s]", key, cacheInfo.name); statsCounter.recordHits(1); return value; } } }); } @SuppressWarnings("unchecked") private <K, V> CompletionStage<V> computeWithoutStats(K key, Function<K, Uni<V>> valueLoader) { return (CompletionStage<V>) cache.asMap().computeIfAbsent(key, new Function<Object, CompletableFuture<Object>>() { @Override }); } @SuppressWarnings("unchecked") private <V> Function<V, V> fromCacheValue() { return (Function<V, V>) FROM_CACHE_VALUE; } private static final Function<Object, Object> FROM_CACHE_VALUE = new Function<Object, Object>() { @Override public Object apply(Object value) { return NullValueConverter.fromCacheValue(value); } }; private static final Function<Object, Object> TO_CACHE_VALUE = new Function<Object, Object>() { @Override public Object apply(Object value) { return NullValueConverter.toCacheValue(value); } }; }
That is correct. But we don't need the `RuntimeException` anymore. We can just use `FutureUtils.completeExceptionally(e)` with the `InterruptedException` here
public void testJobBeingMarkedAsDirtyBeforeCleanup() throws Exception { final OneShotLatch markAsDirtyLatch = new OneShotLatch(); final TestingDispatcher.Builder dispatcherBuilder = createTestingDispatcherBuilder() .setJobResultStore( TestingJobResultStore.builder() .withCreateDirtyResultConsumer( ignoredJobResultEntry -> { CompletableFuture<Boolean> result = new CompletableFuture<>(); try { markAsDirtyLatch.await(); } catch (InterruptedException e) { result.completeExceptionally( new RuntimeException(e)); } result.complete(true); return result; }) .build()); final TestingJobManagerRunnerFactory jobManagerRunnerFactory = startDispatcherAndSubmitJob(dispatcherBuilder, 0); finishJob(jobManagerRunnerFactory.takeCreatedJobManagerRunner()); assertThatNoCleanupWasTriggered(); markAsDirtyLatch.trigger(); assertGlobalCleanupTriggered(jobId); }
new RuntimeException(e));
public void testJobBeingMarkedAsDirtyBeforeCleanup() throws Exception { final OneShotLatch markAsDirtyLatch = new OneShotLatch(); final TestingDispatcher.Builder dispatcherBuilder = createTestingDispatcherBuilder() .setJobResultStore( TestingJobResultStore.builder() .withCreateDirtyResultConsumer( ignoredJobResultEntry -> { try { markAsDirtyLatch.await(); } catch (InterruptedException e) { Thread.currentThread().interrupt(); return FutureUtils.completedExceptionally( e); } return FutureUtils.completedVoidFuture(); }) .build()); final TestingJobManagerRunnerFactory jobManagerRunnerFactory = startDispatcherAndSubmitJob(dispatcherBuilder, 0); finishJob(jobManagerRunnerFactory.takeCreatedJobManagerRunner()); assertThatNoCleanupWasTriggered(); markAsDirtyLatch.trigger(); assertGlobalCleanupTriggered(jobId); }
class DispatcherResourceCleanupTest extends TestLogger { @ClassRule public static TemporaryFolder temporaryFolder = new TemporaryFolder(); @Rule public ExpectedException expectedException = ExpectedException.none(); @Rule public final TestingFatalErrorHandlerResource testingFatalErrorHandlerResource = new TestingFatalErrorHandlerResource(); private static final Time timeout = Time.seconds(10L); private static TestingRpcService rpcService; private JobID jobId; private JobGraph jobGraph; private TestingDispatcher dispatcher; private DispatcherGateway dispatcherGateway; private BlobServer blobServer; private CompletableFuture<JobID> localCleanupFuture; private CompletableFuture<JobID> globalCleanupFuture; @BeforeClass public static void setupClass() { rpcService = new TestingRpcService(); } @Before public void setup() throws Exception { jobGraph = JobGraphTestUtils.singleNoOpJobGraph(); jobId = jobGraph.getJobID(); globalCleanupFuture = new CompletableFuture<>(); localCleanupFuture = new CompletableFuture<>(); blobServer = BlobUtils.createBlobServer( new Configuration(), Reference.owned(temporaryFolder.newFolder()), new TestingBlobStoreBuilder().createTestingBlobStore()); } private TestingJobManagerRunnerFactory startDispatcherAndSubmitJob() throws Exception { return startDispatcherAndSubmitJob(0); } private TestingJobManagerRunnerFactory startDispatcherAndSubmitJob( int numBlockingJobManagerRunners) throws Exception { return startDispatcherAndSubmitJob( createTestingDispatcherBuilder(), numBlockingJobManagerRunners); } private TestingJobManagerRunnerFactory startDispatcherAndSubmitJob( TestingDispatcher.Builder dispatcherBuilder, int numBlockingJobManagerRunners) throws Exception { final TestingJobMasterServiceLeadershipRunnerFactory testingJobManagerRunnerFactoryNG = new TestingJobMasterServiceLeadershipRunnerFactory(numBlockingJobManagerRunners); startDispatcher(dispatcherBuilder, testingJobManagerRunnerFactoryNG); submitJobAndWait(); return 
testingJobManagerRunnerFactoryNG; } private void startDispatcher(JobManagerRunnerFactory jobManagerRunnerFactory) throws Exception { startDispatcher(createTestingDispatcherBuilder(), jobManagerRunnerFactory); } private void startDispatcher( TestingDispatcher.Builder dispatcherBuilder, JobManagerRunnerFactory jobManagerRunnerFactory) throws Exception { dispatcher = dispatcherBuilder .setJobManagerRunnerFactory(jobManagerRunnerFactory) .build(rpcService); dispatcher.start(); dispatcherGateway = dispatcher.getSelfGateway(DispatcherGateway.class); } private TestingDispatcher.Builder createTestingDispatcherBuilder() { final JobManagerRunnerRegistry jobManagerRunnerRegistry = new DefaultJobManagerRunnerRegistry(2); return TestingDispatcher.builder() .setBlobServer(blobServer) .setJobManagerRunnerRegistry(jobManagerRunnerRegistry) .setFatalErrorHandler(testingFatalErrorHandlerResource.getFatalErrorHandler()) .setResourceCleanerFactory( TestingResourceCleanerFactory.builder() .withLocallyCleanableResource(jobManagerRunnerRegistry) .withGloballyCleanableResource( (jobId, ignoredExecutor) -> { globalCleanupFuture.complete(jobId); return FutureUtils.completedVoidFuture(); }) .withLocallyCleanableResource( (jobId, ignoredExecutor) -> { localCleanupFuture.complete(jobId); return FutureUtils.completedVoidFuture(); }) .build()); } @After public void teardown() throws Exception { if (dispatcher != null) { dispatcher.close(); } if (blobServer != null) { blobServer.close(); } } @AfterClass public static void teardownClass() throws ExecutionException, InterruptedException { if (rpcService != null) { rpcService.closeAsync().get(); } } @Test public void testGlobalCleanupWhenJobFinished() throws Exception { final TestingJobManagerRunnerFactory jobManagerRunnerFactory = startDispatcherAndSubmitJob(); finishJob(jobManagerRunnerFactory.takeCreatedJobManagerRunner()); assertGlobalCleanupTriggered(jobId); } @Test public void testGlobalCleanupWhenJobCanceled() throws Exception { final 
TestingJobManagerRunnerFactory jobManagerRunnerFactory = startDispatcherAndSubmitJob(); cancelJob(jobManagerRunnerFactory.takeCreatedJobManagerRunner()); assertGlobalCleanupTriggered(jobId); } private CompletableFuture<Acknowledge> submitJob() { return dispatcherGateway.submitJob(jobGraph, timeout); } private void submitJobAndWait() { submitJob().join(); } @Test public void testLocalCleanupWhenJobNotFinished() throws Exception { final TestingJobManagerRunnerFactory jobManagerRunnerFactory = startDispatcherAndSubmitJob(); final TestingJobManagerRunner testingJobManagerRunner = jobManagerRunnerFactory.takeCreatedJobManagerRunner(); suspendJob(testingJobManagerRunner); assertLocalCleanupTriggered(jobId); } @Test public void testGlobalCleanupWhenJobSubmissionFails() throws Exception { startDispatcher(new FailingJobManagerRunnerFactory(new FlinkException("Test exception"))); final CompletableFuture<Acknowledge> submissionFuture = submitJob(); try { submissionFuture.get(); fail("Job submission was expected to fail."); } catch (ExecutionException ee) { assertThat(ee, containsCause(JobSubmissionException.class)); } assertGlobalCleanupTriggered(jobId); } @Test public void testLocalCleanupWhenClosingDispatcher() throws Exception { startDispatcherAndSubmitJob(); dispatcher.closeAsync().get(); assertLocalCleanupTriggered(jobId); } @Test public void testGlobalCleanupWhenJobFinishedWhileClosingDispatcher() throws Exception { final TestingJobManagerRunner testingJobManagerRunner = TestingJobManagerRunner.newBuilder() .setBlockingTermination(true) .setJobId(jobId) .build(); final Queue<JobManagerRunner> jobManagerRunners = new ArrayDeque<>(Arrays.asList(testingJobManagerRunner)); startDispatcher(new QueueJobManagerRunnerFactory(jobManagerRunners)); submitJobAndWait(); final CompletableFuture<Void> dispatcherTerminationFuture = dispatcher.closeAsync(); testingJobManagerRunner.getCloseAsyncCalledLatch().await(); testingJobManagerRunner.completeResultFuture( new ExecutionGraphInfo( 
new ArchivedExecutionGraphBuilder() .setJobID(jobId) .setState(JobStatus.FINISHED) .build())); testingJobManagerRunner.completeTerminationFuture(); dispatcherTerminationFuture.get(); assertGlobalCleanupTriggered(jobId); } @Test @Test public void testJobBeingMarkedAsCleanAfterCleanup() throws Exception { final CompletableFuture<JobID> markAsCleanFuture = new CompletableFuture<>(); final JobResultStore jobResultStore = TestingJobResultStore.builder() .withMarkResultAsCleanConsumer( jobID -> { markAsCleanFuture.complete(jobID); return CompletableFuture.completedFuture(null); }) .build(); final OneShotLatch localCleanupLatch = new OneShotLatch(); final OneShotLatch globalCleanupLatch = new OneShotLatch(); final TestingResourceCleanerFactory resourceCleanerFactory = TestingResourceCleanerFactory.builder() .withLocallyCleanableResource( (ignoredJobId, ignoredExecutor) -> { try { localCleanupLatch.await(); } catch (InterruptedException e) { throw new RuntimeException(e); } return FutureUtils.completedVoidFuture(); }) .withGloballyCleanableResource( (ignoredJobId, ignoredExecutor) -> { try { globalCleanupLatch.await(); } catch (InterruptedException e) { throw new RuntimeException(e); } return FutureUtils.completedVoidFuture(); }) .build(); final TestingDispatcher.Builder dispatcherBuilder = createTestingDispatcherBuilder() .setJobResultStore(jobResultStore) .setResourceCleanerFactory(resourceCleanerFactory); final TestingJobManagerRunnerFactory jobManagerRunnerFactory = startDispatcherAndSubmitJob(dispatcherBuilder, 0); finishJob(jobManagerRunnerFactory.takeCreatedJobManagerRunner()); assertThat(markAsCleanFuture.isDone(), is(false)); localCleanupLatch.trigger(); assertThat(markAsCleanFuture.isDone(), is(false)); globalCleanupLatch.trigger(); assertThat(markAsCleanFuture.get(), is(jobId)); } /** * Tests that the previous JobManager needs to be completely terminated before a new job with * the same {@link JobID} is started. 
*/ @Test public void testJobSubmissionUnderSameJobId() throws Exception { final TestingJobManagerRunnerFactory jobManagerRunnerFactory = startDispatcherAndSubmitJob(1); final TestingJobManagerRunner testingJobManagerRunner = jobManagerRunnerFactory.takeCreatedJobManagerRunner(); suspendJob(testingJobManagerRunner); testingJobManagerRunner.getCloseAsyncCalledLatch().await(); final CompletableFuture<Acknowledge> submissionFuture = dispatcherGateway.submitJob(jobGraph, timeout); try { submissionFuture.get(10L, TimeUnit.MILLISECONDS); fail( "The job submission future should not complete until the previous JobManager " + "termination future has been completed."); } catch (TimeoutException ignored) { } finally { testingJobManagerRunner.completeTerminationFuture(); } assertThat(submissionFuture.get(), equalTo(Acknowledge.get())); } /** * Tests that a duplicate job submission won't delete any job meta data (submitted job graphs, * blobs, etc.). */ @Test public void testDuplicateJobSubmissionDoesNotDeleteJobMetaData() throws Exception { final TestingJobManagerRunnerFactory testingJobManagerRunnerFactoryNG = startDispatcherAndSubmitJob(); final CompletableFuture<Acknowledge> submissionFuture = dispatcherGateway.submitJob(jobGraph, timeout); try { try { submissionFuture.get(); fail("Expected a DuplicateJobSubmissionFailure."); } catch (ExecutionException ee) { assertThat( ExceptionUtils.findThrowable(ee, DuplicateJobSubmissionException.class) .isPresent(), is(true)); } assertThatNoCleanupWasTriggered(); } finally { finishJob(testingJobManagerRunnerFactoryNG.takeCreatedJobManagerRunner()); } assertGlobalCleanupTriggered(jobId); } private void finishJob(TestingJobManagerRunner takeCreatedJobManagerRunner) { terminateJobWithState(takeCreatedJobManagerRunner, JobStatus.FINISHED); } private void suspendJob(TestingJobManagerRunner takeCreatedJobManagerRunner) { terminateJobWithState(takeCreatedJobManagerRunner, JobStatus.SUSPENDED); } private void cancelJob(TestingJobManagerRunner 
takeCreatedJobManagerRunner) { terminateJobWithState(takeCreatedJobManagerRunner, JobStatus.CANCELED); } private void terminateJobWithState( TestingJobManagerRunner takeCreatedJobManagerRunner, JobStatus state) { takeCreatedJobManagerRunner.completeResultFuture( new ExecutionGraphInfo( new ArchivedExecutionGraphBuilder() .setJobID(jobId) .setState(state) .build())); } private void assertThatNoCleanupWasTriggered() { assertThat(globalCleanupFuture.isDone(), is(false)); assertThat(localCleanupFuture.isDone(), is(false)); } @Test public void testDispatcherTerminationTerminatesRunningJobMasters() throws Exception { final TestingJobManagerRunnerFactory jobManagerRunnerFactory = startDispatcherAndSubmitJob(); dispatcher.closeAsync().get(); final TestingJobManagerRunner jobManagerRunner = jobManagerRunnerFactory.takeCreatedJobManagerRunner(); assertThat(jobManagerRunner.getTerminationFuture().isDone(), is(true)); } /** Tests that terminating the Dispatcher will wait for all JobMasters to be terminated. 
*/ @Test public void testDispatcherTerminationWaitsForJobMasterTerminations() throws Exception { final TestingJobManagerRunnerFactory jobManagerRunnerFactory = startDispatcherAndSubmitJob(1); final CompletableFuture<Void> dispatcherTerminationFuture = dispatcher.closeAsync(); try { dispatcherTerminationFuture.get(10L, TimeUnit.MILLISECONDS); fail("We should not terminate before all running JobMasters have terminated."); } catch (TimeoutException ignored) { } finally { jobManagerRunnerFactory.takeCreatedJobManagerRunner().completeTerminationFuture(); } dispatcherTerminationFuture.get(); } private void assertLocalCleanupTriggered(JobID jobId) throws ExecutionException, InterruptedException, TimeoutException { assertThat(localCleanupFuture.get(), equalTo(jobId)); assertThat(globalCleanupFuture.isDone(), is(false)); } private void assertGlobalCleanupTriggered(JobID jobId) throws ExecutionException, InterruptedException, TimeoutException { assertThat(localCleanupFuture.isDone(), is(false)); assertThat(globalCleanupFuture.get(), equalTo(jobId)); } @Test public void testFatalErrorIfJobCannotBeMarkedDirtyInJobResultStore() throws Exception { final JobResultStore jobResultStore = TestingJobResultStore.builder() .withCreateDirtyResultConsumer( jobResult -> { CompletableFuture<Boolean> future = new CompletableFuture<>(); future.completeExceptionally( new IOException("Expected IOException.")); return future; }) .build(); final TestingJobManagerRunnerFactory jobManagerRunnerFactory = startDispatcherAndSubmitJob( createTestingDispatcherBuilder().setJobResultStore(jobResultStore), 0); ArchivedExecutionGraph executionGraph = new ArchivedExecutionGraphBuilder() .setJobID(jobId) .setState(JobStatus.FINISHED) .build(); final TestingJobManagerRunner testingJobManagerRunner = jobManagerRunnerFactory.takeCreatedJobManagerRunner(); testingJobManagerRunner.completeResultFuture(new ExecutionGraphInfo(executionGraph)); final CompletableFuture<? 
extends Throwable> errorFuture = this.testingFatalErrorHandlerResource.getFatalErrorHandler().getErrorFuture(); assertThat( errorFuture.get(100, TimeUnit.MILLISECONDS), IsInstanceOf.instanceOf(FlinkException.class)); testingFatalErrorHandlerResource.getFatalErrorHandler().clearError(); } @Test public void testErrorHandlingIfJobCannotBeMarkedAsCleanInJobResultStore() throws Exception { final CompletableFuture<JobResultEntry> dirtyJobFuture = new CompletableFuture<>(); final JobResultStore jobResultStore = TestingJobResultStore.builder() .withCreateDirtyResultConsumer( jobResultEntry -> { dirtyJobFuture.complete(jobResultEntry); return CompletableFuture.completedFuture(true); }) .withMarkResultAsCleanConsumer( jobId -> { CompletableFuture<Void> result = new CompletableFuture<>(); result.completeExceptionally( new IOException("Expected IOException.")); return result; }) .build(); final TestingJobManagerRunnerFactory jobManagerRunnerFactory = startDispatcherAndSubmitJob( createTestingDispatcherBuilder().setJobResultStore(jobResultStore), 0); ArchivedExecutionGraph executionGraph = new ArchivedExecutionGraphBuilder() .setJobID(jobId) .setState(JobStatus.FINISHED) .build(); final TestingJobManagerRunner testingJobManagerRunner = jobManagerRunnerFactory.takeCreatedJobManagerRunner(); testingJobManagerRunner.completeResultFuture(new ExecutionGraphInfo(executionGraph)); final CompletableFuture<? extends Throwable> errorFuture = this.testingFatalErrorHandlerResource.getFatalErrorHandler().getErrorFuture(); try { final Throwable unexpectedError = errorFuture.get(100, TimeUnit.MILLISECONDS); fail( "No error should have been reported but an " + unexpectedError.getClass() + " was handled."); } catch (TimeoutException e) { } assertThat(dirtyJobFuture.get().getJobId(), is(jobId)); } /** Tests that a failing {@link JobManagerRunner} will be properly cleaned up. 
*/ @Test public void testFailingJobManagerRunnerCleanup() throws Exception { final FlinkException testException = new FlinkException("Test exception."); final ArrayBlockingQueue<Optional<Exception>> queue = new ArrayBlockingQueue<>(2); final BlockingJobManagerRunnerFactory blockingJobManagerRunnerFactory = new BlockingJobManagerRunnerFactory( () -> { final Optional<Exception> maybeException = queue.take(); if (maybeException.isPresent()) { throw maybeException.get(); } }); startDispatcher(blockingJobManagerRunnerFactory); final DispatcherGateway dispatcherGateway = dispatcher.getSelfGateway(DispatcherGateway.class); queue.offer(Optional.of(testException)); try { dispatcherGateway.submitJob(jobGraph, Time.minutes(1)).get(); fail("A FlinkException is expected"); } catch (Throwable expectedException) { assertThat(expectedException, containsCause(FlinkException.class)); assertThat(expectedException, containsMessage(testException.getMessage())); assertGlobalCleanupTriggered(jobId); } queue.offer(Optional.empty()); dispatcherGateway.submitJob(jobGraph, Time.minutes(1L)).get(); blockingJobManagerRunnerFactory.setJobStatus(JobStatus.RUNNING); awaitStatus(dispatcherGateway, jobId, JobStatus.RUNNING); } @Test public void testArchivingFinishedJobToHistoryServer() throws Exception { final CompletableFuture<Acknowledge> archiveFuture = new CompletableFuture<>(); final TestingDispatcher.Builder testingDispatcherBuilder = createTestingDispatcherBuilder() .setHistoryServerArchivist(executionGraphInfo -> archiveFuture); final TestingJobManagerRunnerFactory jobManagerRunnerFactory = startDispatcherAndSubmitJob(testingDispatcherBuilder, 0); finishJob(jobManagerRunnerFactory.takeCreatedJobManagerRunner()); assertThatNoCleanupWasTriggered(); final CompletableFuture<Void> jobTerminationFuture = dispatcher.getJobTerminationFuture(jobId, Time.hours(1)); assertFalse(jobTerminationFuture.isDone()); archiveFuture.complete(Acknowledge.get()); assertGlobalCleanupTriggered(jobId); 
jobTerminationFuture.join(); } @Test public void testNotArchivingSuspendedJobToHistoryServer() throws Exception { final AtomicBoolean isArchived = new AtomicBoolean(false); final TestingDispatcher.Builder testingDispatcherBuilder = createTestingDispatcherBuilder() .setHistoryServerArchivist( executionGraphInfo -> { isArchived.set(true); return CompletableFuture.completedFuture(Acknowledge.get()); }); final TestingJobManagerRunnerFactory jobManagerRunnerFactory = startDispatcherAndSubmitJob(testingDispatcherBuilder, 0); suspendJob(jobManagerRunnerFactory.takeCreatedJobManagerRunner()); assertLocalCleanupTriggered(jobId); dispatcher.getJobTerminationFuture(jobId, Time.hours(1)).join(); assertFalse(isArchived.get()); } private static final class BlockingJobManagerRunnerFactory extends TestingJobMasterServiceLeadershipRunnerFactory { private final ThrowingRunnable<Exception> jobManagerRunnerCreationLatch; private TestingJobManagerRunner testingRunner; BlockingJobManagerRunnerFactory(ThrowingRunnable<Exception> jobManagerRunnerCreationLatch) { this.jobManagerRunnerCreationLatch = jobManagerRunnerCreationLatch; } @Override public TestingJobManagerRunner createJobManagerRunner( JobGraph jobGraph, Configuration configuration, RpcService rpcService, HighAvailabilityServices highAvailabilityServices, HeartbeatServices heartbeatServices, JobManagerSharedServices jobManagerSharedServices, JobManagerJobMetricGroupFactory jobManagerJobMetricGroupFactory, FatalErrorHandler fatalErrorHandler, Collection<FailureEnricher> failureEnrichers, long initializationTimestamp) throws Exception { jobManagerRunnerCreationLatch.run(); this.testingRunner = super.createJobManagerRunner( jobGraph, configuration, rpcService, highAvailabilityServices, heartbeatServices, jobManagerSharedServices, jobManagerJobMetricGroupFactory, fatalErrorHandler, failureEnrichers, initializationTimestamp); TestingJobMasterGateway testingJobMasterGateway = new TestingJobMasterGatewayBuilder() .setRequestJobSupplier( 
() -> CompletableFuture.completedFuture( new ExecutionGraphInfo( ArchivedExecutionGraph .createSparseArchivedExecutionGraph( jobGraph.getJobID(), jobGraph.getName(), JobStatus.RUNNING, null, null, 1337)))) .build(); testingRunner.completeJobMasterGatewayFuture(testingJobMasterGateway); return testingRunner; } public void setJobStatus(JobStatus newStatus) { Preconditions.checkState( testingRunner != null, "JobManagerRunner must be created before this method is available"); this.testingRunner.setJobStatus(newStatus); } } private static final class QueueJobManagerRunnerFactory implements JobManagerRunnerFactory { private final Queue<? extends JobManagerRunner> jobManagerRunners; private QueueJobManagerRunnerFactory(Queue<? extends JobManagerRunner> jobManagerRunners) { this.jobManagerRunners = jobManagerRunners; } @Override public JobManagerRunner createJobManagerRunner( JobGraph jobGraph, Configuration configuration, RpcService rpcService, HighAvailabilityServices highAvailabilityServices, HeartbeatServices heartbeatServices, JobManagerSharedServices jobManagerServices, JobManagerJobMetricGroupFactory jobManagerJobMetricGroupFactory, FatalErrorHandler fatalErrorHandler, Collection<FailureEnricher> failureEnrichers, long initializationTimestamp) { return Optional.ofNullable(jobManagerRunners.poll()) .orElseThrow( () -> new IllegalStateException( "Cannot create more JobManagerRunners.")); } } private class FailingJobManagerRunnerFactory implements JobManagerRunnerFactory { private final Exception testException; public FailingJobManagerRunnerFactory(FlinkException testException) { this.testException = testException; } @Override public JobManagerRunner createJobManagerRunner( JobGraph jobGraph, Configuration configuration, RpcService rpcService, HighAvailabilityServices highAvailabilityServices, HeartbeatServices heartbeatServices, JobManagerSharedServices jobManagerServices, JobManagerJobMetricGroupFactory jobManagerJobMetricGroupFactory, FatalErrorHandler 
fatalErrorHandler, Collection<FailureEnricher> failureEnrichers, long initializationTimestamp) throws Exception { throw testException; } } }
class DispatcherResourceCleanupTest extends TestLogger { @ClassRule public static TemporaryFolder temporaryFolder = new TemporaryFolder(); @Rule public ExpectedException expectedException = ExpectedException.none(); @Rule public final TestingFatalErrorHandlerResource testingFatalErrorHandlerResource = new TestingFatalErrorHandlerResource(); private static final Time timeout = Time.seconds(10L); private static TestingRpcService rpcService; private JobID jobId; private JobGraph jobGraph; private TestingDispatcher dispatcher; private DispatcherGateway dispatcherGateway; private BlobServer blobServer; private CompletableFuture<JobID> localCleanupFuture; private CompletableFuture<JobID> globalCleanupFuture; @BeforeClass public static void setupClass() { rpcService = new TestingRpcService(); } @Before public void setup() throws Exception { jobGraph = JobGraphTestUtils.singleNoOpJobGraph(); jobId = jobGraph.getJobID(); globalCleanupFuture = new CompletableFuture<>(); localCleanupFuture = new CompletableFuture<>(); blobServer = BlobUtils.createBlobServer( new Configuration(), Reference.owned(temporaryFolder.newFolder()), new TestingBlobStoreBuilder().createTestingBlobStore()); } private TestingJobManagerRunnerFactory startDispatcherAndSubmitJob() throws Exception { return startDispatcherAndSubmitJob(0); } private TestingJobManagerRunnerFactory startDispatcherAndSubmitJob( int numBlockingJobManagerRunners) throws Exception { return startDispatcherAndSubmitJob( createTestingDispatcherBuilder(), numBlockingJobManagerRunners); } private TestingJobManagerRunnerFactory startDispatcherAndSubmitJob( TestingDispatcher.Builder dispatcherBuilder, int numBlockingJobManagerRunners) throws Exception { final TestingJobMasterServiceLeadershipRunnerFactory testingJobManagerRunnerFactoryNG = new TestingJobMasterServiceLeadershipRunnerFactory(numBlockingJobManagerRunners); startDispatcher(dispatcherBuilder, testingJobManagerRunnerFactoryNG); submitJobAndWait(); return 
testingJobManagerRunnerFactoryNG; } private void startDispatcher(JobManagerRunnerFactory jobManagerRunnerFactory) throws Exception { startDispatcher(createTestingDispatcherBuilder(), jobManagerRunnerFactory); } private void startDispatcher( TestingDispatcher.Builder dispatcherBuilder, JobManagerRunnerFactory jobManagerRunnerFactory) throws Exception { dispatcher = dispatcherBuilder .setJobManagerRunnerFactory(jobManagerRunnerFactory) .build(rpcService); dispatcher.start(); dispatcherGateway = dispatcher.getSelfGateway(DispatcherGateway.class); } private TestingDispatcher.Builder createTestingDispatcherBuilder() { final JobManagerRunnerRegistry jobManagerRunnerRegistry = new DefaultJobManagerRunnerRegistry(2); return TestingDispatcher.builder() .setBlobServer(blobServer) .setJobManagerRunnerRegistry(jobManagerRunnerRegistry) .setFatalErrorHandler(testingFatalErrorHandlerResource.getFatalErrorHandler()) .setResourceCleanerFactory( TestingResourceCleanerFactory.builder() .withLocallyCleanableResource(jobManagerRunnerRegistry) .withGloballyCleanableResource( (jobId, ignoredExecutor) -> { globalCleanupFuture.complete(jobId); return FutureUtils.completedVoidFuture(); }) .withLocallyCleanableResource( (jobId, ignoredExecutor) -> { localCleanupFuture.complete(jobId); return FutureUtils.completedVoidFuture(); }) .build()); } @After public void teardown() throws Exception { if (dispatcher != null) { dispatcher.close(); } if (blobServer != null) { blobServer.close(); } } @AfterClass public static void teardownClass() throws ExecutionException, InterruptedException { if (rpcService != null) { rpcService.closeAsync().get(); } } @Test public void testGlobalCleanupWhenJobFinished() throws Exception { final TestingJobManagerRunnerFactory jobManagerRunnerFactory = startDispatcherAndSubmitJob(); finishJob(jobManagerRunnerFactory.takeCreatedJobManagerRunner()); assertGlobalCleanupTriggered(jobId); } @Test public void testGlobalCleanupWhenJobCanceled() throws Exception { final 
TestingJobManagerRunnerFactory jobManagerRunnerFactory = startDispatcherAndSubmitJob(); cancelJob(jobManagerRunnerFactory.takeCreatedJobManagerRunner()); assertGlobalCleanupTriggered(jobId); } private CompletableFuture<Acknowledge> submitJob() { return dispatcherGateway.submitJob(jobGraph, timeout); } private void submitJobAndWait() { submitJob().join(); } @Test public void testLocalCleanupWhenJobNotFinished() throws Exception { final TestingJobManagerRunnerFactory jobManagerRunnerFactory = startDispatcherAndSubmitJob(); final TestingJobManagerRunner testingJobManagerRunner = jobManagerRunnerFactory.takeCreatedJobManagerRunner(); suspendJob(testingJobManagerRunner); assertLocalCleanupTriggered(jobId); } @Test public void testGlobalCleanupWhenJobSubmissionFails() throws Exception { startDispatcher(new FailingJobManagerRunnerFactory(new FlinkException("Test exception"))); final CompletableFuture<Acknowledge> submissionFuture = submitJob(); try { submissionFuture.get(); fail("Job submission was expected to fail."); } catch (ExecutionException ee) { assertThat(ee, containsCause(JobSubmissionException.class)); } assertGlobalCleanupTriggered(jobId); } @Test public void testLocalCleanupWhenClosingDispatcher() throws Exception { startDispatcherAndSubmitJob(); dispatcher.closeAsync().get(); assertLocalCleanupTriggered(jobId); } @Test public void testGlobalCleanupWhenJobFinishedWhileClosingDispatcher() throws Exception { final TestingJobManagerRunner testingJobManagerRunner = TestingJobManagerRunner.newBuilder() .setBlockingTermination(true) .setJobId(jobId) .build(); final Queue<JobManagerRunner> jobManagerRunners = new ArrayDeque<>(Arrays.asList(testingJobManagerRunner)); startDispatcher(new QueueJobManagerRunnerFactory(jobManagerRunners)); submitJobAndWait(); final CompletableFuture<Void> dispatcherTerminationFuture = dispatcher.closeAsync(); testingJobManagerRunner.getCloseAsyncCalledLatch().await(); testingJobManagerRunner.completeResultFuture( new ExecutionGraphInfo( 
new ArchivedExecutionGraphBuilder() .setJobID(jobId) .setState(JobStatus.FINISHED) .build())); testingJobManagerRunner.completeTerminationFuture(); dispatcherTerminationFuture.get(); assertGlobalCleanupTriggered(jobId); } @Test @Test public void testJobBeingMarkedAsCleanAfterCleanup() throws Exception { final CompletableFuture<JobID> markAsCleanFuture = new CompletableFuture<>(); final JobResultStore jobResultStore = TestingJobResultStore.builder() .withMarkResultAsCleanConsumer( jobID -> { markAsCleanFuture.complete(jobID); return FutureUtils.completedVoidFuture(); }) .build(); final OneShotLatch localCleanupLatch = new OneShotLatch(); final OneShotLatch globalCleanupLatch = new OneShotLatch(); final TestingResourceCleanerFactory resourceCleanerFactory = TestingResourceCleanerFactory.builder() .withLocallyCleanableResource( (ignoredJobId, ignoredExecutor) -> { try { localCleanupLatch.await(); } catch (InterruptedException e) { throw new RuntimeException(e); } return FutureUtils.completedVoidFuture(); }) .withGloballyCleanableResource( (ignoredJobId, ignoredExecutor) -> { try { globalCleanupLatch.await(); } catch (InterruptedException e) { throw new RuntimeException(e); } return FutureUtils.completedVoidFuture(); }) .build(); final TestingDispatcher.Builder dispatcherBuilder = createTestingDispatcherBuilder() .setJobResultStore(jobResultStore) .setResourceCleanerFactory(resourceCleanerFactory); final TestingJobManagerRunnerFactory jobManagerRunnerFactory = startDispatcherAndSubmitJob(dispatcherBuilder, 0); finishJob(jobManagerRunnerFactory.takeCreatedJobManagerRunner()); assertThat(markAsCleanFuture.isDone(), is(false)); localCleanupLatch.trigger(); assertThat(markAsCleanFuture.isDone(), is(false)); globalCleanupLatch.trigger(); assertThat(markAsCleanFuture.get(), is(jobId)); } /** * Tests that the previous JobManager needs to be completely terminated before a new job with * the same {@link JobID} is started. 
*/ @Test public void testJobSubmissionUnderSameJobId() throws Exception { final TestingJobManagerRunnerFactory jobManagerRunnerFactory = startDispatcherAndSubmitJob(1); final TestingJobManagerRunner testingJobManagerRunner = jobManagerRunnerFactory.takeCreatedJobManagerRunner(); suspendJob(testingJobManagerRunner); testingJobManagerRunner.getCloseAsyncCalledLatch().await(); final CompletableFuture<Acknowledge> submissionFuture = dispatcherGateway.submitJob(jobGraph, timeout); try { submissionFuture.get(10L, TimeUnit.MILLISECONDS); fail( "The job submission future should not complete until the previous JobManager " + "termination future has been completed."); } catch (TimeoutException ignored) { } finally { testingJobManagerRunner.completeTerminationFuture(); } assertThat(submissionFuture.get(), equalTo(Acknowledge.get())); } /** * Tests that a duplicate job submission won't delete any job meta data (submitted job graphs, * blobs, etc.). */ @Test public void testDuplicateJobSubmissionDoesNotDeleteJobMetaData() throws Exception { final TestingJobManagerRunnerFactory testingJobManagerRunnerFactoryNG = startDispatcherAndSubmitJob(); final CompletableFuture<Acknowledge> submissionFuture = dispatcherGateway.submitJob(jobGraph, timeout); try { try { submissionFuture.get(); fail("Expected a DuplicateJobSubmissionFailure."); } catch (ExecutionException ee) { assertThat( ExceptionUtils.findThrowable(ee, DuplicateJobSubmissionException.class) .isPresent(), is(true)); } assertThatNoCleanupWasTriggered(); } finally { finishJob(testingJobManagerRunnerFactoryNG.takeCreatedJobManagerRunner()); } assertGlobalCleanupTriggered(jobId); } private void finishJob(TestingJobManagerRunner takeCreatedJobManagerRunner) { terminateJobWithState(takeCreatedJobManagerRunner, JobStatus.FINISHED); } private void suspendJob(TestingJobManagerRunner takeCreatedJobManagerRunner) { terminateJobWithState(takeCreatedJobManagerRunner, JobStatus.SUSPENDED); } private void cancelJob(TestingJobManagerRunner 
takeCreatedJobManagerRunner) { terminateJobWithState(takeCreatedJobManagerRunner, JobStatus.CANCELED); } private void terminateJobWithState( TestingJobManagerRunner takeCreatedJobManagerRunner, JobStatus state) { takeCreatedJobManagerRunner.completeResultFuture( new ExecutionGraphInfo( new ArchivedExecutionGraphBuilder() .setJobID(jobId) .setState(state) .build())); } private void assertThatNoCleanupWasTriggered() { assertThat(globalCleanupFuture.isDone(), is(false)); assertThat(localCleanupFuture.isDone(), is(false)); } @Test public void testDispatcherTerminationTerminatesRunningJobMasters() throws Exception { final TestingJobManagerRunnerFactory jobManagerRunnerFactory = startDispatcherAndSubmitJob(); dispatcher.closeAsync().get(); final TestingJobManagerRunner jobManagerRunner = jobManagerRunnerFactory.takeCreatedJobManagerRunner(); assertThat(jobManagerRunner.getTerminationFuture().isDone(), is(true)); } /** Tests that terminating the Dispatcher will wait for all JobMasters to be terminated. 
*/ @Test public void testDispatcherTerminationWaitsForJobMasterTerminations() throws Exception { final TestingJobManagerRunnerFactory jobManagerRunnerFactory = startDispatcherAndSubmitJob(1); final CompletableFuture<Void> dispatcherTerminationFuture = dispatcher.closeAsync(); try { dispatcherTerminationFuture.get(10L, TimeUnit.MILLISECONDS); fail("We should not terminate before all running JobMasters have terminated."); } catch (TimeoutException ignored) { } finally { jobManagerRunnerFactory.takeCreatedJobManagerRunner().completeTerminationFuture(); } dispatcherTerminationFuture.get(); } private void assertLocalCleanupTriggered(JobID jobId) throws ExecutionException, InterruptedException, TimeoutException { assertThat(localCleanupFuture.get(), equalTo(jobId)); assertThat(globalCleanupFuture.isDone(), is(false)); } private void assertGlobalCleanupTriggered(JobID jobId) throws ExecutionException, InterruptedException, TimeoutException { assertThat(localCleanupFuture.isDone(), is(false)); assertThat(globalCleanupFuture.get(), equalTo(jobId)); } @Test public void testFatalErrorIfJobCannotBeMarkedDirtyInJobResultStore() throws Exception { final JobResultStore jobResultStore = TestingJobResultStore.builder() .withCreateDirtyResultConsumer( jobResult -> FutureUtils.completedExceptionally( new IOException("Expected IOException."))) .build(); final TestingJobManagerRunnerFactory jobManagerRunnerFactory = startDispatcherAndSubmitJob( createTestingDispatcherBuilder().setJobResultStore(jobResultStore), 0); ArchivedExecutionGraph executionGraph = new ArchivedExecutionGraphBuilder() .setJobID(jobId) .setState(JobStatus.FINISHED) .build(); final TestingJobManagerRunner testingJobManagerRunner = jobManagerRunnerFactory.takeCreatedJobManagerRunner(); testingJobManagerRunner.completeResultFuture(new ExecutionGraphInfo(executionGraph)); final CompletableFuture<? 
extends Throwable> errorFuture = this.testingFatalErrorHandlerResource.getFatalErrorHandler().getErrorFuture(); assertThat( errorFuture.get(100, TimeUnit.MILLISECONDS), IsInstanceOf.instanceOf(FlinkException.class)); testingFatalErrorHandlerResource.getFatalErrorHandler().clearError(); } @Test public void testErrorHandlingIfJobCannotBeMarkedAsCleanInJobResultStore() throws Exception { final CompletableFuture<JobResultEntry> dirtyJobFuture = new CompletableFuture<>(); final JobResultStore jobResultStore = TestingJobResultStore.builder() .withCreateDirtyResultConsumer( jobResultEntry -> { dirtyJobFuture.complete(jobResultEntry); return FutureUtils.completedVoidFuture(); }) .withMarkResultAsCleanConsumer( jobId -> FutureUtils.completedExceptionally( new IOException("Expected IOException."))) .build(); final TestingJobManagerRunnerFactory jobManagerRunnerFactory = startDispatcherAndSubmitJob( createTestingDispatcherBuilder().setJobResultStore(jobResultStore), 0); ArchivedExecutionGraph executionGraph = new ArchivedExecutionGraphBuilder() .setJobID(jobId) .setState(JobStatus.FINISHED) .build(); final TestingJobManagerRunner testingJobManagerRunner = jobManagerRunnerFactory.takeCreatedJobManagerRunner(); testingJobManagerRunner.completeResultFuture(new ExecutionGraphInfo(executionGraph)); final CompletableFuture<? extends Throwable> errorFuture = this.testingFatalErrorHandlerResource.getFatalErrorHandler().getErrorFuture(); try { final Throwable unexpectedError = errorFuture.get(100, TimeUnit.MILLISECONDS); fail( "No error should have been reported but an " + unexpectedError.getClass() + " was handled."); } catch (TimeoutException e) { } assertThat(dirtyJobFuture.get().getJobId(), is(jobId)); } /** Tests that a failing {@link JobManagerRunner} will be properly cleaned up. 
*/ @Test public void testFailingJobManagerRunnerCleanup() throws Exception { final FlinkException testException = new FlinkException("Test exception."); final ArrayBlockingQueue<Optional<Exception>> queue = new ArrayBlockingQueue<>(2); final BlockingJobManagerRunnerFactory blockingJobManagerRunnerFactory = new BlockingJobManagerRunnerFactory( () -> { final Optional<Exception> maybeException = queue.take(); if (maybeException.isPresent()) { throw maybeException.get(); } }); startDispatcher(blockingJobManagerRunnerFactory); final DispatcherGateway dispatcherGateway = dispatcher.getSelfGateway(DispatcherGateway.class); queue.offer(Optional.of(testException)); try { dispatcherGateway.submitJob(jobGraph, Time.minutes(1)).get(); fail("A FlinkException is expected"); } catch (Throwable expectedException) { assertThat(expectedException, containsCause(FlinkException.class)); assertThat(expectedException, containsMessage(testException.getMessage())); assertGlobalCleanupTriggered(jobId); } queue.offer(Optional.empty()); dispatcherGateway.submitJob(jobGraph, Time.minutes(1L)).get(); blockingJobManagerRunnerFactory.setJobStatus(JobStatus.RUNNING); awaitStatus(dispatcherGateway, jobId, JobStatus.RUNNING); } @Test public void testArchivingFinishedJobToHistoryServer() throws Exception { final CompletableFuture<Acknowledge> archiveFuture = new CompletableFuture<>(); final TestingDispatcher.Builder testingDispatcherBuilder = createTestingDispatcherBuilder() .setHistoryServerArchivist(executionGraphInfo -> archiveFuture); final TestingJobManagerRunnerFactory jobManagerRunnerFactory = startDispatcherAndSubmitJob(testingDispatcherBuilder, 0); finishJob(jobManagerRunnerFactory.takeCreatedJobManagerRunner()); assertThatNoCleanupWasTriggered(); final CompletableFuture<Void> jobTerminationFuture = dispatcher.getJobTerminationFuture(jobId, Time.hours(1)); assertFalse(jobTerminationFuture.isDone()); archiveFuture.complete(Acknowledge.get()); assertGlobalCleanupTriggered(jobId); 
jobTerminationFuture.join(); } @Test public void testNotArchivingSuspendedJobToHistoryServer() throws Exception { final AtomicBoolean isArchived = new AtomicBoolean(false); final TestingDispatcher.Builder testingDispatcherBuilder = createTestingDispatcherBuilder() .setHistoryServerArchivist( executionGraphInfo -> { isArchived.set(true); return CompletableFuture.completedFuture(Acknowledge.get()); }); final TestingJobManagerRunnerFactory jobManagerRunnerFactory = startDispatcherAndSubmitJob(testingDispatcherBuilder, 0); suspendJob(jobManagerRunnerFactory.takeCreatedJobManagerRunner()); assertLocalCleanupTriggered(jobId); dispatcher.getJobTerminationFuture(jobId, Time.hours(1)).join(); assertFalse(isArchived.get()); } private static final class BlockingJobManagerRunnerFactory extends TestingJobMasterServiceLeadershipRunnerFactory { private final ThrowingRunnable<Exception> jobManagerRunnerCreationLatch; private TestingJobManagerRunner testingRunner; BlockingJobManagerRunnerFactory(ThrowingRunnable<Exception> jobManagerRunnerCreationLatch) { this.jobManagerRunnerCreationLatch = jobManagerRunnerCreationLatch; } @Override public TestingJobManagerRunner createJobManagerRunner( JobGraph jobGraph, Configuration configuration, RpcService rpcService, HighAvailabilityServices highAvailabilityServices, HeartbeatServices heartbeatServices, JobManagerSharedServices jobManagerSharedServices, JobManagerJobMetricGroupFactory jobManagerJobMetricGroupFactory, FatalErrorHandler fatalErrorHandler, Collection<FailureEnricher> failureEnrichers, long initializationTimestamp) throws Exception { jobManagerRunnerCreationLatch.run(); this.testingRunner = super.createJobManagerRunner( jobGraph, configuration, rpcService, highAvailabilityServices, heartbeatServices, jobManagerSharedServices, jobManagerJobMetricGroupFactory, fatalErrorHandler, failureEnrichers, initializationTimestamp); TestingJobMasterGateway testingJobMasterGateway = new TestingJobMasterGatewayBuilder() .setRequestJobSupplier( 
() -> CompletableFuture.completedFuture( new ExecutionGraphInfo( ArchivedExecutionGraph .createSparseArchivedExecutionGraph( jobGraph.getJobID(), jobGraph.getName(), JobStatus.RUNNING, null, null, 1337)))) .build(); testingRunner.completeJobMasterGatewayFuture(testingJobMasterGateway); return testingRunner; } public void setJobStatus(JobStatus newStatus) { Preconditions.checkState( testingRunner != null, "JobManagerRunner must be created before this method is available"); this.testingRunner.setJobStatus(newStatus); } } private static final class QueueJobManagerRunnerFactory implements JobManagerRunnerFactory { private final Queue<? extends JobManagerRunner> jobManagerRunners; private QueueJobManagerRunnerFactory(Queue<? extends JobManagerRunner> jobManagerRunners) { this.jobManagerRunners = jobManagerRunners; } @Override public JobManagerRunner createJobManagerRunner( JobGraph jobGraph, Configuration configuration, RpcService rpcService, HighAvailabilityServices highAvailabilityServices, HeartbeatServices heartbeatServices, JobManagerSharedServices jobManagerServices, JobManagerJobMetricGroupFactory jobManagerJobMetricGroupFactory, FatalErrorHandler fatalErrorHandler, Collection<FailureEnricher> failureEnrichers, long initializationTimestamp) { return Optional.ofNullable(jobManagerRunners.poll()) .orElseThrow( () -> new IllegalStateException( "Cannot create more JobManagerRunners.")); } } private class FailingJobManagerRunnerFactory implements JobManagerRunnerFactory { private final Exception testException; public FailingJobManagerRunnerFactory(FlinkException testException) { this.testException = testException; } @Override public JobManagerRunner createJobManagerRunner( JobGraph jobGraph, Configuration configuration, RpcService rpcService, HighAvailabilityServices highAvailabilityServices, HeartbeatServices heartbeatServices, JobManagerSharedServices jobManagerServices, JobManagerJobMetricGroupFactory jobManagerJobMetricGroupFactory, FatalErrorHandler 
fatalErrorHandler, Collection<FailureEnricher> failureEnrichers, long initializationTimestamp) throws Exception { throw testException; } } }
Improve the code a bit: ```java final TableColumn tableColumn = tableSchema.getTableColumns().get(i); final String fieldName = tableColumn.getName(); final DataType fieldType = tableColumn.getType(); final boolean isGeneratedColumn = tableColumn.isGenerated(); ``` We do know it's safe to call `tableSchema.getTableColumns().get(i)`.
public static TableSchema deriveTableSinkSchema(DescriptorProperties properties) { TableSchema.Builder builder = TableSchema.builder(); TableSchema tableSchema = properties.getTableSchema(SCHEMA); for (int i = 0; i < tableSchema.getFieldCount(); i++) { TypeInformation t = tableSchema.getFieldTypes()[i]; String n = tableSchema.getFieldNames()[i]; Optional<TableColumn> tableColumn = tableSchema.getTableColumn(n); boolean isGeneratedColumn = tableColumn.isPresent() && tableColumn.get().isGenerated(); if (isGeneratedColumn) { continue; } boolean isProctime = properties .getOptionalBoolean(SCHEMA + "." + i + "." + SCHEMA_PROCTIME) .orElse(false); String tsType = SCHEMA + "." + i + "." + ROWTIME_TIMESTAMPS_TYPE; boolean isRowtime = properties.containsKey(tsType); if (!isProctime && !isRowtime) { String fieldName = properties.getOptionalString(SCHEMA + "." + i + "." + SCHEMA_FROM) .orElse(n); builder.field(fieldName, t); } else if (isRowtime) { switch (properties.getString(tsType)) { case ROWTIME_TIMESTAMPS_TYPE_VALUE_FROM_FIELD: String field = properties.getString(SCHEMA + "." + i + "." + ROWTIME_TIMESTAMPS_FROM); builder.field(field, t); break; default: throw new TableException(format("Unsupported rowtime type '%s' for sink" + " table schema. Currently only '%s' is supported for table sinks.", t, ROWTIME_TIMESTAMPS_TYPE_VALUE_FROM_FIELD)); } } } return builder.build(); }
boolean isGeneratedColumn = tableColumn.isPresent() && tableColumn.get().isGenerated();
public static TableSchema deriveTableSinkSchema(DescriptorProperties properties) { TableSchema.Builder builder = TableSchema.builder(); TableSchema tableSchema = properties.getTableSchema(SCHEMA); for (int i = 0; i < tableSchema.getFieldCount(); i++) { final TableColumn tableColumn = tableSchema.getTableColumns().get(i); final String fieldName = tableColumn.getName(); final DataType dataType = tableColumn.getType(); boolean isGeneratedColumn = tableColumn.isGenerated(); if (isGeneratedColumn) { continue; } boolean isProctime = properties .getOptionalBoolean(SCHEMA + "." + i + "." + SCHEMA_PROCTIME) .orElse(false); String tsType = SCHEMA + "." + i + "." + ROWTIME_TIMESTAMPS_TYPE; boolean isRowtime = properties.containsKey(tsType); if (!isProctime && !isRowtime) { String aliasName = properties.getOptionalString(SCHEMA + "." + i + "." + SCHEMA_FROM) .orElse(fieldName); builder.field(aliasName, dataType); } else if (isRowtime) { switch (properties.getString(tsType)) { case ROWTIME_TIMESTAMPS_TYPE_VALUE_FROM_FIELD: String field = properties.getString(SCHEMA + "." + i + "." + ROWTIME_TIMESTAMPS_FROM); builder.field(field, dataType); break; default: throw new TableException(format("Unsupported rowtime type '%s' for sink" + " table schema. Currently only '%s' is supported for table sinks.", dataType, ROWTIME_TIMESTAMPS_TYPE_VALUE_FROM_FIELD)); } } } return builder.build(); }
class SchemaValidator implements DescriptorValidator { private final boolean isStreamEnvironment; private final boolean supportsSourceTimestamps; private final boolean supportsSourceWatermarks; public SchemaValidator(boolean isStreamEnvironment, boolean supportsSourceTimestamps, boolean supportsSourceWatermarks) { this.isStreamEnvironment = isStreamEnvironment; this.supportsSourceTimestamps = supportsSourceTimestamps; this.supportsSourceWatermarks = supportsSourceWatermarks; } @Override public void validate(DescriptorProperties properties) { Map<String, String> names = properties.getIndexedProperty(SCHEMA, SCHEMA_NAME); Map<String, String> legacyTypes = properties.getIndexedProperty(SCHEMA, SCHEMA_TYPE); Map<String, String> dataTypes = properties.getIndexedProperty(SCHEMA, SCHEMA_DATA_TYPE); if (names.isEmpty() && legacyTypes.isEmpty() && dataTypes.isEmpty()) { throw new ValidationException( format("Could not find the required schema in property '%s'.", SCHEMA)); } boolean proctimeFound = false; for (int i = 0; i < Math.max(names.size(), legacyTypes.size()); i++) { properties.validateString(SCHEMA + "." + i + "." + SCHEMA_NAME, false, 1); properties.validateDataType(SCHEMA + "." + i + "." + SCHEMA_DATA_TYPE, SCHEMA + "." + i + "." + SCHEMA_TYPE, false); properties.validateString(SCHEMA + "." + i + "." + SCHEMA_FROM, true, 1); String proctime = SCHEMA + "." + i + "." + SCHEMA_PROCTIME; String rowtime = SCHEMA + "." + i + "." 
+ ROWTIME; if (properties.containsKey(proctime)) { if (!isStreamEnvironment) { throw new ValidationException( format("Property '%s' is not allowed in a batch environment.", proctime)); } else if (proctimeFound) { throw new ValidationException("A proctime attribute must only be defined once."); } properties.validateBoolean(proctime, false); proctimeFound = properties.getBoolean(proctime); properties.validatePrefixExclusion(rowtime); } else if (properties.hasPrefix(rowtime)) { RowtimeValidator rowtimeValidator = new RowtimeValidator( supportsSourceTimestamps, supportsSourceWatermarks, SCHEMA + "." + i + "."); rowtimeValidator.validate(properties); properties.validateExclusion(proctime); } } } /** * Returns keys for a {@link TableFormatFactory * are accepted for schema derivation using {@code deriveFormatFields(DescriptorProperties)}. */ public static List<String> getSchemaDerivationKeys() { List<String> keys = new ArrayList<>(); keys.add(SCHEMA + ". keys.add(SCHEMA + ". keys.add(SCHEMA + ". keys.add(SCHEMA + ". keys.add(SCHEMA + ". keys.add(SCHEMA + ". keys.add(SCHEMA + ". keys.add(SCHEMA + ". keys.add(SCHEMA + ". keys.add(SCHEMA + ". keys.add(SCHEMA + ". keys.add(SCHEMA + ". keys.add(SCHEMA + ". keys.add(SCHEMA + ". keys.add(SCHEMA + "." + WATERMARK + ". keys.add(SCHEMA + "." + WATERMARK + ". keys.add(SCHEMA + "." + WATERMARK + ". return keys; } /** * Finds the proctime attribute if defined. */ public static Optional<String> deriveProctimeAttribute(DescriptorProperties properties) { Map<String, String> names = properties.getIndexedProperty(SCHEMA, SCHEMA_NAME); for (int i = 0; i < names.size(); i++) { Optional<Boolean> isProctime = properties.getOptionalBoolean(SCHEMA + "." + i + "." + SCHEMA_PROCTIME); if (isProctime.isPresent() && isProctime.get()) { return Optional.of(names.get(SCHEMA + "." + i + "." + SCHEMA_NAME)); } } return Optional.empty(); } /** * Finds the rowtime attributes if defined. 
*/ public static List<RowtimeAttributeDescriptor> deriveRowtimeAttributes( DescriptorProperties properties) { Map<String, String> names = properties.getIndexedProperty(SCHEMA, SCHEMA_NAME); List<RowtimeAttributeDescriptor> attributes = new ArrayList<>(); for (int i = 0; i < names.size(); i++) { Optional<Tuple2<TimestampExtractor, WatermarkStrategy>> rowtimeComponents = RowtimeValidator .getRowtimeComponents(properties, SCHEMA + "." + i + "."); int index = i; rowtimeComponents.ifPresent(tuple2 -> attributes.add(new RowtimeAttributeDescriptor( properties.getString(SCHEMA + "." + index + "." + SCHEMA_NAME), tuple2.f0, tuple2.f1)) ); } return attributes; } /** * Derives the table schema for a table sink. A sink ignores a proctime attribute and * needs to track the origin of a rowtime field. * * @deprecated This method combines two separate concepts of table schema and field mapping. * This should be split into two methods once we have support for * the corresponding interfaces (see FLINK-9870). */ @Deprecated /** * Finds a table source field mapping. * * @param properties The properties describing a schema. * @param inputType The input type that a connector and/or format produces. This parameter * can be used to resolve a rowtime field against an input field. */ public static Map<String, String> deriveFieldMapping( DescriptorProperties properties, Optional<TypeInformation<?>> inputType) { Map<String, String> mapping = new HashMap<>(); TableSchema schema = properties.getTableSchema(SCHEMA); List<String> columnNames = new ArrayList<>(); inputType.ifPresent(t -> columnNames.addAll(Arrays.asList(((CompositeType) t).getFieldNames()))); columnNames.forEach(name -> mapping.put(name, name)); Arrays.stream(schema.getFieldNames()).forEach(name -> mapping.put(name, name)); Map<String, String> names = properties.getIndexedProperty(SCHEMA, SCHEMA_NAME); for (int i = 0; i < names.size(); i++) { String name = properties.getString(SCHEMA + "." + i + "." 
+ SCHEMA_NAME); Optional<String> source = properties.getOptionalString(SCHEMA + "." + i + "." + SCHEMA_FROM); if (source.isPresent()) { mapping.put(name, source.get()); } else { boolean isProctime = properties .getOptionalBoolean(SCHEMA + "." + i + "." + SCHEMA_PROCTIME) .orElse(false); boolean isRowtime = properties .containsKey(SCHEMA + "." + i + "." + ROWTIME_TIMESTAMPS_TYPE); boolean isGeneratedColumn = properties .containsKey(SCHEMA + "." + i + "." + TABLE_SCHEMA_EXPR); if (isProctime || isRowtime || isGeneratedColumn) { mapping.remove(name); } else if (!columnNames.contains(name)) { throw new ValidationException(format("Could not map the schema field '%s' to a field " + "from source. Please specify the source field from which it can be derived.", name)); } } } return mapping; } }
class SchemaValidator implements DescriptorValidator { private final boolean isStreamEnvironment; private final boolean supportsSourceTimestamps; private final boolean supportsSourceWatermarks; public SchemaValidator(boolean isStreamEnvironment, boolean supportsSourceTimestamps, boolean supportsSourceWatermarks) { this.isStreamEnvironment = isStreamEnvironment; this.supportsSourceTimestamps = supportsSourceTimestamps; this.supportsSourceWatermarks = supportsSourceWatermarks; } @Override public void validate(DescriptorProperties properties) { Map<String, String> names = properties.getIndexedProperty(SCHEMA, SCHEMA_NAME); Map<String, String> legacyTypes = properties.getIndexedProperty(SCHEMA, SCHEMA_TYPE); Map<String, String> dataTypes = properties.getIndexedProperty(SCHEMA, SCHEMA_DATA_TYPE); if (names.isEmpty() && legacyTypes.isEmpty() && dataTypes.isEmpty()) { throw new ValidationException( format("Could not find the required schema in property '%s'.", SCHEMA)); } boolean proctimeFound = false; for (int i = 0; i < Math.max(names.size(), legacyTypes.size()); i++) { properties.validateString(SCHEMA + "." + i + "." + SCHEMA_NAME, false, 1); properties.validateDataType(SCHEMA + "." + i + "." + SCHEMA_DATA_TYPE, SCHEMA + "." + i + "." + SCHEMA_TYPE, false); properties.validateString(SCHEMA + "." + i + "." + SCHEMA_FROM, true, 1); String proctime = SCHEMA + "." + i + "." + SCHEMA_PROCTIME; String rowtime = SCHEMA + "." + i + "." 
+ ROWTIME; if (properties.containsKey(proctime)) { if (!isStreamEnvironment) { throw new ValidationException( format("Property '%s' is not allowed in a batch environment.", proctime)); } else if (proctimeFound) { throw new ValidationException("A proctime attribute must only be defined once."); } properties.validateBoolean(proctime, false); proctimeFound = properties.getBoolean(proctime); properties.validatePrefixExclusion(rowtime); } else if (properties.hasPrefix(rowtime)) { RowtimeValidator rowtimeValidator = new RowtimeValidator( supportsSourceTimestamps, supportsSourceWatermarks, SCHEMA + "." + i + "."); rowtimeValidator.validate(properties); properties.validateExclusion(proctime); } } } /** * Returns keys for a {@link TableFormatFactory * are accepted for schema derivation using {@code deriveFormatFields(DescriptorProperties)}. */ public static List<String> getSchemaDerivationKeys() { List<String> keys = new ArrayList<>(); keys.add(SCHEMA + ". keys.add(SCHEMA + ". keys.add(SCHEMA + ". keys.add(SCHEMA + ". keys.add(SCHEMA + ". keys.add(SCHEMA + ". keys.add(SCHEMA + ". keys.add(SCHEMA + ". keys.add(SCHEMA + ". keys.add(SCHEMA + ". keys.add(SCHEMA + ". keys.add(SCHEMA + ". keys.add(SCHEMA + ". keys.add(SCHEMA + ". keys.add(SCHEMA + "." + WATERMARK + ". keys.add(SCHEMA + "." + WATERMARK + ". keys.add(SCHEMA + "." + WATERMARK + ". return keys; } /** * Finds the proctime attribute if defined. */ public static Optional<String> deriveProctimeAttribute(DescriptorProperties properties) { Map<String, String> names = properties.getIndexedProperty(SCHEMA, SCHEMA_NAME); for (int i = 0; i < names.size(); i++) { Optional<Boolean> isProctime = properties.getOptionalBoolean(SCHEMA + "." + i + "." + SCHEMA_PROCTIME); if (isProctime.isPresent() && isProctime.get()) { return Optional.of(names.get(SCHEMA + "." + i + "." + SCHEMA_NAME)); } } return Optional.empty(); } /** * Finds the rowtime attributes if defined. 
*/ public static List<RowtimeAttributeDescriptor> deriveRowtimeAttributes( DescriptorProperties properties) { Map<String, String> names = properties.getIndexedProperty(SCHEMA, SCHEMA_NAME); List<RowtimeAttributeDescriptor> attributes = new ArrayList<>(); for (int i = 0; i < names.size(); i++) { Optional<Tuple2<TimestampExtractor, WatermarkStrategy>> rowtimeComponents = RowtimeValidator .getRowtimeComponents(properties, SCHEMA + "." + i + "."); int index = i; rowtimeComponents.ifPresent(tuple2 -> attributes.add(new RowtimeAttributeDescriptor( properties.getString(SCHEMA + "." + index + "." + SCHEMA_NAME), tuple2.f0, tuple2.f1)) ); } return attributes; } /** * Derives the table schema for a table sink. A sink ignores a proctime attribute and * needs to track the origin of a rowtime field. * * @deprecated This method combines two separate concepts of table schema and field mapping. * This should be split into two methods once we have support for * the corresponding interfaces (see FLINK-9870). */ @Deprecated /** * Finds a table source field mapping. * * @param properties The properties describing a schema. * @param inputType The input type that a connector and/or format produces. This parameter * can be used to resolve a rowtime field against an input field. */ public static Map<String, String> deriveFieldMapping( DescriptorProperties properties, Optional<TypeInformation<?>> inputType) { Map<String, String> mapping = new HashMap<>(); TableSchema schema = properties.getTableSchema(SCHEMA); List<String> columnNames = new ArrayList<>(); inputType.ifPresent(t -> columnNames.addAll(Arrays.asList(((CompositeType) t).getFieldNames()))); columnNames.forEach(name -> mapping.put(name, name)); Arrays.stream(schema.getFieldNames()).forEach(name -> mapping.put(name, name)); Map<String, String> names = properties.getIndexedProperty(SCHEMA, SCHEMA_NAME); for (int i = 0; i < names.size(); i++) { String name = properties.getString(SCHEMA + "." + i + "." 
+ SCHEMA_NAME); Optional<String> source = properties.getOptionalString(SCHEMA + "." + i + "." + SCHEMA_FROM); if (source.isPresent()) { mapping.put(name, source.get()); } else { boolean isProctime = properties .getOptionalBoolean(SCHEMA + "." + i + "." + SCHEMA_PROCTIME) .orElse(false); boolean isRowtime = properties .containsKey(SCHEMA + "." + i + "." + ROWTIME_TIMESTAMPS_TYPE); boolean isGeneratedColumn = properties .containsKey(SCHEMA + "." + i + "." + TABLE_SCHEMA_EXPR); if (isProctime || isRowtime || isGeneratedColumn) { mapping.remove(name); } else if (!columnNames.contains(name)) { throw new ValidationException(format("Could not map the schema field '%s' to a field " + "from source. Please specify the source field from which it can be derived.", name)); } } } return mapping; } }
It would be nice if this were another constant. We could have `DEFAULT_GROUPING_FACTOR_BOUNDED` and `DEFAULT_GROUPING_FACTOR_UNBOUNDED`. It doesn't need to be done here, could be in a follow-up PR.
public SpannerWriteResult expand(PCollection<MutationGroup> input) { PCollection<Void> schemaSeed = input.getPipeline().apply("Create Seed", Create.of((Void) null)); if (spec.getSchemaReadySignal() != null) { schemaSeed = schemaSeed.apply("Wait for schema", Wait.on(spec.getSchemaReadySignal())); } final PCollectionView<SpannerSchema> schemaView = schemaSeed .apply( "Read information schema", ParDo.of(new ReadSpannerSchema(spec.getSpannerConfig()))) .apply("Schema View", View.asSingleton()); PCollectionTuple filteredMutations = input .apply( "RewindowIntoGlobal", Window.<MutationGroup>into(new GlobalWindows()) .triggering(DefaultTrigger.of()) .discardingFiredPanes()) .apply( "Filter Unbatchable Mutations", ParDo.of( new BatchableMutationFilterFn( schemaView, UNBATCHABLE_MUTATIONS_TAG, spec.getBatchSizeBytes(), spec.getMaxNumMutations(), spec.getMaxNumRows())) .withSideInputs(schemaView) .withOutputTags( BATCHABLE_MUTATIONS_TAG, TupleTagList.of(UNBATCHABLE_MUTATIONS_TAG))); PCollection<Iterable<MutationGroup>> batchedMutations = filteredMutations .get(BATCHABLE_MUTATIONS_TAG) .apply( "Gather And Sort", ParDo.of( new GatherBundleAndSortFn( spec.getBatchSizeBytes(), spec.getMaxNumMutations(), spec.getMaxNumRows(), spec.getGroupingFactor() .orElse( input.isBounded() == IsBounded.BOUNDED ? 
DEFAULT_GROUPING_FACTOR : 1), schemaView)) .withSideInputs(schemaView)) .apply( "Create Batches", ParDo.of( new BatchFn( spec.getBatchSizeBytes(), spec.getMaxNumMutations(), spec.getMaxNumRows(), schemaView)) .withSideInputs(schemaView)); PCollectionTuple result = PCollectionList.of(filteredMutations.get(UNBATCHABLE_MUTATIONS_TAG)) .and(batchedMutations) .apply("Merge", Flatten.pCollections()) .apply( "Write mutations to Spanner", ParDo.of( new WriteToSpannerFn( spec.getSpannerConfig(), spec.getFailureMode(), FAILED_MUTATIONS_TAG)) .withOutputTags(MAIN_OUT_TAG, TupleTagList.of(FAILED_MUTATIONS_TAG))); return new SpannerWriteResult( input.getPipeline(), result.get(MAIN_OUT_TAG), result.get(FAILED_MUTATIONS_TAG), FAILED_MUTATIONS_TAG); }
: 1),
public SpannerWriteResult expand(PCollection<MutationGroup> input) { PCollection<Void> schemaSeed = input.getPipeline().apply("Create Seed", Create.of((Void) null)); if (spec.getSchemaReadySignal() != null) { schemaSeed = schemaSeed.apply("Wait for schema", Wait.on(spec.getSchemaReadySignal())); } final PCollectionView<SpannerSchema> schemaView = schemaSeed .apply( "Read information schema", ParDo.of(new ReadSpannerSchema(spec.getSpannerConfig()))) .apply("Schema View", View.asSingleton()); PCollectionTuple filteredMutations = input .apply( "RewindowIntoGlobal", Window.<MutationGroup>into(new GlobalWindows()) .triggering(DefaultTrigger.of()) .discardingFiredPanes()) .apply( "Filter Unbatchable Mutations", ParDo.of( new BatchableMutationFilterFn( schemaView, UNBATCHABLE_MUTATIONS_TAG, spec.getBatchSizeBytes(), spec.getMaxNumMutations(), spec.getMaxNumRows())) .withSideInputs(schemaView) .withOutputTags( BATCHABLE_MUTATIONS_TAG, TupleTagList.of(UNBATCHABLE_MUTATIONS_TAG))); PCollection<Iterable<MutationGroup>> batchedMutations = filteredMutations .get(BATCHABLE_MUTATIONS_TAG) .apply( "Gather And Sort", ParDo.of( new GatherBundleAndSortFn( spec.getBatchSizeBytes(), spec.getMaxNumMutations(), spec.getMaxNumRows(), spec.getGroupingFactor() .orElse( input.isBounded() == IsBounded.BOUNDED ? 
DEFAULT_GROUPING_FACTOR : 1), schemaView)) .withSideInputs(schemaView)) .apply( "Create Batches", ParDo.of( new BatchFn( spec.getBatchSizeBytes(), spec.getMaxNumMutations(), spec.getMaxNumRows(), schemaView)) .withSideInputs(schemaView)); PCollectionTuple result = PCollectionList.of(filteredMutations.get(UNBATCHABLE_MUTATIONS_TAG)) .and(batchedMutations) .apply("Merge", Flatten.pCollections()) .apply( "Write mutations to Spanner", ParDo.of( new WriteToSpannerFn( spec.getSpannerConfig(), spec.getFailureMode(), FAILED_MUTATIONS_TAG)) .withOutputTags(MAIN_OUT_TAG, TupleTagList.of(FAILED_MUTATIONS_TAG))); return new SpannerWriteResult( input.getPipeline(), result.get(MAIN_OUT_TAG), result.get(FAILED_MUTATIONS_TAG), FAILED_MUTATIONS_TAG); }
class WriteGrouped extends PTransform<PCollection<MutationGroup>, SpannerWriteResult> { private final Write spec; private static final TupleTag<MutationGroup> BATCHABLE_MUTATIONS_TAG = new TupleTag<MutationGroup>("batchableMutations") {}; private static final TupleTag<Iterable<MutationGroup>> UNBATCHABLE_MUTATIONS_TAG = new TupleTag<Iterable<MutationGroup>>("unbatchableMutations") {}; private static final TupleTag<Void> MAIN_OUT_TAG = new TupleTag<Void>("mainOut") {}; private static final TupleTag<MutationGroup> FAILED_MUTATIONS_TAG = new TupleTag<MutationGroup>("failedMutations") {}; private static final SerializableCoder<MutationGroup> CODER = SerializableCoder.of(MutationGroup.class); public WriteGrouped(Write spec) { this.spec = spec; } @Override public void populateDisplayData(DisplayData.Builder builder) { super.populateDisplayData(builder); spec.populateDisplayDataWithParamaters(builder); } @Override @VisibleForTesting static MutationGroup decode(byte[] bytes) { ByteArrayInputStream bis = new ByteArrayInputStream(bytes); try { return CODER.decode(bis); } catch (IOException e) { throw new RuntimeException(e); } } @VisibleForTesting static byte[] encode(MutationGroup g) { ByteArrayOutputStream bos = new ByteArrayOutputStream(); try { CODER.encode(g, bos); } catch (IOException e) { throw new RuntimeException(e); } return bos.toByteArray(); } }
class WriteGrouped extends PTransform<PCollection<MutationGroup>, SpannerWriteResult> { private final Write spec; private static final TupleTag<MutationGroup> BATCHABLE_MUTATIONS_TAG = new TupleTag<MutationGroup>("batchableMutations") {}; private static final TupleTag<Iterable<MutationGroup>> UNBATCHABLE_MUTATIONS_TAG = new TupleTag<Iterable<MutationGroup>>("unbatchableMutations") {}; private static final TupleTag<Void> MAIN_OUT_TAG = new TupleTag<Void>("mainOut") {}; private static final TupleTag<MutationGroup> FAILED_MUTATIONS_TAG = new TupleTag<MutationGroup>("failedMutations") {}; private static final SerializableCoder<MutationGroup> CODER = SerializableCoder.of(MutationGroup.class); public WriteGrouped(Write spec) { this.spec = spec; } @Override public void populateDisplayData(DisplayData.Builder builder) { super.populateDisplayData(builder); spec.populateDisplayDataWithParamaters(builder); } @Override @VisibleForTesting static MutationGroup decode(byte[] bytes) { ByteArrayInputStream bis = new ByteArrayInputStream(bytes); try { return CODER.decode(bis); } catch (IOException e) { throw new RuntimeException(e); } } @VisibleForTesting static byte[] encode(MutationGroup g) { ByteArrayOutputStream bos = new ByteArrayOutputStream(); try { CODER.encode(g, bos); } catch (IOException e) { throw new RuntimeException(e); } return bos.toByteArray(); } }
I don't have context about the current design choice of propagating the error to the session processor, may be inherited from the legacy library.
Mono<ServiceBusReceiveLink> getActiveLink() { if (this.receiveLink != null) { return Mono.just(this.receiveLink); } return Mono.defer(() -> createSessionReceiveLink() .flatMap(link -> link.getEndpointStates() .filter(e -> e == AmqpEndpointState.ACTIVE) .next() .switchIfEmpty(Mono.error(() -> new AmqpException(true, "Session receive link completed without being active", null))) .timeout(operationTimeout) .then(Mono.just(link)))) .retryWhen(Retry.from(retrySignals -> retrySignals.flatMap(signal -> { final Throwable failure = signal.failure(); LOGGER.atInfo() .addKeyValue(ENTITY_PATH_KEY, entityPath) .addKeyValue("attempt", signal.totalRetriesInARow()) .log("Error occurred while getting unnamed session.", failure); if (isDisposed.get()) { return Mono.<Long>error(new AmqpException(false, "SessionManager is already disposed.", failure, getErrorContext())); } else if (failure instanceof TimeoutException) { return Mono.delay(SLEEP_DURATION_ON_ACCEPT_SESSION_EXCEPTION); } else if (failure instanceof AmqpException && ((AmqpException) failure).getErrorCondition() == AmqpErrorCondition.TIMEOUT_ERROR) { return Mono.delay(SLEEP_DURATION_ON_ACCEPT_SESSION_EXCEPTION); } else if (failure instanceof AmqpException && ((AmqpException) failure).isTransient()) { return Mono.delay(SLEEP_DURATION_ON_ACCEPT_SESSION_EXCEPTION); } else { return Mono.<Long>error(failure); } }))); }
&& ((AmqpException) failure).isTransient()) {
Mono<ServiceBusReceiveLink> getActiveLink() { if (this.receiveLink != null) { return Mono.just(this.receiveLink); } return Mono.defer(() -> createSessionReceiveLink() .flatMap(link -> link.getEndpointStates() .filter(e -> e == AmqpEndpointState.ACTIVE) .next() .switchIfEmpty(Mono.error(() -> new AmqpException(true, "Session receive link completed without being active", null))) .timeout(operationTimeout) .then(Mono.just(link)))) .retryWhen(Retry.from(retrySignals -> retrySignals.flatMap(signal -> { final Throwable failure = signal.failure(); LOGGER.atInfo() .addKeyValue(ENTITY_PATH_KEY, entityPath) .addKeyValue("attempt", signal.totalRetriesInARow()) .log("Error occurred while getting unnamed session.", failure); if (isDisposed.get()) { return Mono.<Long>error(new AmqpException(false, "SessionManager is already disposed.", failure, getErrorContext())); } else if (failure instanceof TimeoutException) { return Mono.delay(SLEEP_DURATION_ON_ACCEPT_SESSION_EXCEPTION); } else if (failure instanceof AmqpException && ((AmqpException) failure).getErrorCondition() == AmqpErrorCondition.TIMEOUT_ERROR) { return Mono.delay(SLEEP_DURATION_ON_ACCEPT_SESSION_EXCEPTION); } else { final long id = System.nanoTime(); LOGGER.atInfo() .addKeyValue(TRACKING_ID_KEY, id) .log("Unable to acquire new session.", failure); return Mono.<Long>error(failure) .publishOn(Schedulers.boundedElastic()) .doOnError(e -> LOGGER.atInfo() .addKeyValue(TRACKING_ID_KEY, id) .log("Emitting the error signal received for session acquire attempt.", e) ); } }))); }
class ServiceBusSessionManager implements AutoCloseable { private static final Duration SLEEP_DURATION_ON_ACCEPT_SESSION_EXCEPTION = Duration.ofMinutes(1); private static final ClientLogger LOGGER = new ClientLogger(ServiceBusSessionManager.class); private final String entityPath; private final MessagingEntityType entityType; private final ReceiverOptions receiverOptions; private final ServiceBusReceiveLink receiveLink; private final ServiceBusConnectionProcessor connectionProcessor; private final Duration operationTimeout; private final MessageSerializer messageSerializer; private final String identifier; private final AtomicBoolean isDisposed = new AtomicBoolean(); private final AtomicBoolean isStarted = new AtomicBoolean(); private final List<Scheduler> schedulers; private final Deque<Scheduler> availableSchedulers = new ConcurrentLinkedDeque<>(); private final Duration maxSessionLockRenewDuration; /** * SessionId to receiver mapping. */ private final ConcurrentHashMap<String, ServiceBusSessionReceiver> sessionReceivers = new ConcurrentHashMap<>(); private final EmitterProcessor<Flux<ServiceBusMessageContext>> processor; private final FluxSink<Flux<ServiceBusMessageContext>> sessionReceiveSink; private volatile Flux<ServiceBusMessageContext> receiveFlux; ServiceBusSessionManager(String entityPath, MessagingEntityType entityType, ServiceBusConnectionProcessor connectionProcessor, MessageSerializer messageSerializer, ReceiverOptions receiverOptions, ServiceBusReceiveLink receiveLink, String identifier) { this.entityPath = entityPath; this.entityType = entityType; this.receiverOptions = receiverOptions; this.connectionProcessor = connectionProcessor; this.operationTimeout = connectionProcessor.getRetryOptions().getTryTimeout(); this.messageSerializer = messageSerializer; this.maxSessionLockRenewDuration = receiverOptions.getMaxLockRenewDuration(); this.identifier = identifier; final int numberOfSchedulers = receiverOptions.isRollingSessionReceiver() ? 
receiverOptions.getMaxConcurrentSessions() : 1; final List<Scheduler> schedulerList = IntStream.range(0, numberOfSchedulers) .mapToObj(index -> Schedulers.newBoundedElastic(DEFAULT_BOUNDED_ELASTIC_SIZE, DEFAULT_BOUNDED_ELASTIC_QUEUESIZE, "receiver-" + index)) .collect(Collectors.toList()); this.schedulers = Collections.unmodifiableList(schedulerList); this.availableSchedulers.addAll(this.schedulers); this.processor = EmitterProcessor.create(numberOfSchedulers, false); this.sessionReceiveSink = processor.sink(); this.receiveLink = receiveLink; } ServiceBusSessionManager(String entityPath, MessagingEntityType entityType, ServiceBusConnectionProcessor connectionProcessor, MessageSerializer messageSerializer, ReceiverOptions receiverOptions, String identifier) { this(entityPath, entityType, connectionProcessor, messageSerializer, receiverOptions, null, identifier); } /** * Gets the link name with the matching {@code sessionId}. * * @param sessionId Session id to get link name for. * * @return The name of the link, or {@code null} if there is no open link with that {@code sessionId}. */ String getLinkName(String sessionId) { final ServiceBusSessionReceiver receiver = sessionReceivers.get(sessionId); return receiver != null ? receiver.getLinkName() : null; } /** * Gets the identifier of the instance of {@link ServiceBusSessionManager}. * * @return The identifier that can identify the instance of {@link ServiceBusSessionManager}. */ public String getIdentifier() { return this.identifier; } /** * Gets the state of a session given its identifier. * * @param sessionId Identifier of session to get. * * @return The session state or an empty Mono if there is no state set for the session. * @throws IllegalStateException if the receiver is a non-session receiver. 
*/ Mono<byte[]> getSessionState(String sessionId) { return validateParameter(sessionId, "sessionId", "getSessionState").then( getManagementNode().flatMap(channel -> { final ServiceBusSessionReceiver receiver = sessionReceivers.get(sessionId); final String associatedLinkName = receiver != null ? receiver.getLinkName() : null; return channel.getSessionState(sessionId, associatedLinkName); })); } /** * Gets a stream of messages from different sessions. * * @return A Flux of messages merged from different sessions. */ Flux<ServiceBusMessageContext> receive() { if (!isStarted.getAndSet(true)) { this.sessionReceiveSink.onRequest(this::onSessionRequest); if (!receiverOptions.isRollingSessionReceiver()) { receiveFlux = getSession(schedulers.get(0), false); } else { receiveFlux = Flux.merge(processor, receiverOptions.getMaxConcurrentSessions()); } } return receiveFlux; } /** * Renews the session lock. * * @param sessionId Identifier of session to get. * * @return The next expiration time for the session lock. * @throws IllegalStateException if the receiver is a non-session receiver. */ Mono<OffsetDateTime> renewSessionLock(String sessionId) { return validateParameter(sessionId, "sessionId", "renewSessionLock").then( getManagementNode().flatMap(channel -> { final ServiceBusSessionReceiver receiver = sessionReceivers.get(sessionId); final String associatedLinkName = receiver != null ? receiver.getLinkName() : null; return channel.renewSessionLock(sessionId, associatedLinkName).handle((offsetDateTime, sink) -> { if (receiver != null) { receiver.setSessionLockedUntil(offsetDateTime); } sink.next(offsetDateTime); }); })); } /** * Tries to update the message disposition on a session aware receive link. * * @return {@code true} if the {@code lockToken} was updated on receive link. {@code false} otherwise. This means * there isn't an open link with that {@code sessionId}. 
*/ Mono<Boolean> updateDisposition(String lockToken, String sessionId, DispositionStatus dispositionStatus, Map<String, Object> propertiesToModify, String deadLetterReason, String deadLetterDescription, ServiceBusTransactionContext transactionContext) { final String operation = "updateDisposition"; return Mono.when( validateParameter(lockToken, "lockToken", operation), validateParameter(lockToken, "lockToken", operation), validateParameter(sessionId, "'sessionId'", operation)).then( Mono.defer(() -> { final ServiceBusSessionReceiver receiver = sessionReceivers.get(sessionId); if (receiver == null || !receiver.containsLockToken(lockToken)) { return Mono.just(false); } final DeliveryState deliveryState = MessageUtils.getDeliveryState(dispositionStatus, deadLetterReason, deadLetterDescription, propertiesToModify, transactionContext); return receiver.updateDisposition(lockToken, deliveryState).thenReturn(true); })); } @Override public void close() { if (isDisposed.getAndSet(true)) { return; } final List<Mono<Void>> closeables = sessionReceivers.values().stream() .map(receiver -> receiver.closeAsync()) .collect(Collectors.toList()); Mono.when(closeables).block(operationTimeout); sessionReceiveSink.complete(); for (Scheduler scheduler : schedulers) { scheduler.dispose(); } } private AmqpErrorContext getErrorContext() { return new SessionErrorContext(connectionProcessor.getFullyQualifiedNamespace(), entityPath); } /** * Creates an session receive link. * * @return A Mono that completes with an session receive link. */ private Mono<ServiceBusReceiveLink> createSessionReceiveLink() { final String sessionId = receiverOptions.getSessionId(); final String linkName = (sessionId != null) ? sessionId : StringUtil.getRandomString("session-"); return connectionProcessor .flatMap(connection -> { return connection.createReceiveLink(linkName, entityPath, receiverOptions.getReceiveMode(), null, entityType, identifier, sessionId); }); } /** * Gets an active unnamed session link. 
* * @return A Mono that completes when an unnamed session becomes available. * @throws AmqpException if the session manager is already disposed. */ /** * Gets the next available unnamed session with the given receive options and publishes its contents on the given * {@code scheduler}. * * @param scheduler Scheduler to coordinate received methods on. * @param disposeOnIdle true to dispose receiver when it idles; false otherwise. * @return A Mono that completes with an unnamed session receiver. */ private Flux<ServiceBusMessageContext> getSession(Scheduler scheduler, boolean disposeOnIdle) { return getActiveLink().flatMap(link -> link.getSessionId() .map(sessionId -> sessionReceivers.compute(sessionId, (key, existing) -> { if (existing != null) { return existing; } return new ServiceBusSessionReceiver(link, messageSerializer, connectionProcessor.getRetryOptions(), receiverOptions.getPrefetchCount(), disposeOnIdle, scheduler, this::renewSessionLock, maxSessionLockRenewDuration); }))) .flatMapMany(sessionReceiver -> sessionReceiver.receive().doFinally(signalType -> { LOGGER.atVerbose() .addKeyValue(SESSION_ID_KEY, sessionReceiver.getSessionId()) .log("Closing session receiver."); availableSchedulers.push(scheduler); sessionReceivers.remove(sessionReceiver.getSessionId()); sessionReceiver.closeAsync().subscribe(); if (receiverOptions.isRollingSessionReceiver()) { onSessionRequest(1L); } })); } private Mono<ServiceBusManagementNode> getManagementNode() { return connectionProcessor.flatMap(connection -> connection.getManagementNode(entityPath, entityType)); } /** * Emits a new unnamed active session when it becomes available. * * @param request Number of unnamed active sessions to emit. */ private void onSessionRequest(long request) { if (isDisposed.get()) { LOGGER.info("Session manager is disposed. 
Not emitting more unnamed sessions."); return; } LOGGER.atVerbose() .addKeyValue(NUMBER_OF_REQUESTED_MESSAGES_KEY, request) .log("Requested unnamed sessions."); for (int i = 0; i < request; i++) { final Scheduler scheduler = availableSchedulers.poll(); if (scheduler == null) { if (request != Long.MAX_VALUE) { LOGGER.atVerbose() .addKeyValue(NUMBER_OF_REQUESTED_MESSAGES_KEY, request) .log("There are no available schedulers to fetch."); } return; } Flux<ServiceBusMessageContext> session = getSession(scheduler, true); sessionReceiveSink.next(session); } } private <T> Mono<Void> validateParameter(T parameter, String parameterName, String operation) { if (isDisposed.get()) { return monoError(LOGGER, new IllegalStateException( String.format(INVALID_OPERATION_DISPOSED_RECEIVER, operation))); } else if (parameter == null) { return monoError(LOGGER, new NullPointerException(String.format("'%s' cannot be null.", parameterName))); } else if ((parameter instanceof String) && (((String) parameter).isEmpty())) { return monoError(LOGGER, new IllegalArgumentException(String.format("'%s' cannot be an empty string.", parameterName))); } else { return Mono.empty(); } } }
class ServiceBusSessionManager implements AutoCloseable { private static final Duration SLEEP_DURATION_ON_ACCEPT_SESSION_EXCEPTION = Duration.ofMinutes(1); private static final String TRACKING_ID_KEY = "trackingId"; private static final ClientLogger LOGGER = new ClientLogger(ServiceBusSessionManager.class); private final String entityPath; private final MessagingEntityType entityType; private final ReceiverOptions receiverOptions; private final ServiceBusReceiveLink receiveLink; private final ServiceBusConnectionProcessor connectionProcessor; private final Duration operationTimeout; private final MessageSerializer messageSerializer; private final String identifier; private final AtomicBoolean isDisposed = new AtomicBoolean(); private final AtomicBoolean isStarted = new AtomicBoolean(); private final List<Scheduler> schedulers; private final Deque<Scheduler> availableSchedulers = new ConcurrentLinkedDeque<>(); private final Duration maxSessionLockRenewDuration; /** * SessionId to receiver mapping. 
*/ private final ConcurrentHashMap<String, ServiceBusSessionReceiver> sessionReceivers = new ConcurrentHashMap<>(); private final EmitterProcessor<Flux<ServiceBusMessageContext>> processor; private final FluxSink<Flux<ServiceBusMessageContext>> sessionReceiveSink; private volatile Flux<ServiceBusMessageContext> receiveFlux; ServiceBusSessionManager(String entityPath, MessagingEntityType entityType, ServiceBusConnectionProcessor connectionProcessor, MessageSerializer messageSerializer, ReceiverOptions receiverOptions, ServiceBusReceiveLink receiveLink, String identifier) { this.entityPath = entityPath; this.entityType = entityType; this.receiverOptions = receiverOptions; this.connectionProcessor = connectionProcessor; this.operationTimeout = connectionProcessor.getRetryOptions().getTryTimeout(); this.messageSerializer = messageSerializer; this.maxSessionLockRenewDuration = receiverOptions.getMaxLockRenewDuration(); this.identifier = identifier; final int numberOfSchedulers = receiverOptions.isRollingSessionReceiver() ? receiverOptions.getMaxConcurrentSessions() : 1; final List<Scheduler> schedulerList = IntStream.range(0, numberOfSchedulers) .mapToObj(index -> Schedulers.newBoundedElastic(DEFAULT_BOUNDED_ELASTIC_SIZE, DEFAULT_BOUNDED_ELASTIC_QUEUESIZE, "receiver-" + index)) .collect(Collectors.toList()); this.schedulers = Collections.unmodifiableList(schedulerList); this.availableSchedulers.addAll(this.schedulers); this.processor = EmitterProcessor.create(numberOfSchedulers, false); this.sessionReceiveSink = processor.sink(); this.receiveLink = receiveLink; } ServiceBusSessionManager(String entityPath, MessagingEntityType entityType, ServiceBusConnectionProcessor connectionProcessor, MessageSerializer messageSerializer, ReceiverOptions receiverOptions, String identifier) { this(entityPath, entityType, connectionProcessor, messageSerializer, receiverOptions, null, identifier); } /** * Gets the link name with the matching {@code sessionId}. 
* * @param sessionId Session id to get link name for. * * @return The name of the link, or {@code null} if there is no open link with that {@code sessionId}. */ String getLinkName(String sessionId) { final ServiceBusSessionReceiver receiver = sessionReceivers.get(sessionId); return receiver != null ? receiver.getLinkName() : null; } /** * Gets the identifier of the instance of {@link ServiceBusSessionManager}. * * @return The identifier that can identify the instance of {@link ServiceBusSessionManager}. */ public String getIdentifier() { return this.identifier; } /** * Gets the state of a session given its identifier. * * @param sessionId Identifier of session to get. * * @return The session state or an empty Mono if there is no state set for the session. * @throws IllegalStateException if the receiver is a non-session receiver. */ Mono<byte[]> getSessionState(String sessionId) { return validateParameter(sessionId, "sessionId", "getSessionState").then( getManagementNode().flatMap(channel -> { final ServiceBusSessionReceiver receiver = sessionReceivers.get(sessionId); final String associatedLinkName = receiver != null ? receiver.getLinkName() : null; return channel.getSessionState(sessionId, associatedLinkName); })); } /** * Gets a stream of messages from different sessions. * * @return A Flux of messages merged from different sessions. */ Flux<ServiceBusMessageContext> receive() { if (!isStarted.getAndSet(true)) { this.sessionReceiveSink.onRequest(this::onSessionRequest); if (!receiverOptions.isRollingSessionReceiver()) { receiveFlux = getSession(schedulers.get(0), false); } else { receiveFlux = Flux.merge(processor, receiverOptions.getMaxConcurrentSessions()); } } return receiveFlux; } /** * Renews the session lock. * * @param sessionId Identifier of session to get. * * @return The next expiration time for the session lock. * @throws IllegalStateException if the receiver is a non-session receiver. 
*/ Mono<OffsetDateTime> renewSessionLock(String sessionId) { return validateParameter(sessionId, "sessionId", "renewSessionLock").then( getManagementNode().flatMap(channel -> { final ServiceBusSessionReceiver receiver = sessionReceivers.get(sessionId); final String associatedLinkName = receiver != null ? receiver.getLinkName() : null; return channel.renewSessionLock(sessionId, associatedLinkName).handle((offsetDateTime, sink) -> { if (receiver != null) { receiver.setSessionLockedUntil(offsetDateTime); } sink.next(offsetDateTime); }); })); } /** * Tries to update the message disposition on a session aware receive link. * * @return {@code true} if the {@code lockToken} was updated on receive link. {@code false} otherwise. This means * there isn't an open link with that {@code sessionId}. */ Mono<Boolean> updateDisposition(String lockToken, String sessionId, DispositionStatus dispositionStatus, Map<String, Object> propertiesToModify, String deadLetterReason, String deadLetterDescription, ServiceBusTransactionContext transactionContext) { final String operation = "updateDisposition"; return Mono.when( validateParameter(lockToken, "lockToken", operation), validateParameter(lockToken, "lockToken", operation), validateParameter(sessionId, "'sessionId'", operation)).then( Mono.defer(() -> { final ServiceBusSessionReceiver receiver = sessionReceivers.get(sessionId); if (receiver == null || !receiver.containsLockToken(lockToken)) { return Mono.just(false); } final DeliveryState deliveryState = MessageUtils.getDeliveryState(dispositionStatus, deadLetterReason, deadLetterDescription, propertiesToModify, transactionContext); return receiver.updateDisposition(lockToken, deliveryState).thenReturn(true); })); } @Override public void close() { if (isDisposed.getAndSet(true)) { return; } final List<Mono<Void>> closeables = sessionReceivers.values().stream() .map(receiver -> receiver.closeAsync()) .collect(Collectors.toList()); Mono.when(closeables).block(operationTimeout); 
sessionReceiveSink.complete(); for (Scheduler scheduler : schedulers) { scheduler.dispose(); } } private AmqpErrorContext getErrorContext() { return new SessionErrorContext(connectionProcessor.getFullyQualifiedNamespace(), entityPath); } /** * Creates an session receive link. * * @return A Mono that completes with an session receive link. */ private Mono<ServiceBusReceiveLink> createSessionReceiveLink() { final String sessionId = receiverOptions.getSessionId(); final String linkName = (sessionId != null) ? sessionId : StringUtil.getRandomString("session-"); return connectionProcessor .flatMap(connection -> { return connection.createReceiveLink(linkName, entityPath, receiverOptions.getReceiveMode(), null, entityType, identifier, sessionId); }); } /** * Gets an active unnamed session link. * * @return A Mono that completes when an unnamed session becomes available. * @throws AmqpException if the session manager is already disposed. */ /** * Gets the next available unnamed session with the given receive options and publishes its contents on the given * {@code scheduler}. * * @param scheduler Scheduler to coordinate received methods on. * @param disposeOnIdle true to dispose receiver when it idles; false otherwise. * @return A Mono that completes with an unnamed session receiver. 
*/ private Flux<ServiceBusMessageContext> getSession(Scheduler scheduler, boolean disposeOnIdle) { return getActiveLink().flatMap(link -> link.getSessionId() .map(sessionId -> sessionReceivers.compute(sessionId, (key, existing) -> { if (existing != null) { return existing; } return new ServiceBusSessionReceiver(link, messageSerializer, connectionProcessor.getRetryOptions(), receiverOptions.getPrefetchCount(), disposeOnIdle, scheduler, this::renewSessionLock, maxSessionLockRenewDuration); }))) .flatMapMany(sessionReceiver -> sessionReceiver.receive().doFinally(signalType -> { LOGGER.atVerbose() .addKeyValue(SESSION_ID_KEY, sessionReceiver.getSessionId()) .log("Closing session receiver."); availableSchedulers.push(scheduler); sessionReceivers.remove(sessionReceiver.getSessionId()); sessionReceiver.closeAsync().subscribe(); if (receiverOptions.isRollingSessionReceiver()) { onSessionRequest(1L); } })); } private Mono<ServiceBusManagementNode> getManagementNode() { return connectionProcessor.flatMap(connection -> connection.getManagementNode(entityPath, entityType)); } /** * Emits a new unnamed active session when it becomes available. * * @param request Number of unnamed active sessions to emit. */ private void onSessionRequest(long request) { if (isDisposed.get()) { LOGGER.info("Session manager is disposed. 
Not emitting more unnamed sessions."); return; } LOGGER.atVerbose() .addKeyValue(NUMBER_OF_REQUESTED_MESSAGES_KEY, request) .log("Requested unnamed sessions."); for (int i = 0; i < request; i++) { final Scheduler scheduler = availableSchedulers.poll(); if (scheduler == null) { if (request != Long.MAX_VALUE) { LOGGER.atVerbose() .addKeyValue(NUMBER_OF_REQUESTED_MESSAGES_KEY, request) .log("There are no available schedulers to fetch."); } return; } Flux<ServiceBusMessageContext> session = getSession(scheduler, true); sessionReceiveSink.next(session); } } private <T> Mono<Void> validateParameter(T parameter, String parameterName, String operation) { if (isDisposed.get()) { return monoError(LOGGER, new IllegalStateException( String.format(INVALID_OPERATION_DISPOSED_RECEIVER, operation))); } else if (parameter == null) { return monoError(LOGGER, new NullPointerException(String.format("'%s' cannot be null.", parameterName))); } else if ((parameter instanceof String) && (((String) parameter).isEmpty())) { return monoError(LOGGER, new IllegalArgumentException(String.format("'%s' cannot be an empty string.", parameterName))); } else { return Mono.empty(); } } }
```suggestion Objects.requireNonNull(rawId, "'RawID' of the CommunicationIdentifierModel cannot be null."); ```
/**
 * Maps a {@link CommunicationIdentifierModel} returned by the service to its
 * corresponding {@link CommunicationIdentifier}.
 * <p>
 * Exactly one identifier sub-model may be populated; the kind is taken from the
 * model when set and otherwise inferred from whichever sub-model is present.
 * Unrecognized kinds degrade to an {@link UnknownIdentifier} keyed by the raw id.
 *
 * @param identifier the wire model to convert; {@code null} maps to {@code null}
 * @return the converted identifier, or {@code null} when the input is {@code null}
 * @throws NullPointerException when a required field of the model is missing
 */
public static CommunicationIdentifier convert(CommunicationIdentifierModel identifier) {
    if (identifier == null) {
        return null;
    }

    // Reject models that populate more than one identifier sub-type.
    assertSingleType(identifier);
    String rawId = identifier.getRawId();

    // Fall back to inferring the kind from the populated sub-model when the
    // service did not set it explicitly.
    CommunicationIdentifierModelKind kind = (identifier.getKind() != null)
        ? identifier.getKind() : extractKind(identifier);

    if (kind == CommunicationIdentifierModelKind.COMMUNICATION_USER
        && identifier.getCommunicationUser() != null) {
        Objects.requireNonNull(identifier.getCommunicationUser().getId(),
            "'ID' of the CommunicationIdentifierModel cannot be null.");
        return new CommunicationUserIdentifier(identifier.getCommunicationUser().getId());
    }

    if (kind == CommunicationIdentifierModelKind.PHONE_NUMBER && identifier.getPhoneNumber() != null) {
        String phoneNumber = identifier.getPhoneNumber().getValue();
        Objects.requireNonNull(phoneNumber,
            "'PhoneNumber' of the CommunicationIdentifierModel cannot be null.");
        Objects.requireNonNull(rawId, "'RawID' of the CommunicationIdentifierModel cannot be null.");
        return new PhoneNumberIdentifier(phoneNumber).setRawId(rawId);
    }

    if (kind == CommunicationIdentifierModelKind.MICROSOFT_TEAMS_USER
        && identifier.getMicrosoftTeamsUser() != null) {
        MicrosoftTeamsUserIdentifierModel teamsUserIdentifierModel = identifier.getMicrosoftTeamsUser();
        Objects.requireNonNull(teamsUserIdentifierModel.getUserId(),
            "'UserID' of the CommunicationIdentifierModel cannot be null.");
        Objects.requireNonNull(teamsUserIdentifierModel.getCloud(),
            "'Cloud' of the CommunicationIdentifierModel cannot be null.");
        Objects.requireNonNull(rawId, "'RawID' of the CommunicationIdentifierModel cannot be null.");
        return new MicrosoftTeamsUserIdentifier(teamsUserIdentifierModel.getUserId(),
            teamsUserIdentifierModel.isAnonymous())
            .setRawId(rawId)
            .setCloudEnvironment(CommunicationCloudEnvironment
                .fromString(teamsUserIdentifierModel.getCloud().toString()));
    }

    // Unknown kind: surface it as an UnknownIdentifier keyed by the raw id.
    Objects.requireNonNull(rawId, "'RawID' of the CommunicationIdentifierModel cannot be null.");
    return new UnknownIdentifier(rawId);
}
Objects.requireNonNull(rawId, "'RawID' of the CommunicationUserIdentifierModel cannot be null.");
/**
 * Converts a wire-level {@link CommunicationIdentifierModel} into the matching
 * {@link CommunicationIdentifier} type, validating the required fields of the
 * selected sub-model and falling back to {@link UnknownIdentifier} for kinds
 * this converter does not recognize.
 *
 * @param identifier the model to convert; {@code null} yields {@code null}
 * @return the corresponding identifier instance
 * @throws NullPointerException when a required model field is absent
 */
public static CommunicationIdentifier convert(CommunicationIdentifierModel identifier) {
    if (identifier == null) {
        return null;
    }
    assertSingleType(identifier);

    String rawIdentifier = identifier.getRawId();
    CommunicationIdentifierModelKind resolvedKind = identifier.getKind() != null
        ? identifier.getKind()
        : extractKind(identifier);

    if (resolvedKind == CommunicationIdentifierModelKind.COMMUNICATION_USER
        && identifier.getCommunicationUser() != null) {
        String userId = identifier.getCommunicationUser().getId();
        Objects.requireNonNull(userId, "'ID' of the CommunicationIdentifierModel cannot be null.");
        return new CommunicationUserIdentifier(userId);
    }

    if (resolvedKind == CommunicationIdentifierModelKind.PHONE_NUMBER
        && identifier.getPhoneNumber() != null) {
        String phoneNumberValue = identifier.getPhoneNumber().getValue();
        Objects.requireNonNull(phoneNumberValue,
            "'PhoneNumber' of the CommunicationIdentifierModel cannot be null.");
        Objects.requireNonNull(rawIdentifier,
            "'RawID' of the CommunicationIdentifierModel cannot be null.");
        return new PhoneNumberIdentifier(phoneNumberValue).setRawId(rawIdentifier);
    }

    if (resolvedKind == CommunicationIdentifierModelKind.MICROSOFT_TEAMS_USER
        && identifier.getMicrosoftTeamsUser() != null) {
        MicrosoftTeamsUserIdentifierModel teamsModel = identifier.getMicrosoftTeamsUser();
        Objects.requireNonNull(teamsModel.getUserId(),
            "'UserID' of the CommunicationIdentifierModel cannot be null.");
        Objects.requireNonNull(teamsModel.getCloud(),
            "'Cloud' of the CommunicationIdentifierModel cannot be null.");
        Objects.requireNonNull(rawIdentifier,
            "'RawID' of the CommunicationIdentifierModel cannot be null.");
        MicrosoftTeamsUserIdentifier teamsUser =
            new MicrosoftTeamsUserIdentifier(teamsModel.getUserId(), teamsModel.isAnonymous());
        return teamsUser
            .setRawId(rawIdentifier)
            .setCloudEnvironment(
                CommunicationCloudEnvironment.fromString(teamsModel.getCloud().toString()));
    }

    // No recognized sub-model: preserve the raw id in an UnknownIdentifier.
    Objects.requireNonNull(rawIdentifier, "'RawID' of the CommunicationIdentifierModel cannot be null.");
    return new UnknownIdentifier(rawIdentifier);
}
class CommunicationIdentifierConverter { /** * Maps from {@link CommunicationIdentifierModel} to {@link CommunicationIdentifier}. */ /** * Maps from {@link CommunicationIdentifier} to {@link CommunicationIdentifierModel}. */ public static CommunicationIdentifierModel convert(CommunicationIdentifier identifier) throws IllegalArgumentException { if (identifier == null) { return null; } if (identifier instanceof CommunicationUserIdentifier) { CommunicationUserIdentifier communicationUserIdentifier = (CommunicationUserIdentifier) identifier; return new CommunicationIdentifierModel() .setRawId(communicationUserIdentifier.getRawId()) .setCommunicationUser( new CommunicationUserIdentifierModel().setId(communicationUserIdentifier.getId())); } if (identifier instanceof PhoneNumberIdentifier) { PhoneNumberIdentifier phoneNumberIdentifier = (PhoneNumberIdentifier) identifier; return new CommunicationIdentifierModel() .setRawId(phoneNumberIdentifier.getRawId()) .setPhoneNumber(new PhoneNumberIdentifierModel().setValue(phoneNumberIdentifier.getPhoneNumber())); } if (identifier instanceof MicrosoftTeamsUserIdentifier) { MicrosoftTeamsUserIdentifier teamsUserIdentifier = (MicrosoftTeamsUserIdentifier) identifier; return new CommunicationIdentifierModel() .setRawId(teamsUserIdentifier.getRawId()) .setMicrosoftTeamsUser(new MicrosoftTeamsUserIdentifierModel() .setIsAnonymous(teamsUserIdentifier.isAnonymous()) .setUserId(teamsUserIdentifier.getUserId()) .setCloud(CommunicationCloudEnvironmentModel.fromString( teamsUserIdentifier.getCloudEnvironment().toString()))); } if (identifier instanceof UnknownIdentifier) { UnknownIdentifier unknownIdentifier = (UnknownIdentifier) identifier; return new CommunicationIdentifierModel().setRawId(unknownIdentifier.getId()); } throw new IllegalArgumentException(String.format("Unknown identifier class '%s'", identifier.getClass().getName())); } private static void assertSingleType(CommunicationIdentifierModel identifier) { 
CommunicationUserIdentifierModel communicationUser = identifier.getCommunicationUser(); PhoneNumberIdentifierModel phoneNumber = identifier.getPhoneNumber(); MicrosoftTeamsUserIdentifierModel microsoftTeamsUser = identifier.getMicrosoftTeamsUser(); ArrayList<String> presentProperties = new ArrayList<>(); if (communicationUser != null) { presentProperties.add(communicationUser.getClass().getName()); } if (phoneNumber != null) { presentProperties.add(phoneNumber.getClass().getName()); } if (microsoftTeamsUser != null) { presentProperties.add(microsoftTeamsUser.getClass().getName()); } if (presentProperties.size() > 1) { throw new IllegalArgumentException( String.format( "Only one of the identifier models in %s should be present.", String.join(", ", presentProperties))); } } private static CommunicationIdentifierModelKind extractKind(CommunicationIdentifierModel identifier) { Objects.requireNonNull(identifier); if (identifier.getCommunicationUser() != null) { return CommunicationIdentifierModelKind.COMMUNICATION_USER; } if (identifier.getPhoneNumber() != null) { return CommunicationIdentifierModelKind.PHONE_NUMBER; } if (identifier.getMicrosoftTeamsUser() != null) { return CommunicationIdentifierModelKind.MICROSOFT_TEAMS_USER; } return CommunicationIdentifierModelKind.UNKNOWN; } }
class CommunicationIdentifierConverter { /** * Maps from {@link CommunicationIdentifierModel} to {@link CommunicationIdentifier}. */ /** * Maps from {@link CommunicationIdentifier} to {@link CommunicationIdentifierModel}. */ public static CommunicationIdentifierModel convert(CommunicationIdentifier identifier) throws IllegalArgumentException { if (identifier == null) { return null; } if (identifier instanceof CommunicationUserIdentifier) { CommunicationUserIdentifier communicationUserIdentifier = (CommunicationUserIdentifier) identifier; return new CommunicationIdentifierModel() .setRawId(communicationUserIdentifier.getRawId()) .setCommunicationUser( new CommunicationUserIdentifierModel().setId(communicationUserIdentifier.getId())); } if (identifier instanceof PhoneNumberIdentifier) { PhoneNumberIdentifier phoneNumberIdentifier = (PhoneNumberIdentifier) identifier; return new CommunicationIdentifierModel() .setRawId(phoneNumberIdentifier.getRawId()) .setPhoneNumber(new PhoneNumberIdentifierModel().setValue(phoneNumberIdentifier.getPhoneNumber())); } if (identifier instanceof MicrosoftTeamsUserIdentifier) { MicrosoftTeamsUserIdentifier teamsUserIdentifier = (MicrosoftTeamsUserIdentifier) identifier; return new CommunicationIdentifierModel() .setRawId(teamsUserIdentifier.getRawId()) .setMicrosoftTeamsUser(new MicrosoftTeamsUserIdentifierModel() .setIsAnonymous(teamsUserIdentifier.isAnonymous()) .setUserId(teamsUserIdentifier.getUserId()) .setCloud(CommunicationCloudEnvironmentModel.fromString( teamsUserIdentifier.getCloudEnvironment().toString()))); } if (identifier instanceof UnknownIdentifier) { UnknownIdentifier unknownIdentifier = (UnknownIdentifier) identifier; return new CommunicationIdentifierModel().setRawId(unknownIdentifier.getId()); } throw new IllegalArgumentException(String.format("Unknown identifier class '%s'", identifier.getClass().getName())); } private static void assertSingleType(CommunicationIdentifierModel identifier) { 
CommunicationUserIdentifierModel communicationUser = identifier.getCommunicationUser(); PhoneNumberIdentifierModel phoneNumber = identifier.getPhoneNumber(); MicrosoftTeamsUserIdentifierModel microsoftTeamsUser = identifier.getMicrosoftTeamsUser(); ArrayList<String> presentProperties = new ArrayList<>(); if (communicationUser != null) { presentProperties.add(communicationUser.getClass().getName()); } if (phoneNumber != null) { presentProperties.add(phoneNumber.getClass().getName()); } if (microsoftTeamsUser != null) { presentProperties.add(microsoftTeamsUser.getClass().getName()); } if (presentProperties.size() > 1) { throw new IllegalArgumentException( String.format( "Only one of the identifier models in %s should be present.", String.join(", ", presentProperties))); } } private static CommunicationIdentifierModelKind extractKind(CommunicationIdentifierModel identifier) { Objects.requireNonNull(identifier, "CommunicationIdentifierModel cannot be null."); if (identifier.getCommunicationUser() != null) { return CommunicationIdentifierModelKind.COMMUNICATION_USER; } if (identifier.getPhoneNumber() != null) { return CommunicationIdentifierModelKind.PHONE_NUMBER; } if (identifier.getMicrosoftTeamsUser() != null) { return CommunicationIdentifierModelKind.MICROSOFT_TEAMS_USER; } return CommunicationIdentifierModelKind.UNKNOWN; } }
Not really — the destination is just an address. It behaves like a _classic_ queue or a topic depending on how the message was dispatched (send/request vs. publish).
/**
 * Determines the messaging destination kind for the given event-bus message.
 * <p>
 * A Vert.x event-bus destination is just an address; it acts like a queue for
 * point-to-point send/request delivery and like a topic for publish delivery,
 * so the kind is derived from how the message was dispatched rather than being
 * a fixed constant.
 *
 * @param message the event-bus message being inspected
 * @return {@code "queue"} for send/request messages, {@code "topic"} for published ones
 */
protected String destinationKind(final Message message) {
    return message.isSend() ? "queue" : "topic";
}
return "topic";
/**
 * Reports the destination kind of the message: point-to-point sends map to
 * {@code "queue"}, broadcast publishes map to {@code "topic"}.
 *
 * @param message the event-bus message being inspected
 * @return the messaging destination kind for this message
 */
protected String destinationKind(final Message message) {
    if (message.isSend()) {
        return "queue";
    }
    return "topic";
}
class EventBusAttributesExtractor extends MessagingAttributesExtractor<Message, Message> { private final MessageOperation operation; public EventBusAttributesExtractor(final MessageOperation operation) { this.operation = operation; } @Override public MessageOperation operation() { return operation; } @Override protected String system(final Message message) { return "vert.x"; } @Override @Override protected String destination(final Message message) { return message.address(); } @Override protected boolean temporaryDestination(final Message message) { return false; } @Override protected String protocol(final Message message) { return null; } @Override protected String protocolVersion(final Message message) { return null; } @Override protected String url(final Message message) { return null; } @Override protected String conversationId(final Message message) { return null; } @Override protected Long messagePayloadSize(final Message message) { return null; } @Override protected Long messagePayloadCompressedSize(final Message message) { return null; } @Override protected String messageId(final Message message, final Message message2) { return null; } }
class EventBusAttributesExtractor extends MessagingAttributesExtractor<Message, Message> { private final MessageOperation operation; public EventBusAttributesExtractor(final MessageOperation operation) { this.operation = operation; } @Override public MessageOperation operation() { return operation; } @Override protected String system(final Message message) { return "vert.x"; } @Override @Override protected String destination(final Message message) { return message.address(); } @Override protected boolean temporaryDestination(final Message message) { return false; } @Override protected String protocol(final Message message) { return null; } @Override protected String protocolVersion(final Message message) { return "4.0"; } @Override protected String url(final Message message) { return null; } @Override protected String conversationId(final Message message) { return message.replyAddress(); } @Override protected Long messagePayloadSize(final Message message) { return null; } @Override protected Long messagePayloadCompressedSize(final Message message) { return null; } @Override protected String messageId(final Message message, final Message message2) { return null; } }
@pedroigor @boosey Hi. I think there should be some code followed by a custom header indicating it is to do with the XHR. Otherwise SPA will try to reload even if it is a genuine `401` (whenever we decide to return `401`, example, we return `401` in case the code flow fails to complete). To be honest I'm not sure it should be `401`. At the quarkus OIDC level we want to redirect, right ? But such that if it is XHR and the option is set, then let XHR fail. Can we set `399`, at least it in the `3xx` redirect space ? That would also fit better with the fact we throw `AuthenticationRedirectException` But in any case, it should be accompanied by something like `XHR-Location: redirect_uri...` or something simpler. This can be done by adding a custom header to the response context and then throwing `AuthenticationRedirectException`
/**
 * Builds a redirect exception whose status code depends on whether the client
 * is expected to follow the redirect automatically.
 * <p>
 * {@code 302} triggers a standard browser redirect; {@code 444} is a
 * non-standard code used to signal callers (e.g. XHR/SPA clients) that must
 * handle the redirect themselves. NOTE(review): the non-auto-redirect code is
 * under discussion — a 3xx value such as 399 plus a custom location header may
 * replace 444; confirm before relying on the exact value.
 *
 * @param autoRedirect whether the response should auto-redirect; must not be null
 * @param redirectUri the URI the caller should be redirected to
 * @throws NullPointerException if {@code autoRedirect} is null
 */
public AuthenticationRedirectException(Boolean autoRedirect, String redirectUri) {
    // Explicit null check gives a clear message instead of a bare unboxing NPE.
    this(java.util.Objects.requireNonNull(autoRedirect, "autoRedirect must not be null")
        ? 302 : 444, redirectUri);
}
this(autoRedirect ? 302 : 444, redirectUri);
/**
 * Builds a redirect exception from an auto-redirect flag.
 * <p>
 * Maps {@code true} to status 302 (standard redirect) and {@code false} to the
 * non-standard status 444, which signals callers such as XHR/SPA clients to
 * handle the redirect themselves rather than follow it.
 * NOTE(review): a null {@code autoRedirect} throws a NullPointerException
 * during unboxing — confirm callers never pass null.
 *
 * @param autoRedirect whether the response should auto-redirect the caller
 * @param redirectUri the URI to redirect to
 */
public AuthenticationRedirectException(Boolean autoRedirect, String redirectUri) {
    this(autoRedirect ? 302 : 444, redirectUri);
}
/**
 * Exception signalling that the current request must be answered with an HTTP
 * redirect during authentication, carrying the status code and target URI.
 */
class AuthenticationRedirectException extends RuntimeException {

    // Both fields are write-once; declared final to guarantee immutability.
    final int code;
    final String redirectUri;

    /**
     * Creates a redirect exception with the default 302 (Found) status.
     *
     * @param redirectUri the URI the client should be redirected to
     */
    public AuthenticationRedirectException(String redirectUri) {
        this(302, redirectUri);
    }

    /**
     * Creates a redirect exception with an explicit status code.
     *
     * @param code the HTTP status code to respond with
     * @param redirectUri the URI the client should be redirected to
     */
    public AuthenticationRedirectException(int code, String redirectUri) {
        this.code = code;
        this.redirectUri = redirectUri;
    }

    /** @return the HTTP status code to respond with */
    public int getCode() {
        return this.code;
    }

    /** @return the URI the client should be redirected to */
    public String getRedirectUri() {
        return redirectUri;
    }
}
/**
 * Runtime exception used to abort processing and redirect the client during
 * authentication. Holds the response status code and the redirect target URI.
 */
class AuthenticationRedirectException extends RuntimeException {

    int code;
    String redirectUri;

    /**
     * Redirects with an explicit HTTP status code.
     *
     * @param code the status code to respond with
     * @param redirectUri the target URI for the redirect
     */
    public AuthenticationRedirectException(int code, String redirectUri) {
        this.redirectUri = redirectUri;
        this.code = code;
    }

    /**
     * Redirects with the conventional 302 (Found) status.
     *
     * @param redirectUri the target URI for the redirect
     */
    public AuthenticationRedirectException(String redirectUri) {
        this(302, redirectUri);
    }

    /** Returns the redirect target URI. */
    public String getRedirectUri() {
        return this.redirectUri;
    }

    /** Returns the HTTP status code. */
    public int getCode() {
        return code;
    }
}
@challengeof Can we rename `c` to `firstChar`? And put the constants on the left-hand side of the comparisons?
/**
 * Coerces an arbitrary value to its boolean interpretation.
 * <p>
 * Booleans pass through unchanged. Any other value is converted via
 * {@code toString()}: strings starting (case-insensitively) with 't', 'y' or
 * '1', and the literal "-1", are treated as true; everything else — including
 * the empty string — is false.
 *
 * @param value the value to convert; a null value would throw a
 *     NullPointerException from {@code toString()} — presumably callers never
 *     pass null (TODO confirm)
 * @return the Boolean result of the conversion
 */
private static Object convertBooleanValue(final Object value) {
    if (value instanceof Boolean) {
        return value;
    }
    String stringVal = value.toString();
    if (stringVal.length() > 0) {
        // Only the first character decides truthiness (apart from the "-1" special case).
        int firstChar = Character.toLowerCase(stringVal.charAt(0));
        // Constants on the left avoid accidental assignment and match reviewer guidance.
        return 't' == firstChar || 'y' == firstChar || '1' == firstChar || "-1".equals(stringVal);
    } else {
        return false;
    }
}
return c == 't' || c == 'y' || c == '1' || "-1".equals(stringVal);
/**
 * Interprets an arbitrary value as a boolean.
 * <p>
 * Existing Booleans are returned as-is. Otherwise the value's string form is
 * examined: a first character of 't', 'y' or '1' (case-insensitive), or the
 * exact string "-1", means true; anything else — including the empty string —
 * means false.
 *
 * @param value the value to interpret (callers are expected to pass non-null)
 * @return the Boolean interpretation of {@code value}
 */
private static Object convertBooleanValue(final Object value) {
    if (value instanceof Boolean) {
        return value;
    }
    final String text = value.toString();
    if (text.isEmpty()) {
        return false;
    }
    switch (Character.toLowerCase(text.charAt(0))) {
        case 't':
        case 'y':
        case '1':
            return true;
        default:
            return "-1".equals(text);
    }
}
class == value.getClass()) { return adjustBigDecimalResult((BigDecimal) value, needScale, scale); }
class == value.getClass()) { return adjustBigDecimalResult((BigDecimal) value, needScale, scale); }
Considering security best practices, is it OK to include the `keyPassword` in error messages and logs? @ldclakmal
/**
 * Decodes an RSA private key from the PKCS#12 key store described by the given
 * Ballerina key-store record.
 * <p>
 * Note the existing caller contract: store-level failures (missing file,
 * unreadable store) are thrown, while key-level failures are returned as crypto
 * error values.
 *
 * @param keyStoreValue Ballerina record holding the key store path and password
 * @param keyAlias alias of the key entry to load
 * @param keyPassword password protecting the key entry; never echoed in errors
 * @return a Ballerina private-key record on success, or a crypto error value
 */
public static Object decodePrivateKey(Object keyStoreValue, String keyAlias, String keyPassword) {
    MapValue<String, Object> keyStore = (MapValue<String, Object>) keyStoreValue;
    PrivateKey privateKey;
    File keyStoreFile = new File(CryptoUtils.substituteVariables(
            keyStore.get(Constants.KEY_STORE_RECORD_PATH_FIELD).toString()));
    try (FileInputStream fileInputStream = new FileInputStream(keyStoreFile)) {
        KeyStore keystore = KeyStore.getInstance(Constants.KEYSTORE_TYPE_PKCS12);
        try {
            keystore.load(fileInputStream, keyStore.get(Constants.KEY_STORE_RECORD_PASSWORD_FIELD).toString()
                    .toCharArray());
        } catch (NoSuchAlgorithmException e) {
            return CryptoUtils.createError("Keystore integrity check algorithm is not found: " + e.getMessage());
        }
        privateKey = (PrivateKey) keystore.getKey(keyAlias, keyPassword.toCharArray());
        if (privateKey.getAlgorithm().equals("RSA")) {
            MapValue<String, Object> privateKeyRecord = BallerinaValues.
                    createRecordValue(Constants.CRYPTO_PACKAGE_ID, Constants.PRIVATE_KEY_RECORD);
            privateKeyRecord.addNativeData(Constants.NATIVE_DATA_PRIVATE_KEY, privateKey);
            privateKeyRecord.put(Constants.PRIVATE_KEY_RECORD_ALGORITHM_FIELD, privateKey.getAlgorithm());
            return privateKeyRecord;
        } else {
            return CryptoUtils.createError("Not a valid RSA key");
        }
    } catch (FileNotFoundException e) {
        throw CryptoUtils.createError("PKCS12 key store not found at: " + keyStoreFile.getAbsoluteFile());
    } catch (KeyStoreException | CertificateException | IOException e) {
        throw CryptoUtils.createError("Unable to open keystore: " + e.getMessage());
    } catch (NoSuchAlgorithmException e) {
        return CryptoUtils.createError("Algorithm for key recovery is not found: " + e.getMessage());
    } catch (NullPointerException e) {
        // SECURITY: never echo the key password back in error messages or logs.
        return CryptoUtils.createError("Key cannot be recovered by using given key alias: [" + keyAlias
                + "] and key password");
    } catch (UnrecoverableKeyException e) {
        return CryptoUtils.createError("Key cannot be recovered: " + e.getMessage());
    }
}
"] and key password: [" + keyPassword + "]");
/**
 * Decodes an RSA private key from the PKCS#12 key store described by the given
 * Ballerina key-store record.
 * <p>
 * Caller-visible contract observed in the code: store-level failures (missing
 * file, unreadable store) are thrown, while key-level failures are returned as
 * crypto error values. The key password is deliberately omitted from error
 * messages.
 *
 * @param keyStoreValue Ballerina record holding the key store path and password
 * @param keyAlias alias of the key entry to load
 * @param keyPassword password protecting the key entry
 * @return a Ballerina private-key record on success, or a crypto error value
 */
public static Object decodePrivateKey(Object keyStoreValue, String keyAlias, String keyPassword) {
    MapValue<String, Object> keyStore = (MapValue<String, Object>) keyStoreValue;
    PrivateKey privateKey;
    File keyStoreFile = new File(CryptoUtils.substituteVariables(
            keyStore.get(Constants.KEY_STORE_RECORD_PATH_FIELD).toString()));
    try (FileInputStream fileInputStream = new FileInputStream(keyStoreFile)) {
        KeyStore keystore = KeyStore.getInstance(Constants.KEYSTORE_TYPE_PKCS12);
        try {
            // Store password comes from the key-store record, not the key password.
            keystore.load(fileInputStream, keyStore.get(Constants.KEY_STORE_RECORD_PASSWORD_FIELD).toString()
                    .toCharArray());
        } catch (NoSuchAlgorithmException e) {
            return CryptoUtils.createError("Keystore integrity check algorithm is not found: " + e.getMessage());
        }
        privateKey = (PrivateKey) keystore.getKey(keyAlias, keyPassword.toCharArray());
        if (privateKey.getAlgorithm().equals("RSA")) {
            MapValue<String, Object> privateKeyRecord = BallerinaValues.
                    createRecordValue(Constants.CRYPTO_PACKAGE_ID, Constants.PRIVATE_KEY_RECORD);
            privateKeyRecord.addNativeData(Constants.NATIVE_DATA_PRIVATE_KEY, privateKey);
            privateKeyRecord.put(Constants.PRIVATE_KEY_RECORD_ALGORITHM_FIELD, privateKey.getAlgorithm());
            return privateKeyRecord;
        } else {
            // Only RSA keys are supported by this decoder.
            return CryptoUtils.createError("Not a valid RSA key");
        }
    } catch (FileNotFoundException e) {
        throw CryptoUtils.createError("PKCS12 key store not found at: " + keyStoreFile.getAbsoluteFile());
    } catch (KeyStoreException | CertificateException | IOException e) {
        throw CryptoUtils.createError("Unable to open keystore: " + e.getMessage());
    } catch (NoSuchAlgorithmException e) {
        return CryptoUtils.createError("Algorithm for key recovery is not found: " + e.getMessage());
    } catch (NullPointerException e) {
        // Reached when no entry matches the alias; the password is not echoed.
        return CryptoUtils.createError("Key cannot be recovered by using given key alias: [" + keyAlias
                + "] and key password");
    } catch (UnrecoverableKeyException e) {
        return CryptoUtils.createError("Key cannot be recovered: " + e.getMessage());
    }
}
class Decode { @SuppressWarnings("unchecked") @SuppressWarnings("unchecked") public static Object decodePublicKey(Object keyStoreValue, String keyAlias) { MapValue<String, Object> keyStore = (MapValue<String, Object>) keyStoreValue; File keyStoreFile = new File( CryptoUtils.substituteVariables(keyStore.get(Constants.KEY_STORE_RECORD_PATH_FIELD).toString())); try (FileInputStream fileInputStream = new FileInputStream(keyStoreFile)) { KeyStore keystore = KeyStore.getInstance(Constants.KEYSTORE_TYPE_PKCS12); try { keystore.load(fileInputStream, keyStore.get(Constants.KEY_STORE_RECORD_PASSWORD_FIELD).toString() .toCharArray()); } catch (NoSuchAlgorithmException e) { throw CryptoUtils.createError("Keystore integrity check algorithm is not found: " + e.getMessage()); } Certificate certificate = keystore.getCertificate(keyAlias); MapValue<String, Object> certificateBMap = BallerinaValues. createRecordValue(Constants.CRYPTO_PACKAGE_ID, Constants.CERTIFICATE_RECORD); if (certificate instanceof X509Certificate) { X509Certificate x509Certificate = (X509Certificate) certificate; certificateBMap.put(Constants.CERTIFICATE_RECORD_ISSUER_FIELD, x509Certificate.getIssuerX500Principal().getName()); certificateBMap.put(Constants.CERTIFICATE_RECORD_SUBJECT_FIELD, x509Certificate.getSubjectX500Principal().getName()); certificateBMap.put(Constants.CERTIFICATE_RECORD_VERSION_FIELD, x509Certificate.getVersion()); certificateBMap.put(Constants.CERTIFICATE_RECORD_SERIAL_FIELD, x509Certificate.getSerialNumber().longValue()); certificateBMap.put(Constants.CERTIFICATE_RECORD_NOT_BEFORE_FIELD, TimeUtils .createTimeRecord(TimeUtils.getTimeZoneRecord(), TimeUtils.getTimeRecord(), x509Certificate.getNotBefore().getTime(), Constants.TIMEZONE_GMT)); certificateBMap.put(Constants.CERTIFICATE_RECORD_NOT_AFTER_FIELD, TimeUtils .createTimeRecord(TimeUtils.getTimeZoneRecord(), TimeUtils.getTimeRecord(), x509Certificate.getNotAfter().getTime(), Constants.TIMEZONE_GMT)); 
certificateBMap.put(Constants.CERTIFICATE_RECORD_SIGNATURE_FIELD, new ArrayValueImpl(x509Certificate.getSignature())); certificateBMap.put(Constants.CERTIFICATE_RECORD_SIGNATURE_ALG_FIELD, x509Certificate.getSigAlgName()); } PublicKey publicKey = certificate.getPublicKey(); if (publicKey.getAlgorithm().equals("RSA")) { MapValue<String, Object> publicKeyMap = BallerinaValues. createRecordValue(Constants.CRYPTO_PACKAGE_ID, Constants.PUBLIC_KEY_RECORD); publicKeyMap.addNativeData(Constants.NATIVE_DATA_PUBLIC_KEY, publicKey); publicKeyMap.addNativeData(Constants.NATIVE_DATA_PUBLIC_KEY_CERTIFICATE, certificate); publicKeyMap.put(Constants.PUBLIC_KEY_RECORD_ALGORITHM_FIELD, publicKey.getAlgorithm()); if (certificateBMap.size() > 0) { publicKeyMap.put(Constants.PUBLIC_KEY_RECORD_CERTIFICATE_FIELD, certificateBMap); } return publicKeyMap; } else { return CryptoUtils.createError("Not a valid RSA key"); } } catch (FileNotFoundException e) { throw CryptoUtils.createError("PKCS12 key store not found at: " + keyStoreFile.getAbsoluteFile()); } catch (KeyStoreException | CertificateException | IOException e) { throw CryptoUtils.createError("Unable to open keystore: " + e.getMessage()); } } }
class Decode { @SuppressWarnings("unchecked") @SuppressWarnings("unchecked") public static Object decodePublicKey(Object keyStoreValue, String keyAlias) { MapValue<String, Object> keyStore = (MapValue<String, Object>) keyStoreValue; File keyStoreFile = new File( CryptoUtils.substituteVariables(keyStore.get(Constants.KEY_STORE_RECORD_PATH_FIELD).toString())); try (FileInputStream fileInputStream = new FileInputStream(keyStoreFile)) { KeyStore keystore = KeyStore.getInstance(Constants.KEYSTORE_TYPE_PKCS12); try { keystore.load(fileInputStream, keyStore.get(Constants.KEY_STORE_RECORD_PASSWORD_FIELD).toString() .toCharArray()); } catch (NoSuchAlgorithmException e) { throw CryptoUtils.createError("Keystore integrity check algorithm is not found: " + e.getMessage()); } Certificate certificate = keystore.getCertificate(keyAlias); MapValue<String, Object> certificateBMap = BallerinaValues. createRecordValue(Constants.CRYPTO_PACKAGE_ID, Constants.CERTIFICATE_RECORD); if (certificate instanceof X509Certificate) { X509Certificate x509Certificate = (X509Certificate) certificate; certificateBMap.put(Constants.CERTIFICATE_RECORD_ISSUER_FIELD, x509Certificate.getIssuerX500Principal().getName()); certificateBMap.put(Constants.CERTIFICATE_RECORD_SUBJECT_FIELD, x509Certificate.getSubjectX500Principal().getName()); certificateBMap.put(Constants.CERTIFICATE_RECORD_VERSION_FIELD, x509Certificate.getVersion()); certificateBMap.put(Constants.CERTIFICATE_RECORD_SERIAL_FIELD, x509Certificate.getSerialNumber().longValue()); certificateBMap.put(Constants.CERTIFICATE_RECORD_NOT_BEFORE_FIELD, TimeUtils .createTimeRecord(TimeUtils.getTimeZoneRecord(), TimeUtils.getTimeRecord(), x509Certificate.getNotBefore().getTime(), Constants.TIMEZONE_GMT)); certificateBMap.put(Constants.CERTIFICATE_RECORD_NOT_AFTER_FIELD, TimeUtils .createTimeRecord(TimeUtils.getTimeZoneRecord(), TimeUtils.getTimeRecord(), x509Certificate.getNotAfter().getTime(), Constants.TIMEZONE_GMT)); 
certificateBMap.put(Constants.CERTIFICATE_RECORD_SIGNATURE_FIELD, new ArrayValueImpl(x509Certificate.getSignature())); certificateBMap.put(Constants.CERTIFICATE_RECORD_SIGNATURE_ALG_FIELD, x509Certificate.getSigAlgName()); } PublicKey publicKey = certificate.getPublicKey(); if (publicKey.getAlgorithm().equals("RSA")) { MapValue<String, Object> publicKeyMap = BallerinaValues. createRecordValue(Constants.CRYPTO_PACKAGE_ID, Constants.PUBLIC_KEY_RECORD); publicKeyMap.addNativeData(Constants.NATIVE_DATA_PUBLIC_KEY, publicKey); publicKeyMap.addNativeData(Constants.NATIVE_DATA_PUBLIC_KEY_CERTIFICATE, certificate); publicKeyMap.put(Constants.PUBLIC_KEY_RECORD_ALGORITHM_FIELD, publicKey.getAlgorithm()); if (certificateBMap.size() > 0) { publicKeyMap.put(Constants.PUBLIC_KEY_RECORD_CERTIFICATE_FIELD, certificateBMap); } return publicKeyMap; } else { return CryptoUtils.createError("Not a valid RSA key"); } } catch (FileNotFoundException e) { throw CryptoUtils.createError("PKCS12 key store not found at: " + keyStoreFile.getAbsoluteFile()); } catch (KeyStoreException | CertificateException | IOException e) { throw CryptoUtils.createError("Unable to open keystore: " + e.getMessage()); } } }
Can this method call the method on L204
public Mono<CosmosDatabaseResponse> createDatabaseIfNotExists(String id) { return withContext(context -> createDatabaseIfNotExistsInternal(getDatabase(id), null, context)); }
return withContext(context -> createDatabaseIfNotExistsInternal(getDatabase(id), null, context));
public Mono<CosmosDatabaseResponse> createDatabaseIfNotExists(String id) { return withContext(context -> createDatabaseIfNotExistsInternal(getDatabase(id), null, context)); }
class CosmosAsyncClient implements Closeable { private final Configs configs; private final AsyncDocumentClient asyncDocumentClient; private final String serviceEndpoint; private final String keyOrResourceToken; private final ConnectionPolicy connectionPolicy; private final ConsistencyLevel desiredConsistencyLevel; private final List<CosmosPermissionProperties> permissions; private final CosmosAuthorizationTokenResolver cosmosAuthorizationTokenResolver; private final AzureKeyCredential credential; private final boolean sessionCapturingOverride; private final boolean enableTransportClientSharing; private final TracerProvider tracerProvider; private final boolean contentResponseOnWriteEnabled; CosmosAsyncClient(CosmosClientBuilder builder) { this.configs = builder.configs(); this.serviceEndpoint = builder.getEndpoint(); this.keyOrResourceToken = builder.getKey(); this.connectionPolicy = builder.getConnectionPolicy(); this.desiredConsistencyLevel = builder.getConsistencyLevel(); this.permissions = builder.getPermissions(); this.cosmosAuthorizationTokenResolver = builder.getAuthorizationTokenResolver(); this.credential = builder.getCredential(); this.sessionCapturingOverride = builder.isSessionCapturingOverrideEnabled(); this.enableTransportClientSharing = builder.isConnectionSharingAcrossClientsEnabled(); this.contentResponseOnWriteEnabled = builder.isContentResponseOnWriteEnabled(); this.tracerProvider = new TracerProvider(ServiceLoader.load(Tracer.class)); this.asyncDocumentClient = new AsyncDocumentClient.Builder() .withServiceEndpoint(this.serviceEndpoint) .withMasterKeyOrResourceToken(this.keyOrResourceToken) .withConnectionPolicy(this.connectionPolicy) .withConsistencyLevel(this.desiredConsistencyLevel) .withSessionCapturingOverride(this.sessionCapturingOverride) .withConfigs(this.configs) .withTokenResolver(this.cosmosAuthorizationTokenResolver) .withCredential(this.credential) .withTransportClientSharing(this.enableTransportClientSharing) 
.withContentResponseOnWriteEnabled(this.contentResponseOnWriteEnabled) .build(); } AsyncDocumentClient getContextClient() { return this.asyncDocumentClient; } /** * Monitor Cosmos client performance and resource utilization using the specified meter registry. * * @param registry meter registry to use for performance monitoring. */ static void setMonitorTelemetry(MeterRegistry registry) { RntbdMetrics.add(registry); } /** * Get the service endpoint. * * @return the service endpoint. */ String getServiceEndpoint() { return serviceEndpoint; } /** * Gets the key or resource token. * * @return get the key or resource token. */ String getKeyOrResourceToken() { return keyOrResourceToken; } /** * Get the connection policy. * * @return {@link ConnectionPolicy}. */ ConnectionPolicy getConnectionPolicy() { return connectionPolicy; } /** * Gets the consistency level. * * @return the {@link ConsistencyLevel}. */ ConsistencyLevel getDesiredConsistencyLevel() { return desiredConsistencyLevel; } /** * Gets the permission list. * * @return the permission list. */ List<CosmosPermissionProperties> getPermissions() { return permissions; } AsyncDocumentClient getDocClientWrapper() { return asyncDocumentClient; } /** * Gets the configs. * * @return the configs. */ Configs getConfigs() { return configs; } /** * Gets the token resolver. * * @return the token resolver. */ CosmosAuthorizationTokenResolver getCosmosAuthorizationTokenResolver() { return cosmosAuthorizationTokenResolver; } /** * Gets the azure key credential. * * @return azure key credential. */ AzureKeyCredential credential() { return credential; } /** * Gets the boolean which indicates whether to only return the headers and status code in Cosmos DB response * in case of Create, Update and Delete operations on CosmosItem. * * If set to false (which is by default), this removes the resource from response. It reduces networking * and CPU load by not sending the resource back over the network and serializing it * on the client. 
* * By-default, this is false. * * @return a boolean indicating whether resource will be included in the response or not. */ boolean isContentResponseOnWriteEnabled() { return contentResponseOnWriteEnabled; } /** * CREATE a Database if it does not already exist on the service. * <p> * The {@link Mono} upon successful completion will contain a single cosmos database response with the * created or existing database. * * @param databaseProperties CosmosDatabaseProperties. * @return a {@link Mono} containing the cosmos database response with the created or existing database or * an error. */ public Mono<CosmosDatabaseResponse> createDatabaseIfNotExists(CosmosDatabaseProperties databaseProperties) { return withContext(context -> createDatabaseIfNotExistsInternal(getDatabase(databaseProperties.getId()), null, context)); } /** * Create a Database if it does not already exist on the service. * <p> * The {@link Mono} upon successful completion will contain a single cosmos database response with the * created or existing database. * * @param id the id of the database. * @return a {@link Mono} containing the cosmos database response with the created or existing database or * an error. */ /** * Create a Database if it does not already exist on the service. * <p> * The throughputProperties will only be used if the specified database * does not exist and therefor a new database will be created with throughputProperties. * <p> * The {@link Mono} upon successful completion will contain a single cosmos database response with the * created or existing database. * * @param id the id. * @param throughputProperties the throughputProperties. * @return the mono. */ public Mono<CosmosDatabaseResponse> createDatabaseIfNotExists(String id, ThroughputProperties throughputProperties) { return withContext(context -> createDatabaseIfNotExistsInternal(getDatabase(id), throughputProperties, context)); } /** * Creates a database. * <p> * After subscription the operation will be performed. 
* The {@link Mono} upon successful completion will contain a single resource response with the * created database. * In case of failure the {@link Mono} will error. * * @param databaseProperties {@link CosmosDatabaseProperties}. * @param options {@link CosmosDatabaseRequestOptions}. * @return an {@link Mono} containing the single cosmos database response with the created database or an error. */ public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties, CosmosDatabaseRequestOptions options) { final CosmosDatabaseRequestOptions requestOptions = options == null ? new CosmosDatabaseRequestOptions() : options; Database wrappedDatabase = new Database(); wrappedDatabase.setId(databaseProperties.getId()); return withContext(context -> createDatabaseInternal(wrappedDatabase, requestOptions, context)); } /** * Creates a database. * <p> * After subscription the operation will be performed. * The {@link Mono} upon successful completion will contain a single resource response with the * created database. * In case of failure the {@link Mono} will error. * * @param databaseProperties {@link CosmosDatabaseProperties}. * @return an {@link Mono} containing the single cosmos database response with the created database or an error. */ public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties) { return createDatabase(databaseProperties, new CosmosDatabaseRequestOptions()); } /** * Creates a database. * <p> * After subscription the operation will be performed. * The {@link Mono} upon successful completion will contain a single resource response with the * created database. * In case of failure the {@link Mono} will error. * * @param id id of the database. * @return a {@link Mono} containing the single cosmos database response with the created database or an error. 
*/ public Mono<CosmosDatabaseResponse> createDatabase(String id) { return createDatabase(new CosmosDatabaseProperties(id), new CosmosDatabaseRequestOptions()); } /** * Creates a database. * <p> * After subscription the operation will be performed. * The {@link Mono} upon successful completion will contain a single resource response with the * created database. * In case of failure the {@link Mono} will error. * * @param databaseProperties {@link CosmosDatabaseProperties}. * @param throughputProperties the throughput properties for the database. * @param options {@link CosmosDatabaseRequestOptions}. * @return an {@link Mono} containing the single cosmos database response with the created database or an error. */ public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties, ThroughputProperties throughputProperties, CosmosDatabaseRequestOptions options) { if (options == null) { options = new CosmosDatabaseRequestOptions(); } ModelBridgeInternal.setThroughputProperties(options, throughputProperties); Database wrappedDatabase = new Database(); wrappedDatabase.setId(databaseProperties.getId()); final CosmosDatabaseRequestOptions requestOptions = options; return withContext(context -> createDatabaseInternal(wrappedDatabase, requestOptions, context)); } /** * Creates a database. * <p> * After subscription the operation will be performed. * The {@link Mono} upon successful completion will contain a single resource response with the * created database. * In case of failure the {@link Mono} will error. * * @param databaseProperties {@link CosmosDatabaseProperties}. * @param throughputProperties the throughput properties for the database. * @return an {@link Mono} containing the single cosmos database response with the created database or an error. 
*/ public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties, ThroughputProperties throughputProperties) { CosmosDatabaseRequestOptions options = new CosmosDatabaseRequestOptions(); ModelBridgeInternal.setThroughputProperties(options, throughputProperties); return createDatabase(databaseProperties, options); } /** * Creates a database. * <p> * After subscription the operation will be performed. * The {@link Mono} upon successful completion will contain a single resource response with the * created database. * In case of failure the {@link Mono} will error. * * @param id id of the database. * @param throughput the throughput for the database. * @return a {@link Mono} containing the single cosmos database response with the created database or an error. */ Mono<CosmosDatabaseResponse> createDatabase(String id, int throughput) { CosmosDatabaseRequestOptions options = new CosmosDatabaseRequestOptions(); ModelBridgeInternal.setThroughputProperties(options, ThroughputProperties.createManualThroughput(throughput)); return createDatabase(new CosmosDatabaseProperties(id), options); } /** * Creates a database. * * @param id the id. * @param throughputProperties the throughputProperties. * @return the mono. */ public Mono<CosmosDatabaseResponse> createDatabase(String id, ThroughputProperties throughputProperties) { CosmosDatabaseRequestOptions options = new CosmosDatabaseRequestOptions(); ModelBridgeInternal.setThroughputProperties(options, throughputProperties); return createDatabase(new CosmosDatabaseProperties(id), options); } /** * Reads all databases. * <p> * After subscription the operation will be performed. * The {@link CosmosPagedFlux} will contain one or several feed response of the read databases. * In case of failure the {@link CosmosPagedFlux} will error. * * @param options {@link CosmosQueryRequestOptions} * @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error. 
*/ CosmosPagedFlux<CosmosDatabaseProperties> readAllDatabases(CosmosQueryRequestOptions options) { return UtilBridgeInternal.createCosmosPagedFlux(pagedFluxOptions -> { pagedFluxOptions.setTracerInformation(this.tracerProvider, "readAllDatabases", this.serviceEndpoint, null); setContinuationTokenAndMaxItemCount(pagedFluxOptions, options); return getDocClientWrapper().readDatabases(options) .map(response -> BridgeInternal.createFeedResponse( ModelBridgeInternal.getCosmosDatabasePropertiesFromV2Results(response.getResults()), response.getResponseHeaders())); }); } /** * Reads all databases. * <p> * After subscription the operation will be performed. * The {@link CosmosPagedFlux} will contain one or several feed response of the read databases. * In case of failure the {@link CosmosPagedFlux} will error. * * @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error. */ public CosmosPagedFlux<CosmosDatabaseProperties> readAllDatabases() { return readAllDatabases(new CosmosQueryRequestOptions()); } /** * Query for databases. * <p> * After subscription the operation will be performed. * The {@link CosmosPagedFlux} will contain one or several feed response of the read databases. * In case of failure the {@link CosmosPagedFlux} will error. * * @param query the query. * @param options the feed options. * @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error. */ public CosmosPagedFlux<CosmosDatabaseProperties> queryDatabases(String query, CosmosQueryRequestOptions options) { return queryDatabasesInternal(new SqlQuerySpec(query), options); } /** * Query for databases. * <p> * After subscription the operation will be performed. * The {@link CosmosPagedFlux} will contain one or several feed response of the read databases. * In case of failure the {@link CosmosPagedFlux} will error. * * @param querySpec the SQL query specification. * @param options the feed options. 
* @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error. */ public CosmosPagedFlux<CosmosDatabaseProperties> queryDatabases(SqlQuerySpec querySpec, CosmosQueryRequestOptions options) { return queryDatabasesInternal(querySpec, options); } /** * Gets a database object without making a service call. * * @param id name of the database. * @return {@link CosmosAsyncDatabase}. */ public CosmosAsyncDatabase getDatabase(String id) { return new CosmosAsyncDatabase(id, this); } /** * Close this {@link CosmosAsyncClient} instance and cleans up the resources. */ @Override public void close() { asyncDocumentClient.close(); } TracerProvider getTracerProvider(){ return this.tracerProvider; } private CosmosPagedFlux<CosmosDatabaseProperties> queryDatabasesInternal(SqlQuerySpec querySpec, CosmosQueryRequestOptions options){ return UtilBridgeInternal.createCosmosPagedFlux(pagedFluxOptions -> { pagedFluxOptions.setTracerInformation(this.tracerProvider, "queryDatabases", this.serviceEndpoint, null); setContinuationTokenAndMaxItemCount(pagedFluxOptions, options); return getDocClientWrapper().queryDatabases(querySpec, options) .map(response -> BridgeInternal.createFeedResponse( ModelBridgeInternal.getCosmosDatabasePropertiesFromV2Results(response.getResults()), response.getResponseHeaders())); }); } private Mono<CosmosDatabaseResponse> createDatabaseIfNotExistsInternal(CosmosAsyncDatabase database, ThroughputProperties throughputProperties, Context context) { String spanName = "createDatabaseIfNotExists." 
+ database.getId(); Context nestedContext = context.addData(TracerProvider.COSMOS_CALL_DEPTH, TracerProvider.COSMOS_CALL_DEPTH_VAL); Mono<CosmosDatabaseResponse> responseMono = createDatabaseIfNotExistsInternal(database.readInternal(new CosmosDatabaseRequestOptions(), nestedContext), database, throughputProperties, nestedContext); return tracerProvider.traceEnabledCosmosResponsePublisher(responseMono, context, spanName, database.getId(), this.serviceEndpoint); } private Mono<CosmosDatabaseResponse> createDatabaseIfNotExistsInternal(Mono<CosmosDatabaseResponse> responseMono, CosmosAsyncDatabase database, ThroughputProperties throughputProperties, Context context) { return responseMono.onErrorResume(exception -> { final Throwable unwrappedException = Exceptions.unwrap(exception); if (unwrappedException instanceof CosmosException) { final CosmosException cosmosException = (CosmosException) unwrappedException; if (cosmosException.getStatusCode() == HttpConstants.StatusCodes.NOTFOUND) { CosmosDatabaseRequestOptions requestOptions = new CosmosDatabaseRequestOptions(); if(throughputProperties != null) { ModelBridgeInternal.setThroughputProperties(requestOptions, throughputProperties); } if (context != null) { Database wrappedDatabase = new Database(); wrappedDatabase.setId(database.getId()); return createDatabaseInternal(wrappedDatabase, requestOptions, context); } return createDatabase(new CosmosDatabaseProperties(database.getId()), requestOptions); } } return Mono.error(unwrappedException); }); } private Mono<CosmosDatabaseResponse> createDatabaseInternal(Database database, CosmosDatabaseRequestOptions options, Context context) { String spanName = "createDatabase." 
+ database.getId(); Mono<CosmosDatabaseResponse> responseMono = createDatabaseInternal(database, options); return tracerProvider.traceEnabledCosmosResponsePublisher(responseMono, context, spanName, database.getId(), this.serviceEndpoint); } private Mono<CosmosDatabaseResponse> createDatabaseInternal(Database database, CosmosDatabaseRequestOptions options) { return asyncDocumentClient.createDatabase(database, ModelBridgeInternal.toRequestOptions(options)) .map(databaseResourceResponse -> ModelBridgeInternal.createCosmosDatabaseResponse(databaseResourceResponse)) .single(); } }
class CosmosAsyncClient implements Closeable { private final Configs configs; private final AsyncDocumentClient asyncDocumentClient; private final String serviceEndpoint; private final String keyOrResourceToken; private final ConnectionPolicy connectionPolicy; private final ConsistencyLevel desiredConsistencyLevel; private final List<CosmosPermissionProperties> permissions; private final CosmosAuthorizationTokenResolver cosmosAuthorizationTokenResolver; private final AzureKeyCredential credential; private final boolean sessionCapturingOverride; private final boolean enableTransportClientSharing; private final TracerProvider tracerProvider; private final DataEncryptionKeyProvider dataEncryptionKeyProvider; private final boolean contentResponseOnWriteEnabled; private static final Tracer TRACER; static { ServiceLoader<Tracer> serviceLoader = ServiceLoader.load(Tracer.class); Iterator<?> iterator = serviceLoader.iterator(); if (iterator.hasNext()) { TRACER = serviceLoader.iterator().next(); } else { TRACER = null; } } CosmosAsyncClient(CosmosClientBuilder builder) { this.configs = builder.configs(); this.serviceEndpoint = builder.getEndpoint(); this.keyOrResourceToken = builder.getKey(); this.connectionPolicy = builder.getConnectionPolicy(); this.desiredConsistencyLevel = builder.getConsistencyLevel(); this.permissions = builder.getPermissions(); this.cosmosAuthorizationTokenResolver = builder.getAuthorizationTokenResolver(); this.credential = builder.getCredential(); this.sessionCapturingOverride = builder.isSessionCapturingOverrideEnabled(); this.dataEncryptionKeyProvider = builder.getDataEncryptionKeyProvider(); this.enableTransportClientSharing = builder.isConnectionSharingAcrossClientsEnabled(); this.contentResponseOnWriteEnabled = builder.isContentResponseOnWriteEnabled(); this.tracerProvider = new TracerProvider(TRACER); this.asyncDocumentClient = new AsyncDocumentClient.Builder() .withServiceEndpoint(this.serviceEndpoint) 
.withMasterKeyOrResourceToken(this.keyOrResourceToken) .withConnectionPolicy(this.connectionPolicy) .withConsistencyLevel(this.desiredConsistencyLevel) .withSessionCapturingOverride(this.sessionCapturingOverride) .withConfigs(this.configs) .withTokenResolver(this.cosmosAuthorizationTokenResolver) .withCredential(this.credential) .withTransportClientSharing(this.enableTransportClientSharing) .withDataEncryptionKeyProvider(this.dataEncryptionKeyProvider) .withContentResponseOnWriteEnabled(this.contentResponseOnWriteEnabled) .build(); } AsyncDocumentClient getContextClient() { return this.asyncDocumentClient; } /** * Monitor Cosmos client performance and resource utilization using the specified meter registry. * * @param registry meter registry to use for performance monitoring. */ static void setMonitorTelemetry(MeterRegistry registry) { RntbdMetrics.add(registry); } /** * Get the service endpoint. * * @return the service endpoint. */ String getServiceEndpoint() { return serviceEndpoint; } /** * Gets the key or resource token. * * @return get the key or resource token. */ String getKeyOrResourceToken() { return keyOrResourceToken; } /** * Get the connection policy. * * @return {@link ConnectionPolicy}. */ ConnectionPolicy getConnectionPolicy() { return connectionPolicy; } /** * Gets the consistency level. * * @return the {@link ConsistencyLevel}. */ ConsistencyLevel getDesiredConsistencyLevel() { return desiredConsistencyLevel; } /** * Gets the permission list. * * @return the permission list. */ List<CosmosPermissionProperties> getPermissions() { return permissions; } AsyncDocumentClient getDocClientWrapper() { return asyncDocumentClient; } /** * Gets the configs. * * @return the configs. */ Configs getConfigs() { return configs; } /** * Gets the token resolver. * * @return the token resolver. */ CosmosAuthorizationTokenResolver getCosmosAuthorizationTokenResolver() { return cosmosAuthorizationTokenResolver; } /** * Gets the azure key credential. 
* * @return azure key credential. */ AzureKeyCredential credential() { return credential; } /** * Gets the boolean which indicates whether to only return the headers and status code in Cosmos DB response * in case of Create, Update and Delete operations on CosmosItem. * * If set to false (which is by default), this removes the resource from response. It reduces networking * and CPU load by not sending the resource back over the network and serializing it * on the client. * * By-default, this is false. * * @return a boolean indicating whether resource will be included in the response or not. */ boolean isContentResponseOnWriteEnabled() { return contentResponseOnWriteEnabled; } /** * CREATE a Database if it does not already exist on the service. * <p> * The {@link Mono} upon successful completion will contain a single cosmos database response with the * created or existing database. * * @param databaseProperties CosmosDatabaseProperties. * @return a {@link Mono} containing the cosmos database response with the created or existing database or * an error. */ public Mono<CosmosDatabaseResponse> createDatabaseIfNotExists(CosmosDatabaseProperties databaseProperties) { return withContext(context -> createDatabaseIfNotExistsInternal(getDatabase(databaseProperties.getId()), null, context)); } /** * Create a Database if it does not already exist on the service. * <p> * The {@link Mono} upon successful completion will contain a single cosmos database response with the * created or existing database. * * @param id the id of the database. * @return a {@link Mono} containing the cosmos database response with the created or existing database or * an error. */ /** * Create a Database if it does not already exist on the service. * <p> * The throughputProperties will only be used if the specified database * does not exist and therefor a new database will be created with throughputProperties. 
* <p> * The {@link Mono} upon successful completion will contain a single cosmos database response with the * created or existing database. * * @param id the id. * @param throughputProperties the throughputProperties. * @return the mono. */ public Mono<CosmosDatabaseResponse> createDatabaseIfNotExists(String id, ThroughputProperties throughputProperties) { return withContext(context -> createDatabaseIfNotExistsInternal(getDatabase(id), throughputProperties, context)); } /** * Creates a database. * <p> * After subscription the operation will be performed. * The {@link Mono} upon successful completion will contain a single resource response with the * created database. * In case of failure the {@link Mono} will error. * * @param databaseProperties {@link CosmosDatabaseProperties}. * @param options {@link CosmosDatabaseRequestOptions}. * @return an {@link Mono} containing the single cosmos database response with the created database or an error. */ public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties, CosmosDatabaseRequestOptions options) { final CosmosDatabaseRequestOptions requestOptions = options == null ? new CosmosDatabaseRequestOptions() : options; Database wrappedDatabase = new Database(); wrappedDatabase.setId(databaseProperties.getId()); return withContext(context -> createDatabaseInternal(wrappedDatabase, requestOptions, context)); } /** * Creates a database. * <p> * After subscription the operation will be performed. * The {@link Mono} upon successful completion will contain a single resource response with the * created database. * In case of failure the {@link Mono} will error. * * @param databaseProperties {@link CosmosDatabaseProperties}. * @return an {@link Mono} containing the single cosmos database response with the created database or an error. 
*/ public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties) { return createDatabase(databaseProperties, new CosmosDatabaseRequestOptions()); } /** * Creates a database. * <p> * After subscription the operation will be performed. * The {@link Mono} upon successful completion will contain a single resource response with the * created database. * In case of failure the {@link Mono} will error. * * @param id id of the database. * @return a {@link Mono} containing the single cosmos database response with the created database or an error. */ public Mono<CosmosDatabaseResponse> createDatabase(String id) { return createDatabase(new CosmosDatabaseProperties(id), new CosmosDatabaseRequestOptions()); } /** * Creates a database. * <p> * After subscription the operation will be performed. * The {@link Mono} upon successful completion will contain a single resource response with the * created database. * In case of failure the {@link Mono} will error. * * @param databaseProperties {@link CosmosDatabaseProperties}. * @param throughputProperties the throughput properties for the database. * @param options {@link CosmosDatabaseRequestOptions}. * @return an {@link Mono} containing the single cosmos database response with the created database or an error. */ public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties, ThroughputProperties throughputProperties, CosmosDatabaseRequestOptions options) { if (options == null) { options = new CosmosDatabaseRequestOptions(); } ModelBridgeInternal.setThroughputProperties(options, throughputProperties); Database wrappedDatabase = new Database(); wrappedDatabase.setId(databaseProperties.getId()); final CosmosDatabaseRequestOptions requestOptions = options; return withContext(context -> createDatabaseInternal(wrappedDatabase, requestOptions, context)); } /** * Creates a database. * <p> * After subscription the operation will be performed. 
* The {@link Mono} upon successful completion will contain a single resource response with the * created database. * In case of failure the {@link Mono} will error. * * @param databaseProperties {@link CosmosDatabaseProperties}. * @param throughputProperties the throughput properties for the database. * @return an {@link Mono} containing the single cosmos database response with the created database or an error. */ public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties, ThroughputProperties throughputProperties) { CosmosDatabaseRequestOptions options = new CosmosDatabaseRequestOptions(); ModelBridgeInternal.setThroughputProperties(options, throughputProperties); return createDatabase(databaseProperties, options); } /** * Creates a database. * * @param id the id. * @param throughputProperties the throughputProperties. * @return the mono. */ public Mono<CosmosDatabaseResponse> createDatabase(String id, ThroughputProperties throughputProperties) { CosmosDatabaseRequestOptions options = new CosmosDatabaseRequestOptions(); ModelBridgeInternal.setThroughputProperties(options, throughputProperties); return createDatabase(new CosmosDatabaseProperties(id), options); } /** * Reads all databases. * <p> * After subscription the operation will be performed. * The {@link CosmosPagedFlux} will contain one or several feed response of the read databases. * In case of failure the {@link CosmosPagedFlux} will error. * * @param options {@link CosmosQueryRequestOptions} * @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error. 
*/ CosmosPagedFlux<CosmosDatabaseProperties> readAllDatabases(CosmosQueryRequestOptions options) { return UtilBridgeInternal.createCosmosPagedFlux(pagedFluxOptions -> { pagedFluxOptions.setTracerInformation(this.tracerProvider, "readAllDatabases", this.serviceEndpoint, null); setContinuationTokenAndMaxItemCount(pagedFluxOptions, options); return getDocClientWrapper().readDatabases(options) .map(response -> BridgeInternal.createFeedResponse( ModelBridgeInternal.getCosmosDatabasePropertiesFromV2Results(response.getResults()), response.getResponseHeaders())); }); } /** * Reads all databases. * <p> * After subscription the operation will be performed. * The {@link CosmosPagedFlux} will contain one or several feed response of the read databases. * In case of failure the {@link CosmosPagedFlux} will error. * * @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error. */ public CosmosPagedFlux<CosmosDatabaseProperties> readAllDatabases() { return readAllDatabases(new CosmosQueryRequestOptions()); } /** * Query for databases. * <p> * After subscription the operation will be performed. * The {@link CosmosPagedFlux} will contain one or several feed response of the read databases. * In case of failure the {@link CosmosPagedFlux} will error. * * @param query the query. * @param options the feed options. * @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error. */ public CosmosPagedFlux<CosmosDatabaseProperties> queryDatabases(String query, CosmosQueryRequestOptions options) { return queryDatabasesInternal(new SqlQuerySpec(query), options); } /** * Query for databases. * <p> * After subscription the operation will be performed. * The {@link CosmosPagedFlux} will contain one or several feed response of the read databases. * In case of failure the {@link CosmosPagedFlux} will error. * * @param querySpec the SQL query specification. * @param options the feed options. 
* @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error. */ public CosmosPagedFlux<CosmosDatabaseProperties> queryDatabases(SqlQuerySpec querySpec, CosmosQueryRequestOptions options) { return queryDatabasesInternal(querySpec, options); } /** * Gets a database object without making a service call. * * @param id name of the database. * @return {@link CosmosAsyncDatabase}. */ public CosmosAsyncDatabase getDatabase(String id) { return new CosmosAsyncDatabase(id, this); } /** * Close this {@link CosmosAsyncClient} instance and cleans up the resources. */ @Override public void close() { asyncDocumentClient.close(); } TracerProvider getTracerProvider(){ return this.tracerProvider; } private CosmosPagedFlux<CosmosDatabaseProperties> queryDatabasesInternal(SqlQuerySpec querySpec, CosmosQueryRequestOptions options){ return UtilBridgeInternal.createCosmosPagedFlux(pagedFluxOptions -> { pagedFluxOptions.setTracerInformation(this.tracerProvider, "queryDatabases", this.serviceEndpoint, null); setContinuationTokenAndMaxItemCount(pagedFluxOptions, options); return getDocClientWrapper().queryDatabases(querySpec, options) .map(response -> BridgeInternal.createFeedResponse( ModelBridgeInternal.getCosmosDatabasePropertiesFromV2Results(response.getResults()), response.getResponseHeaders())); }); } private Mono<CosmosDatabaseResponse> createDatabaseIfNotExistsInternal(CosmosAsyncDatabase database, ThroughputProperties throughputProperties, Context context) { String spanName = "createDatabaseIfNotExists." 
+ database.getId(); Context nestedContext = context.addData(TracerProvider.COSMOS_CALL_DEPTH, TracerProvider.COSMOS_CALL_DEPTH_VAL); Mono<CosmosDatabaseResponse> responseMono = database.readInternal(new CosmosDatabaseRequestOptions(), nestedContext).onErrorResume(exception -> { final Throwable unwrappedException = Exceptions.unwrap(exception); if (unwrappedException instanceof CosmosException) { final CosmosException cosmosException = (CosmosException) unwrappedException; if (cosmosException.getStatusCode() == HttpConstants.StatusCodes.NOTFOUND) { CosmosDatabaseRequestOptions requestOptions = new CosmosDatabaseRequestOptions(); if (throughputProperties != null) { ModelBridgeInternal.setThroughputProperties(requestOptions, throughputProperties); } Database wrappedDatabase = new Database(); wrappedDatabase.setId(database.getId()); return createDatabaseInternal(wrappedDatabase, requestOptions, nestedContext); } } return Mono.error(unwrappedException); }); return tracerProvider.traceEnabledCosmosResponsePublisher(responseMono, context, spanName, database.getId(), this.serviceEndpoint); } private Mono<CosmosDatabaseResponse> createDatabaseInternal(Database database, CosmosDatabaseRequestOptions options, Context context) { String spanName = "createDatabase." + database.getId(); Mono<CosmosDatabaseResponse> responseMono = asyncDocumentClient.createDatabase(database, ModelBridgeInternal.toRequestOptions(options)) .map(databaseResourceResponse -> ModelBridgeInternal.createCosmosDatabaseResponse(databaseResourceResponse)) .single(); return tracerProvider.traceEnabledCosmosResponsePublisher(responseMono, context, spanName, database.getId(), this.serviceEndpoint); } }
Should we use `@RepeatedTest(100)` instead?
void testPost() { StringBuilder sb = new StringBuilder(); for (int i = 0; i < 100; ++i) { given() .header("Authorization", "Basic am9objpqb2hu") .body("Bill") .contentType(ContentType.TEXT) .when() .post("/foo/") .then() .statusCode(200) .body(is("hello Bill")); } }
void testPost() { given() .header("Authorization", "Basic am9objpqb2hu") .body("Bill") .contentType(ContentType.TEXT) .when() .post("/foo/") .then() .statusCode(200) .body(is("hello Bill")); }
class BaseAuthTest { @Test @Test void testGet() { given() .header("Authorization", "Basic am9objpqb2hu") .when() .get("/foo/") .then() .statusCode(200) .body(is("hello")); } }
class BaseAuthTest { @Test @RepeatedTest(100) @Test void testGet() { given() .header("Authorization", "Basic am9objpqb2hu") .when() .get("/foo/") .then() .statusCode(200) .body(is("hello")); } }
is ctx.getText() giving the original string value? ```suggestion ctx.getText()); ```
public void exitSimpleLiteral(BallerinaParser.SimpleLiteralContext ctx) { if (ctx.exception != null) { return; } TerminalNode node; DiagnosticPos pos = getCurrentPos(ctx); Set<Whitespace> ws = getWS(ctx); Object value; BallerinaParser.IntegerLiteralContext integerLiteralContext = ctx.integerLiteral(); if (integerLiteralContext != null && (value = getIntegerLiteral(ctx, ctx.integerLiteral())) != null) { this.pkgBuilder.addLiteralValue(pos, ws, TypeTags.INT, value, getOriginalIntegerValue(integerLiteralContext)); } else if (ctx.floatingPointLiteral() != null) { if ((node = ctx.floatingPointLiteral().DecimalFloatingPointNumber()) != null) { this.pkgBuilder.addLiteralValue(pos, ws, TypeTags.FLOAT, Double.parseDouble(getNodeValue(ctx, node)), node.getText()); } else if ((node = ctx.floatingPointLiteral().HexadecimalFloatingPointLiteral()) != null) { this.pkgBuilder.addLiteralValue(pos, ws, TypeTags.FLOAT, Double.parseDouble(getHexNodeValue(ctx, node)), node.getText()); } } else if ((node = ctx.BooleanLiteral()) != null) { this.pkgBuilder.addLiteralValue(pos, ws, TypeTags.BOOLEAN, Boolean.parseBoolean(node.getText()), node.getText()); } else if ((node = ctx.QuotedStringLiteral()) != null) { String text = node.getText(); text = text.substring(1, text.length() - 1); text = StringEscapeUtils.unescapeJava(text); this.pkgBuilder.addLiteralValue(pos, ws, TypeTags.STRING, text, node.getText()); } else if (ctx.NullLiteral() != null || ctx.emptyTupleLiteral() != null) { this.pkgBuilder.addLiteralValue(pos, ws, TypeTags.NIL, null, "null"); } else if (ctx.blobLiteral() != null) { this.pkgBuilder.addLiteralValue(pos, ws, TypeTags.BYTE_ARRAY, ctx.blobLiteral().getText()); } else if ((node = ctx.SymbolicStringLiteral()) != null) { String text = node.getText(); text = text.substring(1, text.length()); text = StringEscapeUtils.unescapeJava(text); this.pkgBuilder.addLiteralValue(pos, ws, TypeTags.STRING, text, node.getText()); } }
getOriginalIntegerValue(integerLiteralContext));
public void exitSimpleLiteral(BallerinaParser.SimpleLiteralContext ctx) { if (ctx.exception != null) { return; } TerminalNode node; DiagnosticPos pos = getCurrentPos(ctx); Set<Whitespace> ws = getWS(ctx); Object value; BallerinaParser.IntegerLiteralContext integerLiteralContext = ctx.integerLiteral(); if (integerLiteralContext != null && (value = getIntegerLiteral(ctx, ctx.integerLiteral())) != null) { this.pkgBuilder.addLiteralValue(pos, ws, TypeTags.INT, value, ctx.getText()); } else if (ctx.floatingPointLiteral() != null) { if ((node = ctx.floatingPointLiteral().DecimalFloatingPointNumber()) != null) { this.pkgBuilder.addLiteralValue(pos, ws, TypeTags.FLOAT, Double.parseDouble(getNodeValue(ctx, node)), node.getText()); } else if ((node = ctx.floatingPointLiteral().HexadecimalFloatingPointLiteral()) != null) { this.pkgBuilder.addLiteralValue(pos, ws, TypeTags.FLOAT, Double.parseDouble(getHexNodeValue(ctx, node)), node.getText()); } } else if ((node = ctx.BooleanLiteral()) != null) { this.pkgBuilder.addLiteralValue(pos, ws, TypeTags.BOOLEAN, Boolean.parseBoolean(node.getText()), node.getText()); } else if ((node = ctx.QuotedStringLiteral()) != null) { String text = node.getText(); text = text.substring(1, text.length() - 1); text = StringEscapeUtils.unescapeJava(text); this.pkgBuilder.addLiteralValue(pos, ws, TypeTags.STRING, text, node.getText()); } else if (ctx.NullLiteral() != null || ctx.emptyTupleLiteral() != null) { this.pkgBuilder.addLiteralValue(pos, ws, TypeTags.NIL, null, "null"); } else if (ctx.blobLiteral() != null) { this.pkgBuilder.addLiteralValue(pos, ws, TypeTags.BYTE_ARRAY, ctx.blobLiteral().getText()); } else if ((node = ctx.SymbolicStringLiteral()) != null) { String text = node.getText(); text = text.substring(1, text.length()); text = StringEscapeUtils.unescapeJava(text); this.pkgBuilder.addLiteralValue(pos, ws, TypeTags.STRING, text, node.getText()); } }
class BLangParserListener extends BallerinaParserBaseListener { private static final String KEYWORD_PUBLIC = "public"; private static final String KEYWORD_EXTERN = "extern"; private BLangPackageBuilder pkgBuilder; private BDiagnosticSource diagnosticSrc; private BLangDiagnosticLog dlog; private List<String> pkgNameComps; private String pkgVersion; BLangParserListener(CompilerContext context, CompilationUnitNode compUnit, BDiagnosticSource diagnosticSource) { this.pkgBuilder = new BLangPackageBuilder(context, compUnit); this.diagnosticSrc = diagnosticSource; this.dlog = BLangDiagnosticLog.getInstance(context); } @Override public void enterParameterList(BallerinaParser.ParameterListContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startVarList(); } @Override public void exitSimpleParameter(BallerinaParser.SimpleParameterContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addVar(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(), false, ctx.annotationAttachment().size()); } /** * {@inheritDoc} */ @Override public void enterFormalParameterList(BallerinaParser.FormalParameterListContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startVarList(); } /** * {@inheritDoc} */ @Override public void exitFormalParameterList(BallerinaParser.FormalParameterListContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.endFormalParameterList(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitDefaultableParameter(BallerinaParser.DefaultableParameterContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addDefaultableParam(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitRestParameter(BallerinaParser.RestParameterContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addRestParam(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(), ctx.annotationAttachment().size()); } /** * {@inheritDoc} */ @Override public void 
exitParameterTypeName(BallerinaParser.ParameterTypeNameContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addVar(getCurrentPos(ctx), getWS(ctx), null, false, 0); } @Override public void enterCompilationUnit(BallerinaParser.CompilationUnitContext ctx) { } /** * {@inheritDoc} */ @Override public void exitCompilationUnit(BallerinaParser.CompilationUnitContext ctx) { this.pkgBuilder.endCompilationUnit(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitPackageName(BallerinaParser.PackageNameContext ctx) { if (ctx.exception != null) { return; } this.pkgNameComps = new ArrayList<>(); ctx.Identifier().forEach(e -> pkgNameComps.add(e.getText())); this.pkgVersion = ctx.version() != null ? ctx.version().Identifier().getText() : null; } /** * {@inheritDoc} */ @Override public void exitImportDeclaration(BallerinaParser.ImportDeclarationContext ctx) { if (ctx.exception != null) { return; } String alias = ctx.Identifier() != null ? ctx.Identifier().getText() : null; BallerinaParser.OrgNameContext orgNameContext = ctx.orgName(); if (orgNameContext == null) { this.pkgBuilder.addImportPackageDeclaration(getCurrentPos(ctx), getWS(ctx), null, this.pkgNameComps, this.pkgVersion, alias); } else { this.pkgBuilder.addImportPackageDeclaration(getCurrentPos(ctx), getWS(ctx), orgNameContext.getText(), this.pkgNameComps, this.pkgVersion, alias); } } /** * {@inheritDoc} */ @Override public void enterServiceDefinition(BallerinaParser.ServiceDefinitionContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startServiceDef(getCurrentPos(ctx)); } /** * {@inheritDoc} */ @Override public void exitServiceDefinition(BallerinaParser.ServiceDefinitionContext ctx) { if (ctx.exception != null) { return; } boolean constrained = ctx.nameReference() != null; this.pkgBuilder.endServiceDef(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(), getCurrentPosFromIdentifier(ctx.Identifier()), constrained); } @Override public void 
exitServiceEndpointAttachments(BallerinaParser.ServiceEndpointAttachmentsContext ctx) { if (ctx.exception != null) { return; } if (ctx.recordLiteral() != null) { this.pkgBuilder.addAnonymousEndpointBind(getWS(ctx)); return; } this.pkgBuilder.addServiceEndpointAttachments(ctx.nameReference().size(), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void enterServiceBody(BallerinaParser.ServiceBodyContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startBlock(); } /** * {@inheritDoc} */ @Override public void exitServiceBody(BallerinaParser.ServiceBodyContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addServiceBody(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void enterResourceDefinition(BallerinaParser.ResourceDefinitionContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startResourceDef(); } /** * {@inheritDoc} */ @Override public void exitResourceDefinition(BallerinaParser.ResourceDefinitionContext ctx) { if (ctx.exception != null) { return; } boolean markdownDocExists = ctx.documentationString() != null; boolean isDeprecated = ctx.deprecatedAttachment() != null; boolean hasParameters = ctx.resourceParameterList() != null; this.pkgBuilder.endResourceDef(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(), markdownDocExists, isDeprecated, hasParameters); } @Override public void enterResourceParameterList(BallerinaParser.ResourceParameterListContext ctx) { if (ctx.exception != null) { return; } final BallerinaParser.ResourceDefinitionContext parent = (BallerinaParser.ResourceDefinitionContext) ctx.parent; this.pkgBuilder.addResourceAnnotation(parent.annotationAttachment().size()); } @Override public void exitResourceParameterList(BallerinaParser.ResourceParameterListContext ctx) { if (ctx.exception != null) { return; } final boolean isEndpointDefined = ctx.ENDPOINT() != null; if (isEndpointDefined) { this.pkgBuilder.addEndpointVariable(getCurrentPos(ctx), getWS(ctx), 
ctx.Identifier().getText()); } } /** * {@inheritDoc} */ @Override public void enterCallableUnitBody(BallerinaParser.CallableUnitBodyContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startBlock(); } /** * {@inheritDoc} */ @Override public void exitCallableUnitBody(BallerinaParser.CallableUnitBodyContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.endCallableUnitBody(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void enterFunctionDefinition(BallerinaParser.FunctionDefinitionContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startFunctionDef(); } /** * {@inheritDoc} */ @Override public void exitFunctionDefinition(BallerinaParser.FunctionDefinitionContext ctx) { if (ctx.exception != null) { return; } int nativeKWTokenIndex = 0; boolean publicFunc = KEYWORD_PUBLIC.equals(ctx.getChild(0).getText()); if (publicFunc) { nativeKWTokenIndex = 1; } boolean nativeFunc = KEYWORD_EXTERN.equals(ctx.getChild(nativeKWTokenIndex).getText()); boolean bodyExists = ctx.callableUnitBody() != null; if (ctx.Identifier() != null) { this.pkgBuilder.endObjectOuterFunctionDef(getCurrentPos(ctx), getWS(ctx), publicFunc, nativeFunc, bodyExists, ctx.Identifier().getText()); return; } boolean isReceiverAttached = ctx.typeName() != null; this.pkgBuilder.endFunctionDef(getCurrentPos(ctx), getWS(ctx), publicFunc, nativeFunc, bodyExists, isReceiverAttached, false); } @Override public void enterLambdaFunction(BallerinaParser.LambdaFunctionContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startLambdaFunctionDef(diagnosticSrc.pkgID); } @Override public void exitLambdaFunction(BallerinaParser.LambdaFunctionContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addLambdaFunctionDef(getCurrentPos(ctx), getWS(ctx), ctx.formalParameterList() != null, ctx.lambdaReturnParameter() != null, ctx.formalParameterList() != null && ctx.formalParameterList().restParameter() != null); } @Override public void 
enterArrowFunction(BallerinaParser.ArrowFunctionContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startVarList(); } @Override public void exitArrowFunctionExpression(BallerinaParser.ArrowFunctionExpressionContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addArrowFunctionDef(getCurrentPos(ctx), getWS(ctx), diagnosticSrc.pkgID); } @Override public void exitArrowParam(BallerinaParser.ArrowParamContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addVarWithoutType(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(), false, 0); } /** * {@inheritDoc} */ @Override public void exitCallableUnitSignature(BallerinaParser.CallableUnitSignatureContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.endCallableUnitSignature(getCurrentPos(ctx), getWS(ctx), ctx.anyIdentifierName().getText(), getCurrentPos(ctx.anyIdentifierName()), ctx.formalParameterList() != null, ctx.returnParameter() != null, ctx.formalParameterList() != null && ctx.formalParameterList().restParameter() != null); } /** * {@inheritDoc} */ @Override public void exitFiniteType(BallerinaParser.FiniteTypeContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.endFiniteType(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitTypeDefinition(BallerinaParser.TypeDefinitionContext ctx) { if (ctx.exception != null) { return; } boolean publicObject = ctx.PUBLIC() != null; this.pkgBuilder.endTypeDefinition(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(), getCurrentPosFromIdentifier(ctx.Identifier()), publicObject); } /** * {@inheritDoc} */ @Override public void enterObjectBody(BallerinaParser.ObjectBodyContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startVarList(); this.pkgBuilder.startObjFunctionList(); this.pkgBuilder.startFieldBlockList(); } /** * {@inheritDoc} */ @Override public void exitObjectBody(BallerinaParser.ObjectBodyContext ctx) { if (ctx.exception != null) { return; } boolean 
isAnonymous = !(ctx.parent.parent instanceof BallerinaParser.FiniteTypeUnitContext); boolean isFieldAnalyseRequired = (ctx.parent.parent instanceof BallerinaParser.GlobalVariableDefinitionContext || ctx.parent.parent instanceof BallerinaParser.ReturnParameterContext) || ctx.parent.parent.parent.parent instanceof BallerinaParser.TypeDefinitionContext; boolean isAbstract = ((ObjectTypeNameLabelContext) ctx.parent).ABSTRACT() != null; this.pkgBuilder.addObjectType(getCurrentPos(ctx), getWS(ctx), isFieldAnalyseRequired, isAnonymous, isAbstract); } /** * {@inheritDoc} */ @Override public void enterObjectInitializer(BallerinaParser.ObjectInitializerContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startObjectFunctionDef(); } /** * {@inheritDoc} */ @Override public void exitObjectInitializer(BallerinaParser.ObjectInitializerContext ctx) { if (ctx.exception != null) { return; } boolean publicFunc = ctx.PUBLIC() != null; boolean bodyExists = ctx.callableUnitBody() != null; boolean markdownDocExists = ctx.documentationString() != null; this.pkgBuilder.endObjectInitFunctionDef(getCurrentPos(ctx), getWS(ctx), ctx.NEW().getText(), publicFunc, bodyExists, markdownDocExists, false, ctx.annotationAttachment().size()); } /** * {@inheritDoc} */ @Override public void exitObjectInitializerParameterList(BallerinaParser.ObjectInitializerParameterListContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.endObjectInitParamList(getWS(ctx), ctx.objectParameterList() != null, ctx.objectParameterList() != null && ctx.objectParameterList().restParameter() != null); } /** * {@inheritDoc} */ @Override public void exitFieldDefinition(BallerinaParser.FieldDefinitionContext ctx) { if (ctx.exception != null) { return; } DiagnosticPos currentPos = getCurrentPos(ctx); Set<Whitespace> ws = getWS(ctx); String name = ctx.Identifier().getText(); boolean exprAvailable = ctx.expression() != null; this.pkgBuilder.addFieldVariable(currentPos, ws, name, exprAvailable, 
ctx.annotationAttachment().size(), false); } /** * {@inheritDoc} */ @Override public void exitObjectFieldDefinition(BallerinaParser.ObjectFieldDefinitionContext ctx) { if (ctx.exception != null) { return; } DiagnosticPos currentPos = getCurrentPos(ctx); Set<Whitespace> ws = getWS(ctx); String name = ctx.Identifier().getText(); boolean exprAvailable = ctx.expression() != null; boolean deprecatedDocExists = ctx.deprecatedAttachment() != null; int annotationCount = ctx.annotationAttachment().size(); boolean isPrivate = ctx.PRIVATE() != null; boolean isPublic = ctx.PUBLIC() != null; this.pkgBuilder.addFieldVariable(currentPos, ws, name, exprAvailable, deprecatedDocExists, annotationCount, isPrivate, isPublic); } /** * {@inheritDoc} */ @Override public void enterObjectParameterList(BallerinaParser.ObjectParameterListContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startVarList(); } /** * {@inheritDoc} */ @Override public void exitObjectParameter(BallerinaParser.ObjectParameterContext ctx) { if (ctx.exception != null) { return; } boolean isField = ctx.typeName() == null; this.pkgBuilder.addObjectParameter(getCurrentPos(ctx), getWS(ctx), isField, ctx.Identifier().getText(), ctx.annotationAttachment().size()); } /** * {@inheritDoc} */ @Override public void exitObjectDefaultableParameter(BallerinaParser.ObjectDefaultableParameterContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addDefaultableParam(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void enterObjectFunctionDefinition(BallerinaParser.ObjectFunctionDefinitionContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startObjectFunctionDef(); } /** * {@inheritDoc} */ @Override public void exitObjectFunctionDefinition(BallerinaParser.ObjectFunctionDefinitionContext ctx) { if (ctx.exception != null) { return; } boolean publicFunc = ctx.PUBLIC() != null; boolean isPrivate = ctx.PRIVATE() != null; boolean nativeFunc = ctx.EXTERN() != null; 
boolean bodyExists = ctx.callableUnitBody() != null; boolean markdownDocExists = ctx.documentationString() != null; boolean deprecatedDocExists = ctx.deprecatedAttachment() != null; this.pkgBuilder.endObjectAttachedFunctionDef(getCurrentPos(ctx), getWS(ctx), publicFunc, isPrivate, nativeFunc, bodyExists, markdownDocExists, deprecatedDocExists, ctx.annotationAttachment().size()); } /** * {@inheritDoc} */ @Override public void enterAnnotationDefinition(BallerinaParser.AnnotationDefinitionContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startAnnotationDef(getCurrentPos(ctx)); } /** * {@inheritDoc} */ @Override public void exitAnnotationDefinition(BallerinaParser.AnnotationDefinitionContext ctx) { if (ctx.exception != null) { return; } boolean publicAnnotation = KEYWORD_PUBLIC.equals(ctx.getChild(0).getText()); boolean isTypeAttached = ctx.userDefineTypeName() != null; this.pkgBuilder.endAnnotationDef(getWS(ctx), ctx.Identifier().getText(), getCurrentPosFromIdentifier(ctx.Identifier()), publicAnnotation, isTypeAttached); } /** * {@inheritDoc} */ @Override public void exitGlobalVariableDefinition(BallerinaParser.GlobalVariableDefinitionContext ctx) { if (ctx.exception != null) { return; } boolean publicVar = KEYWORD_PUBLIC.equals(ctx.getChild(0).getText()); this.pkgBuilder.addGlobalVariable(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(), ctx.expression() != null, publicVar); } @Override public void exitAttachmentPoint(BallerinaParser.AttachmentPointContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addAttachPoint(AttachPoint.getAttachmentPoint(ctx.getText())); } @Override public void enterWorkerDeclaration(BallerinaParser.WorkerDeclarationContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startWorker(); } @Override public void exitWorkerDeclaration(BallerinaParser.WorkerDeclarationContext ctx) { if (ctx.exception != null) { return; } String workerName = null; if (ctx.workerDefinition() != null) { 
workerName = ctx.workerDefinition().Identifier().getText(); } this.pkgBuilder.addWorker(getCurrentPos(ctx), getWS(ctx), workerName); } /** * {@inheritDoc} */ @Override public void exitWorkerDefinition(BallerinaParser.WorkerDefinitionContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.attachWorkerWS(getWS(ctx)); } @Override public void exitArrayTypeNameLabel(BallerinaParser.ArrayTypeNameLabelContext ctx) { if (ctx.exception != null) { return; } int index = 1; int dimensions = 0; List<Integer> sizes = new ArrayList<>(); List<ParseTree> children = ctx.children; while (index < children.size()) { if (children.get(index).getText().equals("[")) { if (children.get(index + 1).getText().equals("]")) { sizes.add(UNSEALED_ARRAY_INDICATOR); index += 2; } else if (children.get(index + 1) instanceof BallerinaParser.SealedLiteralContext) { sizes.add(OPEN_SEALED_ARRAY_INDICATOR); index += 3; } else { sizes.add(Integer.parseInt(children.get(index + 1).getText())); index += 3; } dimensions++; } else { index++; } } Collections.reverse(sizes); this.pkgBuilder.addArrayType( getCurrentPos(ctx), getWS(ctx), dimensions, sizes.stream().mapToInt(val -> val).toArray()); } @Override public void exitUnionTypeNameLabel(BallerinaParser.UnionTypeNameLabelContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addUnionType(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitTupleTypeNameLabel(BallerinaParser.TupleTypeNameLabelContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addTupleType(getCurrentPos(ctx), getWS(ctx), ctx.typeName().size()); } @Override public void exitNullableTypeNameLabel(BallerinaParser.NullableTypeNameLabelContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.markTypeNodeAsNullable(getWS(ctx)); } @Override public void exitGroupTypeNameLabel(BallerinaParser.GroupTypeNameLabelContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.markTypeNodeAsGrouped(getWS(ctx)); } /** * {@inheritDoc} */ 
@Override public void enterRecordFieldDefinitionList(BallerinaParser.RecordFieldDefinitionListContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startVarList(); } @Override public void exitRecordFieldDefinitionList(BallerinaParser.RecordFieldDefinitionListContext ctx) { if (ctx.exception != null) { return; } boolean isAnonymous = !(ctx.parent.parent instanceof BallerinaParser.FiniteTypeUnitContext); boolean isFieldAnalyseRequired = (ctx.parent.parent instanceof BallerinaParser.GlobalVariableDefinitionContext || ctx.parent.parent instanceof BallerinaParser.ReturnParameterContext) || ctx.parent.parent.parent.parent instanceof BallerinaParser.TypeDefinitionContext; boolean hasRestField = ctx.recordRestFieldDefinition() != null; boolean sealed = hasRestField ? ctx.recordRestFieldDefinition().sealedLiteral() != null : false; this.pkgBuilder.addRecordType(getCurrentPos(ctx), getWS(ctx), isFieldAnalyseRequired, isAnonymous, sealed, hasRestField); } @Override public void exitSimpleTypeName(BallerinaParser.SimpleTypeNameContext ctx) { if (ctx.exception != null) { return; } if (ctx.referenceTypeName() != null || ctx.valueTypeName() != null) { return; } this.pkgBuilder.addValueType(getCurrentPos(ctx), getWS(ctx), ctx.getChild(0).getText()); } @Override public void exitUserDefineTypeName(BallerinaParser.UserDefineTypeNameContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addUserDefineType(getWS(ctx)); } @Override public void exitValueTypeName(BallerinaParser.ValueTypeNameContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addValueType(getCurrentPos(ctx), getWS(ctx), ctx.getText()); } @Override public void exitBuiltInReferenceTypeName(BallerinaParser.BuiltInReferenceTypeNameContext ctx) { if (ctx.exception != null) { return; } if (ctx.functionTypeName() != null) { return; } String typeName = ctx.getChild(0).getText(); if (ctx.nameReference() != null) { this.pkgBuilder.addConstraintType(getCurrentPos(ctx), getWS(ctx), 
typeName); } else if (ctx.typeName() != null) { this.pkgBuilder.addConstraintTypeWithTypeName(getCurrentPos(ctx), getWS(ctx), typeName); } else { this.pkgBuilder.addBuiltInReferenceType(getCurrentPos(ctx), getWS(ctx), typeName); } } @Override public void exitFunctionTypeName(BallerinaParser.FunctionTypeNameContext ctx) { if (ctx.exception != null) { return; } boolean paramsAvail = false, paramsTypeOnly = false, retParamAvail = false; if (ctx.parameterList() != null) { paramsAvail = ctx.parameterList().parameter().size() > 0; } else if (ctx.parameterTypeNameList() != null) { paramsAvail = ctx.parameterTypeNameList().parameterTypeName().size() > 0; paramsTypeOnly = true; } if (ctx.returnParameter() != null) { retParamAvail = true; } this.pkgBuilder.addFunctionType(getCurrentPos(ctx), getWS(ctx), paramsAvail, retParamAvail); } /** * {@inheritDoc} */ @Override public void enterAnnotationAttachment(BallerinaParser.AnnotationAttachmentContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startAnnotationAttachment(getCurrentPos(ctx)); } /** * {@inheritDoc} */ @Override public void exitAnnotationAttachment(BallerinaParser.AnnotationAttachmentContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.setAnnotationAttachmentName(getWS(ctx), ctx.recordLiteral() != null, getCurrentPos(ctx), false); } @Override public void exitVariableDefinitionStatement(BallerinaParser.VariableDefinitionStatementContext ctx) { if (ctx.exception != null) { return; } boolean exprAvailable = ctx.ASSIGN() != null; this.pkgBuilder.addVariableDefStatement(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(), exprAvailable, false); } @Override public void enterRecordLiteral(BallerinaParser.RecordLiteralContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startMapStructLiteral(); } @Override public void exitRecordLiteral(BallerinaParser.RecordLiteralContext ctx) { if (ctx.exception != null) { return; } 
this.pkgBuilder.addMapStructLiteral(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitRecordKeyValue(BallerinaParser.RecordKeyValueContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addKeyValueRecord(getWS(ctx)); } @Override public void exitRecordKey(BallerinaParser.RecordKeyContext ctx) { if (ctx.exception != null) { return; } if (ctx.Identifier() != null) { DiagnosticPos pos = getCurrentPos(ctx); this.pkgBuilder.addNameReference(pos, getWS(ctx), null, ctx.Identifier().getText()); this.pkgBuilder.createSimpleVariableReference(pos, getWS(ctx)); } } @Override public void enterTableLiteral(BallerinaParser.TableLiteralContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startTableLiteral(); } @Override public void exitTableColumn(BallerinaParser.TableColumnContext ctx) { if (ctx.exception != null) { return; } String columnName = ctx.getChild(0).getText(); boolean keyColumn = ctx.PRIMARYKEY() != null; if (keyColumn) { columnName = ctx.getChild(1).getText(); this.pkgBuilder.addTableColumn(columnName); this.pkgBuilder.markPrimaryKeyColumn(columnName); } else { this.pkgBuilder.addTableColumn(columnName); } } @Override public void exitTableDataList(BallerinaParser.TableDataListContext ctx) { if (ctx.exception != null) { return; } if (ctx.expressionList() != null) { this.pkgBuilder.endTableDataRow(); } } @Override public void exitTableData(BallerinaParser.TableDataContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.endTableDataList(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitTableLiteral(BallerinaParser.TableLiteralContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addTableLiteral(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitArrayLiteral(BallerinaParser.ArrayLiteralContext ctx) { if (ctx.exception != null) { return; } boolean argsAvailable = ctx.expressionList() != null; this.pkgBuilder.addArrayInitExpr(getCurrentPos(ctx), getWS(ctx), argsAvailable); } 
    @Override
    public void exitTypeInitExpr(BallerinaParser.TypeInitExprContext ctx) {
        if (ctx.exception != null) {
            return;
        }
        // 'new' expression; the type may be inferred (no userDefineTypeName).
        String initName = ctx.NEW().getText();
        boolean typeAvailable = ctx.userDefineTypeName() != null;
        boolean argsAvailable = ctx.invocationArgList() != null;
        this.pkgBuilder.addTypeInitExpression(getCurrentPos(ctx), getWS(ctx), initName,
                typeAvailable, argsAvailable);
    }

    @Override
    public void exitEndpointDeclaration(BallerinaParser.EndpointDeclarationContext ctx) {
        if (ctx.exception != null) {
            return;
        }
        String endpointName = ctx.Identifier().getText();
        boolean isInitExprExist = ctx.endpointInitlization() != null;
        this.pkgBuilder.addEndpointDefinition(getCurrentPos(ctx), getWS(ctx), endpointName,
                isInitExprExist);
    }

    @Override
    public void exitChannelType(BallerinaParser.ChannelTypeContext ctx) {
        if (ctx.exception != null) {
            return;
        }
        // Channel is a constrained type; child 0 is the 'channel' keyword text.
        String typeName = ctx.getChild(0).getText();
        this.pkgBuilder.addConstraintTypeWithTypeName(getCurrentPos(ctx), getWS(ctx), typeName);
    }

    @Override
    public void exitEndpointType(BallerinaParser.EndpointTypeContext ctx) {
        if (ctx.exception != null) {
            return;
        }
        this.pkgBuilder.addEndpointType(getCurrentPos(ctx), getWS(ctx));
    }

    @Override
    public void exitGlobalEndpointDefinition(BallerinaParser.GlobalEndpointDefinitionContext ctx) {
        if (ctx.exception != null) {
            return;
        }
        // Mark visibility only when the declaration starts with 'public'.
        if (KEYWORD_PUBLIC.equals(ctx.getChild(0).getText())) {
            this.pkgBuilder.markLastEndpointAsPublic();
        }
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void exitAssignmentStatement(BallerinaParser.AssignmentStatementContext ctx) {
        if (ctx.exception != null) {
            return;
        }
        boolean isVarDeclaration = false;
        if (ctx.VAR() != null) {
            isVarDeclaration = true;
        }
        this.pkgBuilder.addAssignmentStatement(getCurrentPos(ctx), getWS(ctx), isVarDeclaration);
    }

    @Override
    public void exitTupleDestructuringStatement(BallerinaParser.TupleDestructuringStatementContext ctx) {
        if (ctx.exception != null) {
            return;
        }
        boolean isVarDeclaration = false;
        boolean isVarExist = ctx.variableReferenceList() != null;
        if (ctx.VAR() != null) {
            isVarDeclaration = true;
        }
        this.pkgBuilder.addTupleDestructuringStatement(getCurrentPos(ctx), getWS(ctx), isVarExist,
                isVarDeclaration);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void exitCompoundAssignmentStatement(BallerinaParser.CompoundAssignmentStatementContext ctx) {
        if (ctx.exception != null) {
            return;
        }
        // Strip the trailing '=' from e.g. "+=" to get the base operator "+".
        String compoundOperatorText = ctx.compoundOperator().getText();
        String operator = compoundOperatorText.substring(0, compoundOperatorText.length() - 1);
        this.pkgBuilder.addCompoundAssignmentStatement(getCurrentPos(ctx), getWS(ctx), operator);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void exitCompoundOperator(BallerinaParser.CompoundOperatorContext ctx) {
        if (ctx.exception != null) {
            return;
        }
        this.pkgBuilder.addCompoundOperator(getWS(ctx));
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void exitPostIncrementStatement(BallerinaParser.PostIncrementStatementContext ctx) {
        if (ctx.exception != null) {
            return;
        }
        // "++" / "--" collapse to a single "+" / "-" operator character.
        this.pkgBuilder.addPostIncrementStatement(getCurrentPos(ctx), getWS(ctx),
                ctx.postArithmeticOperator().getText().substring(0, 1));
    }

    @Override
    public void enterVariableReferenceList(BallerinaParser.VariableReferenceListContext ctx) {
        if (ctx.exception != null) {
            return;
        }
        this.pkgBuilder.startExprNodeList();
    }

    @Override
    public void exitVariableReferenceList(BallerinaParser.VariableReferenceListContext ctx) {
        if (ctx.exception != null) {
            return;
        }
        // Children alternate expr, comma, expr, ... so count = children/2 + 1.
        this.pkgBuilder.endExprNodeList(getWS(ctx), ctx.getChildCount() / 2 + 1);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void enterIfElseStatement(BallerinaParser.IfElseStatementContext ctx) {
        if (ctx.exception != null) {
            return;
        }
        this.pkgBuilder.startIfElseNode(getCurrentPos(ctx));
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void exitIfElseStatement(BallerinaParser.IfElseStatementContext ctx) {
        if (ctx.exception != null) {
            return;
        }
        this.pkgBuilder.endIfElseNode(getWS(ctx));
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void exitIfClause(BallerinaParser.IfClauseContext ctx) {
        if (ctx.exception != null) {
            return;
        }
        this.pkgBuilder.addIfBlock(getCurrentPos(ctx), getWS(ctx));
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void enterElseIfClause(BallerinaParser.ElseIfClauseContext ctx) {
        if (ctx.exception != null) {
            return;
        }
        // 'else if' is modeled as a nested if-else node.
        this.pkgBuilder.startIfElseNode(getCurrentPos(ctx));
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void exitElseIfClause(BallerinaParser.ElseIfClauseContext ctx) {
        if (ctx.exception != null) {
            return;
        }
        this.pkgBuilder.addElseIfBlock(getCurrentPos(ctx), getWS(ctx));
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void enterElseClause(BallerinaParser.ElseClauseContext ctx) {
        if (ctx.exception != null) {
            return;
        }
        this.pkgBuilder.startBlock();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void exitElseClause(BallerinaParser.ElseClauseContext ctx) {
        if (ctx.exception != null) {
            return;
        }
        this.pkgBuilder.addElseBlock(getCurrentPos(ctx), getWS(ctx));
    }

    @Override
    public void enterMatchStatement(BallerinaParser.MatchStatementContext ctx) {
        if (ctx.exception != null) {
            return;
        }
        this.pkgBuilder.createMatchNode(getCurrentPos(ctx));
    }

    @Override
    public void exitMatchStatement(BallerinaParser.MatchStatementContext ctx) {
        if (ctx.exception != null) {
            return;
        }
        this.pkgBuilder.completeMatchNode(getCurrentPos(ctx), getWS(ctx));
    }

    @Override
    public void enterMatchPatternClause(BallerinaParser.MatchPatternClauseContext ctx) {
        if (ctx.exception != null) {
            return;
        }
        this.pkgBuilder.startMatchStmtPattern();
    }

    @Override
    public void exitMatchPatternClause(BallerinaParser.MatchPatternClauseContext ctx) {
        if (ctx.exception != null) {
            return;
        }
        // Binding variable name is optional in a match pattern.
        String identifier = ctx.Identifier() != null ? ctx.Identifier().getText() : null;
        this.pkgBuilder.addMatchStmtPattern(getCurrentPos(ctx), getWS(ctx), identifier);
    }

    @Override
    public void enterForeachStatement(BallerinaParser.ForeachStatementContext ctx) {
        if (ctx.exception != null) {
            return;
        }
        this.pkgBuilder.startForeachStatement();
    }

    @Override
    public void exitForeachStatement(BallerinaParser.ForeachStatementContext ctx) {
        if (ctx.exception != null) {
            return;
        }
        this.pkgBuilder.addForeachStatement(getCurrentPos(ctx), getWS(ctx));
    }

    @Override
    public void exitIntRangeExpression(BallerinaParser.IntRangeExpressionContext ctx) {
        if (ctx.exception != null) {
            return;
        }
        // Flags: open/closed bounds (missing parentheses) and missing upper expr.
        this.pkgBuilder.addIntRangeExpression(getCurrentPos(ctx), getWS(ctx),
                ctx.LEFT_PARENTHESIS() == null, ctx.RIGHT_PARENTHESIS() == null,
                ctx.expression(1) == null);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void enterWhileStatement(BallerinaParser.WhileStatementContext ctx) {
        if (ctx.exception != null) {
            return;
        }
        this.pkgBuilder.startWhileStmt();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void exitWhileStatement(BallerinaParser.WhileStatementContext ctx) {
        if (ctx.exception != null) {
            return;
        }
        this.pkgBuilder.addWhileStmt(getCurrentPos(ctx), getWS(ctx));
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void exitContinueStatement(BallerinaParser.ContinueStatementContext ctx) {
        if (ctx.exception != null) {
            return;
        }
        this.pkgBuilder.addContinueStatement(getCurrentPos(ctx), getWS(ctx));
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void exitBreakStatement(BallerinaParser.BreakStatementContext ctx) {
        if (ctx.exception != null) {
            return;
        }
        this.pkgBuilder.addBreakStatement(getCurrentPos(ctx), getWS(ctx));
    }

    @Override
    public void enterForkJoinStatement(BallerinaParser.ForkJoinStatementContext ctx) {
        if (ctx.exception != null) {
            return;
        }
        this.pkgBuilder.startForkJoinStmt();
    }

    @Override
    public void exitForkJoinStatement(BallerinaParser.ForkJoinStatementContext ctx) {
        if (ctx.exception != null) {
            return;
        }
        this.pkgBuilder.addForkJoinStmt(getCurrentPos(ctx), getWS(ctx));
    }
@Override public void enterJoinClause(BallerinaParser.JoinClauseContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startJoinCause(); } @Override public void exitJoinClause(BallerinaParser.JoinClauseContext ctx) { this.pkgBuilder.addJoinCause(this.getWS(ctx), ctx.Identifier().getText()); } @Override public void exitAnyJoinCondition(BallerinaParser.AnyJoinConditionContext ctx) { if (ctx.exception != null) { return; } List<String> workerNames = new ArrayList<>(); if (ctx.Identifier() != null) { workerNames = ctx.Identifier().stream().map(TerminalNode::getText).collect(Collectors.toList()); } int joinCount = 0; Object value; if ((value = getIntegerLiteral(ctx, ctx.integerLiteral())) != null) { if (value instanceof Long) { try { joinCount = ((Long) value).intValue(); } catch (NumberFormatException ex) { } } } this.pkgBuilder.addJoinCondition(getWS(ctx), "SOME", workerNames, joinCount); } @Override public void exitAllJoinCondition(BallerinaParser.AllJoinConditionContext ctx) { if (ctx.exception != null) { return; } List<String> workerNames = new ArrayList<>(); if (ctx.Identifier() != null) { workerNames = ctx.Identifier().stream().map(TerminalNode::getText).collect(Collectors.toList()); } this.pkgBuilder.addJoinCondition(getWS(ctx), "ALL", workerNames, -1); } @Override public void enterTimeoutClause(BallerinaParser.TimeoutClauseContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startTimeoutCause(); } @Override public void exitTimeoutClause(BallerinaParser.TimeoutClauseContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addTimeoutCause(this.getWS(ctx), ctx.Identifier().getText()); } @Override public void enterTryCatchStatement(BallerinaParser.TryCatchStatementContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startTryCatchFinallyStmt(); } @Override public void exitTryCatchStatement(BallerinaParser.TryCatchStatementContext ctx) { if (ctx.exception != null) { return; } 
this.pkgBuilder.addTryCatchFinallyStmt(getCurrentPos(ctx), getWS(ctx)); } @Override public void enterCatchClauses(BallerinaParser.CatchClausesContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addTryClause(getCurrentPos(ctx)); } @Override public void enterCatchClause(BallerinaParser.CatchClauseContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startCatchClause(); } @Override public void exitCatchClause(BallerinaParser.CatchClauseContext ctx) { if (ctx.exception != null) { return; } String paramName = ctx.Identifier().getText(); this.pkgBuilder.addCatchClause(getCurrentPos(ctx), getWS(ctx), paramName); } @Override public void enterFinallyClause(BallerinaParser.FinallyClauseContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startFinallyBlock(); } @Override public void exitFinallyClause(BallerinaParser.FinallyClauseContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addFinallyBlock(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitThrowStatement(BallerinaParser.ThrowStatementContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addThrowStmt(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitReturnStatement(BallerinaParser.ReturnStatementContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addReturnStatement(this.getCurrentPos(ctx), getWS(ctx), ctx.expression() != null); } @Override public void exitInvokeWorker(BallerinaParser.InvokeWorkerContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addWorkerSendStmt(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(), false, ctx .expression().size() > 1); } @Override public void exitInvokeFork(BallerinaParser.InvokeForkContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addWorkerSendStmt(getCurrentPos(ctx), getWS(ctx), "FORK", true, false); } @Override public void exitWorkerReply(BallerinaParser.WorkerReplyContext ctx) { if 
(ctx.exception != null) { return; } this.pkgBuilder.addWorkerReceiveStmt(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(), ctx .expression().size() > 1); } /** * {@inheritDoc} */ @Override public void exitXmlAttribVariableReference(BallerinaParser.XmlAttribVariableReferenceContext ctx) { boolean isSingleAttrRef = ctx.xmlAttrib().expression() != null; this.pkgBuilder.createXmlAttributesRefExpr(getCurrentPos(ctx), getWS(ctx), isSingleAttrRef); } @Override public void exitSimpleVariableReference(BallerinaParser.SimpleVariableReferenceContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.createSimpleVariableReference(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitFunctionInvocation(BallerinaParser.FunctionInvocationContext ctx) { if (ctx.exception != null) { return; } boolean argsAvailable = ctx.invocationArgList() != null; this.pkgBuilder.createFunctionInvocation(getCurrentPos(ctx), getWS(ctx), argsAvailable); } @Override public void exitFieldVariableReference(BallerinaParser.FieldVariableReferenceContext ctx) { if (ctx.exception != null) { return; } FieldContext field = ctx.field(); String fieldName; FieldKind fieldType; if (field.Identifier() != null) { fieldName = field.Identifier().getText(); fieldType = FieldKind.SINGLE; } else { fieldName = field.MUL().getText(); fieldType = FieldKind.ALL; } this.pkgBuilder.createFieldBasedAccessNode(getCurrentPos(ctx), getWS(ctx), fieldName, fieldType, ctx.field().NOT() != null); } @Override public void exitMapArrayVariableReference(BallerinaParser.MapArrayVariableReferenceContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.createIndexBasedAccessNode(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitReservedWord(BallerinaParser.ReservedWordContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startInvocationNode(getWS(ctx)); } @Override public void exitAnyIdentifierName(BallerinaParser.AnyIdentifierNameContext ctx) { if (ctx.exception != null) 
{ return; } if (ctx.reservedWord() == null) { this.pkgBuilder.startInvocationNode(getWS(ctx)); } } @Override public void exitInvocationReference(BallerinaParser.InvocationReferenceContext ctx) { if (ctx.exception != null) { return; } boolean argsAvailable = ctx.invocation().invocationArgList() != null; String invocation = ctx.invocation().anyIdentifierName().getText(); boolean safeNavigate = ctx.invocation().NOT() != null; this.pkgBuilder.createInvocationNode(getCurrentPos(ctx), getWS(ctx), invocation, argsAvailable, safeNavigate); } /** * {@inheritDoc} */ @Override public void enterInvocationArgList(BallerinaParser.InvocationArgListContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startExprNodeList(); } /** * {@inheritDoc} */ @Override public void exitInvocationArgList(BallerinaParser.InvocationArgListContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.endExprNodeList(getWS(ctx), ctx.getChildCount() / 2 + 1); } public void enterExpressionList(BallerinaParser.ExpressionListContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startExprNodeList(); } @Override public void exitExpressionList(BallerinaParser.ExpressionListContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.endExprNodeList(getWS(ctx), ctx.getChildCount() / 2 + 1); } @Override public void exitExpressionStmt(BallerinaParser.ExpressionStmtContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addExpressionStmt(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void enterTransactionStatement(BallerinaParser.TransactionStatementContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startTransactionStmt(); } /** * {@inheritDoc} */ @Override public void exitTransactionStatement(BallerinaParser.TransactionStatementContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.endTransactionStmt(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void 
exitTransactionClause(BallerinaParser.TransactionClauseContext ctx) { this.pkgBuilder.addTransactionBlock(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitTransactionPropertyInitStatementList( BallerinaParser.TransactionPropertyInitStatementListContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.endTransactionPropertyInitStatementList(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void enterLockStatement(BallerinaParser.LockStatementContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startLockStmt(); } /** * {@inheritDoc} */ @Override public void exitLockStatement(BallerinaParser.LockStatementContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addLockStmt(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void enterOnretryClause(BallerinaParser.OnretryClauseContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startOnretryBlock(); } /** * {@inheritDoc} */ @Override public void exitOnretryClause(BallerinaParser.OnretryClauseContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addOnretryBlock(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitAbortStatement(BallerinaParser.AbortStatementContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addAbortStatement(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitDoneStatement(BallerinaParser.DoneStatementContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addDoneStatement(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitRetryStatement(BallerinaParser.RetryStatementContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addRetryStatement(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitRetriesStatement(BallerinaParser.RetriesStatementContext ctx) { if (ctx.exception != null) { return; } 
this.pkgBuilder.addRetryCountExpression(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitOncommitStatement(BallerinaParser.OncommitStatementContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addCommittedBlock(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitOnabortStatement(BallerinaParser.OnabortStatementContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addAbortedBlock(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void enterNamespaceDeclaration(BallerinaParser.NamespaceDeclarationContext ctx) { } @Override public void exitNamespaceDeclaration(BallerinaParser.NamespaceDeclarationContext ctx) { if (ctx.exception != null) { return; } boolean isTopLevel = ctx.parent instanceof BallerinaParser.CompilationUnitContext; String namespaceUri = ctx.QuotedStringLiteral().getText(); namespaceUri = namespaceUri.substring(1, namespaceUri.length() - 1); namespaceUri = StringEscapeUtils.unescapeJava(namespaceUri); String prefix = (ctx.Identifier() != null) ? 
ctx.Identifier().getText() : null; this.pkgBuilder.addXMLNSDeclaration(getCurrentPos(ctx), getWS(ctx), namespaceUri, prefix, isTopLevel); } @Override public void exitBinaryDivMulModExpression(BallerinaParser.BinaryDivMulModExpressionContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText()); } @Override public void exitBinaryOrExpression(BallerinaParser.BinaryOrExpressionContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText()); } @Override public void exitBinaryEqualExpression(BallerinaParser.BinaryEqualExpressionContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText()); } /** * {@inheritDoc} */ @Override public void exitTypeAccessExpression(BallerinaParser.TypeAccessExpressionContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.createTypeAccessExpr(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitActionInvocation(BallerinaParser.ActionInvocationContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.createActionInvocationNode(getCurrentPos(ctx), getWS(ctx), ctx.START() != null); } @Override public void exitBinaryAndExpression(BallerinaParser.BinaryAndExpressionContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText()); } @Override public void exitBinaryAddSubExpression(BallerinaParser.BinaryAddSubExpressionContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText()); } @Override public void exitBitwiseExpression(BallerinaParser.BitwiseExpressionContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText()); } 
@Override public void exitBitwiseShiftExpression(BallerinaParser.BitwiseShiftExpressionContext ctx) { if (ctx.exception != null) { return; } StringBuilder operator = new StringBuilder(); for (int i = 1; i < ctx.getChildCount() - 1; i++) { operator.append(ctx.getChild(i).getText()); } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), operator.toString()); } /** * {@inheritDoc} */ @Override public void exitTypeConversionExpression(BallerinaParser.TypeConversionExpressionContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.createTypeConversionExpr(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitBinaryCompareExpression(BallerinaParser.BinaryCompareExpressionContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText()); } @Override public void exitIntegerRangeExpression(BallerinaParser.IntegerRangeExpressionContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText()); } @Override public void exitUnaryExpression(BallerinaParser.UnaryExpressionContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.createUnaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(0).getText()); } @Override public void exitBracedOrTupleExpression(BallerinaParser.BracedOrTupleExpressionContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.createBracedOrTupleExpression(getCurrentPos(ctx), getWS(ctx), ctx.expression().size()); } /** * {@inheritDoc} */ @Override public void exitTernaryExpression(BallerinaParser.TernaryExpressionContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.createTernaryExpr(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitCheckedExpression(BallerinaParser.CheckedExpressionContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.createCheckedExpr(getCurrentPos(ctx), getWS(ctx)); } @Override 
public void exitNameReference(BallerinaParser.NameReferenceContext ctx) { if (ctx.exception != null) { return; } if (ctx.Identifier().size() == 2) { String pkgName = ctx.Identifier(0).getText(); String name = ctx.Identifier(1).getText(); this.pkgBuilder.addNameReference(getCurrentPos(ctx), getWS(ctx), pkgName, name); } else { String name = ctx.Identifier(0).getText(); this.pkgBuilder.addNameReference(getCurrentPos(ctx), getWS(ctx), null, name); } } @Override public void exitFunctionNameReference(BallerinaParser.FunctionNameReferenceContext ctx) { if (ctx.exception != null) { return; } if (ctx.Identifier() != null) { String pkgName = ctx.Identifier().getText(); String name = ctx.anyIdentifierName().getText(); this.pkgBuilder.addNameReference(getCurrentPos(ctx), getWS(ctx), pkgName, name); } else { String name = ctx.anyIdentifierName().getText(); this.pkgBuilder.addNameReference(getCurrentPos(ctx), getWS(ctx), null, name); } } /** * {@inheritDoc} */ @Override public void exitReturnParameter(BallerinaParser.ReturnParameterContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addReturnParam(getCurrentPos(ctx), getWS(ctx), ctx.annotationAttachment().size()); } @Override public void exitLambdaReturnParameter(BallerinaParser.LambdaReturnParameterContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addReturnParam(getCurrentPos(ctx), getWS(ctx), ctx.annotationAttachment().size()); } @Override public void enterParameterTypeNameList(BallerinaParser.ParameterTypeNameListContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startVarList(); } /** * {@inheritDoc} */ @Override public void exitParameterTypeNameList(BallerinaParser.ParameterTypeNameListContext ctx) { if (ctx.exception != null) { return; } ParserRuleContext parent = ctx.getParent(); boolean inFuncTypeSig = parent instanceof BallerinaParser.FunctionTypeNameContext || parent instanceof BallerinaParser.ReturnParameterContext && parent.parent instanceof 
BallerinaParser.FunctionTypeNameContext; if (inFuncTypeSig) { this.pkgBuilder.endFuncTypeParamList(getWS(ctx)); } else { this.pkgBuilder.endCallableParamList(getWS(ctx)); } } /** * {@inheritDoc} */ @Override public void exitParameterList(BallerinaParser.ParameterListContext ctx) { if (ctx.exception != null) { return; } ParserRuleContext parent = ctx.getParent(); boolean inFuncTypeSig = parent instanceof BallerinaParser.FunctionTypeNameContext || parent instanceof BallerinaParser.ReturnParameterContext && parent.parent instanceof BallerinaParser.FunctionTypeNameContext; if (inFuncTypeSig) { this.pkgBuilder.endFuncTypeParamList(getWS(ctx)); } else { this.pkgBuilder.endCallableParamList(getWS(ctx)); } } /** * {@inheritDoc} */ @Override /** * {@inheritDoc} */ @Override public void exitNamedArgs(BallerinaParser.NamedArgsContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addNamedArgument(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText()); } /** * {@inheritDoc} */ @Override public void exitRestArgs(BallerinaParser.RestArgsContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addRestArgument(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitXmlLiteral(BallerinaParser.XmlLiteralContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.attachXmlLiteralWS(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitComment(BallerinaParser.CommentContext ctx) { if (ctx.exception != null) { return; } Stack<String> stringFragments = getTemplateTextFragments(ctx.XMLCommentTemplateText()); String endingString = getTemplateEndingStr(ctx.XMLCommentText()); endingString = endingString.substring(0, endingString.length() - 3); this.pkgBuilder.createXMLCommentLiteral(getCurrentPos(ctx), getWS(ctx), stringFragments, endingString); if (ctx.getParent() instanceof BallerinaParser.ContentContext) { this.pkgBuilder.addChildToXMLElement(getWS(ctx)); } } /** * {@inheritDoc} */ @Override public void 
exitElement(BallerinaParser.ElementContext ctx) { if (ctx.exception != null) { return; } if (ctx.getParent() instanceof BallerinaParser.ContentContext) { this.pkgBuilder.addChildToXMLElement(getWS(ctx)); } } /** * {@inheritDoc} */ @Override public void exitStartTag(BallerinaParser.StartTagContext ctx) { if (ctx.exception != null) { return; } boolean isRoot = ctx.parent.parent instanceof BallerinaParser.XmlItemContext; this.pkgBuilder.startXMLElement(getCurrentPos(ctx), getWS(ctx), isRoot); } /** * {@inheritDoc} */ @Override public void exitCloseTag(BallerinaParser.CloseTagContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.endXMLElement(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitEmptyTag(BallerinaParser.EmptyTagContext ctx) { if (ctx.exception != null) { return; } boolean isRoot = ctx.parent.parent instanceof BallerinaParser.XmlItemContext; this.pkgBuilder.startXMLElement(getCurrentPos(ctx), getWS(ctx), isRoot); } /** * {@inheritDoc} */ @Override public void exitProcIns(BallerinaParser.ProcInsContext ctx) { if (ctx.exception != null) { return; } String targetQName = ctx.XML_TAG_SPECIAL_OPEN().getText(); targetQName = targetQName.substring(2, targetQName.length() - 1); Stack<String> textFragments = getTemplateTextFragments(ctx.XMLPITemplateText()); String endingText = getTemplateEndingStr(ctx.XMLPIText()); endingText = endingText.substring(0, endingText.length() - 2); this.pkgBuilder.createXMLPILiteral(getCurrentPos(ctx), getWS(ctx), targetQName, textFragments, endingText); if (ctx.getParent() instanceof BallerinaParser.ContentContext) { this.pkgBuilder.addChildToXMLElement(getWS(ctx)); } } /** * {@inheritDoc} */ @Override public void exitAttribute(BallerinaParser.AttributeContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.createXMLAttribute(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitText(BallerinaParser.TextContext ctx) { if (ctx.exception != null) { return; } 
Stack<String> textFragments = getTemplateTextFragments(ctx.XMLTemplateText()); String endingText = getTemplateEndingStr(ctx.XMLText()); if (ctx.getParent() instanceof BallerinaParser.ContentContext) { this.pkgBuilder.addXMLTextToElement(getCurrentPos(ctx), getWS(ctx), textFragments, endingText); } else { this.pkgBuilder.createXMLTextLiteral(getCurrentPos(ctx), getWS(ctx), textFragments, endingText); } } /** * {@inheritDoc} */ @Override public void exitXmlSingleQuotedString(BallerinaParser.XmlSingleQuotedStringContext ctx) { if (ctx.exception != null) { return; } Stack<String> stringFragments = getTemplateTextFragments(ctx.XMLSingleQuotedTemplateString()); String endingString = getTemplateEndingStr(ctx.XMLSingleQuotedString()); this.pkgBuilder.createXMLQuotedLiteral(getCurrentPos(ctx), getWS(ctx), stringFragments, endingString, QuoteType.SINGLE_QUOTE); } /** * {@inheritDoc} */ @Override public void exitXmlDoubleQuotedString(BallerinaParser.XmlDoubleQuotedStringContext ctx) { if (ctx.exception != null) { return; } Stack<String> stringFragments = getTemplateTextFragments(ctx.XMLDoubleQuotedTemplateString()); String endingString = getTemplateEndingStr(ctx.XMLDoubleQuotedString()); this.pkgBuilder.createXMLQuotedLiteral(getCurrentPos(ctx), getWS(ctx), stringFragments, endingString, QuoteType.DOUBLE_QUOTE); } /** * {@inheritDoc} */ @Override public void exitXmlQualifiedName(BallerinaParser.XmlQualifiedNameContext ctx) { if (ctx.exception != null) { return; } if (ctx.expression() != null) { return; } List<TerminalNode> qnames = ctx.XMLQName(); String prefix = null; String localname; if (qnames.size() > 1) { prefix = qnames.get(0).getText(); localname = qnames.get(1).getText(); } else { localname = qnames.get(0).getText(); } this.pkgBuilder.createXMLQName(getCurrentPos(ctx), getWS(ctx), localname, prefix); } /** * {@inheritDoc} */ @Override public void exitStringTemplateLiteral(BallerinaParser.StringTemplateLiteralContext ctx) { if (ctx.exception != null) { return; } 
Stack<String> stringFragments; String endingText = null; StringTemplateContentContext contentContext = ctx.stringTemplateContent(); if (contentContext != null) { stringFragments = getTemplateTextFragments(contentContext.StringTemplateExpressionStart()); endingText = getTemplateEndingStr(contentContext.StringTemplateText()); } else { stringFragments = new Stack<>(); } this.pkgBuilder.createStringTemplateLiteral(getCurrentPos(ctx), getWS(ctx), stringFragments, endingText); } /** * {@inheritDoc} */ @Override public void exitTableQueryExpression(BallerinaParser.TableQueryExpressionContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addTableQueryExpression(getCurrentPos(ctx), getWS(ctx)); } @Override public void enterOrderByClause(BallerinaParser.OrderByClauseContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startOrderByClauseNode(getCurrentPos(ctx)); } @Override public void exitOrderByClause(BallerinaParser.OrderByClauseContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.endOrderByClauseNode(getCurrentPos(ctx), getWS(ctx)); } @Override public void enterLimitClause(BallerinaParser.LimitClauseContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startLimitClauseNode(getCurrentPos(ctx)); } @Override public void exitLimitClause(BallerinaParser.LimitClauseContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.endLimitClauseNode(getCurrentPos(ctx), getWS(ctx), ctx.DecimalIntegerLiteral().getText()); } @Override public void enterOrderByVariable(BallerinaParser.OrderByVariableContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startOrderByVariableNode(getCurrentPos(ctx)); } @Override public void exitOrderByVariable(BallerinaParser.OrderByVariableContext ctx) { if (ctx.exception != null) { return; } boolean isAscending = ctx.orderByType() != null && ctx.orderByType().ASCENDING() != null; boolean isDescending = ctx.orderByType() != null && ctx.orderByType().DESCENDING() != 
null; this.pkgBuilder.endOrderByVariableNode(getCurrentPos(ctx), getWS(ctx), isAscending, isDescending); } @Override public void enterGroupByClause(BallerinaParser.GroupByClauseContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startGroupByClauseNode(getCurrentPos(ctx)); } @Override public void exitGroupByClause(BallerinaParser.GroupByClauseContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.endGroupByClauseNode(getCurrentPos(ctx), getWS(ctx)); } @Override public void enterHavingClause(BallerinaParser.HavingClauseContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startHavingClauseNode(getCurrentPos(ctx)); } @Override public void exitHavingClause(BallerinaParser.HavingClauseContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.endHavingClauseNode(getCurrentPos(ctx), getWS(ctx)); } @Override public void enterSelectExpression(BallerinaParser.SelectExpressionContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startSelectExpressionNode(getCurrentPos(ctx)); } @Override public void exitSelectExpression(BallerinaParser.SelectExpressionContext ctx) { if (ctx.exception != null) { return; } String identifier = ctx.Identifier() == null ? 
null : ctx.Identifier().getText(); this.pkgBuilder.endSelectExpressionNode(identifier, getCurrentPos(ctx), getWS(ctx)); } @Override public void enterSelectClause(BallerinaParser.SelectClauseContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startSelectClauseNode(getCurrentPos(ctx)); } @Override public void exitSelectClause(BallerinaParser.SelectClauseContext ctx) { if (ctx.exception != null) { return; } boolean isSelectAll = ctx.MUL() != null; boolean isGroupByClauseAvailable = ctx.groupByClause() != null; boolean isHavingClauseAvailable = ctx.havingClause() != null; this.pkgBuilder.endSelectClauseNode(isSelectAll, isGroupByClauseAvailable, isHavingClauseAvailable, getCurrentPos(ctx), getWS(ctx)); } @Override public void enterSelectExpressionList(BallerinaParser.SelectExpressionListContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startSelectExpressionList(); } @Override public void exitSelectExpressionList(BallerinaParser.SelectExpressionListContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.endSelectExpressionList(getWS(ctx), ctx.getChildCount() / 2 + 1); } @Override public void enterWhereClause(BallerinaParser.WhereClauseContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startWhereClauseNode(getCurrentPos(ctx)); } @Override public void exitWhereClause(BallerinaParser.WhereClauseContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.endWhereClauseNode(getCurrentPos(ctx), getWS(ctx)); } @Override public void enterSetAssignmentClause(BallerinaParser.SetAssignmentClauseContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startSetAssignmentClauseNode(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitSetAssignmentClause(BallerinaParser.SetAssignmentClauseContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.endSetAssignmentClauseNode(getCurrentPos(ctx), getWS(ctx)); } @Override public void 
enterSetClause(BallerinaParser.SetClauseContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startSetClauseNode(); } @Override public void exitSetClause(BallerinaParser.SetClauseContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.endSetClauseNode(getWS(ctx), ctx.getChildCount() / 2); } @Override public void enterStreamingAction(BallerinaParser.StreamingActionContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startStreamActionNode(getCurrentPos(ctx), diagnosticSrc.pkgID); } @Override public void exitStreamingAction(BallerinaParser.StreamingActionContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.endStreamActionNode(getCurrentPos(ctx), getWS(ctx)); } @Override public void enterPatternStreamingEdgeInput(BallerinaParser.PatternStreamingEdgeInputContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startPatternStreamingEdgeInputNode(getCurrentPos(ctx)); } @Override public void exitPatternStreamingEdgeInput(BallerinaParser.PatternStreamingEdgeInputContext ctx) { if (ctx.exception != null) { return; } String alias = ctx.Identifier() != null ? 
ctx.Identifier().getText() : null; this.pkgBuilder.endPatternStreamingEdgeInputNode(getCurrentPos(ctx), getWS(ctx), alias); } @Override public void enterWindowClause(BallerinaParser.WindowClauseContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startWindowClauseNode(getCurrentPos(ctx)); } @Override public void exitWindowClause(BallerinaParser.WindowClauseContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.endWindowsClauseNode(getCurrentPos(ctx), getWS(ctx)); } @Override public void enterWithinClause(BallerinaParser.WithinClauseContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startWithinClause(getCurrentPos(ctx)); } @Override public void exitWithinClause(BallerinaParser.WithinClauseContext ctx) { if (ctx.exception != null) { return; } String timeScale = null; String timeDurationValue = null; if (ctx.timeScale() != null) { timeScale = ctx.timeScale().getText(); timeDurationValue = ctx.DecimalIntegerLiteral().getText(); } this.pkgBuilder.endWithinClause(getCurrentPos(ctx), getWS(ctx), timeDurationValue, timeScale); } @Override public void enterPatternClause(BallerinaParser.PatternClauseContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startPatternClause(getCurrentPos(ctx)); } @Override public void exitPatternClause(BallerinaParser.PatternClauseContext ctx) { if (ctx.exception != null) { return; } boolean isForAllEvents = ctx.EVERY() != null; boolean isWithinClauseAvailable = ctx.withinClause() != null; this.pkgBuilder.endPatternClause(isForAllEvents, isWithinClauseAvailable, getCurrentPos(ctx), getWS(ctx)); } @Override public void enterPatternStreamingInput(BallerinaParser.PatternStreamingInputContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startPatternStreamingInputNode(getCurrentPos(ctx)); } @Override public void exitPatternStreamingInput(BallerinaParser.PatternStreamingInputContext ctx) { if (ctx.exception != null) { return; } boolean followedByAvailable = 
ctx.FOLLOWED() != null && ctx.BY() != null; boolean enclosedInParenthesis = ctx.LEFT_PARENTHESIS() != null && ctx.RIGHT_PARENTHESIS() != null; boolean andWithNotAvailable = ctx.NOT() != null && ctx.AND() != null; boolean forWithNotAvailable = ctx.timeScale() != null; boolean onlyAndAvailable = ctx.AND() != null && ctx.NOT() == null && ctx.FOR() == null; boolean onlyOrAvailable = ctx.OR() != null && ctx.NOT() == null && ctx.FOR() == null; boolean commaSeparated = ctx.COMMA() != null; String timeScale = null; String timeDurationValue = null; if (ctx.timeScale() != null) { timeScale = ctx.timeScale().getText(); timeDurationValue = ctx.DecimalIntegerLiteral().getText(); } this.pkgBuilder.endPatternStreamingInputNode(getCurrentPos(ctx), getWS(ctx), followedByAvailable, enclosedInParenthesis, andWithNotAvailable, forWithNotAvailable, onlyAndAvailable, onlyOrAvailable, commaSeparated, timeDurationValue, timeScale); } @Override public void enterStreamingInput(BallerinaParser.StreamingInputContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startStreamingInputNode(getCurrentPos(ctx)); } @Override public void exitStreamingInput(BallerinaParser.StreamingInputContext ctx) { if (ctx.exception != null) { return; } String alias = null; if (ctx.alias != null) { alias = ctx.alias.getText(); } this.pkgBuilder.endStreamingInputNode(alias, getCurrentPos(ctx), getWS(ctx)); } @Override public void enterJoinStreamingInput(BallerinaParser.JoinStreamingInputContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startJoinStreamingInputNode(getCurrentPos(ctx)); } @Override public void exitJoinStreamingInput(BallerinaParser.JoinStreamingInputContext ctx) { if (ctx.exception != null) { return; } boolean unidirectionalJoin = ctx.UNIDIRECTIONAL() != null; if (!unidirectionalJoin) { String joinType = (ctx).children.get(0).getText(); this.pkgBuilder.endJoinStreamingInputNode(getCurrentPos(ctx), getWS(ctx), false, false, joinType); } else { if 
(ctx.getChild(0).getText().equals("unidirectional")) { String joinType = (ctx).children.get(1).getText(); this.pkgBuilder.endJoinStreamingInputNode(getCurrentPos(ctx), getWS(ctx), true, false, joinType); } else { String joinType = (ctx).children.get(0).getText(); this.pkgBuilder.endJoinStreamingInputNode(getCurrentPos(ctx), getWS(ctx), false, true, joinType); } } } /** * {@inheritDoc} */ @Override public void exitJoinType(BallerinaParser.JoinTypeContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.endJoinType(getWS(ctx)); } @Override public void enterOutputRateLimit(BallerinaParser.OutputRateLimitContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startOutputRateLimitNode(getCurrentPos(ctx)); } @Override public void exitOutputRateLimit(BallerinaParser.OutputRateLimitContext ctx) { if (ctx.exception != null) { return; } boolean isSnapshotOutputRateLimit = false; boolean isFirst = false; boolean isLast = false; boolean isAll = false; if (ctx.SNAPSHOT() != null) { isSnapshotOutputRateLimit = true; } else { if (ctx.LAST() != null) { isLast = true; } else if (ctx.FIRST() != null) { isFirst = true; } else if (ctx.LAST() != null) { isAll = true; } } String timeScale = null; if (ctx.timeScale() != null) { timeScale = ctx.timeScale().getText(); } this.pkgBuilder.endOutputRateLimitNode(getCurrentPos(ctx), getWS(ctx), isSnapshotOutputRateLimit, isFirst, isLast, isAll, timeScale, ctx.DecimalIntegerLiteral().getText()); } @Override public void enterTableQuery(BallerinaParser.TableQueryContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startTableQueryNode(getCurrentPos(ctx)); } @Override public void exitTableQuery(BallerinaParser.TableQueryContext ctx) { if (ctx.exception != null) { return; } boolean isSelectClauseAvailable = ctx.selectClause() != null; boolean isOrderByClauseAvailable = ctx.orderByClause() != null; boolean isJoinClauseAvailable = ctx.joinStreamingInput() != null; boolean isLimitClauseAvailable = 
ctx.limitClause() != null; this.pkgBuilder.endTableQueryNode(isJoinClauseAvailable, isSelectClauseAvailable, isOrderByClauseAvailable, isLimitClauseAvailable, getCurrentPos(ctx), getWS(ctx)); } @Override public void enterStreamingQueryStatement(BallerinaParser.StreamingQueryStatementContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startStreamingQueryStatementNode(getCurrentPos(ctx)); } @Override public void exitStreamingQueryStatement(BallerinaParser.StreamingQueryStatementContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.endStreamingQueryStatementNode(getCurrentPos(ctx), getWS(ctx)); } @Override public void enterForeverStatement(BallerinaParser.ForeverStatementContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startForeverNode(getCurrentPos(ctx)); } @Override public void exitForeverStatement(BallerinaParser.ForeverStatementContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.endForeverNode(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void enterDocumentationString(BallerinaParser.DocumentationStringContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startMarkdownDocumentationString(getCurrentPos(ctx)); } /** * {@inheritDoc} */ @Override public void exitDocumentationString(BallerinaParser.DocumentationStringContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.endMarkdownDocumentationString(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitDocumentationLine(BallerinaParser.DocumentationLineContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.endMarkDownDocumentLine(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitDocumentationContent(BallerinaParser.DocumentationContentContext ctx) { if (ctx.exception != null) { return; } String text = ctx.getText() != null ? 
ctx.getText() : "";
        this.pkgBuilder.endMarkdownDocumentationText(getCurrentPos(ctx), getWS(ctx), text);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void exitParameterDocumentationLine(BallerinaParser.ParameterDocumentationLineContext ctx) {
        if (ctx.exception != null) {
            return;
        }

        this.pkgBuilder.endParameterDocumentationLine(getWS(ctx));
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void exitParameterDocumentation(BallerinaParser.ParameterDocumentationContext ctx) {
        if (ctx.exception != null) {
            return;
        }

        String parameterName = ctx.docParameterName() != null ? ctx.docParameterName().getText() : "";
        String description = ctx.documentationText() != null ? ctx.documentationText().getText() : "";
        // BUG FIX: getCurrentPos(ctx.docParameterName()) was called unconditionally, which
        // throws an NPE when the parameter name is absent (the ternary above shows it can
        // be null). Fall back to the whole clause's position in that case.
        DiagnosticPos paramNamePos = ctx.docParameterName() != null
                ? getCurrentPos(ctx.docParameterName())
                : getCurrentPos(ctx);
        this.pkgBuilder.endParameterDocumentation(paramNamePos, getWS(ctx), parameterName, description);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void exitParameterDescriptionLine(BallerinaParser.ParameterDescriptionLineContext ctx) {
        if (ctx.exception != null) {
            return;
        }

        String description = ctx.documentationText() != null ? ctx.documentationText().getText() : "";
        this.pkgBuilder.endParameterDocumentationDescription(getWS(ctx), description);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void exitReturnParameterDocumentation(BallerinaParser.ReturnParameterDocumentationContext ctx) {
        if (ctx.exception != null) {
            return;
        }

        String description = ctx.documentationText() != null ? ctx.documentationText().getText() : "";
        this.pkgBuilder.endReturnParameterDocumentation(getCurrentPos(ctx.getParent()), getWS(ctx), description);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void exitReturnParameterDescriptionLine(BallerinaParser.ReturnParameterDescriptionLineContext ctx) {
        if (ctx.exception != null) {
            return;
        }

        String description = ctx.documentationText() != null ?
ctx.documentationText().getText() : ""; this.pkgBuilder.endReturnParameterDocumentationDescription(getWS(ctx), description); } /** * {@inheritDoc} */ @Override public void exitDeprecatedAttachment(BallerinaParser.DeprecatedAttachmentContext ctx) { if (ctx.exception != null) { return; } String contentText = ctx.deprecatedText() != null ? ctx.deprecatedText().getText() : ""; this.pkgBuilder.createDeprecatedNode(getCurrentPos(ctx), getWS(ctx), contentText); } /** * {@inheritDoc} */ @Override public void exitAwaitExpr(BallerinaParser.AwaitExprContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.createAwaitExpr(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitVariableReferenceExpression(BallerinaParser.VariableReferenceExpressionContext ctx) { if (ctx.exception != null) { return; } if (ctx.START() != null) { this.pkgBuilder.markLastInvocationAsAsync(getCurrentPos(ctx)); } } /** * {@inheritDoc} */ @Override public void enterMatchExpression(BallerinaParser.MatchExpressionContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startMatchExpression(); } /** * {@inheritDoc} */ @Override public void exitMatchExpression(BallerinaParser.MatchExpressionContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.endMatchExpression(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitMatchExpressionPatternClause(BallerinaParser.MatchExpressionPatternClauseContext ctx) { if (ctx.exception != null) { return; } String identifier = ctx.Identifier() != null ? 
ctx.Identifier().getText() : null; this.pkgBuilder.addMatchExprPattern(getCurrentPos(ctx), getWS(ctx), identifier); } /** * {@inheritDoc} */ @Override public void exitElvisExpression(BallerinaParser.ElvisExpressionContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.createElvisExpr(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void enterScopeStatement(BallerinaParser.ScopeStatementContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startScopeStmt(); } /** * {@inheritDoc} */ @Override public void exitScopeStatement(BallerinaParser.ScopeStatementContext ctx) { if (ctx.exception != null) { return; } String name = null; if (ctx.scopeClause().Identifier() != null) { name = ctx.scopeClause().Identifier().getText(); } BLangIdentifier identifier = new BLangIdentifier(); identifier.setValue(name); this.pkgBuilder.endScopeStmt(getCurrentPos(ctx), getWS(ctx), identifier, getFunctionDefinition(ctx)); } @Override public void exitScopeClause(BallerinaParser.ScopeClauseContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addScopeBlock(getCurrentPos(ctx)); } /** * {@inheritDoc} */ @Override public void enterCompensationClause(BallerinaParser.CompensationClauseContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startOnCompensationBlock(); } @Override public void exitCompensateStatement(BallerinaParser.CompensateStatementContext ctx) { if (ctx.exception != null) { return; } String scope = null; if (ctx.Identifier() != null) { scope = ctx.Identifier().getText(); } this.pkgBuilder.addCompensateStatement(getCurrentPos(ctx), getWS(ctx), scope); } private DiagnosticPos getCurrentPos(ParserRuleContext ctx) { int startLine = ctx.getStart().getLine(); int startCol = ctx.getStart().getCharPositionInLine() + 1; int endLine = -1; int endCol = -1; Token stop = ctx.getStop(); if (stop != null) { endLine = stop.getLine(); endCol = stop.getCharPositionInLine() + 1; } return new 
DiagnosticPos(diagnosticSrc, startLine, endLine, startCol, endCol);
    }

    // Diagnostic position of a single identifier token: spans exactly one line,
    // from the token's (1-based) start column to start column + token length.
    private DiagnosticPos getCurrentPosFromIdentifier(TerminalNode node) {
        Token symbol = node.getSymbol();
        int startLine = symbol.getLine();
        int startCol = symbol.getCharPositionInLine() + 1;
        int endLine = startLine;
        int endCol = startCol + symbol.getText().length();
        return new DiagnosticPos(diagnosticSrc, startLine, endLine, startCol, endCol);
    }

    // Whitespace hook: this base listener discards whitespace and returns null.
    // NOTE(review): protected — presumably overridden by a whitespace-preserving
    // subclass listener; confirm before relying on a null return here.
    protected Set<Whitespace> getWS(ParserRuleContext ctx) {
        return null;
    }

    // Collects the literal text pieces of a template expression, pushing null for
    // absent nodes. The last two characters of each fragment (presumably the
    // template expression-start marker) are dropped.
    private Stack<String> getTemplateTextFragments(List<TerminalNode> nodes) {
        Stack<String> templateStrFragments = new Stack<>();
        nodes.forEach(node -> {
            if (node == null) {
                templateStrFragments.push(null);
            } else {
                String str = node.getText();
                templateStrFragments.push(str.substring(0, str.length() - 2));
            }
        });
        return templateStrFragments;
    }

    // Trailing literal text of a template, or null when there is none.
    private String getTemplateEndingStr(TerminalNode node) {
        return node == null ? null : node.getText();
    }

    // Returns the literal token's text, prefixed with "-" when the enclosing
    // context's first child is a unary minus sign.
    private String getNodeValue(ParserRuleContext ctx, TerminalNode node) {
        String op = ctx.getChild(0).getText();
        String value = node.getText();
        if (op != null && "-".equals(op)) {
            value = "-" + value;
        }
        return value;
    }

    // Normalizes a hexadecimal floating-point literal by appending a zero binary
    // exponent ("p0") when the literal carries no explicit p/P exponent.
    private String getHexNodeValue(ParserRuleContext ctx, TerminalNode node) {
        String value = getNodeValue(ctx, node);
        if (!(value.contains("p") || value.contains("P"))) {
            value = value + "p0";
        }
        return value;
    }

    // Parses a decimal/hexadecimal/binary integer literal via parseLong, choosing
    // the radix and the too-small/too-large diagnostic codes per literal form.
    // Returns null when no recognized integer literal form is present.
    private Object getIntegerLiteral(ParserRuleContext simpleLiteralContext,
                                     BallerinaParser.IntegerLiteralContext integerLiteralContext) {
        if (integerLiteralContext.DecimalIntegerLiteral() != null) {
            String nodeValue = getNodeValue(simpleLiteralContext, integerLiteralContext.DecimalIntegerLiteral());
            return parseLong(simpleLiteralContext, nodeValue, nodeValue, 10, DiagnosticCode.INTEGER_TOO_SMALL,
                    DiagnosticCode.INTEGER_TOO_LARGE);
        } else if (integerLiteralContext.HexIntegerLiteral() != null) {
            String nodeValue = getNodeValue(simpleLiteralContext, integerLiteralContext.HexIntegerLiteral());
            String processedNodeValue =
nodeValue.toLowerCase().replace("0x", "");
            return parseLong(simpleLiteralContext, nodeValue, processedNodeValue, 16,
                    DiagnosticCode.HEXADECIMAL_TOO_SMALL, DiagnosticCode.HEXADECIMAL_TOO_LARGE);
        } else if (integerLiteralContext.BinaryIntegerLiteral() != null) {
            String nodeValue = getNodeValue(simpleLiteralContext, integerLiteralContext.BinaryIntegerLiteral());
            String processedNodeValue = nodeValue.toLowerCase().replace("0b", "");
            return parseLong(simpleLiteralContext, nodeValue, processedNodeValue, 2,
                    DiagnosticCode.BINARY_TOO_SMALL, DiagnosticCode.BINARY_TOO_LARGE);
        }
        return null;
    }

    /**
     * Get the original integer value.
     *
     * @param integerLiteralContext integer literal context
     * @return original integer value
     */
    private String getOriginalIntegerValue(BallerinaParser.IntegerLiteralContext integerLiteralContext) {
        String originalValue = null;
        if (integerLiteralContext.DecimalIntegerLiteral() != null) {
            originalValue = integerLiteralContext.DecimalIntegerLiteral().getText();
        } else if (integerLiteralContext.HexIntegerLiteral() != null) {
            originalValue = integerLiteralContext.HexIntegerLiteral().getText();
        } else if (integerLiteralContext.BinaryIntegerLiteral() != null) {
            originalValue = integerLiteralContext.BinaryIntegerLiteral().getText();
        }
        return originalValue;
    }

    // Builds the lambda function wrapping a scope statement's compensation block.
    private BLangLambdaFunction getFunctionDefinition(BallerinaParser.ScopeStatementContext ctx) {
        boolean bodyExists = ctx.compensationClause().callableUnitBody() != null;
        return this.pkgBuilder.getScopesFunctionDef(getCurrentPos(ctx), getWS(ctx), bodyExists,
                ctx.scopeClause().Identifier().getText());
    }

    /**
     * Parses {@code processedNodeValue} as a signed long in the given radix. On failure
     * (out-of-range literal) logs {@code code1} for negative literals or {@code code2}
     * for positive ones, and returns the original literal text so later compiler phases
     * can keep going with a placeholder value.
     */
    private Object parseLong(ParserRuleContext context, String originalNodeValue, String processedNodeValue,
                             int radix, DiagnosticCode code1, DiagnosticCode code2) {
        try {
            return Long.parseLong(processedNodeValue, radix);
        } catch (NumberFormatException e) {
            // Narrowed from catch (Exception): Long.parseLong only throws
            // NumberFormatException; a broad catch could mask unrelated errors.
            DiagnosticPos pos = getCurrentPos(context);
            // Keep the getWS call (result unused here): overriding listeners may consume
            // pending whitespace as a side effect. The unused local binding was removed.
            getWS(context);
            if (originalNodeValue.startsWith("-")) {
                dlog.error(pos, code1, originalNodeValue);
            } else {
                dlog.error(pos, code2, originalNodeValue);
            }
        }
        return originalNodeValue;
    }
}
class BLangParserListener extends BallerinaParserBaseListener { private static final String KEYWORD_PUBLIC = "public"; private static final String KEYWORD_EXTERN = "extern"; private BLangPackageBuilder pkgBuilder; private BDiagnosticSource diagnosticSrc; private BLangDiagnosticLog dlog; private List<String> pkgNameComps; private String pkgVersion; BLangParserListener(CompilerContext context, CompilationUnitNode compUnit, BDiagnosticSource diagnosticSource) { this.pkgBuilder = new BLangPackageBuilder(context, compUnit); this.diagnosticSrc = diagnosticSource; this.dlog = BLangDiagnosticLog.getInstance(context); } @Override public void enterParameterList(BallerinaParser.ParameterListContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startVarList(); } @Override public void exitSimpleParameter(BallerinaParser.SimpleParameterContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addVar(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(), false, ctx.annotationAttachment().size()); } /** * {@inheritDoc} */ @Override public void enterFormalParameterList(BallerinaParser.FormalParameterListContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startVarList(); } /** * {@inheritDoc} */ @Override public void exitFormalParameterList(BallerinaParser.FormalParameterListContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.endFormalParameterList(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitDefaultableParameter(BallerinaParser.DefaultableParameterContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addDefaultableParam(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitRestParameter(BallerinaParser.RestParameterContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addRestParam(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(), ctx.annotationAttachment().size()); } /** * {@inheritDoc} */ @Override public void 
exitParameterTypeName(BallerinaParser.ParameterTypeNameContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addVar(getCurrentPos(ctx), getWS(ctx), null, false, 0); } @Override public void enterCompilationUnit(BallerinaParser.CompilationUnitContext ctx) { } /** * {@inheritDoc} */ @Override public void exitCompilationUnit(BallerinaParser.CompilationUnitContext ctx) { this.pkgBuilder.endCompilationUnit(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitPackageName(BallerinaParser.PackageNameContext ctx) { if (ctx.exception != null) { return; } this.pkgNameComps = new ArrayList<>(); ctx.Identifier().forEach(e -> pkgNameComps.add(e.getText())); this.pkgVersion = ctx.version() != null ? ctx.version().Identifier().getText() : null; } /** * {@inheritDoc} */ @Override public void exitImportDeclaration(BallerinaParser.ImportDeclarationContext ctx) { if (ctx.exception != null) { return; } String alias = ctx.Identifier() != null ? ctx.Identifier().getText() : null; BallerinaParser.OrgNameContext orgNameContext = ctx.orgName(); if (orgNameContext == null) { this.pkgBuilder.addImportPackageDeclaration(getCurrentPos(ctx), getWS(ctx), null, this.pkgNameComps, this.pkgVersion, alias); } else { this.pkgBuilder.addImportPackageDeclaration(getCurrentPos(ctx), getWS(ctx), orgNameContext.getText(), this.pkgNameComps, this.pkgVersion, alias); } } /** * {@inheritDoc} */ @Override public void enterServiceDefinition(BallerinaParser.ServiceDefinitionContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startServiceDef(getCurrentPos(ctx)); } /** * {@inheritDoc} */ @Override public void exitServiceDefinition(BallerinaParser.ServiceDefinitionContext ctx) { if (ctx.exception != null) { return; } boolean constrained = ctx.nameReference() != null; this.pkgBuilder.endServiceDef(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(), getCurrentPosFromIdentifier(ctx.Identifier()), constrained); } @Override public void 
exitServiceEndpointAttachments(BallerinaParser.ServiceEndpointAttachmentsContext ctx) { if (ctx.exception != null) { return; } if (ctx.recordLiteral() != null) { this.pkgBuilder.addAnonymousEndpointBind(getWS(ctx)); return; } this.pkgBuilder.addServiceEndpointAttachments(ctx.nameReference().size(), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void enterServiceBody(BallerinaParser.ServiceBodyContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startBlock(); } /** * {@inheritDoc} */ @Override public void exitServiceBody(BallerinaParser.ServiceBodyContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addServiceBody(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void enterResourceDefinition(BallerinaParser.ResourceDefinitionContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startResourceDef(); } /** * {@inheritDoc} */ @Override public void exitResourceDefinition(BallerinaParser.ResourceDefinitionContext ctx) { if (ctx.exception != null) { return; } boolean markdownDocExists = ctx.documentationString() != null; boolean isDeprecated = ctx.deprecatedAttachment() != null; boolean hasParameters = ctx.resourceParameterList() != null; this.pkgBuilder.endResourceDef(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(), markdownDocExists, isDeprecated, hasParameters); } @Override public void enterResourceParameterList(BallerinaParser.ResourceParameterListContext ctx) { if (ctx.exception != null) { return; } final BallerinaParser.ResourceDefinitionContext parent = (BallerinaParser.ResourceDefinitionContext) ctx.parent; this.pkgBuilder.addResourceAnnotation(parent.annotationAttachment().size()); } @Override public void exitResourceParameterList(BallerinaParser.ResourceParameterListContext ctx) { if (ctx.exception != null) { return; } final boolean isEndpointDefined = ctx.ENDPOINT() != null; if (isEndpointDefined) { this.pkgBuilder.addEndpointVariable(getCurrentPos(ctx), getWS(ctx), 
ctx.Identifier().getText()); } } /** * {@inheritDoc} */ @Override public void enterCallableUnitBody(BallerinaParser.CallableUnitBodyContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startBlock(); } /** * {@inheritDoc} */ @Override public void exitCallableUnitBody(BallerinaParser.CallableUnitBodyContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.endCallableUnitBody(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void enterFunctionDefinition(BallerinaParser.FunctionDefinitionContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startFunctionDef(); } /** * {@inheritDoc} */ @Override public void exitFunctionDefinition(BallerinaParser.FunctionDefinitionContext ctx) { if (ctx.exception != null) { return; } int nativeKWTokenIndex = 0; boolean publicFunc = KEYWORD_PUBLIC.equals(ctx.getChild(0).getText()); if (publicFunc) { nativeKWTokenIndex = 1; } boolean nativeFunc = KEYWORD_EXTERN.equals(ctx.getChild(nativeKWTokenIndex).getText()); boolean bodyExists = ctx.callableUnitBody() != null; if (ctx.Identifier() != null) { this.pkgBuilder.endObjectOuterFunctionDef(getCurrentPos(ctx), getWS(ctx), publicFunc, nativeFunc, bodyExists, ctx.Identifier().getText()); return; } boolean isReceiverAttached = ctx.typeName() != null; this.pkgBuilder.endFunctionDef(getCurrentPos(ctx), getWS(ctx), publicFunc, nativeFunc, bodyExists, isReceiverAttached, false); } @Override public void enterLambdaFunction(BallerinaParser.LambdaFunctionContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startLambdaFunctionDef(diagnosticSrc.pkgID); } @Override public void exitLambdaFunction(BallerinaParser.LambdaFunctionContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addLambdaFunctionDef(getCurrentPos(ctx), getWS(ctx), ctx.formalParameterList() != null, ctx.lambdaReturnParameter() != null, ctx.formalParameterList() != null && ctx.formalParameterList().restParameter() != null); } @Override public void 
// NOTE(review): this region of the file is collapsed onto very long physical lines; the
// code below is preserved token-for-token, with only comments added. The first and last
// fragments complete methods that straddle the original line breaks.
//
// ANTLR parse-tree listener callbacks for: arrow functions and their parameters,
// callable-unit (function) signatures, finite types, type definitions, object bodies,
// type references, object initializers, and record/object field and parameter
// definitions. Every callback first bails out when the parser recorded a syntax error
// for the node (ctx.exception != null), then forwards source position (getCurrentPos)
// and captured whitespace (getWS) to this.pkgBuilder, which assembles the AST.
// The "isAnonymous"/"isFieldAnalyseRequired" flags are derived purely from the parse
// tree's parent-context chain — presumably to distinguish inline type literals from
// named type definitions; TODO confirm against the grammar.
enterArrowFunction(BallerinaParser.ArrowFunctionContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startVarList(); } @Override public void exitArrowFunctionExpression(BallerinaParser.ArrowFunctionExpressionContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addArrowFunctionDef(getCurrentPos(ctx), getWS(ctx), diagnosticSrc.pkgID); } @Override public void exitArrowParam(BallerinaParser.ArrowParamContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addVarWithoutType(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(), false, 0); } /** * {@inheritDoc} */ @Override public void exitCallableUnitSignature(BallerinaParser.CallableUnitSignatureContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.endCallableUnitSignature(getCurrentPos(ctx), getWS(ctx), ctx.anyIdentifierName().getText(), getCurrentPos(ctx.anyIdentifierName()), ctx.formalParameterList() != null, ctx.returnParameter() != null, ctx.formalParameterList() != null && ctx.formalParameterList().restParameter() != null); } /** * {@inheritDoc} */ @Override public void exitFiniteType(BallerinaParser.FiniteTypeContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.endFiniteType(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitTypeDefinition(BallerinaParser.TypeDefinitionContext ctx) { if (ctx.exception != null) { return; } boolean publicObject = ctx.PUBLIC() != null; this.pkgBuilder.endTypeDefinition(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(), getCurrentPosFromIdentifier(ctx.Identifier()), publicObject); } /** * {@inheritDoc} */ @Override public void enterObjectBody(BallerinaParser.ObjectBodyContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startObjectType(); } /** * {@inheritDoc} */ @Override public void exitObjectBody(BallerinaParser.ObjectBodyContext ctx) { if (ctx.exception != null) { return; } boolean isAnonymous = !(ctx.parent.parent instanceof
BallerinaParser.FiniteTypeUnitContext); boolean isFieldAnalyseRequired = (ctx.parent.parent instanceof BallerinaParser.GlobalVariableDefinitionContext || ctx.parent.parent instanceof BallerinaParser.ReturnParameterContext) || ctx.parent.parent.parent.parent instanceof BallerinaParser.TypeDefinitionContext; boolean isAbstract = ((ObjectTypeNameLabelContext) ctx.parent).ABSTRACT() != null; this.pkgBuilder.addObjectType(getCurrentPos(ctx), getWS(ctx), isFieldAnalyseRequired, isAnonymous, isAbstract); } @Override public void exitTypeReference(BallerinaParser.TypeReferenceContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addTypeReference(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void enterObjectInitializer(BallerinaParser.ObjectInitializerContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startObjectFunctionDef(); } /** * {@inheritDoc} */ @Override public void exitObjectInitializer(BallerinaParser.ObjectInitializerContext ctx) { if (ctx.exception != null) { return; } boolean publicFunc = ctx.PUBLIC() != null; boolean bodyExists = ctx.callableUnitBody() != null; boolean markdownDocExists = ctx.documentationString() != null; this.pkgBuilder.endObjectInitFunctionDef(getCurrentPos(ctx), getWS(ctx), ctx.NEW().getText(), publicFunc, bodyExists, markdownDocExists, false, ctx.annotationAttachment().size()); } /** * {@inheritDoc} */ @Override public void exitObjectInitializerParameterList(BallerinaParser.ObjectInitializerParameterListContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.endObjectInitParamList(getWS(ctx), ctx.objectParameterList() != null, ctx.objectParameterList() != null && ctx.objectParameterList().restParameter() != null); } /** * {@inheritDoc} */ @Override public void exitFieldDefinition(BallerinaParser.FieldDefinitionContext ctx) { if (ctx.exception != null) { return; } DiagnosticPos currentPos = getCurrentPos(ctx); Set<Whitespace> ws = getWS(ctx); String name =
ctx.Identifier().getText(); boolean exprAvailable = ctx.expression() != null; boolean isOptional = ctx.QUESTION_MARK() != null; this.pkgBuilder.addFieldVariable(currentPos, ws, name, exprAvailable, ctx.annotationAttachment().size(), false, isOptional); } /** * {@inheritDoc} */ @Override public void exitObjectFieldDefinition(BallerinaParser.ObjectFieldDefinitionContext ctx) { if (ctx.exception != null) { return; } DiagnosticPos currentPos = getCurrentPos(ctx); Set<Whitespace> ws = getWS(ctx); String name = ctx.Identifier().getText(); boolean exprAvailable = ctx.expression() != null; boolean deprecatedDocExists = ctx.deprecatedAttachment() != null; int annotationCount = ctx.annotationAttachment().size(); boolean isPrivate = ctx.PRIVATE() != null; boolean isPublic = ctx.PUBLIC() != null; this.pkgBuilder.addFieldVariable(currentPos, ws, name, exprAvailable, deprecatedDocExists, annotationCount, isPrivate, isPublic); } /** * {@inheritDoc} */ @Override public void enterObjectParameterList(BallerinaParser.ObjectParameterListContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startVarList(); } /** * {@inheritDoc} */ @Override public void exitObjectParameter(BallerinaParser.ObjectParameterContext ctx) { if (ctx.exception != null) { return; } boolean isField = ctx.typeName() == null; this.pkgBuilder.addObjectParameter(getCurrentPos(ctx), getWS(ctx), isField, ctx.Identifier().getText(), ctx.annotationAttachment().size()); } /** * {@inheritDoc} */ @Override public void exitObjectDefaultableParameter(BallerinaParser.ObjectDefaultableParameterContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addDefaultableParam(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void enterObjectFunctionDefinition(BallerinaParser.ObjectFunctionDefinitionContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startObjectFunctionDef(); } /** * {@inheritDoc} */ @Override public void
// NOTE(review): collapsed source preserved verbatim; comments only. The leading token
// completes the "@Override public void" fragment on the previous line.
//
// Callbacks for: object attached-function definitions (visibility/extern/body/doc flags
// are read straight from the presence of grammar tokens), annotation definitions,
// global variable definitions, annotation attachment points, worker declarations, and
// the array/union/tuple/nullable/group type-name labels. The array-type handler walks
// the child token list manually, collecting one size entry per "[...]" dimension:
// UNSEALED_ARRAY_INDICATOR for "[]", OPEN_SEALED_ARRAY_INDICATOR for a sealed literal,
// otherwise the parsed integer size; sizes are reversed so they are reported
// outermost-dimension-first — TODO confirm intended ordering against pkgBuilder.
exitObjectFunctionDefinition(BallerinaParser.ObjectFunctionDefinitionContext ctx) { if (ctx.exception != null) { return; } boolean publicFunc = ctx.PUBLIC() != null; boolean isPrivate = ctx.PRIVATE() != null; boolean nativeFunc = ctx.EXTERN() != null; boolean bodyExists = ctx.callableUnitBody() != null; boolean markdownDocExists = ctx.documentationString() != null; boolean deprecatedDocExists = ctx.deprecatedAttachment() != null; this.pkgBuilder.endObjectAttachedFunctionDef(getCurrentPos(ctx), getWS(ctx), publicFunc, isPrivate, nativeFunc, bodyExists, markdownDocExists, deprecatedDocExists, ctx.annotationAttachment().size()); } /** * {@inheritDoc} */ @Override public void enterAnnotationDefinition(BallerinaParser.AnnotationDefinitionContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startAnnotationDef(getCurrentPos(ctx)); } /** * {@inheritDoc} */ @Override public void exitAnnotationDefinition(BallerinaParser.AnnotationDefinitionContext ctx) { if (ctx.exception != null) { return; } boolean publicAnnotation = KEYWORD_PUBLIC.equals(ctx.getChild(0).getText()); boolean isTypeAttached = ctx.userDefineTypeName() != null; this.pkgBuilder.endAnnotationDef(getWS(ctx), ctx.Identifier().getText(), getCurrentPosFromIdentifier(ctx.Identifier()), publicAnnotation, isTypeAttached); } /** * {@inheritDoc} */ @Override public void exitGlobalVariableDefinition(BallerinaParser.GlobalVariableDefinitionContext ctx) { if (ctx.exception != null) { return; } boolean publicVar = KEYWORD_PUBLIC.equals(ctx.getChild(0).getText()); this.pkgBuilder.addGlobalVariable(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(), ctx.expression() != null, publicVar); } @Override public void exitAttachmentPoint(BallerinaParser.AttachmentPointContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addAttachPoint(AttachPoint.getAttachmentPoint(ctx.getText())); } @Override public void enterWorkerDeclaration(BallerinaParser.WorkerDeclarationContext ctx) { if (ctx.exception
!= null) { return; } this.pkgBuilder.startWorker(); } @Override public void exitWorkerDeclaration(BallerinaParser.WorkerDeclarationContext ctx) { if (ctx.exception != null) { return; } String workerName = null; if (ctx.workerDefinition() != null) { workerName = ctx.workerDefinition().Identifier().getText(); } this.pkgBuilder.addWorker(getCurrentPos(ctx), getWS(ctx), workerName); } /** * {@inheritDoc} */ @Override public void exitWorkerDefinition(BallerinaParser.WorkerDefinitionContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.attachWorkerWS(getWS(ctx)); } @Override public void exitArrayTypeNameLabel(BallerinaParser.ArrayTypeNameLabelContext ctx) { if (ctx.exception != null) { return; } int index = 1; int dimensions = 0; List<Integer> sizes = new ArrayList<>(); List<ParseTree> children = ctx.children; while (index < children.size()) { if (children.get(index).getText().equals("[")) { if (children.get(index + 1).getText().equals("]")) { sizes.add(UNSEALED_ARRAY_INDICATOR); index += 2; } else if (children.get(index + 1) instanceof BallerinaParser.SealedLiteralContext) { sizes.add(OPEN_SEALED_ARRAY_INDICATOR); index += 3; } else { sizes.add(Integer.parseInt(children.get(index + 1).getText())); index += 3; } dimensions++; } else { index++; } } Collections.reverse(sizes); this.pkgBuilder.addArrayType( getCurrentPos(ctx), getWS(ctx), dimensions, sizes.stream().mapToInt(val -> val).toArray()); } @Override public void exitUnionTypeNameLabel(BallerinaParser.UnionTypeNameLabelContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addUnionType(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitTupleTypeNameLabel(BallerinaParser.TupleTypeNameLabelContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addTupleType(getCurrentPos(ctx), getWS(ctx), ctx.typeName().size()); } @Override public void exitNullableTypeNameLabel(BallerinaParser.NullableTypeNameLabelContext ctx) { if (ctx.exception != null) { return; }
this.pkgBuilder.markTypeNodeAsNullable(getWS(ctx)); } @Override public void exitGroupTypeNameLabel(BallerinaParser.GroupTypeNameLabelContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.markTypeNodeAsGrouped(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void enterRecordFieldDefinitionList(BallerinaParser.RecordFieldDefinitionListContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startVarList(); } @Override public void exitRecordFieldDefinitionList(BallerinaParser.RecordFieldDefinitionListContext ctx) { if (ctx.exception != null) { return; } boolean isAnonymous = !(ctx.parent.parent instanceof BallerinaParser.FiniteTypeUnitContext); boolean isFieldAnalyseRequired = (ctx.parent.parent instanceof BallerinaParser.GlobalVariableDefinitionContext || ctx.parent.parent instanceof BallerinaParser.ReturnParameterContext) || ctx.parent.parent.parent.parent instanceof BallerinaParser.TypeDefinitionContext; boolean hasRestField = ctx.recordRestFieldDefinition() != null; boolean sealed = hasRestField ?
// NOTE(review): collapsed source preserved verbatim; comments only. The leading
// fragment completes the record-field-definition-list callback's ternary from the
// previous line.
//
// Callbacks for: record type construction, simple/user-defined/value/built-in
// reference/function type names (constraint types like map<T> dispatch on whether a
// nameReference or typeName child is present), annotation attachments, local variable
// definition statements, record literals and their key/value pairs (an Identifier key
// is turned into a simple variable reference), table literals (columns, PRIMARYKEY
// marking, data rows), array literals, "new" type-init expressions, and endpoint
// declarations. All delegate to this.pkgBuilder after the usual ctx.exception guard.
ctx.recordRestFieldDefinition().sealedLiteral() != null : false; this.pkgBuilder.addRecordType(getCurrentPos(ctx), getWS(ctx), isFieldAnalyseRequired, isAnonymous, sealed, hasRestField); } @Override public void exitSimpleTypeName(BallerinaParser.SimpleTypeNameContext ctx) { if (ctx.exception != null) { return; } if (ctx.referenceTypeName() != null || ctx.valueTypeName() != null) { return; } this.pkgBuilder.addValueType(getCurrentPos(ctx), getWS(ctx), ctx.getChild(0).getText()); } @Override public void exitUserDefineTypeName(BallerinaParser.UserDefineTypeNameContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addUserDefineType(getWS(ctx)); } @Override public void exitValueTypeName(BallerinaParser.ValueTypeNameContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addValueType(getCurrentPos(ctx), getWS(ctx), ctx.getText()); } @Override public void exitBuiltInReferenceTypeName(BallerinaParser.BuiltInReferenceTypeNameContext ctx) { if (ctx.exception != null) { return; } if (ctx.functionTypeName() != null) { return; } String typeName = ctx.getChild(0).getText(); if (ctx.nameReference() != null) { this.pkgBuilder.addConstraintType(getCurrentPos(ctx), getWS(ctx), typeName); } else if (ctx.typeName() != null) { this.pkgBuilder.addConstraintTypeWithTypeName(getCurrentPos(ctx), getWS(ctx), typeName); } else { this.pkgBuilder.addBuiltInReferenceType(getCurrentPos(ctx), getWS(ctx), typeName); } } @Override public void exitFunctionTypeName(BallerinaParser.FunctionTypeNameContext ctx) { if (ctx.exception != null) { return; } boolean paramsAvail = false, paramsTypeOnly = false, retParamAvail = false; if (ctx.parameterList() != null) { paramsAvail = ctx.parameterList().parameter().size() > 0; } else if (ctx.parameterTypeNameList() != null) { paramsAvail = ctx.parameterTypeNameList().parameterTypeName().size() > 0; paramsTypeOnly = true; } if (ctx.returnParameter() != null) { retParamAvail = true; }
this.pkgBuilder.addFunctionType(getCurrentPos(ctx), getWS(ctx), paramsAvail, retParamAvail); } /** * {@inheritDoc} */ @Override public void enterAnnotationAttachment(BallerinaParser.AnnotationAttachmentContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startAnnotationAttachment(getCurrentPos(ctx)); } /** * {@inheritDoc} */ @Override public void exitAnnotationAttachment(BallerinaParser.AnnotationAttachmentContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.setAnnotationAttachmentName(getWS(ctx), ctx.recordLiteral() != null, getCurrentPos(ctx), false); } @Override public void exitVariableDefinitionStatement(BallerinaParser.VariableDefinitionStatementContext ctx) { if (ctx.exception != null) { return; } boolean exprAvailable = ctx.ASSIGN() != null; this.pkgBuilder.addVariableDefStatement(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(), exprAvailable, false); } @Override public void enterRecordLiteral(BallerinaParser.RecordLiteralContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startMapStructLiteral(); } @Override public void exitRecordLiteral(BallerinaParser.RecordLiteralContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addMapStructLiteral(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitRecordKeyValue(BallerinaParser.RecordKeyValueContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addKeyValueRecord(getWS(ctx)); } @Override public void exitRecordKey(BallerinaParser.RecordKeyContext ctx) { if (ctx.exception != null) { return; } if (ctx.Identifier() != null) { DiagnosticPos pos = getCurrentPos(ctx); this.pkgBuilder.addNameReference(pos, getWS(ctx), null, ctx.Identifier().getText()); this.pkgBuilder.createSimpleVariableReference(pos, getWS(ctx)); } } @Override public void enterTableLiteral(BallerinaParser.TableLiteralContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startTableLiteral(); } @Override public void
exitTableColumn(BallerinaParser.TableColumnContext ctx) { if (ctx.exception != null) { return; } String columnName = ctx.getChild(0).getText(); boolean keyColumn = ctx.PRIMARYKEY() != null; if (keyColumn) { columnName = ctx.getChild(1).getText(); this.pkgBuilder.addTableColumn(columnName); this.pkgBuilder.markPrimaryKeyColumn(columnName); } else { this.pkgBuilder.addTableColumn(columnName); } } @Override public void exitTableDataList(BallerinaParser.TableDataListContext ctx) { if (ctx.exception != null) { return; } if (ctx.expressionList() != null) { this.pkgBuilder.endTableDataRow(); } } @Override public void exitTableData(BallerinaParser.TableDataContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.endTableDataList(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitTableLiteral(BallerinaParser.TableLiteralContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addTableLiteral(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitArrayLiteral(BallerinaParser.ArrayLiteralContext ctx) { if (ctx.exception != null) { return; } boolean argsAvailable = ctx.expressionList() != null; this.pkgBuilder.addArrayInitExpr(getCurrentPos(ctx), getWS(ctx), argsAvailable); } @Override public void exitTypeInitExpr(BallerinaParser.TypeInitExprContext ctx) { if (ctx.exception != null) { return; } String initName = ctx.NEW().getText(); boolean typeAvailable = ctx.userDefineTypeName() != null; boolean argsAvailable = ctx.invocationArgList() != null; this.pkgBuilder.addTypeInitExpression(getCurrentPos(ctx), getWS(ctx), initName, typeAvailable, argsAvailable); } @Override public void exitEndpointDeclaration(BallerinaParser.EndpointDeclarationContext ctx) { if (ctx.exception != null) { return; } String endpointName = ctx.Identifier().getText(); boolean isInitExprExist = ctx.endpointInitlization() != null; this.pkgBuilder.addEndpointDefinition(getCurrentPos(ctx), getWS(ctx), endpointName, isInitExprExist); } @Override public void
// NOTE(review): collapsed source preserved verbatim; comments only. The leading token
// completes the "@Override public void" fragment on the previous line.
//
// Callbacks for: channel/endpoint types, global endpoint visibility, assignment,
// tuple-destructuring, compound-assignment (the operator string is the compound token
// minus its trailing '='), post-increment statements, variable-reference lists (count
// derived from child count: children alternate expr/comma, hence childCount/2 + 1),
// if/else-if/else construction, match statements and pattern clauses, foreach,
// integer-range expressions, and while statements. All delegate to this.pkgBuilder
// after the ctx.exception guard.
exitChannelType(BallerinaParser.ChannelTypeContext ctx) { if (ctx.exception != null) { return; } String typeName = ctx.getChild(0).getText(); this.pkgBuilder.addConstraintTypeWithTypeName(getCurrentPos(ctx), getWS(ctx), typeName); } @Override public void exitEndpointType(BallerinaParser.EndpointTypeContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addEndpointType(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitGlobalEndpointDefinition(BallerinaParser.GlobalEndpointDefinitionContext ctx) { if (ctx.exception != null) { return; } if (KEYWORD_PUBLIC.equals(ctx.getChild(0).getText())) { this.pkgBuilder.markLastEndpointAsPublic(getWS(ctx)); } } /** * {@inheritDoc} */ @Override public void exitAssignmentStatement(BallerinaParser.AssignmentStatementContext ctx) { if (ctx.exception != null) { return; } boolean isVarDeclaration = false; if (ctx.VAR() != null) { isVarDeclaration = true; } this.pkgBuilder.addAssignmentStatement(getCurrentPos(ctx), getWS(ctx), isVarDeclaration); } @Override public void exitTupleDestructuringStatement(BallerinaParser.TupleDestructuringStatementContext ctx) { if (ctx.exception != null) { return; } boolean isVarDeclaration = false; boolean isVarExist = ctx.variableReferenceList() != null; if (ctx.VAR() != null) { isVarDeclaration = true; } this.pkgBuilder.addTupleDestructuringStatement(getCurrentPos(ctx), getWS(ctx), isVarExist, isVarDeclaration); } /** * {@inheritDoc} */ @Override public void exitCompoundAssignmentStatement(BallerinaParser.CompoundAssignmentStatementContext ctx) { if (ctx.exception != null) { return; } String compoundOperatorText = ctx.compoundOperator().getText(); String operator = compoundOperatorText.substring(0, compoundOperatorText.length() - 1); this.pkgBuilder.addCompoundAssignmentStatement(getCurrentPos(ctx), getWS(ctx), operator); } /** * {@inheritDoc} */ @Override public void exitCompoundOperator(BallerinaParser.CompoundOperatorContext ctx) { if (ctx.exception != null) { return; }
this.pkgBuilder.addCompoundOperator(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitPostIncrementStatement(BallerinaParser.PostIncrementStatementContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addPostIncrementStatement(getCurrentPos(ctx), getWS(ctx), ctx.postArithmeticOperator().getText().substring(0, 1)); } @Override public void enterVariableReferenceList(BallerinaParser.VariableReferenceListContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startExprNodeList(); } @Override public void exitVariableReferenceList(BallerinaParser.VariableReferenceListContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.endExprNodeList(getWS(ctx), ctx.getChildCount() / 2 + 1); } /** * {@inheritDoc} */ @Override public void enterIfElseStatement(BallerinaParser.IfElseStatementContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startIfElseNode(getCurrentPos(ctx)); } /** * {@inheritDoc} */ @Override public void exitIfElseStatement(BallerinaParser.IfElseStatementContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.endIfElseNode(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitIfClause(BallerinaParser.IfClauseContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addIfBlock(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void enterElseIfClause(BallerinaParser.ElseIfClauseContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startIfElseNode(getCurrentPos(ctx)); } /** * {@inheritDoc} */ @Override public void exitElseIfClause(BallerinaParser.ElseIfClauseContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addElseIfBlock(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void enterElseClause(BallerinaParser.ElseClauseContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startBlock(); } /** * {@inheritDoc} */ @Override public void
exitElseClause(BallerinaParser.ElseClauseContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addElseBlock(getCurrentPos(ctx), getWS(ctx)); } @Override public void enterMatchStatement(BallerinaParser.MatchStatementContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.createMatchNode(getCurrentPos(ctx)); } @Override public void exitMatchStatement(BallerinaParser.MatchStatementContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.completeMatchNode(getCurrentPos(ctx), getWS(ctx)); } @Override public void enterMatchPatternClause(BallerinaParser.MatchPatternClauseContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startMatchStmtPattern(); } @Override public void exitMatchPatternClause(BallerinaParser.MatchPatternClauseContext ctx) { if (ctx.exception != null) { return; } String identifier = ctx.Identifier() != null ? ctx.Identifier().getText() : null; this.pkgBuilder.addMatchStmtPattern(getCurrentPos(ctx), getWS(ctx), identifier); } @Override public void enterForeachStatement(BallerinaParser.ForeachStatementContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startForeachStatement(); } @Override public void exitForeachStatement(BallerinaParser.ForeachStatementContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addForeachStatement(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitIntRangeExpression(BallerinaParser.IntRangeExpressionContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addIntRangeExpression(getCurrentPos(ctx), getWS(ctx), ctx.LEFT_PARENTHESIS() == null, ctx.RIGHT_PARENTHESIS() == null, ctx.expression(1) == null); } /** * {@inheritDoc} */ @Override public void enterWhileStatement(BallerinaParser.WhileStatementContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startWhileStmt(); } /** * {@inheritDoc} */ @Override public void exitWhileStatement(BallerinaParser.WhileStatementContext ctx) { if (ctx.exception !=
// NOTE(review): collapsed source preserved verbatim; comments only. The leading
// fragment completes the while-statement callback's null check from the previous line.
//
// Callbacks for: while/continue/break, fork-join statements with join ("SOME"/"ALL")
// and timeout clauses (the SOME join count is parsed from an integer literal; a
// NumberFormatException from narrowing is silently ignored — presumably the count is
// left at 0 and diagnosed later, TODO confirm), try/catch/finally construction, throw,
// return, worker send/receive interactions (a send to "FORK" models invoking a fork),
// XML attribute references, simple variable references, function invocations, and
// field-based variable references (Identifier = single field, '*' = all fields; NOT
// signals safe navigation). Note exitJoinClause lacks the usual ctx.exception guard —
// possibly intentional, TODO verify.
null) { return; } this.pkgBuilder.addWhileStmt(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitContinueStatement(BallerinaParser.ContinueStatementContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addContinueStatement(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitBreakStatement(BallerinaParser.BreakStatementContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addBreakStatement(getCurrentPos(ctx), getWS(ctx)); } @Override public void enterForkJoinStatement(BallerinaParser.ForkJoinStatementContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startForkJoinStmt(); } @Override public void exitForkJoinStatement(BallerinaParser.ForkJoinStatementContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addForkJoinStmt(getCurrentPos(ctx), getWS(ctx)); } @Override public void enterJoinClause(BallerinaParser.JoinClauseContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startJoinCause(); } @Override public void exitJoinClause(BallerinaParser.JoinClauseContext ctx) { this.pkgBuilder.addJoinCause(this.getWS(ctx), ctx.Identifier().getText()); } @Override public void exitAnyJoinCondition(BallerinaParser.AnyJoinConditionContext ctx) { if (ctx.exception != null) { return; } List<String> workerNames = new ArrayList<>(); if (ctx.Identifier() != null) { workerNames = ctx.Identifier().stream().map(TerminalNode::getText).collect(Collectors.toList()); } int joinCount = 0; Object value; if ((value = getIntegerLiteral(ctx, ctx.integerLiteral())) != null) { if (value instanceof Long) { try { joinCount = ((Long) value).intValue(); } catch (NumberFormatException ex) { } } } this.pkgBuilder.addJoinCondition(getWS(ctx), "SOME", workerNames, joinCount); } @Override public void exitAllJoinCondition(BallerinaParser.AllJoinConditionContext ctx) { if (ctx.exception != null) { return; } List<String> workerNames = new ArrayList<>(); if
(ctx.Identifier() != null) { workerNames = ctx.Identifier().stream().map(TerminalNode::getText).collect(Collectors.toList()); } this.pkgBuilder.addJoinCondition(getWS(ctx), "ALL", workerNames, -1); } @Override public void enterTimeoutClause(BallerinaParser.TimeoutClauseContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startTimeoutCause(); } @Override public void exitTimeoutClause(BallerinaParser.TimeoutClauseContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addTimeoutCause(this.getWS(ctx), ctx.Identifier().getText()); } @Override public void enterTryCatchStatement(BallerinaParser.TryCatchStatementContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startTryCatchFinallyStmt(); } @Override public void exitTryCatchStatement(BallerinaParser.TryCatchStatementContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addTryCatchFinallyStmt(getCurrentPos(ctx), getWS(ctx)); } @Override public void enterCatchClauses(BallerinaParser.CatchClausesContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addTryClause(getCurrentPos(ctx)); } @Override public void enterCatchClause(BallerinaParser.CatchClauseContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startCatchClause(); } @Override public void exitCatchClause(BallerinaParser.CatchClauseContext ctx) { if (ctx.exception != null) { return; } String paramName = ctx.Identifier().getText(); this.pkgBuilder.addCatchClause(getCurrentPos(ctx), getWS(ctx), paramName); } @Override public void enterFinallyClause(BallerinaParser.FinallyClauseContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startFinallyBlock(); } @Override public void exitFinallyClause(BallerinaParser.FinallyClauseContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addFinallyBlock(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitThrowStatement(BallerinaParser.ThrowStatementContext ctx) { if (ctx.exception != null) { return;
} this.pkgBuilder.addThrowStmt(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitReturnStatement(BallerinaParser.ReturnStatementContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addReturnStatement(this.getCurrentPos(ctx), getWS(ctx), ctx.expression() != null); } @Override public void exitInvokeWorker(BallerinaParser.InvokeWorkerContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addWorkerSendStmt(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(), false, ctx .expression().size() > 1); } @Override public void exitInvokeFork(BallerinaParser.InvokeForkContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addWorkerSendStmt(getCurrentPos(ctx), getWS(ctx), "FORK", true, false); } @Override public void exitWorkerReply(BallerinaParser.WorkerReplyContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addWorkerReceiveStmt(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(), ctx .expression().size() > 1); } /** * {@inheritDoc} */ @Override public void exitXmlAttribVariableReference(BallerinaParser.XmlAttribVariableReferenceContext ctx) { boolean isSingleAttrRef = ctx.xmlAttrib().expression() != null; this.pkgBuilder.createXmlAttributesRefExpr(getCurrentPos(ctx), getWS(ctx), isSingleAttrRef); } @Override public void exitSimpleVariableReference(BallerinaParser.SimpleVariableReferenceContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.createSimpleVariableReference(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitFunctionInvocation(BallerinaParser.FunctionInvocationContext ctx) { if (ctx.exception != null) { return; } boolean argsAvailable = ctx.invocationArgList() != null; this.pkgBuilder.createFunctionInvocation(getCurrentPos(ctx), getWS(ctx), argsAvailable); } @Override public void exitFieldVariableReference(BallerinaParser.FieldVariableReferenceContext ctx) { if (ctx.exception != null) { return; } FieldContext field = ctx.field();
// NOTE(review): collapsed source preserved verbatim; comments only. The leading
// fragment completes the field-variable-reference callback from the previous line.
//
// Callbacks for: field-based access nodes (named field vs '*' all-fields, with safe
// navigation via NOT), index-based (map/array) access, reserved-word and identifier
// invocation starts, invocation references with argument lists, expression lists and
// expression statements, transaction statements and their property-init/onretry
// clauses, lock statements, abort/done/retry/retries statements, oncommit/onabort
// handlers, and XML namespace declarations (the quoted URI is stripped of its quotes
// and Java-unescaped; prefix is optional; isTopLevel distinguishes compilation-unit
// scope from local scope).
String fieldName; FieldKind fieldType; if (field.Identifier() != null) { fieldName = field.Identifier().getText(); fieldType = FieldKind.SINGLE; } else { fieldName = field.MUL().getText(); fieldType = FieldKind.ALL; } this.pkgBuilder.createFieldBasedAccessNode(getCurrentPos(ctx), getWS(ctx), fieldName, fieldType, ctx.field().NOT() != null); } @Override public void exitMapArrayVariableReference(BallerinaParser.MapArrayVariableReferenceContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.createIndexBasedAccessNode(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitReservedWord(BallerinaParser.ReservedWordContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startInvocationNode(getWS(ctx)); } @Override public void exitAnyIdentifierName(BallerinaParser.AnyIdentifierNameContext ctx) { if (ctx.exception != null) { return; } if (ctx.reservedWord() == null) { this.pkgBuilder.startInvocationNode(getWS(ctx)); } } @Override public void exitInvocationReference(BallerinaParser.InvocationReferenceContext ctx) { if (ctx.exception != null) { return; } boolean argsAvailable = ctx.invocation().invocationArgList() != null; String invocation = ctx.invocation().anyIdentifierName().getText(); boolean safeNavigate = ctx.invocation().NOT() != null; this.pkgBuilder.createInvocationNode(getCurrentPos(ctx), getWS(ctx), invocation, argsAvailable, safeNavigate); } /** * {@inheritDoc} */ @Override public void enterInvocationArgList(BallerinaParser.InvocationArgListContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startExprNodeList(); } /** * {@inheritDoc} */ @Override public void exitInvocationArgList(BallerinaParser.InvocationArgListContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.endExprNodeList(getWS(ctx), ctx.getChildCount() / 2 + 1); } public void enterExpressionList(BallerinaParser.ExpressionListContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startExprNodeList(); } @Override public
void exitExpressionList(BallerinaParser.ExpressionListContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.endExprNodeList(getWS(ctx), ctx.getChildCount() / 2 + 1); } @Override public void exitExpressionStmt(BallerinaParser.ExpressionStmtContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addExpressionStmt(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void enterTransactionStatement(BallerinaParser.TransactionStatementContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startTransactionStmt(); } /** * {@inheritDoc} */ @Override public void exitTransactionStatement(BallerinaParser.TransactionStatementContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.endTransactionStmt(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitTransactionClause(BallerinaParser.TransactionClauseContext ctx) { this.pkgBuilder.addTransactionBlock(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitTransactionPropertyInitStatementList( BallerinaParser.TransactionPropertyInitStatementListContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.endTransactionPropertyInitStatementList(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void enterLockStatement(BallerinaParser.LockStatementContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startLockStmt(); } /** * {@inheritDoc} */ @Override public void exitLockStatement(BallerinaParser.LockStatementContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addLockStmt(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void enterOnretryClause(BallerinaParser.OnretryClauseContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startOnretryBlock(); } /** * {@inheritDoc} */ @Override public void exitOnretryClause(BallerinaParser.OnretryClauseContext ctx) { if (ctx.exception != null) { return; }
this.pkgBuilder.addOnretryBlock(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitAbortStatement(BallerinaParser.AbortStatementContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addAbortStatement(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitDoneStatement(BallerinaParser.DoneStatementContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addDoneStatement(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitRetryStatement(BallerinaParser.RetryStatementContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addRetryStatement(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitRetriesStatement(BallerinaParser.RetriesStatementContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addRetryCountExpression(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitOncommitStatement(BallerinaParser.OncommitStatementContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addCommittedBlock(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitOnabortStatement(BallerinaParser.OnabortStatementContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addAbortedBlock(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void enterNamespaceDeclaration(BallerinaParser.NamespaceDeclarationContext ctx) { } @Override public void exitNamespaceDeclaration(BallerinaParser.NamespaceDeclarationContext ctx) { if (ctx.exception != null) { return; } boolean isTopLevel = ctx.parent instanceof BallerinaParser.CompilationUnitContext; String namespaceUri = ctx.QuotedStringLiteral().getText(); namespaceUri = namespaceUri.substring(1, namespaceUri.length() - 1); namespaceUri = StringEscapeUtils.unescapeJava(namespaceUri); String prefix = (ctx.Identifier() != null) ?
// NOTE(review): collapsed source preserved verbatim; comments only. The leading
// fragment completes the namespace-declaration callback's ternary from the previous
// line, and the trailing fragment is a method cut off at the chunk boundary
// (exitParameterTypeNameList) — left exactly as found.
//
// Callbacks for expression construction: binary div/mul/mod, or, equality, and,
// add/sub, bitwise, compare, and integer-range operators all funnel into
// createBinaryExpr with the operator token text (bitwise-shift concatenates its
// multi-token operator, e.g. ">" ">" ">" for unsigned shift — presumably because the
// lexer emits separate '>' tokens; TODO confirm). Also: type-access and
// type-conversion expressions, action invocations (START marks async), unary,
// braced-or-tuple (disambiguated later by expression count), ternary, and checked
// expressions; name references and function-name references split into optional
// package qualifier + identifier; return parameters for functions and lambdas; and
// the start of parameter-type-name list handling.
ctx.Identifier().getText() : null; this.pkgBuilder.addXMLNSDeclaration(getCurrentPos(ctx), getWS(ctx), namespaceUri, prefix, isTopLevel); } @Override public void exitBinaryDivMulModExpression(BallerinaParser.BinaryDivMulModExpressionContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText()); } @Override public void exitBinaryOrExpression(BallerinaParser.BinaryOrExpressionContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText()); } @Override public void exitBinaryEqualExpression(BallerinaParser.BinaryEqualExpressionContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText()); } /** * {@inheritDoc} */ @Override public void exitTypeAccessExpression(BallerinaParser.TypeAccessExpressionContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.createTypeAccessExpr(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitActionInvocation(BallerinaParser.ActionInvocationContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.createActionInvocationNode(getCurrentPos(ctx), getWS(ctx), ctx.START() != null); } @Override public void exitBinaryAndExpression(BallerinaParser.BinaryAndExpressionContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText()); } @Override public void exitBinaryAddSubExpression(BallerinaParser.BinaryAddSubExpressionContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText()); } @Override public void exitBitwiseExpression(BallerinaParser.BitwiseExpressionContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText()); }
@Override public void exitBitwiseShiftExpression(BallerinaParser.BitwiseShiftExpressionContext ctx) { if (ctx.exception != null) { return; } StringBuilder operator = new StringBuilder(); for (int i = 1; i < ctx.getChildCount() - 1; i++) { operator.append(ctx.getChild(i).getText()); } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), operator.toString()); } /** * {@inheritDoc} */ @Override public void exitTypeConversionExpression(BallerinaParser.TypeConversionExpressionContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.createTypeConversionExpr(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitBinaryCompareExpression(BallerinaParser.BinaryCompareExpressionContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText()); } @Override public void exitIntegerRangeExpression(BallerinaParser.IntegerRangeExpressionContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText()); } @Override public void exitUnaryExpression(BallerinaParser.UnaryExpressionContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.createUnaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(0).getText()); } @Override public void exitBracedOrTupleExpression(BallerinaParser.BracedOrTupleExpressionContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.createBracedOrTupleExpression(getCurrentPos(ctx), getWS(ctx), ctx.expression().size()); } /** * {@inheritDoc} */ @Override public void exitTernaryExpression(BallerinaParser.TernaryExpressionContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.createTernaryExpr(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitCheckedExpression(BallerinaParser.CheckedExpressionContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.createCheckedExpr(getCurrentPos(ctx), getWS(ctx)); } @Override
public void exitNameReference(BallerinaParser.NameReferenceContext ctx) { if (ctx.exception != null) { return; } if (ctx.Identifier().size() == 2) { String pkgName = ctx.Identifier(0).getText(); String name = ctx.Identifier(1).getText(); this.pkgBuilder.addNameReference(getCurrentPos(ctx), getWS(ctx), pkgName, name); } else { String name = ctx.Identifier(0).getText(); this.pkgBuilder.addNameReference(getCurrentPos(ctx), getWS(ctx), null, name); } } @Override public void exitFunctionNameReference(BallerinaParser.FunctionNameReferenceContext ctx) { if (ctx.exception != null) { return; } if (ctx.Identifier() != null) { String pkgName = ctx.Identifier().getText(); String name = ctx.anyIdentifierName().getText(); this.pkgBuilder.addNameReference(getCurrentPos(ctx), getWS(ctx), pkgName, name); } else { String name = ctx.anyIdentifierName().getText(); this.pkgBuilder.addNameReference(getCurrentPos(ctx), getWS(ctx), null, name); } } /** * {@inheritDoc} */ @Override public void exitReturnParameter(BallerinaParser.ReturnParameterContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addReturnParam(getCurrentPos(ctx), getWS(ctx), ctx.annotationAttachment().size()); } @Override public void exitLambdaReturnParameter(BallerinaParser.LambdaReturnParameterContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addReturnParam(getCurrentPos(ctx), getWS(ctx), ctx.annotationAttachment().size()); } @Override public void enterParameterTypeNameList(BallerinaParser.ParameterTypeNameListContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startVarList(); } /** * {@inheritDoc} */ @Override public void exitParameterTypeNameList(BallerinaParser.ParameterTypeNameListContext ctx) { if (ctx.exception != null) { return; } ParserRuleContext parent = ctx.getParent(); boolean inFuncTypeSig = parent instanceof BallerinaParser.FunctionTypeNameContext || parent instanceof BallerinaParser.ReturnParameterContext && parent.parent instanceof
BallerinaParser.FunctionTypeNameContext; if (inFuncTypeSig) { this.pkgBuilder.endFuncTypeParamList(getWS(ctx)); } else { this.pkgBuilder.endCallableParamList(getWS(ctx)); } } /** * {@inheritDoc} */ @Override public void exitParameterList(BallerinaParser.ParameterListContext ctx) { if (ctx.exception != null) { return; } ParserRuleContext parent = ctx.getParent(); boolean inFuncTypeSig = parent instanceof BallerinaParser.FunctionTypeNameContext || parent instanceof BallerinaParser.ReturnParameterContext && parent.parent instanceof BallerinaParser.FunctionTypeNameContext; if (inFuncTypeSig) { this.pkgBuilder.endFuncTypeParamList(getWS(ctx)); } else { this.pkgBuilder.endCallableParamList(getWS(ctx)); } } /** * {@inheritDoc} */ @Override /** * {@inheritDoc} */ @Override public void exitNamedArgs(BallerinaParser.NamedArgsContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addNamedArgument(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText()); } /** * {@inheritDoc} */ @Override public void exitRestArgs(BallerinaParser.RestArgsContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addRestArgument(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitXmlLiteral(BallerinaParser.XmlLiteralContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.attachXmlLiteralWS(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitComment(BallerinaParser.CommentContext ctx) { if (ctx.exception != null) { return; } Stack<String> stringFragments = getTemplateTextFragments(ctx.XMLCommentTemplateText()); String endingString = getTemplateEndingStr(ctx.XMLCommentText()); endingString = endingString.substring(0, endingString.length() - 3); this.pkgBuilder.createXMLCommentLiteral(getCurrentPos(ctx), getWS(ctx), stringFragments, endingString); if (ctx.getParent() instanceof BallerinaParser.ContentContext) { this.pkgBuilder.addChildToXMLElement(getWS(ctx)); } } /** * {@inheritDoc} */ @Override public void 
exitElement(BallerinaParser.ElementContext ctx) { if (ctx.exception != null) { return; } if (ctx.getParent() instanceof BallerinaParser.ContentContext) { this.pkgBuilder.addChildToXMLElement(getWS(ctx)); } } /** * {@inheritDoc} */ @Override public void exitStartTag(BallerinaParser.StartTagContext ctx) { if (ctx.exception != null) { return; } boolean isRoot = ctx.parent.parent instanceof BallerinaParser.XmlItemContext; this.pkgBuilder.startXMLElement(getCurrentPos(ctx), getWS(ctx), isRoot); } /** * {@inheritDoc} */ @Override public void exitCloseTag(BallerinaParser.CloseTagContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.endXMLElement(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitEmptyTag(BallerinaParser.EmptyTagContext ctx) { if (ctx.exception != null) { return; } boolean isRoot = ctx.parent.parent instanceof BallerinaParser.XmlItemContext; this.pkgBuilder.startXMLElement(getCurrentPos(ctx), getWS(ctx), isRoot); } /** * {@inheritDoc} */ @Override public void exitProcIns(BallerinaParser.ProcInsContext ctx) { if (ctx.exception != null) { return; } String targetQName = ctx.XML_TAG_SPECIAL_OPEN().getText(); targetQName = targetQName.substring(2, targetQName.length() - 1); Stack<String> textFragments = getTemplateTextFragments(ctx.XMLPITemplateText()); String endingText = getTemplateEndingStr(ctx.XMLPIText()); endingText = endingText.substring(0, endingText.length() - 2); this.pkgBuilder.createXMLPILiteral(getCurrentPos(ctx), getWS(ctx), targetQName, textFragments, endingText); if (ctx.getParent() instanceof BallerinaParser.ContentContext) { this.pkgBuilder.addChildToXMLElement(getWS(ctx)); } } /** * {@inheritDoc} */ @Override public void exitAttribute(BallerinaParser.AttributeContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.createXMLAttribute(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitText(BallerinaParser.TextContext ctx) { if (ctx.exception != null) { return; } 
Stack<String> textFragments = getTemplateTextFragments(ctx.XMLTemplateText()); String endingText = getTemplateEndingStr(ctx.XMLText()); if (ctx.getParent() instanceof BallerinaParser.ContentContext) { this.pkgBuilder.addXMLTextToElement(getCurrentPos(ctx), getWS(ctx), textFragments, endingText); } else { this.pkgBuilder.createXMLTextLiteral(getCurrentPos(ctx), getWS(ctx), textFragments, endingText); } } /** * {@inheritDoc} */ @Override public void exitXmlSingleQuotedString(BallerinaParser.XmlSingleQuotedStringContext ctx) { if (ctx.exception != null) { return; } Stack<String> stringFragments = getTemplateTextFragments(ctx.XMLSingleQuotedTemplateString()); String endingString = getTemplateEndingStr(ctx.XMLSingleQuotedString()); this.pkgBuilder.createXMLQuotedLiteral(getCurrentPos(ctx), getWS(ctx), stringFragments, endingString, QuoteType.SINGLE_QUOTE); } /** * {@inheritDoc} */ @Override public void exitXmlDoubleQuotedString(BallerinaParser.XmlDoubleQuotedStringContext ctx) { if (ctx.exception != null) { return; } Stack<String> stringFragments = getTemplateTextFragments(ctx.XMLDoubleQuotedTemplateString()); String endingString = getTemplateEndingStr(ctx.XMLDoubleQuotedString()); this.pkgBuilder.createXMLQuotedLiteral(getCurrentPos(ctx), getWS(ctx), stringFragments, endingString, QuoteType.DOUBLE_QUOTE); } /** * {@inheritDoc} */ @Override public void exitXmlQualifiedName(BallerinaParser.XmlQualifiedNameContext ctx) { if (ctx.exception != null) { return; } if (ctx.expression() != null) { return; } List<TerminalNode> qnames = ctx.XMLQName(); String prefix = null; String localname; if (qnames.size() > 1) { prefix = qnames.get(0).getText(); localname = qnames.get(1).getText(); } else { localname = qnames.get(0).getText(); } this.pkgBuilder.createXMLQName(getCurrentPos(ctx), getWS(ctx), localname, prefix); } /** * {@inheritDoc} */ @Override public void exitStringTemplateLiteral(BallerinaParser.StringTemplateLiteralContext ctx) { if (ctx.exception != null) { return; } 
Stack<String> stringFragments; String endingText = null; StringTemplateContentContext contentContext = ctx.stringTemplateContent(); if (contentContext != null) { stringFragments = getTemplateTextFragments(contentContext.StringTemplateExpressionStart()); endingText = getTemplateEndingStr(contentContext.StringTemplateText()); } else { stringFragments = new Stack<>(); } this.pkgBuilder.createStringTemplateLiteral(getCurrentPos(ctx), getWS(ctx), stringFragments, endingText); } /** * {@inheritDoc} */ @Override public void exitTableQueryExpression(BallerinaParser.TableQueryExpressionContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addTableQueryExpression(getCurrentPos(ctx), getWS(ctx)); } @Override public void enterOrderByClause(BallerinaParser.OrderByClauseContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startOrderByClauseNode(getCurrentPos(ctx)); } @Override public void exitOrderByClause(BallerinaParser.OrderByClauseContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.endOrderByClauseNode(getCurrentPos(ctx), getWS(ctx)); } @Override public void enterLimitClause(BallerinaParser.LimitClauseContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startLimitClauseNode(getCurrentPos(ctx)); } @Override public void exitLimitClause(BallerinaParser.LimitClauseContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.endLimitClauseNode(getCurrentPos(ctx), getWS(ctx), ctx.DecimalIntegerLiteral().getText()); } @Override public void enterOrderByVariable(BallerinaParser.OrderByVariableContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startOrderByVariableNode(getCurrentPos(ctx)); } @Override public void exitOrderByVariable(BallerinaParser.OrderByVariableContext ctx) { if (ctx.exception != null) { return; } boolean isAscending = ctx.orderByType() != null && ctx.orderByType().ASCENDING() != null; boolean isDescending = ctx.orderByType() != null && ctx.orderByType().DESCENDING() != 
null; this.pkgBuilder.endOrderByVariableNode(getCurrentPos(ctx), getWS(ctx), isAscending, isDescending); } @Override public void enterGroupByClause(BallerinaParser.GroupByClauseContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startGroupByClauseNode(getCurrentPos(ctx)); } @Override public void exitGroupByClause(BallerinaParser.GroupByClauseContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.endGroupByClauseNode(getCurrentPos(ctx), getWS(ctx)); } @Override public void enterHavingClause(BallerinaParser.HavingClauseContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startHavingClauseNode(getCurrentPos(ctx)); } @Override public void exitHavingClause(BallerinaParser.HavingClauseContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.endHavingClauseNode(getCurrentPos(ctx), getWS(ctx)); } @Override public void enterSelectExpression(BallerinaParser.SelectExpressionContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startSelectExpressionNode(getCurrentPos(ctx)); } @Override public void exitSelectExpression(BallerinaParser.SelectExpressionContext ctx) { if (ctx.exception != null) { return; } String identifier = ctx.Identifier() == null ? 
null : ctx.Identifier().getText(); this.pkgBuilder.endSelectExpressionNode(identifier, getCurrentPos(ctx), getWS(ctx)); } @Override public void enterSelectClause(BallerinaParser.SelectClauseContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startSelectClauseNode(getCurrentPos(ctx)); } @Override public void exitSelectClause(BallerinaParser.SelectClauseContext ctx) { if (ctx.exception != null) { return; } boolean isSelectAll = ctx.MUL() != null; boolean isGroupByClauseAvailable = ctx.groupByClause() != null; boolean isHavingClauseAvailable = ctx.havingClause() != null; this.pkgBuilder.endSelectClauseNode(isSelectAll, isGroupByClauseAvailable, isHavingClauseAvailable, getCurrentPos(ctx), getWS(ctx)); } @Override public void enterSelectExpressionList(BallerinaParser.SelectExpressionListContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startSelectExpressionList(); } @Override public void exitSelectExpressionList(BallerinaParser.SelectExpressionListContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.endSelectExpressionList(getWS(ctx), ctx.getChildCount() / 2 + 1); } @Override public void enterWhereClause(BallerinaParser.WhereClauseContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startWhereClauseNode(getCurrentPos(ctx)); } @Override public void exitWhereClause(BallerinaParser.WhereClauseContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.endWhereClauseNode(getCurrentPos(ctx), getWS(ctx)); } @Override public void enterSetAssignmentClause(BallerinaParser.SetAssignmentClauseContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startSetAssignmentClauseNode(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitSetAssignmentClause(BallerinaParser.SetAssignmentClauseContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.endSetAssignmentClauseNode(getCurrentPos(ctx), getWS(ctx)); } @Override public void 
enterSetClause(BallerinaParser.SetClauseContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startSetClauseNode(); } @Override public void exitSetClause(BallerinaParser.SetClauseContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.endSetClauseNode(getWS(ctx), ctx.getChildCount() / 2); } @Override public void enterStreamingAction(BallerinaParser.StreamingActionContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startStreamActionNode(getCurrentPos(ctx), diagnosticSrc.pkgID); } @Override public void exitStreamingAction(BallerinaParser.StreamingActionContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.endStreamActionNode(getCurrentPos(ctx), getWS(ctx)); } @Override public void enterPatternStreamingEdgeInput(BallerinaParser.PatternStreamingEdgeInputContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startPatternStreamingEdgeInputNode(getCurrentPos(ctx)); } @Override public void exitPatternStreamingEdgeInput(BallerinaParser.PatternStreamingEdgeInputContext ctx) { if (ctx.exception != null) { return; } String alias = ctx.Identifier() != null ? 
ctx.Identifier().getText() : null; this.pkgBuilder.endPatternStreamingEdgeInputNode(getCurrentPos(ctx), getWS(ctx), alias); } @Override public void enterWindowClause(BallerinaParser.WindowClauseContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startWindowClauseNode(getCurrentPos(ctx)); } @Override public void exitWindowClause(BallerinaParser.WindowClauseContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.endWindowsClauseNode(getCurrentPos(ctx), getWS(ctx)); } @Override public void enterWithinClause(BallerinaParser.WithinClauseContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startWithinClause(getCurrentPos(ctx)); } @Override public void exitWithinClause(BallerinaParser.WithinClauseContext ctx) { if (ctx.exception != null) { return; } String timeScale = null; String timeDurationValue = null; if (ctx.timeScale() != null) { timeScale = ctx.timeScale().getText(); timeDurationValue = ctx.DecimalIntegerLiteral().getText(); } this.pkgBuilder.endWithinClause(getCurrentPos(ctx), getWS(ctx), timeDurationValue, timeScale); } @Override public void enterPatternClause(BallerinaParser.PatternClauseContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startPatternClause(getCurrentPos(ctx)); } @Override public void exitPatternClause(BallerinaParser.PatternClauseContext ctx) { if (ctx.exception != null) { return; } boolean isForAllEvents = ctx.EVERY() != null; boolean isWithinClauseAvailable = ctx.withinClause() != null; this.pkgBuilder.endPatternClause(isForAllEvents, isWithinClauseAvailable, getCurrentPos(ctx), getWS(ctx)); } @Override public void enterPatternStreamingInput(BallerinaParser.PatternStreamingInputContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startPatternStreamingInputNode(getCurrentPos(ctx)); } @Override public void exitPatternStreamingInput(BallerinaParser.PatternStreamingInputContext ctx) { if (ctx.exception != null) { return; } boolean followedByAvailable = 
ctx.FOLLOWED() != null && ctx.BY() != null; boolean enclosedInParenthesis = ctx.LEFT_PARENTHESIS() != null && ctx.RIGHT_PARENTHESIS() != null; boolean andWithNotAvailable = ctx.NOT() != null && ctx.AND() != null; boolean forWithNotAvailable = ctx.timeScale() != null; boolean onlyAndAvailable = ctx.AND() != null && ctx.NOT() == null && ctx.FOR() == null; boolean onlyOrAvailable = ctx.OR() != null && ctx.NOT() == null && ctx.FOR() == null; boolean commaSeparated = ctx.COMMA() != null; String timeScale = null; String timeDurationValue = null; if (ctx.timeScale() != null) { timeScale = ctx.timeScale().getText(); timeDurationValue = ctx.DecimalIntegerLiteral().getText(); } this.pkgBuilder.endPatternStreamingInputNode(getCurrentPos(ctx), getWS(ctx), followedByAvailable, enclosedInParenthesis, andWithNotAvailable, forWithNotAvailable, onlyAndAvailable, onlyOrAvailable, commaSeparated, timeDurationValue, timeScale); } @Override public void enterStreamingInput(BallerinaParser.StreamingInputContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startStreamingInputNode(getCurrentPos(ctx)); } @Override public void exitStreamingInput(BallerinaParser.StreamingInputContext ctx) { if (ctx.exception != null) { return; } String alias = null; if (ctx.alias != null) { alias = ctx.alias.getText(); } this.pkgBuilder.endStreamingInputNode(alias, getCurrentPos(ctx), getWS(ctx)); } @Override public void enterJoinStreamingInput(BallerinaParser.JoinStreamingInputContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startJoinStreamingInputNode(getCurrentPos(ctx)); } @Override public void exitJoinStreamingInput(BallerinaParser.JoinStreamingInputContext ctx) { if (ctx.exception != null) { return; } boolean unidirectionalJoin = ctx.UNIDIRECTIONAL() != null; if (!unidirectionalJoin) { String joinType = (ctx).children.get(0).getText(); this.pkgBuilder.endJoinStreamingInputNode(getCurrentPos(ctx), getWS(ctx), false, false, joinType); } else { if 
(ctx.getChild(0).getText().equals("unidirectional")) { String joinType = (ctx).children.get(1).getText(); this.pkgBuilder.endJoinStreamingInputNode(getCurrentPos(ctx), getWS(ctx), true, false, joinType); } else { String joinType = (ctx).children.get(0).getText(); this.pkgBuilder.endJoinStreamingInputNode(getCurrentPos(ctx), getWS(ctx), false, true, joinType); } } } /** * {@inheritDoc} */ @Override public void exitJoinType(BallerinaParser.JoinTypeContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.endJoinType(getWS(ctx)); } @Override public void enterOutputRateLimit(BallerinaParser.OutputRateLimitContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startOutputRateLimitNode(getCurrentPos(ctx)); } @Override public void exitOutputRateLimit(BallerinaParser.OutputRateLimitContext ctx) { if (ctx.exception != null) { return; } boolean isSnapshotOutputRateLimit = false; boolean isFirst = false; boolean isLast = false; boolean isAll = false; if (ctx.SNAPSHOT() != null) { isSnapshotOutputRateLimit = true; } else { if (ctx.LAST() != null) { isLast = true; } else if (ctx.FIRST() != null) { isFirst = true; } else if (ctx.LAST() != null) { isAll = true; } } String timeScale = null; if (ctx.timeScale() != null) { timeScale = ctx.timeScale().getText(); } this.pkgBuilder.endOutputRateLimitNode(getCurrentPos(ctx), getWS(ctx), isSnapshotOutputRateLimit, isFirst, isLast, isAll, timeScale, ctx.DecimalIntegerLiteral().getText()); } @Override public void enterTableQuery(BallerinaParser.TableQueryContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startTableQueryNode(getCurrentPos(ctx)); } @Override public void exitTableQuery(BallerinaParser.TableQueryContext ctx) { if (ctx.exception != null) { return; } boolean isSelectClauseAvailable = ctx.selectClause() != null; boolean isOrderByClauseAvailable = ctx.orderByClause() != null; boolean isJoinClauseAvailable = ctx.joinStreamingInput() != null; boolean isLimitClauseAvailable = 
ctx.limitClause() != null; this.pkgBuilder.endTableQueryNode(isJoinClauseAvailable, isSelectClauseAvailable, isOrderByClauseAvailable, isLimitClauseAvailable, getCurrentPos(ctx), getWS(ctx)); } @Override public void enterStreamingQueryStatement(BallerinaParser.StreamingQueryStatementContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startStreamingQueryStatementNode(getCurrentPos(ctx)); } @Override public void exitStreamingQueryStatement(BallerinaParser.StreamingQueryStatementContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.endStreamingQueryStatementNode(getCurrentPos(ctx), getWS(ctx)); } @Override public void enterForeverStatement(BallerinaParser.ForeverStatementContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startForeverNode(getCurrentPos(ctx)); } @Override public void exitForeverStatement(BallerinaParser.ForeverStatementContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.endForeverNode(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void enterDocumentationString(BallerinaParser.DocumentationStringContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startMarkdownDocumentationString(getCurrentPos(ctx)); } /** * {@inheritDoc} */ @Override public void exitDocumentationString(BallerinaParser.DocumentationStringContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.endMarkdownDocumentationString(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitDocumentationLine(BallerinaParser.DocumentationLineContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.endMarkDownDocumentLine(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitDocumentationContent(BallerinaParser.DocumentationContentContext ctx) { if (ctx.exception != null) { return; } String text = ctx.getText() != null ? 
ctx.getText() : ""; this.pkgBuilder.endMarkdownDocumentationText(getCurrentPos(ctx), getWS(ctx), text); } /** * {@inheritDoc} */ @Override public void exitParameterDocumentationLine(BallerinaParser.ParameterDocumentationLineContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.endParameterDocumentationLine(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitParameterDocumentation(BallerinaParser.ParameterDocumentationContext ctx) { if (ctx.exception != null) { return; } String parameterName = ctx.docParameterName() != null ? ctx.docParameterName().getText() : ""; String description = ctx.documentationText() != null ? ctx.documentationText().getText() : ""; this.pkgBuilder.endParameterDocumentation(getCurrentPos(ctx.docParameterName()), getWS(ctx), parameterName, description); } /** * {@inheritDoc} */ @Override public void exitParameterDescriptionLine(BallerinaParser.ParameterDescriptionLineContext ctx) { if (ctx.exception != null) { return; } String description = ctx.documentationText() != null ? ctx.documentationText().getText() : ""; this.pkgBuilder.endParameterDocumentationDescription(getWS(ctx), description); } /** * {@inheritDoc} */ @Override public void exitReturnParameterDocumentation(BallerinaParser.ReturnParameterDocumentationContext ctx) { if (ctx.exception != null) { return; } String description = ctx.documentationText() != null ? ctx.documentationText().getText() : ""; this.pkgBuilder.endReturnParameterDocumentation(getCurrentPos(ctx.getParent()), getWS(ctx), description); } /** * {@inheritDoc} */ @Override public void exitReturnParameterDescriptionLine(BallerinaParser.ReturnParameterDescriptionLineContext ctx) { if (ctx.exception != null) { return; } String description = ctx.documentationText() != null ? 
ctx.documentationText().getText() : ""; this.pkgBuilder.endReturnParameterDocumentationDescription(getWS(ctx), description); } /** * {@inheritDoc} */ @Override public void exitDeprecatedAttachment(BallerinaParser.DeprecatedAttachmentContext ctx) { if (ctx.exception != null) { return; } String contentText = ctx.deprecatedText() != null ? ctx.deprecatedText().getText() : ""; this.pkgBuilder.createDeprecatedNode(getCurrentPos(ctx), getWS(ctx), contentText); } /** * {@inheritDoc} */ @Override public void exitAwaitExpr(BallerinaParser.AwaitExprContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.createAwaitExpr(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitVariableReferenceExpression(BallerinaParser.VariableReferenceExpressionContext ctx) { if (ctx.exception != null) { return; } if (ctx.START() != null) { this.pkgBuilder.markLastInvocationAsAsync(getCurrentPos(ctx)); } } /** * {@inheritDoc} */ @Override public void enterMatchExpression(BallerinaParser.MatchExpressionContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startMatchExpression(); } /** * {@inheritDoc} */ @Override public void exitMatchExpression(BallerinaParser.MatchExpressionContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.endMatchExpression(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitMatchExpressionPatternClause(BallerinaParser.MatchExpressionPatternClauseContext ctx) { if (ctx.exception != null) { return; } String identifier = ctx.Identifier() != null ? 
ctx.Identifier().getText() : null; this.pkgBuilder.addMatchExprPattern(getCurrentPos(ctx), getWS(ctx), identifier); } /** * {@inheritDoc} */ @Override public void exitElvisExpression(BallerinaParser.ElvisExpressionContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.createElvisExpr(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void enterScopeStatement(BallerinaParser.ScopeStatementContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startScopeStmt(); } /** * {@inheritDoc} */ @Override public void exitScopeStatement(BallerinaParser.ScopeStatementContext ctx) { if (ctx.exception != null) { return; } String name = null; if (ctx.scopeClause().Identifier() != null) { name = ctx.scopeClause().Identifier().getText(); } BLangIdentifier identifier = new BLangIdentifier(); identifier.setValue(name); this.pkgBuilder.endScopeStmt(getCurrentPos(ctx), getWS(ctx), identifier, getFunctionDefinition(ctx)); } @Override public void exitScopeClause(BallerinaParser.ScopeClauseContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.addScopeBlock(getCurrentPos(ctx)); } /** * {@inheritDoc} */ @Override public void enterCompensationClause(BallerinaParser.CompensationClauseContext ctx) { if (ctx.exception != null) { return; } this.pkgBuilder.startOnCompensationBlock(); } @Override public void exitCompensateStatement(BallerinaParser.CompensateStatementContext ctx) { if (ctx.exception != null) { return; } String scope = null; if (ctx.Identifier() != null) { scope = ctx.Identifier().getText(); } this.pkgBuilder.addCompensateStatement(getCurrentPos(ctx), getWS(ctx), scope); } private DiagnosticPos getCurrentPos(ParserRuleContext ctx) { int startLine = ctx.getStart().getLine(); int startCol = ctx.getStart().getCharPositionInLine() + 1; int endLine = -1; int endCol = -1; Token stop = ctx.getStop(); if (stop != null) { endLine = stop.getLine(); endCol = stop.getCharPositionInLine() + 1; } return new 
DiagnosticPos(diagnosticSrc, startLine, endLine, startCol, endCol); } private DiagnosticPos getCurrentPosFromIdentifier(TerminalNode node) { Token symbol = node.getSymbol(); int startLine = symbol.getLine(); int startCol = symbol.getCharPositionInLine() + 1; int endLine = startLine; int endCol = startCol + symbol.getText().length(); return new DiagnosticPos(diagnosticSrc, startLine, endLine, startCol, endCol); } protected Set<Whitespace> getWS(ParserRuleContext ctx) { return null; } private Stack<String> getTemplateTextFragments(List<TerminalNode> nodes) { Stack<String> templateStrFragments = new Stack<>(); nodes.forEach(node -> { if (node == null) { templateStrFragments.push(null); } else { String str = node.getText(); templateStrFragments.push(str.substring(0, str.length() - 2)); } }); return templateStrFragments; } private String getTemplateEndingStr(TerminalNode node) { return node == null ? null : node.getText(); } private String getNodeValue(ParserRuleContext ctx, TerminalNode node) { String op = ctx.getChild(0).getText(); String value = node.getText(); if (op != null && "-".equals(op)) { value = "-" + value; } return value; } private String getHexNodeValue(ParserRuleContext ctx, TerminalNode node) { String value = getNodeValue(ctx, node); if (!(value.contains("p") || value.contains("P"))) { value = value + "p0"; } return value; } private Object getIntegerLiteral(ParserRuleContext simpleLiteralContext, BallerinaParser.IntegerLiteralContext integerLiteralContext) { if (integerLiteralContext.DecimalIntegerLiteral() != null) { String nodeValue = getNodeValue(simpleLiteralContext, integerLiteralContext.DecimalIntegerLiteral()); return parseLong(simpleLiteralContext, nodeValue, nodeValue, 10, DiagnosticCode.INTEGER_TOO_SMALL, DiagnosticCode.INTEGER_TOO_LARGE); } else if (integerLiteralContext.HexIntegerLiteral() != null) { String nodeValue = getNodeValue(simpleLiteralContext, integerLiteralContext.HexIntegerLiteral()); String processedNodeValue = 
nodeValue.toLowerCase().replace("0x", ""); return parseLong(simpleLiteralContext, nodeValue, processedNodeValue, 16, DiagnosticCode.HEXADECIMAL_TOO_SMALL, DiagnosticCode.HEXADECIMAL_TOO_LARGE); } else if (integerLiteralContext.BinaryIntegerLiteral() != null) { String nodeValue = getNodeValue(simpleLiteralContext, integerLiteralContext.BinaryIntegerLiteral()); String processedNodeValue = nodeValue.toLowerCase().replace("0b", ""); return parseLong(simpleLiteralContext, nodeValue, processedNodeValue, 2, DiagnosticCode.BINARY_TOO_SMALL, DiagnosticCode.BINARY_TOO_LARGE); } return null; } private BLangLambdaFunction getFunctionDefinition(BallerinaParser.ScopeStatementContext ctx) { boolean bodyExists = ctx.compensationClause().callableUnitBody() != null; return this.pkgBuilder.getScopesFunctionDef(getCurrentPos(ctx), getWS(ctx), bodyExists, ctx.scopeClause().Identifier().getText()); } private Object parseLong(ParserRuleContext context, String originalNodeValue, String processedNodeValue, int radix, DiagnosticCode code1, DiagnosticCode code2) { try { return Long.parseLong(processedNodeValue, radix); } catch (Exception e) { DiagnosticPos pos = getCurrentPos(context); Set<Whitespace> ws = getWS(context); if (originalNodeValue.startsWith("-")) { dlog.error(pos, code1, originalNodeValue); } else { dlog.error(pos, code2, originalNodeValue); } } return originalNodeValue; } }
Please use `null == name` and `0 == name.length()`
public ShardingSphereSavepoint(final String name) throws SQLException { if (name == null || name.length() == 0) { throw new SQLException("Savepoint name can not be NULL or empty"); } this.savepointName = name; }
if (name == null || name.length() == 0) {
public ShardingSphereSavepoint(final String name) throws SQLException { if (null == name || 0 == name.length()) { throw new SQLException("Savepoint name can not be NULL or empty"); } savepointName = name; }
class ShardingSphereSavepoint implements Savepoint { private final String savepointName; public ShardingSphereSavepoint() { this.savepointName = getUniqueId(); } @Override public int getSavepointId() throws SQLException { throw new SQLException("Only named savepoint are supported."); } @Override public String getSavepointName() { return savepointName; } private static String getUniqueId() { String uidStr = new UID().toString(); int uidLength = uidStr.length(); StringBuilder safeString = new StringBuilder(uidLength + 1); safeString.append('_'); for (int i = 0; i < uidLength; i++) { char c = uidStr.charAt(i); if (Character.isLetter(c) || Character.isDigit(c)) { safeString.append(c); } else { safeString.append('_'); } } return safeString.toString(); } }
class ShardingSphereSavepoint implements Savepoint { private final String savepointName; public ShardingSphereSavepoint() { savepointName = getUniqueId(); } @Override public int getSavepointId() throws SQLException { throw new SQLException("Only named savepoint are supported."); } @Override public String getSavepointName() { return savepointName; } private static String getUniqueId() { String uidStr = new UID().toString(); int uidLength = uidStr.length(); StringBuilder safeString = new StringBuilder(uidLength + 1); safeString.append('_'); for (int i = 0; i < uidLength; i++) { char c = uidStr.charAt(i); if (Character.isLetter(c) || Character.isDigit(c)) { safeString.append(c); } else { safeString.append('_'); } } return safeString.toString(); } }
De-serialization of polymorphic types are handled using Jackson annotations. See this models https://github.com/Azure/azure-sdk-for-java/blob/e6771250bb50c5b38a301afed06afb88c6737a08/eventgrid/data-plane/src/test/java/com/microsoft/azure/eventgrid/customization/models/ShippingInfo.java#L15
public EventGridSubscriber() { this.defaultSerializerAdapter = new AzureJacksonAdapter(); this.eventTypeToEventDataMapping = new HashMap<>(); }
this.defaultSerializerAdapter = new AzureJacksonAdapter();
public EventGridSubscriber() { this.defaultSerializerAdapter = new AzureJacksonAdapter(); this.eventTypeToEventDataMapping = new HashMap<>(); }
class EventGridSubscriber { /** * The default adapter for to be used for de-serializing the events */ private final AzureJacksonAdapter defaultSerializerAdapter; /** * The map containing user defined mapping of eventType to Java model type */ private Map<String, Type> eventTypeToEventDataMapping; @Beta /** * Add a custom event mapping. If a mapping with same eventType exists then the old eventDataType is replaced by * the specified eventDataType * * @param eventType the event type name * @param eventDataType type of the Java model that the event type name mapped to */ @Beta public void putCustomEventMapping(String eventType, Type eventDataType) { if (eventType == null || eventType.isEmpty()) { throw new IllegalArgumentException("eventType parameter is required and cannot be null or empty"); } if (eventDataType == null) { throw new IllegalArgumentException("eventDataType parameter is required and cannot be null"); } this.eventTypeToEventDataMapping.put(eventType.toLowerCase(), eventDataType); } /** * Get type of the Java model that is mapped to the given eventType. * * @param eventType the event type name * @return type of the Java model id mapping exists, null otherwise */ @Beta public Type getCustomEventMapping(String eventType) { if (eventType == null || eventType.isEmpty()) { throw new IllegalArgumentException("eventType parameter is required and cannot be null or empty"); } if (!this.eventTypeToEventDataMapping.containsKey(eventType.toLowerCase())) { return null; } return this.eventTypeToEventDataMapping.get(eventType.toLowerCase()); } /** * Removes the mapping with the given eventType. 
* * @param eventType the event type name * @return true if the mapping exists and removed, false if mapping does not exists */ @Beta public boolean removeCustomEventMapping(String eventType) { if (eventType == null || eventType.isEmpty()) { throw new IllegalArgumentException("eventType parameter is required and cannot be null or empty"); } if (!this.eventTypeToEventDataMapping.containsKey(eventType.toLowerCase())) { return false; } this.eventTypeToEventDataMapping.remove(eventType.toLowerCase()); return true; } /** * Checks an event mapping with the given eventType exists. * * @param eventType the event type name * @return true if the mapping exists, false otherwise */ @Beta public boolean containsEventMappingFor(String eventType) { if (eventType == null || eventType.isEmpty()) { throw new IllegalArgumentException("eventType parameter is required and cannot be null or empty"); } return this.eventTypeToEventDataMapping.containsKey(eventType.toLowerCase()); } /** * De-serialize the events in the given requested content using default de-serializer. * * @param requestContent the request content in string format * @return De-serialized events. * * @throws IOException */ @Beta public EventGridEvent[] DeserializeEventGridEvents(final String requestContent) throws IOException { return this.DeserializeEventGridEvents(requestContent, this.defaultSerializerAdapter); } /** * De-serialize the events in the given requested content using the provided de-serializer. * * @param requestContent the request content in string format * @param serializerAdapter the de-serializer * @return e-serialized events. 
* @throws IOException */ @Beta public EventGridEvent[] DeserializeEventGridEvents(final String requestContent, final SerializerAdapter<ObjectMapper> serializerAdapter) throws IOException { EventGridEvent[] eventGridEvents = serializerAdapter.<EventGridEvent[]>deserialize(requestContent, EventGridEvent[].class); for (EventGridEvent receivedEvent : eventGridEvents) { if (receivedEvent.data() == null) { continue; } else { final String dataStr = serializerAdapter.serializeRaw(receivedEvent.data()); final String eventType = receivedEvent.eventType(); if (SystemEventTypeMappings.containsMappingFor(eventType)) { final Object eventData = serializerAdapter.<Object>deserialize(dataStr, SystemEventTypeMappings.getMapping(eventType)); setEventData(receivedEvent, eventData); } else if (containsEventMappingFor(eventType)) { final Object eventData = serializerAdapter.<Object>deserialize(dataStr, getCustomEventMapping(eventType)); setEventData(receivedEvent, eventData); } } } return eventGridEvents; } private void setEventData(EventGridEvent event, Object data) { try { Field dataField = event.getClass().getDeclaredField("data"); dataField.setAccessible(true); dataField.set(event, data); } catch (NoSuchFieldException nsfe) { throw new RuntimeException(nsfe); } catch (IllegalAccessException iae) { throw new RuntimeException(iae); } } }
class EventGridSubscriber { /** * The default adapter to be used for de-serializing the events. */ private final AzureJacksonAdapter defaultSerializerAdapter; /** * The map containing user defined mapping of eventType to Java model type. */ private Map<String, Type> eventTypeToEventDataMapping; /** * Creates EventGridSubscriber with default de-serializer. */ @Beta /** * Add a custom event mapping. If a mapping with same eventType exists then the old eventDataType is replaced by * the specified eventDataType. * * @param eventType the event type name. * @param eventDataType type of the Java model that the event type name mapped to. */ @Beta public void putCustomEventMapping(final String eventType, final Type eventDataType) { if (eventType == null || eventType.isEmpty()) { throw new IllegalArgumentException("eventType parameter is required and cannot be null or empty"); } if (eventDataType == null) { throw new IllegalArgumentException("eventDataType parameter is required and cannot be null"); } this.eventTypeToEventDataMapping.put(canonicalizeEventType(eventType), eventDataType); } /** * Get type of the Java model that is mapped to the given eventType. * * @param eventType the event type name. * @return type of the Java model if mapping exists, null otherwise. */ @Beta public Type getCustomEventMapping(final String eventType) { if (!containsCustomEventMappingFor(eventType)) { return null; } else { return this.eventTypeToEventDataMapping.get(canonicalizeEventType(eventType)); } } /** * @return get all registered custom event mappings. */ @Beta public Set<Map.Entry<String, Type>> getAllCustomEventMappings() { return Collections.unmodifiableSet(this.eventTypeToEventDataMapping.entrySet()); } /** * Removes the mapping with the given eventType. * * @param eventType the event type name. * @return true if the mapping exists and removed, false if mapping does not exists. 
*/ @Beta public boolean removeCustomEventMapping(final String eventType) { if (!containsCustomEventMappingFor(eventType)) { return false; } else { this.eventTypeToEventDataMapping.remove(canonicalizeEventType(eventType)); return true; } } /** * Checks if an event mapping with the given eventType exists. * * @param eventType the event type name. * @return true if the mapping exists, false otherwise. */ @Beta public boolean containsCustomEventMappingFor(final String eventType) { if (eventType == null || eventType.isEmpty()) { return false; } else { return this.eventTypeToEventDataMapping.containsKey(canonicalizeEventType(eventType)); } } /** * De-serialize the events in the given requested content using default de-serializer. * * @param requestContent the request content in string format. * @return De-serialized events. * * @throws IOException */ @Beta public EventGridEvent[] deserializeEventGridEvents(final String requestContent) throws IOException { return this.deserializeEventGridEvents(requestContent, this.defaultSerializerAdapter); } /** * De-serialize the events in the given requested content using the provided de-serializer. * * @param requestContent the request content as string. * @param serializerAdapter the de-serializer. * @return de-serialized events. 
* @throws IOException */ @Beta public EventGridEvent[] deserializeEventGridEvents(final String requestContent, final SerializerAdapter<ObjectMapper> serializerAdapter) throws IOException { EventGridEvent[] eventGridEvents = serializerAdapter.<EventGridEvent[]>deserialize(requestContent, EventGridEvent[].class); for (EventGridEvent receivedEvent : eventGridEvents) { if (receivedEvent.data() == null) { continue; } else { final String eventType = receivedEvent.eventType(); final Type eventDataType; if (SystemEventTypeMappings.containsMappingFor(eventType)) { eventDataType = SystemEventTypeMappings.getMapping(eventType); } else if (containsCustomEventMappingFor(eventType)) { eventDataType = getCustomEventMapping(eventType); } else { eventDataType = null; } if (eventDataType != null) { final String eventDataAsString = serializerAdapter.serializeRaw(receivedEvent.data()); final Object eventData = serializerAdapter.<Object>deserialize(eventDataAsString, eventDataType); setEventData(receivedEvent, eventData); } } } return eventGridEvents; } private static void setEventData(EventGridEvent event, final Object data) { try { Field dataField = event.getClass().getDeclaredField("data"); dataField.setAccessible(true); dataField.set(event, data); } catch (NoSuchFieldException nsfe) { throw new RuntimeException(nsfe); } catch (IllegalAccessException iae) { throw new RuntimeException(iae); } } private static String canonicalizeEventType(final String eventType) { if (eventType == null) { return null; } else { return eventType.toLowerCase(); } } }
> IOException may contian sensitive message. The old logic seems doesn't want to return it to client. Maybe it's reasonable? got it. make sense
protected void handleQuery() { MetricRepo.COUNTER_REQUEST_ALL.increase(1L); ctx.getAuditEventBuilder().reset(); ctx.getAuditEventBuilder() .setTimestamp(System.currentTimeMillis()) .setClientIp(ctx.getRemoteIP()) .setUser(ctx.getQualifiedUser()) .setAuthorizedUser( ctx.getCurrentUserIdentity() == null ? "null" : ctx.getCurrentUserIdentity().toString()) .setDb(ctx.getDatabase()) .setCatalog(ctx.getCurrentCatalog()); Tracers.register(ctx); StatementBase parsedStmt = ((HttpConnectContext) ctx).getStatement(); String sql = parsedStmt.getOrigStmt().originStmt; addRunningQueryDetail(parsedStmt); executor = new StmtExecutor(ctx, parsedStmt); ctx.setExecutor(executor); ctx.setIsLastStmt(true); if (executor.isForwardToLeader()) { LOG.warn("non-master FE can not read, forward HTTP request to master"); ((HttpConnectContext) ctx).setForwardToLeader(true); return; } try { executor.execute(); } catch (IOException e) { LOG.warn("Process one query failed because IOException: ", e); ctx.getState().setError("StarRocks process failed"); ctx.getState().setErrType(QueryState.ErrType.IO_ERR); } catch (UserException e) { LOG.warn("Process one query failed. SQL: " + sql + ", because.", e); ctx.getState().setError(e.getMessage()); ctx.getState().setErrType(QueryState.ErrType.ANALYSIS_ERR); } catch (Throwable e) { LOG.warn("Process one query failed. SQL: " + sql + ", because unknown reason: ", e); ctx.getState().setError("Unexpected exception: " + e.getMessage()); if (parsedStmt instanceof KillStmt) { ctx.getState().setErrType(QueryState.ErrType.IGNORE_ERR); } else { ctx.getState().setErrType(QueryState.ErrType.INTERNAL_ERR); } } finally { Tracers.close(); } if (executor != null) { auditAfterExec(sql, executor.getParsedStmt(), executor.getQueryStatisticsForAuditLog()); } else { auditAfterExec(sql, null, null); } addFinishedQueryDetail(); }
ctx.getState().setError("StarRocks process failed");
protected void handleQuery() { MetricRepo.COUNTER_REQUEST_ALL.increase(1L); ctx.getAuditEventBuilder().reset(); ctx.getAuditEventBuilder() .setTimestamp(System.currentTimeMillis()) .setClientIp(ctx.getRemoteIP()) .setUser(ctx.getQualifiedUser()) .setAuthorizedUser( ctx.getCurrentUserIdentity() == null ? "null" : ctx.getCurrentUserIdentity().toString()) .setDb(ctx.getDatabase()) .setCatalog(ctx.getCurrentCatalog()); Tracers.register(ctx); StatementBase parsedStmt = ((HttpConnectContext) ctx).getStatement(); String sql = parsedStmt.getOrigStmt().originStmt; addRunningQueryDetail(parsedStmt); executor = new StmtExecutor(ctx, parsedStmt); ctx.setExecutor(executor); ctx.setIsLastStmt(true); if (executor.isForwardToLeader()) { LOG.warn("non-master FE can not read, forward HTTP request to master"); ((HttpConnectContext) ctx).setForwardToLeader(true); return; } try { executor.execute(); } catch (IOException e) { LOG.warn("Process one query failed because IOException: ", e); ctx.getState().setError("StarRocks process failed"); ctx.getState().setErrType(QueryState.ErrType.IO_ERR); } catch (UserException e) { LOG.warn("Process one query failed. SQL: " + sql + ", because.", e); ctx.getState().setError(e.getMessage()); ctx.getState().setErrType(QueryState.ErrType.ANALYSIS_ERR); } catch (Throwable e) { LOG.warn("Process one query failed. SQL: " + sql + ", because unknown reason: ", e); ctx.getState().setError("Unexpected exception: " + e.getMessage()); if (parsedStmt instanceof KillStmt) { ctx.getState().setErrType(QueryState.ErrType.IGNORE_ERR); } else { ctx.getState().setErrType(QueryState.ErrType.INTERNAL_ERR); } } finally { Tracers.close(); } if (executor != null) { auditAfterExec(sql, executor.getParsedStmt(), executor.getQueryStatisticsForAuditLog()); } else { auditAfterExec(sql, null, null); } addFinishedQueryDetail(); }
class HttpConnectProcessor extends ConnectProcessor { private static final Logger LOG = LogManager.getLogger(HttpConnectProcessor.class); public HttpConnectProcessor(ConnectContext context) { super(context); } @Override @Override public void processOnce() throws IOException { ctx.getState().reset(); executor = null; ctx.setCommand(MysqlCommand.COM_QUERY); ctx.setStartTime(); ctx.setResourceGroup(null); ctx.setErrorCode(""); this.handleQuery(); ctx.setStartTime(); ctx.setCommand(MysqlCommand.COM_SLEEP); } }
class HttpConnectProcessor extends ConnectProcessor { private static final Logger LOG = LogManager.getLogger(HttpConnectProcessor.class); public HttpConnectProcessor(ConnectContext context) { super(context); } @Override @Override public void processOnce() throws IOException { ctx.getState().reset(); executor = null; ctx.setCommand(MysqlCommand.COM_QUERY); ctx.setStartTime(); ctx.setResourceGroup(null); ctx.setErrorCode(""); this.handleQuery(); ctx.setStartTime(); ctx.setCommand(MysqlCommand.COM_SLEEP); } }
Isn't this null check redundant? There's already one in https://github.com/ballerina-platform/ballerina-lang/pull/36837/files#diff-abafd09e4c17786eb7b550f8eccbeb794ce71bff4e383bdaa9b7ecf203b169aeR1318
public void visit(BLangWorkerFlushExpr workerFlushExpr) { if (workerFlushExpr.workerSymbol == null) { return; } addIfSameSymbol(workerFlushExpr.workerSymbol, workerFlushExpr.workerIdentifier.pos); }
if (workerFlushExpr.workerSymbol == null) {
public void visit(BLangWorkerFlushExpr workerFlushExpr) { if (workerFlushExpr.workerIdentifier == null) { return; } addIfSameSymbol(workerFlushExpr.workerSymbol, workerFlushExpr.workerIdentifier.pos); }
class ReferenceFinder extends BaseVisitor { private final boolean withDefinition; private List<Location> referenceLocations; private BSymbol targetSymbol; public ReferenceFinder(boolean withDefinition) { this.withDefinition = withDefinition; } public List<Location> findReferences(BLangNode node, BSymbol symbol) { this.referenceLocations = new ArrayList<>(); this.targetSymbol = symbol; find(node); return this.referenceLocations; } void find(BLangNode node) { if (node == null) { return; } node.accept(this); } void find(List<? extends BLangNode> nodes) { for (BLangNode node : nodes) { find(node); } } @Override public void visit(BLangPackage pkgNode) { find(pkgNode.imports); find(pkgNode.xmlnsList); find(pkgNode.constants); find(pkgNode.globalVars); find(pkgNode.services); find(pkgNode.annotations); find(pkgNode.typeDefinitions); find(pkgNode.classDefinitions.stream() .filter(c -> !isGeneratedClassDefForService(c)) .collect(Collectors.toList())); find(pkgNode.functions.stream() .filter(f -> !f.flagSet.contains(Flag.LAMBDA)) .collect(Collectors.toList())); if (!(pkgNode instanceof BLangTestablePackage)) { find(pkgNode.getTestablePkg()); } } @Override public void visit(BLangImportPackage importPkgNode) { if (importPkgNode.symbol != null && this.targetSymbol.name.equals(importPkgNode.symbol.name) && this.targetSymbol.pkgID.equals(importPkgNode.symbol.pkgID) && this.targetSymbol.pos.equals(importPkgNode.symbol.pos) && this.withDefinition) { this.referenceLocations.add(importPkgNode.alias.pos); } } @Override public void visit(BLangCompilationUnit unit) { unit.getTopLevelNodes().forEach(topLevelNode -> find((BLangNode) topLevelNode)); } @Override public void visit(BLangXMLNS xmlnsNode) { find(xmlnsNode.namespaceURI); addIfSameSymbol(xmlnsNode.symbol, xmlnsNode.prefix.pos); } @Override public void visit(BLangFunction funcNode) { find(funcNode.annAttachments); find(funcNode.requiredParams); find(funcNode.restParam); find(funcNode.returnTypeAnnAttachments); 
find(funcNode.returnTypeNode); find(funcNode.body); if (funcNode.symbol.origin != VIRTUAL) { addIfSameSymbol(funcNode.symbol, funcNode.name.pos); } } @Override public void visit(BLangResourceFunction resourceFunction) { visit((BLangFunction) resourceFunction); } @Override public void visit(BLangBlockFunctionBody blockFuncBody) { for (BLangStatement stmt : blockFuncBody.stmts) { find(stmt); } } @Override public void visit(BLangExprFunctionBody exprFuncBody) { find(exprFuncBody.expr); } @Override public void visit(BLangExternalFunctionBody externFuncBody) { find(externFuncBody.annAttachments); } @Override public void visit(BLangService serviceNode) { find(serviceNode.annAttachments); find(serviceNode.serviceClass); find(serviceNode.attachedExprs); } @Override public void visit(BLangTypeDefinition typeDefinition) { if (!typeDefinition.flagSet.contains(Flag.ENUM)) { find(typeDefinition.typeNode); } find(typeDefinition.annAttachments); addIfSameSymbol(typeDefinition.symbol, typeDefinition.name.pos); } @Override public void visit(BLangConstant constant) { find(constant.typeNode); find(constant.expr); addIfSameSymbol(constant.symbol, constant.name.pos); } @Override public void visit(BLangSimpleVariable varNode) { find(varNode.annAttachments); find(varNode.typeNode); find(varNode.expr); addIfSameSymbol(varNode.symbol, varNode.name.pos); } @Override public void visit(BLangAnnotation annotationNode) { find(annotationNode.annAttachments); find(annotationNode.typeNode); addIfSameSymbol(annotationNode.symbol, annotationNode.name.pos); } @Override public void visit(BLangAnnotationAttachment annAttachmentNode) { find(annAttachmentNode.expr); if (!annAttachmentNode.pkgAlias.value.isEmpty() && annAttachmentNode.annotationSymbol != null && addIfSameSymbol(annAttachmentNode.annotationSymbol.owner, annAttachmentNode.pkgAlias.pos)) { return; } addIfSameSymbol(annAttachmentNode.annotationSymbol, annAttachmentNode.annotationName.pos); } @Override public void visit(BLangTableKeySpecifier 
tableKeySpecifierNode) { } @Override public void visit(BLangTableKeyTypeConstraint tableKeyTypeConstraint) { find(tableKeyTypeConstraint.keyType); } @Override public void visit(BLangBlockStmt blockNode) { for (BLangStatement stmt : blockNode.stmts) { find(stmt); } } @Override public void visit(BLangLock.BLangLockStmt lockStmtNode) { } @Override public void visit(BLangLock.BLangUnLockStmt unLockNode) { } @Override public void visit(BLangSimpleVariableDef varDefNode) { find(varDefNode.var); } @Override public void visit(BLangAssignment assignNode) { find(assignNode.expr); find(assignNode.varRef); } @Override public void visit(BLangCompoundAssignment compoundAssignNode) { find(compoundAssignNode.expr); find(compoundAssignNode.varRef); } @Override public void visit(BLangRetry retryNode) { find(retryNode.retrySpec); find(retryNode.retryBody); find(retryNode.onFailClause); } @Override public void visit(BLangRetryTransaction retryTransaction) { find(retryTransaction.retrySpec); find(retryTransaction.transaction); } @Override public void visit(BLangRetrySpec retrySpec) { find(retrySpec.argExprs); find(retrySpec.retryManagerType); } @Override public void visit(BLangReturn returnNode) { find(returnNode.expr); } @Override public void visit(BLangPanic panicNode) { find(panicNode.expr); } @Override public void visit(BLangXMLNSStatement xmlnsStmtNode) { find(xmlnsStmtNode.xmlnsDecl); } @Override public void visit(BLangExpressionStmt exprStmtNode) { find(exprStmtNode.expr); } @Override public void visit(BLangIf ifNode) { find(ifNode.expr); find(ifNode.body); find(ifNode.elseStmt); } @Override public void visit(BLangQueryAction queryAction) { find(queryAction.doClause); find(queryAction.queryClauseList); } @Override public void visit(BLangMatchStatement matchStatementNode) { find(matchStatementNode.expr); find(matchStatementNode.matchClauses); find(matchStatementNode.onFailClause); } @Override public void visit(BLangMatchGuard matchGuard) { find(matchGuard.expr); } @Override 
public void visit(BLangConstPattern constMatchPattern) { find(constMatchPattern.expr); } @Override public void visit(BLangVarBindingPatternMatchPattern varBindingPattern) { find(varBindingPattern.getBindingPattern()); } @Override public void visit(BLangErrorMatchPattern errorMatchPattern) { find(errorMatchPattern.errorMessageMatchPattern); find(errorMatchPattern.errorTypeReference); find(errorMatchPattern.errorCauseMatchPattern); find(errorMatchPattern.errorFieldMatchPatterns); } @Override public void visit(BLangErrorMessageMatchPattern errorMessageMatchPattern) { find(errorMessageMatchPattern.simpleMatchPattern); } @Override public void visit(BLangErrorCauseMatchPattern errorCauseMatchPattern) { find(errorCauseMatchPattern.simpleMatchPattern); find(errorCauseMatchPattern.errorMatchPattern); } @Override public void visit(BLangErrorFieldMatchPatterns errorFieldMatchPatterns) { find(errorFieldMatchPatterns.namedArgMatchPatterns); find(errorFieldMatchPatterns.restMatchPattern); } @Override public void visit(BLangSimpleMatchPattern simpleMatchPattern) { find(simpleMatchPattern.varVariableName); find(simpleMatchPattern.constPattern); } @Override public void visit(BLangNamedArgMatchPattern namedArgMatchPattern) { find(namedArgMatchPattern.matchPattern); } @Override public void visit(BLangCaptureBindingPattern captureBindingPattern) { addIfSameSymbol(captureBindingPattern.symbol, captureBindingPattern.getIdentifier().getPosition()); } @Override public void visit(BLangListBindingPattern listBindingPattern) { find(listBindingPattern.bindingPatterns); find(listBindingPattern.restBindingPattern); } @Override public void visit(BLangMappingBindingPattern mappingBindingPattern) { find(mappingBindingPattern.fieldBindingPatterns); find(mappingBindingPattern.restBindingPattern); } @Override public void visit(BLangFieldBindingPattern fieldBindingPattern) { find(fieldBindingPattern.bindingPattern); } @Override public void visit(BLangRestBindingPattern restBindingPattern) { 
addIfSameSymbol(restBindingPattern.symbol, restBindingPattern.getIdentifier().getPosition()); } @Override public void visit(BLangErrorBindingPattern errorBindingPattern) { find(errorBindingPattern.errorMessageBindingPattern); find(errorBindingPattern.errorTypeReference); find(errorBindingPattern.errorCauseBindingPattern); find(errorBindingPattern.errorFieldBindingPatterns); } @Override public void visit(BLangErrorMessageBindingPattern errorMessageBindingPattern) { find(errorMessageBindingPattern.simpleBindingPattern); } @Override public void visit(BLangErrorCauseBindingPattern errorCauseBindingPattern) { find(errorCauseBindingPattern.simpleBindingPattern); find(errorCauseBindingPattern.errorBindingPattern); } @Override public void visit(BLangErrorFieldBindingPatterns errorFieldBindingPatterns) { find(errorFieldBindingPatterns.namedArgBindingPatterns); find(errorFieldBindingPatterns.restBindingPattern); } @Override public void visit(BLangSimpleBindingPattern simpleBindingPattern) { find(simpleBindingPattern.captureBindingPattern); } @Override public void visit(BLangNamedArgBindingPattern namedArgBindingPattern) { find(namedArgBindingPattern.bindingPattern); } @Override public void visit(BLangForeach foreach) { find((BLangNode) foreach.variableDefinitionNode); find(foreach.collection); find(foreach.body); find(foreach.onFailClause); } @Override public void visit(BLangDo doNode) { find(doNode.body); find(doNode.onFailClause); } @Override public void visit(BLangFail failNode) { find(failNode.expr); } @Override public void visit(BLangFromClause fromClause) { find((BLangNode) fromClause.variableDefinitionNode); find(fromClause.collection); } @Override public void visit(BLangJoinClause joinClause) { find((BLangNode) joinClause.variableDefinitionNode); find((BLangOnClause) joinClause.onClause); find(joinClause.collection); } @Override public void visit(BLangLetClause letClause) { for (BLangLetVariable letVariable : letClause.letVarDeclarations) { find((BLangNode) 
letVariable.definitionNode); } } @Override public void visit(BLangOnClause onClause) { find(onClause.lhsExpr); find(onClause.rhsExpr); } @Override public void visit(BLangOrderKey orderKeyClause) { find(orderKeyClause.expression); } @Override public void visit(BLangOrderByClause orderByClause) { for (OrderKeyNode orderKeyNode : orderByClause.orderByKeyList) { find((BLangOrderKey) orderKeyNode); } } @Override public void visit(BLangSelectClause selectClause) { find(selectClause.expression); } @Override public void visit(BLangWhereClause whereClause) { find(whereClause.expression); } @Override public void visit(BLangDoClause doClause) { find(doClause.body); } @Override public void visit(BLangOnFailClause onFailClause) { find((BLangNode) onFailClause.variableDefinitionNode); find(onFailClause.body); } @Override public void visit(BLangOnConflictClause onConflictClause) { find(onConflictClause.expression); } @Override public void visit(BLangLimitClause limitClause) { find(limitClause.expression); } @Override public void visit(BLangMatchClause matchClause) { find(matchClause.matchPatterns); find(matchClause.matchGuard); find(matchClause.blockStmt); } @Override public void visit(BLangWhile whileNode) { find(whileNode.expr); find(whileNode.body); find(whileNode.onFailClause); } @Override public void visit(BLangLock lockNode) { find(lockNode.body); find(lockNode.onFailClause); } @Override public void visit(BLangTransaction transactionNode) { find(transactionNode.transactionBody); find(transactionNode.onFailClause); } @Override public void visit(BLangTupleDestructure stmt) { find(stmt.expr); find(stmt.varRef); } @Override public void visit(BLangRecordDestructure stmt) { find(stmt.expr); find(stmt.varRef); } @Override public void visit(BLangErrorDestructure stmt) { find(stmt.expr); find(stmt.varRef); } @Override public void visit(BLangForkJoin forkJoin) { find(forkJoin.workers); } @Override public void visit(BLangWorkerSend workerSendNode) { find(workerSendNode.expr); 
addIfSameSymbol(workerSendNode.workerSymbol, workerSendNode.workerIdentifier.pos); } @Override public void visit(BLangWorkerReceive workerReceiveNode) { addIfSameSymbol(workerReceiveNode.workerSymbol, workerReceiveNode.workerIdentifier.pos); } @Override public void visit(BLangRollback rollbackNode) { find(rollbackNode.expr); } @Override public void visit(BLangConstRef constRef) { if (!constRef.pkgAlias.value.isEmpty()) { addIfSameSymbol(constRef.symbol.owner, constRef.pkgAlias.pos); } addIfSameSymbol(constRef.symbol, constRef.variableName.pos); } @Override public void visit(BLangRecordLiteral recordLiteral) { for (RecordLiteralNode.RecordField field : recordLiteral.fields) { find((BLangNode) field); } } @Override public void visit(BLangTupleVarRef varRefExpr) { find(varRefExpr.expressions); find((BLangNode) varRefExpr.restParam); } @Override public void visit(BLangRecordVarRef varRefExpr) { for (BLangRecordVarRef.BLangRecordVarRefKeyValue recordRefField : varRefExpr.recordRefFields) { find(recordRefField.getBindingPattern()); } find((BLangNode) varRefExpr.restParam); } @Override public void visit(BLangErrorVarRef varRefExpr) { find(varRefExpr.typeNode); find(varRefExpr.message); find(varRefExpr.cause); find(varRefExpr.restVar); if (varRefExpr.typeNode != null) { find(varRefExpr.detail); } else { visitNamedArgWithoutAddingSymbol(varRefExpr.detail); } } @Override public void visit(BLangSimpleVarRef varRefExpr) { if (varRefExpr.symbol == null) { return; } if (varRefExpr.pkgAlias != null && !varRefExpr.pkgAlias.value.isEmpty() && addIfSameSymbol(varRefExpr.symbol.owner, varRefExpr.pkgAlias.pos)) { return; } addIfSameSymbol(varRefExpr.symbol, varRefExpr.variableName.pos); } @Override public void visit(BLangFieldBasedAccess fieldAccessExpr) { find(fieldAccessExpr.expr); addIfSameSymbol(fieldAccessExpr.symbol, fieldAccessExpr.field.pos); } @Override public void visit(BLangNSPrefixedFieldBasedAccess nsPrefixedFieldBasedAccess) { find(nsPrefixedFieldBasedAccess.expr); 
addIfSameSymbol(nsPrefixedFieldBasedAccess.nsSymbol, nsPrefixedFieldBasedAccess.nsPrefix.pos); } @Override public void visit(BLangIndexBasedAccess indexAccessExpr) { find(indexAccessExpr.expr); if (indexAccessExpr.indexExpr instanceof BLangLiteral) { addIfSameSymbol(indexAccessExpr.symbol, getLocationForLiteral(indexAccessExpr.indexExpr.pos)); } else { find(indexAccessExpr.indexExpr); } } @Override public void visit(BLangInvocation invocationExpr) { if (!invocationExpr.langLibInvocation) { find(invocationExpr.expr); } find(invocationExpr.annAttachments); find(invocationExpr.argExprs); if (!invocationExpr.pkgAlias.value.isEmpty() && invocationExpr.symbol != null) { addIfSameSymbol(invocationExpr.symbol.owner, invocationExpr.pkgAlias.pos); } addIfSameSymbol(invocationExpr.symbol, invocationExpr.name.pos); } @Override public void visit(BLangTypeInit typeInit) { find(typeInit.userDefinedType); find(typeInit.argsExpr); } @Override public void visit(BLangInvocation.BLangActionInvocation actionInvocationExpr) { find(actionInvocationExpr.expr); find(actionInvocationExpr.requiredArgs); find(actionInvocationExpr.annAttachments); find(actionInvocationExpr.restArgs); if (!actionInvocationExpr.pkgAlias.value.isEmpty()) { addIfSameSymbol(actionInvocationExpr.symbol.owner, actionInvocationExpr.pkgAlias.pos); } addIfSameSymbol(actionInvocationExpr.symbol, actionInvocationExpr.name.pos); } @Override public void visit(BLangTernaryExpr ternaryExpr) { find(ternaryExpr.expr); find(ternaryExpr.thenExpr); find(ternaryExpr.elseExpr); } @Override public void visit(BLangWaitExpr waitExpr) { find(waitExpr.exprList); } @Override public void visit(BLangTrapExpr trapExpr) { find(trapExpr.expr); } @Override public void visit(BLangBinaryExpr binaryExpr) { find(binaryExpr.lhsExpr); find(binaryExpr.rhsExpr); } @Override public void visit(BLangElvisExpr elvisExpr) { find(elvisExpr.lhsExpr); find(elvisExpr.rhsExpr); } @Override public void visit(BLangGroupExpr groupExpr) { find(groupExpr.expression); 
} @Override public void visit(BLangLetExpression letExpr) { for (BLangLetVariable letVarDeclaration : letExpr.letVarDeclarations) { find((BLangNode) letVarDeclaration.definitionNode); } find(letExpr.expr); } @Override public void visit(BLangListConstructorExpr listConstructorExpr) { find(listConstructorExpr.exprs); } @Override public void visit(BLangListConstructorExpr.BLangListConstructorSpreadOpExpr spreadOpExpr) { find(spreadOpExpr.expr); } @Override public void visit(BLangTableConstructorExpr tableConstructorExpr) { find(tableConstructorExpr.recordLiteralList); find(tableConstructorExpr.tableKeySpecifier); } @Override public void visit(BLangUnaryExpr unaryExpr) { find(unaryExpr.expr); } @Override public void visit(BLangTypedescExpr typedescExpr) { find(typedescExpr.typeNode); } @Override public void visit(BLangTypeConversionExpr conversionExpr) { find(conversionExpr.annAttachments); find(conversionExpr.typeNode); find(conversionExpr.expr); } @Override public void visit(BLangXMLQName xmlQName) { addIfSameSymbol(xmlQName.nsSymbol, xmlQName.prefix.pos); } @Override public void visit(BLangXMLAttribute xmlAttribute) { find(xmlAttribute.name); find(xmlAttribute.value); } @Override public void visit(BLangXMLElementLiteral xmlElementLiteral) { find(xmlElementLiteral.startTagName); find(xmlElementLiteral.endTagName); find(xmlElementLiteral.children); find(xmlElementLiteral.attributes); find(xmlElementLiteral.inlineNamespaces); } @Override public void visit(BLangXMLTextLiteral xmlTextLiteral) { find(xmlTextLiteral.textFragments); find(xmlTextLiteral.concatExpr); } @Override public void visit(BLangXMLCommentLiteral xmlCommentLiteral) { find(xmlCommentLiteral.textFragments); find(xmlCommentLiteral.concatExpr); } @Override public void visit(BLangXMLProcInsLiteral xmlProcInsLiteral) { find(xmlProcInsLiteral.target); find(xmlProcInsLiteral.dataFragments); find(xmlProcInsLiteral.dataConcatExpr); } @Override public void visit(BLangXMLQuotedString xmlQuotedString) { 
find(xmlQuotedString.textFragments); find(xmlQuotedString.concatExpr); } @Override public void visit(BLangStringTemplateLiteral stringTemplateLiteral) { find(stringTemplateLiteral.exprs); } @Override public void visit(BLangRawTemplateLiteral rawTemplateLiteral) { find(rawTemplateLiteral.insertions); find(rawTemplateLiteral.strings); } @Override public void visit(BLangLambdaFunction bLangLambdaFunction) { find(bLangLambdaFunction.function); } @Override public void visit(BLangArrowFunction bLangArrowFunction) { find(bLangArrowFunction.params); find(bLangArrowFunction.body); } @Override public void visit(BLangRestArgsExpression bLangVarArgsExpression) { find(bLangVarArgsExpression.expr); } @Override public void visit(BLangNamedArgsExpression bLangNamedArgsExpression) { find(bLangNamedArgsExpression.expr); addIfSameSymbol(bLangNamedArgsExpression.varSymbol, bLangNamedArgsExpression.name.pos); } @Override public void visit(BLangIsAssignableExpr assignableExpr) { find(assignableExpr.lhsExpr); find(assignableExpr.typeNode); } @Override public void visit(BLangCheckedExpr checkedExpr) { find(checkedExpr.expr); } @Override public void visit(BLangCheckPanickedExpr checkPanickedExpr) { find(checkPanickedExpr.expr); } @Override public void visit(BLangServiceConstructorExpr serviceConstructorExpr) { find(serviceConstructorExpr.serviceNode); } @Override public void visit(BLangTypeTestExpr typeTestExpr) { find(typeTestExpr.expr); find(typeTestExpr.typeNode); } @Override public void visit(BLangIsLikeExpr typeTestExpr) { find(typeTestExpr.expr); find(typeTestExpr.typeNode); } @Override public void visit(BLangAnnotAccessExpr annotAccessExpr) { find(annotAccessExpr.expr); addIfSameSymbol(annotAccessExpr.annotationSymbol, annotAccessExpr.annotationName.pos); } @Override public void visit(BLangQueryExpr queryExpr) { find(queryExpr.queryClauseList); } @Override public void visit(BLangObjectConstructorExpression objConstructor) { find(objConstructor.classNode); } @Override public void 
visit(BLangArrayType arrayType) { find(arrayType.elemtype); for (BLangExpression size : arrayType.sizes) { find(size); } } @Override public void visit(BLangConstrainedType constrainedType) { find(constrainedType.type); find(constrainedType.constraint); } @Override public void visit(BLangStreamType streamType) { find(streamType.constraint); find(streamType.error); } @Override public void visit(BLangTableTypeNode tableType) { find(tableType.constraint); find(tableType.tableKeySpecifier); find(tableType.tableKeyTypeConstraint); } @Override public void visit(BLangUserDefinedType userDefinedType) { if (userDefinedType.symbol == null) { return; } if (!userDefinedType.pkgAlias.value.isEmpty()) { addIfSameSymbol(userDefinedType.symbol.owner, userDefinedType.pkgAlias.pos); } addIfSameSymbol(userDefinedType.symbol, userDefinedType.typeName.pos); } @Override public void visit(BLangFunctionTypeNode functionTypeNode) { find(functionTypeNode.params); find(functionTypeNode.restParam); find(functionTypeNode.returnTypeNode); } @Override public void visit(BLangUnionTypeNode unionTypeNode) { find(unionTypeNode.memberTypeNodes); } @Override public void visit(BLangIntersectionTypeNode intersectionTypeNode) { find(intersectionTypeNode.constituentTypeNodes); } @Override public void visit(BLangObjectTypeNode objectTypeNode) { find(objectTypeNode.fields); find(objectTypeNode.functions); } @Override public void visit(BLangRecordTypeNode recordTypeNode) { find(recordTypeNode.typeRefs); find(recordTypeNode.fields); find(recordTypeNode.restFieldType); } @Override public void visit(BLangFiniteTypeNode finiteTypeNode) { find(finiteTypeNode.valueSpace); } @Override public void visit(BLangTupleTypeNode tupleTypeNode) { find(tupleTypeNode.memberTypeNodes); find(tupleTypeNode.restParamType); } @Override public void visit(BLangErrorType errorType) { find(errorType.detailType); } @Override public void visit(BLangErrorConstructorExpr errorConstructorExpr) { find(errorConstructorExpr.errorTypeRef); 
find(errorConstructorExpr.positionalArgs); find(errorConstructorExpr.namedArgs); } @Override public void visit(BLangXMLSequenceLiteral bLangXMLSequenceLiteral) { find(bLangXMLSequenceLiteral.xmlItems); } @Override public void visit(BLangTupleVariable bLangTupleVariable) { find(bLangTupleVariable.annAttachments); find(bLangTupleVariable.typeNode); find(bLangTupleVariable.memberVariables); find(bLangTupleVariable.restVariable); find(bLangTupleVariable.expr); } @Override public void visit(BLangTupleVariableDef bLangTupleVariableDef) { find(bLangTupleVariableDef.var); } @Override public void visit(BLangRecordVariable bLangRecordVariable) { find(bLangRecordVariable.annAttachments); find(bLangRecordVariable.typeNode); for (BLangRecordVariable.BLangRecordVariableKeyValue variableKeyValue : bLangRecordVariable.variableList) { find(variableKeyValue.valueBindingPattern); } find(bLangRecordVariable.expr); find(bLangRecordVariable.restParam); } @Override public void visit(BLangRecordVariableDef bLangRecordVariableDef) { find(bLangRecordVariableDef.var); } @Override public void visit(BLangErrorVariable bLangErrorVariable) { find(bLangErrorVariable.annAttachments); find(bLangErrorVariable.typeNode); find(bLangErrorVariable.message); find(bLangErrorVariable.restDetail); find(bLangErrorVariable.cause); find(bLangErrorVariable.reasonMatchConst); find(bLangErrorVariable.expr); for (BLangErrorVariable.BLangErrorDetailEntry errorDetailEntry : bLangErrorVariable.detail) { find(errorDetailEntry.valueBindingPattern); addIfSameSymbol(errorDetailEntry.keySymbol, errorDetailEntry.key.pos); } } @Override public void visit(BLangErrorVariableDef bLangErrorVariableDef) { find(bLangErrorVariableDef.errorVariable); } @Override @Override public void visit(BLangWorkerSyncSendExpr syncSendExpr) { find(syncSendExpr.expr); addIfSameSymbol(syncSendExpr.workerSymbol, syncSendExpr.workerIdentifier.pos); } @Override public void visit(BLangWaitForAllExpr waitForAllExpr) { 
find(waitForAllExpr.keyValuePairs); } @Override public void visit(BLangRecordLiteral.BLangRecordKeyValueField recordKeyValue) { find(recordKeyValue.key); find(recordKeyValue.valueExpr); } @Override public void visit(BLangRecordLiteral.BLangRecordKey recordKey) { find(recordKey.expr); addIfSameSymbol(recordKey.fieldSymbol, recordKey.pos); } @Override public void visit(BLangRecordLiteral.BLangRecordSpreadOperatorField spreadOperatorField) { find(spreadOperatorField.expr); } @Override public void visit(BLangWaitForAllExpr.BLangWaitKeyValue waitKeyValue) { find(waitKeyValue.keyExpr); find(waitKeyValue.valueExpr); } @Override public void visit(BLangXMLElementFilter xmlElementFilter) { addIfSameSymbol(xmlElementFilter.namespaceSymbol, xmlElementFilter.nsPos); } @Override public void visit(BLangXMLElementAccess xmlElementAccess) { find(xmlElementAccess.expr); find(xmlElementAccess.filters); } @Override public void visit(BLangXMLNavigationAccess xmlNavigation) { find(xmlNavigation.childIndex); find(xmlNavigation.filters); find(xmlNavigation.expr); } @Override public void visit(BLangClassDefinition classDefinition) { find(classDefinition.annAttachments); find(classDefinition.fields); find(classDefinition.initFunction); find(classDefinition.functions); find(classDefinition.typeRefs); addIfSameSymbol(classDefinition.symbol, classDefinition.name.pos); } @Override public void visit(BLangListMatchPattern listMatchPattern) { find(listMatchPattern.matchPatterns); find(listMatchPattern.restMatchPattern); } @Override public void visit(BLangMappingMatchPattern mappingMatchPattern) { find(mappingMatchPattern.fieldMatchPatterns); find(mappingMatchPattern.restMatchPattern); } @Override public void visit(BLangFieldMatchPattern fieldMatchPattern) { find(fieldMatchPattern.matchPattern); } @Override public void visit(BLangRestMatchPattern restMatchPattern) { addIfSameSymbol(restMatchPattern.symbol, restMatchPattern.variableName.pos); } @Override public void 
visit(BLangInvocation.BLangResourceAccessInvocation resourceAccessInvocation) { find(resourceAccessInvocation.expr); find(resourceAccessInvocation.requiredArgs); find(resourceAccessInvocation.annAttachments); find(resourceAccessInvocation.restArgs); find(resourceAccessInvocation.resourceAccessPathSegments); if (!resourceAccessInvocation.pkgAlias.value.isEmpty()) { addIfSameSymbol(resourceAccessInvocation.symbol.owner, resourceAccessInvocation.pkgAlias.pos); } addIfSameSymbol(resourceAccessInvocation.symbol, resourceAccessInvocation.pos); } private void visitNamedArgWithoutAddingSymbol(List<BLangNamedArgsExpression> args) { for (BLangNamedArgsExpression arg : args) { find(arg.expr); } } private boolean addIfSameSymbol(BSymbol symbol, Location location) { if (symbol != null && this.targetSymbol.name.equals(symbol.name) && this.targetSymbol.pkgID.equals(symbol.pkgID) && this.targetSymbol.pos.equals(symbol.pos) && (this.withDefinition || !symbol.pos.equals(location))) { this.referenceLocations.add(location); return true; } return false; } private boolean isGeneratedClassDefForService(BLangClassDefinition clazz) { return clazz.flagSet.contains(Flag.ANONYMOUS) && clazz.flagSet.contains(Flag.SERVICE); } /** * This method is intended to be used for getting the location of a string value with the surrounding quotes * disregarded. If we give the original location, it'd be problematic for use cases such as renaming since we only * return a list of locations of references. Without further contextual info, it'll be hard to determine whether a * particular reference location is a string value. 
* * @param location Location of the string * @return The modified location with the quotes diregarded */ private Location getLocationForLiteral(Location location) { LineRange lineRange = location.lineRange(); return new BLangDiagnosticLocation(lineRange.filePath(), lineRange.startLine().line(), lineRange.endLine().line(), lineRange.startLine().offset() + 1, lineRange.endLine().offset() - 1, location.textRange().startOffset(), location.textRange().length()); } }
class ReferenceFinder extends BaseVisitor { private final boolean withDefinition; private List<Location> referenceLocations; private BSymbol targetSymbol; public ReferenceFinder(boolean withDefinition) { this.withDefinition = withDefinition; } public List<Location> findReferences(BLangNode node, BSymbol symbol) { this.referenceLocations = new ArrayList<>(); this.targetSymbol = symbol; find(node); return this.referenceLocations; } void find(BLangNode node) { if (node == null) { return; } node.accept(this); } void find(List<? extends BLangNode> nodes) { for (BLangNode node : nodes) { find(node); } } @Override public void visit(BLangPackage pkgNode) { find(pkgNode.imports); find(pkgNode.xmlnsList); find(pkgNode.constants); find(pkgNode.globalVars); find(pkgNode.services); find(pkgNode.annotations); find(pkgNode.typeDefinitions); find(pkgNode.classDefinitions.stream() .filter(c -> !isGeneratedClassDefForService(c)) .collect(Collectors.toList())); find(pkgNode.functions.stream() .filter(f -> !f.flagSet.contains(Flag.LAMBDA)) .collect(Collectors.toList())); if (!(pkgNode instanceof BLangTestablePackage)) { find(pkgNode.getTestablePkg()); } } @Override public void visit(BLangImportPackage importPkgNode) { if (importPkgNode.symbol != null && this.targetSymbol.name.equals(importPkgNode.symbol.name) && this.targetSymbol.pkgID.equals(importPkgNode.symbol.pkgID) && this.targetSymbol.pos.equals(importPkgNode.symbol.pos) && this.withDefinition) { this.referenceLocations.add(importPkgNode.alias.pos); } } @Override public void visit(BLangCompilationUnit unit) { unit.getTopLevelNodes().forEach(topLevelNode -> find((BLangNode) topLevelNode)); } @Override public void visit(BLangXMLNS xmlnsNode) { find(xmlnsNode.namespaceURI); addIfSameSymbol(xmlnsNode.symbol, xmlnsNode.prefix.pos); } @Override public void visit(BLangFunction funcNode) { find(funcNode.annAttachments); find(funcNode.requiredParams); find(funcNode.restParam); find(funcNode.returnTypeAnnAttachments); 
find(funcNode.returnTypeNode); find(funcNode.body); if (funcNode.symbol.origin != VIRTUAL) { addIfSameSymbol(funcNode.symbol, funcNode.name.pos); } } @Override public void visit(BLangResourceFunction resourceFunction) { visit((BLangFunction) resourceFunction); } @Override public void visit(BLangBlockFunctionBody blockFuncBody) { for (BLangStatement stmt : blockFuncBody.stmts) { find(stmt); } } @Override public void visit(BLangExprFunctionBody exprFuncBody) { find(exprFuncBody.expr); } @Override public void visit(BLangExternalFunctionBody externFuncBody) { find(externFuncBody.annAttachments); } @Override public void visit(BLangService serviceNode) { find(serviceNode.annAttachments); find(serviceNode.serviceClass); find(serviceNode.attachedExprs); } @Override public void visit(BLangTypeDefinition typeDefinition) { if (!typeDefinition.flagSet.contains(Flag.ENUM)) { find(typeDefinition.typeNode); } find(typeDefinition.annAttachments); addIfSameSymbol(typeDefinition.symbol, typeDefinition.name.pos); } @Override public void visit(BLangConstant constant) { find(constant.typeNode); find(constant.expr); addIfSameSymbol(constant.symbol, constant.name.pos); } @Override public void visit(BLangSimpleVariable varNode) { find(varNode.annAttachments); find(varNode.typeNode); find(varNode.expr); addIfSameSymbol(varNode.symbol, varNode.name.pos); } @Override public void visit(BLangAnnotation annotationNode) { find(annotationNode.annAttachments); find(annotationNode.typeNode); addIfSameSymbol(annotationNode.symbol, annotationNode.name.pos); } @Override public void visit(BLangAnnotationAttachment annAttachmentNode) { find(annAttachmentNode.expr); if (!annAttachmentNode.pkgAlias.value.isEmpty() && annAttachmentNode.annotationSymbol != null && addIfSameSymbol(annAttachmentNode.annotationSymbol.owner, annAttachmentNode.pkgAlias.pos)) { return; } addIfSameSymbol(annAttachmentNode.annotationSymbol, annAttachmentNode.annotationName.pos); } @Override public void visit(BLangTableKeySpecifier 
tableKeySpecifierNode) { } @Override public void visit(BLangTableKeyTypeConstraint tableKeyTypeConstraint) { find(tableKeyTypeConstraint.keyType); } @Override public void visit(BLangBlockStmt blockNode) { for (BLangStatement stmt : blockNode.stmts) { find(stmt); } } @Override public void visit(BLangLock.BLangLockStmt lockStmtNode) { } @Override public void visit(BLangLock.BLangUnLockStmt unLockNode) { } @Override public void visit(BLangSimpleVariableDef varDefNode) { find(varDefNode.var); } @Override public void visit(BLangAssignment assignNode) { find(assignNode.expr); find(assignNode.varRef); } @Override public void visit(BLangCompoundAssignment compoundAssignNode) { find(compoundAssignNode.expr); find(compoundAssignNode.varRef); } @Override public void visit(BLangRetry retryNode) { find(retryNode.retrySpec); find(retryNode.retryBody); find(retryNode.onFailClause); } @Override public void visit(BLangRetryTransaction retryTransaction) { find(retryTransaction.retrySpec); find(retryTransaction.transaction); } @Override public void visit(BLangRetrySpec retrySpec) { find(retrySpec.argExprs); find(retrySpec.retryManagerType); } @Override public void visit(BLangReturn returnNode) { find(returnNode.expr); } @Override public void visit(BLangPanic panicNode) { find(panicNode.expr); } @Override public void visit(BLangXMLNSStatement xmlnsStmtNode) { find(xmlnsStmtNode.xmlnsDecl); } @Override public void visit(BLangExpressionStmt exprStmtNode) { find(exprStmtNode.expr); } @Override public void visit(BLangIf ifNode) { find(ifNode.expr); find(ifNode.body); find(ifNode.elseStmt); } @Override public void visit(BLangQueryAction queryAction) { find(queryAction.doClause); find(queryAction.queryClauseList); } @Override public void visit(BLangMatchStatement matchStatementNode) { find(matchStatementNode.expr); find(matchStatementNode.matchClauses); find(matchStatementNode.onFailClause); } @Override public void visit(BLangMatchGuard matchGuard) { find(matchGuard.expr); } @Override 
public void visit(BLangConstPattern constMatchPattern) { find(constMatchPattern.expr); } @Override public void visit(BLangVarBindingPatternMatchPattern varBindingPattern) { find(varBindingPattern.getBindingPattern()); } @Override public void visit(BLangErrorMatchPattern errorMatchPattern) { find(errorMatchPattern.errorMessageMatchPattern); find(errorMatchPattern.errorTypeReference); find(errorMatchPattern.errorCauseMatchPattern); find(errorMatchPattern.errorFieldMatchPatterns); } @Override public void visit(BLangErrorMessageMatchPattern errorMessageMatchPattern) { find(errorMessageMatchPattern.simpleMatchPattern); } @Override public void visit(BLangErrorCauseMatchPattern errorCauseMatchPattern) { find(errorCauseMatchPattern.simpleMatchPattern); find(errorCauseMatchPattern.errorMatchPattern); } @Override public void visit(BLangErrorFieldMatchPatterns errorFieldMatchPatterns) { find(errorFieldMatchPatterns.namedArgMatchPatterns); find(errorFieldMatchPatterns.restMatchPattern); } @Override public void visit(BLangSimpleMatchPattern simpleMatchPattern) { find(simpleMatchPattern.varVariableName); find(simpleMatchPattern.constPattern); } @Override public void visit(BLangNamedArgMatchPattern namedArgMatchPattern) { find(namedArgMatchPattern.matchPattern); } @Override public void visit(BLangCaptureBindingPattern captureBindingPattern) { addIfSameSymbol(captureBindingPattern.symbol, captureBindingPattern.getIdentifier().getPosition()); } @Override public void visit(BLangListBindingPattern listBindingPattern) { find(listBindingPattern.bindingPatterns); find(listBindingPattern.restBindingPattern); } @Override public void visit(BLangMappingBindingPattern mappingBindingPattern) { find(mappingBindingPattern.fieldBindingPatterns); find(mappingBindingPattern.restBindingPattern); } @Override public void visit(BLangFieldBindingPattern fieldBindingPattern) { find(fieldBindingPattern.bindingPattern); } @Override public void visit(BLangRestBindingPattern restBindingPattern) { 
addIfSameSymbol(restBindingPattern.symbol, restBindingPattern.getIdentifier().getPosition()); } @Override public void visit(BLangErrorBindingPattern errorBindingPattern) { find(errorBindingPattern.errorMessageBindingPattern); find(errorBindingPattern.errorTypeReference); find(errorBindingPattern.errorCauseBindingPattern); find(errorBindingPattern.errorFieldBindingPatterns); } @Override public void visit(BLangErrorMessageBindingPattern errorMessageBindingPattern) { find(errorMessageBindingPattern.simpleBindingPattern); } @Override public void visit(BLangErrorCauseBindingPattern errorCauseBindingPattern) { find(errorCauseBindingPattern.simpleBindingPattern); find(errorCauseBindingPattern.errorBindingPattern); } @Override public void visit(BLangErrorFieldBindingPatterns errorFieldBindingPatterns) { find(errorFieldBindingPatterns.namedArgBindingPatterns); find(errorFieldBindingPatterns.restBindingPattern); } @Override public void visit(BLangSimpleBindingPattern simpleBindingPattern) { find(simpleBindingPattern.captureBindingPattern); } @Override public void visit(BLangNamedArgBindingPattern namedArgBindingPattern) { find(namedArgBindingPattern.bindingPattern); } @Override public void visit(BLangForeach foreach) { find((BLangNode) foreach.variableDefinitionNode); find(foreach.collection); find(foreach.body); find(foreach.onFailClause); } @Override public void visit(BLangDo doNode) { find(doNode.body); find(doNode.onFailClause); } @Override public void visit(BLangFail failNode) { find(failNode.expr); } @Override public void visit(BLangFromClause fromClause) { find((BLangNode) fromClause.variableDefinitionNode); find(fromClause.collection); } @Override public void visit(BLangJoinClause joinClause) { find((BLangNode) joinClause.variableDefinitionNode); find((BLangOnClause) joinClause.onClause); find(joinClause.collection); } @Override public void visit(BLangLetClause letClause) { for (BLangLetVariable letVariable : letClause.letVarDeclarations) { find((BLangNode) 
letVariable.definitionNode); } } @Override public void visit(BLangOnClause onClause) { find(onClause.lhsExpr); find(onClause.rhsExpr); } @Override public void visit(BLangOrderKey orderKeyClause) { find(orderKeyClause.expression); } @Override public void visit(BLangOrderByClause orderByClause) { for (OrderKeyNode orderKeyNode : orderByClause.orderByKeyList) { find((BLangOrderKey) orderKeyNode); } } @Override public void visit(BLangSelectClause selectClause) { find(selectClause.expression); } @Override public void visit(BLangWhereClause whereClause) { find(whereClause.expression); } @Override public void visit(BLangDoClause doClause) { find(doClause.body); } @Override public void visit(BLangOnFailClause onFailClause) { find((BLangNode) onFailClause.variableDefinitionNode); find(onFailClause.body); } @Override public void visit(BLangOnConflictClause onConflictClause) { find(onConflictClause.expression); } @Override public void visit(BLangLimitClause limitClause) { find(limitClause.expression); } @Override public void visit(BLangMatchClause matchClause) { find(matchClause.matchPatterns); find(matchClause.matchGuard); find(matchClause.blockStmt); } @Override public void visit(BLangWhile whileNode) { find(whileNode.expr); find(whileNode.body); find(whileNode.onFailClause); } @Override public void visit(BLangLock lockNode) { find(lockNode.body); find(lockNode.onFailClause); } @Override public void visit(BLangTransaction transactionNode) { find(transactionNode.transactionBody); find(transactionNode.onFailClause); } @Override public void visit(BLangTupleDestructure stmt) { find(stmt.expr); find(stmt.varRef); } @Override public void visit(BLangRecordDestructure stmt) { find(stmt.expr); find(stmt.varRef); } @Override public void visit(BLangErrorDestructure stmt) { find(stmt.expr); find(stmt.varRef); } @Override public void visit(BLangForkJoin forkJoin) { find(forkJoin.workers); } @Override public void visit(BLangWorkerSend workerSendNode) { find(workerSendNode.expr); 
addIfSameSymbol(workerSendNode.workerSymbol, workerSendNode.workerIdentifier.pos); } @Override public void visit(BLangWorkerReceive workerReceiveNode) { addIfSameSymbol(workerReceiveNode.workerSymbol, workerReceiveNode.workerIdentifier.pos); } @Override public void visit(BLangRollback rollbackNode) { find(rollbackNode.expr); } @Override public void visit(BLangConstRef constRef) { if (!constRef.pkgAlias.value.isEmpty()) { addIfSameSymbol(constRef.symbol.owner, constRef.pkgAlias.pos); } addIfSameSymbol(constRef.symbol, constRef.variableName.pos); } @Override public void visit(BLangRecordLiteral recordLiteral) { for (RecordLiteralNode.RecordField field : recordLiteral.fields) { find((BLangNode) field); } } @Override public void visit(BLangTupleVarRef varRefExpr) { find(varRefExpr.expressions); find((BLangNode) varRefExpr.restParam); } @Override public void visit(BLangRecordVarRef varRefExpr) { for (BLangRecordVarRef.BLangRecordVarRefKeyValue recordRefField : varRefExpr.recordRefFields) { find(recordRefField.getBindingPattern()); } find((BLangNode) varRefExpr.restParam); } @Override public void visit(BLangErrorVarRef varRefExpr) { find(varRefExpr.typeNode); find(varRefExpr.message); find(varRefExpr.cause); find(varRefExpr.restVar); if (varRefExpr.typeNode != null) { find(varRefExpr.detail); } else { visitNamedArgWithoutAddingSymbol(varRefExpr.detail); } } @Override public void visit(BLangSimpleVarRef varRefExpr) { if (varRefExpr.symbol == null) { return; } if (varRefExpr.pkgAlias != null && !varRefExpr.pkgAlias.value.isEmpty() && addIfSameSymbol(varRefExpr.symbol.owner, varRefExpr.pkgAlias.pos)) { return; } addIfSameSymbol(varRefExpr.symbol, varRefExpr.variableName.pos); } @Override public void visit(BLangFieldBasedAccess fieldAccessExpr) { find(fieldAccessExpr.expr); addIfSameSymbol(fieldAccessExpr.symbol, fieldAccessExpr.field.pos); } @Override public void visit(BLangNSPrefixedFieldBasedAccess nsPrefixedFieldBasedAccess) { find(nsPrefixedFieldBasedAccess.expr); 
addIfSameSymbol(nsPrefixedFieldBasedAccess.nsSymbol, nsPrefixedFieldBasedAccess.nsPrefix.pos); } @Override public void visit(BLangIndexBasedAccess indexAccessExpr) { find(indexAccessExpr.expr); if (indexAccessExpr.indexExpr instanceof BLangLiteral) { addIfSameSymbol(indexAccessExpr.symbol, getLocationForLiteral(indexAccessExpr.indexExpr.pos)); } else { find(indexAccessExpr.indexExpr); } } @Override public void visit(BLangInvocation invocationExpr) { if (!invocationExpr.langLibInvocation) { find(invocationExpr.expr); } find(invocationExpr.annAttachments); find(invocationExpr.argExprs); if (!invocationExpr.pkgAlias.value.isEmpty() && invocationExpr.symbol != null) { addIfSameSymbol(invocationExpr.symbol.owner, invocationExpr.pkgAlias.pos); } addIfSameSymbol(invocationExpr.symbol, invocationExpr.name.pos); } @Override public void visit(BLangTypeInit typeInit) { find(typeInit.userDefinedType); find(typeInit.argsExpr); } @Override public void visit(BLangInvocation.BLangActionInvocation actionInvocationExpr) { find(actionInvocationExpr.expr); find(actionInvocationExpr.requiredArgs); find(actionInvocationExpr.annAttachments); find(actionInvocationExpr.restArgs); if (!actionInvocationExpr.pkgAlias.value.isEmpty()) { addIfSameSymbol(actionInvocationExpr.symbol.owner, actionInvocationExpr.pkgAlias.pos); } addIfSameSymbol(actionInvocationExpr.symbol, actionInvocationExpr.name.pos); } @Override public void visit(BLangTernaryExpr ternaryExpr) { find(ternaryExpr.expr); find(ternaryExpr.thenExpr); find(ternaryExpr.elseExpr); } @Override public void visit(BLangWaitExpr waitExpr) { find(waitExpr.exprList); } @Override public void visit(BLangTrapExpr trapExpr) { find(trapExpr.expr); } @Override public void visit(BLangBinaryExpr binaryExpr) { find(binaryExpr.lhsExpr); find(binaryExpr.rhsExpr); } @Override public void visit(BLangElvisExpr elvisExpr) { find(elvisExpr.lhsExpr); find(elvisExpr.rhsExpr); } @Override public void visit(BLangGroupExpr groupExpr) { find(groupExpr.expression); 
} @Override public void visit(BLangLetExpression letExpr) { for (BLangLetVariable letVarDeclaration : letExpr.letVarDeclarations) { find((BLangNode) letVarDeclaration.definitionNode); } find(letExpr.expr); } @Override public void visit(BLangListConstructorExpr listConstructorExpr) { find(listConstructorExpr.exprs); } @Override public void visit(BLangListConstructorExpr.BLangListConstructorSpreadOpExpr spreadOpExpr) { find(spreadOpExpr.expr); } @Override public void visit(BLangTableConstructorExpr tableConstructorExpr) { find(tableConstructorExpr.recordLiteralList); find(tableConstructorExpr.tableKeySpecifier); } @Override public void visit(BLangUnaryExpr unaryExpr) { find(unaryExpr.expr); } @Override public void visit(BLangTypedescExpr typedescExpr) { find(typedescExpr.typeNode); } @Override public void visit(BLangTypeConversionExpr conversionExpr) { find(conversionExpr.annAttachments); find(conversionExpr.typeNode); find(conversionExpr.expr); } @Override public void visit(BLangXMLQName xmlQName) { addIfSameSymbol(xmlQName.nsSymbol, xmlQName.prefix.pos); } @Override public void visit(BLangXMLAttribute xmlAttribute) { find(xmlAttribute.name); find(xmlAttribute.value); } @Override public void visit(BLangXMLElementLiteral xmlElementLiteral) { find(xmlElementLiteral.startTagName); find(xmlElementLiteral.endTagName); find(xmlElementLiteral.children); find(xmlElementLiteral.attributes); find(xmlElementLiteral.inlineNamespaces); } @Override public void visit(BLangXMLTextLiteral xmlTextLiteral) { find(xmlTextLiteral.textFragments); find(xmlTextLiteral.concatExpr); } @Override public void visit(BLangXMLCommentLiteral xmlCommentLiteral) { find(xmlCommentLiteral.textFragments); find(xmlCommentLiteral.concatExpr); } @Override public void visit(BLangXMLProcInsLiteral xmlProcInsLiteral) { find(xmlProcInsLiteral.target); find(xmlProcInsLiteral.dataFragments); find(xmlProcInsLiteral.dataConcatExpr); } @Override public void visit(BLangXMLQuotedString xmlQuotedString) { 
find(xmlQuotedString.textFragments); find(xmlQuotedString.concatExpr); } @Override public void visit(BLangStringTemplateLiteral stringTemplateLiteral) { find(stringTemplateLiteral.exprs); } @Override public void visit(BLangRawTemplateLiteral rawTemplateLiteral) { find(rawTemplateLiteral.insertions); find(rawTemplateLiteral.strings); } @Override public void visit(BLangLambdaFunction bLangLambdaFunction) { find(bLangLambdaFunction.function); } @Override public void visit(BLangArrowFunction bLangArrowFunction) { find(bLangArrowFunction.params); find(bLangArrowFunction.body); } @Override public void visit(BLangRestArgsExpression bLangVarArgsExpression) { find(bLangVarArgsExpression.expr); } @Override public void visit(BLangNamedArgsExpression bLangNamedArgsExpression) { find(bLangNamedArgsExpression.expr); addIfSameSymbol(bLangNamedArgsExpression.varSymbol, bLangNamedArgsExpression.name.pos); } @Override public void visit(BLangIsAssignableExpr assignableExpr) { find(assignableExpr.lhsExpr); find(assignableExpr.typeNode); } @Override public void visit(BLangCheckedExpr checkedExpr) { find(checkedExpr.expr); } @Override public void visit(BLangCheckPanickedExpr checkPanickedExpr) { find(checkPanickedExpr.expr); } @Override public void visit(BLangServiceConstructorExpr serviceConstructorExpr) { find(serviceConstructorExpr.serviceNode); } @Override public void visit(BLangTypeTestExpr typeTestExpr) { find(typeTestExpr.expr); find(typeTestExpr.typeNode); } @Override public void visit(BLangIsLikeExpr typeTestExpr) { find(typeTestExpr.expr); find(typeTestExpr.typeNode); } @Override public void visit(BLangAnnotAccessExpr annotAccessExpr) { find(annotAccessExpr.expr); addIfSameSymbol(annotAccessExpr.annotationSymbol, annotAccessExpr.annotationName.pos); } @Override public void visit(BLangQueryExpr queryExpr) { find(queryExpr.queryClauseList); } @Override public void visit(BLangObjectConstructorExpression objConstructor) { find(objConstructor.classNode); } @Override public void 
visit(BLangArrayType arrayType) { find(arrayType.elemtype); for (BLangExpression size : arrayType.sizes) { find(size); } } @Override public void visit(BLangConstrainedType constrainedType) { find(constrainedType.type); find(constrainedType.constraint); } @Override public void visit(BLangStreamType streamType) { find(streamType.constraint); find(streamType.error); } @Override public void visit(BLangTableTypeNode tableType) { find(tableType.constraint); find(tableType.tableKeySpecifier); find(tableType.tableKeyTypeConstraint); } @Override public void visit(BLangUserDefinedType userDefinedType) { if (userDefinedType.symbol == null) { return; } if (!userDefinedType.pkgAlias.value.isEmpty()) { addIfSameSymbol(userDefinedType.symbol.owner, userDefinedType.pkgAlias.pos); } addIfSameSymbol(userDefinedType.symbol, userDefinedType.typeName.pos); } @Override public void visit(BLangFunctionTypeNode functionTypeNode) { find(functionTypeNode.params); find(functionTypeNode.restParam); find(functionTypeNode.returnTypeNode); } @Override public void visit(BLangUnionTypeNode unionTypeNode) { find(unionTypeNode.memberTypeNodes); } @Override public void visit(BLangIntersectionTypeNode intersectionTypeNode) { find(intersectionTypeNode.constituentTypeNodes); } @Override public void visit(BLangObjectTypeNode objectTypeNode) { find(objectTypeNode.fields); find(objectTypeNode.functions); } @Override public void visit(BLangRecordTypeNode recordTypeNode) { find(recordTypeNode.typeRefs); find(recordTypeNode.fields); find(recordTypeNode.restFieldType); } @Override public void visit(BLangFiniteTypeNode finiteTypeNode) { find(finiteTypeNode.valueSpace); } @Override public void visit(BLangTupleTypeNode tupleTypeNode) { find(tupleTypeNode.memberTypeNodes); find(tupleTypeNode.restParamType); } @Override public void visit(BLangErrorType errorType) { find(errorType.detailType); } @Override public void visit(BLangErrorConstructorExpr errorConstructorExpr) { find(errorConstructorExpr.errorTypeRef); 
find(errorConstructorExpr.positionalArgs); find(errorConstructorExpr.namedArgs); } @Override public void visit(BLangXMLSequenceLiteral bLangXMLSequenceLiteral) { find(bLangXMLSequenceLiteral.xmlItems); } @Override public void visit(BLangTupleVariable bLangTupleVariable) { find(bLangTupleVariable.annAttachments); find(bLangTupleVariable.typeNode); find(bLangTupleVariable.memberVariables); find(bLangTupleVariable.restVariable); find(bLangTupleVariable.expr); } @Override public void visit(BLangTupleVariableDef bLangTupleVariableDef) { find(bLangTupleVariableDef.var); } @Override public void visit(BLangRecordVariable bLangRecordVariable) { find(bLangRecordVariable.annAttachments); find(bLangRecordVariable.typeNode); for (BLangRecordVariable.BLangRecordVariableKeyValue variableKeyValue : bLangRecordVariable.variableList) { find(variableKeyValue.valueBindingPattern); } find(bLangRecordVariable.expr); find(bLangRecordVariable.restParam); } @Override public void visit(BLangRecordVariableDef bLangRecordVariableDef) { find(bLangRecordVariableDef.var); } @Override public void visit(BLangErrorVariable bLangErrorVariable) { find(bLangErrorVariable.annAttachments); find(bLangErrorVariable.typeNode); find(bLangErrorVariable.message); find(bLangErrorVariable.restDetail); find(bLangErrorVariable.cause); find(bLangErrorVariable.reasonMatchConst); find(bLangErrorVariable.expr); for (BLangErrorVariable.BLangErrorDetailEntry errorDetailEntry : bLangErrorVariable.detail) { find(errorDetailEntry.valueBindingPattern); addIfSameSymbol(errorDetailEntry.keySymbol, errorDetailEntry.key.pos); } } @Override public void visit(BLangErrorVariableDef bLangErrorVariableDef) { find(bLangErrorVariableDef.errorVariable); } @Override @Override public void visit(BLangWorkerSyncSendExpr syncSendExpr) { find(syncSendExpr.expr); addIfSameSymbol(syncSendExpr.workerSymbol, syncSendExpr.workerIdentifier.pos); } @Override public void visit(BLangWaitForAllExpr waitForAllExpr) { 
find(waitForAllExpr.keyValuePairs); } @Override public void visit(BLangRecordLiteral.BLangRecordKeyValueField recordKeyValue) { find(recordKeyValue.key); find(recordKeyValue.valueExpr); } @Override public void visit(BLangRecordLiteral.BLangRecordKey recordKey) { find(recordKey.expr); addIfSameSymbol(recordKey.fieldSymbol, recordKey.pos); } @Override public void visit(BLangRecordLiteral.BLangRecordSpreadOperatorField spreadOperatorField) { find(spreadOperatorField.expr); } @Override public void visit(BLangWaitForAllExpr.BLangWaitKeyValue waitKeyValue) { find(waitKeyValue.keyExpr); find(waitKeyValue.valueExpr); } @Override public void visit(BLangXMLElementFilter xmlElementFilter) { addIfSameSymbol(xmlElementFilter.namespaceSymbol, xmlElementFilter.nsPos); } @Override public void visit(BLangXMLElementAccess xmlElementAccess) { find(xmlElementAccess.expr); find(xmlElementAccess.filters); } @Override public void visit(BLangXMLNavigationAccess xmlNavigation) { find(xmlNavigation.childIndex); find(xmlNavigation.filters); find(xmlNavigation.expr); } @Override public void visit(BLangClassDefinition classDefinition) { find(classDefinition.annAttachments); find(classDefinition.fields); find(classDefinition.initFunction); find(classDefinition.functions); find(classDefinition.typeRefs); addIfSameSymbol(classDefinition.symbol, classDefinition.name.pos); } @Override public void visit(BLangListMatchPattern listMatchPattern) { find(listMatchPattern.matchPatterns); find(listMatchPattern.restMatchPattern); } @Override public void visit(BLangMappingMatchPattern mappingMatchPattern) { find(mappingMatchPattern.fieldMatchPatterns); find(mappingMatchPattern.restMatchPattern); } @Override public void visit(BLangFieldMatchPattern fieldMatchPattern) { find(fieldMatchPattern.matchPattern); } @Override public void visit(BLangRestMatchPattern restMatchPattern) { addIfSameSymbol(restMatchPattern.symbol, restMatchPattern.variableName.pos); } @Override public void 
visit(BLangInvocation.BLangResourceAccessInvocation resourceAccessInvocation) { find(resourceAccessInvocation.expr); find(resourceAccessInvocation.requiredArgs); find(resourceAccessInvocation.annAttachments); find(resourceAccessInvocation.restArgs); find(resourceAccessInvocation.resourceAccessPathSegments); if (!resourceAccessInvocation.pkgAlias.value.isEmpty()) { addIfSameSymbol(resourceAccessInvocation.symbol.owner, resourceAccessInvocation.pkgAlias.pos); } addIfSameSymbol(resourceAccessInvocation.symbol, resourceAccessInvocation.resourceAccessPathSegments.pos); } private void visitNamedArgWithoutAddingSymbol(List<BLangNamedArgsExpression> args) { for (BLangNamedArgsExpression arg : args) { find(arg.expr); } } private boolean addIfSameSymbol(BSymbol symbol, Location location) { if (symbol != null && this.targetSymbol.name.equals(symbol.name) && this.targetSymbol.pkgID.equals(symbol.pkgID) && this.targetSymbol.pos.equals(symbol.pos) && (this.withDefinition || !symbol.pos.equals(location))) { this.referenceLocations.add(location); return true; } return false; } private boolean isGeneratedClassDefForService(BLangClassDefinition clazz) { return clazz.flagSet.contains(Flag.ANONYMOUS) && clazz.flagSet.contains(Flag.SERVICE); } /** * This method is intended to be used for getting the location of a string value with the surrounding quotes * disregarded. If we give the original location, it'd be problematic for use cases such as renaming since we only * return a list of locations of references. Without further contextual info, it'll be hard to determine whether a * particular reference location is a string value. 
* * @param location Location of the string * @return The modified location with the quotes diregarded */ private Location getLocationForLiteral(Location location) { LineRange lineRange = location.lineRange(); return new BLangDiagnosticLocation(lineRange.filePath(), lineRange.startLine().line(), lineRange.endLine().line(), lineRange.startLine().offset() + 1, lineRange.endLine().offset() - 1, location.textRange().startOffset(), location.textRange().length()); } }
Ok, never mind. I was thinking to avoid copying in and copying out of `memo` multiple times. Let's refactor this when we have more similar cases in the future.
private Memo rewrite(Plan plan) { Plan normalizedPlan = PlanRewriter.topDownRewrite(plan, new ConnectContext(), new NormalizeExpressions()); return PlanRewriter.topDownRewriteMemo(normalizedPlan, new ConnectContext(), new PushPredicateThroughJoin()); }
Plan normalizedPlan = PlanRewriter.topDownRewrite(plan, new ConnectContext(), new NormalizeExpressions());
private Memo rewrite(Plan plan) { Plan normalizedPlan = PlanRewriter.topDownRewrite(plan, new ConnectContext(), new ExpressionNormalization()); return PlanRewriter.topDownRewriteMemo(normalizedPlan, new ConnectContext(), new PushPredicateThroughJoin()); }
class PushDownPredicateTest { private Table student; private Table score; private Table course; private Plan rStudent; private Plan rScore; private Plan rCourse; /** * ut before. */ @BeforeAll public final void beforeAll() { student = new Table(0L, "student", Table.TableType.OLAP, ImmutableList.<Column>of(new Column("id", Type.INT, true, AggregateType.NONE, "0", ""), new Column("name", Type.STRING, true, AggregateType.NONE, "", ""), new Column("age", Type.INT, true, AggregateType.NONE, "", ""))); score = new Table(0L, "score", Table.TableType.OLAP, ImmutableList.<Column>of(new Column("sid", Type.INT, true, AggregateType.NONE, "0", ""), new Column("cid", Type.INT, true, AggregateType.NONE, "", ""), new Column("grade", Type.DOUBLE, true, AggregateType.NONE, "", ""))); course = new Table(0L, "course", Table.TableType.OLAP, ImmutableList.<Column>of(new Column("cid", Type.INT, true, AggregateType.NONE, "0", ""), new Column("name", Type.STRING, true, AggregateType.NONE, "", ""), new Column("teacher", Type.STRING, true, AggregateType.NONE, "", ""))); rStudent = new LogicalOlapScan(student, ImmutableList.of("student")); rScore = new LogicalOlapScan(score, ImmutableList.of("score")); rCourse = new LogicalOlapScan(course, ImmutableList.of("course")); } @Test public void pushDownPredicateIntoScanTest1() { Expression onCondition1 = new EqualTo(rStudent.getOutput().get(0), rScore.getOutput().get(0)); Expression onCondition2 = new GreaterThan(rStudent.getOutput().get(0), Literal.of(1)); Expression onCondition3 = new GreaterThan(rScore.getOutput().get(0), Literal.of(2)); Expression onCondition = ExpressionUtils.and(onCondition1, onCondition2, onCondition3); Expression whereCondition1 = new GreaterThan(rStudent.getOutput().get(1), Literal.of(18)); Expression whereCondition2 = new GreaterThan(rScore.getOutput().get(2), Literal.of(60)); Expression whereCondition = ExpressionUtils.and(whereCondition1, whereCondition2); Plan join = new LogicalJoin(JoinType.INNER_JOIN, 
Optional.of(onCondition), rStudent, rScore); Plan filter = new LogicalFilter(whereCondition, join); Plan root = new LogicalProject( Lists.newArrayList(rStudent.getOutput().get(1), rCourse.getOutput().get(1), rScore.getOutput().get(2)), filter ); System.out.println(root.treeString()); Memo memo = rewrite(root); Group rootGroup = memo.getRoot(); System.out.println(memo.copyOut().treeString()); Plan op1 = rootGroup.getLogicalExpression().child(0).getLogicalExpression().getPlan(); Plan op2 = rootGroup.getLogicalExpression().child(0).getLogicalExpression().child(0).getLogicalExpression() .getPlan(); Plan op3 = rootGroup.getLogicalExpression().child(0).getLogicalExpression().child(1).getLogicalExpression() .getPlan(); Assertions.assertTrue(op1 instanceof LogicalJoin); Assertions.assertTrue(op2 instanceof LogicalFilter); Assertions.assertTrue(op3 instanceof LogicalFilter); LogicalJoin join1 = (LogicalJoin) op1; LogicalFilter filter1 = (LogicalFilter) op2; LogicalFilter filter2 = (LogicalFilter) op3; Assertions.assertEquals(onCondition1, join1.getCondition().get()); Assertions.assertEquals(ExpressionUtils.and(onCondition2, whereCondition1), filter1.getPredicates()); Assertions.assertEquals(ExpressionUtils.and(onCondition3, whereCondition2), filter2.getPredicates()); } @Test public void pushDownPredicateIntoScanTest3() { Expression whereCondition1 = new EqualTo(new Add(rStudent.getOutput().get(0), Literal.of(1)), new Subtract(rScore.getOutput().get(0), Literal.of(2))); Expression whereCondition2 = new GreaterThan(rStudent.getOutput().get(1), Literal.of(18)); Expression whereCondition3 = new GreaterThan(rScore.getOutput().get(2), Literal.of(60)); Expression whereCondition = ExpressionUtils.and(whereCondition1, whereCondition2, whereCondition3); Plan join = new LogicalJoin(JoinType.INNER_JOIN, Optional.empty(), rStudent, rScore); Plan filter = new LogicalFilter(whereCondition, join); Plan root = new LogicalProject( Lists.newArrayList(rStudent.getOutput().get(1), 
rCourse.getOutput().get(1), rScore.getOutput().get(2)), filter ); System.out.println(root.treeString()); Memo memo = rewrite(root); Group rootGroup = memo.getRoot(); System.out.println(memo.copyOut().treeString()); Plan op1 = rootGroup.getLogicalExpression().child(0).getLogicalExpression().getPlan(); Plan op2 = rootGroup.getLogicalExpression().child(0).getLogicalExpression().child(0).getLogicalExpression() .getPlan(); Plan op3 = rootGroup.getLogicalExpression().child(0).getLogicalExpression().child(1).getLogicalExpression() .getPlan(); Assertions.assertTrue(op1 instanceof LogicalJoin); Assertions.assertTrue(op2 instanceof LogicalFilter); Assertions.assertTrue(op3 instanceof LogicalFilter); LogicalJoin join1 = (LogicalJoin) op1; LogicalFilter filter1 = (LogicalFilter) op2; LogicalFilter filter2 = (LogicalFilter) op3; Assertions.assertEquals(whereCondition1, join1.getCondition().get()); Assertions.assertEquals(whereCondition2, filter1.getPredicates()); Assertions.assertEquals(whereCondition3, filter2.getPredicates()); } @Test public void pushDownPredicateIntoScanTest4() { /* select student.name, course.name, score.grade from student,score,course where on student.id = score.sid and student.age between 18 and 20 and score.grade > 60 and student.id = score.sid */ Expression whereCondition1 = new EqualTo(rStudent.getOutput().get(0), rScore.getOutput().get(0)); Expression whereCondition2 = new EqualTo(rScore.getOutput().get(1), rCourse.getOutput().get(0)); Expression whereCondition3 = new Between(rStudent.getOutput().get(2), Literal.of(18), Literal.of(20)); Expression whereCondition3result = new And( new GreaterThanEqual(rStudent.getOutput().get(2), Literal.of(18)), new LessThanEqual(rStudent.getOutput().get(2), Literal.of(20))); Expression whereCondition4 = new GreaterThan(rScore.getOutput().get(2), Literal.of(60)); Expression whereCondition = ExpressionUtils.and(whereCondition1, whereCondition2, whereCondition3, whereCondition4); Plan join = new 
LogicalJoin(JoinType.INNER_JOIN, Optional.empty(), rStudent, rScore); Plan join1 = new LogicalJoin(JoinType.INNER_JOIN, Optional.empty(), join, rCourse); Plan filter = new LogicalFilter(whereCondition, join1); Plan root = new LogicalProject( Lists.newArrayList(rStudent.getOutput().get(1), rCourse.getOutput().get(1), rScore.getOutput().get(2)), filter ); System.out.println(root.treeString()); Memo memo = rewrite(root); Group rootGroup = memo.getRoot(); System.out.println(memo.copyOut().treeString()); Plan join2 = rootGroup.getLogicalExpression().child(0).getLogicalExpression().getPlan(); Plan join3 = rootGroup.getLogicalExpression().child(0).getLogicalExpression().child(0).getLogicalExpression() .getPlan(); Plan op1 = rootGroup.getLogicalExpression().child(0).getLogicalExpression().child(0).getLogicalExpression() .child(0).getLogicalExpression().getPlan(); Plan op2 = rootGroup.getLogicalExpression().child(0).getLogicalExpression().child(0).getLogicalExpression() .child(1).getLogicalExpression().getPlan(); Assertions.assertTrue(join2 instanceof LogicalJoin); Assertions.assertTrue(join3 instanceof LogicalJoin); Assertions.assertTrue(op1 instanceof LogicalFilter); Assertions.assertTrue(op2 instanceof LogicalFilter); Assertions.assertEquals(whereCondition2, ((LogicalJoin) join2).getCondition().get()); Assertions.assertEquals(whereCondition1, ((LogicalJoin) join3).getCondition().get()); Assertions.assertEquals(whereCondition3result.toSql(), ((LogicalFilter) op1).getPredicates().toSql()); Assertions.assertEquals(whereCondition4, ((LogicalFilter) op2).getPredicates()); } }
class PushDownPredicateTest { private Table student; private Table score; private Table course; private Plan rStudent; private Plan rScore; private Plan rCourse; /** * ut before. */ @BeforeAll public final void beforeAll() { student = new Table(0L, "student", Table.TableType.OLAP, ImmutableList.<Column>of(new Column("id", Type.INT, true, AggregateType.NONE, "0", ""), new Column("name", Type.STRING, true, AggregateType.NONE, "", ""), new Column("age", Type.INT, true, AggregateType.NONE, "", ""))); score = new Table(0L, "score", Table.TableType.OLAP, ImmutableList.<Column>of(new Column("sid", Type.INT, true, AggregateType.NONE, "0", ""), new Column("cid", Type.INT, true, AggregateType.NONE, "", ""), new Column("grade", Type.DOUBLE, true, AggregateType.NONE, "", ""))); course = new Table(0L, "course", Table.TableType.OLAP, ImmutableList.<Column>of(new Column("cid", Type.INT, true, AggregateType.NONE, "0", ""), new Column("name", Type.STRING, true, AggregateType.NONE, "", ""), new Column("teacher", Type.STRING, true, AggregateType.NONE, "", ""))); rStudent = new LogicalOlapScan(student, ImmutableList.of("student")); rScore = new LogicalOlapScan(score, ImmutableList.of("score")); rCourse = new LogicalOlapScan(course, ImmutableList.of("course")); } @Test public void pushDownPredicateIntoScanTest1() { Expression onCondition1 = new EqualTo(rStudent.getOutput().get(0), rScore.getOutput().get(0)); Expression onCondition2 = new GreaterThan(rStudent.getOutput().get(0), Literal.of(1)); Expression onCondition3 = new GreaterThan(rScore.getOutput().get(0), Literal.of(2)); Expression onCondition = ExpressionUtils.and(onCondition1, onCondition2, onCondition3); Expression whereCondition1 = new GreaterThan(rStudent.getOutput().get(1), Literal.of(18)); Expression whereCondition2 = new GreaterThan(rScore.getOutput().get(2), Literal.of(60)); Expression whereCondition = ExpressionUtils.and(whereCondition1, whereCondition2); Plan join = new LogicalJoin(JoinType.INNER_JOIN, 
Optional.of(onCondition), rStudent, rScore); Plan filter = new LogicalFilter(whereCondition, join); Plan root = new LogicalProject( Lists.newArrayList(rStudent.getOutput().get(1), rCourse.getOutput().get(1), rScore.getOutput().get(2)), filter ); System.out.println(root.treeString()); Memo memo = rewrite(root); Group rootGroup = memo.getRoot(); System.out.println(memo.copyOut().treeString()); Plan op1 = rootGroup.getLogicalExpression().child(0).getLogicalExpression().getPlan(); Plan op2 = rootGroup.getLogicalExpression().child(0).getLogicalExpression().child(0).getLogicalExpression() .getPlan(); Plan op3 = rootGroup.getLogicalExpression().child(0).getLogicalExpression().child(1).getLogicalExpression() .getPlan(); Assertions.assertTrue(op1 instanceof LogicalJoin); Assertions.assertTrue(op2 instanceof LogicalFilter); Assertions.assertTrue(op3 instanceof LogicalFilter); LogicalJoin join1 = (LogicalJoin) op1; LogicalFilter filter1 = (LogicalFilter) op2; LogicalFilter filter2 = (LogicalFilter) op3; Assertions.assertEquals(onCondition1, join1.getCondition().get()); Assertions.assertEquals(ExpressionUtils.and(onCondition2, whereCondition1), filter1.getPredicates()); Assertions.assertEquals(ExpressionUtils.and(onCondition3, whereCondition2), filter2.getPredicates()); } @Test public void pushDownPredicateIntoScanTest3() { Expression whereCondition1 = new EqualTo(new Add(rStudent.getOutput().get(0), Literal.of(1)), new Subtract(rScore.getOutput().get(0), Literal.of(2))); Expression whereCondition2 = new GreaterThan(rStudent.getOutput().get(1), Literal.of(18)); Expression whereCondition3 = new GreaterThan(rScore.getOutput().get(2), Literal.of(60)); Expression whereCondition = ExpressionUtils.and(whereCondition1, whereCondition2, whereCondition3); Plan join = new LogicalJoin(JoinType.INNER_JOIN, Optional.empty(), rStudent, rScore); Plan filter = new LogicalFilter(whereCondition, join); Plan root = new LogicalProject( Lists.newArrayList(rStudent.getOutput().get(1), 
rCourse.getOutput().get(1), rScore.getOutput().get(2)), filter ); System.out.println(root.treeString()); Memo memo = rewrite(root); Group rootGroup = memo.getRoot(); System.out.println(memo.copyOut().treeString()); Plan op1 = rootGroup.getLogicalExpression().child(0).getLogicalExpression().getPlan(); Plan op2 = rootGroup.getLogicalExpression().child(0).getLogicalExpression().child(0).getLogicalExpression() .getPlan(); Plan op3 = rootGroup.getLogicalExpression().child(0).getLogicalExpression().child(1).getLogicalExpression() .getPlan(); Assertions.assertTrue(op1 instanceof LogicalJoin); Assertions.assertTrue(op2 instanceof LogicalFilter); Assertions.assertTrue(op3 instanceof LogicalFilter); LogicalJoin join1 = (LogicalJoin) op1; LogicalFilter filter1 = (LogicalFilter) op2; LogicalFilter filter2 = (LogicalFilter) op3; Assertions.assertEquals(whereCondition1, join1.getCondition().get()); Assertions.assertEquals(whereCondition2, filter1.getPredicates()); Assertions.assertEquals(whereCondition3, filter2.getPredicates()); } @Test public void pushDownPredicateIntoScanTest4() { /* select student.name, course.name, score.grade from student,score,course where on student.id = score.sid and student.age between 18 and 20 and score.grade > 60 and student.id = score.sid */ Expression whereCondition1 = new EqualTo(rStudent.getOutput().get(0), rScore.getOutput().get(0)); Expression whereCondition2 = new EqualTo(rScore.getOutput().get(1), rCourse.getOutput().get(0)); Expression whereCondition3 = new Between(rStudent.getOutput().get(2), Literal.of(18), Literal.of(20)); Expression whereCondition3result = new And( new GreaterThanEqual(rStudent.getOutput().get(2), Literal.of(18)), new LessThanEqual(rStudent.getOutput().get(2), Literal.of(20))); Expression whereCondition4 = new GreaterThan(rScore.getOutput().get(2), Literal.of(60)); Expression whereCondition = ExpressionUtils.and(whereCondition1, whereCondition2, whereCondition3, whereCondition4); Plan join = new 
LogicalJoin(JoinType.INNER_JOIN, Optional.empty(), rStudent, rScore); Plan join1 = new LogicalJoin(JoinType.INNER_JOIN, Optional.empty(), join, rCourse); Plan filter = new LogicalFilter(whereCondition, join1); Plan root = new LogicalProject( Lists.newArrayList(rStudent.getOutput().get(1), rCourse.getOutput().get(1), rScore.getOutput().get(2)), filter ); System.out.println(root.treeString()); Memo memo = rewrite(root); Group rootGroup = memo.getRoot(); System.out.println(memo.copyOut().treeString()); Plan join2 = rootGroup.getLogicalExpression().child(0).getLogicalExpression().getPlan(); Plan join3 = rootGroup.getLogicalExpression().child(0).getLogicalExpression().child(0).getLogicalExpression() .getPlan(); Plan op1 = rootGroup.getLogicalExpression().child(0).getLogicalExpression().child(0).getLogicalExpression() .child(0).getLogicalExpression().getPlan(); Plan op2 = rootGroup.getLogicalExpression().child(0).getLogicalExpression().child(0).getLogicalExpression() .child(1).getLogicalExpression().getPlan(); Assertions.assertTrue(join2 instanceof LogicalJoin); Assertions.assertTrue(join3 instanceof LogicalJoin); Assertions.assertTrue(op1 instanceof LogicalFilter); Assertions.assertTrue(op2 instanceof LogicalFilter); Assertions.assertEquals(whereCondition2, ((LogicalJoin) join2).getCondition().get()); Assertions.assertEquals(whereCondition1, ((LogicalJoin) join3).getCondition().get()); Assertions.assertEquals(whereCondition3result.toSql(), ((LogicalFilter) op1).getPredicates().toSql()); Assertions.assertEquals(whereCondition4, ((LogicalFilter) op2).getPredicates()); } }
Well, the purpose of the fix was deleting files. As there haven't been any test for that so far I added them to show that now all files are deleted properly with HS cleaning feature enabled.
private void runArchiveExpirationTest(boolean cleanupExpiredJobs) throws Exception { int numExpiredJobs = cleanupExpiredJobs ? 1 : 0; int numJobs = 3; for (int x = 0; x < numJobs; x++) { runJob(); } waitForArchivesCreation(numJobs); CountDownLatch numExpectedArchivedJobs = new CountDownLatch(numJobs); CountDownLatch numExpectedExpiredJobs = new CountDownLatch(numExpiredJobs); Configuration historyServerConfig = createTestConfiguration(cleanupExpiredJobs); HistoryServer hs = new HistoryServer( historyServerConfig, (event) -> { switch (event.getType()){ case CREATED: numExpectedArchivedJobs.countDown(); break; case DELETED: numExpectedExpiredJobs.countDown(); break; } }); try { hs.start(); String baseUrl = "http: assertTrue(numExpectedArchivedJobs.await(10L, TimeUnit.SECONDS)); Collection<JobDetails> jobs = getJobsOverview(baseUrl).getJobs(); Assert.assertEquals(numJobs, jobs.size()); String jobIdToDelete = jobs.stream() .findFirst() .map(JobDetails::getJobId) .map(JobID::toString) .orElseThrow(() -> new IllegalStateException("Expected at least one existing job")); assertHSFilesExistence(jobIdToDelete, true); Files.deleteIfExists(jmDirectory.toPath().resolve(jobIdToDelete)); assertTrue(numExpectedExpiredJobs.await(10L, TimeUnit.SECONDS)); Collection<JobDetails> jobsAfterDeletion = getJobsOverview(baseUrl).getJobs(); Assert.assertEquals(numJobs - numExpiredJobs, jobsAfterDeletion.size()); Assert.assertEquals(1 - numExpiredJobs, jobsAfterDeletion.stream() .map(JobDetails::getJobId) .map(JobID::toString) .filter(jobId -> jobId.equals(jobIdToDelete)) .count()); assertHSFilesExistence(jobIdToDelete, !cleanupExpiredJobs); } finally { hs.stop(); } }
assertHSFilesExistence(jobIdToDelete, !cleanupExpiredJobs);
private void runArchiveExpirationTest(boolean cleanupExpiredJobs) throws Exception { int numExpiredJobs = cleanupExpiredJobs ? 1 : 0; int numJobs = 3; for (int x = 0; x < numJobs; x++) { runJob(); } waitForArchivesCreation(numJobs); CountDownLatch numExpectedArchivedJobs = new CountDownLatch(numJobs); CountDownLatch firstArchiveExpiredLatch = new CountDownLatch(numExpiredJobs); CountDownLatch allArchivesExpiredLatch = new CountDownLatch(cleanupExpiredJobs ? numJobs : 0); Configuration historyServerConfig = createTestConfiguration(cleanupExpiredJobs); HistoryServer hs = new HistoryServer( historyServerConfig, (event) -> { switch (event.getType()){ case CREATED: numExpectedArchivedJobs.countDown(); break; case DELETED: firstArchiveExpiredLatch.countDown(); allArchivesExpiredLatch.countDown(); break; } }); try { hs.start(); String baseUrl = "http: assertTrue(numExpectedArchivedJobs.await(10L, TimeUnit.SECONDS)); Collection<JobDetails> jobs = getJobsOverview(baseUrl).getJobs(); Assert.assertEquals(numJobs, jobs.size()); String jobIdToDelete = jobs.stream() .findFirst() .map(JobDetails::getJobId) .map(JobID::toString) .orElseThrow(() -> new IllegalStateException("Expected at least one existing job")); Files.deleteIfExists(jmDirectory.toPath().resolve(jobIdToDelete)); assertTrue(firstArchiveExpiredLatch.await(10L, TimeUnit.SECONDS)); Collection<JobDetails> jobsAfterDeletion = getJobsOverview(baseUrl).getJobs(); Assert.assertEquals(numJobs - numExpiredJobs, jobsAfterDeletion.size()); Assert.assertEquals(1 - numExpiredJobs, jobsAfterDeletion.stream() .map(JobDetails::getJobId) .map(JobID::toString) .filter(jobId -> jobId.equals(jobIdToDelete)) .count()); List<String> remainingJobIds = jobsAfterDeletion.stream() .map(JobDetails::getJobId) .map(JobID::toString) .collect(Collectors.toList()); for (String remainingJobId : remainingJobIds) { Files.deleteIfExists(jmDirectory.toPath().resolve(remainingJobId)); } assertTrue(allArchivesExpiredLatch.await(10L, TimeUnit.SECONDS)); 
assertJobFilesCleanedUp(cleanupExpiredJobs); } finally { hs.stop(); } }
class HistoryServerTest extends TestLogger { private static final JsonFactory JACKSON_FACTORY = new JsonFactory() .enable(JsonGenerator.Feature.AUTO_CLOSE_TARGET) .disable(JsonGenerator.Feature.AUTO_CLOSE_JSON_CONTENT); private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper() .enable(DeserializationFeature.FAIL_ON_MISSING_CREATOR_PROPERTIES); @Rule public final TemporaryFolder tmpFolder = new TemporaryFolder(); private MiniClusterWithClientResource cluster; private File jmDirectory; private File hsDirectory; @Parameterized.Parameters(name = "Flink version less than 1.4: {0}") public static Collection<Boolean> parameters() { return Arrays.asList(true, false); } @Parameterized.Parameter public static boolean versionLessThan14; @Before public void setUp() throws Exception { jmDirectory = tmpFolder.newFolder("jm_" + versionLessThan14); hsDirectory = tmpFolder.newFolder("hs_" + versionLessThan14); Configuration clusterConfig = new Configuration(); clusterConfig.setString(JobManagerOptions.ARCHIVE_DIR, jmDirectory.toURI().toString()); cluster = new MiniClusterWithClientResource( new MiniClusterResourceConfiguration.Builder() .setConfiguration(clusterConfig) .setNumberTaskManagers(1) .setNumberSlotsPerTaskManager(1) .build()); cluster.before(); } @After public void tearDown() { if (cluster != null) { cluster.after(); } } @Test public void testHistoryServerIntegration() throws Exception { final int numJobs = 2; for (int x = 0; x < numJobs; x++) { runJob(); } final int numLegacyJobs = 1; createLegacyArchive(jmDirectory.toPath()); waitForArchivesCreation(numJobs + numLegacyJobs); CountDownLatch numExpectedArchivedJobs = new CountDownLatch(numJobs + numLegacyJobs); Configuration historyServerConfig = createTestConfiguration(false); HistoryServer hs = new HistoryServer(historyServerConfig, (event) -> { if (event.getType() == HistoryServerArchiveFetcher.ArchiveEventType.CREATED) { numExpectedArchivedJobs.countDown(); } }); try { hs.start(); String baseUrl = "http: 
assertTrue(numExpectedArchivedJobs.await(10L, TimeUnit.SECONDS)); Assert.assertEquals(numJobs + numLegacyJobs, getJobsOverview(baseUrl).getJobs().size()); getDashboardConfiguration(baseUrl); } finally { hs.stop(); } } @Test public void testCleanExpiredJob() throws Exception { runArchiveExpirationTest(true); } @Test public void testRemainExpiredJob() throws Exception { runArchiveExpirationTest(false); } private void assertHSFilesExistence(String jobId, boolean fileExists){ Assert.assertEquals(fileExists, Files.exists(hsDirectory.toPath().resolve("jobs").resolve(jobId))); Assert.assertEquals(fileExists, Files.isDirectory(hsDirectory.toPath().resolve("jobs").resolve(jobId))); Assert.assertEquals(fileExists, Files.exists(hsDirectory.toPath().resolve("jobs").resolve(jobId + JSON_FILE_ENDING))); Assert.assertEquals(fileExists, Files.exists(hsDirectory.toPath().resolve("overviews").resolve(jobId + JSON_FILE_ENDING))); } private void waitForArchivesCreation(int numJobs) throws InterruptedException { File[] archives = jmDirectory.listFiles(); while (archives == null || archives.length != numJobs) { Thread.sleep(50); archives = jmDirectory.listFiles(); } } private Configuration createTestConfiguration(boolean cleanupExpiredJobs) { Configuration historyServerConfig = new Configuration(); historyServerConfig.setString(HistoryServerOptions.HISTORY_SERVER_ARCHIVE_DIRS, jmDirectory.toURI().toString()); historyServerConfig.setString(HistoryServerOptions.HISTORY_SERVER_WEB_DIR, hsDirectory.getAbsolutePath()); historyServerConfig.setLong(HistoryServerOptions.HISTORY_SERVER_ARCHIVE_REFRESH_INTERVAL, 100L); historyServerConfig.setBoolean(HistoryServerOptions.HISTORY_SERVER_CLEANUP_EXPIRED_JOBS, cleanupExpiredJobs); historyServerConfig.setInteger(HistoryServerOptions.HISTORY_SERVER_WEB_PORT, 0); return historyServerConfig; } private static DashboardConfiguration getDashboardConfiguration(String baseUrl) throws Exception { Tuple2<Integer, String> response = getFromHTTP(baseUrl + 
DashboardConfigurationHeaders.INSTANCE.getTargetRestEndpointURL()); return OBJECT_MAPPER.readValue(response.f1, DashboardConfiguration.class); } private static MultipleJobsDetails getJobsOverview(String baseUrl) throws Exception { Tuple2<Integer, String> response = getFromHTTP(baseUrl + JobsOverviewHeaders.URL); return OBJECT_MAPPER.readValue(response.f1, MultipleJobsDetails.class); } private static void runJob() throws Exception { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); env.fromElements(1, 2, 3).addSink(new DiscardingSink<>()); env.execute(); } static Tuple2<Integer, String> getFromHTTP(String url) throws Exception { URL u = new URL(url); HttpURLConnection connection = (HttpURLConnection) u.openConnection(); connection.setConnectTimeout(100000); connection.connect(); InputStream is; if (connection.getResponseCode() >= 400) { is = connection.getErrorStream(); } else { is = connection.getInputStream(); } return Tuple2.of(connection.getResponseCode(), IOUtils.toString(is, connection.getContentEncoding() != null ? 
connection.getContentEncoding() : "UTF-8")); } private static String createLegacyArchive(Path directory) throws IOException { JobID jobId = JobID.generate(); StringWriter sw = new StringWriter(); try (JsonGenerator gen = JACKSON_FACTORY.createGenerator(sw)) { try (JsonObject root = new JsonObject(gen)) { try (JsonArray finished = new JsonArray(gen, "finished")) { try (JsonObject job = new JsonObject(gen)) { gen.writeStringField("jid", jobId.toString()); gen.writeStringField("name", "testjob"); gen.writeStringField("state", JobStatus.FINISHED.name()); gen.writeNumberField("start-time", 0L); gen.writeNumberField("end-time", 1L); gen.writeNumberField("duration", 1L); gen.writeNumberField("last-modification", 1L); try (JsonObject tasks = new JsonObject(gen, "tasks")) { gen.writeNumberField("total", 0); if (versionLessThan14) { gen.writeNumberField("pending", 0); } else { gen.writeNumberField("created", 0); gen.writeNumberField("deploying", 0); gen.writeNumberField("scheduled", 0); } gen.writeNumberField("running", 0); gen.writeNumberField("finished", 0); gen.writeNumberField("canceling", 0); gen.writeNumberField("canceled", 0); gen.writeNumberField("failed", 0); } } } } } String json = sw.toString(); ArchivedJson archivedJson = new ArchivedJson("/joboverview", json); FsJobArchivist.archiveJob(new org.apache.flink.core.fs.Path(directory.toUri()), jobId, Collections.singleton(archivedJson)); return jobId.toString(); } private static final class JsonObject implements AutoCloseable { private final JsonGenerator gen; JsonObject(JsonGenerator gen) throws IOException { this.gen = gen; gen.writeStartObject(); } private JsonObject(JsonGenerator gen, String name) throws IOException { this.gen = gen; gen.writeObjectFieldStart(name); } @Override public void close() throws IOException { gen.writeEndObject(); } } private static final class JsonArray implements AutoCloseable { private final JsonGenerator gen; JsonArray(JsonGenerator gen, String name) throws IOException { this.gen = 
gen; gen.writeArrayFieldStart(name); } @Override public void close() throws IOException { gen.writeEndArray(); } } }
class HistoryServerTest extends TestLogger { private static final JsonFactory JACKSON_FACTORY = new JsonFactory() .enable(JsonGenerator.Feature.AUTO_CLOSE_TARGET) .disable(JsonGenerator.Feature.AUTO_CLOSE_JSON_CONTENT); private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper() .enable(DeserializationFeature.FAIL_ON_MISSING_CREATOR_PROPERTIES); @Rule public final TemporaryFolder tmpFolder = new TemporaryFolder(); private MiniClusterWithClientResource cluster; private File jmDirectory; private File hsDirectory; @Parameterized.Parameters(name = "Flink version less than 1.4: {0}") public static Collection<Boolean> parameters() { return Arrays.asList(true, false); } @Parameterized.Parameter public static boolean versionLessThan14; @Before public void setUp() throws Exception { jmDirectory = tmpFolder.newFolder("jm_" + versionLessThan14); hsDirectory = tmpFolder.newFolder("hs_" + versionLessThan14); Configuration clusterConfig = new Configuration(); clusterConfig.setString(JobManagerOptions.ARCHIVE_DIR, jmDirectory.toURI().toString()); cluster = new MiniClusterWithClientResource( new MiniClusterResourceConfiguration.Builder() .setConfiguration(clusterConfig) .setNumberTaskManagers(1) .setNumberSlotsPerTaskManager(1) .build()); cluster.before(); } @After public void tearDown() { if (cluster != null) { cluster.after(); } } @Test public void testHistoryServerIntegration() throws Exception { final int numJobs = 2; final int numLegacyJobs = 1; CountDownLatch numExpectedArchivedJobs = new CountDownLatch(numJobs + numLegacyJobs); Configuration historyServerConfig = createTestConfiguration(false); HistoryServer hs = new HistoryServer(historyServerConfig, (event) -> { if (event.getType() == HistoryServerArchiveFetcher.ArchiveEventType.CREATED) { numExpectedArchivedJobs.countDown(); } }); try { hs.start(); String baseUrl = "http: Assert.assertEquals(0, getJobsOverview(baseUrl).getJobs().size()); for (int x = 0; x < numJobs; x++) { runJob(); } 
createLegacyArchive(jmDirectory.toPath()); waitForArchivesCreation(numJobs + numLegacyJobs); assertTrue(numExpectedArchivedJobs.await(10L, TimeUnit.SECONDS)); Assert.assertEquals(numJobs + numLegacyJobs, getJobsOverview(baseUrl).getJobs().size()); getDashboardConfiguration(baseUrl); } finally { hs.stop(); } } @Test public void testRemoveOldestModifiedArchivesBeyondHistorySizeLimit() throws Exception { final int numArchivesToKeepInHistory = 2; final int numArchivesBeforeHsStarted = 4; final int numArchivesAfterHsStarted = 2; final int numArchivesToRemoveUponHsStart = numArchivesBeforeHsStarted - numArchivesToKeepInHistory; final long oneMinuteSinceEpoch = 1000L * 60L; List<String> expectedJobIdsToKeep = new LinkedList<>(); for (int j = 0; j < numArchivesBeforeHsStarted; j++) { String jobId = createLegacyArchive(jmDirectory.toPath(), j * oneMinuteSinceEpoch); if (j >= numArchivesToRemoveUponHsStart){ expectedJobIdsToKeep.add(jobId); } } CountDownLatch numArchivesCreatedInitially = new CountDownLatch(numArchivesToKeepInHistory); CountDownLatch numArchivesDeletedInitially = new CountDownLatch(numArchivesToRemoveUponHsStart); CountDownLatch numArchivesCreatedTotal = new CountDownLatch(numArchivesBeforeHsStarted - numArchivesToRemoveUponHsStart + numArchivesAfterHsStarted); CountDownLatch numArchivesDeletedTotal = new CountDownLatch(numArchivesToRemoveUponHsStart + numArchivesAfterHsStarted); Configuration historyServerConfig = createTestConfiguration(HistoryServerOptions.HISTORY_SERVER_CLEANUP_EXPIRED_JOBS.defaultValue()); historyServerConfig.set(HistoryServerOptions.HISTORY_SERVER_RETAINED_JOBS, numArchivesToKeepInHistory); HistoryServer hs = new HistoryServer(historyServerConfig, (event) -> { switch (event.getType()){ case CREATED: numArchivesCreatedInitially.countDown(); numArchivesCreatedTotal.countDown(); break; case DELETED: numArchivesDeletedInitially.countDown(); numArchivesDeletedTotal.countDown(); break; } }); try { hs.start(); String baseUrl = "http: 
assertTrue(numArchivesCreatedInitially.await(10L, TimeUnit.SECONDS)); assertTrue(numArchivesDeletedInitially.await(10L, TimeUnit.SECONDS)); Assert.assertEquals(new HashSet<>(expectedJobIdsToKeep), getIdsFromJobOverview(baseUrl)); for (int j = numArchivesBeforeHsStarted; j < numArchivesBeforeHsStarted + numArchivesAfterHsStarted; j++) { expectedJobIdsToKeep.remove(0); expectedJobIdsToKeep.add(createLegacyArchive(jmDirectory.toPath(), j * oneMinuteSinceEpoch)); } assertTrue(numArchivesCreatedTotal.await(10L, TimeUnit.SECONDS)); assertTrue(numArchivesDeletedTotal.await(10L, TimeUnit.SECONDS)); Assert.assertEquals(new HashSet<>(expectedJobIdsToKeep), getIdsFromJobOverview(baseUrl)); } finally { hs.stop(); } } private Set<String> getIdsFromJobOverview(String baseUrl) throws Exception { return getJobsOverview(baseUrl).getJobs().stream() .map(JobDetails::getJobId) .map(JobID::toString) .collect(Collectors.toSet()); } @Test(expected = IllegalConfigurationException.class) public void testFailIfHistorySizeLimitIsZero() throws Exception { startHistoryServerWithSizeLimit(0); } @Test(expected = IllegalConfigurationException.class) public void testFailIfHistorySizeLimitIsLessThanMinusOne() throws Exception { startHistoryServerWithSizeLimit(-2); } private void startHistoryServerWithSizeLimit(int maxHistorySize) throws IOException, FlinkException, InterruptedException { Configuration historyServerConfig = createTestConfiguration(HistoryServerOptions.HISTORY_SERVER_CLEANUP_EXPIRED_JOBS.defaultValue()); historyServerConfig.setInteger(HistoryServerOptions.HISTORY_SERVER_RETAINED_JOBS, maxHistorySize); new HistoryServer(historyServerConfig).start(); } @Test public void testCleanExpiredJob() throws Exception { runArchiveExpirationTest(true); } @Test public void testRemainExpiredJob() throws Exception { runArchiveExpirationTest(false); } private void assertJobFilesCleanedUp(boolean jobFilesShouldBeDeleted) throws IOException { try (Stream<Path> paths = Files.walk(hsDirectory.toPath())) 
{ final List<Path> jobFiles = paths .filter(path -> !path.equals(hsDirectory.toPath())) .map(path -> hsDirectory.toPath().relativize(path)) .filter(path -> !path.equals(Paths.get("config.json"))) .filter(path -> !path.equals(Paths.get("jobs"))) .filter(path -> !path.equals(Paths.get("jobs", "overview.json"))) .filter(path -> !path.equals(Paths.get("overviews"))) .collect(Collectors.toList()); assertThat(jobFiles, jobFilesShouldBeDeleted ? empty() : not(empty())); } } private void waitForArchivesCreation(int numJobs) throws InterruptedException { File[] archives = jmDirectory.listFiles(); while (archives == null || archives.length != numJobs) { Thread.sleep(50); archives = jmDirectory.listFiles(); } } private Configuration createTestConfiguration(boolean cleanupExpiredJobs) { Configuration historyServerConfig = new Configuration(); historyServerConfig.setString(HistoryServerOptions.HISTORY_SERVER_ARCHIVE_DIRS, jmDirectory.toURI().toString()); historyServerConfig.setString(HistoryServerOptions.HISTORY_SERVER_WEB_DIR, hsDirectory.getAbsolutePath()); historyServerConfig.setLong(HistoryServerOptions.HISTORY_SERVER_ARCHIVE_REFRESH_INTERVAL, 100L); historyServerConfig.setBoolean(HistoryServerOptions.HISTORY_SERVER_CLEANUP_EXPIRED_JOBS, cleanupExpiredJobs); historyServerConfig.setInteger(HistoryServerOptions.HISTORY_SERVER_WEB_PORT, 0); return historyServerConfig; } private static DashboardConfiguration getDashboardConfiguration(String baseUrl) throws Exception { Tuple2<Integer, String> response = getFromHTTP(baseUrl + DashboardConfigurationHeaders.INSTANCE.getTargetRestEndpointURL()); return OBJECT_MAPPER.readValue(response.f1, DashboardConfiguration.class); } private static MultipleJobsDetails getJobsOverview(String baseUrl) throws Exception { Tuple2<Integer, String> response = getFromHTTP(baseUrl + JobsOverviewHeaders.URL); return OBJECT_MAPPER.readValue(response.f1, MultipleJobsDetails.class); } private static void runJob() throws Exception { StreamExecutionEnvironment 
env = StreamExecutionEnvironment.getExecutionEnvironment(); env.fromElements(1, 2, 3).addSink(new DiscardingSink<>()); env.execute(); } static Tuple2<Integer, String> getFromHTTP(String url) throws Exception { URL u = new URL(url); HttpURLConnection connection = (HttpURLConnection) u.openConnection(); connection.setConnectTimeout(100000); connection.connect(); InputStream is; if (connection.getResponseCode() >= 400) { is = connection.getErrorStream(); } else { is = connection.getInputStream(); } return Tuple2.of(connection.getResponseCode(), IOUtils.toString(is, connection.getContentEncoding() != null ? connection.getContentEncoding() : "UTF-8")); } private static String createLegacyArchive(Path directory, long fileModifiedDate) throws IOException { String jobId = createLegacyArchive(directory); File jobArchive = directory.resolve(jobId).toFile(); jobArchive.setLastModified(fileModifiedDate); return jobId; } private static String createLegacyArchive(Path directory) throws IOException { JobID jobId = JobID.generate(); StringWriter sw = new StringWriter(); try (JsonGenerator gen = JACKSON_FACTORY.createGenerator(sw)) { try (JsonObject root = new JsonObject(gen)) { try (JsonArray finished = new JsonArray(gen, "finished")) { try (JsonObject job = new JsonObject(gen)) { gen.writeStringField("jid", jobId.toString()); gen.writeStringField("name", "testjob"); gen.writeStringField("state", JobStatus.FINISHED.name()); gen.writeNumberField("start-time", 0L); gen.writeNumberField("end-time", 1L); gen.writeNumberField("duration", 1L); gen.writeNumberField("last-modification", 1L); try (JsonObject tasks = new JsonObject(gen, "tasks")) { gen.writeNumberField("total", 0); if (versionLessThan14) { gen.writeNumberField("pending", 0); } else { gen.writeNumberField("created", 0); gen.writeNumberField("deploying", 0); gen.writeNumberField("scheduled", 0); } gen.writeNumberField("running", 0); gen.writeNumberField("finished", 0); gen.writeNumberField("canceling", 0); 
gen.writeNumberField("canceled", 0); gen.writeNumberField("failed", 0); } } } } } String json = sw.toString(); ArchivedJson archivedJson = new ArchivedJson("/joboverview", json); FsJobArchivist.archiveJob(new org.apache.flink.core.fs.Path(directory.toUri()), jobId, Collections.singleton(archivedJson)); return jobId.toString(); } private static final class JsonObject implements AutoCloseable { private final JsonGenerator gen; JsonObject(JsonGenerator gen) throws IOException { this.gen = gen; gen.writeStartObject(); } private JsonObject(JsonGenerator gen, String name) throws IOException { this.gen = gen; gen.writeObjectFieldStart(name); } @Override public void close() throws IOException { gen.writeEndObject(); } } private static final class JsonArray implements AutoCloseable { private final JsonGenerator gen; JsonArray(JsonGenerator gen, String name) throws IOException { this.gen = gen; gen.writeArrayFieldStart(name); } @Override public void close() throws IOException { gen.writeEndArray(); } } }
shoudl we count to show on status page?
private boolean addCommitToStream(Commit commit, CommitWorkStream commitStream) { Preconditions.checkNotNull(commit); if (commit.work().isFailed()) { return true; } final ComputationState state = commit.computationState(); final Windmill.WorkItemCommitRequest request = commit.request(); final int size = commit.getSize(); commit.work().setState(Work.State.COMMITTING); activeCommitBytes.addAndGet(size); if (commitStream.commitWorkItem( state.getComputationId(), request, (Windmill.CommitStatus status) -> { if (status != Windmill.CommitStatus.OK) { readerCache.invalidateReader( WindmillComputationKey.create( state.getComputationId(), request.getKey(), request.getShardingKey())); stateCache .forComputation(state.getComputationId()) .invalidate(request.getKey(), request.getShardingKey()); } activeCommitBytes.addAndGet(-size); state.completeWorkAndScheduleNextWorkForKey( ShardedKey.create(request.getKey(), request.getShardingKey()), request.getWorkToken()); })) { return true; } else { commit.work().setState(Work.State.COMMIT_QUEUED); activeCommitBytes.addAndGet(-size); return false; } }
return true;
private boolean addCommitToStream(Commit commit, CommitWorkStream commitStream) { Preconditions.checkNotNull(commit); if (commit.work().isFailed()) { return true; } final ComputationState state = commit.computationState(); final Windmill.WorkItemCommitRequest request = commit.request(); final int size = commit.getSize(); commit.work().setState(Work.State.COMMITTING); activeCommitBytes.addAndGet(size); if (commitStream.commitWorkItem( state.getComputationId(), request, (Windmill.CommitStatus status) -> { if (status != Windmill.CommitStatus.OK) { readerCache.invalidateReader( WindmillComputationKey.create( state.getComputationId(), request.getKey(), request.getShardingKey())); stateCache .forComputation(state.getComputationId()) .invalidate(request.getKey(), request.getShardingKey()); } activeCommitBytes.addAndGet(-size); state.completeWorkAndScheduleNextWorkForKey( ShardedKey.create(request.getKey(), request.getShardingKey()), request.getWorkToken()); })) { return true; } else { commit.work().setState(Work.State.COMMIT_QUEUED); activeCommitBytes.addAndGet(-size); return false; } }
class with beam_fn_api enabled", StreamingDataflowWorker.class.getSimpleName()); StreamingDataflowWorker worker = StreamingDataflowWorker.fromDataflowWorkerHarnessOptions(options); MetricsEnvironment.setProcessWideContainer(new MetricsLogger(null)); StreamingStepMetricsContainer.setEnablePerWorkerMetrics( options.isEnableStreamingEngine() && DataflowRunner.hasExperiment(options, "enable_per_worker_metrics")); JvmInitializers.runBeforeProcessing(options); worker.startStatusPages(); worker.start(); } public static StreamingDataflowWorker fromDataflowWorkerHarnessOptions( DataflowWorkerHarnessOptions options) throws IOException { StreamingDataflowWorker worker = new StreamingDataflowWorker( Collections.emptyList(), IntrinsicMapTaskExecutorFactory.defaultFactory(), new DataflowWorkUnitClient(options, LOG), options.as(StreamingDataflowWorkerOptions.class), true, new HotKeyLogger(), Instant::now, (threadName) -> Executors.newSingleThreadScheduledExecutor( new ThreadFactoryBuilder().setNameFormat(threadName).build())); options .as(StreamingDataflowWorkerOptions.class) .getWindmillServerStub() .setProcessHeartbeatResponses(worker::handleHeartbeatResponses); return worker; }
class with beam_fn_api enabled", StreamingDataflowWorker.class.getSimpleName()); StreamingDataflowWorker worker = StreamingDataflowWorker.fromDataflowWorkerHarnessOptions(options); MetricsEnvironment.setProcessWideContainer(new MetricsLogger(null)); StreamingStepMetricsContainer.setEnablePerWorkerMetrics( options.isEnableStreamingEngine() && DataflowRunner.hasExperiment(options, "enable_per_worker_metrics")); JvmInitializers.runBeforeProcessing(options); worker.startStatusPages(); worker.start(); } public static StreamingDataflowWorker fromDataflowWorkerHarnessOptions( DataflowWorkerHarnessOptions options) throws IOException { return new StreamingDataflowWorker( Collections.emptyList(), IntrinsicMapTaskExecutorFactory.defaultFactory(), new DataflowWorkUnitClient(options, LOG), options.as(StreamingDataflowWorkerOptions.class), true, new HotKeyLogger(), Instant::now, (threadName) -> Executors.newSingleThreadScheduledExecutor( new ThreadFactoryBuilder().setNameFormat(threadName).build())); }
```suggestion LOG.warn("Keys size is not equal to column size. Error={}", e.getMessage()); ```
private ArrayList<DropPartitionClause> getDropPartitionClause(OlapTable olapTable, Column partitionColumn, String partitionFormat) { ArrayList<DropPartitionClause> dropPartitionClauses = new ArrayList<>(); Calendar calendar = Calendar.getInstance(); DynamicPartitionProperty dynamicPartitionProperty = olapTable.getTableProperty().getDynamicPartitionProperty(); String lowerBorder = DynamicPartitionUtil.getPartitionRange(dynamicPartitionProperty.getTimeUnit(), dynamicPartitionProperty.getStart(), (Calendar) calendar.clone(), partitionFormat); String upperBorder = DynamicPartitionUtil.getPartitionRange(dynamicPartitionProperty.getTimeUnit(), 0, (Calendar) calendar.clone(), partitionFormat); PartitionValue lowerPartitionValue = new PartitionValue(lowerBorder); PartitionValue upperPartitionValue = new PartitionValue(upperBorder); Range<PartitionKey> reservePartitionKeyRange; try { PartitionKey lowerBound = PartitionKey.createPartitionKey(Collections.singletonList(lowerPartitionValue), Collections.singletonList(partitionColumn)); PartitionKey upperBound = PartitionKey.createPartitionKey(Collections.singletonList(upperPartitionValue), Collections.singletonList(partitionColumn)); reservePartitionKeyRange = Range.closedOpen(lowerBound, upperBound); } catch (AnalysisException e) { LOG.warn("Keys size is not equal to column size. 
Error=" + e.getMessage()); return dropPartitionClauses; } RangePartitionInfo info = (RangePartitionInfo) (olapTable.getPartitionInfo()); List<Map.Entry<Long, Range<PartitionKey>>> idToRanges = new ArrayList<>(info.getIdToRange().entrySet()); idToRanges.sort(Comparator.comparing(o -> o.getValue().upperEndpoint())); for (Map.Entry<Long, Range<PartitionKey>> idToRange : idToRanges) { try { Long checkDropPartitionId = idToRange.getKey(); Range<PartitionKey> checkDropPartitionKey = idToRange.getValue(); RangeUtils.checkRangeIntersect(reservePartitionKeyRange, checkDropPartitionKey); if (checkDropPartitionKey.upperEndpoint().compareTo(reservePartitionKeyRange.lowerEndpoint()) <= 0) { String dropPartitionName = olapTable.getPartition(checkDropPartitionId).getName(); dropPartitionClauses.add(new DropPartitionClause(false, dropPartitionName, false)); } } catch (DdlException e) { break; } } return dropPartitionClauses; }
LOG.warn("Keys size is not equal to column size. Error=" + e.getMessage());
private ArrayList<DropPartitionClause> getDropPartitionClause(OlapTable olapTable, Column partitionColumn, String partitionFormat) { ArrayList<DropPartitionClause> dropPartitionClauses = new ArrayList<>(); Calendar calendar = Calendar.getInstance(); DynamicPartitionProperty dynamicPartitionProperty = olapTable.getTableProperty().getDynamicPartitionProperty(); String lowerBorder = DynamicPartitionUtil.getPartitionRange(dynamicPartitionProperty.getTimeUnit(), dynamicPartitionProperty.getStart(), (Calendar) calendar.clone(), partitionFormat); String upperBorder = DynamicPartitionUtil.getPartitionRange(dynamicPartitionProperty.getTimeUnit(), 0, (Calendar) calendar.clone(), partitionFormat); PartitionValue lowerPartitionValue = new PartitionValue(lowerBorder); PartitionValue upperPartitionValue = new PartitionValue(upperBorder); Range<PartitionKey> reservePartitionKeyRange; try { PartitionKey lowerBound = PartitionKey.createPartitionKey(Collections.singletonList(lowerPartitionValue), Collections.singletonList(partitionColumn)); PartitionKey upperBound = PartitionKey.createPartitionKey(Collections.singletonList(upperPartitionValue), Collections.singletonList(partitionColumn)); reservePartitionKeyRange = Range.closedOpen(lowerBound, upperBound); } catch (AnalysisException e) { LOG.warn("Keys size is not equal to column size. 
Error={}", e.getMessage()); return dropPartitionClauses; } RangePartitionInfo info = (RangePartitionInfo) (olapTable.getPartitionInfo()); List<Map.Entry<Long, Range<PartitionKey>>> idToRanges = new ArrayList<>(info.getIdToRange(false).entrySet()); idToRanges.sort(Comparator.comparing(o -> o.getValue().upperEndpoint())); for (Map.Entry<Long, Range<PartitionKey>> idToRange : idToRanges) { try { Long checkDropPartitionId = idToRange.getKey(); Range<PartitionKey> checkDropPartitionKey = idToRange.getValue(); RangeUtils.checkRangeIntersect(reservePartitionKeyRange, checkDropPartitionKey); if (checkDropPartitionKey.upperEndpoint().compareTo(reservePartitionKeyRange.lowerEndpoint()) <= 0) { String dropPartitionName = olapTable.getPartition(checkDropPartitionId).getName(); dropPartitionClauses.add(new DropPartitionClause(false, dropPartitionName, false)); } } catch (DdlException e) { break; } } return dropPartitionClauses; }
class DynamicPartitionScheduler extends MasterDaemon { private static final Logger LOG = LogManager.getLogger(DynamicPartitionScheduler.class); public static final String LAST_SCHEDULER_TIME = "lastSchedulerTime"; public static final String LAST_UPDATE_TIME = "lastUpdateTime"; public static final String DYNAMIC_PARTITION_STATE = "dynamicPartitionState"; public static final String CREATE_PARTITION_MSG = "createPartitionMsg"; public static final String DROP_PARTITION_MSG = "dropPartitionMsg"; private final String DEFAULT_RUNTIME_VALUE = "N/A"; private Map<String, Map<String, String>> runtimeInfos = Maps.newConcurrentMap(); private Set<Pair<Long, Long>> dynamicPartitionTableInfo = Sets.newConcurrentHashSet(); private boolean initialize; public enum State { NORMAL, ERROR } public DynamicPartitionScheduler(String name, long intervalMs) { super(name, intervalMs); this.initialize = false; } public void registerDynamicPartitionTable(Long dbId, Long tableId) { dynamicPartitionTableInfo.add(new Pair<>(dbId, tableId)); } public void removeDynamicPartitionTable(Long dbId, Long tableId) { dynamicPartitionTableInfo.remove(new Pair<>(dbId, tableId)); } public String getRuntimeInfo(String tableName, String key) { Map<String, String> tableRuntimeInfo = runtimeInfos.getOrDefault(tableName, createDefaultRuntimeInfo()); return tableRuntimeInfo.getOrDefault(key, DEFAULT_RUNTIME_VALUE); } public void removeRuntimeInfo(String tableName) { runtimeInfos.remove(tableName); } public void createOrUpdateRuntimeInfo(String tableName, String key, String value) { Map<String, String> runtimeInfo = runtimeInfos.get(tableName); if (runtimeInfo == null) { runtimeInfo = createDefaultRuntimeInfo(); runtimeInfo.put(key, value); runtimeInfos.put(tableName, runtimeInfo); } else { runtimeInfo.put(key, value); } } private Map<String, String> createDefaultRuntimeInfo() { Map<String, String> defaultRuntimeInfo = Maps.newConcurrentMap(); defaultRuntimeInfo.put(LAST_UPDATE_TIME, DEFAULT_RUNTIME_VALUE); 
defaultRuntimeInfo.put(LAST_SCHEDULER_TIME, DEFAULT_RUNTIME_VALUE); defaultRuntimeInfo.put(DYNAMIC_PARTITION_STATE, State.NORMAL.toString()); defaultRuntimeInfo.put(CREATE_PARTITION_MSG, DEFAULT_RUNTIME_VALUE); defaultRuntimeInfo.put(DROP_PARTITION_MSG, DEFAULT_RUNTIME_VALUE); return defaultRuntimeInfo; } private ArrayList<AddPartitionClause> getAddPartitionClause(OlapTable olapTable, Column partitionColumn, String partitionFormat) { ArrayList<AddPartitionClause> addPartitionClauses = new ArrayList<>(); Calendar calendar = Calendar.getInstance(); DynamicPartitionProperty dynamicPartitionProperty = olapTable.getTableProperty().getDynamicPartitionProperty(); for (int i = 0; i <= dynamicPartitionProperty.getEnd(); i++) { String prevBorder = DynamicPartitionUtil.getPartitionRange(dynamicPartitionProperty.getTimeUnit(), i, (Calendar) calendar.clone(), partitionFormat); String nextBorder = DynamicPartitionUtil.getPartitionRange(dynamicPartitionProperty.getTimeUnit(), i + 1, (Calendar) calendar.clone(), partitionFormat); PartitionValue lowerValue = new PartitionValue(prevBorder); PartitionValue upperValue = new PartitionValue(nextBorder); PartitionInfo partitionInfo = olapTable.getPartitionInfo(); RangePartitionInfo info = (RangePartitionInfo) (partitionInfo); boolean isPartitionExists = false; Range<PartitionKey> addPartitionKeyRange; try { PartitionKey lowerBound = PartitionKey.createPartitionKey(Collections.singletonList(lowerValue), Collections.singletonList(partitionColumn)); PartitionKey upperBound = PartitionKey.createPartitionKey(Collections.singletonList(upperValue), Collections.singletonList(partitionColumn)); addPartitionKeyRange = Range.closedOpen(lowerBound, upperBound); } catch (AnalysisException e) { LOG.warn("Keys size is not equal to column size. 
Error=" + e.getMessage()); continue; } for (Range<PartitionKey> partitionKeyRange : info.getIdToRange().values()) { try { RangeUtils.checkRangeIntersect(partitionKeyRange, addPartitionKeyRange); } catch (DdlException e) { isPartitionExists = true; if (addPartitionKeyRange.equals(partitionKeyRange)) { clearCreatePartitionFailedMsg(olapTable.getName()); } else { recordCreatePartitionFailedMsg(olapTable.getName(), e.getMessage()); } break; } } if (isPartitionExists) { continue; } PartitionKeyDesc partitionKeyDesc = new PartitionKeyDesc(Collections.singletonList(lowerValue), Collections.singletonList(upperValue)); HashMap<String, String> partitionProperties = new HashMap<>(1); partitionProperties.put("replication_num", String.valueOf(DynamicPartitionUtil.estimateReplicateNum(olapTable))); String partitionName = dynamicPartitionProperty.getPrefix() + DynamicPartitionUtil.getFormattedPartitionName(prevBorder); SingleRangePartitionDesc rangePartitionDesc = new SingleRangePartitionDesc(true, partitionName, partitionKeyDesc, partitionProperties); HashDistributionInfo hashDistributionInfo = (HashDistributionInfo) olapTable.getDefaultDistributionInfo(); List<String> distColumnNames = new ArrayList<>(); for (Column distributionColumn : hashDistributionInfo.getDistributionColumns()) { distColumnNames.add(distributionColumn.getName()); } DistributionDesc distributionDesc = new HashDistributionDesc(dynamicPartitionProperty.getBuckets(), distColumnNames); addPartitionClauses.add(new AddPartitionClause(rangePartitionDesc, distributionDesc, null, false)); } return addPartitionClauses; } private void executeDynamicPartition() { Iterator<Pair<Long, Long>> iterator = dynamicPartitionTableInfo.iterator(); while (iterator.hasNext()) { Pair<Long, Long> tableInfo = iterator.next(); Long dbId = tableInfo.first; Long tableId = tableInfo.second; Database db = Catalog.getInstance().getDb(dbId); if (db == null) { iterator.remove(); continue; } ArrayList<AddPartitionClause> addPartitionClauses = 
new ArrayList<>(); ArrayList<DropPartitionClause> dropPartitionClauses; String tableName; boolean skipAddPartition = false; db.readLock(); OlapTable olapTable; try { olapTable = (OlapTable) db.getTable(tableId); if (olapTable == null || !olapTable.dynamicPartitionExists() || !olapTable.getTableProperty().getDynamicPartitionProperty().getEnable()) { iterator.remove(); continue; } if (olapTable.getState() != OlapTable.OlapTableState.NORMAL) { String errorMsg = "Table[" + olapTable.getName() + "]'s state is not NORMAL." + "Do not allow doing dynamic add partition. table state=" + olapTable.getState(); recordCreatePartitionFailedMsg(olapTable.getName(), errorMsg); LOG.info(errorMsg); skipAddPartition = true; } createOrUpdateRuntimeInfo(olapTable.getName(), LAST_SCHEDULER_TIME, TimeUtils.getCurrentFormatTime()); RangePartitionInfo rangePartitionInfo = (RangePartitionInfo) olapTable.getPartitionInfo(); Column partitionColumn = rangePartitionInfo.getPartitionColumns().get(0); String partitionFormat; try { partitionFormat = DynamicPartitionUtil.getPartitionFormat(partitionColumn); } catch (DdlException e) { recordCreatePartitionFailedMsg(olapTable.getName(), e.getMessage()); continue; } if (!skipAddPartition) { addPartitionClauses = getAddPartitionClause(olapTable, partitionColumn, partitionFormat); } dropPartitionClauses = getDropPartitionClause(olapTable, partitionColumn, partitionFormat); tableName = olapTable.getName(); } finally { db.readUnlock(); } for (DropPartitionClause dropPartitionClause : dropPartitionClauses) { db.writeLock(); try { Catalog.getCurrentCatalog().dropPartition(db, olapTable, dropPartitionClause); clearDropPartitionFailedMsg(tableName); } catch (DdlException e) { recordDropPartitionFailedMsg(tableName, e.getMessage()); } finally { db.writeUnlock(); } } if (!skipAddPartition) { for (AddPartitionClause addPartitionClause : addPartitionClauses) { try { Catalog.getCurrentCatalog().addPartition(db, tableName, addPartitionClause); 
clearCreatePartitionFailedMsg(tableName); } catch (DdlException e) { recordCreatePartitionFailedMsg(tableName, e.getMessage()); } } } } } private void recordCreatePartitionFailedMsg(String tableName, String msg) { LOG.warn("dynamic add partition failed: " + msg); createOrUpdateRuntimeInfo(tableName, DYNAMIC_PARTITION_STATE, State.ERROR.toString()); createOrUpdateRuntimeInfo(tableName, CREATE_PARTITION_MSG, msg); } private void clearCreatePartitionFailedMsg(String tableName) { createOrUpdateRuntimeInfo(tableName, DYNAMIC_PARTITION_STATE, State.NORMAL.toString()); createOrUpdateRuntimeInfo(tableName, CREATE_PARTITION_MSG, DEFAULT_RUNTIME_VALUE); } private void recordDropPartitionFailedMsg(String tableName, String msg) { LOG.warn("dynamic drop partition failed: " + msg); createOrUpdateRuntimeInfo(tableName, DYNAMIC_PARTITION_STATE, State.ERROR.toString()); createOrUpdateRuntimeInfo(tableName, DROP_PARTITION_MSG, msg); } private void clearDropPartitionFailedMsg(String tableName) { createOrUpdateRuntimeInfo(tableName, DYNAMIC_PARTITION_STATE, State.NORMAL.toString()); createOrUpdateRuntimeInfo(tableName, DROP_PARTITION_MSG, DEFAULT_RUNTIME_VALUE); } private void initDynamicPartitionTable() { for (Long dbId : Catalog.getInstance().getDbIds()) { Database db = Catalog.getInstance().getDb(dbId); if (db == null) { continue; } db.readLock(); try { for (Table table : Catalog.getInstance().getDb(dbId).getTables()) { if (DynamicPartitionUtil.isDynamicPartitionTable(table)) { registerDynamicPartitionTable(db.getId(), table.getId()); } } } finally { db.readUnlock(); } } initialize = true; } @Override protected void runAfterCatalogReady() { if (!initialize) { initDynamicPartitionTable(); } if (Config.dynamic_partition_enable) { executeDynamicPartition(); } } }
class DynamicPartitionScheduler extends MasterDaemon { private static final Logger LOG = LogManager.getLogger(DynamicPartitionScheduler.class); public static final String LAST_SCHEDULER_TIME = "lastSchedulerTime"; public static final String LAST_UPDATE_TIME = "lastUpdateTime"; public static final String DYNAMIC_PARTITION_STATE = "dynamicPartitionState"; public static final String CREATE_PARTITION_MSG = "createPartitionMsg"; public static final String DROP_PARTITION_MSG = "dropPartitionMsg"; private final String DEFAULT_RUNTIME_VALUE = "N/A"; private Map<String, Map<String, String>> runtimeInfos = Maps.newConcurrentMap(); private Set<Pair<Long, Long>> dynamicPartitionTableInfo = Sets.newConcurrentHashSet(); private boolean initialize; public enum State { NORMAL, ERROR } public DynamicPartitionScheduler(String name, long intervalMs) { super(name, intervalMs); this.initialize = false; } public void registerDynamicPartitionTable(Long dbId, Long tableId) { dynamicPartitionTableInfo.add(new Pair<>(dbId, tableId)); } public void removeDynamicPartitionTable(Long dbId, Long tableId) { dynamicPartitionTableInfo.remove(new Pair<>(dbId, tableId)); } public String getRuntimeInfo(String tableName, String key) { Map<String, String> tableRuntimeInfo = runtimeInfos.getOrDefault(tableName, createDefaultRuntimeInfo()); return tableRuntimeInfo.getOrDefault(key, DEFAULT_RUNTIME_VALUE); } public void removeRuntimeInfo(String tableName) { runtimeInfos.remove(tableName); } public void createOrUpdateRuntimeInfo(String tableName, String key, String value) { Map<String, String> runtimeInfo = runtimeInfos.get(tableName); if (runtimeInfo == null) { runtimeInfo = createDefaultRuntimeInfo(); runtimeInfo.put(key, value); runtimeInfos.put(tableName, runtimeInfo); } else { runtimeInfo.put(key, value); } } private Map<String, String> createDefaultRuntimeInfo() { Map<String, String> defaultRuntimeInfo = Maps.newConcurrentMap(); defaultRuntimeInfo.put(LAST_UPDATE_TIME, DEFAULT_RUNTIME_VALUE); 
defaultRuntimeInfo.put(LAST_SCHEDULER_TIME, DEFAULT_RUNTIME_VALUE); defaultRuntimeInfo.put(DYNAMIC_PARTITION_STATE, State.NORMAL.toString()); defaultRuntimeInfo.put(CREATE_PARTITION_MSG, DEFAULT_RUNTIME_VALUE); defaultRuntimeInfo.put(DROP_PARTITION_MSG, DEFAULT_RUNTIME_VALUE); return defaultRuntimeInfo; } private ArrayList<AddPartitionClause> getAddPartitionClause(OlapTable olapTable, Column partitionColumn, String partitionFormat) { ArrayList<AddPartitionClause> addPartitionClauses = new ArrayList<>(); Calendar calendar = Calendar.getInstance(); DynamicPartitionProperty dynamicPartitionProperty = olapTable.getTableProperty().getDynamicPartitionProperty(); for (int i = 0; i <= dynamicPartitionProperty.getEnd(); i++) { String prevBorder = DynamicPartitionUtil.getPartitionRange(dynamicPartitionProperty.getTimeUnit(), i, (Calendar) calendar.clone(), partitionFormat); String nextBorder = DynamicPartitionUtil.getPartitionRange(dynamicPartitionProperty.getTimeUnit(), i + 1, (Calendar) calendar.clone(), partitionFormat); PartitionValue lowerValue = new PartitionValue(prevBorder); PartitionValue upperValue = new PartitionValue(nextBorder); PartitionInfo partitionInfo = olapTable.getPartitionInfo(); RangePartitionInfo info = (RangePartitionInfo) (partitionInfo); boolean isPartitionExists = false; Range<PartitionKey> addPartitionKeyRange; try { PartitionKey lowerBound = PartitionKey.createPartitionKey(Collections.singletonList(lowerValue), Collections.singletonList(partitionColumn)); PartitionKey upperBound = PartitionKey.createPartitionKey(Collections.singletonList(upperValue), Collections.singletonList(partitionColumn)); addPartitionKeyRange = Range.closedOpen(lowerBound, upperBound); } catch (AnalysisException e) { LOG.warn("Keys size is not equal to column size. 
Error={}", e.getMessage()); continue; } for (Range<PartitionKey> partitionKeyRange : info.getIdToRange(false).values()) { try { RangeUtils.checkRangeIntersect(partitionKeyRange, addPartitionKeyRange); } catch (DdlException e) { isPartitionExists = true; if (addPartitionKeyRange.equals(partitionKeyRange)) { clearCreatePartitionFailedMsg(olapTable.getName()); } else { recordCreatePartitionFailedMsg(olapTable.getName(), e.getMessage()); } break; } } if (isPartitionExists) { continue; } PartitionKeyDesc partitionKeyDesc = new PartitionKeyDesc(Collections.singletonList(lowerValue), Collections.singletonList(upperValue)); HashMap<String, String> partitionProperties = new HashMap<>(1); partitionProperties.put("replication_num", String.valueOf(DynamicPartitionUtil.estimateReplicateNum(olapTable))); String partitionName = dynamicPartitionProperty.getPrefix() + DynamicPartitionUtil.getFormattedPartitionName(prevBorder); SingleRangePartitionDesc rangePartitionDesc = new SingleRangePartitionDesc(true, partitionName, partitionKeyDesc, partitionProperties); HashDistributionInfo hashDistributionInfo = (HashDistributionInfo) olapTable.getDefaultDistributionInfo(); List<String> distColumnNames = new ArrayList<>(); for (Column distributionColumn : hashDistributionInfo.getDistributionColumns()) { distColumnNames.add(distributionColumn.getName()); } DistributionDesc distributionDesc = new HashDistributionDesc(dynamicPartitionProperty.getBuckets(), distColumnNames); addPartitionClauses.add(new AddPartitionClause(rangePartitionDesc, distributionDesc, null, false)); } return addPartitionClauses; } private void executeDynamicPartition() { Iterator<Pair<Long, Long>> iterator = dynamicPartitionTableInfo.iterator(); while (iterator.hasNext()) { Pair<Long, Long> tableInfo = iterator.next(); Long dbId = tableInfo.first; Long tableId = tableInfo.second; Database db = Catalog.getInstance().getDb(dbId); if (db == null) { iterator.remove(); continue; } ArrayList<AddPartitionClause> 
addPartitionClauses = new ArrayList<>(); ArrayList<DropPartitionClause> dropPartitionClauses; String tableName; boolean skipAddPartition = false; db.readLock(); OlapTable olapTable; try { olapTable = (OlapTable) db.getTable(tableId); if (olapTable == null || !olapTable.dynamicPartitionExists() || !olapTable.getTableProperty().getDynamicPartitionProperty().getEnable()) { iterator.remove(); continue; } if (olapTable.getState() != OlapTable.OlapTableState.NORMAL) { String errorMsg = "Table[" + olapTable.getName() + "]'s state is not NORMAL." + "Do not allow doing dynamic add partition. table state=" + olapTable.getState(); recordCreatePartitionFailedMsg(olapTable.getName(), errorMsg); LOG.info(errorMsg); skipAddPartition = true; } createOrUpdateRuntimeInfo(olapTable.getName(), LAST_SCHEDULER_TIME, TimeUtils.getCurrentFormatTime()); RangePartitionInfo rangePartitionInfo = (RangePartitionInfo) olapTable.getPartitionInfo(); Column partitionColumn = rangePartitionInfo.getPartitionColumns().get(0); String partitionFormat; try { partitionFormat = DynamicPartitionUtil.getPartitionFormat(partitionColumn); } catch (DdlException e) { recordCreatePartitionFailedMsg(olapTable.getName(), e.getMessage()); continue; } if (!skipAddPartition) { addPartitionClauses = getAddPartitionClause(olapTable, partitionColumn, partitionFormat); } dropPartitionClauses = getDropPartitionClause(olapTable, partitionColumn, partitionFormat); tableName = olapTable.getName(); } finally { db.readUnlock(); } for (DropPartitionClause dropPartitionClause : dropPartitionClauses) { db.writeLock(); try { Catalog.getCurrentCatalog().dropPartition(db, olapTable, dropPartitionClause); clearDropPartitionFailedMsg(tableName); } catch (DdlException e) { recordDropPartitionFailedMsg(tableName, e.getMessage()); } finally { db.writeUnlock(); } } if (!skipAddPartition) { for (AddPartitionClause addPartitionClause : addPartitionClauses) { try { Catalog.getCurrentCatalog().addPartition(db, tableName, addPartitionClause); 
clearCreatePartitionFailedMsg(tableName); } catch (DdlException e) { recordCreatePartitionFailedMsg(tableName, e.getMessage()); } } } } } private void recordCreatePartitionFailedMsg(String tableName, String msg) { LOG.warn("dynamic add partition failed: " + msg); createOrUpdateRuntimeInfo(tableName, DYNAMIC_PARTITION_STATE, State.ERROR.toString()); createOrUpdateRuntimeInfo(tableName, CREATE_PARTITION_MSG, msg); } private void clearCreatePartitionFailedMsg(String tableName) { createOrUpdateRuntimeInfo(tableName, DYNAMIC_PARTITION_STATE, State.NORMAL.toString()); createOrUpdateRuntimeInfo(tableName, CREATE_PARTITION_MSG, DEFAULT_RUNTIME_VALUE); } private void recordDropPartitionFailedMsg(String tableName, String msg) { LOG.warn("dynamic drop partition failed: " + msg); createOrUpdateRuntimeInfo(tableName, DYNAMIC_PARTITION_STATE, State.ERROR.toString()); createOrUpdateRuntimeInfo(tableName, DROP_PARTITION_MSG, msg); } private void clearDropPartitionFailedMsg(String tableName) { createOrUpdateRuntimeInfo(tableName, DYNAMIC_PARTITION_STATE, State.NORMAL.toString()); createOrUpdateRuntimeInfo(tableName, DROP_PARTITION_MSG, DEFAULT_RUNTIME_VALUE); } private void initDynamicPartitionTable() { for (Long dbId : Catalog.getInstance().getDbIds()) { Database db = Catalog.getInstance().getDb(dbId); if (db == null) { continue; } db.readLock(); try { for (Table table : Catalog.getInstance().getDb(dbId).getTables()) { if (DynamicPartitionUtil.isDynamicPartitionTable(table)) { registerDynamicPartitionTable(db.getId(), table.getId()); } } } finally { db.readUnlock(); } } initialize = true; } @Override protected void runAfterCatalogReady() { if (!initialize) { initDynamicPartitionTable(); } if (Config.dynamic_partition_enable) { executeDynamicPartition(); } } }
Shall we combine the above two lines?
public void onMessage(HTTPCarbonMessage inboundMessage) { try { HttpResource httpResource; if (accessed(inboundMessage)) { if (inboundMessage.getProperty(HTTP_RESOURCE) instanceof String) { if (inboundMessage.getProperty(HTTP_RESOURCE).equals( WebSubSubscriberConstants.ANNOTATED_TOPIC)) { autoRespondToIntentVerification(inboundMessage); return; } else { httpResource = WebSubDispatcher.findResource(webSubServicesRegistry, inboundMessage); } } else { httpResource = (HttpResource) inboundMessage.getProperty(HTTP_RESOURCE); } extractPropertiesAndStartResourceExecution(inboundMessage, httpResource); return; } httpResource = WebSubDispatcher.findResource(webSubServicesRegistry, inboundMessage); if (inboundMessage.getProperty(HTTP_RESOURCE) == null) { inboundMessage.setProperty(HTTP_RESOURCE, httpResource); return; } else if (inboundMessage.getProperty(HTTP_RESOURCE) instanceof String) { return; } extractPropertiesAndStartResourceExecution(inboundMessage, httpResource); } catch (BallerinaException ex) { try { HttpUtil.handleFailure(inboundMessage, new BallerinaConnectorException(ex.getMessage(), ex.getCause())); } catch (Exception e) { log.error("Cannot handle error using the error handler for: " + e.getMessage(), e); } } }
WebSubSubscriberConstants.ANNOTATED_TOPIC)) {
public void onMessage(HTTPCarbonMessage inboundMessage) { try { HttpResource httpResource; if (accessed(inboundMessage)) { if (inboundMessage.getProperty(HTTP_RESOURCE) instanceof String) { if (inboundMessage.getProperty(HTTP_RESOURCE).equals(ANNOTATED_TOPIC)) { autoRespondToIntentVerification(inboundMessage); return; } else { httpResource = WebSubDispatcher.findResource(webSubServicesRegistry, inboundMessage); } } else { httpResource = (HttpResource) inboundMessage.getProperty(HTTP_RESOURCE); } extractPropertiesAndStartResourceExecution(inboundMessage, httpResource); return; } httpResource = WebSubDispatcher.findResource(webSubServicesRegistry, inboundMessage); if (inboundMessage.getProperty(HTTP_RESOURCE) == null) { inboundMessage.setProperty(HTTP_RESOURCE, httpResource); return; } else if (inboundMessage.getProperty(HTTP_RESOURCE) instanceof String) { return; } extractPropertiesAndStartResourceExecution(inboundMessage, httpResource); } catch (BallerinaException ex) { try { HttpUtil.handleFailure(inboundMessage, new BallerinaConnectorException(ex.getMessage(), ex.getCause())); } catch (Exception e) { log.error("Cannot handle error using the error handler for: " + e.getMessage(), e); } } }
class BallerinaWebSubConnectionListener extends BallerinaHTTPConnectorListener { private static final Logger log = LoggerFactory.getLogger(BallerinaWebSubConnectionListener.class); private WebSubServicesRegistry webSubServicesRegistry; private PrintStream console = System.out; public BallerinaWebSubConnectionListener(WebSubServicesRegistry webSubServicesRegistry, Value[] filterHolders) { super(webSubServicesRegistry, filterHolders); this.webSubServicesRegistry = webSubServicesRegistry; } @Override protected void extractPropertiesAndStartResourceExecution(HTTPCarbonMessage httpCarbonMessage, HttpResource httpResource) { BValue subscriberServiceEndpoint = getSubscriberServiceEndpoint(httpResource, httpCarbonMessage); BValue httpRequest; if (httpCarbonMessage.getProperty(WebSubSubscriberConstants.ENTITY_ACCESSED_REQUEST) != null) { httpRequest = (BValue) httpCarbonMessage.getProperty(WebSubSubscriberConstants.ENTITY_ACCESSED_REQUEST); } else { httpRequest = getHttpRequest(httpResource, httpCarbonMessage); } WorkerExecutionContext parentCtx = new WorkerExecutionContext( httpResource.getBalResource().getResourceInfo().getServiceInfo().getPackageInfo().getProgramFile()); invokeRequestFilters(httpCarbonMessage, httpRequest, getRequestFilterContext(httpResource), parentCtx); Resource balResource = httpResource.getBalResource(); List<ParamDetail> paramDetails = balResource.getParamDetails(); BValue[] signatureParams = new BValue[paramDetails.size()]; String resourceName = httpResource.getName(); if (WebSubSubscriberConstants.RESOURCE_NAME_ON_INTENT_VERIFICATION.equals(resourceName)) { signatureParams[0] = subscriberServiceEndpoint; BStruct intentVerificationRequestStruct = createIntentVerificationRequestStruct(balResource); if (httpCarbonMessage.getProperty(HttpConstants.QUERY_STR) != null) { String queryString = (String) httpCarbonMessage.getProperty(HttpConstants.QUERY_STR); BMap<String, BString> params = new BMap<>(); try { URIUtil.populateQueryParamMap(queryString, 
params); intentVerificationRequestStruct.setStringField(0, params.get(WebSubSubscriberConstants.PARAM_HUB_MODE).stringValue()); intentVerificationRequestStruct.setStringField(1, params.get(WebSubSubscriberConstants.PARAM_HUB_TOPIC).stringValue()); intentVerificationRequestStruct.setStringField(2, params.get(WebSubSubscriberConstants.PARAM_HUB_CHALLENGE).stringValue()); if (params.hasKey(WebSubSubscriberConstants.PARAM_HUB_LEASE_SECONDS)) { intentVerificationRequestStruct.setIntField(0, Integer.parseInt( params.get(WebSubSubscriberConstants.PARAM_HUB_LEASE_SECONDS).stringValue())); } } catch (UnsupportedEncodingException e) { throw new BallerinaException("Error populating query map for intent verification request received: " + e.getMessage()); } } intentVerificationRequestStruct.setRefField(0, (BRefType) httpRequest); signatureParams[1] = intentVerificationRequestStruct; } else { HTTPCarbonMessage response = HttpUtil.createHttpCarbonMessage(false); response.waitAndReleaseAllEntities(); response.setProperty(HttpConstants.HTTP_STATUS_CODE, 202); response.addHttpContent(new DefaultLastHttpContent()); HttpUtil.sendOutboundResponse(httpCarbonMessage, response); BStruct notificationRequestStruct = createNotificationRequestStruct(balResource); BStruct entityStruct = MimeUtil.extractEntity((BStruct) httpRequest); if (entityStruct != null) { if (entityStruct.getNativeData(Constants.MESSAGE_DATA_SOURCE) instanceof BJSON) { BJSON jsonBody = (BJSON) (entityStruct.getNativeData(Constants.MESSAGE_DATA_SOURCE)); notificationRequestStruct.setRefField(0, jsonBody); } else { console.println("ballerina: Non-JSON payload received as WebSub Notification"); } } notificationRequestStruct.setRefField(1, (BRefType) httpRequest); signatureParams[0] = notificationRequestStruct; } CallableUnitCallback callback = new WebSubEmptyCallableUnitCallback(); Executor.submit(balResource, callback, null, null, signatureParams); } /** * Method to retrieve the struct representing the WebSub subscriber 
service endpoint. * * @param httpResource the resource of the service receiving the request * @param httpCarbonMessage the HTTP message representing the request received * @return the struct representing the subscriber service endpoint */ private BStruct getSubscriberServiceEndpoint(HttpResource httpResource, HTTPCarbonMessage httpCarbonMessage) { BStruct subscriberServiceEndpoint = createSubscriberServiceEndpointStruct(httpResource.getBalResource()); BStruct serviceEndpoint = BLangConnectorSPIUtil.createBStruct( httpResource.getBalResource().getResourceInfo().getServiceInfo().getPackageInfo().getProgramFile(), HttpConstants.PROTOCOL_PACKAGE_HTTP, HttpConstants.SERVICE_ENDPOINT); BStruct connection = BLangConnectorSPIUtil.createBStruct( httpResource.getBalResource().getResourceInfo().getServiceInfo().getPackageInfo().getProgramFile(), HttpConstants.PROTOCOL_PACKAGE_HTTP, HttpConstants.CONNECTION); HttpUtil.enrichServiceEndpointInfo(serviceEndpoint, httpCarbonMessage, httpResource); HttpUtil.enrichConnectionInfo(connection, httpCarbonMessage); serviceEndpoint.setRefField(HttpConstants.SERVICE_ENDPOINT_CONNECTION_INDEX, connection); subscriberServiceEndpoint.setRefField(1, serviceEndpoint); return subscriberServiceEndpoint; } /** * Method to retrieve the struct representing the HTTP request received. 
* * @param httpResource the resource receiving the request * @param httpCarbonMessage the HTTP message representing the request received * @return the struct representing the HTTP request received */ private BStruct getHttpRequest(HttpResource httpResource, HTTPCarbonMessage httpCarbonMessage) { BStruct httpRequest = createBStruct( httpResource.getBalResource().getResourceInfo().getServiceInfo().getPackageInfo().getProgramFile(), HttpConstants.PROTOCOL_PACKAGE_HTTP, HttpConstants.REQUEST); BStruct inRequestEntity = createBStruct( httpResource.getBalResource().getResourceInfo().getServiceInfo().getPackageInfo().getProgramFile(), org.ballerinalang.mime.util.Constants.PROTOCOL_PACKAGE_MIME, Constants.ENTITY); BStruct mediaType = createBStruct( httpResource.getBalResource().getResourceInfo().getServiceInfo().getPackageInfo().getProgramFile(), org.ballerinalang.mime.util.Constants.PROTOCOL_PACKAGE_MIME, Constants.MEDIA_TYPE); BStruct cacheControlStruct = createBStruct( httpResource.getBalResource().getResourceInfo().getServiceInfo().getPackageInfo().getProgramFile(), HttpConstants.PROTOCOL_PACKAGE_HTTP, HttpConstants.REQUEST_CACHE_CONTROL); RequestCacheControlStruct requestCacheControl = new RequestCacheControlStruct(cacheControlStruct); HttpUtil.populateInboundRequest(httpRequest, inRequestEntity, mediaType, httpCarbonMessage, requestCacheControl); return httpRequest; } /** * Method to create the struct representing the WebSub subscriber service endpoint. */ private BStruct createSubscriberServiceEndpointStruct(Resource resource) { return createBStruct(resource.getResourceInfo().getServiceInfo().getPackageInfo().getProgramFile(), WebSubSubscriberConstants.WEBSUB_PACKAGE_PATH, WebSubSubscriberConstants.SERVICE_ENDPOINT); } /** * Method to create the intent verification request struct representing a subscription/unsubscription intent * verification request received. 
*/ private BStruct createIntentVerificationRequestStruct(Resource resource) { return createBStruct(resource.getResourceInfo().getServiceInfo().getPackageInfo().getProgramFile(), WebSubSubscriberConstants.WEBSUB_PACKAGE_PATH, WebSubSubscriberConstants.STRUCT_WEBSUB_INTENT_VERIFICATION_REQUEST); } /** * Method to create the notification request struct representing WebSub notifications received. */ private BStruct createNotificationRequestStruct(Resource resource) { return createBStruct(resource.getResourceInfo().getServiceInfo().getPackageInfo().getProgramFile(), WebSubSubscriberConstants.WEBSUB_PACKAGE_PATH, WebSubSubscriberConstants.STRUCT_WEBSUB_NOTIFICATION_REQUEST); } private BStruct createBStruct(ProgramFile programFile, String packagePath, String structName) { return BLangConnectorSPIUtil.createBStruct(programFile, packagePath, structName); } /** * Method to automatically respond to intent verification requests for subscriptions/unsubscriptions if a resource * named {@link WebSubSubscriberConstants * * @param httpCarbonMessage the message/request received */ private void autoRespondToIntentVerification(HTTPCarbonMessage httpCarbonMessage) { String annotatedTopic = httpCarbonMessage.getProperty(WebSubSubscriberConstants.ANNOTATED_TOPIC).toString(); if (httpCarbonMessage.getProperty(HttpConstants.QUERY_STR) != null) { String queryString = (String) httpCarbonMessage.getProperty(HttpConstants.QUERY_STR); BMap<String, BString> params = new BMap<>(); try { HTTPCarbonMessage response = HttpUtil.createHttpCarbonMessage(false); response.waitAndReleaseAllEntities(); URIUtil.populateQueryParamMap(queryString, params); String mode = params.get(WebSubSubscriberConstants.PARAM_HUB_MODE).stringValue(); if ((WebSubSubscriberConstants.SUBSCRIBE.equals(mode) || WebSubSubscriberConstants.UNSUBSCRIBE.equals(mode)) && annotatedTopic.equals(params.get(WebSubSubscriberConstants.PARAM_HUB_TOPIC).stringValue())) { String challenge = params.get( 
WebSubSubscriberConstants.PARAM_HUB_CHALLENGE).stringValue(); response.addHttpContent(new DefaultLastHttpContent(Unpooled.wrappedBuffer( challenge.getBytes(StandardCharsets.UTF_8)))); response.setHeader(HttpHeaderNames.CONTENT_TYPE.toString(), Constants.TEXT_PLAIN); response.setProperty(HttpConstants.HTTP_STATUS_CODE, 202); String intentVerificationMessage = "ballerina: Intent Verification agreed - Mode [" + mode + "], Topic [" + annotatedTopic + "]"; if (params.hasKey(WebSubSubscriberConstants.PARAM_HUB_LEASE_SECONDS)) { intentVerificationMessage = intentVerificationMessage.concat(", Lease Seconds [" + params.get(WebSubSubscriberConstants.PARAM_HUB_LEASE_SECONDS) + "]"); } console.println(intentVerificationMessage); } else { console.println("ballerina: Intent Verification denied - Mode [" + mode + "], Topic [" + annotatedTopic + "]"); response.setProperty(HttpConstants.HTTP_STATUS_CODE, 404); response.addHttpContent(new DefaultLastHttpContent()); } HttpUtil.sendOutboundResponse(httpCarbonMessage, response); } catch (UnsupportedEncodingException e) { throw new BallerinaConnectorException("Error responding to intent verification request: " + e.getMessage()); } } } }
class BallerinaWebSubConnectionListener extends BallerinaHTTPConnectorListener { private static final Logger log = LoggerFactory.getLogger(BallerinaWebSubConnectionListener.class); private WebSubServicesRegistry webSubServicesRegistry; private PrintStream console = System.out; public BallerinaWebSubConnectionListener(WebSubServicesRegistry webSubServicesRegistry, Value[] filterHolders) { super(webSubServicesRegistry, filterHolders); this.webSubServicesRegistry = webSubServicesRegistry; } @Override protected void extractPropertiesAndStartResourceExecution(HTTPCarbonMessage httpCarbonMessage, HttpResource httpResource) { BValue subscriberServiceEndpoint = getSubscriberServiceEndpoint(httpResource, httpCarbonMessage); BValue httpRequest; if (httpCarbonMessage.getProperty(ENTITY_ACCESSED_REQUEST) != null) { httpRequest = (BValue) httpCarbonMessage.getProperty(ENTITY_ACCESSED_REQUEST); } else { httpRequest = WebSubUtils.getHttpRequest(httpResource.getBalResource().getResourceInfo().getServiceInfo() .getPackageInfo().getProgramFile(), httpCarbonMessage); } WorkerExecutionContext parentCtx = new WorkerExecutionContext( httpResource.getBalResource().getResourceInfo().getServiceInfo().getPackageInfo().getProgramFile()); invokeRequestFilters(httpCarbonMessage, httpRequest, getRequestFilterContext(httpResource), parentCtx); Resource balResource = httpResource.getBalResource(); List<ParamDetail> paramDetails = balResource.getParamDetails(); BValue[] signatureParams = new BValue[paramDetails.size()]; String resourceName = httpResource.getName(); if (RESOURCE_NAME_ON_INTENT_VERIFICATION.equals(resourceName)) { signatureParams[0] = subscriberServiceEndpoint; BStruct intentVerificationRequestStruct = createIntentVerificationRequestStruct(balResource); if (httpCarbonMessage.getProperty(HttpConstants.QUERY_STR) != null) { String queryString = (String) httpCarbonMessage.getProperty(HttpConstants.QUERY_STR); BMap<String, BString> params = new BMap<>(); try { 
URIUtil.populateQueryParamMap(queryString, params); intentVerificationRequestStruct.setStringField(0, params.get(PARAM_HUB_MODE).stringValue()); intentVerificationRequestStruct.setStringField(1, params.get(PARAM_HUB_TOPIC).stringValue()); intentVerificationRequestStruct.setStringField(2, params.get(PARAM_HUB_CHALLENGE).stringValue()); if (params.hasKey(PARAM_HUB_LEASE_SECONDS)) { intentVerificationRequestStruct.setIntField(0, Integer.parseInt( params.get(PARAM_HUB_LEASE_SECONDS).stringValue())); } } catch (UnsupportedEncodingException e) { throw new BallerinaException("Error populating query map for intent verification request received: " + e.getMessage()); } } intentVerificationRequestStruct.setRefField(0, (BRefType) httpRequest); signatureParams[1] = intentVerificationRequestStruct; } else { HTTPCarbonMessage response = HttpUtil.createHttpCarbonMessage(false); response.waitAndReleaseAllEntities(); response.setProperty(HttpConstants.HTTP_STATUS_CODE, HttpResponseStatus.ACCEPTED.code()); response.addHttpContent(new DefaultLastHttpContent()); HttpUtil.sendOutboundResponse(httpCarbonMessage, response); BStruct notificationRequestStruct = createNotificationRequestStruct(balResource); BStruct entityStruct = MimeUtil.extractEntity((BStruct) httpRequest); if (entityStruct != null) { if (entityStruct.getNativeData(Constants.MESSAGE_DATA_SOURCE) instanceof BJSON) { BJSON jsonBody = (BJSON) (entityStruct.getNativeData(Constants.MESSAGE_DATA_SOURCE)); notificationRequestStruct.setRefField(0, jsonBody); } else { console.println("ballerina: Non-JSON payload received as WebSub Notification"); } } notificationRequestStruct.setRefField(1, (BRefType) httpRequest); signatureParams[0] = notificationRequestStruct; } CallableUnitCallback callback = new WebSubEmptyCallableUnitCallback(); Executor.submit(balResource, callback, null, null, signatureParams); } /** * Method to retrieve the struct representing the WebSub subscriber service endpoint. 
* * @param httpResource the resource of the service receiving the request * @param httpCarbonMessage the HTTP message representing the request received * @return the struct representing the subscriber service endpoint */ private BStruct getSubscriberServiceEndpoint(HttpResource httpResource, HTTPCarbonMessage httpCarbonMessage) { BStruct subscriberServiceEndpoint = createSubscriberServiceEndpointStruct(httpResource.getBalResource()); BStruct serviceEndpoint = BLangConnectorSPIUtil.createBStruct( httpResource.getBalResource().getResourceInfo().getServiceInfo().getPackageInfo().getProgramFile(), HttpConstants.PROTOCOL_PACKAGE_HTTP, HttpConstants.SERVICE_ENDPOINT); BStruct connection = BLangConnectorSPIUtil.createBStruct( httpResource.getBalResource().getResourceInfo().getServiceInfo().getPackageInfo().getProgramFile(), HttpConstants.PROTOCOL_PACKAGE_HTTP, HttpConstants.CONNECTION); HttpUtil.enrichServiceEndpointInfo(serviceEndpoint, httpCarbonMessage, httpResource); HttpUtil.enrichConnectionInfo(connection, httpCarbonMessage); serviceEndpoint.setRefField(HttpConstants.SERVICE_ENDPOINT_CONNECTION_INDEX, connection); subscriberServiceEndpoint.setRefField(1, serviceEndpoint); return subscriberServiceEndpoint; } /** * Method to create the struct representing the WebSub subscriber service endpoint. */ private BStruct createSubscriberServiceEndpointStruct(Resource resource) { return createBStruct(resource.getResourceInfo().getServiceInfo().getPackageInfo().getProgramFile(), WEBSUB_PACKAGE_PATH, SERVICE_ENDPOINT); } /** * Method to create the intent verification request struct representing a subscription/unsubscription intent * verification request received. 
*/ private BStruct createIntentVerificationRequestStruct(Resource resource) { return createBStruct(resource.getResourceInfo().getServiceInfo().getPackageInfo().getProgramFile(), WEBSUB_PACKAGE_PATH, STRUCT_WEBSUB_INTENT_VERIFICATION_REQUEST); } /** * Method to create the notification request struct representing WebSub notifications received. */ private BStruct createNotificationRequestStruct(Resource resource) { return createBStruct(resource.getResourceInfo().getServiceInfo().getPackageInfo().getProgramFile(), WEBSUB_PACKAGE_PATH, STRUCT_WEBSUB_NOTIFICATION_REQUEST); } private BStruct createBStruct(ProgramFile programFile, String packagePath, String structName) { return BLangConnectorSPIUtil.createBStruct(programFile, packagePath, structName); } /** * Method to automatically respond to intent verification requests for subscriptions/unsubscriptions if a resource * named {@link WebSubSubscriberConstants * * @param httpCarbonMessage the message/request received */ private void autoRespondToIntentVerification(HTTPCarbonMessage httpCarbonMessage) { String annotatedTopic = httpCarbonMessage.getProperty(ANNOTATED_TOPIC).toString(); if (httpCarbonMessage.getProperty(HttpConstants.QUERY_STR) != null) { String queryString = (String) httpCarbonMessage.getProperty(HttpConstants.QUERY_STR); BMap<String, BString> params = new BMap<>(); try { HTTPCarbonMessage response = HttpUtil.createHttpCarbonMessage(false); response.waitAndReleaseAllEntities(); URIUtil.populateQueryParamMap(queryString, params); String mode = params.get(PARAM_HUB_MODE).stringValue(); if (!params.keySet().contains(PARAM_HUB_MODE) || !params.keySet().contains(PARAM_HUB_TOPIC) || !params.keySet().contains(PARAM_HUB_CHALLENGE)) { response.setProperty(HttpConstants.HTTP_STATUS_CODE, HttpResponseStatus.NOT_FOUND.code()); response.addHttpContent(new DefaultLastHttpContent()); HttpUtil.sendOutboundResponse(httpCarbonMessage, response); console.println("ballerina: Error auto-responding to intent verification request: 
Mode, Topic " + "and/or callback not specified"); } if ((SUBSCRIBE.equals(mode) || UNSUBSCRIBE.equals(mode)) && annotatedTopic.equals(params.get(PARAM_HUB_TOPIC).stringValue())) { String challenge = params.get(PARAM_HUB_CHALLENGE).stringValue(); response.addHttpContent(new DefaultLastHttpContent(Unpooled.wrappedBuffer( challenge.getBytes(StandardCharsets.UTF_8)))); response.setHeader(HttpHeaderNames.CONTENT_TYPE.toString(), Constants.TEXT_PLAIN); response.setProperty(HttpConstants.HTTP_STATUS_CODE, HttpResponseStatus.ACCEPTED.code()); String intentVerificationMessage = "ballerina: Intent Verification agreed - Mode [" + mode + "], Topic [" + annotatedTopic + "]"; if (params.hasKey(PARAM_HUB_LEASE_SECONDS)) { intentVerificationMessage = intentVerificationMessage.concat(", Lease Seconds [" + params.get(PARAM_HUB_LEASE_SECONDS) + "]"); } console.println(intentVerificationMessage); } else { console.println("ballerina: Intent Verification denied - Mode [" + mode + "], Topic [" + params.get(PARAM_HUB_TOPIC).stringValue() + "]"); response.setProperty(HttpConstants.HTTP_STATUS_CODE, HttpResponseStatus.NOT_FOUND.code()); response.addHttpContent(new DefaultLastHttpContent()); } HttpUtil.sendOutboundResponse(httpCarbonMessage, response); } catch (UnsupportedEncodingException e) { throw new BallerinaConnectorException("Error responding to intent verification request: " + e.getMessage()); } } } }
Here you can easily use TypeTags for type comparisons.
private static void checkRetryStmtValidity(RetryStmt stmt) { StatementKind parentStmtType = stmt.getParent().getKind(); if (StatementKind.FAILED_BLOCK != parentStmtType) { BLangExceptionHelper.throwSemanticError(stmt, SemanticErrors.INVALID_RETRY_STMT_LOCATION); } Expression retryCountExpr = stmt.getRetryCountExpression(); boolean error = true; if (retryCountExpr instanceof BasicLiteral) { if (TypeConstants.INT_TNAME.equals(((BasicLiteral) retryCountExpr).getTypeName().getName())) { if (((BasicLiteral) retryCountExpr).getBValue().intValue() >= 0) { error = false; } } } else if (retryCountExpr instanceof VariableReferenceExpr) { VariableDef variableDef = ((SimpleVarRefExpr) retryCountExpr).getVariableDef(); if (variableDef.getKind() == VariableDef.Kind.CONSTANT) { if (TypeConstants.INT_TNAME.equals(variableDef.getTypeName().getName())) { error = false; } } } if (error) { BLangExceptionHelper.throwSemanticError(stmt, SemanticErrors.INVALID_RETRY_COUNT); } }
if (TypeConstants.INT_TNAME.equals(((BasicLiteral) retryCountExpr).getTypeName().getName())) {
private static void checkRetryStmtValidity(RetryStmt stmt) { StatementKind parentStmtType = stmt.getParent().getKind(); if (StatementKind.FAILED_BLOCK != parentStmtType) { BLangExceptionHelper.throwSemanticError(stmt, SemanticErrors.INVALID_RETRY_STMT_LOCATION); } Expression retryCountExpr = stmt.getRetryCountExpression(); boolean error = true; if (retryCountExpr instanceof BasicLiteral) { if (retryCountExpr.getType().getTag() == TypeTags.INT_TAG) { if (((BasicLiteral) retryCountExpr).getBValue().intValue() >= 0) { error = false; } } } else if (retryCountExpr instanceof VariableReferenceExpr) { VariableDef variableDef = ((SimpleVarRefExpr) retryCountExpr).getVariableDef(); if (variableDef.getKind() == VariableDef.Kind.CONSTANT) { if (variableDef.getType().getTag() == TypeTags.INT_TAG) { error = false; } } } if (error) { BLangExceptionHelper.throwSemanticError(stmt, SemanticErrors.INVALID_RETRY_COUNT); } }
class SemanticAnalyzer implements NodeVisitor { private static final String ERRORS_PACKAGE = "ballerina.lang.errors"; private static final String BALLERINA_CAST_ERROR = "TypeCastError"; private static final String BALLERINA_CONVERSION_ERROR = "TypeConversionError"; private static final String BALLERINA_ERROR = "Error"; private String currentPkg; private CallableUnit currentCallableUnit = null; private Stack<CallableUnit> parentCallableUnit = new Stack<>(); private Stack<SymbolScope> parentScope = new Stack<>(); private int whileStmtCount = 0; private int transactionStmtCount = 0; private int failedBlockCount = 0; private boolean isWithinWorker = false; private SymbolScope currentScope; private SymbolScope currentPackageScope; private SymbolScope nativeScope; private BlockStmt.BlockStmtBuilder pkgInitFuncStmtBuilder; public SemanticAnalyzer(BLangProgram programScope) { currentScope = programScope; this.nativeScope = programScope.getNativeScope(); } @Override public void visit(BLangProgram bLangProgram) { BLangPackage entryPkg = bLangProgram.getEntryPackage(); if (entryPkg != null) { entryPkg.accept(this); } else { BLangPackage[] blangPackages = bLangProgram.getLibraryPackages(); for (BLangPackage bLangPackage : blangPackages) { bLangPackage.accept(this); } } } @Override public void visit(BLangPackage bLangPackage) { BLangPackage[] dependentPackages = bLangPackage.getDependentPackages(); List<BallerinaFunction> initFunctionList = new ArrayList<>(); for (BLangPackage dependentPkg : dependentPackages) { if (dependentPkg.isSymbolsDefined()) { continue; } dependentPkg.accept(this); initFunctionList.add(dependentPkg.getInitFunction()); } currentScope = bLangPackage; currentPackageScope = currentScope; currentPkg = bLangPackage.getPackagePath(); NodeLocation pkgLocation = bLangPackage.getNodeLocation(); if (pkgLocation == null) { BallerinaFile[] ballerinaFiles = bLangPackage.getBallerinaFiles(); String filename = ballerinaFiles.length == 0 ? 
"" : ballerinaFiles[0].getFileName(); pkgLocation = new NodeLocation("", filename, 0); } BallerinaFunction.BallerinaFunctionBuilder functionBuilder = new BallerinaFunction.BallerinaFunctionBuilder(bLangPackage); functionBuilder.setNodeLocation(pkgLocation); functionBuilder.setIdentifier(new Identifier(bLangPackage.getPackagePath() + INIT_FUNCTION_SUFFIX)); functionBuilder.setPkgPath(bLangPackage.getPackagePath()); pkgInitFuncStmtBuilder = new BlockStmt.BlockStmtBuilder(bLangPackage.getNodeLocation(), bLangPackage); addDependentPkgInitCalls(initFunctionList, pkgInitFuncStmtBuilder, pkgLocation); defineStructs(bLangPackage.getStructDefs()); defineConnectors(bLangPackage.getConnectors()); resolveStructFieldTypes(bLangPackage.getStructDefs()); defineFunctions(bLangPackage.getFunctions()); defineServices(bLangPackage.getServices()); defineAnnotations(bLangPackage.getAnnotationDefs()); for (CompilationUnit compilationUnit : bLangPackage.getCompilationUnits()) { compilationUnit.accept(this); } ReturnStmt returnStmt = new ReturnStmt(pkgLocation, null, new Expression[0]); pkgInitFuncStmtBuilder.addStmt(returnStmt); pkgInitFuncStmtBuilder.setBlockKind(StatementKind.CALLABLE_UNIT_BLOCK); functionBuilder.setBody(pkgInitFuncStmtBuilder.build()); BallerinaFunction initFunction = functionBuilder.buildFunction(); initFunction.setReturnParamTypes(new BType[0]); bLangPackage.setInitFunction(initFunction); bLangPackage.setSymbolsDefined(true); } @Override public void visit(BallerinaFile bFile) { } @Override public void visit(ImportPackage importPkg) { } @Override public void visit(ConstDef constDef) { VariableDefStmt variableDefStmt = constDef.getVariableDefStmt(); variableDefStmt.getVariableDef().setKind(VariableDef.Kind.CONSTANT); variableDefStmt.accept(this); for (AnnotationAttachment annotationAttachment : constDef.getAnnotations()) { annotationAttachment.setAttachedPoint(new AnnotationAttachmentPoint(AttachmentPoint.CONSTANT, null)); annotationAttachment.accept(this); } 
SimpleVarRefExpr varRefExpr = new SimpleVarRefExpr(constDef.getNodeLocation(), constDef.getWhiteSpaceDescriptor(), constDef.getName(), null, null); varRefExpr.setVariableDef(constDef); AssignStmt assignStmt = new AssignStmt(constDef.getNodeLocation(), new Expression[]{varRefExpr}, variableDefStmt.getRExpr()); pkgInitFuncStmtBuilder.addStmt(assignStmt); } @Override public void visit(GlobalVariableDef globalVarDef) { VariableDefStmt variableDefStmt = globalVarDef.getVariableDefStmt(); variableDefStmt.getVariableDef().setKind(VariableDef.Kind.GLOBAL_VAR); variableDefStmt.accept(this); if (variableDefStmt.getRExpr() != null) { AssignStmt assignStmt = new AssignStmt(variableDefStmt.getNodeLocation(), new Expression[]{variableDefStmt.getLExpr()}, variableDefStmt.getRExpr()); pkgInitFuncStmtBuilder.addStmt(assignStmt); } } @Override public void visit(Service service) { openScope(service); for (AnnotationAttachment annotationAttachment : service.getAnnotations()) { annotationAttachment.setAttachedPoint(new AnnotationAttachmentPoint(AttachmentPoint.SERVICE, service.getProtocolPkgPath())); annotationAttachment.accept(this); } for (VariableDefStmt variableDefStmt : service.getVariableDefStmts()) { variableDefStmt.getVariableDef().setKind(VariableDef.Kind.SERVICE_VAR); variableDefStmt.accept(this); } createServiceInitFunction(service); for (Resource resource : service.getResources()) { resource.accept(this); } closeScope(); } @Override public void visit(BallerinaConnectorDef connectorDef) { openScope(connectorDef); if (connectorDef.isFilterConnector()) { BType type = BTypes.resolveType(connectorDef.getFilterSupportedType(), currentScope, connectorDef.getNodeLocation()); if (type != null) { if (type instanceof BallerinaConnectorDef) { connectorDef.setFilteredType(type); BallerinaConnectorDef filterConnector = (BallerinaConnectorDef) type; if (!filterConnector.equals(connectorDef)) { BLangExceptionHelper.throwSemanticError(connectorDef, 
SemanticErrors.CONNECTOR_TYPES_NOT_EQUIVALENT, connectorDef.getName(), filterConnector.getName()); } } else { BLangExceptionHelper.throwSemanticError(connectorDef, SemanticErrors.FILTER_CONNECTOR_MUST_BE_A_CONNECTOR, type.getName()); } } else { BLangExceptionHelper.throwSemanticError(connectorDef, SemanticErrors.UNDEFINED_CONNECTOR, connectorDef.getFilterSupportedType()); } } for (AnnotationAttachment annotationAttachment : connectorDef.getAnnotations()) { annotationAttachment.setAttachedPoint(new AnnotationAttachmentPoint(AttachmentPoint.CONNECTOR, null)); annotationAttachment.accept(this); } for (ParameterDef parameterDef : connectorDef.getParameterDefs()) { parameterDef.setKind(VariableDef.Kind.CONNECTOR_VAR); parameterDef.accept(this); } for (VariableDefStmt variableDefStmt : connectorDef.getVariableDefStmts()) { variableDefStmt.getVariableDef().setKind(VariableDef.Kind.CONNECTOR_VAR); variableDefStmt.accept(this); } createConnectorInitFunction(connectorDef); for (BallerinaAction action : connectorDef.getActions()) { action.accept(this); } closeScope(); } @Override public void visit(Resource resource) { openScope(resource); currentCallableUnit = resource; for (AnnotationAttachment annotationAttachment : resource.getAnnotations()) { annotationAttachment.setAttachedPoint(new AnnotationAttachmentPoint(AttachmentPoint.RESOURCE, null)); annotationAttachment.accept(this); } for (ParameterDef parameterDef : resource.getParameterDefs()) { parameterDef.setKind(VariableDef.Kind.LOCAL_VAR); parameterDef.accept(this); } for (Worker worker : resource.getWorkers()) { addWorkerSymbol(worker); visit(worker); } BlockStmt blockStmt = resource.getResourceBody(); blockStmt.accept(this); checkAndAddReplyStmt(blockStmt); resolveWorkerInteractions(resource); currentCallableUnit = null; closeScope(); } private void buildWorkerInteractions(CallableUnit callableUnit, Worker[] workers, boolean isWorkerInWorker, boolean isForkJoinStmt) { Map<String, WorkerDataChannel> workerDataChannels = 
new HashMap<>(); boolean statementCompleted = false; List<Statement> processedStatements = new ArrayList<>(); if (callableUnit.getWorkerInteractionStatements() != null && !callableUnit.getWorkerInteractionStatements().isEmpty()) { String sourceWorkerName; String targetWorkerName; for (Statement statement : callableUnit.getWorkerInteractionStatements()) { statementCompleted = false; if (statement instanceof WorkerInvocationStmt) { targetWorkerName = ((WorkerInvocationStmt) statement).getName(); if (targetWorkerName == "fork" && isForkJoinStmt) { break; } if (callableUnit instanceof Worker) { sourceWorkerName = callableUnit.getName(); } else { sourceWorkerName = "default"; } for (Worker worker : workers) { if (statementCompleted) { break; } Statement[] workerInteractions = worker.getWorkerInteractionStatements(). toArray(new Statement[worker.getWorkerInteractionStatements().size()]); for (Statement workerInteraction : workerInteractions) { if (workerInteraction instanceof WorkerReplyStmt) { String complimentSourceWorkerName = ((WorkerReplyStmt) workerInteraction). getWorkerName(); String complimentTargetWorkerName = worker.getName(); if (sourceWorkerName.equals(complimentSourceWorkerName) && targetWorkerName.equals(complimentTargetWorkerName)) { Expression[] invokeParams = ((WorkerInvocationStmt) statement).getExpressionList(); Expression[] receiveParams = ((WorkerReplyStmt) workerInteraction). 
getExpressionList(); if (invokeParams.length != receiveParams.length) { break; } else { int i = 0; for (Expression invokeParam : invokeParams) { if (!(receiveParams[i++].getType().equals(invokeParam.getType()))) { break; } } } String interactionName = sourceWorkerName + "->" + targetWorkerName; WorkerDataChannel workerDataChannel; if (!workerDataChannels.containsKey(interactionName)) { workerDataChannel = new WorkerDataChannel(sourceWorkerName, targetWorkerName); workerDataChannels.put(interactionName, workerDataChannel); } else { workerDataChannel = workerDataChannels.get(interactionName); } ((WorkerInvocationStmt) statement).setWorkerDataChannel(workerDataChannel); ((WorkerReplyStmt) workerInteraction). setWorkerDataChannel(workerDataChannel); ((WorkerReplyStmt) workerInteraction). setEnclosingCallableUnitName(callableUnit.getName()); callableUnit.addWorkerDataChannel(workerDataChannel); ((WorkerInvocationStmt) statement).setEnclosingCallableUnitName( callableUnit.getName()); ((WorkerInvocationStmt) statement).setPackagePath(callableUnit.getPackagePath()); worker.getWorkerInteractionStatements().remove(workerInteraction); processedStatements.add(statement); statementCompleted = true; break; } } } } } else { sourceWorkerName = ((WorkerReplyStmt) statement).getWorkerName(); if (callableUnit instanceof Worker) { targetWorkerName = callableUnit.getName(); } else { targetWorkerName = "default"; } for (Worker worker : callableUnit.getWorkers()) { if (statementCompleted) { break; } Statement[] workerInteractions = worker.getWorkerInteractionStatements(). toArray(new Statement[worker.getWorkerInteractionStatements().size()]); for (Statement workerInteraction : workerInteractions) { if (workerInteraction instanceof WorkerInvocationStmt) { String complimentTargetWorkerName = ((WorkerInvocationStmt) workerInteraction). 
getName(); String complimentSourceWorkerName = worker.getName(); if (sourceWorkerName.equals(complimentSourceWorkerName) && targetWorkerName.equals(complimentTargetWorkerName)) { Expression[] invokeParams = ((WorkerReplyStmt) statement).getExpressionList(); Expression[] receiveParams = ((WorkerInvocationStmt) workerInteraction). getExpressionList(); if (invokeParams.length != receiveParams.length) { break; } else { int i = 0; for (Expression invokeParam : invokeParams) { if (!(receiveParams[i++].getType().equals(invokeParam.getType()))) { break; } } } String interactionName = sourceWorkerName + "->" + targetWorkerName; WorkerDataChannel workerDataChannel; if (!workerDataChannels.containsKey(interactionName)) { workerDataChannel = new WorkerDataChannel(sourceWorkerName, targetWorkerName); workerDataChannels.put(interactionName, workerDataChannel); } else { workerDataChannel = workerDataChannels.get(interactionName); } ((WorkerReplyStmt) statement).setWorkerDataChannel(workerDataChannel); ((WorkerInvocationStmt) workerInteraction). setWorkerDataChannel(workerDataChannel); ((WorkerInvocationStmt) workerInteraction). 
setEnclosingCallableUnitName(callableUnit.getName()); callableUnit.addWorkerDataChannel(workerDataChannel); ((WorkerReplyStmt) statement).setEnclosingCallableUnitName(callableUnit.getName()); ((WorkerReplyStmt) statement).setPackagePath(callableUnit.getPackagePath()); worker.getWorkerInteractionStatements().remove(workerInteraction); processedStatements.add(statement); statementCompleted = true; break; } } } } } if (!statementCompleted && !isWorkerInWorker) { BLangExceptionHelper.throwSemanticError(statement, SemanticErrors.WORKER_INTERACTION_NOT_VALID); } } callableUnit.getWorkerInteractionStatements().removeAll(processedStatements); } } private void resolveWorkerInteractions(CallableUnit callableUnit) { boolean isWorkerInWorker = callableUnit instanceof Worker; boolean isForkJoinStmt = callableUnit instanceof ForkJoinStmt; Worker[] workers = callableUnit.getWorkers(); if (workers.length > 0) { Worker[] tempWorkers = new Worker[workers.length]; System.arraycopy(workers, 0, tempWorkers, 0, tempWorkers.length); int i = 0; do { buildWorkerInteractions(callableUnit, tempWorkers, isWorkerInWorker, isForkJoinStmt); callableUnit = workers[i]; i++; System.arraycopy(workers, i, tempWorkers, 0, workers.length - i); } while (i < workers.length); } } private void resolveForkJoin(ForkJoinStmt forkJoinStmt) { Worker[] workers = forkJoinStmt.getWorkers(); if (workers != null && workers.length > 0) { for (Worker worker : workers) { for (Statement statement : worker.getWorkerInteractionStatements()) { if (statement instanceof WorkerInvocationStmt) { String targetWorkerName = ((WorkerInvocationStmt) statement).getName(); if (targetWorkerName.equalsIgnoreCase("fork")) { String sourceWorkerName = worker.getName(); WorkerDataChannel workerDataChannel = new WorkerDataChannel (sourceWorkerName, targetWorkerName); ((WorkerInvocationStmt) statement).setWorkerDataChannel(workerDataChannel); currentCallableUnit.addWorkerDataChannel(workerDataChannel); } } } } } } @Override public void 
visit(BallerinaFunction function) { openScope(function); currentCallableUnit = function; for (AnnotationAttachment annotationAttachment : function.getAnnotations()) { annotationAttachment.setAttachedPoint(new AnnotationAttachmentPoint(AttachmentPoint.FUNCTION, null)); annotationAttachment.accept(this); } for (ParameterDef parameterDef : function.getParameterDefs()) { parameterDef.setKind(VariableDef.Kind.LOCAL_VAR); parameterDef.accept(this); } for (ParameterDef parameterDef : function.getReturnParameters()) { if (parameterDef.getName() != null) { parameterDef.setKind(VariableDef.Kind.LOCAL_VAR); } parameterDef.accept(this); } if (!function.isNative()) { for (Worker worker : function.getWorkers()) { worker.accept(this); } BlockStmt blockStmt = function.getCallableUnitBody(); blockStmt.accept(this); if (function.getReturnParameters().length > 0 && !blockStmt.isAlwaysReturns()) { BLangExceptionHelper.throwSemanticError(function, SemanticErrors.MISSING_RETURN_STATEMENT); } checkAndAddReturnStmt(function.getReturnParamTypes().length, blockStmt); } resolveWorkerInteractions(function); currentCallableUnit = null; closeScope(); } @Override public void visit(BTypeMapper typeMapper) { } @Override public void visit(BallerinaAction action) { openScope(action); currentCallableUnit = action; for (AnnotationAttachment annotationAttachment : action.getAnnotations()) { annotationAttachment.setAttachedPoint(new AnnotationAttachmentPoint(AttachmentPoint.ACTION, null)); annotationAttachment.accept(this); } for (ParameterDef parameterDef : action.getParameterDefs()) { parameterDef.setKind(VariableDef.Kind.LOCAL_VAR); parameterDef.accept(this); } if (action.getParameterDefs().length < 1 || action.getParameterDefs()[0].getType() != action.getConnectorDef()) { BLangExceptionHelper.throwSemanticError(action, SemanticErrors.INVALID_ACTION_FIRST_PARAMETER, action.getConnectorDef()); } for (ParameterDef parameterDef : action.getReturnParameters()) { if (parameterDef.getName() != null) { 
parameterDef.setKind(VariableDef.Kind.LOCAL_VAR); } parameterDef.accept(this); } if (!action.isNative()) { for (Worker worker : action.getWorkers()) { worker.accept(this); } BlockStmt blockStmt = action.getCallableUnitBody(); blockStmt.accept(this); if (action.getReturnParameters().length > 0 && !blockStmt.isAlwaysReturns()) { BLangExceptionHelper.throwSemanticError(action, SemanticErrors.MISSING_RETURN_STATEMENT); } checkAndAddReturnStmt(action.getReturnParameters().length, blockStmt); } resolveWorkerInteractions(action); currentCallableUnit = null; closeScope(); } @Override public void visit(Worker worker) { parentScope.push(currentScope); currentScope = worker; parentCallableUnit.push(currentCallableUnit); currentCallableUnit = worker; for (ParameterDef parameterDef : worker.getParameterDefs()) { parameterDef.setKind(VariableDef.Kind.LOCAL_VAR); parameterDef.accept(this); } for (ParameterDef parameterDef : worker.getReturnParameters()) { parameterDef.setKind(VariableDef.Kind.LOCAL_VAR); parameterDef.accept(this); } for (Worker worker2 : worker.getWorkers()) { addWorkerSymbol(worker2); worker2.accept(this); } BlockStmt blockStmt = worker.getCallableUnitBody(); isWithinWorker = true; blockStmt.accept(this); isWithinWorker = false; currentCallableUnit = parentCallableUnit.pop(); currentScope = parentScope.pop(); } private void addWorkerSymbol(Worker worker) { SymbolName symbolName = worker.getSymbolName(); BLangSymbol varSymbol = currentScope.resolve(symbolName); if (varSymbol != null) { BLangExceptionHelper.throwSemanticError(worker, SemanticErrors.REDECLARED_SYMBOL, worker.getName()); } currentScope.define(symbolName, worker); } @Override public void visit(StructDef structDef) { for (AnnotationAttachment annotationAttachment : structDef.getAnnotations()) { annotationAttachment.setAttachedPoint(new AnnotationAttachmentPoint(AttachmentPoint.STRUCT, null)); annotationAttachment.accept(this); } } @Override public void visit(AnnotationAttachment annotation) { 
AnnotationAttachmentPoint attachedPoint = annotation.getAttachedPoint(); SymbolName annotationSymName = new SymbolName(annotation.getName(), annotation.getPkgPath()); BLangSymbol annotationSymbol = currentScope.resolve(annotationSymName); if (!(annotationSymbol instanceof AnnotationDef)) { BLangExceptionHelper.throwSemanticError(annotation, SemanticErrors.UNDEFINED_ANNOTATION, annotationSymName); } AnnotationDef annotationDef = (AnnotationDef) annotationSymbol; if (annotationDef.getAttachmentPoints() != null && annotationDef.getAttachmentPoints().length > 0) { Optional<AnnotationAttachmentPoint> matchingAttachmentPoint = Arrays .stream(annotationDef.getAttachmentPoints()) .filter(attachmentPoint -> attachmentPoint.equals(attachedPoint)) .findAny(); if (!matchingAttachmentPoint.isPresent()) { String msg = attachedPoint.getAttachmentPoint().getValue(); if (attachedPoint.getPkgPath() != null) { msg = attachedPoint.getAttachmentPoint().getValue() + "<" + attachedPoint.getPkgPath() + ">"; } throw BLangExceptionHelper.getSemanticError(annotation.getNodeLocation(), SemanticErrors.ANNOTATION_NOT_ALLOWED, annotationSymName, msg); } } validateAttributes(annotation, annotationDef); populateDefaultValues(annotation, annotationDef); } /** * Visit and validate attributes of an annotation attachment. 
* * @param annotation Annotation attachment to validate attributes * @param annotationDef Definition of the annotation */ private void validateAttributes(AnnotationAttachment annotation, AnnotationDef annotationDef) { annotation.getAttributeNameValuePairs().forEach((attributeName, attributeValue) -> { BLangSymbol attributeSymbol = annotationDef.resolveMembers(new SymbolName(attributeName)); if (attributeSymbol == null || !(attributeSymbol instanceof AnnotationAttributeDef)) { BLangExceptionHelper.throwSemanticError(annotation, SemanticErrors.NO_SUCH_ATTRIBUTE, attributeName, annotation.getName()); } AnnotationAttributeDef attributeDef = ((AnnotationAttributeDef) attributeSymbol); SimpleTypeName attributeType = attributeDef.getTypeName(); if (attributeValue.getVarRefExpr() != null) { SimpleVarRefExpr varRefExpr = attributeValue.getVarRefExpr(); visitSingleValueExpr(varRefExpr); if (!(varRefExpr.getVariableDef() instanceof ConstDef)) { throw BLangExceptionHelper.getSemanticError(attributeValue.getNodeLocation(), SemanticErrors.ATTRIBUTE_VAL_CANNOT_REFER_NON_CONST); } attributeValue.setType(varRefExpr.getType()); BType lhsType = BTypes.resolveType(attributeType, currentScope, annotation.getNodeLocation()); if (lhsType != varRefExpr.getType()) { throw BLangExceptionHelper.getSemanticError(attributeValue.getNodeLocation(), SemanticErrors.INCOMPATIBLE_TYPES, lhsType, varRefExpr.getType()); } return; } SimpleTypeName valueType = attributeValue.getTypeName(); BLangSymbol valueTypeSymbol = currentScope.resolve(valueType.getSymbolName()); BLangSymbol attributeTypeSymbol = annotationDef.resolve(new SymbolName(attributeType.getName(), attributeType.getPackagePath())); if (attributeType.isArrayType()) { if (!valueType.isArrayType()) { BLangExceptionHelper.throwSemanticError(attributeValue, SemanticErrors.INCOMPATIBLE_TYPES, attributeTypeSymbol.getSymbolName() + TypeConstants.ARRAY_TNAME, valueTypeSymbol.getSymbolName()); } AnnotationAttributeValue[] valuesArray = 
attributeValue.getValueArray(); for (AnnotationAttributeValue value : valuesArray) { valueTypeSymbol = currentScope.resolve(value.getTypeName().getSymbolName()); if (attributeTypeSymbol != valueTypeSymbol) { BLangExceptionHelper.throwSemanticError(attributeValue, SemanticErrors.INCOMPATIBLE_TYPES, attributeTypeSymbol.getSymbolName(), valueTypeSymbol.getSymbolName()); } AnnotationAttachment childAnnotation = value.getAnnotationValue(); if (childAnnotation != null && valueTypeSymbol instanceof AnnotationDef) { validateAttributes(childAnnotation, (AnnotationDef) valueTypeSymbol); } } } else { if (valueType.isArrayType()) { BLangExceptionHelper.throwSemanticError(attributeValue, SemanticErrors.INCOMPATIBLE_TYPES_ARRAY_FOUND, attributeTypeSymbol.getName()); } if (attributeTypeSymbol != valueTypeSymbol) { BLangExceptionHelper.throwSemanticError(attributeValue, SemanticErrors.INCOMPATIBLE_TYPES, attributeTypeSymbol.getSymbolName(), valueTypeSymbol.getSymbolName()); } AnnotationAttachment childAnnotation = attributeValue.getAnnotationValue(); if (childAnnotation != null && valueTypeSymbol instanceof AnnotationDef) { validateAttributes(childAnnotation, (AnnotationDef) valueTypeSymbol); } } }); } /** * Populate default values to the annotation attributes. 
* * @param annotation Annotation attachment to populate default values * @param annotationDef Definition of the annotation corresponds to the provided annotation attachment */ private void populateDefaultValues(AnnotationAttachment annotation, AnnotationDef annotationDef) { Map<String, AnnotationAttributeValue> attributeValPairs = annotation.getAttributeNameValuePairs(); for (AnnotationAttributeDef attributeDef : annotationDef.getAttributeDefs()) { String attributeName = attributeDef.getName(); if (!attributeValPairs.containsKey(attributeName)) { BasicLiteral defaultValue = attributeDef.getAttributeValue(); if (defaultValue != null) { annotation.addAttributeNameValuePair(attributeName, new AnnotationAttributeValue(defaultValue.getBValue(), defaultValue.getTypeName(), null, null)); } continue; } AnnotationAttributeValue attributeValue = attributeValPairs.get(attributeName); if (attributeValue.getVarRefExpr() != null) { continue; } SimpleTypeName valueType = attributeValue.getTypeName(); if (valueType.isArrayType()) { AnnotationAttributeValue[] valuesArray = attributeValue.getValueArray(); for (AnnotationAttributeValue value : valuesArray) { AnnotationAttachment annotationTypeVal = value.getAnnotationValue(); if (annotationTypeVal == null) { continue; } SimpleTypeName attributeType = attributeDef.getTypeName(); BLangSymbol attributeTypeSymbol = annotationDef.resolve( new SymbolName(attributeType.getName(), attributeType.getPackagePath())); if (attributeTypeSymbol instanceof AnnotationDef) { populateDefaultValues(annotationTypeVal, (AnnotationDef) attributeTypeSymbol); } } } else { AnnotationAttachment annotationTypeVal = attributeValue.getAnnotationValue(); if (annotationTypeVal == null) { continue; } BLangSymbol attributeTypeSymbol = annotationDef.resolve(attributeDef.getTypeName().getSymbolName()); if (attributeTypeSymbol instanceof AnnotationDef) { populateDefaultValues(annotationTypeVal, (AnnotationDef) attributeTypeSymbol); } } } } @Override public void 
visit(AnnotationAttributeDef annotationAttributeDef) { SimpleTypeName fieldType = annotationAttributeDef.getTypeName(); BasicLiteral fieldVal = annotationAttributeDef.getAttributeValue(); if (fieldVal != null) { fieldVal.accept(this); BType valueType = fieldVal.getType(); if (!BTypes.isBuiltInTypeName(fieldType.getName())) { BLangExceptionHelper.throwSemanticError(annotationAttributeDef, SemanticErrors.INVALID_DEFAULT_VALUE); } BLangSymbol typeSymbol = currentScope.resolve(fieldType.getSymbolName()); BType fieldBType = (BType) typeSymbol; if (!BTypes.isValueType(fieldBType)) { BLangExceptionHelper.throwSemanticError(annotationAttributeDef, SemanticErrors.INVALID_DEFAULT_VALUE); } if (fieldBType != valueType) { BLangExceptionHelper.throwSemanticError(annotationAttributeDef, SemanticErrors.INVALID_OPERATION_INCOMPATIBLE_TYPES, fieldType, fieldVal.getTypeName()); } } else { BLangSymbol typeSymbol; if (fieldType.isArrayType()) { typeSymbol = currentScope.resolve(new SymbolName(fieldType.getName(), fieldType.getPackagePath())); } else { typeSymbol = currentScope.resolve(fieldType.getSymbolName()); } if (((typeSymbol instanceof BType) && !BTypes.isValueType((BType) typeSymbol)) || (!(typeSymbol instanceof BType) && !(typeSymbol instanceof AnnotationDef))) { BLangExceptionHelper.throwSemanticError(annotationAttributeDef, SemanticErrors.INVALID_ATTRIBUTE_TYPE, fieldType); } if (!(typeSymbol instanceof BType)) { fieldType.setPkgPath(annotationAttributeDef.getPackagePath()); } } } @Override public void visit(AnnotationDef annotationDef) { for (AnnotationAttributeDef fields : annotationDef.getAttributeDefs()) { fields.accept(this); } for (AnnotationAttachment annotationAttachment : annotationDef.getAnnotations()) { annotationAttachment.setAttachedPoint(new AnnotationAttachmentPoint(AttachmentPoint.ANNOTATION, null)); annotationAttachment.accept(this); } } @Override public void visit(ParameterDef paramDef) { BType bType = BTypes.resolveType(paramDef.getTypeName(), 
currentScope, paramDef.getNodeLocation()); paramDef.setType(bType); if (paramDef.getAnnotations() == null) { return; } for (AnnotationAttachment annotationAttachment : paramDef.getAnnotations()) { annotationAttachment.setAttachedPoint(new AnnotationAttachmentPoint(AttachmentPoint.PARAMETER, null)); annotationAttachment.accept(this); } } @Override public void visit(SimpleVariableDef varDef) { } @Override public void visit(VariableDefStmt varDefStmt) { VariableDef varDef = varDefStmt.getVariableDef(); BType lhsType = BTypes.resolveType(varDef.getTypeName(), currentScope, varDef.getNodeLocation()); varDef.setType(lhsType); if (varDef.getKind() == null) { varDef.setKind(VariableDef.Kind.LOCAL_VAR); } ((VariableReferenceExpr) varDefStmt.getLExpr()).setLHSExpr(true); SymbolName symbolName = new SymbolName(varDef.getName(), currentPkg); BLangSymbol varSymbol = currentScope.resolve(symbolName); if (varSymbol != null && varSymbol.getSymbolScope().getScopeName() == currentScope.getScopeName()) { BLangExceptionHelper.throwSemanticError(varDef, SemanticErrors.REDECLARED_SYMBOL, varDef.getName()); } currentScope.define(symbolName, varDef); Expression rExpr = varDefStmt.getRExpr(); if (rExpr == null) { return; } if (rExpr instanceof RefTypeInitExpr) { RefTypeInitExpr refTypeInitExpr = getNestedInitExpr(rExpr, lhsType); varDefStmt.setRExpr(refTypeInitExpr); refTypeInitExpr.accept(this); return; } BType rhsType; if (rExpr instanceof ExecutableMultiReturnExpr) { rExpr.accept(this); ExecutableMultiReturnExpr multiReturnExpr = (ExecutableMultiReturnExpr) rExpr; BType[] returnTypes = multiReturnExpr.getTypes(); if (returnTypes.length != 1) { BLangExceptionHelper.throwSemanticError(varDefStmt, SemanticErrors.ASSIGNMENT_COUNT_MISMATCH, "1", returnTypes.length); } rhsType = returnTypes[0]; } else { visitSingleValueExpr(rExpr); rhsType = rExpr.getType(); } AssignabilityResult result = performAssignabilityCheck(lhsType, rExpr); if (result.expression != null) { 
varDefStmt.setRExpr(result.expression); } else if (!result.assignable) { BLangExceptionHelper.throwSemanticError(varDefStmt, SemanticErrors.INCOMPATIBLE_ASSIGNMENT, rhsType, lhsType); } } @Override public void visit(AssignStmt assignStmt) { Expression[] lExprs = assignStmt.getLExprs(); visitLExprsOfAssignment(assignStmt, lExprs); Expression rExpr = assignStmt.getRExpr(); if (rExpr instanceof FunctionInvocationExpr || rExpr instanceof ActionInvocationExpr) { rExpr.accept(this); if (assignStmt.isDeclaredWithVar()) { assignVariableRefTypes(lExprs, ((CallableUnitInvocationExpr) rExpr).getTypes()); } checkForMultiAssignmentErrors(assignStmt, lExprs, (CallableUnitInvocationExpr) rExpr); return; } if (lExprs.length > 1 && (rExpr instanceof TypeCastExpression || rExpr instanceof TypeConversionExpr)) { ((AbstractExpression) rExpr).setMultiReturnAvailable(true); rExpr.accept(this); if (assignStmt.isDeclaredWithVar()) { assignVariableRefTypes(lExprs, ((ExecutableMultiReturnExpr) rExpr).getTypes()); } checkForMultiValuedCastingErrors(assignStmt, lExprs, (ExecutableMultiReturnExpr) rExpr); return; } Expression lExpr = assignStmt.getLExprs()[0]; BType lhsType = lExpr.getType(); if (rExpr instanceof RefTypeInitExpr) { if (assignStmt.isDeclaredWithVar()) { BLangExceptionHelper.throwSemanticError(assignStmt, SemanticErrors.INVALID_VAR_ASSIGNMENT); } RefTypeInitExpr refTypeInitExpr = getNestedInitExpr(rExpr, lhsType); assignStmt.setRExpr(refTypeInitExpr); refTypeInitExpr.accept(this); return; } visitSingleValueExpr(rExpr); BType rhsType = rExpr.getType(); if (assignStmt.isDeclaredWithVar()) { ((SimpleVarRefExpr) lExpr).getVariableDef().setType(rhsType); lhsType = rhsType; } AssignabilityResult result = performAssignabilityCheck(lhsType, rExpr); if (result.expression != null) { assignStmt.setRExpr(result.expression); } else if (!result.assignable) { BLangExceptionHelper.throwSemanticError(assignStmt, SemanticErrors.INCOMPATIBLE_ASSIGNMENT, rhsType, lhsType); } } @Override public void 
// Analyzes a block: validates placement of flow-control statements, flags
// unreachable code, and propagates the always-returns property upward.
visit(BlockStmt blockStmt) {
    openScope(blockStmt);
    for (int stmtIndex = 0; stmtIndex < blockStmt.getStatements().length; stmtIndex++) {
        Statement stmt = blockStmt.getStatements()[stmtIndex];
        // break/continue only inside a while; abort only inside a transaction;
        // retry only inside a failed block (counters track the enclosing constructs).
        if (stmt instanceof BreakStmt && whileStmtCount < 1) {
            BLangExceptionHelper.throwSemanticError(stmt, SemanticErrors.BREAK_STMT_NOT_ALLOWED_HERE);
        }
        if (stmt instanceof ContinueStmt && whileStmtCount < 1) {
            BLangExceptionHelper.throwSemanticError(stmt, SemanticErrors.CONTINUE_STMT_NOT_ALLOWED_HERE);
        }
        if (stmt instanceof AbortStmt && transactionStmtCount < 1) {
            BLangExceptionHelper.throwSemanticError(stmt, SemanticErrors.ABORT_STMT_NOT_ALLOWED_HERE);
        }
        if (stmt instanceof RetryStmt && failedBlockCount < 1) {
            BLangExceptionHelper.throwSemanticError(stmt, SemanticErrors.RETRY_STMT_NOT_ALLOWED_HERE);
        }
        if (isWithinWorker) {
            // reply is not permitted inside a worker body.
            if (stmt instanceof ReplyStmt) {
                BLangExceptionHelper.throwSemanticError(stmt, SemanticErrors.REPLY_STMT_NOT_ALLOWED_HERE);
            }
        }
        // Statements after an unconditional transfer of control are unreachable.
        if (stmt instanceof BreakStmt || stmt instanceof ContinueStmt || stmt instanceof ReplyStmt
                || stmt instanceof AbortStmt || stmt instanceof RetryStmt) {
            checkUnreachableStmt(blockStmt.getStatements(), stmtIndex + 1);
        }
        stmt.accept(this);
        if (stmt.isAlwaysReturns()) {
            checkUnreachableStmt(blockStmt.getStatements(), stmtIndex + 1);
            blockStmt.setAlwaysReturns(true);
        }
    }
    closeScope();
}

// Comments carry no semantics; nothing to analyze.
@Override
public void visit(CommentStmt commentStmt) {
}

// Checks that every condition is boolean and marks the statement always-returns
// only when the then-, all else-if-, and else-branches all always return.
@Override
public void visit(IfElseStmt ifElseStmt) {
    boolean stmtReturns = true;
    Expression expr = ifElseStmt.getCondition();
    visitSingleValueExpr(expr);
    if (expr.getType() != BTypes.typeBoolean) {
        BLangExceptionHelper
                .throwSemanticError(ifElseStmt, SemanticErrors.INCOMPATIBLE_TYPES_BOOLEAN_EXPECTED,
                        expr.getType());
    }

    Statement thenBody = ifElseStmt.getThenBody();
    thenBody.accept(this);
    stmtReturns &= thenBody.isAlwaysReturns();

    for (IfElseStmt.ElseIfBlock elseIfBlock : ifElseStmt.getElseIfBlocks()) {
        Expression elseIfCondition = elseIfBlock.getElseIfCondition();
        visitSingleValueExpr(elseIfCondition);
        if (elseIfCondition.getType() != BTypes.typeBoolean) {
            BLangExceptionHelper.throwSemanticError(ifElseStmt,
                    SemanticErrors.INCOMPATIBLE_TYPES_BOOLEAN_EXPECTED, elseIfCondition.getType());
        }
        Statement elseIfBody = elseIfBlock.getElseIfBody();
        elseIfBody.accept(this);
        stmtReturns &= elseIfBody.isAlwaysReturns();
    }

    Statement elseBody = ifElseStmt.getElseBody();
    if (elseBody != null) {
        elseBody.accept(this);
        stmtReturns &= elseBody.isAlwaysReturns();
    } else {
        // Without an else branch there is a path that falls through.
        stmtReturns = false;
    }
    ifElseStmt.setAlwaysReturns(stmtReturns);
}

// Validates the loop condition type and rejects empty loop bodies; the counter
// enables break/continue inside the body.
@Override
public void visit(WhileStmt whileStmt) {
    whileStmtCount++;
    Expression expr = whileStmt.getCondition();
    visitSingleValueExpr(expr);
    if (expr.getType() != BTypes.typeBoolean) {
        BLangExceptionHelper
                .throwSemanticError(whileStmt, SemanticErrors.INCOMPATIBLE_TYPES_BOOLEAN_EXPECTED,
                        expr.getType());
    }
    BlockStmt blockStmt = whileStmt.getBody();
    if (blockStmt.getStatements().length == 0) {
        BLangExceptionHelper.throwSemanticError(blockStmt, SemanticErrors.NO_STATEMENTS_WHILE_LOOP);
    }
    blockStmt.accept(this);
    whileStmtCount--;
}

@Override
public void visit(BreakStmt breakStmt) {
    checkParent(breakStmt);
}

@Override
public void visit(ContinueStmt continueStmt) {
    checkParent(continueStmt);
}

// Analyzes try/catch/finally: each catch parameter must be the ballerina error
// struct (or explicitly castable to it), and duplicate catch types are rejected.
@Override
public void visit(TryCatchStmt tryCatchStmt) {
    tryCatchStmt.getTryBlock().accept(this);
    BLangSymbol error = currentScope.resolve(new SymbolName(BALLERINA_ERROR, ERRORS_PACKAGE));
    // Set.add returns false on duplicates — used below to detect repeated catch types.
    Set<BType> definedTypes = new HashSet<>();
    if (tryCatchStmt.getCatchBlocks().length != 0) {
        if (error == null || !(error instanceof StructDef)) {
            BLangExceptionHelper.throwSemanticError(tryCatchStmt, SemanticErrors.CANNOT_RESOLVE_STRUCT,
                    ERRORS_PACKAGE, BALLERINA_ERROR);
        }
    }
    for (TryCatchStmt.CatchBlock catchBlock : tryCatchStmt.getCatchBlocks()) {
        catchBlock.getParameterDef().setKind(VariableDef.Kind.LOCAL_VAR);
        catchBlock.getParameterDef().accept(this);
        // Condition continues on the next chunk line (cast-lattice check).
        if (!error.equals(catchBlock.getParameterDef().getType())
                && (!(catchBlock.getParameterDef().getType() instanceof StructDef)
// Continuation of visit(TryCatchStmt): a catch type is acceptable only if it is
// the error struct itself or has an explicit-cast edge to it.
|| TypeLattice.getExplicitCastLattice().getEdgeFromTypes(catchBlock.getParameterDef()
        .getType(), error, null) == null)) {
    throw new SemanticException(BLangExceptionHelper.constructSemanticError(
            catchBlock.getCatchBlockStmt().getNodeLocation(),
            SemanticErrors.ONLY_ERROR_TYPE_ALLOWED_HERE));
}
// Reject a second catch block with the same parameter type.
if (!definedTypes.add(catchBlock.getParameterDef().getType())) {
    throw new SemanticException(BLangExceptionHelper.constructSemanticError(
            catchBlock.getCatchBlockStmt().getNodeLocation(),
            SemanticErrors.DUPLICATED_ERROR_CATCH, catchBlock.getParameterDef().getTypeName()));
}
catchBlock.getCatchBlockStmt().accept(this);
}
if (tryCatchStmt.getFinallyBlock() != null) {
    tryCatchStmt.getFinallyBlock().getFinallyBlockStmt().accept(this);
}
}

// The thrown expression must be (or evaluate to) the ballerina error struct or a
// struct explicitly castable to it; otherwise it is rejected.
@Override
public void visit(ThrowStmt throwStmt) {
    throwStmt.getExpr().accept(this);
    BType expressionType = null;
    if (throwStmt.getExpr() instanceof SimpleVarRefExpr
            && throwStmt.getExpr().getType() instanceof StructDef) {
        expressionType = throwStmt.getExpr().getType();
    } else if (throwStmt.getExpr() instanceof FunctionInvocationExpr) {
        FunctionInvocationExpr funcIExpr = (FunctionInvocationExpr) throwStmt.getExpr();
        // Only a single-return function yielding a struct can be thrown directly.
        if (!funcIExpr.isMultiReturnExpr() && funcIExpr.getTypes().length == 1
                && funcIExpr.getTypes()[0] instanceof StructDef) {
            expressionType = funcIExpr.getTypes()[0];
        }
    }
    if (expressionType != null) {
        BLangSymbol error = currentScope.resolve(new SymbolName(BALLERINA_ERROR, ERRORS_PACKAGE));
        if (error == null) {
            BLangExceptionHelper.throwSemanticError(throwStmt, SemanticErrors.CANNOT_RESOLVE_STRUCT,
                    ERRORS_PACKAGE, BALLERINA_ERROR);
        }
        if (error.equals(expressionType)
                || TypeLattice.getExplicitCastLattice().getEdgeFromTypes
                (expressionType, error, null) != null) {
            // A throw terminates the current flow, like a return.
            throwStmt.setAlwaysReturns(true);
            return;
        }
    }
    throw new SemanticException(BLangExceptionHelper.constructSemanticError(
            throwStmt.getNodeLocation(), SemanticErrors.ONLY_ERROR_TYPE_ALLOWED_HERE));
}

// Statement wrappers simply delegate to the wrapped invocation expression.
@Override
public void visit(FunctionInvocationStmt functionInvocationStmt) {
    functionInvocationStmt.getFunctionInvocationExpr().accept(this);
}

@Override
public void visit(ActionInvocationStmt actionInvocationStmt) {
    actionInvocationStmt.getActionInvocationExpr().accept(this);
}

// Records the types of the sent expressions and links the target worker, except
// for the implicit "default"/"fork" targets.
@Override
public void visit(WorkerInvocationStmt workerInvocationStmt) {
    Expression[] expressions = workerInvocationStmt.getExpressionList();
    BType[] bTypes = new BType[expressions.length];
    int p = 0;
    for (Expression expression : expressions) {
        expression.accept(this);
        bTypes[p++] = expression.getType();
    }
    workerInvocationStmt.setTypes(bTypes);
    if (workerInvocationStmt.getCallableUnitName() != null
            && !workerInvocationStmt.getCallableUnitName().equals("default")
            && !workerInvocationStmt.getCallableUnitName().equals("fork")) {
        linkWorker(workerInvocationStmt);
    }
}

// Records the receive-expression types and resolves the source worker symbol,
// except for the implicit "default" worker.
@Override
public void visit(WorkerReplyStmt workerReplyStmt) {
    String workerName = workerReplyStmt.getWorkerName();
    SymbolName workerSymbol = new SymbolName(workerName);
    Expression[] expressions = workerReplyStmt.getExpressionList();
    BType[] bTypes = new BType[expressions.length];
    int p = 0;
    for (Expression expression : expressions) {
        expression.accept(this);
        bTypes[p++] = expression.getType();
    }
    workerReplyStmt.setTypes(bTypes);
    if (!workerName.equals("default")) {
        BLangSymbol worker = currentScope.resolve(workerSymbol);
        if (!(worker instanceof Worker)) {
            BLangExceptionHelper.throwSemanticError(expressions[0],
                    SemanticErrors.INCOMPATIBLE_TYPES_UNKNOWN_FOUND, workerSymbol);
        }
        workerReplyStmt.setWorker((Worker) worker);
    }
}

// Analyzes fork-join: workers, the join block (whose result variable must be a
// map), and — continuing on later chunk lines — the timeout block.
@Override
public void visit(ForkJoinStmt forkJoinStmt) {
    boolean stmtReturns = true;
    openScope(forkJoinStmt);
    for (Worker worker : forkJoinStmt.getWorkers()) {
        worker.accept(this);
    }
    ForkJoinStmt.Join join = forkJoinStmt.getJoin();
    openScope(join);
    ParameterDef parameter = join.getJoinResult();
    if (parameter != null) {
        parameter.setKind(VariableDef.Kind.LOCAL_VAR);
        parameter.accept(this);
        join.define(parameter.getSymbolName(), parameter);
        // Body of this check continues on the next chunk line.
        if (!(parameter.getType() instanceof BMapType)) {
throw new SemanticException("Incompatible types: expected map in " + parameter.getNodeLocation().getFileName() + ":" + parameter.getNodeLocation(). getLineNumber()); } } Statement joinBody = join.getJoinBlock(); if (joinBody != null) { joinBody.accept(this); stmtReturns &= joinBody.isAlwaysReturns(); } closeScope(); ForkJoinStmt.Timeout timeout = forkJoinStmt.getTimeout(); openScope(timeout); Expression timeoutExpr = timeout.getTimeoutExpression(); if (timeoutExpr != null) { timeoutExpr.accept(this); } ParameterDef timeoutParam = timeout.getTimeoutResult(); if (timeoutParam != null) { timeoutParam.accept(this); timeout.define(timeoutParam.getSymbolName(), timeoutParam); if (!(parameter.getType() instanceof BMapType)) { throw new SemanticException("Incompatible types: expected map in " + parameter.getNodeLocation().getFileName() + ":" + parameter.getNodeLocation().getLineNumber()); } } Statement timeoutBody = timeout.getTimeoutBlock(); if (timeoutBody != null) { timeoutBody.accept(this); stmtReturns &= timeoutBody.isAlwaysReturns(); } resolveWorkerInteractions(forkJoinStmt); resolveForkJoin(forkJoinStmt); closeScope(); forkJoinStmt.setAlwaysReturns(stmtReturns); closeScope(); } @Override public void visit(TransactionStmt transactionStmt) { transactionStmtCount++; transactionStmt.getTransactionBlock().accept(this); transactionStmtCount--; TransactionStmt.FailedBlock failedBlock = transactionStmt.getFailedBlock(); if (failedBlock != null) { failedBlockCount++; failedBlock.getFailedBlockStmt().accept(this); failedBlockCount--; } TransactionStmt.AbortedBlock abortedBlock = transactionStmt.getAbortedBlock(); if (abortedBlock != null) { abortedBlock.getAbortedBlockStmt().accept(this); } TransactionStmt.CommittedBlock committedBlock = transactionStmt.getCommittedBlock(); if (committedBlock != null) { committedBlock.getCommittedBlockStmt().accept(this); } } @Override public void visit(AbortStmt abortStmt) { } @Override public void visit(RetryStmt retryStmt) { 
// Continuation of visit(RetryStmt): analyze the count expression and validate it.
retryStmt.getRetryCountExpression().accept(this);
checkRetryStmtValidity(retryStmt);
}

// reply is only legal in resources; its optional expression must be a message
// and may not be an action invocation.
@Override
public void visit(ReplyStmt replyStmt) {
    if (currentCallableUnit instanceof Function) {
        BLangExceptionHelper.throwSemanticError(currentCallableUnit,
                SemanticErrors.REPLY_STATEMENT_CANNOT_USED_IN_FUNCTION);
    } else if (currentCallableUnit instanceof Action) {
        BLangExceptionHelper.throwSemanticError(currentCallableUnit,
                SemanticErrors.REPLY_STATEMENT_CANNOT_USED_IN_ACTION);
    }
    if (replyStmt.getReplyExpr() instanceof ActionInvocationExpr) {
        BLangExceptionHelper.throwSemanticError(currentCallableUnit,
                SemanticErrors.ACTION_INVOCATION_NOT_ALLOWED_IN_REPLY);
    }
    Expression replyExpr = replyStmt.getReplyExpr();
    if (replyExpr != null) {
        visitSingleValueExpr(replyExpr);
        if (replyExpr.getType() != BTypes.typeMessage) {
            BLangExceptionHelper.throwSemanticError(replyExpr, SemanticErrors.INCOMPATIBLE_TYPES,
                    BTypes.typeMessage, replyExpr.getType());
        }
    }
}

// Matches the return expressions against the callable unit's declared return
// parameters: bare 'return' with named return params, single multi-return
// function call, and the general argument-by-argument case.
@Override
public void visit(ReturnStmt returnStmt) {
    if (currentCallableUnit instanceof Resource) {
        BLangExceptionHelper.throwSemanticError(returnStmt, SemanticErrors.RETURN_CANNOT_USED_IN_RESOURCE);
    }
    if (transactionStmtCount > 0) {
        BLangExceptionHelper.throwSemanticError(returnStmt,
                SemanticErrors.RETURN_CANNOT_USED_IN_TRANSACTION);
    }
    Expression[] returnArgExprs = returnStmt.getExprs();
    ParameterDef[] returnParamsOfCU = currentCallableUnit.getReturnParameters();
    if (returnArgExprs.length == 0 && returnParamsOfCU.length == 0) {
        // Nothing declared, nothing returned.
        return;
    }
    if (returnArgExprs.length == 0 && returnParamsOfCU[0].getName() != null) {
        // Bare 'return' with named return parameters: synthesize references to them.
        Expression[] returnExprs = new Expression[returnParamsOfCU.length];
        for (int i = 0; i < returnParamsOfCU.length; i++) {
            SimpleVarRefExpr variableRefExpr = new SimpleVarRefExpr(returnStmt.getNodeLocation(),
                    returnStmt.getWhiteSpaceDescriptor(), returnParamsOfCU[i].getSymbolName().getName(),
                    null, returnParamsOfCU[i].getSymbolName().getPkgPath());
            visit(variableRefExpr);
            returnExprs[i] = variableRefExpr;
        }
        returnStmt.setExprs(returnExprs);
        return;
    } else if (returnArgExprs.length == 0) {
        BLangExceptionHelper.throwSemanticError(returnStmt, SemanticErrors.NOT_ENOUGH_ARGUMENTS_TO_RETURN);
    }

    BType[] typesOfReturnExprs = new BType[returnArgExprs.length];
    for (int i = 0; i < returnArgExprs.length; i++) {
        Expression returnArgExpr = returnArgExprs[i];
        returnArgExpr.accept(this);
        typesOfReturnExprs[i] = returnArgExpr.getType();
    }

    if (returnArgExprs.length == 1 && returnArgExprs[0] instanceof FunctionInvocationExpr) {
        // 'return f()' where f may return multiple values: match f's return types
        // one-to-one against this unit's return parameters.
        FunctionInvocationExpr funcIExpr = (FunctionInvocationExpr) returnArgExprs[0];
        BType[] funcIExprReturnTypes = funcIExpr.getTypes();
        if (funcIExprReturnTypes.length > returnParamsOfCU.length) {
            BLangExceptionHelper.throwSemanticError(returnStmt,
                    SemanticErrors.TOO_MANY_ARGUMENTS_TO_RETURN);
        } else if (funcIExprReturnTypes.length < returnParamsOfCU.length) {
            BLangExceptionHelper.throwSemanticError(returnStmt,
                    SemanticErrors.NOT_ENOUGH_ARGUMENTS_TO_RETURN);
        }
        for (int i = 0; i < returnParamsOfCU.length; i++) {
            BType lhsType = returnParamsOfCU[i].getType();
            BType rhsType = funcIExprReturnTypes[i];
            if (isAssignableTo(lhsType, rhsType)) {
                continue;
            }
            BLangExceptionHelper.throwSemanticError(returnStmt,
                    SemanticErrors.CANNOT_USE_TYPE_IN_RETURN_STATEMENT, lhsType, rhsType);
        }
        return;
    }

    if (typesOfReturnExprs.length > returnParamsOfCU.length) {
        BLangExceptionHelper.throwSemanticError(returnStmt, SemanticErrors.TOO_MANY_ARGUMENTS_TO_RETURN);
    } else if (typesOfReturnExprs.length < returnParamsOfCU.length) {
        BLangExceptionHelper.throwSemanticError(returnStmt, SemanticErrors.NOT_ENOUGH_ARGUMENTS_TO_RETURN);
    } else {
        for (int i = 0; i < returnParamsOfCU.length; i++) {
            if (returnArgExprs[i] instanceof FunctionInvocationExpr) {
                // A multi-return call cannot fill a single return slot.
                FunctionInvocationExpr funcIExpr = ((FunctionInvocationExpr) returnArgExprs[i]);
                if (funcIExpr.getTypes().length > 1) {
                    BLangExceptionHelper.throwSemanticError(returnStmt,
                            SemanticErrors.MULTIPLE_VALUE_IN_SINGLE_VALUE_CONTEXT,
                            funcIExpr.getCallableUnit().getName());
                }
            }
            BType lhsType = returnParamsOfCU[i].getType();
            BType rhsType = typesOfReturnExprs[i];
            AssignabilityResult result = performAssignabilityCheck(lhsType, returnArgExprs[i]);
            if (result.expression != null) {
                // Implicitly convertible: substitute the wrapped expression.
                returnArgExprs[i] = result.expression;
            } else if (!result.assignable) {
                BLangExceptionHelper.throwSemanticError(returnStmt,
                        SemanticErrors.CANNOT_USE_TYPE_IN_RETURN_STATEMENT, lhsType, rhsType);
            }
        }
    }
}

// A transform statement must have a non-empty body.
@Override
public void visit(TransformStmt transformStmt) {
    BlockStmt blockStmt = transformStmt.getBody();
    if (blockStmt.getStatements().length == 0) {
        BLangExceptionHelper.throwSemanticError(transformStmt,
                SemanticErrors.TRANSFORM_STATEMENT_NO_BODY);
    }
    blockStmt.accept(this);
}

// 'create' is only valid for reference types, not value types.
@Override
public void visit(InstanceCreationExpr instanceCreationExpr) {
    visitSingleValueExpr(instanceCreationExpr);
    if (BTypes.isValueType(instanceCreationExpr.getType())) {
        BLangExceptionHelper.throwSemanticError(instanceCreationExpr,
                SemanticErrors.CANNOT_USE_CREATE_FOR_VALUE_TYPES, instanceCreationExpr.getType());
    }
}

// Analyzes arguments, links the callee, and records its return types; a
// function-pointer invocation takes its types from the pointer's function type.
@Override
public void visit(FunctionInvocationExpr funcIExpr) {
    Expression[] exprs = funcIExpr.getArgExprs();
    for (Expression expr : exprs) {
        visitSingleValueExpr(expr);
    }
    linkFunction(funcIExpr);
    if (funcIExpr.isFunctionPointerInvocation()) {
        BFunctionType type = (BFunctionType) funcIExpr.getFunctionPointerVariableDef().getType();
        funcIExpr.setTypes(type.getReturnParameterType());
    } else {
        BType[] returnParamTypes = funcIExpr.getCallableUnit().getReturnParamTypes();
        funcIExpr.setTypes(returnParamTypes);
    }
}

// Resolves an action invocation; the "connector name" may actually be a variable
// — a struct with a function-typed field (handled on later chunk lines) or a
// connector instance.
@Override
public void visit(ActionInvocationExpr actionIExpr) {
    String pkgPath = actionIExpr.getPackagePath();
    String name = actionIExpr.getConnectorName();
    SymbolName symbolName = new SymbolName(name, pkgPath);
    BLangSymbol bLangSymbol = currentScope.resolve(symbolName);
    if (bLangSymbol instanceof SimpleVariableDef) {
        if (((SimpleVariableDef) bLangSymbol).getType() instanceof StructDef) {
            // Struct-field function pointer call; continues on the next chunk line.
            StructDef structDef = (StructDef)
// Continuation of visit(ActionInvocationExpr): the symbol is a struct-typed
// variable — treat a.b(...) as a call through a function-typed struct field.
((SimpleVariableDef) bLangSymbol).getType();
VariableDef matchingVariableDef = null;
for (VariableDefStmt variableDefStmt : structDef.getFieldDefStmts()) {
    VariableDef variableDef = variableDefStmt.getVariableDef();
    if (variableDef.getType() instanceof BFunctionType
            && variableDef.getIdentifier().getName().equals(actionIExpr.getName())) {
        matchingVariableDef = variableDef;
        break;
    }
}
if (matchingVariableDef == null) {
    throw BLangExceptionHelper.getSemanticError(actionIExpr.getNodeLocation(),
            SemanticErrors.UNDEFINED_FUNCTION, actionIExpr.getName());
}
BFunctionType functionType = (BFunctionType) matchingVariableDef.getType();
Expression[] exprs = actionIExpr.getArgExprs();
// Argument count must match the field's function type exactly.
if (exprs == null || functionType.getParameterType().length != exprs.length) {
    throw BLangExceptionHelper.getSemanticError(actionIExpr.getNodeLocation(),
            SemanticErrors.INCORRECT_FUNCTION_ARGUMENTS, actionIExpr.getName());
}
for (Expression expr : exprs) {
    visitSingleValueExpr(expr);
}
for (int i = 0; i < exprs.length; i++) {
    if (!isAssignableTo(exprs[i].getType(), functionType.getParameterType()[i])) {
        throw BLangExceptionHelper.getSemanticError(actionIExpr.getNodeLocation(),
                SemanticErrors.INCORRECT_FUNCTION_ARGUMENTS, actionIExpr.getName());
    }
}
// Rewrite the node as a function-pointer invocation.
actionIExpr.setTypes(functionType.getReturnParameterType());
actionIExpr.setFunctionInvocation(true);
actionIExpr.setVariableDef((SimpleVariableDef) bLangSymbol);
actionIExpr.setFieldDef(matchingVariableDef);
return;
}
if (!(((SimpleVariableDef) bLangSymbol).getType() instanceof BallerinaConnectorDef)) {
    throw BLangExceptionHelper.getSemanticError(actionIExpr.getNodeLocation(),
            SemanticErrors.INCORRECT_ACTION_INVOCATION);
}
// Connector-instance variable: prepend a reference to the instance as the
// implicit first argument and retarget the node at the connector type.
Expression[] exprs = new Expression[actionIExpr.getArgExprs().length + 1];
SimpleVarRefExpr variableRefExpr = new SimpleVarRefExpr(actionIExpr.getNodeLocation(),
        null, name, null, pkgPath);
exprs[0] = variableRefExpr;
for (int i = 0; i < actionIExpr.getArgExprs().length; i++) {
    exprs[i + 1] = actionIExpr.getArgExprs()[i];
}
actionIExpr.setArgExprs(exprs);
SimpleVariableDef varDef = (SimpleVariableDef) bLangSymbol;
actionIExpr.setConnectorName(varDef.getTypeName().getName());
actionIExpr.setPackageName(varDef.getTypeName().getPackageName());
actionIExpr.setPackagePath(varDef.getTypeName().getPackagePath());
} else if (!(bLangSymbol instanceof BallerinaConnectorDef)) {
    throw BLangExceptionHelper.getSemanticError(actionIExpr.getNodeLocation(),
            SemanticErrors.INVALID_ACTION_INVOCATION);
}
Expression[] exprs = actionIExpr.getArgExprs();
for (Expression expr : exprs) {
    visitSingleValueExpr(expr);
}
linkAction(actionIExpr);
BType[] returnParamTypes = actionIExpr.getCallableUnit().getReturnParamTypes();
actionIExpr.setTypes(returnParamTypes);
}

// Resolves and records the literal's declared type.
@Override
public void visit(BasicLiteral basicLiteral) {
    BType bType = BTypes.resolveType(basicLiteral.getTypeName(), currentScope,
            basicLiteral.getNodeLocation());
    basicLiteral.setType(bType);
}

// Division and modulo are restricted to int/float operands.
@Override
public void visit(DivideExpr divideExpr) {
    BType binaryExprType = verifyBinaryArithmeticExprType(divideExpr);
    validateBinaryExprTypeForIntFloat(divideExpr, binaryExprType);
}

@Override
public void visit(ModExpression modExpr) {
    BType binaryExprType = verifyBinaryArithmeticExprType(modExpr);
    validateBinaryExprTypeForIntFloat(modExpr, binaryExprType);
}

// Validates a unary operator against its operand type: +/- on int/float, ! on
// boolean, typeof yields type, lengthof (continued on the next chunk line).
@Override
public void visit(UnaryExpression unaryExpr) {
    visitSingleValueExpr(unaryExpr.getRExpr());
    unaryExpr.setType(unaryExpr.getRExpr().getType());
    if (Operator.SUB.equals(unaryExpr.getOperator()) || Operator.ADD.equals(unaryExpr.getOperator())) {
        if (unaryExpr.getType() != BTypes.typeInt && unaryExpr.getType() != BTypes.typeFloat) {
            throwInvalidUnaryOpError(unaryExpr);
        }
    } else if (Operator.NOT.equals(unaryExpr.getOperator())) {
        if (unaryExpr.getType() != BTypes.typeBoolean) {
            throwInvalidUnaryOpError(unaryExpr);
        }
    } else if (Operator.TYPEOF.equals(unaryExpr.getOperator())) {
        unaryExpr.setType(BTypes.typeType);
    } else if (Operator.LENGTHOF.equals(unaryExpr.getOperator())) {
        BType rType =
// Continuation of visit(UnaryExpression): lengthof applies to arrays and json
// and yields an int.
unaryExpr.getRExpr().getType();
if (!((rType instanceof BArrayType) || (rType == BTypes.typeJSON))) {
    throwInvalidUnaryOpError(unaryExpr);
}
unaryExpr.setType(BTypes.typeInt);
} else {
    BLangExceptionHelper.throwSemanticError(unaryExpr, SemanticErrors.UNKNOWN_OPERATOR_IN_UNARY,
            unaryExpr.getOperator());
}
}

// '+' additionally supports string concatenation and xml, beyond int/float.
@Override
public void visit(AddExpression addExpr) {
    BType binaryExprType = verifyBinaryArithmeticExprType(addExpr);
    if (binaryExprType != BTypes.typeInt && binaryExprType != BTypes.typeFloat
            && binaryExprType != BTypes.typeString && binaryExprType != BTypes.typeXML) {
        throwInvalidBinaryOpError(addExpr);
    }
}

@Override
public void visit(MultExpression multExpr) {
    BType binaryExprType = verifyBinaryArithmeticExprType(multExpr);
    validateBinaryExprTypeForIntFloat(multExpr, binaryExprType);
}

@Override
public void visit(SubtractExpression subtractExpr) {
    BType binaryExprType = verifyBinaryArithmeticExprType(subtractExpr);
    validateBinaryExprTypeForIntFloat(subtractExpr, binaryExprType);
}

// Logical and equality operators delegate to shared verifiers.
@Override
public void visit(AndExpression andExpr) {
    visitBinaryLogicalExpr(andExpr);
}

@Override
public void visit(OrExpression orExpr) {
    visitBinaryLogicalExpr(orExpr);
}

@Override
public void visit(EqualExpression equalExpr) {
    verifyBinaryEqualityExprType(equalExpr);
}

@Override
public void visit(NotEqualExpression notEqualExpr) {
    verifyBinaryEqualityExprType(notEqualExpr);
}

// Ordering comparisons are restricted to int/float operands.
@Override
public void visit(GreaterEqualExpression greaterEqualExpr) {
    BType compareExprType = verifyBinaryCompareExprType(greaterEqualExpr);
    validateBinaryExprTypeForIntFloat(greaterEqualExpr, compareExprType);
}

@Override
public void visit(GreaterThanExpression greaterThanExpr) {
    BType compareExprType = verifyBinaryCompareExprType(greaterThanExpr);
    validateBinaryExprTypeForIntFloat(greaterThanExpr, compareExprType);
}

@Override
public void visit(LessEqualExpression lessEqualExpr) {
    BType compareExprType = verifyBinaryCompareExprType(lessEqualExpr);
    validateBinaryExprTypeForIntFloat(lessEqualExpr,
            compareExprType);
}

@Override
public void visit(LessThanExpression lessThanExpr) {
    BType compareExprType = verifyBinaryCompareExprType(lessThanExpr);
    validateBinaryExprTypeForIntFloat(lessThanExpr, compareExprType);
}

// Untyped/map/json init expressions share one analysis routine.
@Override
public void visit(RefTypeInitExpr refTypeInitExpr) {
    visitMapJsonInitExpr(refTypeInitExpr);
}

@Override
public void visit(MapInitExpr mapInitExpr) {
    visitMapJsonInitExpr(mapInitExpr);
}

@Override
public void visit(JSONInitExpr jsonInitExpr) {
    visitMapJsonInitExpr(jsonInitExpr);
}

// Checks each element of a json array literal: value types are widened to json
// via a cast where possible; other types must be assignable or castable to the
// inherited element type.
@Override
public void visit(JSONArrayInitExpr jsonArrayInitExpr) {
    BType inheritedType = jsonArrayInitExpr.getInheritedType();
    jsonArrayInitExpr.setType(inheritedType);
    BType inheritedElementType;
    if (inheritedType instanceof BArrayType) {
        inheritedElementType = ((BArrayType) inheritedType).getElementType();
    } else {
        inheritedElementType = inheritedType;
    }

    Expression[] argExprs = jsonArrayInitExpr.getArgExprs();
    for (int i = 0; i < argExprs.length; i++) {
        Expression argExpr = argExprs[i];
        if (argExpr instanceof RefTypeInitExpr) {
            // Nested literals inherit the element type.
            argExpr = getNestedInitExpr(argExpr, inheritedElementType);
            argExprs[i] = argExpr;
        }
        visitSingleValueExpr(argExpr);
        BType argExprType = argExpr.getType();

        if (BTypes.isValueType(argExprType)) {
            TypeCastExpression typeCastExpr = checkWideningPossible(BTypes.typeJSON, argExpr);
            if (typeCastExpr != null) {
                argExprs[i] = typeCastExpr;
            } else {
                BLangExceptionHelper.throwSemanticError(argExpr,
                        SemanticErrors.INCOMPATIBLE_TYPES_CANNOT_CONVERT,
                        argExprType.getSymbolName(), inheritedType.getSymbolName());
            }
            continue;
        }

        if (argExprType != BTypes.typeNull && isAssignableTo(inheritedElementType, argExprType)) {
            continue;
        }

        TypeCastExpression typeCastExpr = checkWideningPossible(inheritedElementType, argExpr);
        if (typeCastExpr == null) {
            BLangExceptionHelper.throwSemanticError(jsonArrayInitExpr,
                    SemanticErrors.INCOMPATIBLE_TYPES_CANNOT_CONVERT,
                    argExpr.getType(), inheritedElementType);
        }
        argExprs[i] = typeCastExpr;
    }
}

// Signature of the next visit method continues on the following chunk line.
@Override
public void
// Validates a connector instantiation: argument types must match the connector's
// declared parameters (shifted by one for filter connectors), and a filter's
// supported type must match the base connector.
visit(ConnectorInitExpr connectorInitExpr) {
    BType inheritedType = connectorInitExpr.getInheritedType();
    if (!(inheritedType instanceof BallerinaConnectorDef)) {
        BLangExceptionHelper.throwSemanticError(connectorInitExpr, SemanticErrors.CONNECTOR_INIT_NOT_ALLOWED);
    }
    connectorInitExpr.setType(inheritedType);
    for (Expression argExpr : connectorInitExpr.getArgExprs()) {
        visitSingleValueExpr(argExpr);
    }

    Expression[] argExprs = connectorInitExpr.getArgExprs();
    ParameterDef[] parameterDefs = ((BallerinaConnectorDef) inheritedType).getParameterDefs();
    for (int i = 0; i < argExprs.length; i++) {
        int j = i;
        if (((BallerinaConnectorDef) inheritedType).isFilterConnector()) {
            // A filter connector's first declared parameter is implicit; user
            // arguments map to the following parameters.
            j += 1;
        }
        SimpleTypeName simpleTypeName = parameterDefs[j].getTypeName();
        BType paramType = BTypes.resolveType(simpleTypeName, currentScope,
                connectorInitExpr.getNodeLocation());
        parameterDefs[j].setType(paramType);
        Expression argExpr = argExprs[i];
        if (!(parameterDefs[j].getType().equals(argExpr.getType()))) {
            BLangExceptionHelper.throwSemanticError(connectorInitExpr, SemanticErrors.INCOMPATIBLE_TYPES,
                    parameterDefs[j].getType(), argExpr.getType());
        }
    }

    ConnectorInitExpr filterConnectorInitExpr = connectorInitExpr.getParentConnectorInitExpr();
    if (filterConnectorInitExpr != null) {
        visit(filterConnectorInitExpr);
        BType filterConnectorType = filterConnectorInitExpr.getFilterSupportedType();
        if (filterConnectorType != null && filterConnectorType instanceof BallerinaConnectorDef) {
            if (!filterConnectorType.equals(inheritedType)) {
                BLangExceptionHelper.throwSemanticError(connectorInitExpr,
                        SemanticErrors.CONNECTOR_TYPES_NOT_EQUIVALENT, inheritedType,
                        filterConnectorInitExpr.getInheritedType());
            }
        }
    }
}

// An array literal is only valid where an array type is expected.
@Override
public void visit(ArrayInitExpr arrayInitExpr) {
    if (!(arrayInitExpr.getInheritedType() instanceof BArrayType)) {
        BLangExceptionHelper.throwSemanticError(arrayInitExpr, SemanticErrors.ARRAY_INIT_NOT_ALLOWED_HERE);
    }
    visitArrayInitExpr(arrayInitExpr);
}

// Checks every element of an array literal against the expected element type,
// substituting implicit conversions where assignability provides one.
private void visitArrayInitExpr(ArrayInitExpr arrayInitExpr) {
    BType inheritedType = arrayInitExpr.getInheritedType();
    arrayInitExpr.setType(inheritedType);
    Expression[] argExprs = arrayInitExpr.getArgExprs();
    if (argExprs.length == 0) {
        return;
    }

    BType expectedElementType = ((BArrayType) inheritedType).getElementType();
    for (int i = 0; i < argExprs.length; i++) {
        Expression argExpr = argExprs[i];
        if (argExpr instanceof RefTypeInitExpr) {
            // Nested literals inherit the element type.
            ((RefTypeInitExpr) argExpr).setInheritedType(expectedElementType);
            argExpr = getNestedInitExpr(argExpr, expectedElementType);
            argExprs[i] = argExpr;
        }
        visitSingleValueExpr(argExpr);

        AssignabilityResult result = performAssignabilityCheck(expectedElementType, argExpr);
        if (result.expression != null) {
            argExprs[i] = result.expression;
        } else if (!result.assignable) {
            BLangExceptionHelper.throwSemanticError(argExpr, SemanticErrors.INCOMPATIBLE_ASSIGNMENT,
                    argExpr.getType(), expectedElementType);
        }
    }
}

/**
 * Visit and analyze ballerina Struct initializing expression.
 */
@Override
public void visit(StructInitExpr structInitExpr) {
    BType inheritedType = structInitExpr.getInheritedType();
    structInitExpr.setType(inheritedType);
    Expression[] argExprs = structInitExpr.getArgExprs();
    if (argExprs.length == 0) {
        return;
    }

    StructDef structDef = (StructDef) inheritedType;
    for (Expression argExpr : argExprs) {
        KeyValueExpr keyValueExpr = (KeyValueExpr) argExpr;
        Expression keyExpr = keyValueExpr.getKeyExpr();
        // Keys must be plain field names.
        if (!(keyExpr instanceof SimpleVarRefExpr)) {
            throw BLangExceptionHelper.getSemanticError(keyExpr.getNodeLocation(),
                    SemanticErrors.INVALID_FIELD_NAME_STRUCT_INIT);
        }

        SimpleVarRefExpr varRefExpr = (SimpleVarRefExpr) keyExpr;
        BLangSymbol varDefSymbol = structDef.resolveMembers(new SymbolName(varRefExpr.getSymbolName().getName(),
                structDef.getPackagePath()));
        if (varDefSymbol == null) {
            throw BLangExceptionHelper.getSemanticError(keyExpr.getNodeLocation(),
                    SemanticErrors.UNKNOWN_FIELD_IN_STRUCT, varRefExpr.getVarName(), structDef.getName());
        }
        if (!(varDefSymbol instanceof SimpleVariableDef)) {
            throw BLangExceptionHelper.getSemanticError(varRefExpr.getNodeLocation(),
                    SemanticErrors.INCOMPATIBLE_TYPES_UNKNOWN_FOUND, varDefSymbol.getSymbolName());
        }

        SimpleVariableDef varDef = (SimpleVariableDef) varDefSymbol;
        varRefExpr.setVariableDef(varDef);

        BType structFieldType = varDef.getType();
        Expression valueExpr = keyValueExpr.getValueExpr();
        if (valueExpr instanceof RefTypeInitExpr) {
            // Nested literals inherit the field type.
            valueExpr = getNestedInitExpr(valueExpr, structFieldType);
            keyValueExpr.setValueExpr(valueExpr);
        }
        valueExpr.accept(this);

        AssignabilityResult result = performAssignabilityCheck(structFieldType, valueExpr);
        if (result.expression != null) {
            valueExpr = result.expression;
            keyValueExpr.setValueExpr(valueExpr);
        } else if (!result.assignable) {
            BLangExceptionHelper.throwSemanticError(keyExpr, SemanticErrors.INCOMPATIBLE_TYPES,
                    varDef.getType(), valueExpr.getType());
        }
    }
}

// Key-value pairs are analyzed by their enclosing init expression.
@Override
public void visit(KeyValueExpr keyValueExpr) {
}

// Resolves a simple variable reference in the current scope and binds its
// VariableDef; rejects unknown packages, unknown symbols, and non-variables.
@Override
public void visit(SimpleVarRefExpr simpleVarRefExpr) {
    if (simpleVarRefExpr.getPkgName() != null && simpleVarRefExpr.getPkgPath() == null) {
        throw BLangExceptionHelper.getSemanticError(simpleVarRefExpr.getNodeLocation(),
                SemanticErrors.UNDEFINED_PACKAGE_NAME, simpleVarRefExpr.getPkgName(),
                simpleVarRefExpr.getPkgName() + ":" + simpleVarRefExpr.getVarName());
    }
    SymbolName symbolName = simpleVarRefExpr.getSymbolName();
    BLangSymbol varDefSymbol = currentScope.resolve(symbolName);
    if (varDefSymbol == null) {
        BLangExceptionHelper.throwSemanticError(simpleVarRefExpr, SemanticErrors.UNDEFINED_SYMBOL,
                symbolName);
    }
    if (!(varDefSymbol instanceof VariableDef)) {
        throw BLangExceptionHelper.getSemanticError(simpleVarRefExpr.getNodeLocation(),
                SemanticErrors.INCOMPATIBLE_TYPES_UNKNOWN_FOUND, symbolName);
    }
    simpleVarRefExpr.setVariableDef((VariableDef) varDefSymbol);
}

// Body continues on the next chunk line.
@Override
public void visit(FieldBasedVarRefExpr fieldBasedVarRefExpr) {
    String fieldName = fieldBasedVarRefExpr.getFieldName();
    VariableReferenceExpr varRefExpr =
// Continuation of visit(FieldBasedVarRefExpr): type a.b according to the base
// type — struct field, map element, json, constrained json, or array 'length'.
fieldBasedVarRefExpr.getVarRefExpr();
varRefExpr.accept(this);

BType varRefType = varRefExpr.getType();
if (varRefType instanceof StructDef) {
    StructDef structDef = (StructDef) varRefType;
    BLangSymbol fieldSymbol = structDef.resolveMembers(new SymbolName(fieldName,
            structDef.getPackagePath()));
    if (fieldSymbol == null) {
        throw BLangExceptionHelper.getSemanticError(varRefExpr.getNodeLocation(),
                SemanticErrors.UNKNOWN_FIELD_IN_STRUCT, fieldName, structDef.getName());
    }
    SimpleVariableDef fieldDef = (SimpleVariableDef) fieldSymbol;
    fieldBasedVarRefExpr.setFieldDef(fieldDef);
    fieldBasedVarRefExpr.setType(fieldDef.getType());
} else if (varRefType == BTypes.typeMap) {
    fieldBasedVarRefExpr.setType(((BMapType) varRefType).getElementType());
} else if (varRefType == BTypes.typeJSON) {
    // Unconstrained json field access yields json.
    fieldBasedVarRefExpr.setType(BTypes.typeJSON);
} else if (varRefType instanceof BJSONConstraintType) {
    // Constrained json: the field must exist on the constraining struct, but the
    // access still yields json.
    StructDef structDefReference = (StructDef) ((BJSONConstraintType) varRefType).getConstraint();
    BLangSymbol fieldSymbol = structDefReference.resolveMembers(
            new SymbolName(fieldName, structDefReference.getPackagePath()));
    if (fieldSymbol == null) {
        throw BLangExceptionHelper
                .getSemanticError(varRefExpr.getNodeLocation(),
                        SemanticErrors.UNKNOWN_FIELD_IN_JSON_STRUCT, fieldName,
                        structDefReference.getName());
    }
    VariableDef fieldDef = (VariableDef) fieldSymbol;
    fieldBasedVarRefExpr.setFieldDef(fieldDef);
    fieldBasedVarRefExpr.setType(BTypes.typeJSON);
} else if (varRefType instanceof BArrayType && fieldName.equals("length")) {
    // array.length is read-only and of type int.
    if (fieldBasedVarRefExpr.isLHSExpr()) {
        throw BLangExceptionHelper.getSemanticError(fieldBasedVarRefExpr.getNodeLocation(),
                SemanticErrors.CANNOT_ASSIGN_VALUE_ARRAY_LENGTH);
    }
    fieldBasedVarRefExpr.setType(BTypes.typeInt);
} else {
    throw BLangExceptionHelper.getSemanticError(varRefExpr.getNodeLocation(),
            SemanticErrors.INVALID_OPERATION_NOT_SUPPORT_INDEXING, varRefType);
}
}

// Types a[i] according to the base type: int index for arrays, string key for
// maps and (literal-only) struct access; json allows both index forms.
@Override
public void visit(IndexBasedVarRefExpr indexBasedVarRefExpr) {
    Expression indexExpr = indexBasedVarRefExpr.getIndexExpr();
    indexExpr.accept(this);

    VariableReferenceExpr varRefExpr = indexBasedVarRefExpr.getVarRefExpr();
    varRefExpr.accept(this);

    BType varRefType = varRefExpr.getType();
    if (varRefType instanceof BArrayType) {
        if (indexExpr.getType() != BTypes.typeInt) {
            throw BLangExceptionHelper.getSemanticError(indexExpr.getNodeLocation(),
                    SemanticErrors.NON_INTEGER_ARRAY_INDEX, indexExpr.getType());
        }
        BArrayType arrayType = (BArrayType) varRefType;
        indexBasedVarRefExpr.setType(arrayType.getElementType());
    } else if (varRefType == BTypes.typeMap) {
        if (indexExpr.getType() != BTypes.typeString) {
            throw BLangExceptionHelper.getSemanticError(indexExpr.getNodeLocation(),
                    SemanticErrors.NON_STRING_MAP_INDEX, indexExpr.getType());
        }
        BMapType mapType = (BMapType) varRefType;
        indexBasedVarRefExpr.setType(mapType.getElementType());
    } else if (varRefType.getTag() == TypeTags.C_JSON_TAG) {
        // Constrained json does not support index-based access.
        throw BLangExceptionHelper.getSemanticError(indexExpr.getNodeLocation(),
                SemanticErrors.INVALID_OPERATION_NOT_SUPPORT_INDEXING, varRefExpr.getType().toString());
    } else if (varRefType == BTypes.typeJSON) {
        if (indexExpr.getType() != BTypes.typeInt && indexExpr.getType() != BTypes.typeString) {
            throw BLangExceptionHelper.getSemanticError(indexExpr.getNodeLocation(),
                    SemanticErrors.INCOMPATIBLE_TYPES, "string or int", varRefExpr.getType());
        }
        indexBasedVarRefExpr.setType(BTypes.typeJSON);
    } else if (varRefType instanceof StructDef) {
        if (indexExpr.getType() != BTypes.typeString) {
            throw BLangExceptionHelper.getSemanticError(indexExpr.getNodeLocation(),
                    SemanticErrors.INCOMPATIBLE_TYPES, BTypes.typeString, varRefExpr.getType());
        }
        // Struct keys must be compile-time string literals.
        if (!(indexExpr instanceof BasicLiteral)) {
            throw BLangExceptionHelper.getSemanticError(indexExpr.getNodeLocation(),
                    SemanticErrors.DYNAMIC_KEYS_NOT_SUPPORTED_FOR_STRUCT);
        }
        String fieldName = ((BasicLiteral) indexExpr).getBValue().stringValue();
        StructDef structDef = (StructDef) varRefType;
        BLangSymbol fieldSymbol = structDef.resolveMembers(new SymbolName(fieldName,
                structDef.getPackagePath()));
        if (fieldSymbol == null) {
            throw BLangExceptionHelper.getSemanticError(varRefExpr.getNodeLocation(),
                    SemanticErrors.UNKNOWN_FIELD_IN_STRUCT, fieldName, structDef.getName());
        }
        SimpleVariableDef fieldDef = (SimpleVariableDef) fieldSymbol;
        indexBasedVarRefExpr.setFieldDef(fieldDef);
        indexBasedVarRefExpr.setType(fieldDef.getType());
    } else {
        throw BLangExceptionHelper.getSemanticError(indexBasedVarRefExpr.getNodeLocation(),
                SemanticErrors.INVALID_OPERATION_NOT_SUPPORT_INDEXING, varRefType);
    }
}

// Types xml@ and xml@[index] access: the base must be xml; without an index the
// result is the (read-only here) attribute map, with an index it is a string.
@Override
public void visit(XMLAttributesRefExpr xmlAttributesRefExpr) {
    VariableReferenceExpr varRefExpr = xmlAttributesRefExpr.getVarRefExpr();
    varRefExpr.accept(this);

    if (varRefExpr.getType() != BTypes.typeXML) {
        BLangExceptionHelper.throwSemanticError(xmlAttributesRefExpr, SemanticErrors.INCOMPATIBLE_TYPES,
                BTypes.typeXML, varRefExpr.getType());
    }

    Expression indexExpr = xmlAttributesRefExpr.getIndexExpr();
    if (indexExpr == null) {
        if (xmlAttributesRefExpr.isLHSExpr()) {
            // The whole attribute map cannot be assigned to.
            BLangExceptionHelper.throwSemanticError(xmlAttributesRefExpr,
                    SemanticErrors.XML_ATTRIBUTE_MAP_UPDATE_NOT_ALLOWED);
        }
        xmlAttributesRefExpr.setType(BTypes.typeXMLAttributes);
        return;
    }

    xmlAttributesRefExpr.setType(BTypes.typeString);
    indexExpr.accept(this);
    if (indexExpr instanceof XMLQNameExpr) {
        ((XMLQNameExpr) indexExpr).setUsedInXML(true);
        return;
    }

    if (indexExpr.getType() != BTypes.typeString) {
        BLangExceptionHelper.throwSemanticError(indexExpr, SemanticErrors.NON_STRING_MAP_INDEX,
                indexExpr.getType());
    }

    // Capture the in-scope namespaces for runtime attribute resolution.
    Map<String, Expression> namespaces = getNamespaceInScope(xmlAttributesRefExpr.getNodeLocation());
    xmlAttributesRefExpr.setNamespaces(namespaces);
}

// Validates a qualified-name expression; a non-empty prefix is resolved against
// declared namespaces (continued on the next chunk line).
@Override
public void visit(XMLQNameExpr xmlQNameRefExpr) {
    if (xmlQNameRefExpr.isLHSExpr()) {
        BLangExceptionHelper.throwSemanticError(xmlQNameRefExpr,
                SemanticErrors.XML_QNAME_UPDATE_NOT_ALLOWED);
    }
    xmlQNameRefExpr.setType(BTypes.typeString);
    String prefix = xmlQNameRefExpr.getPrefix();
    if
// (continuation of visit(XMLQNameExpr) — the 'if' keyword is on the previous line)
(prefix.isEmpty()) {
        // No prefix: nothing to resolve.
        return;
    }
    // 'xmlns' itself cannot be used as a prefix.
    if (prefix.equals(XMLConstants.XMLNS_ATTRIBUTE)) {
        BLangExceptionHelper.throwSemanticError(xmlQNameRefExpr, SemanticErrors.INVALID_NAMESPACE_PREFIX, prefix);
    }
    NamespaceSymbolName nsSymbolName = new NamespaceSymbolName(prefix);
    BLangSymbol symbol = currentScope.resolve(nsSymbolName);
    if (symbol == null) {
        BLangExceptionHelper.throwSemanticError(xmlQNameRefExpr, SemanticErrors.UNDEFINED_NAMESPACE, prefix);
    }
    // Attach the resolved namespace URI as a string literal for later phases.
    String namepsaceUri = ((NamespaceDeclaration) symbol).getNamespaceUri();
    BasicLiteral namespaceUriLiteral = new BasicLiteral(xmlQNameRefExpr.getNodeLocation(), null,
            new SimpleTypeName(TypeConstants.STRING_TNAME), new BString(namepsaceUri));
    namespaceUriLiteral.accept(this);
    xmlQNameRefExpr.setNamepsaceUri(namespaceUriLiteral);
}

/**
 * Type-checks an explicit type cast. Resolves the target type if needed,
 * consults the explicit-cast type lattice for an edge (opcode), and handles
 * the multi-return form by resolving the ballerina cast-error struct for the
 * second return value. Unsafe casts are only permitted in multi-return form.
 */
@Override
public void visit(TypeCastExpression typeCastExpr) {
    boolean isMultiReturn = typeCastExpr.isMultiReturnExpr();
    Expression rExpr = typeCastExpr.getRExpr();
    visitSingleValueExpr(rExpr);
    BType sourceType = rExpr.getType();
    BType targetType = typeCastExpr.getType();
    if (targetType == null) {
        targetType = BTypes.resolveType(typeCastExpr.getTypeName(), currentScope, typeCastExpr.getNodeLocation());
        typeCastExpr.setType(targetType);
    }
    // Function types and null literals cannot be cast.
    if (sourceType instanceof BFunctionType || targetType instanceof BFunctionType) {
        BLangExceptionHelper.throwSemanticError(typeCastExpr, SemanticErrors.INCOMPATIBLE_TYPES_CANNOT_CAST,
                sourceType, targetType);
    }
    if (rExpr instanceof NullLiteral) {
        BLangExceptionHelper.throwSemanticError(typeCastExpr, SemanticErrors.INCOMPATIBLE_TYPES_CANNOT_CAST,
                sourceType, targetType);
    }
    TypeEdge newEdge = TypeLattice.getExplicitCastLattice().getEdgeFromTypes(sourceType, targetType, null);
    if (newEdge != null) {
        // A lattice edge exists: use its opcode; unsafe edges require the
        // multi-return (error-returning) form.
        typeCastExpr.setOpcode(newEdge.getOpcode());
        if (!newEdge.isSafe() && !isMultiReturn) {
            BLangExceptionHelper.throwSemanticError(typeCastExpr, SemanticErrors.UNSAFE_CAST_ATTEMPT,
                    sourceType, targetType);
        }
        if (!isMultiReturn) {
            typeCastExpr.setTypes(new BType[]{targetType});
            return;
        }
    } else if (sourceType == targetType) {
        // Identity cast: no-op.
        typeCastExpr.setOpcode(InstructionCodes.NOP);
        if (!isMultiReturn) {
            typeCastExpr.setTypes(new BType[]{targetType});
            return;
        }
    } else if ((sourceType.getTag() == TypeTags.C_JSON_TAG && targetType.getTag() == TypeTags.C_JSON_TAG)
            && TypeLattice.isAssignCompatible((StructDef) ((BJSONConstraintType) targetType).getConstraint(),
            (StructDef) ((BJSONConstraintType) sourceType).getConstraint())) {
        // Constrained-JSON to constrained-JSON with assign-compatible constraints: no-op.
        typeCastExpr.setOpcode(InstructionCodes.NOP);
        if (!isMultiReturn) {
            typeCastExpr.setTypes(new BType[]{targetType});
            return;
        }
    } else {
        // No lattice edge: allow a checked (runtime) cast only in multi-return form.
        boolean isUnsafeCastPossible = false;
        if (isMultiReturn) {
            isUnsafeCastPossible = checkUnsafeCastPossible(sourceType, targetType);
        }
        if (isUnsafeCastPossible) {
            typeCastExpr.setOpcode(InstructionCodes.CHECKCAST);
        } else {
            // Suggest a conversion if one exists on the transform lattice.
            TypeEdge conversionEdge = TypeLattice.getTransformLattice().getEdgeFromTypes(sourceType,
                    targetType, null);
            if (conversionEdge != null) {
                throw BLangExceptionHelper.getSemanticError(typeCastExpr.getNodeLocation(),
                        SemanticErrors.CANNOT_CAST_WITH_SUGGESTION, sourceType, targetType);
            }
            throw BLangExceptionHelper.getSemanticError(typeCastExpr.getNodeLocation(),
                    SemanticErrors.INCOMPATIBLE_TYPES_CANNOT_CAST, sourceType, targetType);
        }
    }
    // Multi-return form: second return value is the ballerina cast-error struct.
    BLangSymbol error = currentScope.resolve(new SymbolName(BALLERINA_CAST_ERROR, ERRORS_PACKAGE));
    if (error == null || !(error instanceof StructDef)) {
        BLangExceptionHelper.throwSemanticError(typeCastExpr, SemanticErrors.CANNOT_RESOLVE_STRUCT,
                ERRORS_PACKAGE, BALLERINA_CAST_ERROR);
    }
    typeCastExpr.setTypes(new BType[]{targetType, (BType) error});
}

/**
 * Type-checks an explicit type conversion. Mirrors visit(TypeCastExpression)
 * but consults the transform lattice, and suggests a cast when only a cast
 * edge exists. Multi-return form resolves the ballerina conversion-error struct.
 */
@Override
public void visit(TypeConversionExpr typeConversionExpr) {
    boolean isMultiReturn = typeConversionExpr.isMultiReturnExpr();
    Expression rExpr = typeConversionExpr.getRExpr();
    visitSingleValueExpr(rExpr);
    BType sourceType = rExpr.getType();
    BType targetType = typeConversionExpr.getType();
    if (targetType == null) {
        targetType = BTypes.resolveType(typeConversionExpr.getTypeName(), currentScope, null);
        typeConversionExpr.setType(targetType);
    }
    if (rExpr instanceof NullLiteral) {
        BLangExceptionHelper.throwSemanticError(typeConversionExpr,
                SemanticErrors.INCOMPATIBLE_TYPES_CANNOT_CONVERT, sourceType, targetType);
    }
    TypeEdge newEdge = TypeLattice.getTransformLattice().getEdgeFromTypes(sourceType, targetType, null);
    if (newEdge != null) {
        typeConversionExpr.setOpcode(newEdge.getOpcode());
        if (!newEdge.isSafe() && !isMultiReturn) {
            BLangExceptionHelper.throwSemanticError(typeConversionExpr,
                    SemanticErrors.UNSAFE_CONVERSION_ATTEMPT, sourceType, targetType);
        }
        if (!isMultiReturn) {
            typeConversionExpr.setTypes(new BType[]{targetType});
            return;
        }
    } else if (sourceType == targetType) {
        typeConversionExpr.setOpcode(InstructionCodes.NOP);
        if (!isMultiReturn) {
            typeConversionExpr.setTypes(new BType[]{targetType});
            return;
        }
    } else {
        // Suggest a cast if one exists on the explicit-cast lattice.
        TypeEdge castEdge = TypeLattice.getExplicitCastLattice().getEdgeFromTypes(sourceType, targetType, null);
        if (castEdge != null) {
            throw BLangExceptionHelper.getSemanticError(typeConversionExpr.getNodeLocation(),
                    SemanticErrors.CANNOT_CONVERT_WITH_SUGGESTION, sourceType, targetType);
        }
        throw BLangExceptionHelper.getSemanticError(typeConversionExpr.getNodeLocation(),
                SemanticErrors.INCOMPATIBLE_TYPES_CANNOT_CONVERT, sourceType, targetType);
    }
    BLangSymbol error = currentScope.resolve(new SymbolName(BALLERINA_CONVERSION_ERROR, ERRORS_PACKAGE));
    if (error == null || !(error instanceof StructDef)) {
        BLangExceptionHelper.throwSemanticError(typeConversionExpr, SemanticErrors.CANNOT_RESOLVE_STRUCT,
                ERRORS_PACKAGE, BALLERINA_CAST_ERROR);
    }
    typeConversionExpr.setTypes(new BType[]{targetType, (BType) error});
}

/** A null literal simply has the null type. */
@Override
public void visit(NullLiteral nullLiteral) {
    nullLiteral.setType(BTypes.typeNull);
}

/** Lambda expressions are intentionally not analyzed here. */
@Override
public void visit(LambdaExpression lambdaExpr) {
}

/**
 * Folds a string template literal into a chain of string concatenations;
 * the remainder of this method continues past this chunk.
 */
@Override
public void visit(StringTemplateLiteral stringTemplateLiteral) {
    Expression[] items = stringTemplateLiteral.getArgExprs();
    Expression concatExpr;
    if (items.length == 1) {
        concatExpr =
// (continuation of visit(StringTemplateLiteral))
items[0];
    } else {
        // Fold all items into a left-nested chain of AddExpressions.
        concatExpr = items[0];
        for (int i = 1; i < items.length; i++) {
            Expression currentItem = items[i];
            concatExpr = new AddExpression(currentItem.getNodeLocation(),
                    currentItem.getWhiteSpaceDescriptor(), concatExpr, currentItem);
        }
    }
    concatExpr.accept(this);
    concatExpr.setType(BTypes.typeString);
    stringTemplateLiteral.setConcatExpr(concatExpr);
    stringTemplateLiteral.setType(BTypes.typeString);
}

/** Delegates to the wrapped namespace declaration. */
@Override
public void visit(NamespaceDeclarationStmt namespaceDeclarationStmt) {
    namespaceDeclarationStmt.getNamespaceDclr().accept(this);
}

/**
 * Defines a namespace declaration in the current scope, rejecting an empty
 * URI with a non-empty prefix and redeclarations within the same scope.
 */
@Override
public void visit(NamespaceDeclaration namespaceDclr) {
    if (namespaceDclr.getNamespaceUri().isEmpty() && !namespaceDclr.getPrefix().isEmpty()) {
        BLangExceptionHelper.throwSemanticError(namespaceDclr, SemanticErrors.INVALID_NAMESPACE_DECLARATION,
                namespaceDclr.getPrefix());
    }
    NamespaceSymbolName nsSymbolName = new NamespaceSymbolName(namespaceDclr.getPrefix());
    BLangSymbol nsSymbol = currentScope.resolve(nsSymbolName);
    // Only a clash in the *same* scope is a redeclaration; outer scopes may be shadowed.
    if (nsSymbol != null && nsSymbol.getSymbolScope().getScopeName() == currentScope.getScopeName()) {
        BLangExceptionHelper.throwSemanticError(namespaceDclr, SemanticErrors.REDECLARED_SYMBOL,
                namespaceDclr.getPrefix());
    }
    currentScope.define(nsSymbolName, namespaceDclr);
}

/** XML literals carry no analysis of their own; children are visited via subtypes. */
@Override
public void visit(XMLLiteral xmlLiteral) {
}

/**
 * Type-checks an XML element literal: collects in-scope namespaces (inherited
 * from the parent element when nested), strips xmlns attributes into the
 * namespace map / default namespace, validates remaining attributes and the
 * start/end tag names, and visits child content.
 */
@Override
public void visit(XMLElementLiteral xmlElementLiteral) {
    Expression startTagName = xmlElementLiteral.getStartTagName();
    Map<String, Expression> namespaces;
    XMLElementLiteral parent = xmlElementLiteral.getParent();
    if (parent == null) {
        namespaces = getNamespaceInScope(xmlElementLiteral.getNodeLocation());
    } else {
        // Nested element: inherit the parent's namespaces and default namespace.
        namespaces = parent.getNamespaces();
        xmlElementLiteral.setDefaultNamespaceUri(parent.getDefaultNamespaceUri());
    }
    xmlElementLiteral.setNamespaces(namespaces);
    List<KeyValueExpr> attributes = xmlElementLiteral.getAttributes();
    Iterator<KeyValueExpr> attrItr = attributes.iterator();
    while (attrItr.hasNext()) {
        KeyValueExpr attribute = attrItr.next();
        Expression attrNameExpr = attribute.getKeyExpr();
        if (!(attrNameExpr instanceof XMLQNameExpr)) {
            continue;
        }
        Expression attrValueExpr = attribute.getValueExpr();
        XMLQNameExpr xmlQNameRefExpr = (XMLQNameExpr) attrNameExpr;
        // 'xmlns:prefix="uri"' — move into the namespace map and drop the attribute.
        if (xmlQNameRefExpr.getPrefix().equals(XMLConstants.XMLNS_ATTRIBUTE)) {
            attrValueExpr.accept(this);
            if (attrValueExpr instanceof BasicLiteral &&
                    ((BasicLiteral) attrValueExpr).getBValue().stringValue().isEmpty()) {
                BLangExceptionHelper.throwSemanticError(attribute,
                        SemanticErrors.INVALID_NAMESPACE_DECLARATION, xmlQNameRefExpr.getLocalname());
            }
            namespaces.put(xmlQNameRefExpr.getLocalname(), attrValueExpr);
            attrItr.remove();
            continue;
        }
        // Bare 'xmlns="uri"' — becomes the default namespace; drop the attribute.
        if (xmlQNameRefExpr.getLocalname().equals(XMLConstants.XMLNS_ATTRIBUTE)) {
            attrValueExpr.accept(this);
            xmlElementLiteral.setDefaultNamespaceUri(attrValueExpr);
            attrItr.remove();
        }
    }
    if (xmlElementLiteral.getDefaultNamespaceUri() == null) {
        // No explicit default namespace: fall back to the standard xmlns attribute URI.
        BasicLiteral defaultnsUriLiteral = new BasicLiteral(xmlElementLiteral.getNodeLocation(), null,
                new SimpleTypeName(TypeConstants.STRING_TNAME),
                new BString(XMLConstants.XMLNS_ATTRIBUTE_NS_URI));
        defaultnsUriLiteral.setType(BTypes.typeString);
        defaultnsUriLiteral.accept(this);
        xmlElementLiteral.setDefaultNamespaceUri(defaultnsUriLiteral);
    }
    validateXMLLiteralAttributes(attributes, namespaces);
    if (startTagName instanceof XMLQNameExpr) {
        validateXMLQname((XMLQNameExpr) startTagName, namespaces, xmlElementLiteral.getDefaultNamespaceUri());
    } else {
        startTagName.accept(this);
    }
    if (startTagName.getType() != BTypes.typeString) {
        // Non-string tag names get an implicit to-string conversion.
        startTagName = createImplicitStringConversionExpr(startTagName, startTagName.getType());
        xmlElementLiteral.setStartTagName(startTagName);
    }
    validateXMLLiteralEndTag(xmlElementLiteral, xmlElementLiteral.getDefaultNamespaceUri());
    XMLSequenceLiteral children = xmlElementLiteral.getContent();
    if (children != null) {
        children.accept(this);
    }
}

/**
 * Type-checks an XML comment literal; non-string content gets an implicit
 * to-string conversion.
 */
@Override
public void visit(XMLCommentLiteral xmlComment) {
    Expression contentExpr = xmlComment.getContent();
    if (contentExpr == null) {
        return;
    }
    contentExpr.accept(this);
    if (contentExpr.getType() != BTypes.typeString) {
        contentExpr = createImplicitStringConversionExpr(contentExpr, contentExpr.getType());
        xmlComment.setContent(contentExpr);
    }
}

/** Type-checks the (optional) content expression of an XML text literal. */
@Override
public void visit(XMLTextLiteral xmlText) {
    Expression contentExpr = xmlText.getContent();
    if (contentExpr == null) {
        return;
    }
    contentExpr.accept(this);
}

/**
 * Normalizes an XML sequence literal: keeps XML-typed children (when the
 * sequence has a parent element) as separate items, converts everything else
 * to string, and merges adjacent string items into AddExpression chains.
 */
@Override
public void visit(XMLSequenceLiteral xmlSequence) {
    Expression[] items = xmlSequence.getItems();
    List<Expression> newItems = new ArrayList<Expression>();
    Expression addExpr = null;  // running chain of adjacent string items
    for (int i = 0; i < items.length; i++) {
        Expression currentItem = items[i];
        currentItem.accept(this);
        if (xmlSequence.hasParent() && currentItem.getType() == BTypes.typeXML) {
            // XML child inside an element: flush any pending string chain, keep as-is.
            if (addExpr != null) {
                newItems.add(addExpr);
                addExpr = null;
            }
            newItems.add(currentItem);
            continue;
        }
        if (currentItem.getType() != BTypes.typeString) {
            // Non-string items must be implicitly convertible to string.
            Expression castExpr = getImplicitConversionExpr(currentItem, currentItem.getType(),
                    BTypes.typeString);
            if (castExpr == null) {
                if (xmlSequence.hasParent()) {
                    BLangExceptionHelper.throwSemanticError(currentItem,
                            SemanticErrors.INCOMPATIBLE_TYPES_IN_XML_TEMPLATE, currentItem.getType());
                }
                BLangExceptionHelper.throwSemanticError(currentItem, SemanticErrors.INCOMPATIBLE_TYPES,
                        BTypes.typeString, currentItem.getType());
            }
            currentItem = castExpr;
        }
        if (addExpr == null) {
            addExpr = currentItem;
            continue;
        }
        if (addExpr.getType() == BTypes.typeString) {
            // Merge adjacent string items into one concatenation chain.
            addExpr = new AddExpression(currentItem.getNodeLocation(),
                    currentItem.getWhiteSpaceDescriptor(), addExpr, currentItem);
        } else {
            newItems.add(addExpr);
            addExpr = currentItem;
        }
        addExpr.setType(BTypes.typeString);
    }
    if (addExpr != null) {
        newItems.add(addExpr);
    }
    items = newItems.toArray(new Expression[newItems.size()]);
    xmlSequence.setItems(items);
    xmlSequence.setConcatExpr(getXMLConcatExpression(items));
}

/**
 * Type-checks an XML processing-instruction literal; both the target and the
 * optional data expression are implicitly converted to string when needed.
 */
@Override
public void visit(XMLPILiteral xmlPI) {
    Expression target = xmlPI.getTarget();
    target.accept(this);
    if (target.getType() != BTypes.typeString) {
        target = createImplicitStringConversionExpr(target, target.getType());
        xmlPI.setTarget(target);
    }
    Expression data = xmlPI.getData();
    if (data == null) {
        return;
    }
    data.accept(this);
    if (data.getType() != BTypes.typeString) {
        data = createImplicitStringConversionExpr(data, data.getType());
        xmlPI.setData(data);
    }
}

/** Enters the given scope (becomes the current resolution scope). */
private void openScope(SymbolScope symbolScope) {
    currentScope = symbolScope;
}

/** Leaves the current scope, restoring its enclosing scope. */
private void closeScope() {
    currentScope = currentScope.getEnclosingScope();
}

/** Visits both operands of a binary expression as single-valued expressions. */
private void visitBinaryExpr(BinaryExpression expr) {
    visitSingleValueExpr(expr.getLExpr());
    visitSingleValueExpr(expr.getRExpr());
}

/**
 * Visits an expression and rejects multi-return invocations used where a
 * single value is required.
 */
private void visitSingleValueExpr(Expression expr) {
    expr.accept(this);
    if (expr.isMultiReturnExpr()) {
        FunctionInvocationExpr funcIExpr = (FunctionInvocationExpr) expr;
        String nameWithPkgName = (funcIExpr.getPackageName() != null)
                ? funcIExpr.getPackageName() + ":" + funcIExpr.getName() : funcIExpr.getName();
        BLangExceptionHelper.throwSemanticError(expr,
                SemanticErrors.MULTIPLE_VALUE_IN_SINGLE_VALUE_CONTEXT, nameWithPkgName);
    }
}

/** Rejects a binary expression whose operand type is neither int nor float. */
private void validateBinaryExprTypeForIntFloat(BinaryExpression binaryExpr, BType binaryExprType) {
    if (binaryExprType != BTypes.typeInt && binaryExprType != BTypes.typeFloat) {
        throwInvalidBinaryOpError(binaryExpr);
    }
}

/** Type-checks an arithmetic binary expression; its result type is the operand type. */
private BType verifyBinaryArithmeticExprType(BinaryArithmeticExpression binaryArithmeticExpr) {
    visitBinaryExpr(binaryArithmeticExpr);
    BType type = verifyBinaryExprType(binaryArithmeticExpr);
    binaryArithmeticExpr.setType(type);
    return type;
}

/**
 * Type-checks a comparison; the expression itself is boolean but the common
 * operand type is returned for further validation by the caller.
 */
private BType verifyBinaryCompareExprType(BinaryExpression binaryExpression) {
    visitBinaryExpr(binaryExpression);
    BType type = verifyBinaryExprType(binaryExpression);
    binaryExpression.setType(BTypes.typeBoolean);
    return type;
}

/**
 * Type-checks an equality expression, allowing null comparisons against
 * reference types; continues past this point in the chunk.
 */
private void verifyBinaryEqualityExprType(BinaryExpression binaryExpr) {
    visitBinaryExpr(binaryExpr);
    BType rType = binaryExpr.getRExpr().getType();
    BType lType = binaryExpr.getLExpr().getType();
    BType type;
    if (rType == BTypes.typeNull) {
        if
// (continuation of verifyBinaryEqualityExprType — the 'if' keyword is on the previous line)
(BTypes.isValueType(lType)) {
            // Value types cannot be compared against null.
            BLangExceptionHelper.throwSemanticError(binaryExpr,
                    SemanticErrors.INVALID_OPERATION_INCOMPATIBLE_TYPES, lType, rType);
        }
        type = rType;
    } else if (lType == BTypes.typeNull) {
        if (BTypes.isValueType(rType)) {
            BLangExceptionHelper.throwSemanticError(binaryExpr,
                    SemanticErrors.INVALID_OPERATION_INCOMPATIBLE_TYPES, lType, rType);
        }
        type = lType;
    } else {
        type = verifyBinaryExprType(binaryExpr);
    }
    binaryExpr.setType(BTypes.typeBoolean);
    // Equality is only defined over these operand types.
    if (type != BTypes.typeInt && type != BTypes.typeFloat && type != BTypes.typeBoolean &&
            type != BTypes.typeString && type != BTypes.typeNull && type != BTypes.typeType) {
        throwInvalidBinaryOpError(binaryExpr);
    }
}

/**
 * Determines the common operand type of a binary expression, inserting
 * implicit conversions where possible: non-string + string (ADD only) widens
 * the non-string side to string, and int widens to float. Throws a semantic
 * error when no common type exists.
 */
private BType verifyBinaryExprType(BinaryExpression binaryExpr) {
    Expression rExpr = binaryExpr.getRExpr();
    Expression lExpr = binaryExpr.getLExpr();
    BType rType = rExpr.getType();
    BType lType = lExpr.getType();
    if (rType.equals(lType)) {
        return rType;
    }
    // Mixed string operands are only valid for '+' (concatenation).
    if ((rType.equals(BTypes.typeString) || lType.equals(BTypes.typeString)) &&
            !(binaryExpr.getOperator().equals(Operator.ADD))) {
        throw getInvalidBinaryOpError(binaryExpr);
    }
    if ((rType.equals(BTypes.typeString))) {
        Expression newExpr = createConversionExpr(binaryExpr, lExpr, lType, rType);
        binaryExpr.setLExpr(newExpr);
        return rType;
    } else if (lType.equals(BTypes.typeString)) {
        Expression newExpr = createConversionExpr(binaryExpr, rExpr, rType, lType);
        binaryExpr.setRExpr(newExpr);
        return lType;
    }
    // int widens to float on either side.
    if (rType.equals(BTypes.typeInt) && lType.equals(BTypes.typeFloat)) {
        Expression newExpr = createConversionExpr(binaryExpr, rExpr, rType, lType);
        binaryExpr.setRExpr(newExpr);
        return lType;
    }
    if (lType.equals(BTypes.typeInt) && rType.equals(BTypes.typeFloat)) {
        Expression newExpr = createConversionExpr(binaryExpr, lExpr, lType, rType);
        binaryExpr.setLExpr(newExpr);
        return rType;
    }
    throw getInvalidBinaryOpError(binaryExpr);
}

/**
 * Builds an implicit conversion for one operand of a binary expression, or
 * throws the invalid-binary-op error if no conversion exists.
 */
private Expression createConversionExpr(BinaryExpression binaryExpr, Expression sExpr, BType sType,
                                        BType tType) {
    Expression conversionExpr = getImplicitConversionExpr(sExpr, sType, tType);
    if (conversionExpr != null) {
        return conversionExpr;
    }
    throw getInvalidBinaryOpError(binaryExpr);
}

/**
 * Returns a TypeConversionExpr wrapping {@code sExpr} when the transform
 * lattice has an edge from sType to tType, otherwise null.
 */
private Expression getImplicitConversionExpr(Expression sExpr, BType sType, BType tType) {
    TypeEdge newEdge;
    newEdge = TypeLattice.getTransformLattice().getEdgeFromTypes(sType, tType, null);
    if (newEdge != null) {
        TypeConversionExpr newExpr = new TypeConversionExpr(sExpr.getNodeLocation(),
                sExpr.getWhiteSpaceDescriptor(), sExpr, tType);
        newExpr.setOpcode(newEdge.getOpcode());
        newExpr.accept(this);
        return newExpr;
    }
    return null;
}

/** Type-checks a logical (&&/||) expression: both operands must be boolean. */
private void visitBinaryLogicalExpr(BinaryLogicalExpression expr) {
    visitBinaryExpr(expr);
    Expression rExpr = expr.getRExpr();
    Expression lExpr = expr.getLExpr();
    if (lExpr.getType() == BTypes.typeBoolean && rExpr.getType() == BTypes.typeBoolean) {
        expr.setType(BTypes.typeBoolean);
    } else {
        throwInvalidBinaryOpError(expr);
    }
}

/** Rejects assignment to a constant variable reference. */
private void checkForConstAssignment(AssignStmt assignStmt, Expression lExpr) {
    if (lExpr instanceof SimpleVarRefExpr &&
            ((SimpleVarRefExpr) lExpr).getVariableDef().getKind() == VariableDef.Kind.CONSTANT) {
        BLangExceptionHelper.throwSemanticError(assignStmt, SemanticErrors.CANNOT_ASSIGN_VALUE_CONSTANT,
                ((SimpleVarRefExpr) lExpr).getSymbolName());
    }
}

/**
 * Validates a multi-return assignment: the LHS count must match the
 * invocation's return count, and each non-ignored ('_') target must be
 * assignable from the corresponding return type.
 */
private void checkForMultiAssignmentErrors(AssignStmt assignStmt, Expression[] lExprs,
                                           CallableUnitInvocationExpr rExpr) {
    BType[] returnTypes = rExpr.getTypes();
    if (lExprs.length != returnTypes.length) {
        BLangExceptionHelper.throwSemanticError(assignStmt,
                SemanticErrors.ASSIGNMENT_COUNT_MISMATCH, lExprs.length, returnTypes.length);
    }
    for (int i = 0; i < lExprs.length; i++) {
        Expression lExpr = lExprs[i];
        if (lExpr instanceof SimpleVarRefExpr) {
            String varName = ((SimpleVarRefExpr) lExpr).getVarName();
            if ("_".equals(varName)) {
                // '_' discards the value; skip type checks.
                continue;
            }
        }
        BType lhsType = lExprs[i].getType();
        BType rhsType = returnTypes[i];
        if (isAssignableTo(lhsType, rhsType)) {
            continue;
        }
        BLangExceptionHelper.throwSemanticError(assignStmt, SemanticErrors.INCOMPATIBLE_ASSIGNMENT,
                rhsType, lExpr.getType());
    }
}

/**
 * Validates a multi-valued cast/conversion assignment: counts must match and
 * each non-ignored target must be 'any' or exactly the produced type.
 */
private void checkForMultiValuedCastingErrors(AssignStmt assignStmt, Expression[] lExprs,
                                              ExecutableMultiReturnExpr rExpr) {
    BType[] returnTypes = rExpr.getTypes();
    if (lExprs.length != returnTypes.length) {
        BLangExceptionHelper.throwSemanticError(assignStmt,
                SemanticErrors.ASSIGNMENT_COUNT_MISMATCH, lExprs.length, returnTypes.length);
    }
    for (int i = 0; i < lExprs.length; i++) {
        Expression lExpr = lExprs[i];
        BType returnType = returnTypes[i];
        if (lExpr instanceof SimpleVarRefExpr && ((SimpleVarRefExpr) lExpr).getVarName().equals("_")) {
            continue;
        }
        if ((lExpr.getType() != BTypes.typeAny) && (!lExpr.getType().equals(returnType))) {
            BLangExceptionHelper.throwSemanticError(assignStmt, SemanticErrors.INCOMPATIBLE_TYPES,
                    returnType, lExpr.getType());
        }
    }
}

/**
 * Processes the left-hand side of an assignment. For 'var' declarations,
 * defines each new variable in the current scope (rejecting duplicates on
 * the LHS and requiring at least one genuinely new variable); then visits
 * every non-ignored target as an LHS expression and rejects assignment where
 * every target is '_'.
 */
private void visitLExprsOfAssignment(AssignStmt assignStmt, Expression[] lExprs) {
    if (assignStmt.isDeclaredWithVar()) {
        Set<String> varNameSet = new HashSet<>();
        int declaredVarCount = 0;  // counts '_' targets and already-declared names
        for (Expression expr : lExprs) {
            if (!(expr instanceof SimpleVarRefExpr)) {
                throw BLangExceptionHelper.getSemanticError(assignStmt.getNodeLocation(),
                        SemanticErrors.INVALID_VAR_ASSIGNMENT);
            }
            SimpleVarRefExpr refExpr = (SimpleVarRefExpr) expr;
            String varName = refExpr.getVarName();
            if (varName.equals("_")) {
                declaredVarCount++;
                continue;
            }
            if (!varNameSet.add(varName)) {
                BLangExceptionHelper.throwSemanticError(assignStmt,
                        SemanticErrors.VAR_IS_REPEATED_ON_LEFT_SIDE_ASSIGNMENT, varName);
            }
            Identifier identifier = new Identifier(varName);
            SymbolName symbolName = new SymbolName(identifier.getName());
            SimpleVariableDef variableDef = new SimpleVariableDef(refExpr.getNodeLocation(),
                    refExpr.getWhiteSpaceDescriptor(), identifier, null, symbolName, currentScope);
            variableDef.setKind(VariableDef.Kind.LOCAL_VAR);
            SymbolName varDefSymName = new SymbolName(variableDef.getName(), currentPkg);
            BLangSymbol varSymbol = currentScope.resolve(varDefSymName);
            if (varSymbol != null &&
                    varSymbol.getSymbolScope().getScopeName() == currentScope.getScopeName()) {
                // Already declared in this scope: not a new variable.
                declaredVarCount++;
                continue;
            }
            currentScope.define(varDefSymName, variableDef);
        }
        if (declaredVarCount == lExprs.length) {
            // 'var' must introduce at least one new variable.
            throw new SemanticException(BLangExceptionHelper.constructSemanticError(
                    assignStmt.getNodeLocation(), SemanticErrors.NO_NEW_VARIABLES_VAR_ASSIGNMENT));
        }
    }
    int ignoredVarCount = 0;
    for (Expression lExpr : lExprs) {
        if (lExpr instanceof SimpleVarRefExpr && ((SimpleVarRefExpr) lExpr).getVarName().equals("_")) {
            ignoredVarCount++;
            continue;
        }
        ((VariableReferenceExpr) lExpr).setLHSExpr(true);
        lExpr.accept(this);
        checkForConstAssignment(assignStmt, lExpr);
    }
    if (ignoredVarCount == lExprs.length) {
        // Assigning every result to '_' is meaningless.
        throw new SemanticException(BLangExceptionHelper.constructSemanticError(
                assignStmt.getNodeLocation(), SemanticErrors.IGNORED_ASSIGNMENT));
    }
}

/**
 * Resolves a function invocation to its callable unit. Handles function
 * pointers (a variable of function type in scope), argument matching with
 * implicit casts, and native functions; continues past this chunk.
 */
private void linkFunction(FunctionInvocationExpr funcIExpr) {
    String pkgPath = funcIExpr.getPackagePath();
    Expression[] exprs = funcIExpr.getArgExprs();
    BType[] paramTypes = new BType[exprs.length];
    for (int i = 0; i < exprs.length; i++) {
        paramTypes[i] = exprs[i].getType();
    }
    FunctionSymbolName symbolName = LangModelUtils.getFuncSymNameWithParams(funcIExpr.getName(),
            pkgPath, paramTypes);
    BLangSymbol functionSymbol = currentScope.resolve(symbolName);
    // A variable of function type in scope means this is a function-pointer call.
    if (functionSymbol instanceof SimpleVariableDef &&
            ((SimpleVariableDef) functionSymbol).getType() instanceof BFunctionType) {
        SimpleVariableDef variableDef = (SimpleVariableDef) functionSymbol;
        matchAndUpdateFunctionPointsArgs(funcIExpr, symbolName, (BFunctionType) (variableDef).getType());
        funcIExpr.setFunctionPointerInvocation(true);
        funcIExpr.setFunctionPointerVariableDef(variableDef);
        return;
    }
    functionSymbol = matchAndUpdateArguments(funcIExpr, symbolName, functionSymbol);
    if (functionSymbol == null) {
        String funcName = (funcIExpr.getPackageName() != null) ?
// (continuation of linkFunction — the conditional's '?' is on the previous line)
funcIExpr.getPackageName() + ":" + funcIExpr.getName() : funcIExpr.getName();
        BLangExceptionHelper.throwSemanticError(funcIExpr, SemanticErrors.UNDEFINED_FUNCTION, funcName);
        return;
    }
    Function function;
    if (functionSymbol.isNative()) {
        // Native function: load the native unit and resolve its return types here.
        functionSymbol = ((BallerinaFunction) functionSymbol).getNativeFunction();
        NativeUnit nativeUnit = ((NativeUnitProxy) functionSymbol).load();
        SimpleTypeName[] returnParamTypeNames = nativeUnit.getReturnParamTypeNames();
        BType[] returnTypes = new BType[returnParamTypeNames.length];
        for (int i = 0; i < returnParamTypeNames.length; i++) {
            SimpleTypeName typeName = returnParamTypeNames[i];
            BType bType = BTypes.resolveType(typeName, currentScope, funcIExpr.getNodeLocation());
            returnTypes[i] = bType;
        }
        if (!(nativeUnit instanceof Function)) {
            BLangExceptionHelper.throwSemanticError(funcIExpr,
                    SemanticErrors.INCOMPATIBLE_TYPES_UNKNOWN_FOUND, symbolName);
        }
        function = (Function) nativeUnit;
        function.setReturnParamTypes(returnTypes);
    } else {
        if (!(functionSymbol instanceof Function)) {
            BLangExceptionHelper.throwSemanticError(funcIExpr,
                    SemanticErrors.INCOMPATIBLE_TYPES_UNKNOWN_FOUND, symbolName);
            return;
        }
        function = (Function) functionSymbol;
    }
    funcIExpr.setCallableUnit(function);
}

/**
 * Resolves an action invocation: looks up the connector, resolves the action
 * member on it (with argument matching and implicit casts), and handles
 * native actions by loading the native unit and resolving its return types.
 */
private void linkAction(ActionInvocationExpr actionIExpr) {
    String pkgPath = actionIExpr.getPackagePath();
    String connectorName = actionIExpr.getConnectorName();
    SymbolName connectorSymbolName = new SymbolName(connectorName, pkgPath);
    BLangSymbol connectorSymbol = currentScope.resolve(connectorSymbolName);
    if (connectorSymbol == null) {
        String connectorWithPkgName = (actionIExpr.getPackageName() != null) ?
                actionIExpr.getPackageName() + ":" + actionIExpr.getConnectorName()
                : actionIExpr.getConnectorName();
        BLangExceptionHelper.throwSemanticError(actionIExpr, SemanticErrors.UNDEFINED_CONNECTOR,
                connectorWithPkgName);
        return;
    }
    Expression[] exprs = actionIExpr.getArgExprs();
    BType[] paramTypes = new BType[exprs.length];
    for (int i = 0; i < exprs.length; i++) {
        paramTypes[i] = exprs[i].getType();
    }
    ActionSymbolName actionSymbolName = LangModelUtils.getActionSymName(actionIExpr.getName(),
            actionIExpr.getPackagePath(), actionIExpr.getConnectorName(), paramTypes);
    BLangSymbol actionSymbol = null;
    if (connectorSymbol instanceof BallerinaConnectorDef) {
        actionSymbol = ((BallerinaConnectorDef) connectorSymbol).resolveMembers(actionSymbolName);
    } else {
        BLangExceptionHelper.throwSemanticError(actionIExpr,
                SemanticErrors.INCOMPATIBLE_TYPES_CONNECTOR_EXPECTED, connectorSymbolName);
    }
    actionSymbol = matchAndUpdateArguments(actionIExpr, actionSymbolName, actionSymbol);
    if ((actionSymbol instanceof BallerinaAction) && (actionSymbol.isNative())) {
        actionSymbol = ((BallerinaAction) actionSymbol).getNativeAction();
    }
    if (actionSymbol == null) {
        BLangExceptionHelper.throwSemanticError(actionIExpr, SemanticErrors.UNDEFINED_ACTION,
                actionIExpr.getName(), connectorSymbol.getSymbolName());
    }
    Action action = null;
    if (actionSymbol instanceof NativeUnitProxy) {
        // Native action: resolve the return types from the native unit's type names.
        NativeUnit nativeUnit = ((NativeUnitProxy) actionSymbol).load();
        SimpleTypeName[] returnParamTypeNames = nativeUnit.getReturnParamTypeNames();
        BType[] returnTypes = new BType[returnParamTypeNames.length];
        for (int i = 0; i < returnParamTypeNames.length; i++) {
            SimpleTypeName typeName = returnParamTypeNames[i];
            BType bType = BTypes.resolveType(typeName, currentScope, actionIExpr.getNodeLocation());
            returnTypes[i] = bType;
        }
        if (!(nativeUnit instanceof Action)) {
            BLangExceptionHelper.throwSemanticError(actionIExpr,
                    SemanticErrors.INCOMPATIBLE_TYPES_UNKNOWN_FOUND, actionSymbolName);
        }
        action = (Action) nativeUnit;
        action.setReturnParamTypes(returnTypes);
    } else if (actionSymbol instanceof Action) {
        action = (Action) actionSymbol;
    } else {
        BLangExceptionHelper.throwSemanticError(actionIExpr,
                SemanticErrors.INCOMPATIBLE_TYPES_UNKNOWN_FOUND, actionSymbolName);
    }
    actionIExpr.setCallableUnit(action);
}

/**
 * Helper method to match the callable unit with invocation (check whether parameters map,
 * do cast if applicable). Returns null when the name/parameter count does not match or
 * an argument cannot be made assignable; otherwise writes any implicit conversions back
 * into the invocation's argument list and returns the symbol unchanged.
 *
 * @param callableIExpr invocation expression
 * @param symbolName callable symbol name
 * @param callableSymbol matching symbol
 * @return callableSymbol matching symbol
 */
private BLangSymbol matchAndUpdateArguments(AbstractExpression callableIExpr,
                                            CallableUnitSymbolName symbolName, BLangSymbol callableSymbol) {
    if (callableSymbol == null) {
        return null;
    }
    Expression[] argExprs = ((CallableUnitInvocationExpr) callableIExpr).getArgExprs();
    Expression[] updatedArgExprs = new Expression[argExprs.length];
    CallableUnitSymbolName funcSymName = (CallableUnitSymbolName) callableSymbol.getSymbolName();
    if (!funcSymName.isNameAndParamCountMatch(symbolName)) {
        return null;
    }
    boolean implicitCastPossible = true;
    if (callableSymbol instanceof NativeUnitProxy) {
        // Native callable: parameter types come from the native unit's type names.
        NativeUnit nativeUnit = ((NativeUnitProxy) callableSymbol).load();
        for (int i = 0; i < argExprs.length; i++) {
            Expression argExpr = argExprs[i];
            updatedArgExprs[i] = argExpr;
            SimpleTypeName simpleTypeName = nativeUnit.getArgumentTypeNames()[i];
            BType lhsType = BTypes.resolveType(simpleTypeName, currentScope,
                    callableIExpr.getNodeLocation());
            AssignabilityResult result = performAssignabilityCheck(lhsType, argExpr);
            if (result.expression != null) {
                // An implicit conversion was inserted for this argument.
                updatedArgExprs[i] = result.expression;
            } else if (!result.assignable) {
                implicitCastPossible = false;
                break;
            }
        }
    } else {
        for (int i = 0; i < argExprs.length; i++) {
            Expression argExpr = argExprs[i];
            updatedArgExprs[i] = argExpr;
            BType lhsType = ((CallableUnit) callableSymbol).getParameterDefs()[i].getType();
            AssignabilityResult result = performAssignabilityCheck(lhsType, argExpr);
            if (result.expression != null) {
                updatedArgExprs[i] = result.expression;
            } else if (!result.assignable) {
                implicitCastPossible = false;
                break;
            }
        }
    }
    if (!implicitCastPossible) {
        return null;
    }
    // Commit the (possibly converted) arguments back onto the invocation.
    for (int i = 0; i < updatedArgExprs.length; i++) {
        ((CallableUnitInvocationExpr) callableIExpr).getArgExprs()[i] = updatedArgExprs[i];
    }
    return callableSymbol;
}

/**
 * Matches a function-pointer invocation's arguments against the function
 * type's parameter types, inserting implicit conversions where assignable and
 * raising INCORRECT_FUNCTION_ARGUMENTS otherwise.
 */
private void matchAndUpdateFunctionPointsArgs(FunctionInvocationExpr funcIExpr,
                                              CallableUnitSymbolName symbolName, BFunctionType bFunctionType) {
    if (symbolName.getNoOfParameters() != bFunctionType.getParameterType().length) {
        BLangExceptionHelper.throwSemanticError(funcIExpr, SemanticErrors.INCORRECT_FUNCTION_ARGUMENTS,
                funcIExpr.getName());
    }
    Expression[] argExprs = funcIExpr.getArgExprs();
    Expression[] updatedArgExprs = new Expression[argExprs.length];
    for (int i = 0; i < argExprs.length; i++) {
        Expression argExpr = argExprs[i];
        updatedArgExprs[i] = argExpr;
        BType lhsType = bFunctionType.getParameterType()[i];
        AssignabilityResult result = performAssignabilityCheck(lhsType, argExpr);
        if (result.expression != null) {
            updatedArgExprs[i] = result.expression;
        } else if (!result.assignable) {
            BLangExceptionHelper.throwSemanticError(funcIExpr,
                    SemanticErrors.INCORRECT_FUNCTION_ARGUMENTS, funcIExpr.getName());
        }
    }
    for (int i = 0; i < updatedArgExprs.length; i++) {
        funcIExpr.getArgExprs()[i] = updatedArgExprs[i];
    }
}

/**
 * Resolves a worker invocation to the worker defined in the current scope,
 * throwing a LinkerException with file/line context when it is undefined.
 */
private void linkWorker(WorkerInvocationStmt workerInvocationStmt) {
    String workerName = workerInvocationStmt.getCallableUnitName();
    SymbolName workerSymbolName = new SymbolName(workerName);
    Worker worker = (Worker) currentScope.resolve(workerSymbolName);
    if (worker == null) {
        throw new LinkerException(workerInvocationStmt.getNodeLocation().getFileName() + ":" +
                workerInvocationStmt.getNodeLocation().getLineNumber() +
                ": undefined worker '" + workerInvocationStmt.getCallableUnitName() + "'");
    }
    workerInvocationStmt.setCallableUnit(worker);
}

/**
 * Throws the appropriate invalid-binary-operation error; the parameter list
 * and body continue past this line.
 */
private void throwInvalidBinaryOpError(BinaryExpression
binaryExpr) { BType lExprType = binaryExpr.getLExpr().getType(); BType rExprType = binaryExpr.getRExpr().getType(); if (lExprType == rExprType) { BLangExceptionHelper.throwSemanticError(binaryExpr, SemanticErrors.INVALID_OPERATION_OPERATOR_NOT_DEFINED, binaryExpr.getOperator(), lExprType); } else { BLangExceptionHelper.throwSemanticError(binaryExpr, SemanticErrors.INVALID_OPERATION_INCOMPATIBLE_TYPES, lExprType, rExprType); } } private SemanticException getInvalidBinaryOpError(BinaryExpression binaryExpr) { BType lExprType = binaryExpr.getLExpr().getType(); BType rExprType = binaryExpr.getRExpr().getType(); if (lExprType == rExprType) { return BLangExceptionHelper.getSemanticError(binaryExpr.getNodeLocation(), SemanticErrors.INVALID_OPERATION_OPERATOR_NOT_DEFINED, binaryExpr.getOperator(), lExprType); } else { return BLangExceptionHelper.getSemanticError(binaryExpr.getNodeLocation(), SemanticErrors.INVALID_OPERATION_INCOMPATIBLE_TYPES, lExprType, rExprType); } } private void throwInvalidUnaryOpError(UnaryExpression unaryExpr) { BType rExprType = unaryExpr.getRExpr().getType(); BLangExceptionHelper.throwSemanticError(unaryExpr, SemanticErrors.INVALID_OPERATION_OPERATOR_NOT_DEFINED, unaryExpr.getOperator(), rExprType); } private TypeCastExpression checkWideningPossible(BType lhsType, Expression rhsExpr) { TypeCastExpression typeCastExpr = null; BType rhsType = rhsExpr.getType(); TypeEdge typeEdge = TypeLattice.getImplicitCastLattice().getEdgeFromTypes(rhsType, lhsType, null); if (typeEdge != null) { typeCastExpr = new TypeCastExpression(rhsExpr.getNodeLocation(), rhsExpr.getWhiteSpaceDescriptor(), rhsExpr, lhsType); typeCastExpr.setOpcode(typeEdge.getOpcode()); } return typeCastExpr; } private void defineWorkers(Worker[] workers, CallableUnit callableUnit) { for (Worker worker : workers) { SymbolName symbolName = new SymbolName(worker.getName(), null); worker.setSymbolName(symbolName); BLangSymbol workerSymbol = callableUnit.getSymbolScope().resolve(symbolName); if 
(workerSymbol != null) { BLangExceptionHelper.throwSemanticError(worker, SemanticErrors.REDECLARED_SYMBOL, worker.getName()); } callableUnit.getSymbolScope().define(symbolName, worker); } } private void defineFunctions(Function[] functions) { for (Function function : functions) { ParameterDef[] paramDefArray = function.getParameterDefs(); BType[] paramTypes = new BType[paramDefArray.length]; for (int i = 0; i < paramDefArray.length; i++) { ParameterDef paramDef = paramDefArray[i]; BType bType = BTypes.resolveType(paramDef.getTypeName(), currentScope, paramDef.getNodeLocation()); paramDef.setType(bType); paramTypes[i] = bType; } function.setParameterTypes(paramTypes); FunctionSymbolName symbolName = LangModelUtils.getFuncSymNameWithParams(function.getName(), function.getPackagePath(), paramTypes); function.setSymbolName(symbolName); BLangSymbol functionSymbol = currentScope.resolve(symbolName); if (!function.isNative() && functionSymbol != null) { BLangExceptionHelper.throwSemanticError(function, SemanticErrors.REDECLARED_SYMBOL, function.getName()); } if (function.isNative() && functionSymbol == null) { functionSymbol = nativeScope.resolve(symbolName); if (functionSymbol == null) { BLangExceptionHelper.throwSemanticError(function, SemanticErrors.UNDEFINED_FUNCTION, function.getName()); } if (function instanceof BallerinaFunction) { ((BallerinaFunction) function).setNativeFunction((NativeUnitProxy) functionSymbol); } } currentScope.define(symbolName, function); ParameterDef[] returnParameters = function.getReturnParameters(); BType[] returnTypes = new BType[returnParameters.length]; for (int i = 0; i < returnParameters.length; i++) { ParameterDef paramDef = returnParameters[i]; BType bType = BTypes.resolveType(paramDef.getTypeName(), currentScope, paramDef.getNodeLocation()); paramDef.setType(bType); returnTypes[i] = bType; } function.setReturnParamTypes(returnTypes); if (function.getWorkers().length > 0) { defineWorkers(function.getWorkers(), function); } } } 
// defineConnectors: first pass -- registers each connector definition in the current scope
// (REDECLARED_SYMBOL on duplicates) and, when a native "NativeAction.<name>.<init>" symbol exists in
// nativeScope, synthesizes a BallerinaAction wrapper for the connector's native init action, passing the
// connector itself as the single parameter type.
private void defineConnectors(BallerinaConnectorDef[] connectorDefArray) { for (BallerinaConnectorDef connectorDef : connectorDefArray) { String connectorName = connectorDef.getName(); SymbolName connectorSymbolName = new SymbolName(connectorName, connectorDef.getPackagePath()); BLangSymbol connectorSymbol = currentScope.resolve(connectorSymbolName); if (connectorSymbol != null) { BLangExceptionHelper.throwSemanticError(connectorDef, SemanticErrors.REDECLARED_SYMBOL, connectorName); } currentScope.define(connectorSymbolName, connectorDef); BLangSymbol actionSymbol; SymbolName name = new SymbolName("NativeAction." + connectorName + ".<init>", connectorDef.getPackagePath()); actionSymbol = nativeScope.resolve(name); if (actionSymbol != null) { if (actionSymbol instanceof NativeUnitProxy) { AbstractNativeAction nativeUnit = (AbstractNativeAction) ((NativeUnitProxy) actionSymbol).load(); BallerinaAction.BallerinaActionBuilder ballerinaActionBuilder = new BallerinaAction .BallerinaActionBuilder(connectorDef); ballerinaActionBuilder.setIdentifier(nativeUnit.getIdentifier()); ballerinaActionBuilder.setPkgPath(nativeUnit.getPackagePath()); ballerinaActionBuilder.setNative(nativeUnit.isNative()); ballerinaActionBuilder.setSymbolName(nativeUnit.getSymbolName()); ParameterDef paramDef = new ParameterDef(connectorDef.getNodeLocation(), null, new Identifier(nativeUnit.getArgumentNames()[0]), nativeUnit.getArgumentTypeNames()[0], new SymbolName(nativeUnit.getArgumentNames()[0], connectorDef.getPackagePath()), ballerinaActionBuilder.getCurrentScope()); paramDef.setType(connectorDef); ballerinaActionBuilder.addParameter(paramDef); BallerinaAction ballerinaAction = ballerinaActionBuilder.buildAction(); ballerinaAction.setNativeAction((NativeUnitProxy) actionSymbol); ballerinaAction.setConnectorDef(connectorDef); BType bType = BTypes.resolveType(paramDef.getTypeName(), currentScope, paramDef.getNodeLocation()); ballerinaAction.setParameterTypes(new BType[]{bType});
// Second pass below: open each connector's scope and define its actions. defineAction resolves
// parameter/return types, builds an ActionSymbolName (name + package + connector + param types), rejects
// redeclarations, and for native actions binds the NativeUnitProxy from nativeScope
// (UNDEFINED_NATIVE_ACTION when missing). defineServices/defineResource do the analogous registration
// for services and resources; defineStructs registers structs and synthesizes their <init> functions.
connectorDef.setInitAction(ballerinaAction); } } } for (BallerinaConnectorDef connectorDef : connectorDefArray) { openScope(connectorDef); for (BallerinaAction bAction : connectorDef.getActions()) { bAction.setConnectorDef(connectorDef); defineAction(bAction, connectorDef); } closeScope(); } } private void defineAction(BallerinaAction action, BallerinaConnectorDef connectorDef) { ParameterDef[] paramDefArray = action.getParameterDefs(); BType[] paramTypes = new BType[paramDefArray.length]; for (int i = 0; i < paramDefArray.length; i++) { ParameterDef paramDef = paramDefArray[i]; BType bType = BTypes.resolveType(paramDef.getTypeName(), currentScope, paramDef.getNodeLocation()); paramDef.setType(bType); paramTypes[i] = bType; } action.setParameterTypes(paramTypes); ActionSymbolName symbolName = LangModelUtils.getActionSymName(action.getName(), action.getPackagePath(), connectorDef.getName(), paramTypes); action.setSymbolName(symbolName); BLangSymbol actionSymbol = currentScope.resolve(symbolName); if (actionSymbol != null) { BLangExceptionHelper.throwSemanticError(action, SemanticErrors.REDECLARED_SYMBOL, action.getName()); } currentScope.define(symbolName, action); if (action.isNative()) { ActionSymbolName nativeActionSymName = LangModelUtils.getNativeActionSymName(action.getName(), connectorDef.getName(), action.getPackagePath(), paramTypes); BLangSymbol nativeAction = nativeScope.resolve(nativeActionSymName); if (nativeAction == null || !(nativeAction instanceof NativeUnitProxy)) { BLangExceptionHelper.throwSemanticError(connectorDef, SemanticErrors.UNDEFINED_NATIVE_ACTION, action.getName(), connectorDef.getName()); return; } action.setNativeAction((NativeUnitProxy) nativeAction); } ParameterDef[] returnParameters = action.getReturnParameters(); BType[] returnTypes = new BType[returnParameters.length]; for (int i = 0; i < returnParameters.length; i++) { ParameterDef paramDef = returnParameters[i]; BType bType = BTypes.resolveType(paramDef.getTypeName(),
currentScope, paramDef.getNodeLocation()); paramDef.setType(bType); returnTypes[i] = bType; } action.setReturnParamTypes(returnTypes); if (action.getWorkers().length > 0) { defineWorkers(action.getWorkers(), action); } } private void defineServices(Service[] services) { for (Service service : services) { if (currentScope.resolve(service.getSymbolName()) != null) { BLangExceptionHelper.throwSemanticError(service, SemanticErrors.REDECLARED_SYMBOL, service.getName()); } currentScope.define(service.getSymbolName(), service); openScope(service); for (Resource resource : service.getResources()) { defineResource(resource, service); } closeScope(); } } private void defineResource(Resource resource, Service service) { ParameterDef[] paramDefArray = resource.getParameterDefs(); BType[] paramTypes = new BType[paramDefArray.length]; for (int i = 0; i < paramDefArray.length; i++) { ParameterDef paramDef = paramDefArray[i]; BType bType = BTypes.resolveType(paramDef.getTypeName(), currentScope, paramDef.getNodeLocation()); paramDef.setType(bType); paramTypes[i] = bType; } resource.setParameterTypes(paramTypes); SymbolName symbolName = LangModelUtils.getResourceSymName(resource.getName(), resource.getPackagePath(), service.getName()); resource.setSymbolName(symbolName); if (currentScope.resolve(symbolName) != null) { BLangExceptionHelper.throwSemanticError(resource, SemanticErrors.REDECLARED_SYMBOL, resource.getName()); } currentScope.define(symbolName, resource); if (resource.getWorkers().length > 0) { defineWorkers(resource.getWorkers(), resource); } } private void defineStructs(StructDef[] structDefs) { for (StructDef structDef : structDefs) { SymbolName symbolName = new SymbolName(structDef.getName(), structDef.getPackagePath()); if (currentScope.resolve(symbolName) != null) { BLangExceptionHelper.throwSemanticError(structDef, SemanticErrors.REDECLARED_SYMBOL, structDef.getName()); } currentScope.define(symbolName, structDef); BlockStmt.BlockStmtBuilder blockStmtBuilder = new
// NOTE(review): on the next line the struct init function's identifier is built as
// (structDef + ".<init>"), i.e. via StructDef.toString(); presumably structDef.getName() + ".<init>"
// was intended (compare createConnectorInitFunction, which uses getName()). Verify before changing.
// (the 'new BlockStmt.BlockStmtBuilder(...)' expression continues across this comment)
BlockStmt.BlockStmtBuilder( structDef.getNodeLocation(), structDef); for (VariableDefStmt variableDefStmt : structDef.getFieldDefStmts()) { blockStmtBuilder.addStmt(variableDefStmt); } BallerinaFunction.BallerinaFunctionBuilder functionBuilder = new BallerinaFunction.BallerinaFunctionBuilder(structDef); functionBuilder.setNodeLocation(structDef.getNodeLocation()); functionBuilder.setIdentifier(new Identifier(structDef + ".<init>")); functionBuilder.setPkgPath(structDef.getPackagePath()); blockStmtBuilder.setBlockKind(StatementKind.CALLABLE_UNIT_BLOCK); functionBuilder.setBody(blockStmtBuilder.build()); structDef.setInitFunction(functionBuilder.buildFunction()); } for (StructDef structDef : structDefs) { SymbolScope tmpScope = currentScope; currentScope = structDef; for (VariableDefStmt fieldDefStmt : structDef.getFieldDefStmts()) { fieldDefStmt.getVariableDef().setKind(VariableDef.Kind.STRUCT_FIELD); fieldDefStmt.accept(this); } currentScope = tmpScope; } for (StructDef structDef : structDefs) { TypeLattice.addStructEdges(structDef, currentScope); } } /** * Add the annotation definitions to the current scope. * * @param annotationDefs Annotation definitions list */ private void defineAnnotations(AnnotationDef[] annotationDefs) { for (AnnotationDef annotationDef : annotationDefs) { SymbolName symbolName = new SymbolName(annotationDef.getName(), currentPkg); if (currentScope.resolve(symbolName) != null) { BLangExceptionHelper.throwSemanticError(annotationDef, SemanticErrors.REDECLARED_SYMBOL, annotationDef.getSymbolName().getName()); } currentScope.define(symbolName, annotationDef); } } /** * Create the '<init>' function and inject it to the connector.
* * @param connectorDef connector model object */ private void createConnectorInitFunction(BallerinaConnectorDef connectorDef) { NodeLocation location = connectorDef.getNodeLocation(); BallerinaFunction.BallerinaFunctionBuilder functionBuilder = new BallerinaFunction.BallerinaFunctionBuilder(connectorDef); functionBuilder.setNodeLocation(location); functionBuilder.setIdentifier(new Identifier(connectorDef.getName() + ".<init>")); functionBuilder.setPkgPath(connectorDef.getPackagePath()); ParameterDef paramDef = new ParameterDef(location, null, new Identifier("connector"), null, new SymbolName("connector"), functionBuilder.getCurrentScope()); paramDef.setType(connectorDef); functionBuilder.addParameter(paramDef); BlockStmt.BlockStmtBuilder blockStmtBuilder = new BlockStmt.BlockStmtBuilder(location, connectorDef); for (VariableDefStmt variableDefStmt : connectorDef.getVariableDefStmts()) { AssignStmt assignStmt = new AssignStmt(variableDefStmt.getNodeLocation(), new Expression[]{variableDefStmt.getLExpr()}, variableDefStmt.getRExpr()); blockStmtBuilder.addStmt(assignStmt); } ReturnStmt returnStmt = new ReturnStmt(location, null, new Expression[0]); blockStmtBuilder.addStmt(returnStmt); blockStmtBuilder.setBlockKind(StatementKind.CALLABLE_UNIT_BLOCK); functionBuilder.setBody(blockStmtBuilder.build()); connectorDef.setInitFunction(functionBuilder.buildFunction()); } /** * Create the '<init>' function and inject it to the service.
* * @param service service model object */ private void createServiceInitFunction(Service service) { NodeLocation location = service.getNodeLocation(); BallerinaFunction.BallerinaFunctionBuilder functionBuilder = new BallerinaFunction.BallerinaFunctionBuilder(service); functionBuilder.setNodeLocation(location); functionBuilder.setIdentifier(new Identifier(service.getName() + ".<init>")); functionBuilder.setPkgPath(service.getPackagePath()); BlockStmt.BlockStmtBuilder blockStmtBuilder = new BlockStmt.BlockStmtBuilder(location, service); for (VariableDefStmt variableDefStmt : service.getVariableDefStmts()) { AssignStmt assignStmt = new AssignStmt(variableDefStmt.getNodeLocation(), new Expression[]{variableDefStmt.getLExpr()}, variableDefStmt.getRExpr()); blockStmtBuilder.addStmt(assignStmt); } ReturnStmt returnStmt = new ReturnStmt(location, null, new Expression[0]); blockStmtBuilder.addStmt(returnStmt); blockStmtBuilder.setBlockKind(StatementKind.CALLABLE_UNIT_BLOCK); functionBuilder.setBody(blockStmtBuilder.build()); service.setInitFunction(functionBuilder.buildFunction()); } private void resolveStructFieldTypes(StructDef[] structDefs) { for (StructDef structDef : structDefs) { for (VariableDefStmt fieldDefStmt : structDef.getFieldDefStmts()) { VariableDef fieldDef = fieldDefStmt.getVariableDef(); BType fieldType = BTypes.resolveType(fieldDef.getTypeName(), currentScope, fieldDef.getNodeLocation()); fieldDef.setType(fieldType); } } } private void checkUnreachableStmt(Statement[] stmts, int stmtIndex) { if (stmts.length > stmtIndex) { if (stmts[stmtIndex] instanceof CommentStmt) { checkUnreachableStmt(stmts, ++stmtIndex); } else { BLangExceptionHelper.throwSemanticError(stmts[stmtIndex], SemanticErrors.UNREACHABLE_STATEMENT); } } } /** * Recursively visits a nested init expression. Reconstruct the init expression with the * specific init expression type, and replaces the generic {@link RefTypeInitExpr}.
* * @param fieldType Type of the current field * @return reconstructed nested init expression */ private RefTypeInitExpr getNestedInitExpr(Expression expr, BType fieldType) { RefTypeInitExpr refTypeInitExpr = (RefTypeInitExpr) expr; if (refTypeInitExpr instanceof ArrayInitExpr) { if (fieldType == BTypes.typeAny || fieldType == BTypes.typeMap) { fieldType = BTypes.resolveType(new SimpleTypeName(BTypes.typeAny.getName(), true, 1), currentScope, expr.getNodeLocation()); } else if (getElementType(fieldType) == BTypes.typeJSON) { refTypeInitExpr = new JSONArrayInitExpr(refTypeInitExpr.getNodeLocation(), refTypeInitExpr.getWhiteSpaceDescriptor(), refTypeInitExpr.getArgExprs()); } } else { if (fieldType == BTypes.typeAny) { fieldType = BTypes.typeMap; } if (fieldType == BTypes.typeMap) { refTypeInitExpr = new MapInitExpr(refTypeInitExpr.getNodeLocation(), refTypeInitExpr.getWhiteSpaceDescriptor(), refTypeInitExpr.getArgExprs()); } else if (fieldType == BTypes.typeJSON || fieldType instanceof BJSONConstraintType) { refTypeInitExpr = new JSONInitExpr(refTypeInitExpr.getNodeLocation(), refTypeInitExpr.getWhiteSpaceDescriptor(), refTypeInitExpr.getArgExprs()); } else if (fieldType instanceof StructDef) { refTypeInitExpr = new StructInitExpr(refTypeInitExpr.getNodeLocation(), refTypeInitExpr.getWhiteSpaceDescriptor(), refTypeInitExpr.getArgExprs()); } if (refTypeInitExpr instanceof ConnectorInitExpr) { ConnectorInitExpr filterConnectorInitExpr = ((ConnectorInitExpr) refTypeInitExpr). getParentConnectorInitExpr(); BType type = null; while (filterConnectorInitExpr != null) { BLangSymbol symbol = currentPackageScope.resolve(new SymbolName(filterConnectorInitExpr. getTypeName().getName(), currentPkg)); if (symbol instanceof BallerinaConnectorDef) { type = (BType) symbol; filterConnectorInitExpr.setInheritedType(type); type = BTypes.resolveType(((BallerinaConnectorDef) symbol).
getFilterSupportedType(), currentScope, refTypeInitExpr.getNodeLocation()); if (type != null) { filterConnectorInitExpr.setFilterSupportedType(type); } } filterConnectorInitExpr = (filterConnectorInitExpr). getParentConnectorInitExpr(); } } } refTypeInitExpr.setInheritedType(fieldType); return refTypeInitExpr; } private BType getElementType(BType type) { if (type.getTag() != TypeTags.ARRAY_TAG) { return type; } return getElementType(((BArrayType) type).getElementType()); } /** * Visit and validate map/json initialize expression. * * @param initExpr Expression to visit. */ private void visitMapJsonInitExpr(RefTypeInitExpr initExpr) { BType inheritedType = initExpr.getInheritedType(); initExpr.setType(inheritedType); Expression[] argExprs = initExpr.getArgExprs(); for (int i = 0; i < argExprs.length; i++) { Expression argExpr = argExprs[i]; KeyValueExpr keyValueExpr = (KeyValueExpr) argExpr; Expression keyExpr = keyValueExpr.getKeyExpr(); if (keyExpr instanceof SimpleVarRefExpr) { BString key = new BString(((SimpleVarRefExpr) keyExpr).getVarName()); keyExpr = new BasicLiteral(keyExpr.getNodeLocation(), keyExpr.getWhiteSpaceDescriptor(), new SimpleTypeName(TypeConstants.STRING_TNAME), key); keyValueExpr.setKeyExpr(keyExpr); } visitSingleValueExpr(keyExpr); Expression valueExpr = keyValueExpr.getValueExpr(); if (inheritedType instanceof BJSONConstraintType) { String key = ((BasicLiteral) keyExpr).getBValue().stringValue(); StructDef constraintStructDef = (StructDef) ((BJSONConstraintType) inheritedType).getConstraint(); if (constraintStructDef != null) { BLangSymbol varDefSymbol = constraintStructDef.resolveMembers( new SymbolName(key, constraintStructDef.getPackagePath())); if (varDefSymbol == null) { throw BLangExceptionHelper.getSemanticError(keyExpr.getNodeLocation(), SemanticErrors.UNKNOWN_FIELD_IN_JSON_STRUCT, key, constraintStructDef.getName()); } VariableDef varDef = (VariableDef) varDefSymbol; BType cJSONFieldType = new BJSONConstraintType(varDef.getType());
// (visitMapJsonInitExpr continues: nested init expressions are re-wrapped with the constrained-JSON
// field type resolved above, then each value is type-checked -- map values are widened to 'any' via
// checkWideningPossible, and JSON values must be castable to JSON or INCOMPATIBLE_TYPES_CANNOT_CONVERT
// is raised. addDependentPkgInitCalls then appends one <pkg>.<init> invocation per dependent package.)
if (valueExpr instanceof RefTypeInitExpr) { valueExpr = getNestedInitExpr(valueExpr, cJSONFieldType); keyValueExpr.setValueExpr(valueExpr); } } } else { if (valueExpr instanceof RefTypeInitExpr) { valueExpr = getNestedInitExpr(valueExpr, inheritedType); keyValueExpr.setValueExpr(valueExpr); } } valueExpr.accept(this); BType valueExprType = valueExpr.getType(); if (inheritedType == BTypes.typeMap) { if (BTypes.isValueType(valueExprType)) { TypeCastExpression newExpr = checkWideningPossible(BTypes.typeAny, valueExpr); if (newExpr != null) { keyValueExpr.setValueExpr(newExpr); } else { BLangExceptionHelper.throwSemanticError(keyValueExpr, SemanticErrors.INCOMPATIBLE_TYPES_CANNOT_CONVERT, valueExprType.getSymbolName(), inheritedType); } } continue; } if (BTypes.isValueType(valueExprType)) { TypeCastExpression typeCastExpr = checkWideningPossible(BTypes.typeJSON, valueExpr); if (typeCastExpr != null) { keyValueExpr.setValueExpr(typeCastExpr); } else { BLangExceptionHelper.throwSemanticError(keyValueExpr, SemanticErrors.INCOMPATIBLE_TYPES_CANNOT_CONVERT, valueExprType.getSymbolName(), inheritedType.getSymbolName()); } continue; } if (valueExprType != BTypes.typeNull && isAssignableTo(BTypes.typeJSON, valueExprType)) { continue; } TypeCastExpression typeCastExpr = checkWideningPossible(BTypes.typeJSON, valueExpr); if (typeCastExpr == null) { BLangExceptionHelper.throwSemanticError(initExpr, SemanticErrors.INCOMPATIBLE_TYPES_CANNOT_CONVERT, valueExpr.getType(), BTypes.typeJSON); } keyValueExpr.setValueExpr(typeCastExpr); } } private void addDependentPkgInitCalls(List<BallerinaFunction> initFunctionList, BlockStmt.BlockStmtBuilder blockStmtBuilder, NodeLocation initFuncLocation) { for (BallerinaFunction initFunc : initFunctionList) { FunctionInvocationExpr funcIExpr = new FunctionInvocationExpr(initFuncLocation, null, initFunc.getName(), null, initFunc.getPackagePath(), new Expression[]{}); funcIExpr.setCallableUnit(initFunc); FunctionInvocationStmt funcIStmt = new
FunctionInvocationStmt(initFuncLocation, funcIExpr); blockStmtBuilder.addStmt(funcIStmt); } } private boolean isAssignableTo(BType lhsType, BType rhsType) { if (lhsType == BTypes.typeAny) { return true; } if (rhsType == BTypes.typeNull && !BTypes.isValueType(lhsType)) { return true; } if (lhsType == BTypes.typeJSON && rhsType.getTag() == TypeTags.C_JSON_TAG) { return true; } return lhsType == rhsType || lhsType.equals(rhsType); } private boolean checkUnsafeCastPossible(BType sourceType, BType targetType) { if (sourceType == BTypes.typeAny || targetType == BTypes.typeAny) { return true; } if (sourceType instanceof StructDef && targetType instanceof StructDef) { return true; } if (targetType.getTag() == TypeTags.ARRAY_TAG || sourceType.getTag() == TypeTags.ARRAY_TAG) { return isUnsafeArrayCastPossible(sourceType, targetType); } if (sourceType.getTag() == TypeTags.JSON_TAG && targetType.getTag() == TypeTags.C_JSON_TAG) { return true; } return false; } private boolean isUnsafeArrayCastPossible(BType sourceType, BType targetType) { if (targetType.getTag() == TypeTags.ARRAY_TAG && sourceType.getTag() == TypeTags.ARRAY_TAG) { BArrayType sourceArrayType = (BArrayType) sourceType; BArrayType targetArrayType = (BArrayType) targetType; return isUnsafeArrayCastPossible(sourceArrayType.getElementType(), targetArrayType.getElementType()); } else if (targetType.getTag() == TypeTags.ARRAY_TAG) { if (sourceType == BTypes.typeJSON) { return isUnsafeArrayCastPossible(BTypes.typeJSON, ((BArrayType) targetType).getElementType()); } return sourceType == BTypes.typeAny; } else if (sourceType.getTag() == TypeTags.ARRAY_TAG) { if (targetType == BTypes.typeJSON) { return isUnsafeArrayCastPossible(((BArrayType) sourceType).getElementType(), BTypes.typeJSON); } return targetType == BTypes.typeAny; } if (sourceType == targetType) { return true; } if (targetType == BTypes.typeAny && !BTypes.isValueType(sourceType)) { return true; } return !BTypes.isValueType(targetType) && sourceType ==
// (the return expression from isUnsafeArrayCastPossible concludes on the next line: ... == BTypes.typeAny)
// performAssignabilityCheck: decides whether rhsExpr may be assigned to lhsType, returning an
// AssignabilityResult whose 'expression' optionally carries a wrapping implicit-cast (or widened
// literal) to substitute for rhsExpr. Handles: identical types, null into reference types,
// constrained-JSON into JSON (and matching constraints), lattice widening, implicit any/array casts,
// int literal -> float literal promotion, and structural function-type compatibility.
BTypes.typeAny; } private AssignabilityResult performAssignabilityCheck(BType lhsType, Expression rhsExpr) { AssignabilityResult assignabilityResult = new AssignabilityResult(); BType rhsType = rhsExpr.getType(); if (lhsType == rhsType) { assignabilityResult.assignable = true; return assignabilityResult; } if (rhsType == BTypes.typeNull && !BTypes.isValueType(lhsType)) { assignabilityResult.assignable = true; return assignabilityResult; } if ((rhsType instanceof BJSONConstraintType) && (lhsType == BTypes.typeJSON)) { assignabilityResult.assignable = true; return assignabilityResult; } if ((rhsType instanceof BJSONConstraintType) && (lhsType instanceof BJSONConstraintType)) { if (((BJSONConstraintType) lhsType).getConstraint() == ((BJSONConstraintType) rhsType).getConstraint()) { assignabilityResult.assignable = true; return assignabilityResult; } } TypeCastExpression implicitCastExpr = checkWideningPossible(lhsType, rhsExpr); if (implicitCastExpr != null) { assignabilityResult.assignable = true; assignabilityResult.expression = implicitCastExpr; return assignabilityResult; } if (isImplicitiCastPossible(lhsType, rhsType)) { implicitCastExpr = new TypeCastExpression(rhsExpr.getNodeLocation(), null, rhsExpr, lhsType); implicitCastExpr.setOpcode(InstructionCodes.NOP); assignabilityResult.assignable = true; assignabilityResult.expression = implicitCastExpr; return assignabilityResult; } if (lhsType == BTypes.typeFloat && rhsType == BTypes.typeInt && rhsExpr instanceof BasicLiteral) { BasicLiteral newExpr = new BasicLiteral(rhsExpr.getNodeLocation(), rhsExpr.getWhiteSpaceDescriptor(), new SimpleTypeName(TypeConstants.FLOAT_TNAME), new BFloat(((BasicLiteral) rhsExpr) .getBValue().intValue())); visitSingleValueExpr(newExpr); assignabilityResult.assignable = true; assignabilityResult.expression = newExpr; return assignabilityResult; } if (rhsType instanceof BFunctionType && lhsType instanceof BFunctionType) { BFunctionType rhs = (BFunctionType) rhsType; BFunctionType lhs =
(BFunctionType) lhsType; if (rhs.getParameterType().length == lhs.getParameterType().length && rhs.getReturnParameterType().length == lhs.getReturnParameterType().length) { for (int i = 0; i < rhs.getParameterType().length; i++) { if (!isAssignableTo(rhs.getParameterType()[i], lhs.getParameterType()[i])) { return assignabilityResult; } } for (int i = 0; i < rhs.getReturnParameterType().length; i++) { if (!isAssignableTo(rhs.getReturnParameterType()[i], lhs.getReturnParameterType()[i])) { return assignabilityResult; } } assignabilityResult.assignable = true; return assignabilityResult; } } return assignabilityResult; } private boolean isImplicitiCastPossible(BType lhsType, BType rhsType) { if (lhsType == BTypes.typeAny) { return true; } if (lhsType.getTag() == TypeTags.ARRAY_TAG || rhsType.getTag() == TypeTags.ARRAY_TAG) { return isImplicitArrayCastPossible(lhsType, rhsType); } return false; } private boolean isImplicitArrayCastPossible(BType lhsType, BType rhsType) { if (lhsType.getTag() == TypeTags.ARRAY_TAG && rhsType.getTag() == TypeTags.ARRAY_TAG) { BArrayType lhrArrayType = (BArrayType) lhsType; BArrayType rhsArrayType = (BArrayType) rhsType; return isImplicitArrayCastPossible(lhrArrayType.getElementType(), rhsArrayType.getElementType()); } else if (rhsType.getTag() == TypeTags.ARRAY_TAG) { return lhsType == BTypes.typeAny; } else if (lhsType.getTag() == TypeTags.ARRAY_TAG) { return false; } if (lhsType == rhsType) { return true; } return lhsType.getTag() == BTypes.typeAny.getTag() && !BTypes.isValueType(rhsType); } /** * Helper method to add return statement if required. * * @param returnParamCount No of return parameters. * @param blockStmt Block statement to which to add the return statement.
*/ private void checkAndAddReturnStmt(int returnParamCount, BlockStmt blockStmt) { if (returnParamCount != 0) { return; } Statement[] statements = blockStmt.getStatements(); int length = statements.length; Statement lastStatement = statements[length - 1]; if (!(lastStatement instanceof ReturnStmt)) { NodeLocation blockLocation = blockStmt.getNodeLocation(); NodeLocation endOfBlock = new NodeLocation(blockLocation.getPackageDirPath(), blockLocation.getFileName(), blockLocation.stopLineNumber); ReturnStmt returnStmt = new ReturnStmt(endOfBlock, null, new Expression[0]); statements = Arrays.copyOf(statements, length + 1); statements[length] = returnStmt; blockStmt.setStatements(statements); } } private void checkAndAddReplyStmt(BlockStmt blockStmt) { Statement[] statements = blockStmt.getStatements(); int length = statements.length; Statement lastStatement = statements[length - 1]; if (!(lastStatement instanceof ReplyStmt)) { NodeLocation blockLocation = blockStmt.getNodeLocation(); NodeLocation endOfBlock = new NodeLocation(blockLocation.getPackageDirPath(), blockLocation.getFileName(), blockLocation.stopLineNumber); ReplyStmt replyStmt = new ReplyStmt(endOfBlock, null, null); statements = Arrays.copyOf(statements, length + 1); statements[length] = replyStmt; blockStmt.setStatements(statements); } } private void assignVariableRefTypes(Expression[] expr, BType[] returnTypes) { for (int i = 0; i < expr.length; i++) { if (expr[i] instanceof SimpleVarRefExpr && ((SimpleVarRefExpr) expr[i]).getVarName().equals("_")) { continue; } ((SimpleVarRefExpr) expr[i]).getVariableDef().setType(returnTypes[i]); } } private static void checkParent(Statement stmt) { Statement parent = stmt; StatementKind childStmtType = stmt.getKind(); while (StatementKind.CALLABLE_UNIT_BLOCK != parent.getKind()) { if (StatementKind.WHILE_BLOCK == parent.getKind() && (StatementKind.BREAK == childStmtType || StatementKind.CONTINUE == childStmtType)) { return; } else if (StatementKind.TRANSACTION_BLOCK
== parent.getKind()) { if (StatementKind.BREAK == childStmtType) { BLangExceptionHelper.throwSemanticError(stmt, SemanticErrors.BREAK_USED_IN_TRANSACTION); } else if (StatementKind.CONTINUE == childStmtType) { BLangExceptionHelper.throwSemanticError(stmt, SemanticErrors.CONTINUE_USED_IN_TRANSACTION); } } parent = parent.getParent(); } } /** * Get the XML namespaces that are visible to the current scope. * * @param location Source location of the ballerina file * @return XML namespaces that are visible to the current scope, as a map */ private Map<String, Expression> getNamespaceInScope(NodeLocation location) { Map<String, Expression> namespaces = new HashMap<String, Expression>(); SymbolScope scope = currentScope; while (true) { for (Entry<SymbolName, BLangSymbol> symbols : scope.getSymbolMap().entrySet()) { SymbolName symbolName = symbols.getKey(); if (!(symbolName instanceof NamespaceSymbolName)) { continue; } NamespaceDeclaration namespaceDecl = (NamespaceDeclaration) symbols.getValue(); if (!namespaces.containsKey(namespaceDecl.getPrefix()) && !namespaces.containsValue(namespaceDecl.getNamespaceUri())) { BasicLiteral namespaceUriLiteral = new BasicLiteral(location, null, new SimpleTypeName(TypeConstants.STRING_TNAME), new BString(namespaceDecl.getNamespaceUri())); namespaceUriLiteral.accept(this); namespaces.put(namespaceDecl.getPrefix(), namespaceUriLiteral); } } if (scope instanceof BLangPackage) { break; } scope = scope.getEnclosingScope(); } return namespaces; } /** * Create and return an XML concatenation expression using the provided expressions. * Expressions can only be either XML type or string type. All the string type expressions * will be converted to XML text literals ({@link XMLTextLiteral}). * * @param items Expressions to create concatenating expression.
* @return XML concatenating expression */ private Expression getXMLConcatExpression(Expression[] items) { if (items.length == 0) { return null; } Expression concatExpr = null; for (int i = 0; i < items.length; i++) { Expression currentItem = items[i]; if (currentItem.getType() == BTypes.typeString) { currentItem = new XMLTextLiteral(currentItem.getNodeLocation(), currentItem.getWhiteSpaceDescriptor(), currentItem); items[0] = currentItem; } if (concatExpr == null) { concatExpr = currentItem; continue; } concatExpr = new AddExpression(currentItem.getNodeLocation(), currentItem.getWhiteSpaceDescriptor(), concatExpr, currentItem); concatExpr.setType(BTypes.typeXML); } return concatExpr; } private void validateXMLQname(XMLQNameExpr qname, Map<String, Expression> namespaces, Expression defaultNsUri) { qname.setType(BTypes.typeString); String prefix = qname.getPrefix(); if (prefix.isEmpty()) { qname.setNamepsaceUri(defaultNsUri); return; } if (namespaces.containsKey(qname.getPrefix())) { Expression namespaceUri = namespaces.get(qname.getPrefix()); qname.setNamepsaceUri(namespaceUri); } else if (prefix.equals(XMLConstants.XMLNS_ATTRIBUTE)) { BLangExceptionHelper.throwSemanticError(qname, SemanticErrors.INVALID_NAMESPACE_PREFIX, prefix); } else { BLangExceptionHelper.throwSemanticError(qname, SemanticErrors.UNDEFINED_NAMESPACE, qname.getPrefix()); } } private void validateXMLLiteralAttributes(List<KeyValueExpr> attributes, Map<String, Expression> namespaces) { for (KeyValueExpr attribute : attributes) { Expression attrNameExpr = attribute.getKeyExpr(); if (attrNameExpr instanceof XMLQNameExpr) { XMLQNameExpr attrQNameRefExpr = (XMLQNameExpr) attrNameExpr; attrQNameRefExpr.isUsedInXML(); BasicLiteral emptyNsUriLiteral = new BasicLiteral(attrNameExpr.getNodeLocation(), null, new SimpleTypeName(TypeConstants.STRING_TNAME), new BString(XMLConstants.NULL_NS_URI)); emptyNsUriLiteral.accept(this); validateXMLQname(attrQNameRefExpr, namespaces, emptyNsUriLiteral); } else {
// NOTE(review): in getXMLConcatExpression above, 'items[0] = currentItem;' inside the loop looks like
// it should be 'items[i] = currentItem;' -- the wrapped XMLTextLiteral is written back to index 0
// regardless of i. Verify against callers before fixing.
// (validateXMLLiteralAttributes continues: non-QName attribute names, and all attribute values, are
// coerced to string via createImplicitStringConversionExpr when their type is not already string.)
attrNameExpr.accept(this); if (attrNameExpr.getType() != BTypes.typeString) { attrNameExpr = createImplicitStringConversionExpr(attrNameExpr, attrNameExpr.getType()); attribute.setKeyExpr(attrNameExpr); } } Expression attrValueExpr = attribute.getValueExpr(); attrValueExpr.accept(this); if (attrValueExpr.getType() != BTypes.typeString) { attrValueExpr = createImplicitStringConversionExpr(attrValueExpr, attrValueExpr.getType()); attribute.setValueExpr(attrValueExpr); } } } private void validateXMLLiteralEndTag(XMLElementLiteral xmlElementLiteral, Expression defaultNsUri) { Expression startTagName = xmlElementLiteral.getStartTagName(); Expression endTagName = xmlElementLiteral.getEndTagName(); if (endTagName != null) { if (startTagName instanceof XMLQNameExpr && endTagName instanceof XMLQNameExpr) { XMLQNameExpr startName = (XMLQNameExpr) startTagName; XMLQNameExpr endName = (XMLQNameExpr) endTagName; if (!startName.getPrefix().equals(endName.getPrefix()) || !startName.getLocalname().equals(endName.getLocalname())) { BLangExceptionHelper.throwSemanticError(endTagName, SemanticErrors.XML_TAGS_MISMATCH); } } if (((startTagName instanceof XMLQNameExpr) && !(endTagName instanceof XMLQNameExpr)) || (!(startTagName instanceof XMLQNameExpr) && (endTagName instanceof XMLQNameExpr))) { BLangExceptionHelper.throwSemanticError(endTagName, SemanticErrors.XML_TAGS_MISMATCH); } if (endTagName instanceof XMLQNameExpr) { validateXMLQname((XMLQNameExpr) endTagName, xmlElementLiteral.getNamespaces(), defaultNsUri); } else { endTagName.accept(this); } if (endTagName.getType() != BTypes.typeString) { endTagName = createImplicitStringConversionExpr(endTagName, endTagName.getType()); xmlElementLiteral.setEndTagName(endTagName); } } } private Expression createImplicitStringConversionExpr(Expression sExpr, BType sType) { Expression conversionExpr = getImplicitConversionExpr(sExpr, sType, BTypes.typeString); if (conversionExpr == null) { BLangExceptionHelper.throwSemanticError(sExpr,
SemanticErrors.INCOMPATIBLE_TYPES, BTypes.typeString, sType); } return conversionExpr; } /** * This class holds the results of the type assignability check. * * @since 0.88 */ static class AssignabilityResult { boolean assignable; Expression expression; } }
class SemanticAnalyzer implements NodeVisitor { private static final String ERRORS_PACKAGE = "ballerina.lang.errors"; private static final String BALLERINA_CAST_ERROR = "TypeCastError"; private static final String BALLERINA_CONVERSION_ERROR = "TypeConversionError"; private static final String BALLERINA_ERROR = "Error"; private String currentPkg; private CallableUnit currentCallableUnit = null; private Stack<CallableUnit> parentCallableUnit = new Stack<>(); private Stack<SymbolScope> parentScope = new Stack<>(); private int whileStmtCount = 0; private int transactionStmtCount = 0; private int failedBlockCount = 0; private boolean isWithinWorker = false; private SymbolScope currentScope; private SymbolScope currentPackageScope; private SymbolScope nativeScope; private BlockStmt.BlockStmtBuilder pkgInitFuncStmtBuilder; public SemanticAnalyzer(BLangProgram programScope) { currentScope = programScope; this.nativeScope = programScope.getNativeScope(); } @Override public void visit(BLangProgram bLangProgram) { BLangPackage entryPkg = bLangProgram.getEntryPackage(); if (entryPkg != null) { entryPkg.accept(this); } else { BLangPackage[] blangPackages = bLangProgram.getLibraryPackages(); for (BLangPackage bLangPackage : blangPackages) { bLangPackage.accept(this); } } } @Override public void visit(BLangPackage bLangPackage) { BLangPackage[] dependentPackages = bLangPackage.getDependentPackages(); List<BallerinaFunction> initFunctionList = new ArrayList<>(); for (BLangPackage dependentPkg : dependentPackages) { if (dependentPkg.isSymbolsDefined()) { continue; } dependentPkg.accept(this); initFunctionList.add(dependentPkg.getInitFunction()); } currentScope = bLangPackage; currentPackageScope = currentScope; currentPkg = bLangPackage.getPackagePath(); NodeLocation pkgLocation = bLangPackage.getNodeLocation(); if (pkgLocation == null) { BallerinaFile[] ballerinaFiles = bLangPackage.getBallerinaFiles(); String filename = ballerinaFiles.length == 0 ? 
"" : ballerinaFiles[0].getFileName(); pkgLocation = new NodeLocation("", filename, 0); } BallerinaFunction.BallerinaFunctionBuilder functionBuilder = new BallerinaFunction.BallerinaFunctionBuilder(bLangPackage); functionBuilder.setNodeLocation(pkgLocation); functionBuilder.setIdentifier(new Identifier(bLangPackage.getPackagePath() + INIT_FUNCTION_SUFFIX)); functionBuilder.setPkgPath(bLangPackage.getPackagePath()); pkgInitFuncStmtBuilder = new BlockStmt.BlockStmtBuilder(bLangPackage.getNodeLocation(), bLangPackage); addDependentPkgInitCalls(initFunctionList, pkgInitFuncStmtBuilder, pkgLocation); defineStructs(bLangPackage.getStructDefs()); defineConnectors(bLangPackage.getConnectors()); resolveStructFieldTypes(bLangPackage.getStructDefs()); defineFunctions(bLangPackage.getFunctions()); defineServices(bLangPackage.getServices()); defineAnnotations(bLangPackage.getAnnotationDefs()); for (CompilationUnit compilationUnit : bLangPackage.getCompilationUnits()) { compilationUnit.accept(this); } ReturnStmt returnStmt = new ReturnStmt(pkgLocation, null, new Expression[0]); pkgInitFuncStmtBuilder.addStmt(returnStmt); pkgInitFuncStmtBuilder.setBlockKind(StatementKind.CALLABLE_UNIT_BLOCK); functionBuilder.setBody(pkgInitFuncStmtBuilder.build()); BallerinaFunction initFunction = functionBuilder.buildFunction(); initFunction.setReturnParamTypes(new BType[0]); bLangPackage.setInitFunction(initFunction); bLangPackage.setSymbolsDefined(true); } @Override public void visit(BallerinaFile bFile) { } @Override public void visit(ImportPackage importPkg) { } @Override public void visit(ConstDef constDef) { VariableDefStmt variableDefStmt = constDef.getVariableDefStmt(); variableDefStmt.getVariableDef().setKind(VariableDef.Kind.CONSTANT); variableDefStmt.accept(this); for (AnnotationAttachment annotationAttachment : constDef.getAnnotations()) { annotationAttachment.setAttachedPoint(new AnnotationAttachmentPoint(AttachmentPoint.CONSTANT, null)); annotationAttachment.accept(this); } 
SimpleVarRefExpr varRefExpr = new SimpleVarRefExpr(constDef.getNodeLocation(), constDef.getWhiteSpaceDescriptor(), constDef.getName(), null, null); varRefExpr.setVariableDef(constDef); AssignStmt assignStmt = new AssignStmt(constDef.getNodeLocation(), new Expression[]{varRefExpr}, variableDefStmt.getRExpr()); pkgInitFuncStmtBuilder.addStmt(assignStmt); } @Override public void visit(GlobalVariableDef globalVarDef) { VariableDefStmt variableDefStmt = globalVarDef.getVariableDefStmt(); variableDefStmt.getVariableDef().setKind(VariableDef.Kind.GLOBAL_VAR); variableDefStmt.accept(this); if (variableDefStmt.getRExpr() != null) { AssignStmt assignStmt = new AssignStmt(variableDefStmt.getNodeLocation(), new Expression[]{variableDefStmt.getLExpr()}, variableDefStmt.getRExpr()); pkgInitFuncStmtBuilder.addStmt(assignStmt); } } @Override public void visit(Service service) { openScope(service); for (AnnotationAttachment annotationAttachment : service.getAnnotations()) { annotationAttachment.setAttachedPoint(new AnnotationAttachmentPoint(AttachmentPoint.SERVICE, service.getProtocolPkgPath())); annotationAttachment.accept(this); } for (VariableDefStmt variableDefStmt : service.getVariableDefStmts()) { variableDefStmt.getVariableDef().setKind(VariableDef.Kind.SERVICE_VAR); variableDefStmt.accept(this); } createServiceInitFunction(service); for (Resource resource : service.getResources()) { resource.accept(this); } closeScope(); } @Override public void visit(BallerinaConnectorDef connectorDef) { openScope(connectorDef); if (connectorDef.isFilterConnector()) { BType type = BTypes.resolveType(connectorDef.getFilterSupportedType(), currentScope, connectorDef.getNodeLocation()); if (type != null) { if (type instanceof BallerinaConnectorDef) { connectorDef.setFilteredType(type); BallerinaConnectorDef filterConnector = (BallerinaConnectorDef) type; if (!filterConnector.equals(connectorDef)) { BLangExceptionHelper.throwSemanticError(connectorDef, 
SemanticErrors.CONNECTOR_TYPES_NOT_EQUIVALENT, connectorDef.getName(), filterConnector.getName()); } } else { BLangExceptionHelper.throwSemanticError(connectorDef, SemanticErrors.FILTER_CONNECTOR_MUST_BE_A_CONNECTOR, type.getName()); } } else { BLangExceptionHelper.throwSemanticError(connectorDef, SemanticErrors.UNDEFINED_CONNECTOR, connectorDef.getFilterSupportedType()); } } for (AnnotationAttachment annotationAttachment : connectorDef.getAnnotations()) { annotationAttachment.setAttachedPoint(new AnnotationAttachmentPoint(AttachmentPoint.CONNECTOR, null)); annotationAttachment.accept(this); } for (ParameterDef parameterDef : connectorDef.getParameterDefs()) { parameterDef.setKind(VariableDef.Kind.CONNECTOR_VAR); parameterDef.accept(this); } for (VariableDefStmt variableDefStmt : connectorDef.getVariableDefStmts()) { variableDefStmt.getVariableDef().setKind(VariableDef.Kind.CONNECTOR_VAR); variableDefStmt.accept(this); } createConnectorInitFunction(connectorDef); for (BallerinaAction action : connectorDef.getActions()) { action.accept(this); } closeScope(); } @Override public void visit(Resource resource) { openScope(resource); currentCallableUnit = resource; for (AnnotationAttachment annotationAttachment : resource.getAnnotations()) { annotationAttachment.setAttachedPoint(new AnnotationAttachmentPoint(AttachmentPoint.RESOURCE, null)); annotationAttachment.accept(this); } for (ParameterDef parameterDef : resource.getParameterDefs()) { parameterDef.setKind(VariableDef.Kind.LOCAL_VAR); parameterDef.accept(this); } for (Worker worker : resource.getWorkers()) { addWorkerSymbol(worker); visit(worker); } BlockStmt blockStmt = resource.getResourceBody(); blockStmt.accept(this); checkAndAddReplyStmt(blockStmt); resolveWorkerInteractions(resource); currentCallableUnit = null; closeScope(); } private void buildWorkerInteractions(CallableUnit callableUnit, Worker[] workers, boolean isWorkerInWorker, boolean isForkJoinStmt) { Map<String, WorkerDataChannel> workerDataChannels = 
new HashMap<>(); boolean statementCompleted = false; List<Statement> processedStatements = new ArrayList<>(); if (callableUnit.getWorkerInteractionStatements() != null && !callableUnit.getWorkerInteractionStatements().isEmpty()) { String sourceWorkerName; String targetWorkerName; for (Statement statement : callableUnit.getWorkerInteractionStatements()) { statementCompleted = false; if (statement instanceof WorkerInvocationStmt) { targetWorkerName = ((WorkerInvocationStmt) statement).getName(); if (targetWorkerName == "fork" && isForkJoinStmt) { break; } if (callableUnit instanceof Worker) { sourceWorkerName = callableUnit.getName(); } else { sourceWorkerName = "default"; } for (Worker worker : workers) { if (statementCompleted) { break; } Statement[] workerInteractions = worker.getWorkerInteractionStatements(). toArray(new Statement[worker.getWorkerInteractionStatements().size()]); for (Statement workerInteraction : workerInteractions) { if (workerInteraction instanceof WorkerReplyStmt) { String complimentSourceWorkerName = ((WorkerReplyStmt) workerInteraction). getWorkerName(); String complimentTargetWorkerName = worker.getName(); if (sourceWorkerName.equals(complimentSourceWorkerName) && targetWorkerName.equals(complimentTargetWorkerName)) { Expression[] invokeParams = ((WorkerInvocationStmt) statement).getExpressionList(); Expression[] receiveParams = ((WorkerReplyStmt) workerInteraction). 
getExpressionList(); if (invokeParams.length != receiveParams.length) { break; } else { int i = 0; for (Expression invokeParam : invokeParams) { if (!(receiveParams[i++].getType().equals(invokeParam.getType()))) { break; } } } String interactionName = sourceWorkerName + "->" + targetWorkerName; WorkerDataChannel workerDataChannel; if (!workerDataChannels.containsKey(interactionName)) { workerDataChannel = new WorkerDataChannel(sourceWorkerName, targetWorkerName); workerDataChannels.put(interactionName, workerDataChannel); } else { workerDataChannel = workerDataChannels.get(interactionName); } ((WorkerInvocationStmt) statement).setWorkerDataChannel(workerDataChannel); ((WorkerReplyStmt) workerInteraction). setWorkerDataChannel(workerDataChannel); ((WorkerReplyStmt) workerInteraction). setEnclosingCallableUnitName(callableUnit.getName()); callableUnit.addWorkerDataChannel(workerDataChannel); ((WorkerInvocationStmt) statement).setEnclosingCallableUnitName( callableUnit.getName()); ((WorkerInvocationStmt) statement).setPackagePath(callableUnit.getPackagePath()); worker.getWorkerInteractionStatements().remove(workerInteraction); processedStatements.add(statement); statementCompleted = true; break; } } } } } else { sourceWorkerName = ((WorkerReplyStmt) statement).getWorkerName(); if (callableUnit instanceof Worker) { targetWorkerName = callableUnit.getName(); } else { targetWorkerName = "default"; } for (Worker worker : callableUnit.getWorkers()) { if (statementCompleted) { break; } Statement[] workerInteractions = worker.getWorkerInteractionStatements(). toArray(new Statement[worker.getWorkerInteractionStatements().size()]); for (Statement workerInteraction : workerInteractions) { if (workerInteraction instanceof WorkerInvocationStmt) { String complimentTargetWorkerName = ((WorkerInvocationStmt) workerInteraction). 
getName();
                        String complimentSourceWorkerName = worker.getName();
                        if (sourceWorkerName.equals(complimentSourceWorkerName)
                                && targetWorkerName.equals(complimentTargetWorkerName)) {
                            Expression[] invokeParams = ((WorkerReplyStmt) statement).getExpressionList();
                            Expression[] receiveParams = ((WorkerInvocationStmt) workerInteraction).
                                    getExpressionList();
                            if (invokeParams.length != receiveParams.length) {
                                // Arity mismatch: give up on pairing within this worker.
                                break;
                            } else {
                                // BUGFIX (mirrors the invocation branch): a type mismatch
                                // previously only broke out of the parameter-checking loop
                                // and the statements were paired regardless; it must
                                // abandon the pairing like the length-mismatch path.
                                boolean paramTypesMatch = true;
                                int i = 0;
                                for (Expression invokeParam : invokeParams) {
                                    if (!(receiveParams[i++].getType().equals(invokeParam.getType()))) {
                                        paramTypesMatch = false;
                                        break;
                                    }
                                }
                                if (!paramTypesMatch) {
                                    break;
                                }
                            }
                            // Reuse one data channel per source->target pair.
                            String interactionName = sourceWorkerName + "->" + targetWorkerName;
                            WorkerDataChannel workerDataChannel;
                            if (!workerDataChannels.containsKey(interactionName)) {
                                workerDataChannel = new WorkerDataChannel(sourceWorkerName, targetWorkerName);
                                workerDataChannels.put(interactionName, workerDataChannel);
                            } else {
                                workerDataChannel = workerDataChannels.get(interactionName);
                            }
                            // Wire both ends of the interaction to the shared channel.
                            ((WorkerReplyStmt) statement).setWorkerDataChannel(workerDataChannel);
                            ((WorkerInvocationStmt) workerInteraction).
                                    setWorkerDataChannel(workerDataChannel);
                            ((WorkerInvocationStmt) workerInteraction).
setEnclosingCallableUnitName(callableUnit.getName());
                            callableUnit.addWorkerDataChannel(workerDataChannel);
                            ((WorkerReplyStmt) statement).setEnclosingCallableUnitName(callableUnit.getName());
                            ((WorkerReplyStmt) statement).setPackagePath(callableUnit.getPackagePath());
                            // Consume the paired invocation from the worker; the reply is
                            // queued for removal once the scan over all statements finishes.
                            worker.getWorkerInteractionStatements().remove(workerInteraction);
                            processedStatements.add(statement);
                            statementCompleted = true;
                            break;
                        }
                    }
                }
            }
        }
        // A statement with no complementary partner anywhere is a semantic error,
        // except when analyzing a worker nested inside another worker (its partner
        // may live in an enclosing scope).
        if (!statementCompleted && !isWorkerInWorker) {
            BLangExceptionHelper.throwSemanticError(statement,
                    SemanticErrors.WORKER_INTERACTION_NOT_VALID);
        }
    }
    callableUnit.getWorkerInteractionStatements().removeAll(processedStatements);
}
}

// Pairs worker send/receive statements for the callable unit and then for each of
// its workers in turn. On iteration i the unit under analysis is workers[i - 1]
// (initially the enclosing unit itself) and tempWorkers holds the workers that
// come after it, shrunk each round via System.arraycopy — so every unit is only
// matched against the workers declared after it.
private void resolveWorkerInteractions(CallableUnit callableUnit) {
    boolean isWorkerInWorker = callableUnit instanceof Worker;
    boolean isForkJoinStmt = callableUnit instanceof ForkJoinStmt;
    Worker[] workers = callableUnit.getWorkers();
    if (workers.length > 0) {
        Worker[] tempWorkers = new Worker[workers.length];
        System.arraycopy(workers, 0, tempWorkers, 0, tempWorkers.length);
        int i = 0;
        do {
            buildWorkerInteractions(callableUnit, tempWorkers, isWorkerInWorker, isForkJoinStmt);
            callableUnit = workers[i];
            i++;
            System.arraycopy(workers, i, tempWorkers, 0, workers.length - i);
        } while (i < workers.length);
    }
}

// Wires each worker's send-to-"fork" statement inside a fork-join to a fresh data
// channel registered on the enclosing callable unit (the join side reads from it).
private void resolveForkJoin(ForkJoinStmt forkJoinStmt) {
    Worker[] workers = forkJoinStmt.getWorkers();
    if (workers != null && workers.length > 0) {
        for (Worker worker : workers) {
            for (Statement statement : worker.getWorkerInteractionStatements()) {
                if (statement instanceof WorkerInvocationStmt) {
                    String targetWorkerName = ((WorkerInvocationStmt) statement).getName();
                    if (targetWorkerName.equalsIgnoreCase("fork")) {
                        String sourceWorkerName = worker.getName();
                        WorkerDataChannel workerDataChannel = new WorkerDataChannel
                                (sourceWorkerName, targetWorkerName);
                        ((WorkerInvocationStmt) statement).setWorkerDataChannel(workerDataChannel);
                        currentCallableUnit.addWorkerDataChannel(workerDataChannel);
                    }
                }
            }
        }
    }
}

@Override
public void
visit(BallerinaFunction function) { openScope(function); currentCallableUnit = function; for (AnnotationAttachment annotationAttachment : function.getAnnotations()) { annotationAttachment.setAttachedPoint(new AnnotationAttachmentPoint(AttachmentPoint.FUNCTION, null)); annotationAttachment.accept(this); } for (ParameterDef parameterDef : function.getParameterDefs()) { parameterDef.setKind(VariableDef.Kind.LOCAL_VAR); parameterDef.accept(this); } for (ParameterDef parameterDef : function.getReturnParameters()) { if (parameterDef.getName() != null) { parameterDef.setKind(VariableDef.Kind.LOCAL_VAR); } parameterDef.accept(this); } if (!function.isNative()) { for (Worker worker : function.getWorkers()) { worker.accept(this); } BlockStmt blockStmt = function.getCallableUnitBody(); blockStmt.accept(this); if (function.getReturnParameters().length > 0 && !blockStmt.isAlwaysReturns()) { BLangExceptionHelper.throwSemanticError(function, SemanticErrors.MISSING_RETURN_STATEMENT); } checkAndAddReturnStmt(function.getReturnParamTypes().length, blockStmt); } resolveWorkerInteractions(function); currentCallableUnit = null; closeScope(); } @Override public void visit(BTypeMapper typeMapper) { } @Override public void visit(BallerinaAction action) { openScope(action); currentCallableUnit = action; for (AnnotationAttachment annotationAttachment : action.getAnnotations()) { annotationAttachment.setAttachedPoint(new AnnotationAttachmentPoint(AttachmentPoint.ACTION, null)); annotationAttachment.accept(this); } for (ParameterDef parameterDef : action.getParameterDefs()) { parameterDef.setKind(VariableDef.Kind.LOCAL_VAR); parameterDef.accept(this); } for (ParameterDef parameterDef : action.getReturnParameters()) { if (parameterDef.getName() != null) { parameterDef.setKind(VariableDef.Kind.LOCAL_VAR); } parameterDef.accept(this); } if (!action.isNative()) { for (Worker worker : action.getWorkers()) { worker.accept(this); } BlockStmt blockStmt = action.getCallableUnitBody(); 
blockStmt.accept(this); if (action.getReturnParameters().length > 0 && !blockStmt.isAlwaysReturns()) { BLangExceptionHelper.throwSemanticError(action, SemanticErrors.MISSING_RETURN_STATEMENT); } checkAndAddReturnStmt(action.getReturnParameters().length, blockStmt); } resolveWorkerInteractions(action); currentCallableUnit = null; closeScope(); } @Override public void visit(Worker worker) { parentScope.push(currentScope); currentScope = worker; parentCallableUnit.push(currentCallableUnit); currentCallableUnit = worker; for (ParameterDef parameterDef : worker.getParameterDefs()) { parameterDef.setKind(VariableDef.Kind.LOCAL_VAR); parameterDef.accept(this); } for (ParameterDef parameterDef : worker.getReturnParameters()) { parameterDef.setKind(VariableDef.Kind.LOCAL_VAR); parameterDef.accept(this); } for (Worker worker2 : worker.getWorkers()) { addWorkerSymbol(worker2); worker2.accept(this); } BlockStmt blockStmt = worker.getCallableUnitBody(); isWithinWorker = true; blockStmt.accept(this); isWithinWorker = false; currentCallableUnit = parentCallableUnit.pop(); currentScope = parentScope.pop(); } private void addWorkerSymbol(Worker worker) { SymbolName symbolName = worker.getSymbolName(); BLangSymbol varSymbol = currentScope.resolve(symbolName); if (varSymbol != null) { BLangExceptionHelper.throwSemanticError(worker, SemanticErrors.REDECLARED_SYMBOL, worker.getName()); } currentScope.define(symbolName, worker); } @Override public void visit(StructDef structDef) { for (AnnotationAttachment annotationAttachment : structDef.getAnnotations()) { annotationAttachment.setAttachedPoint(new AnnotationAttachmentPoint(AttachmentPoint.STRUCT, null)); annotationAttachment.accept(this); } } @Override public void visit(AnnotationAttachment annotation) { AnnotationAttachmentPoint attachedPoint = annotation.getAttachedPoint(); SymbolName annotationSymName = new SymbolName(annotation.getName(), annotation.getPkgPath()); BLangSymbol annotationSymbol = 
currentScope.resolve(annotationSymName); if (!(annotationSymbol instanceof AnnotationDef)) { BLangExceptionHelper.throwSemanticError(annotation, SemanticErrors.UNDEFINED_ANNOTATION, annotationSymName); } AnnotationDef annotationDef = (AnnotationDef) annotationSymbol; if (annotationDef.getAttachmentPoints() != null && annotationDef.getAttachmentPoints().length > 0) { Optional<AnnotationAttachmentPoint> matchingAttachmentPoint = Arrays .stream(annotationDef.getAttachmentPoints()) .filter(attachmentPoint -> attachmentPoint.equals(attachedPoint)) .findAny(); if (!matchingAttachmentPoint.isPresent()) { String msg = attachedPoint.getAttachmentPoint().getValue(); if (attachedPoint.getPkgPath() != null) { msg = attachedPoint.getAttachmentPoint().getValue() + "<" + attachedPoint.getPkgPath() + ">"; } throw BLangExceptionHelper.getSemanticError(annotation.getNodeLocation(), SemanticErrors.ANNOTATION_NOT_ALLOWED, annotationSymName, msg); } } validateAttributes(annotation, annotationDef); populateDefaultValues(annotation, annotationDef); } /** * Visit and validate attributes of an annotation attachment. 
* * @param annotation Annotation attachment to validate attributes * @param annotationDef Definition of the annotation */ private void validateAttributes(AnnotationAttachment annotation, AnnotationDef annotationDef) { annotation.getAttributeNameValuePairs().forEach((attributeName, attributeValue) -> { BLangSymbol attributeSymbol = annotationDef.resolveMembers(new SymbolName(attributeName)); if (attributeSymbol == null || !(attributeSymbol instanceof AnnotationAttributeDef)) { BLangExceptionHelper.throwSemanticError(annotation, SemanticErrors.NO_SUCH_ATTRIBUTE, attributeName, annotation.getName()); } AnnotationAttributeDef attributeDef = ((AnnotationAttributeDef) attributeSymbol); SimpleTypeName attributeType = attributeDef.getTypeName(); if (attributeValue.getVarRefExpr() != null) { SimpleVarRefExpr varRefExpr = attributeValue.getVarRefExpr(); visitSingleValueExpr(varRefExpr); if (!(varRefExpr.getVariableDef() instanceof ConstDef)) { throw BLangExceptionHelper.getSemanticError(attributeValue.getNodeLocation(), SemanticErrors.ATTRIBUTE_VAL_CANNOT_REFER_NON_CONST); } attributeValue.setType(varRefExpr.getType()); BType lhsType = BTypes.resolveType(attributeType, currentScope, annotation.getNodeLocation()); if (lhsType != varRefExpr.getType()) { throw BLangExceptionHelper.getSemanticError(attributeValue.getNodeLocation(), SemanticErrors.INCOMPATIBLE_TYPES, lhsType, varRefExpr.getType()); } return; } SimpleTypeName valueType = attributeValue.getTypeName(); BLangSymbol valueTypeSymbol = currentScope.resolve(valueType.getSymbolName()); BLangSymbol attributeTypeSymbol = annotationDef.resolve(new SymbolName(attributeType.getName(), attributeType.getPackagePath())); if (attributeType.isArrayType()) { if (!valueType.isArrayType()) { BLangExceptionHelper.throwSemanticError(attributeValue, SemanticErrors.INCOMPATIBLE_TYPES, attributeTypeSymbol.getSymbolName() + TypeConstants.ARRAY_TNAME, valueTypeSymbol.getSymbolName()); } AnnotationAttributeValue[] valuesArray = 
attributeValue.getValueArray(); for (AnnotationAttributeValue value : valuesArray) { valueTypeSymbol = currentScope.resolve(value.getTypeName().getSymbolName()); if (attributeTypeSymbol != valueTypeSymbol) { BLangExceptionHelper.throwSemanticError(attributeValue, SemanticErrors.INCOMPATIBLE_TYPES, attributeTypeSymbol.getSymbolName(), valueTypeSymbol.getSymbolName()); } AnnotationAttachment childAnnotation = value.getAnnotationValue(); if (childAnnotation != null && valueTypeSymbol instanceof AnnotationDef) { validateAttributes(childAnnotation, (AnnotationDef) valueTypeSymbol); } } } else { if (valueType.isArrayType()) { BLangExceptionHelper.throwSemanticError(attributeValue, SemanticErrors.INCOMPATIBLE_TYPES_ARRAY_FOUND, attributeTypeSymbol.getName()); } if (attributeTypeSymbol != valueTypeSymbol) { BLangExceptionHelper.throwSemanticError(attributeValue, SemanticErrors.INCOMPATIBLE_TYPES, attributeTypeSymbol.getSymbolName(), valueTypeSymbol.getSymbolName()); } AnnotationAttachment childAnnotation = attributeValue.getAnnotationValue(); if (childAnnotation != null && valueTypeSymbol instanceof AnnotationDef) { validateAttributes(childAnnotation, (AnnotationDef) valueTypeSymbol); } } }); } /** * Populate default values to the annotation attributes. 
* * @param annotation Annotation attachment to populate default values * @param annotationDef Definition of the annotation corresponds to the provided annotation attachment */ private void populateDefaultValues(AnnotationAttachment annotation, AnnotationDef annotationDef) { Map<String, AnnotationAttributeValue> attributeValPairs = annotation.getAttributeNameValuePairs(); for (AnnotationAttributeDef attributeDef : annotationDef.getAttributeDefs()) { String attributeName = attributeDef.getName(); if (!attributeValPairs.containsKey(attributeName)) { BasicLiteral defaultValue = attributeDef.getAttributeValue(); if (defaultValue != null) { annotation.addAttributeNameValuePair(attributeName, new AnnotationAttributeValue(defaultValue.getBValue(), defaultValue.getTypeName(), null, null)); } continue; } AnnotationAttributeValue attributeValue = attributeValPairs.get(attributeName); if (attributeValue.getVarRefExpr() != null) { continue; } SimpleTypeName valueType = attributeValue.getTypeName(); if (valueType.isArrayType()) { AnnotationAttributeValue[] valuesArray = attributeValue.getValueArray(); for (AnnotationAttributeValue value : valuesArray) { AnnotationAttachment annotationTypeVal = value.getAnnotationValue(); if (annotationTypeVal == null) { continue; } SimpleTypeName attributeType = attributeDef.getTypeName(); BLangSymbol attributeTypeSymbol = annotationDef.resolve( new SymbolName(attributeType.getName(), attributeType.getPackagePath())); if (attributeTypeSymbol instanceof AnnotationDef) { populateDefaultValues(annotationTypeVal, (AnnotationDef) attributeTypeSymbol); } } } else { AnnotationAttachment annotationTypeVal = attributeValue.getAnnotationValue(); if (annotationTypeVal == null) { continue; } BLangSymbol attributeTypeSymbol = annotationDef.resolve(attributeDef.getTypeName().getSymbolName()); if (attributeTypeSymbol instanceof AnnotationDef) { populateDefaultValues(annotationTypeVal, (AnnotationDef) attributeTypeSymbol); } } } } @Override public void 
visit(AnnotationAttributeDef annotationAttributeDef) { SimpleTypeName fieldType = annotationAttributeDef.getTypeName(); BasicLiteral fieldVal = annotationAttributeDef.getAttributeValue(); if (fieldVal != null) { fieldVal.accept(this); BType valueType = fieldVal.getType(); if (!BTypes.isBuiltInTypeName(fieldType.getName())) { BLangExceptionHelper.throwSemanticError(annotationAttributeDef, SemanticErrors.INVALID_DEFAULT_VALUE); } BLangSymbol typeSymbol = currentScope.resolve(fieldType.getSymbolName()); BType fieldBType = (BType) typeSymbol; if (!BTypes.isValueType(fieldBType)) { BLangExceptionHelper.throwSemanticError(annotationAttributeDef, SemanticErrors.INVALID_DEFAULT_VALUE); } if (fieldBType != valueType) { BLangExceptionHelper.throwSemanticError(annotationAttributeDef, SemanticErrors.INVALID_OPERATION_INCOMPATIBLE_TYPES, fieldType, fieldVal.getTypeName()); } } else { BLangSymbol typeSymbol; if (fieldType.isArrayType()) { typeSymbol = currentScope.resolve(new SymbolName(fieldType.getName(), fieldType.getPackagePath())); } else { typeSymbol = currentScope.resolve(fieldType.getSymbolName()); } if (((typeSymbol instanceof BType) && !BTypes.isValueType((BType) typeSymbol)) || (!(typeSymbol instanceof BType) && !(typeSymbol instanceof AnnotationDef))) { BLangExceptionHelper.throwSemanticError(annotationAttributeDef, SemanticErrors.INVALID_ATTRIBUTE_TYPE, fieldType); } if (!(typeSymbol instanceof BType)) { fieldType.setPkgPath(annotationAttributeDef.getPackagePath()); } } } @Override public void visit(AnnotationDef annotationDef) { for (AnnotationAttributeDef fields : annotationDef.getAttributeDefs()) { fields.accept(this); } for (AnnotationAttachment annotationAttachment : annotationDef.getAnnotations()) { annotationAttachment.setAttachedPoint(new AnnotationAttachmentPoint(AttachmentPoint.ANNOTATION, null)); annotationAttachment.accept(this); } } @Override public void visit(ParameterDef paramDef) { BType bType = BTypes.resolveType(paramDef.getTypeName(), 
currentScope, paramDef.getNodeLocation()); paramDef.setType(bType); if (paramDef.getAnnotations() == null) { return; } for (AnnotationAttachment annotationAttachment : paramDef.getAnnotations()) { annotationAttachment.setAttachedPoint(new AnnotationAttachmentPoint(AttachmentPoint.PARAMETER, null)); annotationAttachment.accept(this); } } @Override public void visit(SimpleVariableDef varDef) { } @Override public void visit(VariableDefStmt varDefStmt) { VariableDef varDef = varDefStmt.getVariableDef(); BType lhsType = BTypes.resolveType(varDef.getTypeName(), currentScope, varDef.getNodeLocation()); varDef.setType(lhsType); if (varDef.getKind() == null) { varDef.setKind(VariableDef.Kind.LOCAL_VAR); } ((VariableReferenceExpr) varDefStmt.getLExpr()).setLHSExpr(true); SymbolName symbolName = new SymbolName(varDef.getName(), currentPkg); BLangSymbol varSymbol = currentScope.resolve(symbolName); if (varSymbol != null && varSymbol.getSymbolScope().getScopeName() == currentScope.getScopeName()) { BLangExceptionHelper.throwSemanticError(varDef, SemanticErrors.REDECLARED_SYMBOL, varDef.getName()); } currentScope.define(symbolName, varDef); Expression rExpr = varDefStmt.getRExpr(); if (rExpr == null) { return; } if (rExpr instanceof RefTypeInitExpr) { RefTypeInitExpr refTypeInitExpr = getNestedInitExpr(rExpr, lhsType); varDefStmt.setRExpr(refTypeInitExpr); refTypeInitExpr.accept(this); return; } BType rhsType; if (rExpr instanceof ExecutableMultiReturnExpr) { rExpr.accept(this); ExecutableMultiReturnExpr multiReturnExpr = (ExecutableMultiReturnExpr) rExpr; BType[] returnTypes = multiReturnExpr.getTypes(); if (returnTypes.length != 1) { BLangExceptionHelper.throwSemanticError(varDefStmt, SemanticErrors.ASSIGNMENT_COUNT_MISMATCH, "1", returnTypes.length); } rhsType = returnTypes[0]; } else { visitSingleValueExpr(rExpr); rhsType = rExpr.getType(); } AssignabilityResult result = performAssignabilityCheck(lhsType, rExpr); if (result.expression != null) { 
varDefStmt.setRExpr(result.expression); } else if (!result.assignable) { BLangExceptionHelper.throwSemanticError(varDefStmt, SemanticErrors.INCOMPATIBLE_ASSIGNMENT, rhsType, lhsType); } } @Override public void visit(AssignStmt assignStmt) { Expression[] lExprs = assignStmt.getLExprs(); visitLExprsOfAssignment(assignStmt, lExprs); Expression rExpr = assignStmt.getRExpr(); if (rExpr instanceof FunctionInvocationExpr || rExpr instanceof ActionInvocationExpr) { rExpr.accept(this); if (assignStmt.isDeclaredWithVar()) { assignVariableRefTypes(lExprs, ((CallableUnitInvocationExpr) rExpr).getTypes()); } checkForMultiAssignmentErrors(assignStmt, lExprs, (CallableUnitInvocationExpr) rExpr); return; } if (lExprs.length > 1 && (rExpr instanceof TypeCastExpression || rExpr instanceof TypeConversionExpr)) { ((AbstractExpression) rExpr).setMultiReturnAvailable(true); rExpr.accept(this); if (assignStmt.isDeclaredWithVar()) { assignVariableRefTypes(lExprs, ((ExecutableMultiReturnExpr) rExpr).getTypes()); } checkForMultiValuedCastingErrors(assignStmt, lExprs, (ExecutableMultiReturnExpr) rExpr); return; } Expression lExpr = assignStmt.getLExprs()[0]; BType lhsType = lExpr.getType(); if (rExpr instanceof RefTypeInitExpr) { if (assignStmt.isDeclaredWithVar()) { BLangExceptionHelper.throwSemanticError(assignStmt, SemanticErrors.INVALID_VAR_ASSIGNMENT); } RefTypeInitExpr refTypeInitExpr = getNestedInitExpr(rExpr, lhsType); assignStmt.setRExpr(refTypeInitExpr); refTypeInitExpr.accept(this); return; } visitSingleValueExpr(rExpr); BType rhsType = rExpr.getType(); if (assignStmt.isDeclaredWithVar()) { ((SimpleVarRefExpr) lExpr).getVariableDef().setType(rhsType); lhsType = rhsType; } AssignabilityResult result = performAssignabilityCheck(lhsType, rExpr); if (result.expression != null) { assignStmt.setRExpr(result.expression); } else if (!result.assignable) { BLangExceptionHelper.throwSemanticError(assignStmt, SemanticErrors.INCOMPATIBLE_ASSIGNMENT, rhsType, lhsType); } } @Override public void 
visit(BlockStmt blockStmt) { openScope(blockStmt); for (int stmtIndex = 0; stmtIndex < blockStmt.getStatements().length; stmtIndex++) { Statement stmt = blockStmt.getStatements()[stmtIndex]; if (stmt instanceof BreakStmt && whileStmtCount < 1) { BLangExceptionHelper.throwSemanticError(stmt, SemanticErrors.BREAK_STMT_NOT_ALLOWED_HERE); } if (stmt instanceof ContinueStmt && whileStmtCount < 1) { BLangExceptionHelper.throwSemanticError(stmt, SemanticErrors.CONTINUE_STMT_NOT_ALLOWED_HERE); } if (stmt instanceof AbortStmt && transactionStmtCount < 1) { BLangExceptionHelper.throwSemanticError(stmt, SemanticErrors.ABORT_STMT_NOT_ALLOWED_HERE); } if (stmt instanceof RetryStmt && failedBlockCount < 1) { BLangExceptionHelper.throwSemanticError(stmt, SemanticErrors.RETRY_STMT_NOT_ALLOWED_HERE); } if (isWithinWorker) { if (stmt instanceof ReplyStmt) { BLangExceptionHelper.throwSemanticError(stmt, SemanticErrors.REPLY_STMT_NOT_ALLOWED_HERE); } } if (stmt instanceof BreakStmt || stmt instanceof ContinueStmt || stmt instanceof ReplyStmt || stmt instanceof AbortStmt || stmt instanceof RetryStmt) { checkUnreachableStmt(blockStmt.getStatements(), stmtIndex + 1); } stmt.accept(this); if (stmt.isAlwaysReturns()) { checkUnreachableStmt(blockStmt.getStatements(), stmtIndex + 1); blockStmt.setAlwaysReturns(true); } } closeScope(); } @Override public void visit(CommentStmt commentStmt) { } @Override public void visit(IfElseStmt ifElseStmt) { boolean stmtReturns = true; Expression expr = ifElseStmt.getCondition(); visitSingleValueExpr(expr); if (expr.getType() != BTypes.typeBoolean) { BLangExceptionHelper .throwSemanticError(ifElseStmt, SemanticErrors.INCOMPATIBLE_TYPES_BOOLEAN_EXPECTED, expr.getType()); } Statement thenBody = ifElseStmt.getThenBody(); thenBody.accept(this); stmtReturns &= thenBody.isAlwaysReturns(); for (IfElseStmt.ElseIfBlock elseIfBlock : ifElseStmt.getElseIfBlocks()) { Expression elseIfCondition = elseIfBlock.getElseIfCondition(); visitSingleValueExpr(elseIfCondition); 
if (elseIfCondition.getType() != BTypes.typeBoolean) { BLangExceptionHelper.throwSemanticError(ifElseStmt, SemanticErrors.INCOMPATIBLE_TYPES_BOOLEAN_EXPECTED, elseIfCondition.getType()); } Statement elseIfBody = elseIfBlock.getElseIfBody(); elseIfBody.accept(this); stmtReturns &= elseIfBody.isAlwaysReturns(); } Statement elseBody = ifElseStmt.getElseBody(); if (elseBody != null) { elseBody.accept(this); stmtReturns &= elseBody.isAlwaysReturns(); } else { stmtReturns = false; } ifElseStmt.setAlwaysReturns(stmtReturns); } @Override public void visit(WhileStmt whileStmt) { whileStmtCount++; Expression expr = whileStmt.getCondition(); visitSingleValueExpr(expr); if (expr.getType() != BTypes.typeBoolean) { BLangExceptionHelper .throwSemanticError(whileStmt, SemanticErrors.INCOMPATIBLE_TYPES_BOOLEAN_EXPECTED, expr.getType()); } BlockStmt blockStmt = whileStmt.getBody(); if (blockStmt.getStatements().length == 0) { BLangExceptionHelper.throwSemanticError(blockStmt, SemanticErrors.NO_STATEMENTS_WHILE_LOOP); } blockStmt.accept(this); whileStmtCount--; } @Override public void visit(BreakStmt breakStmt) { checkParent(breakStmt); } @Override public void visit(ContinueStmt continueStmt) { checkParent(continueStmt); } @Override public void visit(TryCatchStmt tryCatchStmt) { tryCatchStmt.getTryBlock().accept(this); BLangSymbol error = currentScope.resolve(new SymbolName(BALLERINA_ERROR, ERRORS_PACKAGE)); Set<BType> definedTypes = new HashSet<>(); if (tryCatchStmt.getCatchBlocks().length != 0) { if (error == null || !(error instanceof StructDef)) { BLangExceptionHelper.throwSemanticError(tryCatchStmt, SemanticErrors.CANNOT_RESOLVE_STRUCT, ERRORS_PACKAGE, BALLERINA_ERROR); } } for (TryCatchStmt.CatchBlock catchBlock : tryCatchStmt.getCatchBlocks()) { catchBlock.getParameterDef().setKind(VariableDef.Kind.LOCAL_VAR); catchBlock.getParameterDef().accept(this); if (!error.equals(catchBlock.getParameterDef().getType()) && (!(catchBlock.getParameterDef().getType() instanceof StructDef) 
|| TypeLattice.getExplicitCastLattice().getEdgeFromTypes(catchBlock.getParameterDef() .getType(), error, null) == null)) { throw new SemanticException(BLangExceptionHelper.constructSemanticError( catchBlock.getCatchBlockStmt().getNodeLocation(), SemanticErrors.ONLY_ERROR_TYPE_ALLOWED_HERE)); } if (!definedTypes.add(catchBlock.getParameterDef().getType())) { throw new SemanticException(BLangExceptionHelper.constructSemanticError( catchBlock.getCatchBlockStmt().getNodeLocation(), SemanticErrors.DUPLICATED_ERROR_CATCH, catchBlock.getParameterDef().getTypeName())); } catchBlock.getCatchBlockStmt().accept(this); } if (tryCatchStmt.getFinallyBlock() != null) { tryCatchStmt.getFinallyBlock().getFinallyBlockStmt().accept(this); } } @Override public void visit(ThrowStmt throwStmt) { throwStmt.getExpr().accept(this); BType expressionType = null; if (throwStmt.getExpr() instanceof SimpleVarRefExpr && throwStmt.getExpr().getType() instanceof StructDef) { expressionType = throwStmt.getExpr().getType(); } else if (throwStmt.getExpr() instanceof FunctionInvocationExpr) { FunctionInvocationExpr funcIExpr = (FunctionInvocationExpr) throwStmt.getExpr(); if (!funcIExpr.isMultiReturnExpr() && funcIExpr.getTypes().length == 1 && funcIExpr.getTypes()[0] instanceof StructDef) { expressionType = funcIExpr.getTypes()[0]; } } if (expressionType != null) { BLangSymbol error = currentScope.resolve(new SymbolName(BALLERINA_ERROR, ERRORS_PACKAGE)); if (error == null) { BLangExceptionHelper.throwSemanticError(throwStmt, SemanticErrors.CANNOT_RESOLVE_STRUCT, ERRORS_PACKAGE, BALLERINA_ERROR); } if (error.equals(expressionType) || TypeLattice.getExplicitCastLattice().getEdgeFromTypes (expressionType, error, null) != null) { throwStmt.setAlwaysReturns(true); return; } } throw new SemanticException(BLangExceptionHelper.constructSemanticError( throwStmt.getNodeLocation(), SemanticErrors.ONLY_ERROR_TYPE_ALLOWED_HERE)); } @Override public void visit(FunctionInvocationStmt functionInvocationStmt) { 
functionInvocationStmt.getFunctionInvocationExpr().accept(this); } @Override public void visit(ActionInvocationStmt actionInvocationStmt) { actionInvocationStmt.getActionInvocationExpr().accept(this); } @Override public void visit(WorkerInvocationStmt workerInvocationStmt) { Expression[] expressions = workerInvocationStmt.getExpressionList(); BType[] bTypes = new BType[expressions.length]; int p = 0; for (Expression expression : expressions) { expression.accept(this); bTypes[p++] = expression.getType(); } workerInvocationStmt.setTypes(bTypes); if (workerInvocationStmt.getCallableUnitName() != null && !workerInvocationStmt.getCallableUnitName().equals("default") && !workerInvocationStmt.getCallableUnitName().equals("fork")) { linkWorker(workerInvocationStmt); } } @Override public void visit(WorkerReplyStmt workerReplyStmt) { String workerName = workerReplyStmt.getWorkerName(); SymbolName workerSymbol = new SymbolName(workerName); Expression[] expressions = workerReplyStmt.getExpressionList(); BType[] bTypes = new BType[expressions.length]; int p = 0; for (Expression expression : expressions) { expression.accept(this); bTypes[p++] = expression.getType(); } workerReplyStmt.setTypes(bTypes); if (!workerName.equals("default")) { BLangSymbol worker = currentScope.resolve(workerSymbol); if (!(worker instanceof Worker)) { BLangExceptionHelper.throwSemanticError(expressions[0], SemanticErrors.INCOMPATIBLE_TYPES_UNKNOWN_FOUND, workerSymbol); } workerReplyStmt.setWorker((Worker) worker); } } @Override public void visit(ForkJoinStmt forkJoinStmt) { boolean stmtReturns = true; openScope(forkJoinStmt); for (Worker worker : forkJoinStmt.getWorkers()) { worker.accept(this); } ForkJoinStmt.Join join = forkJoinStmt.getJoin(); openScope(join); ParameterDef parameter = join.getJoinResult(); if (parameter != null) { parameter.setKind(VariableDef.Kind.LOCAL_VAR); parameter.accept(this); join.define(parameter.getSymbolName(), parameter); if (!(parameter.getType() instanceof BMapType)) { 
throw new SemanticException("Incompatible types: expected map in " + parameter.getNodeLocation().getFileName() + ":" + parameter.getNodeLocation(). getLineNumber()); } } Statement joinBody = join.getJoinBlock(); if (joinBody != null) { joinBody.accept(this); stmtReturns &= joinBody.isAlwaysReturns(); } closeScope(); ForkJoinStmt.Timeout timeout = forkJoinStmt.getTimeout(); openScope(timeout); Expression timeoutExpr = timeout.getTimeoutExpression(); if (timeoutExpr != null) { timeoutExpr.accept(this); } ParameterDef timeoutParam = timeout.getTimeoutResult(); if (timeoutParam != null) { timeoutParam.accept(this); timeout.define(timeoutParam.getSymbolName(), timeoutParam); if (!(parameter.getType() instanceof BMapType)) { throw new SemanticException("Incompatible types: expected map in " + parameter.getNodeLocation().getFileName() + ":" + parameter.getNodeLocation().getLineNumber()); } } Statement timeoutBody = timeout.getTimeoutBlock(); if (timeoutBody != null) { timeoutBody.accept(this); stmtReturns &= timeoutBody.isAlwaysReturns(); } resolveWorkerInteractions(forkJoinStmt); resolveForkJoin(forkJoinStmt); closeScope(); forkJoinStmt.setAlwaysReturns(stmtReturns); closeScope(); } @Override public void visit(TransactionStmt transactionStmt) { transactionStmtCount++; transactionStmt.getTransactionBlock().accept(this); transactionStmtCount--; TransactionStmt.FailedBlock failedBlock = transactionStmt.getFailedBlock(); if (failedBlock != null) { failedBlockCount++; failedBlock.getFailedBlockStmt().accept(this); failedBlockCount--; } TransactionStmt.AbortedBlock abortedBlock = transactionStmt.getAbortedBlock(); if (abortedBlock != null) { abortedBlock.getAbortedBlockStmt().accept(this); } TransactionStmt.CommittedBlock committedBlock = transactionStmt.getCommittedBlock(); if (committedBlock != null) { committedBlock.getCommittedBlockStmt().accept(this); } } @Override public void visit(AbortStmt abortStmt) { } @Override public void visit(RetryStmt retryStmt) { 
retryStmt.getRetryCountExpression().accept(this); checkRetryStmtValidity(retryStmt); } @Override public void visit(ReplyStmt replyStmt) { if (currentCallableUnit instanceof Function) { BLangExceptionHelper.throwSemanticError(currentCallableUnit, SemanticErrors.REPLY_STATEMENT_CANNOT_USED_IN_FUNCTION); } else if (currentCallableUnit instanceof Action) { BLangExceptionHelper.throwSemanticError(currentCallableUnit, SemanticErrors.REPLY_STATEMENT_CANNOT_USED_IN_ACTION); } if (replyStmt.getReplyExpr() instanceof ActionInvocationExpr) { BLangExceptionHelper.throwSemanticError(currentCallableUnit, SemanticErrors.ACTION_INVOCATION_NOT_ALLOWED_IN_REPLY); } Expression replyExpr = replyStmt.getReplyExpr(); if (replyExpr != null) { visitSingleValueExpr(replyExpr); if (replyExpr.getType() != BTypes.typeMessage) { BLangExceptionHelper.throwSemanticError(replyExpr, SemanticErrors.INCOMPATIBLE_TYPES, BTypes.typeMessage, replyExpr.getType()); } } } @Override public void visit(ReturnStmt returnStmt) { if (currentCallableUnit instanceof Resource) { BLangExceptionHelper.throwSemanticError(returnStmt, SemanticErrors.RETURN_CANNOT_USED_IN_RESOURCE); } if (transactionStmtCount > 0) { BLangExceptionHelper.throwSemanticError(returnStmt, SemanticErrors.RETURN_CANNOT_USED_IN_TRANSACTION); } Expression[] returnArgExprs = returnStmt.getExprs(); ParameterDef[] returnParamsOfCU = currentCallableUnit.getReturnParameters(); if (returnArgExprs.length == 0 && returnParamsOfCU.length == 0) { return; } if (returnArgExprs.length == 0 && returnParamsOfCU[0].getName() != null) { Expression[] returnExprs = new Expression[returnParamsOfCU.length]; for (int i = 0; i < returnParamsOfCU.length; i++) { SimpleVarRefExpr variableRefExpr = new SimpleVarRefExpr(returnStmt.getNodeLocation(), returnStmt.getWhiteSpaceDescriptor(), returnParamsOfCU[i].getSymbolName().getName(), null, returnParamsOfCU[i].getSymbolName().getPkgPath()); visit(variableRefExpr); returnExprs[i] = variableRefExpr; } 
returnStmt.setExprs(returnExprs); return; } else if (returnArgExprs.length == 0) { BLangExceptionHelper.throwSemanticError(returnStmt, SemanticErrors.NOT_ENOUGH_ARGUMENTS_TO_RETURN); } BType[] typesOfReturnExprs = new BType[returnArgExprs.length]; for (int i = 0; i < returnArgExprs.length; i++) { Expression returnArgExpr = returnArgExprs[i]; returnArgExpr.accept(this); typesOfReturnExprs[i] = returnArgExpr.getType(); } if (returnArgExprs.length == 1 && returnArgExprs[0] instanceof FunctionInvocationExpr) { FunctionInvocationExpr funcIExpr = (FunctionInvocationExpr) returnArgExprs[0]; BType[] funcIExprReturnTypes = funcIExpr.getTypes(); if (funcIExprReturnTypes.length > returnParamsOfCU.length) { BLangExceptionHelper.throwSemanticError(returnStmt, SemanticErrors.TOO_MANY_ARGUMENTS_TO_RETURN); } else if (funcIExprReturnTypes.length < returnParamsOfCU.length) { BLangExceptionHelper.throwSemanticError(returnStmt, SemanticErrors.NOT_ENOUGH_ARGUMENTS_TO_RETURN); } for (int i = 0; i < returnParamsOfCU.length; i++) { BType lhsType = returnParamsOfCU[i].getType(); BType rhsType = funcIExprReturnTypes[i]; if (isAssignableTo(lhsType, rhsType)) { continue; } BLangExceptionHelper.throwSemanticError(returnStmt, SemanticErrors.CANNOT_USE_TYPE_IN_RETURN_STATEMENT, lhsType, rhsType); } return; } if (typesOfReturnExprs.length > returnParamsOfCU.length) { BLangExceptionHelper.throwSemanticError(returnStmt, SemanticErrors.TOO_MANY_ARGUMENTS_TO_RETURN); } else if (typesOfReturnExprs.length < returnParamsOfCU.length) { BLangExceptionHelper.throwSemanticError(returnStmt, SemanticErrors.NOT_ENOUGH_ARGUMENTS_TO_RETURN); } else { for (int i = 0; i < returnParamsOfCU.length; i++) { if (returnArgExprs[i] instanceof FunctionInvocationExpr) { FunctionInvocationExpr funcIExpr = ((FunctionInvocationExpr) returnArgExprs[i]); if (funcIExpr.getTypes().length > 1) { BLangExceptionHelper.throwSemanticError(returnStmt, SemanticErrors.MULTIPLE_VALUE_IN_SINGLE_VALUE_CONTEXT, 
funcIExpr.getCallableUnit().getName()); } } BType lhsType = returnParamsOfCU[i].getType(); BType rhsType = typesOfReturnExprs[i]; AssignabilityResult result = performAssignabilityCheck(lhsType, returnArgExprs[i]); if (result.expression != null) { returnArgExprs[i] = result.expression; } else if (!result.assignable) { BLangExceptionHelper.throwSemanticError(returnStmt, SemanticErrors.CANNOT_USE_TYPE_IN_RETURN_STATEMENT, lhsType, rhsType); } } } } @Override public void visit(TransformStmt transformStmt) { BlockStmt blockStmt = transformStmt.getBody(); if (blockStmt.getStatements().length == 0) { BLangExceptionHelper.throwSemanticError(transformStmt, SemanticErrors.TRANSFORM_STATEMENT_NO_BODY); } blockStmt.accept(this); } @Override public void visit(InstanceCreationExpr instanceCreationExpr) { visitSingleValueExpr(instanceCreationExpr); if (BTypes.isValueType(instanceCreationExpr.getType())) { BLangExceptionHelper.throwSemanticError(instanceCreationExpr, SemanticErrors.CANNOT_USE_CREATE_FOR_VALUE_TYPES, instanceCreationExpr.getType()); } } @Override public void visit(FunctionInvocationExpr funcIExpr) { Expression[] exprs = funcIExpr.getArgExprs(); for (Expression expr : exprs) { visitSingleValueExpr(expr); } linkFunction(funcIExpr); if (funcIExpr.isFunctionPointerInvocation()) { BFunctionType type = (BFunctionType) funcIExpr.getFunctionPointerVariableDef().getType(); funcIExpr.setTypes(type.getReturnParameterType()); } else { BType[] returnParamTypes = funcIExpr.getCallableUnit().getReturnParamTypes(); funcIExpr.setTypes(returnParamTypes); } } @Override public void visit(ActionInvocationExpr actionIExpr) { String pkgPath = actionIExpr.getPackagePath(); String name = actionIExpr.getConnectorName(); SymbolName symbolName = new SymbolName(name, pkgPath); BLangSymbol bLangSymbol = currentScope.resolve(symbolName); if (bLangSymbol instanceof SimpleVariableDef) { if (((SimpleVariableDef) bLangSymbol).getType() instanceof StructDef) { StructDef structDef = (StructDef) 
((SimpleVariableDef) bLangSymbol).getType(); VariableDef matchingVariableDef = null; for (VariableDefStmt variableDefStmt : structDef.getFieldDefStmts()) { VariableDef variableDef = variableDefStmt.getVariableDef(); if (variableDef.getType() instanceof BFunctionType && variableDef.getIdentifier().getName().equals(actionIExpr.getName())) { matchingVariableDef = variableDef; break; } } if (matchingVariableDef == null) { throw BLangExceptionHelper.getSemanticError(actionIExpr.getNodeLocation(), SemanticErrors.UNDEFINED_FUNCTION, actionIExpr.getName()); } BFunctionType functionType = (BFunctionType) matchingVariableDef.getType(); Expression[] exprs = actionIExpr.getArgExprs(); if (exprs == null || functionType.getParameterType().length != exprs.length) { throw BLangExceptionHelper.getSemanticError(actionIExpr.getNodeLocation(), SemanticErrors.INCORRECT_FUNCTION_ARGUMENTS, actionIExpr.getName()); } for (Expression expr : exprs) { visitSingleValueExpr(expr); } for (int i = 0; i < exprs.length; i++) { if (!isAssignableTo(exprs[i].getType(), functionType.getParameterType()[i])) { throw BLangExceptionHelper.getSemanticError(actionIExpr.getNodeLocation(), SemanticErrors.INCORRECT_FUNCTION_ARGUMENTS, actionIExpr.getName()); } } actionIExpr.setTypes(functionType.getReturnParameterType()); actionIExpr.setFunctionInvocation(true); actionIExpr.setVariableDef((SimpleVariableDef) bLangSymbol); actionIExpr.setFieldDef(matchingVariableDef); return; } if (!(((SimpleVariableDef) bLangSymbol).getType() instanceof BallerinaConnectorDef)) { throw BLangExceptionHelper.getSemanticError(actionIExpr.getNodeLocation(), SemanticErrors.INCORRECT_ACTION_INVOCATION); } Expression[] exprs = new Expression[actionIExpr.getArgExprs().length + 1]; SimpleVarRefExpr variableRefExpr = new SimpleVarRefExpr(actionIExpr.getNodeLocation(), null, name, null, pkgPath); exprs[0] = variableRefExpr; for (int i = 0; i < actionIExpr.getArgExprs().length; i++) { exprs[i + 1] = actionIExpr.getArgExprs()[i]; } 
actionIExpr.setArgExprs(exprs); SimpleVariableDef varDef = (SimpleVariableDef) bLangSymbol; actionIExpr.setConnectorName(varDef.getTypeName().getName()); actionIExpr.setPackageName(varDef.getTypeName().getPackageName()); actionIExpr.setPackagePath(varDef.getTypeName().getPackagePath()); } else if (bLangSymbol instanceof BallerinaConnectorDef) { throw BLangExceptionHelper.getSemanticError(actionIExpr.getNodeLocation(), SemanticErrors.INVALID_ACTION_INVOCATION); } Expression[] exprs = actionIExpr.getArgExprs(); for (Expression expr : exprs) { visitSingleValueExpr(expr); } linkAction(actionIExpr); BType[] returnParamTypes = actionIExpr.getCallableUnit().getReturnParamTypes(); actionIExpr.setTypes(returnParamTypes); } @Override public void visit(BasicLiteral basicLiteral) { BType bType = BTypes.resolveType(basicLiteral.getTypeName(), currentScope, basicLiteral.getNodeLocation()); basicLiteral.setType(bType); } @Override public void visit(DivideExpr divideExpr) { BType binaryExprType = verifyBinaryArithmeticExprType(divideExpr); validateBinaryExprTypeForIntFloat(divideExpr, binaryExprType); } @Override public void visit(ModExpression modExpr) { BType binaryExprType = verifyBinaryArithmeticExprType(modExpr); validateBinaryExprTypeForIntFloat(modExpr, binaryExprType); } @Override public void visit(UnaryExpression unaryExpr) { visitSingleValueExpr(unaryExpr.getRExpr()); unaryExpr.setType(unaryExpr.getRExpr().getType()); if (Operator.SUB.equals(unaryExpr.getOperator()) || Operator.ADD.equals(unaryExpr.getOperator())) { if (unaryExpr.getType() != BTypes.typeInt && unaryExpr.getType() != BTypes.typeFloat) { throwInvalidUnaryOpError(unaryExpr); } } else if (Operator.NOT.equals(unaryExpr.getOperator())) { if (unaryExpr.getType() != BTypes.typeBoolean) { throwInvalidUnaryOpError(unaryExpr); } } else if (Operator.TYPEOF.equals(unaryExpr.getOperator())) { unaryExpr.setType(BTypes.typeType); } else if (Operator.LENGTHOF.equals(unaryExpr.getOperator())) { BType rType = 
unaryExpr.getRExpr().getType(); if (!((rType instanceof BArrayType) || (rType == BTypes.typeJSON))) { throwInvalidUnaryOpError(unaryExpr); } unaryExpr.setType(BTypes.typeInt); } else { BLangExceptionHelper.throwSemanticError(unaryExpr, SemanticErrors.UNKNOWN_OPERATOR_IN_UNARY, unaryExpr.getOperator()); } } @Override public void visit(AddExpression addExpr) { BType binaryExprType = verifyBinaryArithmeticExprType(addExpr); if (binaryExprType != BTypes.typeInt && binaryExprType != BTypes.typeFloat && binaryExprType != BTypes.typeString && binaryExprType != BTypes.typeXML) { throwInvalidBinaryOpError(addExpr); } } @Override public void visit(MultExpression multExpr) { BType binaryExprType = verifyBinaryArithmeticExprType(multExpr); validateBinaryExprTypeForIntFloat(multExpr, binaryExprType); } @Override public void visit(SubtractExpression subtractExpr) { BType binaryExprType = verifyBinaryArithmeticExprType(subtractExpr); validateBinaryExprTypeForIntFloat(subtractExpr, binaryExprType); } @Override public void visit(AndExpression andExpr) { visitBinaryLogicalExpr(andExpr); } @Override public void visit(OrExpression orExpr) { visitBinaryLogicalExpr(orExpr); } @Override public void visit(EqualExpression equalExpr) { verifyBinaryEqualityExprType(equalExpr); } @Override public void visit(NotEqualExpression notEqualExpr) { verifyBinaryEqualityExprType(notEqualExpr); } @Override public void visit(GreaterEqualExpression greaterEqualExpr) { BType compareExprType = verifyBinaryCompareExprType(greaterEqualExpr); validateBinaryExprTypeForIntFloat(greaterEqualExpr, compareExprType); } @Override public void visit(GreaterThanExpression greaterThanExpr) { BType compareExprType = verifyBinaryCompareExprType(greaterThanExpr); validateBinaryExprTypeForIntFloat(greaterThanExpr, compareExprType); } @Override public void visit(LessEqualExpression lessEqualExpr) { BType compareExprType = verifyBinaryCompareExprType(lessEqualExpr); validateBinaryExprTypeForIntFloat(lessEqualExpr, 
compareExprType); } @Override public void visit(LessThanExpression lessThanExpr) { BType compareExprType = verifyBinaryCompareExprType(lessThanExpr); validateBinaryExprTypeForIntFloat(lessThanExpr, compareExprType); } @Override public void visit(RefTypeInitExpr refTypeInitExpr) { visitMapJsonInitExpr(refTypeInitExpr); } @Override public void visit(MapInitExpr mapInitExpr) { visitMapJsonInitExpr(mapInitExpr); } @Override public void visit(JSONInitExpr jsonInitExpr) { visitMapJsonInitExpr(jsonInitExpr); } @Override public void visit(JSONArrayInitExpr jsonArrayInitExpr) { BType inheritedType = jsonArrayInitExpr.getInheritedType(); jsonArrayInitExpr.setType(inheritedType); BType inheritedElementType; if (inheritedType instanceof BArrayType) { inheritedElementType = ((BArrayType) inheritedType).getElementType(); } else { inheritedElementType = inheritedType; } Expression[] argExprs = jsonArrayInitExpr.getArgExprs(); for (int i = 0; i < argExprs.length; i++) { Expression argExpr = argExprs[i]; if (argExpr instanceof RefTypeInitExpr) { argExpr = getNestedInitExpr(argExpr, inheritedElementType); argExprs[i] = argExpr; } visitSingleValueExpr(argExpr); BType argExprType = argExpr.getType(); if (BTypes.isValueType(argExprType)) { TypeCastExpression typeCastExpr = checkWideningPossible(BTypes.typeJSON, argExpr); if (typeCastExpr != null) { argExprs[i] = typeCastExpr; } else { BLangExceptionHelper.throwSemanticError(argExpr, SemanticErrors.INCOMPATIBLE_TYPES_CANNOT_CONVERT, argExprType.getSymbolName(), inheritedType.getSymbolName()); } continue; } if (argExprType != BTypes.typeNull && isAssignableTo(inheritedElementType, argExprType)) { continue; } TypeCastExpression typeCastExpr = checkWideningPossible(inheritedElementType, argExpr); if (typeCastExpr == null) { BLangExceptionHelper.throwSemanticError(jsonArrayInitExpr, SemanticErrors.INCOMPATIBLE_TYPES_CANNOT_CONVERT, argExpr.getType(), inheritedElementType); } argExprs[i] = typeCastExpr; } } @Override public void 
visit(ConnectorInitExpr connectorInitExpr) { BType inheritedType = connectorInitExpr.getInheritedType(); if (!(inheritedType instanceof BallerinaConnectorDef)) { BLangExceptionHelper.throwSemanticError(connectorInitExpr, SemanticErrors.CONNECTOR_INIT_NOT_ALLOWED); } connectorInitExpr.setType(inheritedType); for (Expression argExpr : connectorInitExpr.getArgExprs()) { visitSingleValueExpr(argExpr); } Expression[] argExprs = connectorInitExpr.getArgExprs(); ParameterDef[] parameterDefs = ((BallerinaConnectorDef) inheritedType).getParameterDefs(); for (int i = 0; i < argExprs.length; i++) { int j = i; if (((BallerinaConnectorDef) inheritedType).isFilterConnector()) { j += 1; } SimpleTypeName simpleTypeName = parameterDefs[j].getTypeName(); BType paramType = BTypes.resolveType(simpleTypeName, currentScope, connectorInitExpr.getNodeLocation()); parameterDefs[j].setType(paramType); Expression argExpr = argExprs[i]; if (!(parameterDefs[j].getType().equals(argExpr.getType()))) { BLangExceptionHelper.throwSemanticError(connectorInitExpr, SemanticErrors.INCOMPATIBLE_TYPES, parameterDefs[j].getType(), argExpr.getType()); } } ConnectorInitExpr filterConnectorInitExpr = connectorInitExpr.getParentConnectorInitExpr(); if (filterConnectorInitExpr != null) { visit(filterConnectorInitExpr); BType filterConnectorType = filterConnectorInitExpr.getFilterSupportedType(); if (filterConnectorType != null && filterConnectorType instanceof BallerinaConnectorDef) { if (!filterConnectorType.equals(inheritedType)) { BLangExceptionHelper.throwSemanticError(connectorInitExpr, SemanticErrors.CONNECTOR_TYPES_NOT_EQUIVALENT, inheritedType, filterConnectorInitExpr.getInheritedType()); } } } } @Override public void visit(ArrayInitExpr arrayInitExpr) { if (!(arrayInitExpr.getInheritedType() instanceof BArrayType)) { BLangExceptionHelper.throwSemanticError(arrayInitExpr, SemanticErrors.ARRAY_INIT_NOT_ALLOWED_HERE); } visitArrayInitExpr(arrayInitExpr); } private void visitArrayInitExpr(ArrayInitExpr 
arrayInitExpr) { BType inheritedType = arrayInitExpr.getInheritedType(); arrayInitExpr.setType(inheritedType); Expression[] argExprs = arrayInitExpr.getArgExprs(); if (argExprs.length == 0) { return; } BType expectedElementType = ((BArrayType) inheritedType).getElementType(); for (int i = 0; i < argExprs.length; i++) { Expression argExpr = argExprs[i]; if (argExpr instanceof RefTypeInitExpr) { ((RefTypeInitExpr) argExpr).setInheritedType(expectedElementType); argExpr = getNestedInitExpr(argExpr, expectedElementType); argExprs[i] = argExpr; } visitSingleValueExpr(argExpr); AssignabilityResult result = performAssignabilityCheck(expectedElementType, argExpr); if (result.expression != null) { argExprs[i] = result.expression; } else if (!result.assignable) { BLangExceptionHelper.throwSemanticError(argExpr, SemanticErrors.INCOMPATIBLE_ASSIGNMENT, argExpr.getType(), expectedElementType); } } } /** * Visit and analyze ballerina Struct initializing expression. */ @Override public void visit(StructInitExpr structInitExpr) { BType inheritedType = structInitExpr.getInheritedType(); structInitExpr.setType(inheritedType); Expression[] argExprs = structInitExpr.getArgExprs(); if (argExprs.length == 0) { return; } StructDef structDef = (StructDef) inheritedType; for (Expression argExpr : argExprs) { KeyValueExpr keyValueExpr = (KeyValueExpr) argExpr; Expression keyExpr = keyValueExpr.getKeyExpr(); if (!(keyExpr instanceof SimpleVarRefExpr)) { throw BLangExceptionHelper.getSemanticError(keyExpr.getNodeLocation(), SemanticErrors.INVALID_FIELD_NAME_STRUCT_INIT); } SimpleVarRefExpr varRefExpr = (SimpleVarRefExpr) keyExpr; BLangSymbol varDefSymbol = structDef.resolveMembers(new SymbolName(varRefExpr.getSymbolName().getName(), structDef.getPackagePath())); if (varDefSymbol == null) { throw BLangExceptionHelper.getSemanticError(keyExpr.getNodeLocation(), SemanticErrors.UNKNOWN_FIELD_IN_STRUCT, varRefExpr.getVarName(), structDef.getName()); } if (!(varDefSymbol instanceof 
SimpleVariableDef)) { throw BLangExceptionHelper.getSemanticError(varRefExpr.getNodeLocation(), SemanticErrors.INCOMPATIBLE_TYPES_UNKNOWN_FOUND, varDefSymbol.getSymbolName()); } SimpleVariableDef varDef = (SimpleVariableDef) varDefSymbol; varRefExpr.setVariableDef(varDef); BType structFieldType = varDef.getType(); Expression valueExpr = keyValueExpr.getValueExpr(); if (valueExpr instanceof RefTypeInitExpr) { valueExpr = getNestedInitExpr(valueExpr, structFieldType); keyValueExpr.setValueExpr(valueExpr); } valueExpr.accept(this); AssignabilityResult result = performAssignabilityCheck(structFieldType, valueExpr); if (result.expression != null) { valueExpr = result.expression; keyValueExpr.setValueExpr(valueExpr); } else if (!result.assignable) { BLangExceptionHelper.throwSemanticError(keyExpr, SemanticErrors.INCOMPATIBLE_TYPES, varDef.getType(), valueExpr.getType()); } } } @Override public void visit(KeyValueExpr keyValueExpr) { } @Override public void visit(SimpleVarRefExpr simpleVarRefExpr) { if (simpleVarRefExpr.getPkgName() != null && simpleVarRefExpr.getPkgPath() == null) { throw BLangExceptionHelper.getSemanticError(simpleVarRefExpr.getNodeLocation(), SemanticErrors.UNDEFINED_PACKAGE_NAME, simpleVarRefExpr.getPkgName(), simpleVarRefExpr.getPkgName() + ":" + simpleVarRefExpr.getVarName()); } SymbolName symbolName = simpleVarRefExpr.getSymbolName(); BLangSymbol varDefSymbol = currentScope.resolve(symbolName); if (varDefSymbol == null) { BLangExceptionHelper.throwSemanticError(simpleVarRefExpr, SemanticErrors.UNDEFINED_SYMBOL, symbolName); } if (!(varDefSymbol instanceof VariableDef)) { throw BLangExceptionHelper.getSemanticError(simpleVarRefExpr.getNodeLocation(), SemanticErrors.INCOMPATIBLE_TYPES_UNKNOWN_FOUND, symbolName); } simpleVarRefExpr.setVariableDef((VariableDef) varDefSymbol); } @Override public void visit(FieldBasedVarRefExpr fieldBasedVarRefExpr) { String fieldName = fieldBasedVarRefExpr.getFieldName(); VariableReferenceExpr varRefExpr = 
fieldBasedVarRefExpr.getVarRefExpr(); varRefExpr.accept(this); BType varRefType = varRefExpr.getType(); if (varRefType instanceof StructDef) { StructDef structDef = (StructDef) varRefType; BLangSymbol fieldSymbol = structDef.resolveMembers(new SymbolName(fieldName, structDef.getPackagePath())); if (fieldSymbol == null) { throw BLangExceptionHelper.getSemanticError(varRefExpr.getNodeLocation(), SemanticErrors.UNKNOWN_FIELD_IN_STRUCT, fieldName, structDef.getName()); } SimpleVariableDef fieldDef = (SimpleVariableDef) fieldSymbol; fieldBasedVarRefExpr.setFieldDef(fieldDef); fieldBasedVarRefExpr.setType(fieldDef.getType()); } else if (varRefType == BTypes.typeMap) { fieldBasedVarRefExpr.setType(((BMapType) varRefType).getElementType()); } else if (varRefType == BTypes.typeJSON) { fieldBasedVarRefExpr.setType(BTypes.typeJSON); } else if (varRefType instanceof BJSONConstraintType) { StructDef structDefReference = (StructDef) ((BJSONConstraintType) varRefType).getConstraint(); BLangSymbol fieldSymbol = structDefReference.resolveMembers( new SymbolName(fieldName, structDefReference.getPackagePath())); if (fieldSymbol == null) { throw BLangExceptionHelper .getSemanticError(varRefExpr.getNodeLocation(), SemanticErrors.UNKNOWN_FIELD_IN_JSON_STRUCT, fieldName, structDefReference.getName()); } VariableDef fieldDef = (VariableDef) fieldSymbol; fieldBasedVarRefExpr.setFieldDef(fieldDef); fieldBasedVarRefExpr.setType(BTypes.typeJSON); } else if (varRefType instanceof BArrayType && fieldName.equals("length")) { if (fieldBasedVarRefExpr.isLHSExpr()) { throw BLangExceptionHelper.getSemanticError(fieldBasedVarRefExpr.getNodeLocation(), SemanticErrors.CANNOT_ASSIGN_VALUE_ARRAY_LENGTH); } fieldBasedVarRefExpr.setType(BTypes.typeInt); } else { throw BLangExceptionHelper.getSemanticError(varRefExpr.getNodeLocation(), SemanticErrors.INVALID_OPERATION_NOT_SUPPORT_INDEXING, varRefType); } } @Override public void visit(IndexBasedVarRefExpr indexBasedVarRefExpr) { Expression indexExpr = 
indexBasedVarRefExpr.getIndexExpr(); indexExpr.accept(this); VariableReferenceExpr varRefExpr = indexBasedVarRefExpr.getVarRefExpr(); varRefExpr.accept(this); BType varRefType = varRefExpr.getType(); if (varRefType instanceof BArrayType) { if (indexExpr.getType() != BTypes.typeInt) { throw BLangExceptionHelper.getSemanticError(indexExpr.getNodeLocation(), SemanticErrors.NON_INTEGER_ARRAY_INDEX, indexExpr.getType()); } BArrayType arrayType = (BArrayType) varRefType; indexBasedVarRefExpr.setType(arrayType.getElementType()); } else if (varRefType == BTypes.typeMap) { if (indexExpr.getType() != BTypes.typeString) { throw BLangExceptionHelper.getSemanticError(indexExpr.getNodeLocation(), SemanticErrors.NON_STRING_MAP_INDEX, indexExpr.getType()); } BMapType mapType = (BMapType) varRefType; indexBasedVarRefExpr.setType(mapType.getElementType()); } else if (varRefType.getTag() == TypeTags.C_JSON_TAG) { throw BLangExceptionHelper.getSemanticError(indexExpr.getNodeLocation(), SemanticErrors.INVALID_OPERATION_NOT_SUPPORT_INDEXING, varRefExpr.getType().toString()); } else if (varRefType == BTypes.typeJSON) { if (indexExpr.getType() != BTypes.typeInt && indexExpr.getType() != BTypes.typeString) { throw BLangExceptionHelper.getSemanticError(indexExpr.getNodeLocation(), SemanticErrors.INCOMPATIBLE_TYPES, "string or int", varRefExpr.getType()); } indexBasedVarRefExpr.setType(BTypes.typeJSON); } else if (varRefType instanceof StructDef) { if (indexExpr.getType() != BTypes.typeString) { throw BLangExceptionHelper.getSemanticError(indexExpr.getNodeLocation(), SemanticErrors.INCOMPATIBLE_TYPES, BTypes.typeString, varRefExpr.getType()); } if (!(indexExpr instanceof BasicLiteral)) { throw BLangExceptionHelper.getSemanticError(indexExpr.getNodeLocation(), SemanticErrors.DYNAMIC_KEYS_NOT_SUPPORTED_FOR_STRUCT); } String fieldName = ((BasicLiteral) indexExpr).getBValue().stringValue(); StructDef structDef = (StructDef) varRefType; BLangSymbol fieldSymbol = structDef.resolveMembers(new 
SymbolName(fieldName, structDef.getPackagePath())); if (fieldSymbol == null) { throw BLangExceptionHelper.getSemanticError(varRefExpr.getNodeLocation(), SemanticErrors.UNKNOWN_FIELD_IN_STRUCT, fieldName, structDef.getName()); } SimpleVariableDef fieldDef = (SimpleVariableDef) fieldSymbol; indexBasedVarRefExpr.setFieldDef(fieldDef); indexBasedVarRefExpr.setType(fieldDef.getType()); } else { throw BLangExceptionHelper.getSemanticError(indexBasedVarRefExpr.getNodeLocation(), SemanticErrors.INVALID_OPERATION_NOT_SUPPORT_INDEXING, varRefType); } } @Override public void visit(XMLAttributesRefExpr xmlAttributesRefExpr) { VariableReferenceExpr varRefExpr = xmlAttributesRefExpr.getVarRefExpr(); varRefExpr.accept(this); if (varRefExpr.getType() != BTypes.typeXML) { BLangExceptionHelper.throwSemanticError(xmlAttributesRefExpr, SemanticErrors.INCOMPATIBLE_TYPES, BTypes.typeXML, varRefExpr.getType()); } Expression indexExpr = xmlAttributesRefExpr.getIndexExpr(); if (indexExpr == null) { if (xmlAttributesRefExpr.isLHSExpr()) { BLangExceptionHelper.throwSemanticError(xmlAttributesRefExpr, SemanticErrors.XML_ATTRIBUTE_MAP_UPDATE_NOT_ALLOWED); } xmlAttributesRefExpr.setType(BTypes.typeXMLAttributes); return; } xmlAttributesRefExpr.setType(BTypes.typeString); indexExpr.accept(this); if (indexExpr instanceof XMLQNameExpr) { ((XMLQNameExpr) indexExpr).setUsedInXML(true); return; } if (indexExpr.getType() != BTypes.typeString) { BLangExceptionHelper.throwSemanticError(indexExpr, SemanticErrors.NON_STRING_MAP_INDEX, indexExpr.getType()); } Map<String, Expression> namespaces = getNamespaceInScope(xmlAttributesRefExpr.getNodeLocation()); xmlAttributesRefExpr.setNamespaces(namespaces); } @Override public void visit(XMLQNameExpr xmlQNameRefExpr) { if (xmlQNameRefExpr.isLHSExpr()) { BLangExceptionHelper.throwSemanticError(xmlQNameRefExpr, SemanticErrors.XML_QNAME_UPDATE_NOT_ALLOWED); } xmlQNameRefExpr.setType(BTypes.typeString); String prefix = xmlQNameRefExpr.getPrefix(); if 
(prefix.isEmpty()) { return; } if (prefix.equals(XMLConstants.XMLNS_ATTRIBUTE)) { BLangExceptionHelper.throwSemanticError(xmlQNameRefExpr, SemanticErrors.INVALID_NAMESPACE_PREFIX, prefix); } NamespaceSymbolName nsSymbolName = new NamespaceSymbolName(prefix); BLangSymbol symbol = currentScope.resolve(nsSymbolName); if (symbol == null) { BLangExceptionHelper.throwSemanticError(xmlQNameRefExpr, SemanticErrors.UNDEFINED_NAMESPACE, prefix); } String namepsaceUri = ((NamespaceDeclaration) symbol).getNamespaceUri(); BasicLiteral namespaceUriLiteral = new BasicLiteral(xmlQNameRefExpr.getNodeLocation(), null, new SimpleTypeName(TypeConstants.STRING_TNAME), new BString(namepsaceUri)); namespaceUriLiteral.accept(this); xmlQNameRefExpr.setNamepsaceUri(namespaceUriLiteral); } @Override public void visit(TypeCastExpression typeCastExpr) { boolean isMultiReturn = typeCastExpr.isMultiReturnExpr(); Expression rExpr = typeCastExpr.getRExpr(); visitSingleValueExpr(rExpr); BType sourceType = rExpr.getType(); BType targetType = typeCastExpr.getType(); if (targetType == null) { targetType = BTypes.resolveType(typeCastExpr.getTypeName(), currentScope, typeCastExpr.getNodeLocation()); typeCastExpr.setType(targetType); } if (sourceType instanceof BFunctionType || targetType instanceof BFunctionType) { BLangExceptionHelper.throwSemanticError(typeCastExpr, SemanticErrors.INCOMPATIBLE_TYPES_CANNOT_CAST, sourceType, targetType); } if (rExpr instanceof NullLiteral) { BLangExceptionHelper.throwSemanticError(typeCastExpr, SemanticErrors.INCOMPATIBLE_TYPES_CANNOT_CAST, sourceType, targetType); } TypeEdge newEdge = TypeLattice.getExplicitCastLattice().getEdgeFromTypes(sourceType, targetType, null); if (newEdge != null) { typeCastExpr.setOpcode(newEdge.getOpcode()); if (!newEdge.isSafe() && !isMultiReturn) { BLangExceptionHelper.throwSemanticError(typeCastExpr, SemanticErrors.UNSAFE_CAST_ATTEMPT, sourceType, targetType); } if (!isMultiReturn) { typeCastExpr.setTypes(new BType[]{targetType}); return; 
} } else if (sourceType == targetType) { typeCastExpr.setOpcode(InstructionCodes.NOP); if (!isMultiReturn) { typeCastExpr.setTypes(new BType[]{targetType}); return; } } else if ((sourceType.getTag() == TypeTags.C_JSON_TAG && targetType.getTag() == TypeTags.C_JSON_TAG) && TypeLattice.isAssignCompatible((StructDef) ((BJSONConstraintType) targetType).getConstraint(), (StructDef) ((BJSONConstraintType) sourceType).getConstraint())) { typeCastExpr.setOpcode(InstructionCodes.NOP); if (!isMultiReturn) { typeCastExpr.setTypes(new BType[]{targetType}); return; } } else { boolean isUnsafeCastPossible = false; if (isMultiReturn) { isUnsafeCastPossible = checkUnsafeCastPossible(sourceType, targetType); } if (isUnsafeCastPossible) { typeCastExpr.setOpcode(InstructionCodes.CHECKCAST); } else { TypeEdge conversionEdge = TypeLattice.getTransformLattice().getEdgeFromTypes(sourceType, targetType, null); if (conversionEdge != null) { throw BLangExceptionHelper.getSemanticError(typeCastExpr.getNodeLocation(), SemanticErrors.CANNOT_CAST_WITH_SUGGESTION, sourceType, targetType); } throw BLangExceptionHelper.getSemanticError(typeCastExpr.getNodeLocation(), SemanticErrors.INCOMPATIBLE_TYPES_CANNOT_CAST, sourceType, targetType); } } BLangSymbol error = currentScope.resolve(new SymbolName(BALLERINA_CAST_ERROR, ERRORS_PACKAGE)); if (error == null || !(error instanceof StructDef)) { BLangExceptionHelper.throwSemanticError(typeCastExpr, SemanticErrors.CANNOT_RESOLVE_STRUCT, ERRORS_PACKAGE, BALLERINA_CAST_ERROR); } typeCastExpr.setTypes(new BType[]{targetType, (BType) error}); } @Override public void visit(TypeConversionExpr typeConversionExpr) { boolean isMultiReturn = typeConversionExpr.isMultiReturnExpr(); Expression rExpr = typeConversionExpr.getRExpr(); visitSingleValueExpr(rExpr); BType sourceType = rExpr.getType(); BType targetType = typeConversionExpr.getType(); if (targetType == null) { targetType = BTypes.resolveType(typeConversionExpr.getTypeName(), currentScope, null); 
typeConversionExpr.setType(targetType); } if (rExpr instanceof NullLiteral) { BLangExceptionHelper.throwSemanticError(typeConversionExpr, SemanticErrors.INCOMPATIBLE_TYPES_CANNOT_CONVERT, sourceType, targetType); } TypeEdge newEdge = TypeLattice.getTransformLattice().getEdgeFromTypes(sourceType, targetType, null); if (newEdge != null) { typeConversionExpr.setOpcode(newEdge.getOpcode()); if (!newEdge.isSafe() && !isMultiReturn) { BLangExceptionHelper.throwSemanticError(typeConversionExpr, SemanticErrors.UNSAFE_CONVERSION_ATTEMPT, sourceType, targetType); } if (!isMultiReturn) { typeConversionExpr.setTypes(new BType[]{targetType}); return; } } else if (sourceType == targetType) { typeConversionExpr.setOpcode(InstructionCodes.NOP); if (!isMultiReturn) { typeConversionExpr.setTypes(new BType[]{targetType}); return; } } else { TypeEdge castEdge = TypeLattice.getExplicitCastLattice().getEdgeFromTypes(sourceType, targetType, null); if (castEdge != null) { throw BLangExceptionHelper.getSemanticError(typeConversionExpr.getNodeLocation(), SemanticErrors.CANNOT_CONVERT_WITH_SUGGESTION, sourceType, targetType); } throw BLangExceptionHelper.getSemanticError(typeConversionExpr.getNodeLocation(), SemanticErrors.INCOMPATIBLE_TYPES_CANNOT_CONVERT, sourceType, targetType); } BLangSymbol error = currentScope.resolve(new SymbolName(BALLERINA_CONVERSION_ERROR, ERRORS_PACKAGE)); if (error == null || !(error instanceof StructDef)) { BLangExceptionHelper.throwSemanticError(typeConversionExpr, SemanticErrors.CANNOT_RESOLVE_STRUCT, ERRORS_PACKAGE, BALLERINA_CAST_ERROR); } typeConversionExpr.setTypes(new BType[]{targetType, (BType) error}); } @Override public void visit(NullLiteral nullLiteral) { nullLiteral.setType(BTypes.typeNull); } @Override public void visit(LambdaExpression lambdaExpr) { } @Override public void visit(StringTemplateLiteral stringTemplateLiteral) { Expression[] items = stringTemplateLiteral.getArgExprs(); Expression concatExpr; if (items.length == 1) { concatExpr = 
items[0]; } else { concatExpr = items[0]; for (int i = 1; i < items.length; i++) { Expression currentItem = items[i]; concatExpr = new AddExpression(currentItem.getNodeLocation(), currentItem.getWhiteSpaceDescriptor(), concatExpr, currentItem); } } concatExpr.accept(this); concatExpr.setType(BTypes.typeString); stringTemplateLiteral.setConcatExpr(concatExpr); stringTemplateLiteral.setType(BTypes.typeString); } @Override public void visit(NamespaceDeclarationStmt namespaceDeclarationStmt) { namespaceDeclarationStmt.getNamespaceDclr().accept(this); } @Override public void visit(NamespaceDeclaration namespaceDclr) { if (namespaceDclr.getNamespaceUri().isEmpty() && !namespaceDclr.getPrefix().isEmpty()) { BLangExceptionHelper.throwSemanticError(namespaceDclr, SemanticErrors.INVALID_NAMESPACE_DECLARATION, namespaceDclr.getPrefix()); } NamespaceSymbolName nsSymbolName = new NamespaceSymbolName(namespaceDclr.getPrefix()); BLangSymbol nsSymbol = currentScope.resolve(nsSymbolName); if (nsSymbol != null && nsSymbol.getSymbolScope().getScopeName() == currentScope.getScopeName()) { BLangExceptionHelper.throwSemanticError(namespaceDclr, SemanticErrors.REDECLARED_SYMBOL, namespaceDclr.getPrefix()); } currentScope.define(nsSymbolName, namespaceDclr); } @Override public void visit(XMLLiteral xmlLiteral) { } @Override public void visit(XMLElementLiteral xmlElementLiteral) { Expression startTagName = xmlElementLiteral.getStartTagName(); Map<String, Expression> namespaces; XMLElementLiteral parent = xmlElementLiteral.getParent(); if (parent == null) { namespaces = getNamespaceInScope(xmlElementLiteral.getNodeLocation()); } else { namespaces = parent.getNamespaces(); xmlElementLiteral.setDefaultNamespaceUri(parent.getDefaultNamespaceUri()); } xmlElementLiteral.setNamespaces(namespaces); List<KeyValueExpr> attributes = xmlElementLiteral.getAttributes(); Iterator<KeyValueExpr> attrItr = attributes.iterator(); while (attrItr.hasNext()) { KeyValueExpr attribute = attrItr.next(); Expression 
attrNameExpr = attribute.getKeyExpr(); if (!(attrNameExpr instanceof XMLQNameExpr)) { continue; } Expression attrValueExpr = attribute.getValueExpr(); XMLQNameExpr xmlQNameRefExpr = (XMLQNameExpr) attrNameExpr; if (xmlQNameRefExpr.getPrefix().equals(XMLConstants.XMLNS_ATTRIBUTE)) { attrValueExpr.accept(this); if (attrValueExpr instanceof BasicLiteral && ((BasicLiteral) attrValueExpr).getBValue().stringValue().isEmpty()) { BLangExceptionHelper.throwSemanticError(attribute, SemanticErrors.INVALID_NAMESPACE_DECLARATION, xmlQNameRefExpr.getLocalname()); } namespaces.put(xmlQNameRefExpr.getLocalname(), attrValueExpr); attrItr.remove(); continue; } if (xmlQNameRefExpr.getLocalname().equals(XMLConstants.XMLNS_ATTRIBUTE)) { attrValueExpr.accept(this); xmlElementLiteral.setDefaultNamespaceUri(attrValueExpr); attrItr.remove(); } } if (xmlElementLiteral.getDefaultNamespaceUri() == null) { BasicLiteral defaultnsUriLiteral = new BasicLiteral(xmlElementLiteral.getNodeLocation(), null, new SimpleTypeName(TypeConstants.STRING_TNAME), new BString(XMLConstants.XMLNS_ATTRIBUTE_NS_URI)); defaultnsUriLiteral.setType(BTypes.typeString); defaultnsUriLiteral.accept(this); xmlElementLiteral.setDefaultNamespaceUri(defaultnsUriLiteral); } validateXMLLiteralAttributes(attributes, namespaces); if (startTagName instanceof XMLQNameExpr) { validateXMLQname((XMLQNameExpr) startTagName, namespaces, xmlElementLiteral.getDefaultNamespaceUri()); } else { startTagName.accept(this); } if (startTagName.getType() != BTypes.typeString) { startTagName = createImplicitStringConversionExpr(startTagName, startTagName.getType()); xmlElementLiteral.setStartTagName(startTagName); } validateXMLLiteralEndTag(xmlElementLiteral, xmlElementLiteral.getDefaultNamespaceUri()); XMLSequenceLiteral children = xmlElementLiteral.getContent(); if (children != null) { children.accept(this); } } @Override public void visit(XMLCommentLiteral xmlComment) { Expression contentExpr = xmlComment.getContent(); if (contentExpr == null) 
{ return; } contentExpr.accept(this); if (contentExpr.getType() != BTypes.typeString) { contentExpr = createImplicitStringConversionExpr(contentExpr, contentExpr.getType()); xmlComment.setContent(contentExpr); } } @Override public void visit(XMLTextLiteral xmlText) { Expression contentExpr = xmlText.getContent(); if (contentExpr == null) { return; } contentExpr.accept(this); } @Override public void visit(XMLSequenceLiteral xmlSequence) { Expression[] items = xmlSequence.getItems(); List<Expression> newItems = new ArrayList<Expression>(); Expression addExpr = null; for (int i = 0; i < items.length; i++) { Expression currentItem = items[i]; currentItem.accept(this); if (xmlSequence.hasParent() && currentItem.getType() == BTypes.typeXML) { if (addExpr != null) { newItems.add(addExpr); addExpr = null; } newItems.add(currentItem); continue; } if (currentItem.getType() != BTypes.typeString) { Expression castExpr = getImplicitConversionExpr(currentItem, currentItem.getType(), BTypes.typeString); if (castExpr == null) { if (xmlSequence.hasParent()) { BLangExceptionHelper.throwSemanticError(currentItem, SemanticErrors.INCOMPATIBLE_TYPES_IN_XML_TEMPLATE, currentItem.getType()); } BLangExceptionHelper.throwSemanticError(currentItem, SemanticErrors.INCOMPATIBLE_TYPES, BTypes.typeString, currentItem.getType()); } currentItem = castExpr; } if (addExpr == null) { addExpr = currentItem; continue; } if (addExpr.getType() == BTypes.typeString) { addExpr = new AddExpression(currentItem.getNodeLocation(), currentItem.getWhiteSpaceDescriptor(), addExpr, currentItem); } else { newItems.add(addExpr); addExpr = currentItem; } addExpr.setType(BTypes.typeString); } if (addExpr != null) { newItems.add(addExpr); } items = newItems.toArray(new Expression[newItems.size()]); xmlSequence.setItems(items); xmlSequence.setConcatExpr(getXMLConcatExpression(items)); } @Override public void visit(XMLPILiteral xmlPI) { Expression target = xmlPI.getTarget(); target.accept(this); if (target.getType() != 
BTypes.typeString) { target = createImplicitStringConversionExpr(target, target.getType()); xmlPI.setTarget(target); } Expression data = xmlPI.getData(); if (data == null) { return; } data.accept(this); if (data.getType() != BTypes.typeString) { data = createImplicitStringConversionExpr(data, data.getType()); xmlPI.setData(data); } } private void openScope(SymbolScope symbolScope) { currentScope = symbolScope; } private void closeScope() { currentScope = currentScope.getEnclosingScope(); } private void visitBinaryExpr(BinaryExpression expr) { visitSingleValueExpr(expr.getLExpr()); visitSingleValueExpr(expr.getRExpr()); } private void visitSingleValueExpr(Expression expr) { expr.accept(this); if (expr.isMultiReturnExpr()) { FunctionInvocationExpr funcIExpr = (FunctionInvocationExpr) expr; String nameWithPkgName = (funcIExpr.getPackageName() != null) ? funcIExpr.getPackageName() + ":" + funcIExpr.getName() : funcIExpr.getName(); BLangExceptionHelper.throwSemanticError(expr, SemanticErrors.MULTIPLE_VALUE_IN_SINGLE_VALUE_CONTEXT, nameWithPkgName); } } private void validateBinaryExprTypeForIntFloat(BinaryExpression binaryExpr, BType binaryExprType) { if (binaryExprType != BTypes.typeInt && binaryExprType != BTypes.typeFloat) { throwInvalidBinaryOpError(binaryExpr); } } private BType verifyBinaryArithmeticExprType(BinaryArithmeticExpression binaryArithmeticExpr) { visitBinaryExpr(binaryArithmeticExpr); BType type = verifyBinaryExprType(binaryArithmeticExpr); binaryArithmeticExpr.setType(type); return type; } private BType verifyBinaryCompareExprType(BinaryExpression binaryExpression) { visitBinaryExpr(binaryExpression); BType type = verifyBinaryExprType(binaryExpression); binaryExpression.setType(BTypes.typeBoolean); return type; } private void verifyBinaryEqualityExprType(BinaryExpression binaryExpr) { visitBinaryExpr(binaryExpr); BType rType = binaryExpr.getRExpr().getType(); BType lType = binaryExpr.getLExpr().getType(); BType type; if (rType == BTypes.typeNull) { if 
(BTypes.isValueType(lType)) { BLangExceptionHelper.throwSemanticError(binaryExpr, SemanticErrors.INVALID_OPERATION_INCOMPATIBLE_TYPES, lType, rType); } type = rType; } else if (lType == BTypes.typeNull) { if (BTypes.isValueType(rType)) { BLangExceptionHelper.throwSemanticError(binaryExpr, SemanticErrors.INVALID_OPERATION_INCOMPATIBLE_TYPES, lType, rType); } type = lType; } else { type = verifyBinaryExprType(binaryExpr); } binaryExpr.setType(BTypes.typeBoolean); if (type != BTypes.typeInt && type != BTypes.typeFloat && type != BTypes.typeBoolean && type != BTypes.typeString && type != BTypes.typeNull && type != BTypes.typeType) { throwInvalidBinaryOpError(binaryExpr); } } private BType verifyBinaryExprType(BinaryExpression binaryExpr) { Expression rExpr = binaryExpr.getRExpr(); Expression lExpr = binaryExpr.getLExpr(); BType rType = rExpr.getType(); BType lType = lExpr.getType(); if (rType.equals(lType)) { return rType; } if ((rType.equals(BTypes.typeString) || lType.equals(BTypes.typeString)) && !(binaryExpr.getOperator().equals(Operator.ADD))) { throw getInvalidBinaryOpError(binaryExpr); } if ((rType.equals(BTypes.typeString))) { Expression newExpr = createConversionExpr(binaryExpr, lExpr, lType, rType); binaryExpr.setLExpr(newExpr); return rType; } else if (lType.equals(BTypes.typeString)) { Expression newExpr = createConversionExpr(binaryExpr, rExpr, rType, lType); binaryExpr.setRExpr(newExpr); return lType; } if (rType.equals(BTypes.typeInt) && lType.equals(BTypes.typeFloat)) { Expression newExpr = createConversionExpr(binaryExpr, rExpr, rType, lType); binaryExpr.setRExpr(newExpr); return lType; } if (lType.equals(BTypes.typeInt) && rType.equals(BTypes.typeFloat)) { Expression newExpr = createConversionExpr(binaryExpr, lExpr, lType, rType); binaryExpr.setLExpr(newExpr); return rType; } throw getInvalidBinaryOpError(binaryExpr); } private Expression createConversionExpr(BinaryExpression binaryExpr, Expression sExpr, BType sType, BType tType) { Expression 
conversionExpr = getImplicitConversionExpr(sExpr, sType, tType); if (conversionExpr != null) { return conversionExpr; } throw getInvalidBinaryOpError(binaryExpr); } private Expression getImplicitConversionExpr(Expression sExpr, BType sType, BType tType) { TypeEdge newEdge; newEdge = TypeLattice.getTransformLattice().getEdgeFromTypes(sType, tType, null); if (newEdge != null) { TypeConversionExpr newExpr = new TypeConversionExpr(sExpr.getNodeLocation(), sExpr.getWhiteSpaceDescriptor(), sExpr, tType); newExpr.setOpcode(newEdge.getOpcode()); newExpr.accept(this); return newExpr; } return null; } private void visitBinaryLogicalExpr(BinaryLogicalExpression expr) { visitBinaryExpr(expr); Expression rExpr = expr.getRExpr(); Expression lExpr = expr.getLExpr(); if (lExpr.getType() == BTypes.typeBoolean && rExpr.getType() == BTypes.typeBoolean) { expr.setType(BTypes.typeBoolean); } else { throwInvalidBinaryOpError(expr); } } private void checkForConstAssignment(AssignStmt assignStmt, Expression lExpr) { if (lExpr instanceof SimpleVarRefExpr && ((SimpleVarRefExpr) lExpr).getVariableDef().getKind() == VariableDef.Kind.CONSTANT) { BLangExceptionHelper.throwSemanticError(assignStmt, SemanticErrors.CANNOT_ASSIGN_VALUE_CONSTANT, ((SimpleVarRefExpr) lExpr).getSymbolName()); } } private void checkForMultiAssignmentErrors(AssignStmt assignStmt, Expression[] lExprs, CallableUnitInvocationExpr rExpr) { BType[] returnTypes = rExpr.getTypes(); if (lExprs.length != returnTypes.length) { BLangExceptionHelper.throwSemanticError(assignStmt, SemanticErrors.ASSIGNMENT_COUNT_MISMATCH, lExprs.length, returnTypes.length); } for (int i = 0; i < lExprs.length; i++) { Expression lExpr = lExprs[i]; if (lExpr instanceof SimpleVarRefExpr) { String varName = ((SimpleVarRefExpr) lExpr).getVarName(); if ("_".equals(varName)) { continue; } } BType lhsType = lExprs[i].getType(); BType rhsType = returnTypes[i]; if (isAssignableTo(lhsType, rhsType)) { continue; } 
BLangExceptionHelper.throwSemanticError(assignStmt, SemanticErrors.INCOMPATIBLE_ASSIGNMENT, rhsType, lExpr.getType()); } } private void checkForMultiValuedCastingErrors(AssignStmt assignStmt, Expression[] lExprs, ExecutableMultiReturnExpr rExpr) { BType[] returnTypes = rExpr.getTypes(); if (lExprs.length != returnTypes.length) { BLangExceptionHelper.throwSemanticError(assignStmt, SemanticErrors.ASSIGNMENT_COUNT_MISMATCH, lExprs.length, returnTypes.length); } for (int i = 0; i < lExprs.length; i++) { Expression lExpr = lExprs[i]; BType returnType = returnTypes[i]; if (lExpr instanceof SimpleVarRefExpr && ((SimpleVarRefExpr) lExpr).getVarName().equals("_")) { continue; } if ((lExpr.getType() != BTypes.typeAny) && (!lExpr.getType().equals(returnType))) { BLangExceptionHelper.throwSemanticError(assignStmt, SemanticErrors.INCOMPATIBLE_TYPES, returnType, lExpr.getType()); } } } private void visitLExprsOfAssignment(AssignStmt assignStmt, Expression[] lExprs) { if (assignStmt.isDeclaredWithVar()) { Set<String> varNameSet = new HashSet<>(); int declaredVarCount = 0; for (Expression expr : lExprs) { if (!(expr instanceof SimpleVarRefExpr)) { throw BLangExceptionHelper.getSemanticError(assignStmt.getNodeLocation(), SemanticErrors.INVALID_VAR_ASSIGNMENT); } SimpleVarRefExpr refExpr = (SimpleVarRefExpr) expr; String varName = refExpr.getVarName(); if (varName.equals("_")) { declaredVarCount++; continue; } if (!varNameSet.add(varName)) { BLangExceptionHelper.throwSemanticError(assignStmt, SemanticErrors.VAR_IS_REPEATED_ON_LEFT_SIDE_ASSIGNMENT, varName); } Identifier identifier = new Identifier(varName); SymbolName symbolName = new SymbolName(identifier.getName()); SimpleVariableDef variableDef = new SimpleVariableDef(refExpr.getNodeLocation(), refExpr.getWhiteSpaceDescriptor(), identifier, null, symbolName, currentScope); variableDef.setKind(VariableDef.Kind.LOCAL_VAR); SymbolName varDefSymName = new SymbolName(variableDef.getName(), currentPkg); BLangSymbol varSymbol = 
currentScope.resolve(varDefSymName); if (varSymbol != null && varSymbol.getSymbolScope().getScopeName() == currentScope.getScopeName()) { declaredVarCount++; continue; } currentScope.define(varDefSymName, variableDef); } if (declaredVarCount == lExprs.length) { throw new SemanticException(BLangExceptionHelper.constructSemanticError( assignStmt.getNodeLocation(), SemanticErrors.NO_NEW_VARIABLES_VAR_ASSIGNMENT)); } } int ignoredVarCount = 0; for (Expression lExpr : lExprs) { if (lExpr instanceof SimpleVarRefExpr && ((SimpleVarRefExpr) lExpr).getVarName().equals("_")) { ignoredVarCount++; continue; } ((VariableReferenceExpr) lExpr).setLHSExpr(true); lExpr.accept(this); checkForConstAssignment(assignStmt, lExpr); } if (ignoredVarCount == lExprs.length) { throw new SemanticException(BLangExceptionHelper.constructSemanticError( assignStmt.getNodeLocation(), SemanticErrors.IGNORED_ASSIGNMENT)); } } private void linkFunction(FunctionInvocationExpr funcIExpr) { String pkgPath = funcIExpr.getPackagePath(); Expression[] exprs = funcIExpr.getArgExprs(); BType[] paramTypes = new BType[exprs.length]; for (int i = 0; i < exprs.length; i++) { paramTypes[i] = exprs[i].getType(); } FunctionSymbolName symbolName = LangModelUtils.getFuncSymNameWithParams(funcIExpr.getName(), pkgPath, paramTypes); BLangSymbol functionSymbol = currentScope.resolve(symbolName); if (functionSymbol instanceof SimpleVariableDef && ((SimpleVariableDef) functionSymbol).getType() instanceof BFunctionType) { SimpleVariableDef variableDef = (SimpleVariableDef) functionSymbol; matchAndUpdateFunctionPointsArgs(funcIExpr, symbolName, (BFunctionType) (variableDef).getType()); funcIExpr.setFunctionPointerInvocation(true); funcIExpr.setFunctionPointerVariableDef(variableDef); return; } functionSymbol = matchAndUpdateArguments(funcIExpr, symbolName, functionSymbol); if (functionSymbol == null) { String funcName = (funcIExpr.getPackageName() != null) ? 
funcIExpr.getPackageName() + ":" + funcIExpr.getName() : funcIExpr.getName(); BLangExceptionHelper.throwSemanticError(funcIExpr, SemanticErrors.UNDEFINED_FUNCTION, funcName); return; } Function function; if (functionSymbol.isNative()) { functionSymbol = ((BallerinaFunction) functionSymbol).getNativeFunction(); NativeUnit nativeUnit = ((NativeUnitProxy) functionSymbol).load(); SimpleTypeName[] returnParamTypeNames = nativeUnit.getReturnParamTypeNames(); BType[] returnTypes = new BType[returnParamTypeNames.length]; for (int i = 0; i < returnParamTypeNames.length; i++) { SimpleTypeName typeName = returnParamTypeNames[i]; BType bType = BTypes.resolveType(typeName, currentScope, funcIExpr.getNodeLocation()); returnTypes[i] = bType; } if (!(nativeUnit instanceof Function)) { BLangExceptionHelper.throwSemanticError(funcIExpr, SemanticErrors.INCOMPATIBLE_TYPES_UNKNOWN_FOUND, symbolName); } function = (Function) nativeUnit; function.setReturnParamTypes(returnTypes); } else { if (!(functionSymbol instanceof Function)) { BLangExceptionHelper.throwSemanticError(funcIExpr, SemanticErrors.INCOMPATIBLE_TYPES_UNKNOWN_FOUND, symbolName); return; } function = (Function) functionSymbol; } funcIExpr.setCallableUnit(function); } private void linkAction(ActionInvocationExpr actionIExpr) { String pkgPath = actionIExpr.getPackagePath(); String connectorName = actionIExpr.getConnectorName(); SymbolName connectorSymbolName = new SymbolName(connectorName, pkgPath); BLangSymbol connectorSymbol = currentScope.resolve(connectorSymbolName); if (connectorSymbol == null) { String connectorWithPkgName = (actionIExpr.getPackageName() != null) ? 
actionIExpr.getPackageName() + ":" + actionIExpr.getConnectorName() : actionIExpr.getConnectorName(); BLangExceptionHelper.throwSemanticError(actionIExpr, SemanticErrors.UNDEFINED_CONNECTOR, connectorWithPkgName); return; } Expression[] exprs = actionIExpr.getArgExprs(); BType[] paramTypes = new BType[exprs.length]; for (int i = 0; i < exprs.length; i++) { paramTypes[i] = exprs[i].getType(); } ActionSymbolName actionSymbolName = LangModelUtils.getActionSymName(actionIExpr.getName(), actionIExpr.getPackagePath(), actionIExpr.getConnectorName(), paramTypes); BLangSymbol actionSymbol = null; if (connectorSymbol instanceof BallerinaConnectorDef) { actionSymbol = ((BallerinaConnectorDef) connectorSymbol).resolveMembers(actionSymbolName); } else { BLangExceptionHelper.throwSemanticError(actionIExpr, SemanticErrors.INCOMPATIBLE_TYPES_CONNECTOR_EXPECTED, connectorSymbolName); } actionSymbol = matchAndUpdateArguments(actionIExpr, actionSymbolName, actionSymbol); if ((actionSymbol instanceof BallerinaAction) && (actionSymbol.isNative())) { actionSymbol = ((BallerinaAction) actionSymbol).getNativeAction(); } if (actionSymbol == null) { BLangExceptionHelper.throwSemanticError(actionIExpr, SemanticErrors.UNDEFINED_ACTION, actionIExpr.getName(), connectorSymbol.getSymbolName()); } Action action = null; if (actionSymbol instanceof NativeUnitProxy) { NativeUnit nativeUnit = ((NativeUnitProxy) actionSymbol).load(); SimpleTypeName[] returnParamTypeNames = nativeUnit.getReturnParamTypeNames(); BType[] returnTypes = new BType[returnParamTypeNames.length]; for (int i = 0; i < returnParamTypeNames.length; i++) { SimpleTypeName typeName = returnParamTypeNames[i]; BType bType = BTypes.resolveType(typeName, currentScope, actionIExpr.getNodeLocation()); returnTypes[i] = bType; } if (!(nativeUnit instanceof Action)) { BLangExceptionHelper.throwSemanticError(actionIExpr, SemanticErrors.INCOMPATIBLE_TYPES_UNKNOWN_FOUND, actionSymbolName); } action = (Action) nativeUnit; 
action.setReturnParamTypes(returnTypes);
        // (tail of linkAction: close of the NativeUnitProxy branch, then the
        // non-native cases, then final wiring of the resolved action)
        } else if (actionSymbol instanceof Action) {
            action = (Action) actionSymbol;
        } else {
            // Neither a native proxy nor a Ballerina-defined action was resolved.
            BLangExceptionHelper.throwSemanticError(actionIExpr,
                    SemanticErrors.INCOMPATIBLE_TYPES_UNKNOWN_FOUND, actionSymbolName);
        }
        actionIExpr.setCallableUnit(action);
    }

    /**
     * Helper method to match the callable unit with invocation (check whether parameters map,
     * performing implicit casts/conversions on the arguments where applicable).
     *
     * @param callableIExpr  invocation expression
     * @param symbolName     callable symbol name
     * @param callableSymbol matching symbol
     * @return {@code callableSymbol} when every argument is assignable (possibly after wrapping
     *         it in an implicit cast expression); {@code null} when no match is possible
     */
    private BLangSymbol matchAndUpdateArguments(AbstractExpression callableIExpr,
                                                CallableUnitSymbolName symbolName,
                                                BLangSymbol callableSymbol) {
        if (callableSymbol == null) {
            return null;
        }
        Expression[] argExprs = ((CallableUnitInvocationExpr) callableIExpr).getArgExprs();
        Expression[] updatedArgExprs = new Expression[argExprs.length];
        CallableUnitSymbolName funcSymName = (CallableUnitSymbolName) callableSymbol.getSymbolName();
        if (!funcSymName.isNameAndParamCountMatch(symbolName)) {
            return null;
        }
        boolean implicitCastPossible = true;
        if (callableSymbol instanceof NativeUnitProxy) {
            // Native unit: parameter types must be resolved from declared type names.
            NativeUnit nativeUnit = ((NativeUnitProxy) callableSymbol).load();
            for (int i = 0; i < argExprs.length; i++) {
                Expression argExpr = argExprs[i];
                updatedArgExprs[i] = argExpr;
                SimpleTypeName simpleTypeName = nativeUnit.getArgumentTypeNames()[i];
                BType lhsType = BTypes.resolveType(simpleTypeName, currentScope, callableIExpr.getNodeLocation());
                AssignabilityResult result = performAssignabilityCheck(lhsType, argExpr);
                if (result.expression != null) {
                    // Assignable via an implicit cast: use the wrapped expression.
                    updatedArgExprs[i] = result.expression;
                } else if (!result.assignable) {
                    implicitCastPossible = false;
                    break;
                }
            }
        } else {
            // Ballerina-defined unit: parameter types come directly from parameter defs.
            for (int i = 0; i < argExprs.length; i++) {
                Expression argExpr = argExprs[i];
                updatedArgExprs[i] = argExpr;
                BType lhsType = ((CallableUnit) callableSymbol).getParameterDefs()[i].getType();
                AssignabilityResult result = performAssignabilityCheck(lhsType, argExpr);
                if (result.expression != null) {
                    updatedArgExprs[i] = result.expression;
                } else if (!result.assignable) {
                    implicitCastPossible = false;
                    break;
                }
            }
        }
        if (!implicitCastPossible) {
            return null;
        }
        // Write any cast-wrapped arguments back into the invocation expression.
        for (int i = 0; i < updatedArgExprs.length; i++) {
            ((CallableUnitInvocationExpr) callableIExpr).getArgExprs()[i] = updatedArgExprs[i];
        }
        return callableSymbol;
    }

    /**
     * Checks a function-pointer invocation against the pointed-to function type and
     * rewrites arguments with implicit casts where needed.
     *
     * @param funcIExpr     the function invocation expression
     * @param symbolName    symbol name carrying the invocation's parameter count
     * @param bFunctionType the function type the pointer variable holds
     */
    private void matchAndUpdateFunctionPointsArgs(FunctionInvocationExpr funcIExpr,
                                                  CallableUnitSymbolName symbolName,
                                                  BFunctionType bFunctionType) {
        if (symbolName.getNoOfParameters() != bFunctionType.getParameterType().length) {
            // Arity mismatch between call site and function type.
            BLangExceptionHelper.throwSemanticError(funcIExpr,
                    SemanticErrors.INCORRECT_FUNCTION_ARGUMENTS, funcIExpr.getName());
        }
        Expression[] argExprs = funcIExpr.getArgExprs();
        Expression[] updatedArgExprs = new Expression[argExprs.length];
        for (int i = 0; i < argExprs.length; i++) {
            Expression argExpr = argExprs[i];
            updatedArgExprs[i] = argExpr;
            BType lhsType = bFunctionType.getParameterType()[i];
            AssignabilityResult result = performAssignabilityCheck(lhsType, argExpr);
            if (result.expression != null) {
                updatedArgExprs[i] = result.expression;
            } else if (!result.assignable) {
                // Unlike matchAndUpdateArguments, a mismatch here is an immediate error.
                BLangExceptionHelper.throwSemanticError(funcIExpr,
                        SemanticErrors.INCORRECT_FUNCTION_ARGUMENTS, funcIExpr.getName());
            }
        }
        for (int i = 0; i < updatedArgExprs.length; i++) {
            funcIExpr.getArgExprs()[i] = updatedArgExprs[i];
        }
    }

    /**
     * Resolves a worker invocation to the worker defined in the current scope,
     * failing with a linker error when no such worker exists.
     *
     * @param workerInvocationStmt the worker invocation statement to link
     */
    private void linkWorker(WorkerInvocationStmt workerInvocationStmt) {
        String workerName = workerInvocationStmt.getCallableUnitName();
        SymbolName workerSymbolName = new SymbolName(workerName);
        Worker worker = (Worker) currentScope.resolve(workerSymbolName);
        if (worker == null) {
            throw new LinkerException(workerInvocationStmt.getNodeLocation().getFileName() + ":" +
                    workerInvocationStmt.getNodeLocation().getLineNumber() +
                    ": undefined worker '" + workerInvocationStmt.getCallableUnitName() + "'");
        }
        workerInvocationStmt.setCallableUnit(worker);
    }

    // Signature continues on the next source line.
    private void throwInvalidBinaryOpError(BinaryExpression
binaryExpr) {
        // (continuation of: private void throwInvalidBinaryOpError(BinaryExpression binaryExpr))
        // Reports a semantic error for a binary expression whose operand types do not
        // support the operator; the message depends on whether both operands share a type.
        BType lExprType = binaryExpr.getLExpr().getType();
        BType rExprType = binaryExpr.getRExpr().getType();
        if (lExprType == rExprType) {
            // Same type on both sides: the operator itself is undefined for that type.
            BLangExceptionHelper.throwSemanticError(binaryExpr,
                    SemanticErrors.INVALID_OPERATION_OPERATOR_NOT_DEFINED, binaryExpr.getOperator(), lExprType);
        } else {
            // Mixed operand types: report the incompatibility instead.
            BLangExceptionHelper.throwSemanticError(binaryExpr,
                    SemanticErrors.INVALID_OPERATION_INCOMPATIBLE_TYPES, lExprType, rExprType);
        }
    }

    /**
     * Builds (but does not throw) the semantic error for an invalid binary operation,
     * mirroring the message selection of {@code throwInvalidBinaryOpError}.
     *
     * @param binaryExpr the offending binary expression
     * @return the constructed {@code SemanticException}
     */
    private SemanticException getInvalidBinaryOpError(BinaryExpression binaryExpr) {
        BType lExprType = binaryExpr.getLExpr().getType();
        BType rExprType = binaryExpr.getRExpr().getType();
        if (lExprType == rExprType) {
            return BLangExceptionHelper.getSemanticError(binaryExpr.getNodeLocation(),
                    SemanticErrors.INVALID_OPERATION_OPERATOR_NOT_DEFINED, binaryExpr.getOperator(), lExprType);
        } else {
            return BLangExceptionHelper.getSemanticError(binaryExpr.getNodeLocation(),
                    SemanticErrors.INVALID_OPERATION_INCOMPATIBLE_TYPES, lExprType, rExprType);
        }
    }

    /**
     * Throws a semantic error for a unary expression whose operand type does not
     * define the operator.
     *
     * @param unaryExpr the offending unary expression
     */
    private void throwInvalidUnaryOpError(UnaryExpression unaryExpr) {
        BType rExprType = unaryExpr.getRExpr().getType();
        BLangExceptionHelper.throwSemanticError(unaryExpr,
                SemanticErrors.INVALID_OPERATION_OPERATOR_NOT_DEFINED, unaryExpr.getOperator(), rExprType);
    }

    /**
     * Checks whether the RHS expression can be implicitly widened to the LHS type.
     *
     * @param lhsType target type of the assignment
     * @param rhsExpr expression being assigned
     * @return a {@code TypeCastExpression} wrapping {@code rhsExpr} when the implicit-cast
     *         lattice has an edge from the RHS type to {@code lhsType}; {@code null} otherwise
     */
    private TypeCastExpression checkWideningPossible(BType lhsType, Expression rhsExpr) {
        TypeCastExpression typeCastExpr = null;
        BType rhsType = rhsExpr.getType();
        // Consult the implicit-cast lattice for an edge rhsType -> lhsType.
        TypeEdge typeEdge = TypeLattice.getImplicitCastLattice().getEdgeFromTypes(rhsType, lhsType, null);
        if (typeEdge != null) {
            typeCastExpr = new TypeCastExpression(rhsExpr.getNodeLocation(),
                    rhsExpr.getWhiteSpaceDescriptor(), rhsExpr, lhsType);
            typeCastExpr.setOpcode(typeEdge.getOpcode());
        }
        return typeCastExpr;
    }

    /**
     * Defines the given workers in the symbol scope of their enclosing callable unit,
     * rejecting duplicate worker names.
     *
     * @param workers      workers declared inside the callable unit
     * @param callableUnit the enclosing callable unit owning the scope
     */
    private void defineWorkers(Worker[] workers, CallableUnit callableUnit) {
        for (Worker worker : workers) {
            SymbolName symbolName = new SymbolName(worker.getName(), null);
            worker.setSymbolName(symbolName);
            BLangSymbol workerSymbol = callableUnit.getSymbolScope().resolve(symbolName);
            if (workerSymbol != null) {
                // A worker with the same name already exists in this callable unit.
                BLangExceptionHelper.throwSemanticError(worker, SemanticErrors.REDECLARED_SYMBOL, worker.getName());
            }
            callableUnit.getSymbolScope().define(symbolName, worker);
        }
    }

    /**
     * Defines functions in the current scope: resolves parameter and return types,
     * builds the parameter-typed symbol name, links native functions to their
     * registered native counterparts, and defines any declared workers.
     *
     * @param functions functions to define
     */
    private void defineFunctions(Function[] functions) {
        for (Function function : functions) {
            // Resolve parameter types first; they are part of the function's symbol name.
            ParameterDef[] paramDefArray = function.getParameterDefs();
            BType[] paramTypes = new BType[paramDefArray.length];
            for (int i = 0; i < paramDefArray.length; i++) {
                ParameterDef paramDef = paramDefArray[i];
                BType bType = BTypes.resolveType(paramDef.getTypeName(), currentScope, paramDef.getNodeLocation());
                paramDef.setType(bType);
                paramTypes[i] = bType;
            }
            function.setParameterTypes(paramTypes);
            FunctionSymbolName symbolName = LangModelUtils.getFuncSymNameWithParams(function.getName(),
                    function.getPackagePath(), paramTypes);
            function.setSymbolName(symbolName);
            BLangSymbol functionSymbol = currentScope.resolve(symbolName);
            if (!function.isNative() && functionSymbol != null) {
                // Non-native function clashes with an already-defined symbol.
                BLangExceptionHelper.throwSemanticError(function,
                        SemanticErrors.REDECLARED_SYMBOL, function.getName());
            }
            if (function.isNative() && functionSymbol == null) {
                // Native functions must have a counterpart registered in the native scope.
                functionSymbol = nativeScope.resolve(symbolName);
                if (functionSymbol == null) {
                    BLangExceptionHelper.throwSemanticError(function,
                            SemanticErrors.UNDEFINED_FUNCTION, function.getName());
                }
                if (function instanceof BallerinaFunction) {
                    ((BallerinaFunction) function).setNativeFunction((NativeUnitProxy) functionSymbol);
                }
            }
            currentScope.define(symbolName, function);
            // Resolve return types after the function symbol itself is defined.
            ParameterDef[] returnParameters = function.getReturnParameters();
            BType[] returnTypes = new BType[returnParameters.length];
            for (int i = 0; i < returnParameters.length; i++) {
                ParameterDef paramDef = returnParameters[i];
                BType bType = BTypes.resolveType(paramDef.getTypeName(), currentScope, paramDef.getNodeLocation());
                paramDef.setType(bType);
                returnTypes[i] = bType;
            }
            function.setReturnParamTypes(returnTypes);
            if (function.getWorkers().length > 0) {
                defineWorkers(function.getWorkers(), function);
            }
        }
    }
private void defineConnectors(BallerinaConnectorDef[] connectorDefArray) { for (BallerinaConnectorDef connectorDef : connectorDefArray) { String connectorName = connectorDef.getName(); SymbolName connectorSymbolName = new SymbolName(connectorName, connectorDef.getPackagePath()); BLangSymbol connectorSymbol = currentScope.resolve(connectorSymbolName); if (connectorSymbol != null) { BLangExceptionHelper.throwSemanticError(connectorDef, SemanticErrors.REDECLARED_SYMBOL, connectorName); } currentScope.define(connectorSymbolName, connectorDef); BLangSymbol actionSymbol; SymbolName name = new SymbolName("NativeAction." + connectorName + ".<init>", connectorDef.getPackagePath()); actionSymbol = nativeScope.resolve(name); if (actionSymbol != null) { if (actionSymbol instanceof NativeUnitProxy) { AbstractNativeAction nativeUnit = (AbstractNativeAction) ((NativeUnitProxy) actionSymbol).load(); BallerinaAction.BallerinaActionBuilder ballerinaActionBuilder = new BallerinaAction .BallerinaActionBuilder(connectorDef); ballerinaActionBuilder.setIdentifier(nativeUnit.getIdentifier()); ballerinaActionBuilder.setPkgPath(nativeUnit.getPackagePath()); ballerinaActionBuilder.setNative(nativeUnit.isNative()); ballerinaActionBuilder.setSymbolName(nativeUnit.getSymbolName()); ParameterDef paramDef = new ParameterDef(connectorDef.getNodeLocation(), null, new Identifier(nativeUnit.getArgumentNames()[0]), nativeUnit.getArgumentTypeNames()[0], new SymbolName(nativeUnit.getArgumentNames()[0], connectorDef.getPackagePath()), ballerinaActionBuilder.getCurrentScope()); paramDef.setType(connectorDef); ballerinaActionBuilder.addParameter(paramDef); BallerinaAction ballerinaAction = ballerinaActionBuilder.buildAction(); ballerinaAction.setNativeAction((NativeUnitProxy) actionSymbol); ballerinaAction.setConnectorDef(connectorDef); BType bType = BTypes.resolveType(paramDef.getTypeName(), currentScope, paramDef.getNodeLocation()); ballerinaAction.setParameterTypes(new BType[]{bType}); 
connectorDef.setInitAction(ballerinaAction); } } } for (BallerinaConnectorDef connectorDef : connectorDefArray) { openScope(connectorDef); for (BallerinaAction bAction : connectorDef.getActions()) { bAction.setConnectorDef(connectorDef); defineAction(bAction, connectorDef); } closeScope(); } } private void defineAction(BallerinaAction action, BallerinaConnectorDef connectorDef) { ParameterDef[] updatedParamDefs = new ParameterDef[action.getParameterDefs().length + 1]; ParameterDef connectorParamDef = new ParameterDef(connectorDef.getNodeLocation(), null, new Identifier(TypeConstants.CONNECTOR_TNAME), new SimpleTypeName(connectorDef.getName(), null, connectorDef.getPackagePath()), new SymbolName(TypeConstants.CONNECTOR_TNAME, connectorDef.getPackagePath()), action.getSymbolScope()); connectorParamDef.setType(connectorDef); updatedParamDefs[0] = connectorParamDef; for (int i = 0; i < action.getParameterDefs().length; i++) { updatedParamDefs[i + 1] = action.getParameterDefs()[i]; } action.setParameterDefs(updatedParamDefs); ParameterDef[] paramDefArray = action.getParameterDefs(); BType[] paramTypes = new BType[paramDefArray.length]; for (int i = 0; i < paramDefArray.length; i++) { ParameterDef paramDef = paramDefArray[i]; BType bType = BTypes.resolveType(paramDef.getTypeName(), currentScope, paramDef.getNodeLocation()); paramDef.setType(bType); paramTypes[i] = bType; } action.setParameterTypes(paramTypes); ActionSymbolName symbolName = LangModelUtils.getActionSymName(action.getName(), action.getPackagePath(), connectorDef.getName(), paramTypes); action.setSymbolName(symbolName); BLangSymbol actionSymbol = currentScope.resolve(symbolName); if (actionSymbol != null) { BLangExceptionHelper.throwSemanticError(action, SemanticErrors.REDECLARED_SYMBOL, action.getName()); } currentScope.define(symbolName, action); if (action.isNative()) { ActionSymbolName nativeActionSymName = LangModelUtils.getNativeActionSymName(action.getName(), connectorDef.getName(), 
action.getPackagePath(), paramTypes); BLangSymbol nativeAction = nativeScope.resolve(nativeActionSymName); if (nativeAction == null || !(nativeAction instanceof NativeUnitProxy)) { BLangExceptionHelper.throwSemanticError(connectorDef, SemanticErrors.UNDEFINED_NATIVE_ACTION, action.getName(), connectorDef.getName()); return; } action.setNativeAction((NativeUnitProxy) nativeAction); } ParameterDef[] returnParameters = action.getReturnParameters(); BType[] returnTypes = new BType[returnParameters.length]; for (int i = 0; i < returnParameters.length; i++) { ParameterDef paramDef = returnParameters[i]; BType bType = BTypes.resolveType(paramDef.getTypeName(), currentScope, paramDef.getNodeLocation()); paramDef.setType(bType); returnTypes[i] = bType; } action.setReturnParamTypes(returnTypes); if (action.getWorkers().length > 0) { defineWorkers(action.getWorkers(), action); } } private void defineServices(Service[] services) { for (Service service : services) { if (currentScope.resolve(service.getSymbolName()) != null) { BLangExceptionHelper.throwSemanticError(service, SemanticErrors.REDECLARED_SYMBOL, service.getName()); } currentScope.define(service.getSymbolName(), service); openScope(service); for (Resource resource : service.getResources()) { defineResource(resource, service); } closeScope(); } } private void defineResource(Resource resource, Service service) { ParameterDef[] paramDefArray = resource.getParameterDefs(); BType[] paramTypes = new BType[paramDefArray.length]; for (int i = 0; i < paramDefArray.length; i++) { ParameterDef paramDef = paramDefArray[i]; BType bType = BTypes.resolveType(paramDef.getTypeName(), currentScope, paramDef.getNodeLocation()); paramDef.setType(bType); paramTypes[i] = bType; } resource.setParameterTypes(paramTypes); SymbolName symbolName = LangModelUtils.getResourceSymName(resource.getName(), resource.getPackagePath(), service.getName()); resource.setSymbolName(symbolName); if (currentScope.resolve(symbolName) != null) { 
BLangExceptionHelper.throwSemanticError(resource, SemanticErrors.REDECLARED_SYMBOL, resource.getName()); } currentScope.define(symbolName, resource); if (resource.getWorkers().length > 0) { defineWorkers(resource.getWorkers(), resource); } } private void defineStructs(StructDef[] structDefs) { for (StructDef structDef : structDefs) { SymbolName symbolName = new SymbolName(structDef.getName(), structDef.getPackagePath()); if (currentScope.resolve(symbolName) != null) { BLangExceptionHelper.throwSemanticError(structDef, SemanticErrors.REDECLARED_SYMBOL, structDef.getName()); } currentScope.define(symbolName, structDef); BlockStmt.BlockStmtBuilder blockStmtBuilder = new BlockStmt.BlockStmtBuilder( structDef.getNodeLocation(), structDef); for (VariableDefStmt variableDefStmt : structDef.getFieldDefStmts()) { blockStmtBuilder.addStmt(variableDefStmt); } BallerinaFunction.BallerinaFunctionBuilder functionBuilder = new BallerinaFunction.BallerinaFunctionBuilder(structDef); functionBuilder.setNodeLocation(structDef.getNodeLocation()); functionBuilder.setIdentifier(new Identifier(structDef + ".<init>")); functionBuilder.setPkgPath(structDef.getPackagePath()); blockStmtBuilder.setBlockKind(StatementKind.CALLABLE_UNIT_BLOCK); functionBuilder.setBody(blockStmtBuilder.build()); structDef.setInitFunction(functionBuilder.buildFunction()); } for (StructDef structDef : structDefs) { SymbolScope tmpScope = currentScope; currentScope = structDef; for (VariableDefStmt fieldDefStmt : structDef.getFieldDefStmts()) { fieldDefStmt.getVariableDef().setKind(VariableDef.Kind.STRUCT_FIELD); fieldDefStmt.accept(this); } currentScope = tmpScope; } for (StructDef structDef : structDefs) { TypeLattice.addStructEdges(structDef, currentScope); } } /** * Add the annotation definitions to the current scope. 
* * @param annotationDefs Annotations definitions list */ private void defineAnnotations(AnnotationDef[] annotationDefs) { for (AnnotationDef annotationDef : annotationDefs) { SymbolName symbolName = new SymbolName(annotationDef.getName(), currentPkg); if (currentScope.resolve(symbolName) != null) { BLangExceptionHelper.throwSemanticError(annotationDef, SemanticErrors.REDECLARED_SYMBOL, annotationDef.getSymbolName().getName()); } currentScope.define(symbolName, annotationDef); } } /** * Create the '<init>' function and inject it to the connector. * * @param connectorDef connector model object */ private void createConnectorInitFunction(BallerinaConnectorDef connectorDef) { NodeLocation location = connectorDef.getNodeLocation(); BallerinaFunction.BallerinaFunctionBuilder functionBuilder = new BallerinaFunction.BallerinaFunctionBuilder(connectorDef); functionBuilder.setNodeLocation(location); functionBuilder.setIdentifier(new Identifier(connectorDef.getName() + ".<init>")); functionBuilder.setPkgPath(connectorDef.getPackagePath()); ParameterDef paramDef = new ParameterDef(location, null, new Identifier("connector"), null, new SymbolName("connector"), functionBuilder.getCurrentScope()); paramDef.setType(connectorDef); functionBuilder.addParameter(paramDef); BlockStmt.BlockStmtBuilder blockStmtBuilder = new BlockStmt.BlockStmtBuilder(location, connectorDef); for (VariableDefStmt variableDefStmt : connectorDef.getVariableDefStmts()) { AssignStmt assignStmt = new AssignStmt(variableDefStmt.getNodeLocation(), new Expression[]{variableDefStmt.getLExpr()}, variableDefStmt.getRExpr()); blockStmtBuilder.addStmt(assignStmt); } ReturnStmt returnStmt = new ReturnStmt(location, null, new Expression[0]); blockStmtBuilder.addStmt(returnStmt); blockStmtBuilder.setBlockKind(StatementKind.CALLABLE_UNIT_BLOCK); functionBuilder.setBody(blockStmtBuilder.build()); connectorDef.setInitFunction(functionBuilder.buildFunction()); } /** * Create the '<init>' function and inject it to the 
service. * * @param service service model object */ private void createServiceInitFunction(Service service) { NodeLocation location = service.getNodeLocation(); BallerinaFunction.BallerinaFunctionBuilder functionBuilder = new BallerinaFunction.BallerinaFunctionBuilder(service); functionBuilder.setNodeLocation(location); functionBuilder.setIdentifier(new Identifier(service.getName() + ".<init>")); functionBuilder.setPkgPath(service.getPackagePath()); BlockStmt.BlockStmtBuilder blockStmtBuilder = new BlockStmt.BlockStmtBuilder(location, service); for (VariableDefStmt variableDefStmt : service.getVariableDefStmts()) { AssignStmt assignStmt = new AssignStmt(variableDefStmt.getNodeLocation(), new Expression[]{variableDefStmt.getLExpr()}, variableDefStmt.getRExpr()); blockStmtBuilder.addStmt(assignStmt); } ReturnStmt returnStmt = new ReturnStmt(location, null, new Expression[0]); blockStmtBuilder.addStmt(returnStmt); blockStmtBuilder.setBlockKind(StatementKind.CALLABLE_UNIT_BLOCK); functionBuilder.setBody(blockStmtBuilder.build()); service.setInitFunction(functionBuilder.buildFunction()); } private void resolveStructFieldTypes(StructDef[] structDefs) { for (StructDef structDef : structDefs) { for (VariableDefStmt fieldDefStmt : structDef.getFieldDefStmts()) { VariableDef fieldDef = fieldDefStmt.getVariableDef(); BType fieldType = BTypes.resolveType(fieldDef.getTypeName(), currentScope, fieldDef.getNodeLocation()); fieldDef.setType(fieldType); } } } private void checkUnreachableStmt(Statement[] stmts, int stmtIndex) { if (stmts.length > stmtIndex) { if (stmts[stmtIndex] instanceof CommentStmt) { checkUnreachableStmt(stmts, ++stmtIndex); } else { BLangExceptionHelper.throwSemanticError(stmts[stmtIndex], SemanticErrors.UNREACHABLE_STATEMENT); } } } /** * Recursively visits a nested init expression. Reconstruct the init expression with the * specific init expression type, and replaces the generic {@link RefTypeInitExpr}. 
* * @param fieldType Type of the current field * @return reconstructed nested init expression */ private RefTypeInitExpr getNestedInitExpr(Expression expr, BType fieldType) { RefTypeInitExpr refTypeInitExpr = (RefTypeInitExpr) expr; if (refTypeInitExpr instanceof ArrayInitExpr) { if (fieldType == BTypes.typeAny || fieldType == BTypes.typeMap) { fieldType = BTypes.resolveType(new SimpleTypeName(BTypes.typeAny.getName(), true, 1), currentScope, expr.getNodeLocation()); } else if (getElementType(fieldType) == BTypes.typeJSON) { refTypeInitExpr = new JSONArrayInitExpr(refTypeInitExpr.getNodeLocation(), refTypeInitExpr.getWhiteSpaceDescriptor(), refTypeInitExpr.getArgExprs()); } } else { if (fieldType == BTypes.typeAny) { fieldType = BTypes.typeMap; } if (fieldType == BTypes.typeMap) { refTypeInitExpr = new MapInitExpr(refTypeInitExpr.getNodeLocation(), refTypeInitExpr.getWhiteSpaceDescriptor(), refTypeInitExpr.getArgExprs()); } else if (fieldType == BTypes.typeJSON || fieldType instanceof BJSONConstraintType) { refTypeInitExpr = new JSONInitExpr(refTypeInitExpr.getNodeLocation(), refTypeInitExpr.getWhiteSpaceDescriptor(), refTypeInitExpr.getArgExprs()); } else if (fieldType instanceof StructDef) { refTypeInitExpr = new StructInitExpr(refTypeInitExpr.getNodeLocation(), refTypeInitExpr.getWhiteSpaceDescriptor(), refTypeInitExpr.getArgExprs()); } if (refTypeInitExpr instanceof ConnectorInitExpr) { ConnectorInitExpr filterConnectorInitExpr = ((ConnectorInitExpr) refTypeInitExpr). getParentConnectorInitExpr(); BType type = null; while (filterConnectorInitExpr != null) { BLangSymbol symbol = currentPackageScope.resolve(new SymbolName(filterConnectorInitExpr. getTypeName().getName(), currentPkg)); if (symbol instanceof BallerinaConnectorDef) { type = (BType) symbol; filterConnectorInitExpr.setInheritedType(type); type = BTypes.resolveType(((BallerinaConnectorDef) symbol). 
getFilterSupportedType(), currentScope, refTypeInitExpr.getNodeLocation()); if (type != null) { filterConnectorInitExpr.setFilterSupportedType(type); } } filterConnectorInitExpr = (filterConnectorInitExpr). getParentConnectorInitExpr(); } } } refTypeInitExpr.setInheritedType(fieldType); return refTypeInitExpr; } private BType getElementType(BType type) { if (type.getTag() != TypeTags.ARRAY_TAG) { return type; } return getElementType(((BArrayType) type).getElementType()); } /** * Visit and validate map/json initialize expression. * * @param initExpr Expression to visit. */ private void visitMapJsonInitExpr(RefTypeInitExpr initExpr) { BType inheritedType = initExpr.getInheritedType(); initExpr.setType(inheritedType); Expression[] argExprs = initExpr.getArgExprs(); for (int i = 0; i < argExprs.length; i++) { Expression argExpr = argExprs[i]; KeyValueExpr keyValueExpr = (KeyValueExpr) argExpr; Expression keyExpr = keyValueExpr.getKeyExpr(); if (keyExpr instanceof SimpleVarRefExpr) { BString key = new BString(((SimpleVarRefExpr) keyExpr).getVarName()); keyExpr = new BasicLiteral(keyExpr.getNodeLocation(), keyExpr.getWhiteSpaceDescriptor(), new SimpleTypeName(TypeConstants.STRING_TNAME), key); keyValueExpr.setKeyExpr(keyExpr); } visitSingleValueExpr(keyExpr); Expression valueExpr = keyValueExpr.getValueExpr(); if (inheritedType instanceof BJSONConstraintType) { String key = ((BasicLiteral) keyExpr).getBValue().stringValue(); StructDef constraintStructDef = (StructDef) ((BJSONConstraintType) inheritedType).getConstraint(); if (constraintStructDef != null) { BLangSymbol varDefSymbol = constraintStructDef.resolveMembers( new SymbolName(key, constraintStructDef.getPackagePath())); if (varDefSymbol == null) { throw BLangExceptionHelper.getSemanticError(keyExpr.getNodeLocation(), SemanticErrors.UNKNOWN_FIELD_IN_JSON_STRUCT, key, constraintStructDef.getName()); } VariableDef varDef = (VariableDef) varDefSymbol; BType cJSONFieldType = new BJSONConstraintType(varDef.getType()); 
if (valueExpr instanceof RefTypeInitExpr) { valueExpr = getNestedInitExpr(valueExpr, cJSONFieldType); keyValueExpr.setValueExpr(valueExpr); } } } else { if (valueExpr instanceof RefTypeInitExpr) { valueExpr = getNestedInitExpr(valueExpr, inheritedType); keyValueExpr.setValueExpr(valueExpr); } } valueExpr.accept(this); BType valueExprType = valueExpr.getType(); if (inheritedType == BTypes.typeMap) { if (BTypes.isValueType(valueExprType)) { TypeCastExpression newExpr = checkWideningPossible(BTypes.typeAny, valueExpr); if (newExpr != null) { keyValueExpr.setValueExpr(newExpr); } else { BLangExceptionHelper.throwSemanticError(keyValueExpr, SemanticErrors.INCOMPATIBLE_TYPES_CANNOT_CONVERT, valueExprType.getSymbolName(), inheritedType); } } continue; } if (BTypes.isValueType(valueExprType)) { TypeCastExpression typeCastExpr = checkWideningPossible(BTypes.typeJSON, valueExpr); if (typeCastExpr != null) { keyValueExpr.setValueExpr(typeCastExpr); } else { BLangExceptionHelper.throwSemanticError(keyValueExpr, SemanticErrors.INCOMPATIBLE_TYPES_CANNOT_CONVERT, valueExprType.getSymbolName(), inheritedType.getSymbolName()); } continue; } if (valueExprType != BTypes.typeNull && isAssignableTo(BTypes.typeJSON, valueExprType)) { continue; } TypeCastExpression typeCastExpr = checkWideningPossible(BTypes.typeJSON, valueExpr); if (typeCastExpr == null) { BLangExceptionHelper.throwSemanticError(initExpr, SemanticErrors.INCOMPATIBLE_TYPES_CANNOT_CONVERT, valueExpr.getType(), BTypes.typeJSON); } keyValueExpr.setValueExpr(typeCastExpr); } } private void addDependentPkgInitCalls(List<BallerinaFunction> initFunctionList, BlockStmt.BlockStmtBuilder blockStmtBuilder, NodeLocation initFuncLocation) { for (BallerinaFunction initFunc : initFunctionList) { FunctionInvocationExpr funcIExpr = new FunctionInvocationExpr(initFuncLocation, null, initFunc.getName(), null, initFunc.getPackagePath(), new Expression[]{}); funcIExpr.setCallableUnit(initFunc); FunctionInvocationStmt funcIStmt = new 
FunctionInvocationStmt(initFuncLocation, funcIExpr); blockStmtBuilder.addStmt(funcIStmt); } } private boolean isAssignableTo(BType lhsType, BType rhsType) { if (lhsType == BTypes.typeAny) { return true; } if (rhsType == BTypes.typeNull && !BTypes.isValueType(lhsType)) { return true; } if (lhsType == BTypes.typeJSON && rhsType.getTag() == TypeTags.C_JSON_TAG) { return true; } return lhsType == rhsType || lhsType.equals(rhsType); } private boolean checkUnsafeCastPossible(BType sourceType, BType targetType) { if (sourceType == BTypes.typeAny || targetType == BTypes.typeAny) { return true; } if (sourceType instanceof StructDef && targetType instanceof StructDef) { return true; } if (targetType.getTag() == TypeTags.ARRAY_TAG || sourceType.getTag() == TypeTags.ARRAY_TAG) { return isUnsafeArrayCastPossible(sourceType, targetType); } if (sourceType.getTag() == TypeTags.JSON_TAG && targetType.getTag() == TypeTags.C_JSON_TAG) { return true; } return false; } private boolean isUnsafeArrayCastPossible(BType sourceType, BType targetType) { if (targetType.getTag() == TypeTags.ARRAY_TAG && sourceType.getTag() == TypeTags.ARRAY_TAG) { BArrayType sourceArrayType = (BArrayType) sourceType; BArrayType targetArrayType = (BArrayType) targetType; return isUnsafeArrayCastPossible(sourceArrayType.getElementType(), targetArrayType.getElementType()); } else if (targetType.getTag() == TypeTags.ARRAY_TAG) { if (sourceType == BTypes.typeJSON) { return isUnsafeArrayCastPossible(BTypes.typeJSON, ((BArrayType) targetType).getElementType()); } return sourceType == BTypes.typeAny; } else if (sourceType.getTag() == TypeTags.ARRAY_TAG) { if (targetType == BTypes.typeJSON) { return isUnsafeArrayCastPossible(((BArrayType) sourceType).getElementType(), BTypes.typeJSON); } return targetType == BTypes.typeAny; } if (sourceType == targetType) { return true; } if (targetType == BTypes.typeAny && !BTypes.isValueType(sourceType)) { return true; } return !BTypes.isValueType(targetType) && sourceType == 
BTypes.typeAny; } private AssignabilityResult performAssignabilityCheck(BType lhsType, Expression rhsExpr) { AssignabilityResult assignabilityResult = new AssignabilityResult(); BType rhsType = rhsExpr.getType(); if (lhsType == rhsType) { assignabilityResult.assignable = true; return assignabilityResult; } if (rhsType == BTypes.typeNull && !BTypes.isValueType(lhsType)) { assignabilityResult.assignable = true; return assignabilityResult; } if ((rhsType instanceof BJSONConstraintType) && (lhsType == BTypes.typeJSON)) { assignabilityResult.assignable = true; return assignabilityResult; } if ((rhsType instanceof BJSONConstraintType) && (lhsType instanceof BJSONConstraintType)) { if (((BJSONConstraintType) lhsType).getConstraint() == ((BJSONConstraintType) rhsType).getConstraint()) { assignabilityResult.assignable = true; return assignabilityResult; } } TypeCastExpression implicitCastExpr = checkWideningPossible(lhsType, rhsExpr); if (implicitCastExpr != null) { assignabilityResult.assignable = true; assignabilityResult.expression = implicitCastExpr; return assignabilityResult; } if (isImplicitiCastPossible(lhsType, rhsType)) { implicitCastExpr = new TypeCastExpression(rhsExpr.getNodeLocation(), null, rhsExpr, lhsType); implicitCastExpr.setOpcode(InstructionCodes.NOP); assignabilityResult.assignable = true; assignabilityResult.expression = implicitCastExpr; return assignabilityResult; } if (lhsType == BTypes.typeFloat && rhsType == BTypes.typeInt && rhsExpr instanceof BasicLiteral) { BasicLiteral newExpr = new BasicLiteral(rhsExpr.getNodeLocation(), rhsExpr.getWhiteSpaceDescriptor(), new SimpleTypeName(TypeConstants.FLOAT_TNAME), new BFloat(((BasicLiteral) rhsExpr) .getBValue().intValue())); visitSingleValueExpr(newExpr); assignabilityResult.assignable = true; assignabilityResult.expression = newExpr; return assignabilityResult; } if (rhsType instanceof BFunctionType && lhsType instanceof BFunctionType) { BFunctionType rhs = (BFunctionType) rhsType; BFunctionType lhs = 
(BFunctionType) lhsType; if (rhs.getParameterType().length == lhs.getParameterType().length && rhs.getReturnParameterType().length == lhs.getReturnParameterType().length) { for (int i = 0; i < rhs.getParameterType().length; i++) { if (!isAssignableTo(rhs.getParameterType()[i], lhs.getParameterType()[i])) { return assignabilityResult; } } for (int i = 0; i < rhs.getReturnParameterType().length; i++) { if (!isAssignableTo(rhs.getReturnParameterType()[i], lhs.getReturnParameterType()[i])) { return assignabilityResult; } } assignabilityResult.assignable = true; return assignabilityResult; } } return assignabilityResult; } private boolean isImplicitiCastPossible(BType lhsType, BType rhsType) { if (lhsType == BTypes.typeAny) { return true; } if (lhsType.getTag() == TypeTags.ARRAY_TAG || rhsType.getTag() == TypeTags.ARRAY_TAG) { return isImplicitArrayCastPossible(lhsType, rhsType); } return false; } private boolean isImplicitArrayCastPossible(BType lhsType, BType rhsType) { if (lhsType.getTag() == TypeTags.ARRAY_TAG && rhsType.getTag() == TypeTags.ARRAY_TAG) { BArrayType lhrArrayType = (BArrayType) lhsType; BArrayType rhsArrayType = (BArrayType) rhsType; return isImplicitArrayCastPossible(lhrArrayType.getElementType(), rhsArrayType.getElementType()); } else if (rhsType.getTag() == TypeTags.ARRAY_TAG) { return lhsType == BTypes.typeAny; } else if (lhsType.getTag() == TypeTags.ARRAY_TAG) { return false; } if (lhsType == rhsType) { return true; } return lhsType.getTag() == BTypes.typeAny.getTag() && !BTypes.isValueType(rhsType); } /** * Helper method to add return statement if required. * * @param returnParamCount No of return parameters. * @param blockStmt Block statement to which to add the return statement. 
*/ private void checkAndAddReturnStmt(int returnParamCount, BlockStmt blockStmt) { if (returnParamCount != 0) { return; } Statement[] statements = blockStmt.getStatements(); int length = statements.length; Statement lastStatement = statements[length - 1]; if (!(lastStatement instanceof ReturnStmt)) { NodeLocation blockLocation = blockStmt.getNodeLocation(); NodeLocation endOfBlock = new NodeLocation(blockLocation.getPackageDirPath(), blockLocation.getFileName(), blockLocation.stopLineNumber); ReturnStmt returnStmt = new ReturnStmt(endOfBlock, null, new Expression[0]); statements = Arrays.copyOf(statements, length + 1); statements[length] = returnStmt; blockStmt.setStatements(statements); } } private void checkAndAddReplyStmt(BlockStmt blockStmt) { Statement[] statements = blockStmt.getStatements(); int length = statements.length; Statement lastStatement = statements[length - 1]; if (!(lastStatement instanceof ReplyStmt)) { NodeLocation blockLocation = blockStmt.getNodeLocation(); NodeLocation endOfBlock = new NodeLocation(blockLocation.getPackageDirPath(), blockLocation.getFileName(), blockLocation.stopLineNumber); ReplyStmt replyStmt = new ReplyStmt(endOfBlock, null, null); statements = Arrays.copyOf(statements, length + 1); statements[length] = replyStmt; blockStmt.setStatements(statements); } } private void assignVariableRefTypes(Expression[] expr, BType[] returnTypes) { for (int i = 0; i < expr.length; i++) { if (expr[i] instanceof SimpleVarRefExpr && ((SimpleVarRefExpr) expr[i]).getVarName().equals("_")) { continue; } ((SimpleVarRefExpr) expr[i]).getVariableDef().setType(returnTypes[i]); } } private static void checkParent(Statement stmt) { Statement parent = stmt; StatementKind childStmtType = stmt.getKind(); while (StatementKind.CALLABLE_UNIT_BLOCK != parent.getKind()) { if (StatementKind.WHILE_BLOCK == parent.getKind() && (StatementKind.BREAK == childStmtType || StatementKind.CONTINUE == childStmtType)) { return; } else if (StatementKind.TRANSACTION_BLOCK 
== parent.getKind()) { if (StatementKind.BREAK == childStmtType) { BLangExceptionHelper.throwSemanticError(stmt, SemanticErrors.BREAK_USED_IN_TRANSACTION); } else if (StatementKind.CONTINUE == childStmtType) { BLangExceptionHelper.throwSemanticError(stmt, SemanticErrors.CONTINUE_USED_IN_TRANSACTION); } } parent = parent.getParent(); } } /** * Get the XML namespaces that are visible to to the current scope. * * @param location Source location of the ballerina file * @return XML namespaces that are visible to the current scope, as a map */ private Map<String, Expression> getNamespaceInScope(NodeLocation location) { Map<String, Expression> namespaces = new HashMap<String, Expression>(); SymbolScope scope = currentScope; while (true) { for (Entry<SymbolName, BLangSymbol> symbols : scope.getSymbolMap().entrySet()) { SymbolName symbolName = symbols.getKey(); if (!(symbolName instanceof NamespaceSymbolName)) { continue; } NamespaceDeclaration namespaceDecl = (NamespaceDeclaration) symbols.getValue(); if (!namespaces.containsKey(namespaceDecl.getPrefix()) && !namespaces.containsValue(namespaceDecl.getNamespaceUri())) { BasicLiteral namespaceUriLiteral = new BasicLiteral(location, null, new SimpleTypeName(TypeConstants.STRING_TNAME), new BString(namespaceDecl.getNamespaceUri())); namespaceUriLiteral.accept(this); namespaces.put(namespaceDecl.getPrefix(), namespaceUriLiteral); } } if (scope instanceof BLangPackage) { break; } scope = scope.getEnclosingScope(); } return namespaces; } /** * Create and return an XML concatenation expression using using the provided expressions. * Expressions can only be either XML type or string type. All the string type expressions * will be converted to XML text literals ({@link XMLTextLiteral}). * * @param items Expressions to create concatenating expression. 
* @return XML concatenating expression */ private Expression getXMLConcatExpression(Expression[] items) { if (items.length == 0) { return null; } Expression concatExpr = null; for (int i = 0; i < items.length; i++) { Expression currentItem = items[i]; if (currentItem.getType() == BTypes.typeString) { currentItem = new XMLTextLiteral(currentItem.getNodeLocation(), currentItem.getWhiteSpaceDescriptor(), currentItem); items[0] = currentItem; } if (concatExpr == null) { concatExpr = currentItem; continue; } concatExpr = new AddExpression(currentItem.getNodeLocation(), currentItem.getWhiteSpaceDescriptor(), concatExpr, currentItem); concatExpr.setType(BTypes.typeXML); } return concatExpr; } private void validateXMLQname(XMLQNameExpr qname, Map<String, Expression> namespaces, Expression defaultNsUri) { qname.setType(BTypes.typeString); String prefix = qname.getPrefix(); if (prefix.isEmpty()) { qname.setNamepsaceUri(defaultNsUri); return; } if (namespaces.containsKey(qname.getPrefix())) { Expression namespaceUri = namespaces.get(qname.getPrefix()); qname.setNamepsaceUri(namespaceUri); } else if (prefix.equals(XMLConstants.XMLNS_ATTRIBUTE)) { BLangExceptionHelper.throwSemanticError(qname, SemanticErrors.INVALID_NAMESPACE_PREFIX, prefix); } else { BLangExceptionHelper.throwSemanticError(qname, SemanticErrors.UNDEFINED_NAMESPACE, qname.getPrefix()); } } private void validateXMLLiteralAttributes(List<KeyValueExpr> attributes, Map<String, Expression> namespaces) { for (KeyValueExpr attribute : attributes) { Expression attrNameExpr = attribute.getKeyExpr(); if (attrNameExpr instanceof XMLQNameExpr) { XMLQNameExpr attrQNameRefExpr = (XMLQNameExpr) attrNameExpr; attrQNameRefExpr.isUsedInXML(); BasicLiteral emptyNsUriLiteral = new BasicLiteral(attrNameExpr.getNodeLocation(), null, new SimpleTypeName(TypeConstants.STRING_TNAME), new BString(XMLConstants.NULL_NS_URI)); emptyNsUriLiteral.accept(this); validateXMLQname(attrQNameRefExpr, namespaces, emptyNsUriLiteral); } else { 
attrNameExpr.accept(this); if (attrNameExpr.getType() != BTypes.typeString) { attrNameExpr = createImplicitStringConversionExpr(attrNameExpr, attrNameExpr.getType()); attribute.setKeyExpr(attrNameExpr); } } Expression attrValueExpr = attribute.getValueExpr(); attrValueExpr.accept(this); if (attrValueExpr.getType() != BTypes.typeString) { attrValueExpr = createImplicitStringConversionExpr(attrValueExpr, attrValueExpr.getType()); attribute.setValueExpr(attrValueExpr); } } } private void validateXMLLiteralEndTag(XMLElementLiteral xmlElementLiteral, Expression defaultNsUri) { Expression startTagName = xmlElementLiteral.getStartTagName(); Expression endTagName = xmlElementLiteral.getEndTagName(); if (endTagName != null) { if (startTagName instanceof XMLQNameExpr && endTagName instanceof XMLQNameExpr) { XMLQNameExpr startName = (XMLQNameExpr) startTagName; XMLQNameExpr endName = (XMLQNameExpr) endTagName; if (!startName.getPrefix().equals(endName.getPrefix()) || !startName.getLocalname().equals(endName.getLocalname())) { BLangExceptionHelper.throwSemanticError(endTagName, SemanticErrors.XML_TAGS_MISMATCH); } } if (((startTagName instanceof XMLQNameExpr) && !(endTagName instanceof XMLQNameExpr)) || (!(startTagName instanceof XMLQNameExpr) && (endTagName instanceof XMLQNameExpr))) { BLangExceptionHelper.throwSemanticError(endTagName, SemanticErrors.XML_TAGS_MISMATCH); } if (endTagName instanceof XMLQNameExpr) { validateXMLQname((XMLQNameExpr) endTagName, xmlElementLiteral.getNamespaces(), defaultNsUri); } else { endTagName.accept(this); } if (endTagName.getType() != BTypes.typeString) { endTagName = createImplicitStringConversionExpr(endTagName, endTagName.getType()); xmlElementLiteral.setEndTagName(endTagName); } } } private Expression createImplicitStringConversionExpr(Expression sExpr, BType sType) { Expression conversionExpr = getImplicitConversionExpr(sExpr, sType, BTypes.typeString); if (conversionExpr == null) { BLangExceptionHelper.throwSemanticError(sExpr, 
SemanticErrors.INCOMPATIBLE_TYPES, BTypes.typeString, sType); } return conversionExpr; } /** * This class holds the results of the type assignability check. * * @since 0.88 */ static class AssignabilityResult { boolean assignable; Expression expression; } }
adaptive scheduler -> dynamic graph This is more accurate.
public boolean canBeReleased() { if (releasedPartitionGroups.size() != edgeManager.getNumberOfConsumedPartitionGroupsById(partitionId)) { return false; } for (JobVertexID jobVertexID : consumerVertices) { if (!producer.getExecutionGraphAccessor().getJobVertex(jobVertexID).isInitialized()) { return false; } } return true; }
public boolean canBeReleased() { if (releasablePartitionGroups.size() != edgeManager.getNumberOfConsumedPartitionGroupsById(partitionId)) { return false; } for (JobVertexID jobVertexId : totalResult.getConsumerVertices()) { if (!producer.getExecutionGraphAccessor().getJobVertex(jobVertexId).isInitialized()) { return false; } } return true; }
class IntermediateResultPartition { private static final int UNKNOWN = -1; private final IntermediateResult totalResult; private final ExecutionVertex producer; private final IntermediateResultPartitionID partitionId; private final EdgeManager edgeManager; /** Number of subpartitions. Initialized lazily and will not change once set. */ private int numberOfSubpartitions = UNKNOWN; /** Whether this partition has produced some data. */ private boolean hasDataProduced = false; /** * Released {@link ConsumedPartitionGroup}s for this result partition. This result partition can * be released if all {@link ConsumedPartitionGroup}s are released. */ private final Set<ConsumedPartitionGroup> releasedPartitionGroups = new HashSet<>(); /** All consumer job vertex ids of the corresponding {@link IntermediateResult}. */ private final List<JobVertexID> consumerVertices; public IntermediateResultPartition( IntermediateResult totalResult, ExecutionVertex producer, int partitionNumber, EdgeManager edgeManager) { this.totalResult = totalResult; this.producer = producer; this.partitionId = new IntermediateResultPartitionID(totalResult.getId(), partitionNumber); this.edgeManager = edgeManager; this.consumerVertices = totalResult.getConsumerVertices(); } public void releaseConsumedPartitionGroup(ConsumedPartitionGroup partitionGroup) { releasedPartitionGroups.add(partitionGroup); } public ExecutionVertex getProducer() { return producer; } public int getPartitionNumber() { return partitionId.getPartitionNumber(); } public IntermediateResult getIntermediateResult() { return totalResult; } public IntermediateResultPartitionID getPartitionId() { return partitionId; } public ResultPartitionType getResultType() { return totalResult.getResultType(); } public List<ConsumerVertexGroup> getConsumerVertexGroups() { return getEdgeManager().getConsumerVertexGroupsForPartition(partitionId); } public List<ConsumedPartitionGroup> getConsumedPartitionGroups() { return 
getEdgeManager().getConsumedPartitionGroupsById(partitionId); } public int getNumberOfSubpartitions() { if (numberOfSubpartitions == UNKNOWN) { numberOfSubpartitions = computeNumberOfSubpartitions(); checkState( numberOfSubpartitions > 0, "Number of subpartitions is an unexpected value: " + numberOfSubpartitions); } return numberOfSubpartitions; } private int computeNumberOfSubpartitions() { if (!getProducer().getExecutionGraphAccessor().isDynamic()) { List<ConsumerVertexGroup> consumerVertexGroups = getConsumerVertexGroups(); checkState(!consumerVertexGroups.isEmpty()); return consumerVertexGroups.get(0).size(); } else { if (totalResult.isBroadcast()) { return 1; } else { return computeNumberOfMaxPossiblePartitionConsumers(); } } } private int computeNumberOfMaxPossiblePartitionConsumers() { final DistributionPattern distributionPattern = getIntermediateResult().getConsumingDistributionPattern(); int maxConsumerJobVertexParallelism = getIntermediateResult().getConsumersParallelism(); if (maxConsumerJobVertexParallelism <= 0) { maxConsumerJobVertexParallelism = getIntermediateResult().getConsumersMaxParallelism(); checkState( maxConsumerJobVertexParallelism > 0, "Neither the parallelism nor the max parallelism of a job vertex is set"); } if (distributionPattern == DistributionPattern.ALL_TO_ALL) { return maxConsumerJobVertexParallelism; } else { int numberOfPartitions = getIntermediateResult().getNumParallelProducers(); return (int) Math.ceil(((double) maxConsumerJobVertexParallelism) / numberOfPartitions); } } public void markDataProduced() { hasDataProduced = true; } public boolean isConsumable() { return hasDataProduced; } void resetForNewExecution() { if (!getResultType().canBePipelinedConsumed() && hasDataProduced) { for (ConsumedPartitionGroup consumedPartitionGroup : getConsumedPartitionGroups()) { consumedPartitionGroup.partitionUnfinished(); } } releasedPartitionGroups.clear(); hasDataProduced = false; for (ConsumedPartitionGroup consumedPartitionGroup : 
getConsumedPartitionGroups()) { totalResult.clearCachedInformationForPartitionGroup(consumedPartitionGroup); } } public void addConsumers(ConsumerVertexGroup consumers) { getEdgeManager().connectPartitionWithConsumerVertexGroup(partitionId, consumers); } private EdgeManager getEdgeManager() { return edgeManager; } void markFinished() { if (getResultType().canBePipelinedConsumed()) { throw new IllegalStateException( "Tried to mark a non-blocking result partition as finished"); } if (hasDataProduced) { throw new IllegalStateException( "Tried to mark a finished result partition as finished."); } hasDataProduced = true; for (ConsumedPartitionGroup consumedPartitionGroup : getConsumedPartitionGroups()) { consumedPartitionGroup.partitionFinished(); } } }
class IntermediateResultPartition { private static final int UNKNOWN = -1; private final IntermediateResult totalResult; private final ExecutionVertex producer; private final IntermediateResultPartitionID partitionId; private final EdgeManager edgeManager; /** Number of subpartitions. Initialized lazily and will not change once set. */ private int numberOfSubpartitions = UNKNOWN; /** Whether this partition has produced some data. */ private boolean hasDataProduced = false; /** * Releasable {@link ConsumedPartitionGroup}s for this result partition. This result partition * can be released if all {@link ConsumedPartitionGroup}s are releasable. */ private final Set<ConsumedPartitionGroup> releasablePartitionGroups = new HashSet<>(); public IntermediateResultPartition( IntermediateResult totalResult, ExecutionVertex producer, int partitionNumber, EdgeManager edgeManager) { this.totalResult = totalResult; this.producer = producer; this.partitionId = new IntermediateResultPartitionID(totalResult.getId(), partitionNumber); this.edgeManager = edgeManager; } public void markPartitionGroupReleasable(ConsumedPartitionGroup partitionGroup) { releasablePartitionGroups.add(partitionGroup); } public ExecutionVertex getProducer() { return producer; } public int getPartitionNumber() { return partitionId.getPartitionNumber(); } public IntermediateResult getIntermediateResult() { return totalResult; } public IntermediateResultPartitionID getPartitionId() { return partitionId; } public ResultPartitionType getResultType() { return totalResult.getResultType(); } public List<ConsumerVertexGroup> getConsumerVertexGroups() { return getEdgeManager().getConsumerVertexGroupsForPartition(partitionId); } public List<ConsumedPartitionGroup> getConsumedPartitionGroups() { return getEdgeManager().getConsumedPartitionGroupsById(partitionId); } public int getNumberOfSubpartitions() { if (numberOfSubpartitions == UNKNOWN) { numberOfSubpartitions = computeNumberOfSubpartitions(); checkState( 
numberOfSubpartitions > 0, "Number of subpartitions is an unexpected value: " + numberOfSubpartitions); } return numberOfSubpartitions; } private int computeNumberOfSubpartitions() { if (!getProducer().getExecutionGraphAccessor().isDynamic()) { List<ConsumerVertexGroup> consumerVertexGroups = getConsumerVertexGroups(); checkState(!consumerVertexGroups.isEmpty()); return consumerVertexGroups.get(0).size(); } else { if (totalResult.isBroadcast()) { return 1; } else { return computeNumberOfMaxPossiblePartitionConsumers(); } } } private int computeNumberOfMaxPossiblePartitionConsumers() { final DistributionPattern distributionPattern = getIntermediateResult().getConsumingDistributionPattern(); int maxConsumerJobVertexParallelism = getIntermediateResult().getConsumersParallelism(); if (maxConsumerJobVertexParallelism <= 0) { maxConsumerJobVertexParallelism = getIntermediateResult().getConsumersMaxParallelism(); checkState( maxConsumerJobVertexParallelism > 0, "Neither the parallelism nor the max parallelism of a job vertex is set"); } if (distributionPattern == DistributionPattern.ALL_TO_ALL) { return maxConsumerJobVertexParallelism; } else { int numberOfPartitions = getIntermediateResult().getNumParallelProducers(); return (int) Math.ceil(((double) maxConsumerJobVertexParallelism) / numberOfPartitions); } } public void markDataProduced() { hasDataProduced = true; } public boolean isConsumable() { return hasDataProduced; } void resetForNewExecution() { if (!getResultType().canBePipelinedConsumed() && hasDataProduced) { for (ConsumedPartitionGroup consumedPartitionGroup : getConsumedPartitionGroups()) { consumedPartitionGroup.partitionUnfinished(); } } releasablePartitionGroups.clear(); hasDataProduced = false; for (ConsumedPartitionGroup consumedPartitionGroup : getConsumedPartitionGroups()) { totalResult.clearCachedInformationForPartitionGroup(consumedPartitionGroup); } } public void addConsumers(ConsumerVertexGroup consumers) { 
getEdgeManager().connectPartitionWithConsumerVertexGroup(partitionId, consumers); } private EdgeManager getEdgeManager() { return edgeManager; } void markFinished() { if (getResultType().canBePipelinedConsumed()) { throw new IllegalStateException( "Tried to mark a non-blocking result partition as finished"); } if (hasDataProduced) { throw new IllegalStateException( "Tried to mark a finished result partition as finished."); } hasDataProduced = true; for (ConsumedPartitionGroup consumedPartitionGroup : getConsumedPartitionGroups()) { consumedPartitionGroup.partitionFinished(); } } }
No? The only thing I can find that calls this code is ONNX-models, but they also explicitly throw if the type is URI ...
public FileReference addUri(String uri, Path path) { throw new UnsupportedOperationException("URI type is not supported"); /* TODO: this needs to be super-restricted if the config server should ever do this. try (TmpDir tmp = new TmpDir()) { return addFile(download(uri, tmp.dir, path.getRelative())); } catch (IOException e) { throw new IllegalArgumentException(e); } */ }
throw new UnsupportedOperationException("URI type is not supported");
public FileReference addUri(String uri, Path path) { throw new UnsupportedOperationException("URI type is not supported"); /* TODO: this needs to be super-restricted if the config server should ever do this. try (TmpDir tmp = new TmpDir()) { return addFile(download(uri, tmp.dir, path.getRelative())); } catch (IOException e) { throw new IllegalArgumentException(e); } */ }
class ApplicationFileManager implements AddFileInterface { private final File applicationDir; private final FileDirectory fileDirectory; ApplicationFileManager(File applicationDir, FileDirectory fileDirectory) { this.applicationDir = applicationDir; this.fileDirectory = fileDirectory; } @Override public FileReference addFile(Path path) throws IOException { File file = new File(applicationDir, path.getRelative()); return addFile(file); } private FileReference addFile(File file) throws IOException { return fileDirectory.addFile(file); } @Override @Override public FileReference addBlob(ByteBuffer blob, Path path) { try (TmpDir tmp = new TmpDir()) { return addFile(writeBlob(blob, tmp.dir, path.getRelative())); } catch (IOException e) { throw new IllegalArgumentException(e); } } private File writeBlob(ByteBuffer blob, File tmpDir, String relativePath) { FileOutputStream fos = null; File file = null; try { file = new File(tmpDir, relativePath); Files.createDirectories(file.getParentFile().toPath()); fos = new FileOutputStream(file); if (relativePath.endsWith(".lz4")) { LZ4FrameOutputStream lz4 = new LZ4FrameOutputStream(fos); lz4.write(blob.array(), blob.arrayOffset(), blob.remaining()); lz4.close(); } else { fos.write(blob.array(), blob.arrayOffset(), blob.remaining()); } return file; } catch (IOException e) { throw new IllegalArgumentException("Failed creating temp file", e); } finally { try { if (fos != null) { fos.close(); } } catch (IOException e) { throw new IllegalArgumentException("Failed closing down after writing blob of size " + blob.remaining() + " to " + file); } } } private File download(String uri, File tmpDir, String relativePath) { File file = null; FileOutputStream fos = null; ReadableByteChannel rbc = null; try { file = new File(tmpDir, relativePath); Files.createDirectories(file.getParentFile().toPath()); URL website = new URL(uri); rbc = Channels.newChannel(website.openStream()); fos = new FileOutputStream(file); fos.getChannel().transferFrom(rbc, 0, 
Long.MAX_VALUE); return file; } catch (SocketTimeoutException e) { throw new IllegalArgumentException("Failed connecting to or reading from " + uri, e); } catch (IOException e) { throw new IllegalArgumentException("Failed creating " + file, e); } finally { try { if (fos != null) { fos.close(); } if (rbc != null) { rbc.close(); } } catch (IOException e) { throw new IllegalArgumentException("Failed closing down after downloading " + uri + " to " + file); } } } private static class TmpDir implements Closeable { final File dir = Files.createTempDirectory("").toFile(); private TmpDir() throws IOException { } @Override public void close() { IOUtils.recursiveDeleteDir(dir); } } }
class ApplicationFileManager implements AddFileInterface { private final File applicationDir; private final FileDirectory fileDirectory; ApplicationFileManager(File applicationDir, FileDirectory fileDirectory) { this.applicationDir = applicationDir; this.fileDirectory = fileDirectory; } @Override public FileReference addFile(Path path) throws IOException { File file = new File(applicationDir, path.getRelative()); return addFile(file); } private FileReference addFile(File file) throws IOException { return fileDirectory.addFile(file); } @Override @Override public FileReference addBlob(ByteBuffer blob, Path path) { try (TmpDir tmp = new TmpDir()) { return addFile(writeBlob(blob, tmp.dir, path.getRelative())); } catch (IOException e) { throw new IllegalArgumentException(e); } } private File writeBlob(ByteBuffer blob, File tmpDir, String relativePath) { FileOutputStream fos = null; File file = null; try { file = new File(tmpDir, relativePath); Files.createDirectories(file.getParentFile().toPath()); fos = new FileOutputStream(file); if (relativePath.endsWith(".lz4")) { LZ4FrameOutputStream lz4 = new LZ4FrameOutputStream(fos); lz4.write(blob.array(), blob.arrayOffset(), blob.remaining()); lz4.close(); } else { fos.write(blob.array(), blob.arrayOffset(), blob.remaining()); } return file; } catch (IOException e) { throw new IllegalArgumentException("Failed creating temp file", e); } finally { try { if (fos != null) { fos.close(); } } catch (IOException e) { throw new IllegalArgumentException("Failed closing down after writing blob of size " + blob.remaining() + " to " + file); } } } private File download(String uri, File tmpDir, String relativePath) { File file = null; FileOutputStream fos = null; ReadableByteChannel rbc = null; try { file = new File(tmpDir, relativePath); Files.createDirectories(file.getParentFile().toPath()); URL website = new URL(uri); rbc = Channels.newChannel(website.openStream()); fos = new FileOutputStream(file); fos.getChannel().transferFrom(rbc, 0, 
Long.MAX_VALUE); return file; } catch (SocketTimeoutException e) { throw new IllegalArgumentException("Failed connecting to or reading from " + uri, e); } catch (IOException e) { throw new IllegalArgumentException("Failed creating " + file, e); } finally { try { if (fos != null) { fos.close(); } if (rbc != null) { rbc.close(); } } catch (IOException e) { throw new IllegalArgumentException("Failed closing down after downloading " + uri + " to " + file); } } } private static class TmpDir implements Closeable { final File dir = Files.createTempDirectory("").toFile(); private TmpDir() throws IOException { } @Override public void close() { IOUtils.recursiveDeleteDir(dir); } } }
Ah. I totally missed the second one was not a `else`. Probably better to initialize that one to `Collections.emptyList()` then. That way, you don't have to special case the null. Sorry about that.
private boolean restJsonSupportNeeded(CombinedIndexBuildItem indexBuildItem, DotName mediaTypeAnnotation) { for (AnnotationInstance annotationInstance : indexBuildItem.getIndex().getAnnotations(mediaTypeAnnotation)) { final AnnotationValue annotationValue = annotationInstance.value(); if (annotationValue == null) { continue; } List<String> mediaTypes = null; if (annotationValue.kind() == Kind.ARRAY) { mediaTypes = Arrays.asList(annotationValue.asStringArray()); } else if (annotationValue.kind() == Kind.STRING) { mediaTypes = Collections.singletonList(annotationValue.asString()); } return (mediaTypes != null) && (mediaTypes.contains(MediaType.APPLICATION_JSON) || mediaTypes.contains(MediaType.APPLICATION_JSON_PATCH_JSON)); } return false; }
List<String> mediaTypes = null;
private boolean restJsonSupportNeeded(CombinedIndexBuildItem indexBuildItem, DotName mediaTypeAnnotation) { for (AnnotationInstance annotationInstance : indexBuildItem.getIndex().getAnnotations(mediaTypeAnnotation)) { final AnnotationValue annotationValue = annotationInstance.value(); if (annotationValue == null) { continue; } List<String> mediaTypes = Collections.emptyList(); if (annotationValue.kind() == Kind.ARRAY) { mediaTypes = Arrays.asList(annotationValue.asStringArray()); } else if (annotationValue.kind() == Kind.STRING) { mediaTypes = Collections.singletonList(annotationValue.asString()); } return mediaTypes.contains(MediaType.APPLICATION_JSON) || mediaTypes.contains(MediaType.APPLICATION_JSON_PATCH_JSON); } return false; }
class ResteasyCommonConfigGzip { /** * If gzip is enabled */ @ConfigItem public boolean enabled; /** * Maximum deflated file bytes size * <p> * If the limit is exceeded, Resteasy will return Response * with status 413("Request Entity Too Large") */ @ConfigItem(defaultValue = "10M") public MemorySize maxInput; }
class ResteasyCommonConfigGzip { /** * If gzip is enabled */ @ConfigItem public boolean enabled; /** * Maximum deflated file bytes size * <p> * If the limit is exceeded, Resteasy will return Response * with status 413("Request Entity Too Large") */ @ConfigItem(defaultValue = "10M") public MemorySize maxInput; }
nit: We should not strictly cast it to ArrayList<String> here, instead keep it open to generic `List<String>` here. ``` List<String> expandParam = ((Collection<?>) parameters[paramIndex]).stream().map(Object::toString).collect(Collectors.toList()); ```
public Object execute(final Object[] parameters) { final CosmosParameterAccessor accessor = new CosmosParameterParameterAccessor(getQueryMethod(), parameters); final ResultProcessor processor = getQueryMethod().getResultProcessor().withDynamicProjection(accessor); String expandedQuery = query; List<SqlParameter> sqlParameters = new ArrayList<>(); for (int paramIndex = 0; paramIndex < parameters.length; paramIndex++) { Parameter queryParam = getQueryMethod().getParameters().getParameter(paramIndex); if (parameters[paramIndex] instanceof Collection) { ArrayList<String> expandParam = (ArrayList<String>) ((Collection<?>) parameters[paramIndex]).stream() .map(Object::toString).collect(Collectors.toList()); List<String> expandedParamKeys = new ArrayList<>(); for (int arrayIndex = 0; arrayIndex < expandParam.size(); arrayIndex++) { String paramName = "@" + queryParam.getName().orElse("") + arrayIndex; expandedParamKeys.add(paramName); sqlParameters.add(new SqlParameter(paramName, toCosmosDbValue(expandParam.get(arrayIndex)))); } expandedQuery = expandedQuery.replaceAll("@" + queryParam.getName().orElse(""), String.join(",", expandedParamKeys)); } else { if (!Pageable.class.isAssignableFrom(queryParam.getType()) && !Sort.class.isAssignableFrom(queryParam.getType())) { sqlParameters.add(new SqlParameter("@" + queryParam.getName().orElse(""), toCosmosDbValue(parameters[paramIndex]))); } } } SqlQuerySpec querySpec = new SqlQuerySpec(expandedQuery, sqlParameters); if (isPageQuery()) { return this.operations.runPaginationQuery(querySpec, accessor.getPageable(), processor.getReturnedType().getDomainType(), processor.getReturnedType().getReturnedType()); } else if (isSliceQuery()) { return this.operations.runSliceQuery( querySpec, accessor.getPageable(), processor.getReturnedType().getDomainType(), processor.getReturnedType().getReturnedType()); } else if (isCountQuery()) { final String container = ((CosmosEntityMetadata<?>) 
getQueryMethod().getEntityInformation()).getContainerName(); return this.operations.count(querySpec, container); } else { return this.operations.runQuery(querySpec, accessor.getSort(), processor.getReturnedType().getDomainType(), processor.getReturnedType().getReturnedType()); } }
ArrayList<String> expandParam = (ArrayList<String>) ((Collection<?>) parameters[paramIndex]).stream()
public Object execute(final Object[] parameters) { final CosmosParameterAccessor accessor = new CosmosParameterParameterAccessor(getQueryMethod(), parameters); final ResultProcessor processor = getQueryMethod().getResultProcessor().withDynamicProjection(accessor); String expandedQuery = query; List<SqlParameter> sqlParameters = new ArrayList<>(); for (int paramIndex = 0; paramIndex < parameters.length; paramIndex++) { Parameter queryParam = getQueryMethod().getParameters().getParameter(paramIndex); if (parameters[paramIndex] instanceof Collection) { List<String> expandParam = ((Collection<?>) parameters[paramIndex]).stream() .map(Object::toString).collect(Collectors.toList()); List<String> expandedParamKeys = new ArrayList<>(); for (int arrayIndex = 0; arrayIndex < expandParam.size(); arrayIndex++) { String paramName = "@" + queryParam.getName().orElse("") + arrayIndex; expandedParamKeys.add(paramName); sqlParameters.add(new SqlParameter(paramName, toCosmosDbValue(expandParam.get(arrayIndex)))); } expandedQuery = expandedQuery.replaceAll("@" + queryParam.getName().orElse(""), String.join(",", expandedParamKeys)); } else { if (!Pageable.class.isAssignableFrom(queryParam.getType()) && !Sort.class.isAssignableFrom(queryParam.getType())) { sqlParameters.add(new SqlParameter("@" + queryParam.getName().orElse(""), toCosmosDbValue(parameters[paramIndex]))); } } } SqlQuerySpec querySpec = new SqlQuerySpec(expandedQuery, sqlParameters); if (isPageQuery()) { return this.operations.runPaginationQuery(querySpec, accessor.getPageable(), processor.getReturnedType().getDomainType(), processor.getReturnedType().getReturnedType()); } else if (isSliceQuery()) { return this.operations.runSliceQuery( querySpec, accessor.getPageable(), processor.getReturnedType().getDomainType(), processor.getReturnedType().getReturnedType()); } else if (isCountQuery()) { final String container = ((CosmosEntityMetadata<?>) getQueryMethod().getEntityInformation()).getContainerName(); return 
this.operations.count(querySpec, container); } else { return this.operations.runQuery(querySpec, accessor.getSort(), processor.getReturnedType().getDomainType(), processor.getReturnedType().getReturnedType()); } }
class StringBasedCosmosQuery extends AbstractCosmosQuery { private static final Pattern COUNT_QUERY_PATTERN = Pattern.compile("^\\s*select\\s+value\\s+count.*", Pattern.CASE_INSENSITIVE); private final String query; /** * Constructor * @param queryMethod the CosmosQueryMethod * @param dbOperations the CosmosOperations */ public StringBasedCosmosQuery(CosmosQueryMethod queryMethod, CosmosOperations dbOperations) { super(queryMethod, dbOperations); this.query = queryMethod.getQueryAnnotation(); } @Override protected CosmosQuery createQuery(CosmosParameterAccessor accessor) { return null; } @Override @Override protected boolean isDeleteQuery() { return false; } @Override protected boolean isExistsQuery() { return false; } @Override protected boolean isCountQuery() { return isCountQuery(query, getQueryMethod().getReturnedObjectType()); } static boolean isCountQuery(String query, Class<?> returnedType) { if (isCountQueryReturnType(returnedType)) { return COUNT_QUERY_PATTERN.matcher(query).matches(); } else { return false; } } private static boolean isCountQueryReturnType(Class<?> returnedType) { return returnedType == Long.class || returnedType == long.class || returnedType == Integer.class || returnedType == int.class; } }
class StringBasedCosmosQuery extends AbstractCosmosQuery { private static final Pattern COUNT_QUERY_PATTERN = Pattern.compile("^\\s*select\\s+value\\s+count.*", Pattern.CASE_INSENSITIVE); private final String query; /** * Constructor * @param queryMethod the CosmosQueryMethod * @param dbOperations the CosmosOperations */ public StringBasedCosmosQuery(CosmosQueryMethod queryMethod, CosmosOperations dbOperations) { super(queryMethod, dbOperations); this.query = queryMethod.getQueryAnnotation(); } @Override protected CosmosQuery createQuery(CosmosParameterAccessor accessor) { return null; } @Override @Override protected boolean isDeleteQuery() { return false; } @Override protected boolean isExistsQuery() { return false; } @Override protected boolean isCountQuery() { return isCountQuery(query, getQueryMethod().getReturnedObjectType()); } static boolean isCountQuery(String query, Class<?> returnedType) { if (isCountQueryReturnType(returnedType)) { return COUNT_QUERY_PATTERN.matcher(query).matches(); } else { return false; } } private static boolean isCountQueryReturnType(Class<?> returnedType) { return returnedType == Long.class || returnedType == long.class || returnedType == Integer.class || returnedType == int.class; } }
I'm not sure we should use Optional here if we pass the null further to Options() anyway. Notice that in case when optional is empty the defaultValue is assigned to null anyway
private static List<Option> extractOptions(boolean isPython) { List<Option> options = new ArrayList<>(); for (Method method : FlinkPipelineOptions.class.getDeclaredMethods()) { String name; String description; String defaultValue = null; name = method.getName(); if (name.matches("^(get|is).*")) { name = name.replaceFirst("^(get|is)", ""); if (isPython) { name = CaseFormat.UPPER_CAMEL.to(CaseFormat.LOWER_UNDERSCORE, name); } else { name = Character.toLowerCase(name.charAt(0)) + name.substring(1); } Description descriptionAnnotation = method.getAnnotation(Description.class); if (descriptionAnnotation == null) { throw new RuntimeException( "All pipeline options should have a description. Please add one for " + name); } description = descriptionAnnotation.value(); Optional<String> defaultValueFromAnnotation = getDefaultValueFromAnnotation(method); if (defaultValueFromAnnotation.isPresent()) { defaultValue = defaultValueFromAnnotation.get(); } options.add(new Option(name, description, defaultValue)); } } options.sort(Comparator.comparing(option -> option.name)); return options; }
Optional<String> defaultValueFromAnnotation = getDefaultValueFromAnnotation(method);
private static List<Option> extractOptions(boolean isPython) { List<Option> options = new ArrayList<>(); for (Method method : FlinkPipelineOptions.class.getDeclaredMethods()) { String name; String description; String defaultValue = null; name = method.getName(); if (name.matches("^(get|is).*")) { name = name.replaceFirst("^(get|is)", ""); if (isPython) { name = CaseFormat.UPPER_CAMEL.to(CaseFormat.LOWER_UNDERSCORE, name); } else { name = Character.toLowerCase(name.charAt(0)) + name.substring(1); } Description descriptionAnnotation = method.getAnnotation(Description.class); if (descriptionAnnotation == null) { throw new RuntimeException( "All pipeline options should have a description. Please add one for " + name); } description = descriptionAnnotation.value(); Optional<String> defaultValueFromAnnotation = getDefaultValueFromAnnotation(method); if (defaultValueFromAnnotation.isPresent()) { defaultValue = defaultValueFromAnnotation.get(); } options.add(new Option(name, description, defaultValue)); } } options.sort(Comparator.comparing(option -> option.name)); return options; }
class methods. */
class methods. */
This PR now looks great, after we fix this one, it is time to merge it, I suppose.
public static List<SqlNode> convertOrderByItems(final Collection<OrderByItemSegment> orderByItems) { List<SqlNode> sqlNodes = Lists.newArrayList(); for (OrderByItemSegment orderByItemSegment : orderByItems) { Optional<SqlNode> optional = Optional.empty(); if (orderByItemSegment instanceof ColumnOrderByItemSegment) { optional = new ColumnOrderByItemSqlNodeConverter().convert((ColumnOrderByItemSegment) orderByItemSegment); } else if (orderByItemSegment instanceof ExpressionOrderByItemSegment) { throw new UnsupportedOperationException("unsupported ExpressionOrderByItemSegment"); } else if (orderByItemSegment instanceof IndexOrderByItemSegment) { throw new UnsupportedOperationException("unsupported IndexOrderByItemSegment"); } else if (orderByItemSegment instanceof TextOrderByItemSegment) { throw new UnsupportedOperationException("unsupported TextOrderByItemSegment"); } if (optional.isPresent()) { sqlNodes.add(optional.get()); } } return sqlNodes; }
List<SqlNode> sqlNodes = Lists.newArrayList();
public static List<SqlNode> convertOrderByItems(final Collection<OrderByItemSegment> orderByItems) { List<SqlNode> sqlNodes = Lists.newArrayList(); for (OrderByItemSegment orderByItemSegment : orderByItems) { Optional<SqlNode> optional = Optional.empty(); if (orderByItemSegment instanceof ColumnOrderByItemSegment) { optional = new ColumnOrderByItemSqlNodeConverter().convert((ColumnOrderByItemSegment) orderByItemSegment); } else if (orderByItemSegment instanceof ExpressionOrderByItemSegment) { throw new UnsupportedOperationException("unsupported ExpressionOrderByItemSegment"); } else if (orderByItemSegment instanceof IndexOrderByItemSegment) { throw new UnsupportedOperationException("unsupported IndexOrderByItemSegment"); } else if (orderByItemSegment instanceof TextOrderByItemSegment) { throw new UnsupportedOperationException("unsupported TextOrderByItemSegment"); } if (optional.isPresent()) { sqlNodes.add(optional.get()); } } return sqlNodes; }
class SqlNodeConverterUtil { /** * Convert order by items. * @param orderByItems order by item list. * @return a collection of order by item <code>SqlNode</code> */ }
class SqlNodeConverterUtil { /** * Convert order by items. * @param orderByItems order by item list. * @return a collection of order by item <code>SqlNode</code> */ }
`cost={}s`, same as the info log.
private void schedule() { for (Iterator<Map.Entry<PartitionIdentifier, CompactionContext>> iterator = runningCompactions.entrySet().iterator(); iterator.hasNext(); ) { Map.Entry<PartitionIdentifier, CompactionContext> entry = iterator.next(); PartitionIdentifier partition = entry.getKey(); CompactionContext context = entry.getValue(); if (context.compactionFinishedOnBE() && !context.transactionHasCommitted()) { try { commitCompaction(partition, context); } catch (Exception e) { LOG.error("Fail to commit compaction. {} error={}", context.getDebugString(), e.getMessage()); iterator.remove(); context.setFinishTs(System.currentTimeMillis()); failHistory.offer(CompactionRecord.build(context, e.getMessage())); compactionManager.enableCompactionAfter(partition, MIN_COMPACTION_INTERVAL_MS_ON_FAILURE); try { transactionMgr.abortTransaction(partition.getDbId(), context.getTxnId(), e.getMessage()); } catch (UserException ex) { LOG.error("Fail to abort txn " + context.getTxnId(), ex); } continue; } } if (context.transactionHasCommitted() && context.waitTransactionVisible(100, TimeUnit.MILLISECONDS)) { iterator.remove(); context.setFinishTs(System.currentTimeMillis()); history.offer(CompactionRecord.build(context)); long cost = context.getFinishTs() - context.getStartTs(); if (cost >= /*60 minutes=*/3600000) { LOG.info("Removed published compaction. {} cost={}s running={}", context.getDebugString(), cost / 1000, runningCompactions.size()); } else if (LOG.isDebugEnabled()) { LOG.debug("Removed published compaction. 
{} cost={}ms running={}", context.getDebugString(), cost, runningCompactions.size()); } compactionManager.enableCompactionAfter(partition, MIN_COMPACTION_INTERVAL_MS_ON_SUCCESS); } } int index = 0; int compactionLimit = compactionTaskLimit(); int numRunningTasks = runningCompactions.values().stream().mapToInt(CompactionContext::getNumCompactionTasks).sum(); if (numRunningTasks >= compactionLimit) { return; } List<PartitionIdentifier> partitions = compactionManager.choosePartitionsToCompact(runningCompactions.keySet()); while (numRunningTasks < compactionLimit && index < partitions.size()) { PartitionIdentifier partition = partitions.get(index++); CompactionContext context = startCompaction(partition); if (context == null) { continue; } numRunningTasks += context.getNumCompactionTasks(); runningCompactions.put(partition, context); if (LOG.isDebugEnabled()) { LOG.debug("Created new compaction job. partition={} txnId={}", partition, context.getTxnId()); } } }
LOG.debug("Removed published compaction. {} cost={}ms running={}", context.getDebugString(),
private void schedule() { for (Iterator<Map.Entry<PartitionIdentifier, CompactionContext>> iterator = runningCompactions.entrySet().iterator(); iterator.hasNext(); ) { Map.Entry<PartitionIdentifier, CompactionContext> entry = iterator.next(); PartitionIdentifier partition = entry.getKey(); CompactionContext context = entry.getValue(); if (context.compactionFinishedOnBE() && !context.transactionHasCommitted()) { try { commitCompaction(partition, context); } catch (Exception e) { LOG.error("Fail to commit compaction. {} error={}", context.getDebugString(), e.getMessage()); iterator.remove(); context.setFinishTs(System.currentTimeMillis()); failHistory.offer(CompactionRecord.build(context, e.getMessage())); compactionManager.enableCompactionAfter(partition, MIN_COMPACTION_INTERVAL_MS_ON_FAILURE); try { transactionMgr.abortTransaction(partition.getDbId(), context.getTxnId(), e.getMessage()); } catch (UserException ex) { LOG.error("Fail to abort txn " + context.getTxnId(), ex); } continue; } } if (context.transactionHasCommitted() && context.waitTransactionVisible(100, TimeUnit.MILLISECONDS)) { iterator.remove(); context.setFinishTs(System.currentTimeMillis()); history.offer(CompactionRecord.build(context)); long cost = context.getFinishTs() - context.getStartTs(); if (cost >= /*60 minutes=*/3600000) { LOG.info("Removed published compaction. {} cost={}s running={}", context.getDebugString(), cost / 1000, runningCompactions.size()); } else if (LOG.isDebugEnabled()) { LOG.debug("Removed published compaction. 
{} cost={}s running={}", context.getDebugString(), cost / 1000, runningCompactions.size()); } compactionManager.enableCompactionAfter(partition, MIN_COMPACTION_INTERVAL_MS_ON_SUCCESS); } } int index = 0; int compactionLimit = compactionTaskLimit(); int numRunningTasks = runningCompactions.values().stream().mapToInt(CompactionContext::getNumCompactionTasks).sum(); if (numRunningTasks >= compactionLimit) { return; } List<PartitionIdentifier> partitions = compactionManager.choosePartitionsToCompact(runningCompactions.keySet()); while (numRunningTasks < compactionLimit && index < partitions.size()) { PartitionIdentifier partition = partitions.get(index++); CompactionContext context = startCompaction(partition); if (context == null) { continue; } numRunningTasks += context.getNumCompactionTasks(); runningCompactions.put(partition, context); if (LOG.isDebugEnabled()) { LOG.debug("Created new compaction job. partition={} txnId={}", partition, context.getTxnId()); } } }
class CompactionScheduler extends Daemon { private static final Logger LOG = LogManager.getLogger(CompactionScheduler.class); private static final String HOST_NAME = FrontendOptions.getLocalHostAddress(); private static final long LOOP_INTERVAL_MS = 500L; private static final long TXN_TIMEOUT_SECOND = 86400L; private static final long MIN_COMPACTION_INTERVAL_MS_ON_SUCCESS = 3000L; private static final long MIN_COMPACTION_INTERVAL_MS_ON_FAILURE = 6000L; private static final long PARTITION_CLEAN_INTERVAL_SECOND = 30; private final CompactionManager compactionManager; private final SystemInfoService systemInfoService; private final GlobalTransactionMgr transactionMgr; private final GlobalStateMgr stateMgr; private final ConcurrentHashMap<PartitionIdentifier, CompactionContext> runningCompactions; private final SynchronizedCircularQueue<CompactionRecord> history; private final SynchronizedCircularQueue<CompactionRecord> failHistory; private boolean finishedWaiting = false; private long waitTxnId = -1; private long lastPartitionCleanTime; CompactionScheduler(@NotNull CompactionManager compactionManager, @NotNull SystemInfoService systemInfoService, @NotNull GlobalTransactionMgr transactionMgr, @NotNull GlobalStateMgr stateMgr) { super("COMPACTION_DISPATCH", LOOP_INTERVAL_MS); this.compactionManager = compactionManager; this.systemInfoService = systemInfoService; this.transactionMgr = transactionMgr; this.stateMgr = stateMgr; this.runningCompactions = new ConcurrentHashMap<>(); this.lastPartitionCleanTime = System.currentTimeMillis(); this.history = new SynchronizedCircularQueue<>(Config.lake_compaction_history_size); this.failHistory = new SynchronizedCircularQueue<>(Config.lake_compaction_fail_history_size); } @Override protected void runOneCycle() { cleanPartition(); if (stateMgr.isLeader() && stateMgr.isReady() && allCommittedTransactionsBeforeRestartHaveFinished()) { schedule(); history.changeMaxSize(Config.lake_compaction_history_size); 
failHistory.changeMaxSize(Config.lake_compaction_fail_history_size); } } private boolean allCommittedTransactionsBeforeRestartHaveFinished() { if (finishedWaiting) { return true; } long minActiveTxnId = transactionMgr.getMinActiveTxnId(); if (waitTxnId < 0) { waitTxnId = transactionMgr.getTransactionIDGenerator().getNextTransactionId(); } finishedWaiting = waitTxnId <= minActiveTxnId; return finishedWaiting; } private int compactionTaskLimit() { if (Config.lake_compaction_max_tasks >= 0) { return Config.lake_compaction_max_tasks; } return systemInfoService.getAliveBackendNumber() * 16; } private void cleanPartition() { long now = System.currentTimeMillis(); if (now - lastPartitionCleanTime >= PARTITION_CLEAN_INTERVAL_SECOND * 1000L) { compactionManager.getAllPartitions() .stream() .filter(p -> !isPartitionExist(p)) .filter(p -> !runningCompactions.containsKey(p)) .forEach(compactionManager::removePartition); lastPartitionCleanTime = now; } } private boolean isPartitionExist(PartitionIdentifier partition) { Database db = stateMgr.getDb(partition.getDbId()); if (db == null) { return false; } db.readLock(); try { OlapTable table = (OlapTable) db.getTable(partition.getTableId()); return table != null && table.getPartition(partition.getPartitionId()) != null; } finally { db.readUnlock(); } } private CompactionContext startCompaction(PartitionIdentifier partitionIdentifier) { Database db = stateMgr.getDb(partitionIdentifier.getDbId()); if (db == null) { compactionManager.removePartition(partitionIdentifier); return null; } if (!db.tryReadLock(50, TimeUnit.MILLISECONDS)) { LOG.info("Skipped partition compaction due to get database lock timeout"); compactionManager.enableCompactionAfter(partitionIdentifier, MIN_COMPACTION_INTERVAL_MS_ON_FAILURE); return null; } long txnId; long currentVersion; OlapTable table; Partition partition; Map<Long, List<Long>> beToTablets; try { table = (OlapTable) db.getTable(partitionIdentifier.getTableId()); if (table != null && 
table.getState() == OlapTable.OlapTableState.SCHEMA_CHANGE) { compactionManager.enableCompactionAfter(partitionIdentifier, MIN_COMPACTION_INTERVAL_MS_ON_FAILURE); return null; } partition = (table != null) ? table.getPartition(partitionIdentifier.getPartitionId()) : null; if (partition == null) { compactionManager.removePartition(partitionIdentifier); return null; } currentVersion = partition.getVisibleVersion(); beToTablets = collectPartitionTablets(partition); if (beToTablets.isEmpty()) { compactionManager.enableCompactionAfter(partitionIdentifier, MIN_COMPACTION_INTERVAL_MS_ON_FAILURE); return null; } txnId = beginTransaction(partitionIdentifier); } catch (BeginTransactionException | AnalysisException | LabelAlreadyUsedException | DuplicatedRequestException e) { LOG.error("Fail to create transaction for compaction job. {}", e.getMessage()); return null; } catch (Throwable e) { LOG.error("Unknown error: {}", e.getMessage()); return null; } finally { db.readUnlock(); } String partitionName = String.format("%s.%s.%s", db.getFullName(), table.getName(), partition.getName()); CompactionContext context = new CompactionContext(partitionName, txnId, System.currentTimeMillis()); context.setBeToTablets(beToTablets); long nextCompactionInterval = MIN_COMPACTION_INTERVAL_MS_ON_SUCCESS; try { List<Future<CompactResponse>> futures = compactTablets(currentVersion, beToTablets, txnId); context.setResponseList(futures); return context; } catch (Exception e) { LOG.error(e); nextCompactionInterval = MIN_COMPACTION_INTERVAL_MS_ON_FAILURE; abortTransactionIgnoreError(db.getId(), txnId, e.getMessage()); context.setFinishTs(System.currentTimeMillis()); failHistory.offer(CompactionRecord.build(context, e.getMessage())); return null; } finally { compactionManager.enableCompactionAfter(partitionIdentifier, nextCompactionInterval); } } @NotNull private List<Future<CompactResponse>> compactTablets(long currentVersion, Map<Long, List<Long>> beToTablets, long txnId) throws UserException { 
List<Future<CompactResponse>> futures = Lists.newArrayListWithCapacity(beToTablets.size()); for (Map.Entry<Long, List<Long>> entry : beToTablets.entrySet()) { Backend backend = systemInfoService.getBackend(entry.getKey()); if (backend == null) { throw new UserException("Backend " + entry.getKey() + " has been dropped"); } CompactRequest request = new CompactRequest(); request.tabletIds = entry.getValue(); request.txnId = txnId; request.version = currentVersion; LakeService service = BrpcProxy.getLakeService(backend.getHost(), backend.getBrpcPort()); futures.add(service.compact(request)); } return futures; } @NotNull private Map<Long, List<Long>> collectPartitionTablets(Partition partition) { List<MaterializedIndex> visibleIndexes = partition.getMaterializedIndices(MaterializedIndex.IndexExtState.VISIBLE); Map<Long, List<Long>> beToTablets = new HashMap<>(); for (MaterializedIndex index : visibleIndexes) { for (Tablet tablet : index.getTablets()) { Long beId = Utils.chooseBackend((LakeTablet) tablet); if (beId == null) { beToTablets.clear(); return beToTablets; } beToTablets.computeIfAbsent(beId, k -> Lists.newArrayList()).add(tablet.getId()); } } return beToTablets; } private long beginTransaction(PartitionIdentifier partition) throws BeginTransactionException, AnalysisException, LabelAlreadyUsedException, DuplicatedRequestException { long dbId = partition.getDbId(); long tableId = partition.getTableId(); long partitionId = partition.getPartitionId(); long currentTs = System.currentTimeMillis(); TransactionState.LoadJobSourceType loadJobSourceType = TransactionState.LoadJobSourceType.LAKE_COMPACTION; TransactionState.TxnSourceType txnSourceType = TransactionState.TxnSourceType.FE; TransactionState.TxnCoordinator coordinator = new TransactionState.TxnCoordinator(txnSourceType, HOST_NAME); String label = String.format("COMPACTION_%d-%d-%d-%d", dbId, tableId, partitionId, currentTs); return transactionMgr.beginTransaction(dbId, Lists.newArrayList(tableId), label, 
coordinator, loadJobSourceType, TXN_TIMEOUT_SECOND); } private void commitCompaction(PartitionIdentifier partition, CompactionContext context) throws UserException, ExecutionException, InterruptedException { Preconditions.checkState(context.compactionFinishedOnBE()); for (Future<CompactResponse> responseFuture : context.getResponseList()) { CompactResponse response = responseFuture.get(); if (response != null && CollectionUtils.isNotEmpty(response.failedTablets)) { if (response.status != null && CollectionUtils.isNotEmpty(response.status.errorMsgs)) { throw new UserException(response.status.errorMsgs.get(0)); } else { throw new UserException("Fail to compact tablet " + response.failedTablets.get(0)); } } } List<TabletCommitInfo> commitInfoList = Lists.newArrayList(); for (Map.Entry<Long, List<Long>> entry : context.getBeToTablets().entrySet()) { for (Long tabletId : entry.getValue()) { commitInfoList.add(new TabletCommitInfo(tabletId, entry.getKey())); } } Database db = stateMgr.getDb(partition.getDbId()); if (db == null) { throw new MetaNotFoundException("database not exist"); } if (LOG.isDebugEnabled()) { LOG.debug("Committing compaction transaction. 
partition={} txnId={}", partition, context.getTxnId()); } VisibleStateWaiter waiter; db.writeLock(); try { waiter = transactionMgr.commitTransaction(db.getId(), context.getTxnId(), commitInfoList, Lists.newArrayList()); } finally { db.writeUnlock(); } context.setVisibleStateWaiter(waiter); context.setCommitTs(System.currentTimeMillis()); } private void abortTransactionIgnoreError(long dbId, long txnId, String reason) { try { transactionMgr.abortTransaction(dbId, txnId, reason); } catch (UserException ex) { LOG.error(ex); } } @NotNull List<CompactionRecord> getHistory() { ImmutableList.Builder<CompactionRecord> builder = ImmutableList.builder(); history.forEach(builder::add); failHistory.forEach(builder::add); for (CompactionContext context : runningCompactions.values()) { builder.add(CompactionRecord.build(context)); } return builder.build(); } private static class SynchronizedCircularQueue<E> { private CircularFifoQueue<E> q; SynchronizedCircularQueue(int size) { q = new CircularFifoQueue<>(size); } synchronized void changeMaxSize(int newMaxSize) { if (newMaxSize == q.maxSize()) { return; } CircularFifoQueue<E> newQ = new CircularFifoQueue<>(newMaxSize); for (E e : q) { newQ.offer(e); } q = newQ; } synchronized void offer(E e) { q.offer(e); } synchronized void forEach(Consumer<? super E> consumer) { q.forEach(consumer); } } }
class CompactionScheduler extends Daemon { private static final Logger LOG = LogManager.getLogger(CompactionScheduler.class); private static final String HOST_NAME = FrontendOptions.getLocalHostAddress(); private static final long LOOP_INTERVAL_MS = 500L; private static final long TXN_TIMEOUT_SECOND = 86400L; private static final long MIN_COMPACTION_INTERVAL_MS_ON_SUCCESS = 3000L; private static final long MIN_COMPACTION_INTERVAL_MS_ON_FAILURE = 6000L; private static final long PARTITION_CLEAN_INTERVAL_SECOND = 30; private final CompactionManager compactionManager; private final SystemInfoService systemInfoService; private final GlobalTransactionMgr transactionMgr; private final GlobalStateMgr stateMgr; private final ConcurrentHashMap<PartitionIdentifier, CompactionContext> runningCompactions; private final SynchronizedCircularQueue<CompactionRecord> history; private final SynchronizedCircularQueue<CompactionRecord> failHistory; private boolean finishedWaiting = false; private long waitTxnId = -1; private long lastPartitionCleanTime; CompactionScheduler(@NotNull CompactionManager compactionManager, @NotNull SystemInfoService systemInfoService, @NotNull GlobalTransactionMgr transactionMgr, @NotNull GlobalStateMgr stateMgr) { super("COMPACTION_DISPATCH", LOOP_INTERVAL_MS); this.compactionManager = compactionManager; this.systemInfoService = systemInfoService; this.transactionMgr = transactionMgr; this.stateMgr = stateMgr; this.runningCompactions = new ConcurrentHashMap<>(); this.lastPartitionCleanTime = System.currentTimeMillis(); this.history = new SynchronizedCircularQueue<>(Config.lake_compaction_history_size); this.failHistory = new SynchronizedCircularQueue<>(Config.lake_compaction_fail_history_size); } @Override protected void runOneCycle() { cleanPartition(); if (stateMgr.isLeader() && stateMgr.isReady() && allCommittedTransactionsBeforeRestartHaveFinished()) { schedule(); history.changeMaxSize(Config.lake_compaction_history_size); 
failHistory.changeMaxSize(Config.lake_compaction_fail_history_size); } } private boolean allCommittedTransactionsBeforeRestartHaveFinished() { if (finishedWaiting) { return true; } long minActiveTxnId = transactionMgr.getMinActiveTxnId(); if (waitTxnId < 0) { waitTxnId = transactionMgr.getTransactionIDGenerator().getNextTransactionId(); } finishedWaiting = waitTxnId <= minActiveTxnId; return finishedWaiting; } private int compactionTaskLimit() { if (Config.lake_compaction_max_tasks >= 0) { return Config.lake_compaction_max_tasks; } return systemInfoService.getAliveBackendNumber() * 16; } private void cleanPartition() { long now = System.currentTimeMillis(); if (now - lastPartitionCleanTime >= PARTITION_CLEAN_INTERVAL_SECOND * 1000L) { compactionManager.getAllPartitions() .stream() .filter(p -> !isPartitionExist(p)) .filter(p -> !runningCompactions.containsKey(p)) .forEach(compactionManager::removePartition); lastPartitionCleanTime = now; } } private boolean isPartitionExist(PartitionIdentifier partition) { Database db = stateMgr.getDb(partition.getDbId()); if (db == null) { return false; } db.readLock(); try { OlapTable table = (OlapTable) db.getTable(partition.getTableId()); return table != null && table.getPartition(partition.getPartitionId()) != null; } finally { db.readUnlock(); } } private CompactionContext startCompaction(PartitionIdentifier partitionIdentifier) { Database db = stateMgr.getDb(partitionIdentifier.getDbId()); if (db == null) { compactionManager.removePartition(partitionIdentifier); return null; } if (!db.tryReadLock(50, TimeUnit.MILLISECONDS)) { LOG.info("Skipped partition compaction due to get database lock timeout"); compactionManager.enableCompactionAfter(partitionIdentifier, MIN_COMPACTION_INTERVAL_MS_ON_FAILURE); return null; } long txnId; long currentVersion; OlapTable table; Partition partition; Map<Long, List<Long>> beToTablets; try { table = (OlapTable) db.getTable(partitionIdentifier.getTableId()); if (table != null && 
table.getState() == OlapTable.OlapTableState.SCHEMA_CHANGE) { compactionManager.enableCompactionAfter(partitionIdentifier, MIN_COMPACTION_INTERVAL_MS_ON_FAILURE); return null; } partition = (table != null) ? table.getPartition(partitionIdentifier.getPartitionId()) : null; if (partition == null) { compactionManager.removePartition(partitionIdentifier); return null; } currentVersion = partition.getVisibleVersion(); beToTablets = collectPartitionTablets(partition); if (beToTablets.isEmpty()) { compactionManager.enableCompactionAfter(partitionIdentifier, MIN_COMPACTION_INTERVAL_MS_ON_FAILURE); return null; } txnId = beginTransaction(partitionIdentifier); } catch (BeginTransactionException | AnalysisException | LabelAlreadyUsedException | DuplicatedRequestException e) { LOG.error("Fail to create transaction for compaction job. {}", e.getMessage()); return null; } catch (Throwable e) { LOG.error("Unknown error: {}", e.getMessage()); return null; } finally { db.readUnlock(); } String partitionName = String.format("%s.%s.%s", db.getFullName(), table.getName(), partition.getName()); CompactionContext context = new CompactionContext(partitionName, txnId, System.currentTimeMillis()); context.setBeToTablets(beToTablets); long nextCompactionInterval = MIN_COMPACTION_INTERVAL_MS_ON_SUCCESS; try { List<Future<CompactResponse>> futures = compactTablets(currentVersion, beToTablets, txnId); context.setResponseList(futures); return context; } catch (Exception e) { LOG.error(e); nextCompactionInterval = MIN_COMPACTION_INTERVAL_MS_ON_FAILURE; abortTransactionIgnoreError(db.getId(), txnId, e.getMessage()); context.setFinishTs(System.currentTimeMillis()); failHistory.offer(CompactionRecord.build(context, e.getMessage())); return null; } finally { compactionManager.enableCompactionAfter(partitionIdentifier, nextCompactionInterval); } } @NotNull private List<Future<CompactResponse>> compactTablets(long currentVersion, Map<Long, List<Long>> beToTablets, long txnId) throws UserException { 
List<Future<CompactResponse>> futures = Lists.newArrayListWithCapacity(beToTablets.size()); for (Map.Entry<Long, List<Long>> entry : beToTablets.entrySet()) { Backend backend = systemInfoService.getBackend(entry.getKey()); if (backend == null) { throw new UserException("Backend " + entry.getKey() + " has been dropped"); } CompactRequest request = new CompactRequest(); request.tabletIds = entry.getValue(); request.txnId = txnId; request.version = currentVersion; LakeService service = BrpcProxy.getLakeService(backend.getHost(), backend.getBrpcPort()); futures.add(service.compact(request)); } return futures; } @NotNull private Map<Long, List<Long>> collectPartitionTablets(Partition partition) { List<MaterializedIndex> visibleIndexes = partition.getMaterializedIndices(MaterializedIndex.IndexExtState.VISIBLE); Map<Long, List<Long>> beToTablets = new HashMap<>(); for (MaterializedIndex index : visibleIndexes) { for (Tablet tablet : index.getTablets()) { Long beId = Utils.chooseBackend((LakeTablet) tablet); if (beId == null) { beToTablets.clear(); return beToTablets; } beToTablets.computeIfAbsent(beId, k -> Lists.newArrayList()).add(tablet.getId()); } } return beToTablets; } private long beginTransaction(PartitionIdentifier partition) throws BeginTransactionException, AnalysisException, LabelAlreadyUsedException, DuplicatedRequestException { long dbId = partition.getDbId(); long tableId = partition.getTableId(); long partitionId = partition.getPartitionId(); long currentTs = System.currentTimeMillis(); TransactionState.LoadJobSourceType loadJobSourceType = TransactionState.LoadJobSourceType.LAKE_COMPACTION; TransactionState.TxnSourceType txnSourceType = TransactionState.TxnSourceType.FE; TransactionState.TxnCoordinator coordinator = new TransactionState.TxnCoordinator(txnSourceType, HOST_NAME); String label = String.format("COMPACTION_%d-%d-%d-%d", dbId, tableId, partitionId, currentTs); return transactionMgr.beginTransaction(dbId, Lists.newArrayList(tableId), label, 
coordinator, loadJobSourceType, TXN_TIMEOUT_SECOND); } private void commitCompaction(PartitionIdentifier partition, CompactionContext context) throws UserException, ExecutionException, InterruptedException { Preconditions.checkState(context.compactionFinishedOnBE()); for (Future<CompactResponse> responseFuture : context.getResponseList()) { CompactResponse response = responseFuture.get(); if (response != null && CollectionUtils.isNotEmpty(response.failedTablets)) { if (response.status != null && CollectionUtils.isNotEmpty(response.status.errorMsgs)) { throw new UserException(response.status.errorMsgs.get(0)); } else { throw new UserException("Fail to compact tablet " + response.failedTablets.get(0)); } } } List<TabletCommitInfo> commitInfoList = Lists.newArrayList(); for (Map.Entry<Long, List<Long>> entry : context.getBeToTablets().entrySet()) { for (Long tabletId : entry.getValue()) { commitInfoList.add(new TabletCommitInfo(tabletId, entry.getKey())); } } Database db = stateMgr.getDb(partition.getDbId()); if (db == null) { throw new MetaNotFoundException("database not exist"); } if (LOG.isDebugEnabled()) { LOG.debug("Committing compaction transaction. 
partition={} txnId={}", partition, context.getTxnId()); } VisibleStateWaiter waiter; db.writeLock(); try { waiter = transactionMgr.commitTransaction(db.getId(), context.getTxnId(), commitInfoList, Lists.newArrayList()); } finally { db.writeUnlock(); } context.setVisibleStateWaiter(waiter); context.setCommitTs(System.currentTimeMillis()); } private void abortTransactionIgnoreError(long dbId, long txnId, String reason) { try { transactionMgr.abortTransaction(dbId, txnId, reason); } catch (UserException ex) { LOG.error(ex); } } @NotNull List<CompactionRecord> getHistory() { ImmutableList.Builder<CompactionRecord> builder = ImmutableList.builder(); history.forEach(builder::add); failHistory.forEach(builder::add); for (CompactionContext context : runningCompactions.values()) { builder.add(CompactionRecord.build(context)); } return builder.build(); } private static class SynchronizedCircularQueue<E> { private CircularFifoQueue<E> q; SynchronizedCircularQueue(int size) { q = new CircularFifoQueue<>(size); } synchronized void changeMaxSize(int newMaxSize) { if (newMaxSize == q.maxSize()) { return; } CircularFifoQueue<E> newQ = new CircularFifoQueue<>(newMaxSize); for (E e : q) { newQ.offer(e); } q = newQ; } synchronized void offer(E e) { q.offer(e); } synchronized void forEach(Consumer<? super E> consumer) { q.forEach(consumer); } } }
> Now if this deferred creation actually gained something that would be a different story. But it does not here. That I can see anyway. Please correct me if I missed something
private void setUpDeploymentConfiguration() { if (project.getConfigurations().findByName(this.deploymentConfigurationName) == null) { project.getConfigurations().register(this.deploymentConfigurationName, configuration -> { Configuration enforcedPlatforms = this.getPlatformConfiguration(); configuration.extendsFrom(enforcedPlatforms); configuration.getDependencies().addAllLater(project.provider(() -> { ConditionalDependenciesEnabler cdEnabler = new ConditionalDependenciesEnabler(project, mode, enforcedPlatforms); final Collection<ExtensionDependency> allExtensions = cdEnabler.getAllExtensions(); Set<ExtensionDependency> extensions = collectFirstMetQuarkusExtensions(getRawRuntimeConfiguration(), allExtensions); for (ExtensionDependency knownExtension : allExtensions) { if (knownExtension.isConditional()) { extensions.add(knownExtension); } } final Set<ModuleVersionIdentifier> alreadyProcessed = new HashSet<>(extensions.size()); final DependencyHandler dependencies = project.getDependencies(); final Set<Dependency> deploymentDependencies = new HashSet<>(); for (ExtensionDependency extension : extensions) { if (extension instanceof LocalExtensionDependency) { LocalExtensionDependency localExtensionDependency = (LocalExtensionDependency) extension; deploymentDependencies.add( dependencies.project(Collections.singletonMap("path", localExtensionDependency.findDeploymentModulePath()))); } else { if (!alreadyProcessed.add(extension.getExtensionId())) { continue; } deploymentDependencies.add(dependencies.create( extension.getDeploymentModule().getGroupId() + ":" + extension.getDeploymentModule().getArtifactId() + ":" + extension.getDeploymentModule().getVersion())); } } return deploymentDependencies; })); }); } }
project.getConfigurations().register(this.deploymentConfigurationName, configuration -> {
private void setUpDeploymentConfiguration() { if (project.getConfigurations().findByName(this.deploymentConfigurationName) == null) { project.getConfigurations().create(this.deploymentConfigurationName, configuration -> { Configuration enforcedPlatforms = this.getPlatformConfiguration(); configuration.extendsFrom(enforcedPlatforms); configuration.getDependencies().addAllLater(project.provider(() -> { ConditionalDependenciesEnabler cdEnabler = new ConditionalDependenciesEnabler(project, mode, enforcedPlatforms); final Collection<ExtensionDependency> allExtensions = cdEnabler.getAllExtensions(); Set<ExtensionDependency> extensions = collectFirstMetQuarkusExtensions(getRawRuntimeConfiguration(), allExtensions); for (ExtensionDependency knownExtension : allExtensions) { if (knownExtension.isConditional()) { extensions.add(knownExtension); } } final Set<ModuleVersionIdentifier> alreadyProcessed = new HashSet<>(extensions.size()); final DependencyHandler dependencies = project.getDependencies(); final Set<Dependency> deploymentDependencies = new HashSet<>(); for (ExtensionDependency extension : extensions) { if (extension instanceof LocalExtensionDependency) { LocalExtensionDependency localExtensionDependency = (LocalExtensionDependency) extension; deploymentDependencies.add( dependencies.project(Collections.singletonMap("path", localExtensionDependency.findDeploymentModulePath()))); } else { if (!alreadyProcessed.add(extension.getExtensionId())) { continue; } deploymentDependencies.add(dependencies.create( extension.getDeploymentModule().getGroupId() + ":" + extension.getDeploymentModule().getArtifactId() + ":" + extension.getDeploymentModule().getVersion())); } } return deploymentDependencies; })); }); } }
class ApplicationDeploymentClasspathBuilder { private static String getRuntimeConfigName(LaunchMode mode, boolean base) { final StringBuilder sb = new StringBuilder(); sb.append("quarkus"); if (mode == LaunchMode.DEVELOPMENT) { sb.append("Dev"); } else if (mode == LaunchMode.TEST) { sb.append("Test"); } else { sb.append("Prod"); } if (base) { sb.append("Base"); } sb.append("RuntimeClasspathConfiguration"); return sb.toString(); } public static String getBaseRuntimeConfigName(LaunchMode mode) { return getRuntimeConfigName(mode, true); } public static String getFinalRuntimeConfigName(LaunchMode mode) { return getRuntimeConfigName(mode, false); } public static void initConfigurations(Project project) { final ConfigurationContainer configContainer = project.getConfigurations(); configContainer.create(ToolingUtils.DEV_MODE_CONFIGURATION_NAME) .extendsFrom(configContainer.getByName(JavaPlugin.IMPLEMENTATION_CONFIGURATION_NAME)); configContainer.create(ApplicationDeploymentClasspathBuilder.getBaseRuntimeConfigName(LaunchMode.TEST)) .extendsFrom(configContainer.getByName(JavaPlugin.TEST_RUNTIME_CLASSPATH_CONFIGURATION_NAME)); configContainer.create(ApplicationDeploymentClasspathBuilder.getBaseRuntimeConfigName(LaunchMode.NORMAL)) .extendsFrom(configContainer.getByName(JavaPlugin.RUNTIME_CLASSPATH_CONFIGURATION_NAME)); configContainer.create(ApplicationDeploymentClasspathBuilder.getBaseRuntimeConfigName(LaunchMode.DEVELOPMENT)) .extendsFrom( configContainer.getByName(ToolingUtils.DEV_MODE_CONFIGURATION_NAME), configContainer.getByName(JavaPlugin.COMPILE_CLASSPATH_CONFIGURATION_NAME), configContainer.getByName(JavaPlugin.RUNTIME_CLASSPATH_CONFIGURATION_NAME)); configContainer.getByName(JavaPlugin.ANNOTATION_PROCESSOR_CONFIGURATION_NAME) .withDependencies(annotationProcessors -> { Set<ResolvedArtifact> compileClasspathArtifacts = DependencyUtils .duplicateConfiguration(project, configContainer .getByName(JavaPlugin.COMPILE_CLASSPATH_CONFIGURATION_NAME)) 
.getResolvedConfiguration() .getResolvedArtifacts(); for (ResolvedArtifact artifact : compileClasspathArtifacts) { if ("quarkus-panache-common".equals(artifact.getName()) && "io.quarkus".equals(artifact.getModuleVersion().getId().getGroup())) { project.getDependencies().add(JavaPlugin.ANNOTATION_PROCESSOR_CONFIGURATION_NAME, "io.quarkus:quarkus-panache-common:" + artifact.getModuleVersion().getId().getVersion()); } } }); } private final Project project; private final LaunchMode mode; private final String runtimeConfigurationName; private final String platformConfigurationName; private final String deploymentConfigurationName; /** * The platform configuration updates the PlatformImports, but since the PlatformImports don't * have a place to be stored in the project, they're stored here. The way that extensions are * tracked and conditional dependencies needs some attention, which will likely resolve this. */ private static final HashMap<String, PlatformImportsImpl> platformImports = new HashMap<>(); /** * The key used to look up the correct PlatformImports that matches the platformConfigurationName */ private final String platformImportName; public ApplicationDeploymentClasspathBuilder(Project project, LaunchMode mode) { this.project = project; this.mode = mode; this.runtimeConfigurationName = getFinalRuntimeConfigName(mode); this.platformConfigurationName = ToolingUtils.toPlatformConfigurationName(this.runtimeConfigurationName); this.deploymentConfigurationName = ToolingUtils.toDeploymentConfigurationName(this.runtimeConfigurationName); this.platformImportName = project.getPath() + ":" + this.platformConfigurationName; setUpPlatformConfiguration(); setUpRuntimeConfiguration(); setUpDeploymentConfiguration(); } private void setUpPlatformConfiguration() { if (project.getConfigurations().findByName(this.platformConfigurationName) == null) { PlatformImportsImpl platformImports = 
ApplicationDeploymentClasspathBuilder.platformImports.computeIfAbsent(this.platformImportName, (ignored) -> new PlatformImportsImpl()); project.getConfigurations().register(this.platformConfigurationName, configuration -> { configuration.getDependencies().addAllLater(project.provider(() -> project.getConfigurations() .getByName(JavaPlugin.IMPLEMENTATION_CONFIGURATION_NAME) .getAllDependencies() .stream() .filter(dependency -> dependency instanceof ModuleDependency && ToolingUtils.isEnforcedPlatform((ModuleDependency) dependency)) .collect(Collectors.toList()) )); configuration.getResolutionStrategy().eachDependency(d -> { ModuleIdentifier identifier = d.getTarget().getModule(); final String group = identifier.getGroup(); final String name = identifier.getName(); if (name.endsWith(BootstrapConstants.PLATFORM_DESCRIPTOR_ARTIFACT_ID_SUFFIX)) { platformImports.addPlatformDescriptor(group, name, d.getTarget().getVersion(), "json", d.getTarget().getVersion()); } else if (name.endsWith(BootstrapConstants.PLATFORM_PROPERTIES_ARTIFACT_ID_SUFFIX)) { final DefaultDependencyArtifact dep = new DefaultDependencyArtifact(); dep.setExtension("properties"); dep.setType("properties"); dep.setName(name); final DefaultExternalModuleDependency gradleDep = new DefaultExternalModuleDependency( group, name, d.getTarget().getVersion(), null); gradleDep.addArtifact(dep); for (ResolvedArtifact a : project.getConfigurations().detachedConfiguration(gradleDep) .getResolvedConfiguration().getResolvedArtifacts()) { if (a.getName().equals(name)) { try { platformImports.addPlatformProperties(group, name, null, "properties", d.getTarget().getVersion(), a.getFile().toPath()); } catch (AppModelResolverException e) { throw new GradleException("Failed to import platform properties " + a.getFile(), e); } break; } } } }); }); } } private void setUpRuntimeConfiguration() { if (project.getConfigurations().findByName(this.runtimeConfigurationName) == null) { 
project.getConfigurations().register(this.runtimeConfigurationName, configuration -> configuration.extendsFrom( project.getConfigurations() .getByName(ApplicationDeploymentClasspathBuilder.getBaseRuntimeConfigName(mode)))); } } public Configuration getPlatformConfiguration() { return project.getConfigurations().getByName(this.platformConfigurationName); } private Configuration getRawRuntimeConfiguration() { return project.getConfigurations().getByName(this.runtimeConfigurationName); } /** * Forces deployment configuration to resolve to discover conditional dependencies. */ public Configuration getRuntimeConfiguration() { this.getDeploymentConfiguration().resolve(); return project.getConfigurations().getByName(this.runtimeConfigurationName); } public Configuration getDeploymentConfiguration() { return project.getConfigurations().getByName(this.deploymentConfigurationName); } /** * Forces the platform configuration to resolve and then uses that to populate platform imports. */ public PlatformImports getPlatformImports() { this.getPlatformConfiguration().getResolvedConfiguration(); return platformImports.get(this.platformImportName); } private Set<ExtensionDependency> collectFirstMetQuarkusExtensions(Configuration configuration, Collection<ExtensionDependency> knownExtensions) { Set<ExtensionDependency> firstLevelExtensions = new HashSet<>(); Set<ResolvedDependency> firstLevelModuleDependencies = configuration.getResolvedConfiguration() .getFirstLevelModuleDependencies(); Set<String> visitedArtifacts = new HashSet<>(); for (ResolvedDependency firstLevelModuleDependency : firstLevelModuleDependencies) { firstLevelExtensions .addAll(collectQuarkusExtensions(firstLevelModuleDependency, visitedArtifacts, knownExtensions)); } return firstLevelExtensions; } private Set<ExtensionDependency> collectQuarkusExtensions(ResolvedDependency dependency, Set<String> visitedArtifacts, Collection<ExtensionDependency> knownExtensions) { String artifactKey = String.format("%s:%s", 
dependency.getModuleGroup(), dependency.getModuleName()); if (!visitedArtifacts.add(artifactKey)) { return Collections.emptySet(); } Set<ExtensionDependency> extensions = new LinkedHashSet<>(); ExtensionDependency extension = getExtensionOrNull(dependency.getModuleGroup(), dependency.getModuleName(), dependency.getModuleVersion(), knownExtensions); if (extension != null) { extensions.add(extension); } else { for (ResolvedDependency child : dependency.getChildren()) { extensions.addAll(collectQuarkusExtensions(child, visitedArtifacts, knownExtensions)); } } return extensions; } private ExtensionDependency getExtensionOrNull(String group, String artifact, String version, Collection<ExtensionDependency> knownExtensions) { for (ExtensionDependency knownExtension : knownExtensions) { if (group.equals(knownExtension.getGroup()) && artifact.equals(knownExtension.getName()) && version.equals(knownExtension.getVersion())) { return knownExtension; } } return null; } }
class ApplicationDeploymentClasspathBuilder { private static String getRuntimeConfigName(LaunchMode mode, boolean base) { final StringBuilder sb = new StringBuilder(); sb.append("quarkus"); if (mode == LaunchMode.DEVELOPMENT) { sb.append("Dev"); } else if (mode == LaunchMode.TEST) { sb.append("Test"); } else { sb.append("Prod"); } if (base) { sb.append("Base"); } sb.append("RuntimeClasspathConfiguration"); return sb.toString(); } public static String getBaseRuntimeConfigName(LaunchMode mode) { return getRuntimeConfigName(mode, true); } public static String getFinalRuntimeConfigName(LaunchMode mode) { return getRuntimeConfigName(mode, false); } public static void initConfigurations(Project project) { final ConfigurationContainer configContainer = project.getConfigurations(); configContainer.create(ToolingUtils.DEV_MODE_CONFIGURATION_NAME) .extendsFrom(configContainer.getByName(JavaPlugin.IMPLEMENTATION_CONFIGURATION_NAME)); configContainer.create(ApplicationDeploymentClasspathBuilder.getBaseRuntimeConfigName(LaunchMode.TEST)) .extendsFrom(configContainer.getByName(JavaPlugin.TEST_RUNTIME_CLASSPATH_CONFIGURATION_NAME)); configContainer.create(ApplicationDeploymentClasspathBuilder.getBaseRuntimeConfigName(LaunchMode.NORMAL)) .extendsFrom(configContainer.getByName(JavaPlugin.RUNTIME_CLASSPATH_CONFIGURATION_NAME)); configContainer.create(ApplicationDeploymentClasspathBuilder.getBaseRuntimeConfigName(LaunchMode.DEVELOPMENT)) .extendsFrom( configContainer.getByName(ToolingUtils.DEV_MODE_CONFIGURATION_NAME), configContainer.getByName(JavaPlugin.COMPILE_CLASSPATH_CONFIGURATION_NAME), configContainer.getByName(JavaPlugin.RUNTIME_CLASSPATH_CONFIGURATION_NAME)); configContainer.getByName(JavaPlugin.ANNOTATION_PROCESSOR_CONFIGURATION_NAME) .withDependencies(annotationProcessors -> { Set<ResolvedArtifact> compileClasspathArtifacts = DependencyUtils .duplicateConfiguration(project, configContainer .getByName(JavaPlugin.COMPILE_CLASSPATH_CONFIGURATION_NAME)) 
.getResolvedConfiguration() .getResolvedArtifacts(); for (ResolvedArtifact artifact : compileClasspathArtifacts) { if ("quarkus-panache-common".equals(artifact.getName()) && "io.quarkus".equals(artifact.getModuleVersion().getId().getGroup())) { project.getDependencies().add(JavaPlugin.ANNOTATION_PROCESSOR_CONFIGURATION_NAME, "io.quarkus:quarkus-panache-common:" + artifact.getModuleVersion().getId().getVersion()); } } }); } private final Project project; private final LaunchMode mode; private final String runtimeConfigurationName; private final String platformConfigurationName; private final String deploymentConfigurationName; /** * The platform configuration updates the PlatformImports, but since the PlatformImports don't * have a place to be stored in the project, they're stored here. The way that extensions are * tracked and conditional dependencies needs some attention, which will likely resolve this. */ private static final HashMap<String, PlatformImportsImpl> platformImports = new HashMap<>(); /** * The key used to look up the correct PlatformImports that matches the platformConfigurationName */ private final String platformImportName; public ApplicationDeploymentClasspathBuilder(Project project, LaunchMode mode) { this.project = project; this.mode = mode; this.runtimeConfigurationName = getFinalRuntimeConfigName(mode); this.platformConfigurationName = ToolingUtils.toPlatformConfigurationName(this.runtimeConfigurationName); this.deploymentConfigurationName = ToolingUtils.toDeploymentConfigurationName(this.runtimeConfigurationName); this.platformImportName = project.getPath() + ":" + this.platformConfigurationName; setUpPlatformConfiguration(); setUpRuntimeConfiguration(); setUpDeploymentConfiguration(); } private void setUpPlatformConfiguration() { if (project.getConfigurations().findByName(this.platformConfigurationName) == null) { PlatformImportsImpl platformImports = ApplicationDeploymentClasspathBuilder.platformImports 
.computeIfAbsent(this.platformImportName, (ignored) -> new PlatformImportsImpl()); project.getConfigurations().create(this.platformConfigurationName, configuration -> { configuration.getDependencies().addAllLater(project.provider(() -> project.getConfigurations() .getByName(JavaPlugin.IMPLEMENTATION_CONFIGURATION_NAME) .getAllDependencies() .stream() .filter(dependency -> dependency instanceof ModuleDependency && ToolingUtils.isEnforcedPlatform((ModuleDependency) dependency)) .collect(Collectors.toList()))); configuration.getResolutionStrategy().eachDependency(d -> { ModuleIdentifier identifier = d.getTarget().getModule(); final String group = identifier.getGroup(); final String name = identifier.getName(); if (name.endsWith(BootstrapConstants.PLATFORM_DESCRIPTOR_ARTIFACT_ID_SUFFIX)) { platformImports.addPlatformDescriptor(group, name, d.getTarget().getVersion(), "json", d.getTarget().getVersion()); } else if (name.endsWith(BootstrapConstants.PLATFORM_PROPERTIES_ARTIFACT_ID_SUFFIX)) { final DefaultDependencyArtifact dep = new DefaultDependencyArtifact(); dep.setExtension("properties"); dep.setType("properties"); dep.setName(name); final DefaultExternalModuleDependency gradleDep = new DefaultExternalModuleDependency( group, name, d.getTarget().getVersion(), null); gradleDep.addArtifact(dep); for (ResolvedArtifact a : project.getConfigurations().detachedConfiguration(gradleDep) .getResolvedConfiguration().getResolvedArtifacts()) { if (a.getName().equals(name)) { try { platformImports.addPlatformProperties(group, name, null, "properties", d.getTarget().getVersion(), a.getFile().toPath()); } catch (AppModelResolverException e) { throw new GradleException("Failed to import platform properties " + a.getFile(), e); } break; } } } }); }); } } private void setUpRuntimeConfiguration() { if (project.getConfigurations().findByName(this.runtimeConfigurationName) == null) { project.getConfigurations().create(this.runtimeConfigurationName, configuration -> 
configuration.extendsFrom( project.getConfigurations() .getByName(ApplicationDeploymentClasspathBuilder.getBaseRuntimeConfigName(mode)))); } } public Configuration getPlatformConfiguration() { return project.getConfigurations().getByName(this.platformConfigurationName); } private Configuration getRawRuntimeConfiguration() { return project.getConfigurations().getByName(this.runtimeConfigurationName); } /** * Forces deployment configuration to resolve to discover conditional dependencies. */ public Configuration getRuntimeConfiguration() { this.getDeploymentConfiguration().resolve(); return project.getConfigurations().getByName(this.runtimeConfigurationName); } public Configuration getDeploymentConfiguration() { return project.getConfigurations().getByName(this.deploymentConfigurationName); } /** * Forces the platform configuration to resolve and then uses that to populate platform imports. */ public PlatformImports getPlatformImports() { this.getPlatformConfiguration().getResolvedConfiguration(); return platformImports.get(this.platformImportName); } private Set<ExtensionDependency> collectFirstMetQuarkusExtensions(Configuration configuration, Collection<ExtensionDependency> knownExtensions) { Set<ExtensionDependency> firstLevelExtensions = new HashSet<>(); Set<ResolvedDependency> firstLevelModuleDependencies = configuration.getResolvedConfiguration() .getFirstLevelModuleDependencies(); Set<String> visitedArtifacts = new HashSet<>(); for (ResolvedDependency firstLevelModuleDependency : firstLevelModuleDependencies) { firstLevelExtensions .addAll(collectQuarkusExtensions(firstLevelModuleDependency, visitedArtifacts, knownExtensions)); } return firstLevelExtensions; } private Set<ExtensionDependency> collectQuarkusExtensions(ResolvedDependency dependency, Set<String> visitedArtifacts, Collection<ExtensionDependency> knownExtensions) { String artifactKey = String.format("%s:%s", dependency.getModuleGroup(), dependency.getModuleName()); if 
(!visitedArtifacts.add(artifactKey)) { return Collections.emptySet(); } Set<ExtensionDependency> extensions = new LinkedHashSet<>(); ExtensionDependency extension = getExtensionOrNull(dependency.getModuleGroup(), dependency.getModuleName(), dependency.getModuleVersion(), knownExtensions); if (extension != null) { extensions.add(extension); } else { for (ResolvedDependency child : dependency.getChildren()) { extensions.addAll(collectQuarkusExtensions(child, visitedArtifacts, knownExtensions)); } } return extensions; } private ExtensionDependency getExtensionOrNull(String group, String artifact, String version, Collection<ExtensionDependency> knownExtensions) { for (ExtensionDependency knownExtension : knownExtensions) { if (group.equals(knownExtension.getGroup()) && artifact.equals(knownExtension.getName()) && version.equals(knownExtension.getVersion())) { return knownExtension; } } return null; } }
Shall we add a test case for this change?
public void onMessage(HttpCarbonMessage inboundMessage) { try { HttpResource httpResource; if (accessed(inboundMessage)) { httpResource = (HttpResource) inboundMessage.getProperty(HTTP_RESOURCE); extractPropertiesAndStartResourceExecution(inboundMessage, httpResource); return; } httpResource = HttpDispatcher.findResource(httpServicesRegistry, inboundMessage); if (HttpDispatcher.shouldDiffer(httpResource)) { inboundMessage.setProperty(HTTP_RESOURCE, httpResource); inboundMessage.removeInboundContentListener(); return; } if (httpResource != null) { extractPropertiesAndStartResourceExecution(inboundMessage, httpResource); } } catch (BallerinaException ex) { try { HttpUtil.handleFailure(inboundMessage, new BallerinaConnectorException(ex.getMessage(), ex.getCause())); } catch (Exception e) { log.error("Cannot handle error using the error handler for: " + e.getMessage(), e); } } }
inboundMessage.removeInboundContentListener();
public void onMessage(HttpCarbonMessage inboundMessage) { try { HttpResource httpResource; if (accessed(inboundMessage)) { httpResource = (HttpResource) inboundMessage.getProperty(HTTP_RESOURCE); extractPropertiesAndStartResourceExecution(inboundMessage, httpResource); return; } httpResource = HttpDispatcher.findResource(httpServicesRegistry, inboundMessage); if (HttpDispatcher.shouldDiffer(httpResource)) { inboundMessage.setProperty(HTTP_RESOURCE, httpResource); inboundMessage.removeInboundContentListener(); return; } if (httpResource != null) { extractPropertiesAndStartResourceExecution(inboundMessage, httpResource); } } catch (BallerinaException ex) { try { HttpUtil.handleFailure(inboundMessage, new BallerinaConnectorException(ex.getMessage(), ex.getCause())); } catch (Exception e) { log.error("Cannot handle error using the error handler for: " + e.getMessage(), e); } } }
class BallerinaHTTPConnectorListener implements HttpConnectorListener { private static final Logger log = LoggerFactory.getLogger(BallerinaHTTPConnectorListener.class); protected static final String HTTP_RESOURCE = "httpResource"; private final HTTPServicesRegistry httpServicesRegistry; protected final Struct endpointConfig; public BallerinaHTTPConnectorListener(HTTPServicesRegistry httpServicesRegistry, Struct endpointConfig) { this.httpServicesRegistry = httpServicesRegistry; this.endpointConfig = endpointConfig; } @Override @Override public void onError(Throwable throwable) { log.error("Error in HTTP server connector: {}", throwable.getMessage()); } protected void extractPropertiesAndStartResourceExecution(HttpCarbonMessage inboundMessage, HttpResource httpResource) { boolean isTransactionInfectable = httpResource.isTransactionInfectable(); boolean isInterruptible = httpResource.isInterruptible(); Map<String, Object> properties = collectRequestProperties(inboundMessage, isTransactionInfectable, isInterruptible); BValue[] signatureParams = HttpDispatcher.getSignatureParameters(httpResource, inboundMessage, endpointConfig); Resource balResource = httpResource.getBalResource(); Optional<ObserverContext> observerContext = ObservabilityUtils.startServerObservation(SERVER_CONNECTOR_HTTP, httpResource.getParentService().getBalService().getServiceInfo(), balResource.getName(), null); observerContext.ifPresent(ctx -> { Map<String, String> httpHeaders = new HashMap<>(); inboundMessage.getHeaders().forEach(entry -> httpHeaders.put(entry.getKey(), entry.getValue())); ctx.addProperty(PROPERTY_TRACE_PROPERTIES, httpHeaders); ctx.addTag(TAG_KEY_HTTP_METHOD, (String) inboundMessage.getProperty(HttpConstants.HTTP_METHOD)); ctx.addTag(TAG_KEY_PROTOCOL, (String) inboundMessage.getProperty(HttpConstants.PROTOCOL)); ctx.addTag(TAG_KEY_HTTP_URL, (String) inboundMessage.getProperty(HttpConstants.REQUEST_URL)); }); CallableUnitCallback callback = new 
HttpCallableUnitCallback(inboundMessage); Executor.submit(balResource, callback, properties, observerContext.orElse(null), signatureParams); } protected boolean accessed(HttpCarbonMessage inboundMessage) { return inboundMessage.getProperty(HTTP_RESOURCE) != null; } private Map<String, Object> collectRequestProperties(HttpCarbonMessage inboundMessage, boolean isInfectable, boolean isInterruptible) { Map<String, Object> properties = new HashMap<>(); if (inboundMessage.getProperty(HttpConstants.SRC_HANDLER) != null) { Object srcHandler = inboundMessage.getProperty(HttpConstants.SRC_HANDLER); properties.put(HttpConstants.SRC_HANDLER, srcHandler); } String txnId = inboundMessage.getHeader(HttpConstants.HEADER_X_XID); String registerAtUrl = inboundMessage.getHeader(HttpConstants.HEADER_X_REGISTER_AT_URL); if (!isInfectable && txnId != null) { throw new BallerinaConnectorException("Cannot create transaction context: " + "resource is not transactionInfectable"); } if (isInfectable && txnId != null && registerAtUrl != null) { properties.put(Constants.GLOBAL_TRANSACTION_ID, txnId); properties.put(Constants.TRANSACTION_URL, registerAtUrl); return properties; } properties.put(HttpConstants.REMOTE_ADDRESS, inboundMessage.getProperty(HttpConstants.REMOTE_ADDRESS)); properties.put(HttpConstants.ORIGIN_HOST, inboundMessage.getHeader(HttpConstants.ORIGIN_HOST)); properties.put(HttpConstants.POOLED_BYTE_BUFFER_FACTORY, inboundMessage.getHeader(HttpConstants.POOLED_BYTE_BUFFER_FACTORY)); properties.put(Constants.IS_INTERRUPTIBLE, isInterruptible); return properties; } }
class BallerinaHTTPConnectorListener implements HttpConnectorListener { private static final Logger log = LoggerFactory.getLogger(BallerinaHTTPConnectorListener.class); protected static final String HTTP_RESOURCE = "httpResource"; private final HTTPServicesRegistry httpServicesRegistry; protected final Struct endpointConfig; public BallerinaHTTPConnectorListener(HTTPServicesRegistry httpServicesRegistry, Struct endpointConfig) { this.httpServicesRegistry = httpServicesRegistry; this.endpointConfig = endpointConfig; } @Override @Override public void onError(Throwable throwable) { log.error("Error in HTTP server connector: {}", throwable.getMessage()); } protected void extractPropertiesAndStartResourceExecution(HttpCarbonMessage inboundMessage, HttpResource httpResource) { boolean isTransactionInfectable = httpResource.isTransactionInfectable(); boolean isInterruptible = httpResource.isInterruptible(); Map<String, Object> properties = collectRequestProperties(inboundMessage, isTransactionInfectable, isInterruptible); BValue[] signatureParams = HttpDispatcher.getSignatureParameters(httpResource, inboundMessage, endpointConfig); Resource balResource = httpResource.getBalResource(); Optional<ObserverContext> observerContext = ObservabilityUtils.startServerObservation(SERVER_CONNECTOR_HTTP, httpResource.getParentService().getBalService().getServiceInfo(), balResource.getName(), null); observerContext.ifPresent(ctx -> { Map<String, String> httpHeaders = new HashMap<>(); inboundMessage.getHeaders().forEach(entry -> httpHeaders.put(entry.getKey(), entry.getValue())); ctx.addProperty(PROPERTY_TRACE_PROPERTIES, httpHeaders); ctx.addTag(TAG_KEY_HTTP_METHOD, (String) inboundMessage.getProperty(HttpConstants.HTTP_METHOD)); ctx.addTag(TAG_KEY_PROTOCOL, (String) inboundMessage.getProperty(HttpConstants.PROTOCOL)); ctx.addTag(TAG_KEY_HTTP_URL, (String) inboundMessage.getProperty(HttpConstants.REQUEST_URL)); }); CallableUnitCallback callback = new 
HttpCallableUnitCallback(inboundMessage); Executor.submit(balResource, callback, properties, observerContext.orElse(null), signatureParams); } protected boolean accessed(HttpCarbonMessage inboundMessage) { return inboundMessage.getProperty(HTTP_RESOURCE) != null; } private Map<String, Object> collectRequestProperties(HttpCarbonMessage inboundMessage, boolean isInfectable, boolean isInterruptible) { Map<String, Object> properties = new HashMap<>(); if (inboundMessage.getProperty(HttpConstants.SRC_HANDLER) != null) { Object srcHandler = inboundMessage.getProperty(HttpConstants.SRC_HANDLER); properties.put(HttpConstants.SRC_HANDLER, srcHandler); } String txnId = inboundMessage.getHeader(HttpConstants.HEADER_X_XID); String registerAtUrl = inboundMessage.getHeader(HttpConstants.HEADER_X_REGISTER_AT_URL); if (!isInfectable && txnId != null) { throw new BallerinaConnectorException("Cannot create transaction context: " + "resource is not transactionInfectable"); } if (isInfectable && txnId != null && registerAtUrl != null) { properties.put(Constants.GLOBAL_TRANSACTION_ID, txnId); properties.put(Constants.TRANSACTION_URL, registerAtUrl); return properties; } properties.put(HttpConstants.REMOTE_ADDRESS, inboundMessage.getProperty(HttpConstants.REMOTE_ADDRESS)); properties.put(HttpConstants.ORIGIN_HOST, inboundMessage.getHeader(HttpConstants.ORIGIN_HOST)); properties.put(HttpConstants.POOLED_BYTE_BUFFER_FACTORY, inboundMessage.getHeader(HttpConstants.POOLED_BYTE_BUFFER_FACTORY)); properties.put(Constants.IS_INTERRUPTIBLE, isInterruptible); return properties; } }
Is Funqy layered on top of this? One thing to consider is that Funqy needs to be consistent across all cloud platform on what JSON marshalling layer it uses. We can't use GSON for Funqy GCF and Jackson for Funqy Lambda.
public void accept(String event, Context context) throws Exception { if (!started) { throw new IOException(deploymentStatus); } if ((delegate == null && rawDelegate == null) || (delegate != null && rawDelegate != null)) { throw new IOException("We didn't found any BackgroundFunction or RawBackgroundFunction to run " + "(or there is multiple one and none selected inside your application.properties)"); } if (rawDelegate != null) { rawDelegate.accept(event, context); } else { ObjectMapper mapper = new ObjectMapper(); Object eventObj = mapper.readValue(event, parameterType); delegate.accept(eventObj, context); } }
Object eventObj = mapper.readValue(event, parameterType);
public void accept(String event, Context context) throws Exception { if (!started) { throw new IOException(deploymentStatus); } if ((delegate == null && rawDelegate == null) || (delegate != null && rawDelegate != null)) { throw new IOException("We didn't found any BackgroundFunction or RawBackgroundFunction to run " + "(or there is multiple one and none selected inside your application.properties)"); } if (rawDelegate != null) { rawDelegate.accept(event, context); } else { Gson gson = new Gson(); try { Object eventObj = gson.fromJson(event, parameterType); delegate.accept(eventObj, context); } catch (JsonParseException e) { throw new RuntimeException("Could not parse received event payload into type " + parameterType.getCanonicalName(), e); } } }
class QuarkusBackgroundFunction implements RawBackgroundFunction { protected static final String deploymentStatus; protected static boolean started = false; private static volatile BackgroundFunction delegate; private static volatile Class<?> parameterType; private static volatile RawBackgroundFunction rawDelegate; static { StringWriter error = new StringWriter(); PrintWriter errorWriter = new PrintWriter(error, true); if (Application.currentApplication() == null) { ClassLoader currentCl = Thread.currentThread().getContextClassLoader(); try { Thread.currentThread().setContextClassLoader(QuarkusBackgroundFunction.class.getClassLoader()); Class<?> appClass = Class.forName("io.quarkus.runner.ApplicationImpl"); String[] args = {}; Application app = (Application) appClass.getConstructor().newInstance(); app.start(args); errorWriter.println("Quarkus bootstrapped successfully."); started = true; } catch (Exception ex) { errorWriter.println("Quarkus bootstrap failed."); ex.printStackTrace(errorWriter); } finally { Thread.currentThread().setContextClassLoader(currentCl); } } else { errorWriter.println("Quarkus bootstrapped successfully."); started = true; } deploymentStatus = error.toString(); } static void setDelegates(String selectedDelegate, String selectedRawDelegate) { if (selectedDelegate != null) { try { Class<?> clazz = Class.forName(selectedDelegate); for (Method method : clazz.getDeclaredMethods()) { if (method.getName().equals("accept")) { if (method.getParameterTypes()[0] != Object.class) { parameterType = method.getParameterTypes()[0]; } } } delegate = (BackgroundFunction) Arc.container().instance(clazz).get(); } catch (ClassNotFoundException e) { throw new RuntimeException(e); } } if (selectedRawDelegate != null) { try { Class<?> clazz = Class.forName(selectedRawDelegate); rawDelegate = (RawBackgroundFunction) Arc.container().instance(clazz).get(); } catch (ClassNotFoundException e) { throw new RuntimeException(e); } } } @Override }
class QuarkusBackgroundFunction implements RawBackgroundFunction { protected static final String deploymentStatus; protected static boolean started = false; private static volatile BackgroundFunction delegate; private static volatile Class<?> parameterType; private static volatile RawBackgroundFunction rawDelegate; static { StringWriter error = new StringWriter(); PrintWriter errorWriter = new PrintWriter(error, true); if (Application.currentApplication() == null) { ClassLoader currentCl = Thread.currentThread().getContextClassLoader(); try { Thread.currentThread().setContextClassLoader(QuarkusBackgroundFunction.class.getClassLoader()); Class<?> appClass = Class.forName("io.quarkus.runner.ApplicationImpl"); String[] args = {}; Application app = (Application) appClass.getConstructor().newInstance(); app.start(args); errorWriter.println("Quarkus bootstrapped successfully."); started = true; } catch (Exception ex) { errorWriter.println("Quarkus bootstrap failed."); ex.printStackTrace(errorWriter); } finally { Thread.currentThread().setContextClassLoader(currentCl); } } else { errorWriter.println("Quarkus bootstrapped successfully."); started = true; } deploymentStatus = error.toString(); } static void setDelegates(String selectedDelegate, String selectedRawDelegate) { if (selectedDelegate != null) { try { Class<?> clazz = Class.forName(selectedDelegate); for (Method method : clazz.getDeclaredMethods()) { if (method.getName().equals("accept")) { if (method.getParameterTypes()[0] != Object.class) { parameterType = method.getParameterTypes()[0]; } } } delegate = (BackgroundFunction) Arc.container().instance(clazz).get(); } catch (ClassNotFoundException e) { throw new RuntimeException(e); } } if (selectedRawDelegate != null) { try { Class<?> clazz = Class.forName(selectedRawDelegate); rawDelegate = (RawBackgroundFunction) Arc.container().instance(clazz).get(); } catch (ClassNotFoundException e) { throw new RuntimeException(e); } } } @Override }
Yes. I ran the live test to record the response but found it is no longer a valid test.
static AnalyzeHealthcareEntitiesResult getRecognizeHealthcareEntitiesResult2() { TextDocumentStatistics textDocumentStatistics = new TextDocumentStatistics(156, 1); final HealthcareEntity healthcareEntity1 = new HealthcareEntity(); HealthcareEntityPropertiesHelper.setText(healthcareEntity1, "six minutes"); HealthcareEntityPropertiesHelper.setCategory(healthcareEntity1, HealthcareEntityCategory.TIME); HealthcareEntityPropertiesHelper.setConfidenceScore(healthcareEntity1, 0.87); HealthcareEntityPropertiesHelper.setOffset(healthcareEntity1, 21); HealthcareEntityPropertiesHelper.setLength(healthcareEntity1, 11); HealthcareEntityPropertiesHelper.setDataSources(healthcareEntity1, IterableStream.of(Collections.emptyList())); final HealthcareEntity healthcareEntity2 = new HealthcareEntity(); HealthcareEntityPropertiesHelper.setText(healthcareEntity2, "minimal"); HealthcareEntityPropertiesHelper.setCategory(healthcareEntity2, HealthcareEntityCategory.CONDITION_QUALIFIER); HealthcareEntityPropertiesHelper.setConfidenceScore(healthcareEntity2, 1.0); HealthcareEntityPropertiesHelper.setOffset(healthcareEntity2, 38); HealthcareEntityPropertiesHelper.setLength(healthcareEntity2, 7); HealthcareEntityPropertiesHelper.setDataSources(healthcareEntity2, IterableStream.of(Collections.emptyList())); final HealthcareEntity healthcareEntity3 = new HealthcareEntity(); HealthcareEntityPropertiesHelper.setText(healthcareEntity3, "ST depressions in the anterior lateral leads"); HealthcareEntityPropertiesHelper.setCategory(healthcareEntity3, HealthcareEntityCategory.SYMPTOM_OR_SIGN); HealthcareEntityPropertiesHelper.setConfidenceScore(healthcareEntity3, 1.0); HealthcareEntityPropertiesHelper.setOffset(healthcareEntity3, 46); HealthcareEntityPropertiesHelper.setLength(healthcareEntity3, 44); HealthcareEntityPropertiesHelper.setDataSources(healthcareEntity3, IterableStream.of(Collections.emptyList())); final HealthcareEntity healthcareEntity5 = new HealthcareEntity(); 
HealthcareEntityPropertiesHelper.setText(healthcareEntity5, "fatigue"); HealthcareEntityPropertiesHelper.setNormalizedText(healthcareEntity5, "Fatigue"); HealthcareEntityPropertiesHelper.setCategory(healthcareEntity5, HealthcareEntityCategory.SYMPTOM_OR_SIGN); HealthcareEntityPropertiesHelper.setConfidenceScore(healthcareEntity5, 1.0); HealthcareEntityPropertiesHelper.setOffset(healthcareEntity5, 108); HealthcareEntityPropertiesHelper.setLength(healthcareEntity5, 7); HealthcareEntityPropertiesHelper.setDataSources(healthcareEntity5, IterableStream.of(Collections.emptyList())); final HealthcareEntity healthcareEntity6 = new HealthcareEntity(); HealthcareEntityPropertiesHelper.setText(healthcareEntity6, "wrist pain"); HealthcareEntityPropertiesHelper.setNormalizedText(healthcareEntity6, "Pain in wrist"); HealthcareEntityPropertiesHelper.setCategory(healthcareEntity6, HealthcareEntityCategory.SYMPTOM_OR_SIGN); HealthcareEntityPropertiesHelper.setConfidenceScore(healthcareEntity6, 1.0); HealthcareEntityPropertiesHelper.setOffset(healthcareEntity6, 120); HealthcareEntityPropertiesHelper.setLength(healthcareEntity6, 10); HealthcareEntityPropertiesHelper.setDataSources(healthcareEntity6, IterableStream.of(Collections.emptyList())); final HealthcareEntity healthcareEntity7 = new HealthcareEntity(); HealthcareEntityPropertiesHelper.setText(healthcareEntity7, "anginal equivalent"); HealthcareEntityPropertiesHelper.setNormalizedText(healthcareEntity7, "Anginal equivalent"); HealthcareEntityPropertiesHelper.setCategory(healthcareEntity7, HealthcareEntityCategory.SYMPTOM_OR_SIGN); HealthcareEntityPropertiesHelper.setConfidenceScore(healthcareEntity7, 1.0); HealthcareEntityPropertiesHelper.setOffset(healthcareEntity7, 137); HealthcareEntityPropertiesHelper.setLength(healthcareEntity7, 18); HealthcareEntityPropertiesHelper.setDataSources(healthcareEntity7, IterableStream.of(Collections.emptyList())); final AnalyzeHealthcareEntitiesResult healthcareEntitiesResult = new 
AnalyzeHealthcareEntitiesResult("1", textDocumentStatistics, null); AnalyzeHealthcareEntitiesResultPropertiesHelper.setEntities(healthcareEntitiesResult, new IterableStream<>(asList(healthcareEntity1, healthcareEntity2, healthcareEntity3, healthcareEntity5, healthcareEntity6, healthcareEntity7))); final HealthcareEntityRelation healthcareEntityRelation1 = new HealthcareEntityRelation(); final HealthcareEntityRelationRole role1 = new HealthcareEntityRelationRole(); HealthcareEntityRelationRolePropertiesHelper.setName(role1, "Time"); HealthcareEntityRelationRolePropertiesHelper.setEntity(role1, healthcareEntity1); final HealthcareEntityRelationRole role2 = new HealthcareEntityRelationRole(); HealthcareEntityRelationRolePropertiesHelper.setName(role2, "Condition"); HealthcareEntityRelationRolePropertiesHelper.setEntity(role2, healthcareEntity3); HealthcareEntityRelationPropertiesHelper.setRelationType(healthcareEntityRelation1, HealthcareEntityRelationType.TIME_OF_CONDITION); HealthcareEntityRelationPropertiesHelper.setRoles(healthcareEntityRelation1, IterableStream.of(asList(role1, role2))); final HealthcareEntityRelation healthcareEntityRelation2 = new HealthcareEntityRelation(); final HealthcareEntityRelationRole role3 = new HealthcareEntityRelationRole(); HealthcareEntityRelationRolePropertiesHelper.setName(role3, "Qualifier"); HealthcareEntityRelationRolePropertiesHelper.setEntity(role3, healthcareEntity2); HealthcareEntityRelationPropertiesHelper.setRelationType(healthcareEntityRelation2, HealthcareEntityRelationType.QUALIFIER_OF_CONDITION); HealthcareEntityRelationPropertiesHelper.setRoles(healthcareEntityRelation2, IterableStream.of(asList(role3, role2))); AnalyzeHealthcareEntitiesResultPropertiesHelper.setEntityRelations(healthcareEntitiesResult, IterableStream.of(asList(healthcareEntityRelation1, healthcareEntityRelation2))); return healthcareEntitiesResult; }
HealthcareEntityPropertiesHelper.setText(healthcareEntity3, "ST depressions in the anterior lateral leads");
static AnalyzeHealthcareEntitiesResult getRecognizeHealthcareEntitiesResult2() { TextDocumentStatistics textDocumentStatistics = new TextDocumentStatistics(156, 1); final HealthcareEntity healthcareEntity1 = new HealthcareEntity(); HealthcareEntityPropertiesHelper.setText(healthcareEntity1, "six minutes"); HealthcareEntityPropertiesHelper.setCategory(healthcareEntity1, HealthcareEntityCategory.TIME); HealthcareEntityPropertiesHelper.setConfidenceScore(healthcareEntity1, 0.87); HealthcareEntityPropertiesHelper.setOffset(healthcareEntity1, 21); HealthcareEntityPropertiesHelper.setLength(healthcareEntity1, 11); HealthcareEntityPropertiesHelper.setDataSources(healthcareEntity1, IterableStream.of(Collections.emptyList())); final HealthcareEntity healthcareEntity2 = new HealthcareEntity(); HealthcareEntityPropertiesHelper.setText(healthcareEntity2, "minimal"); HealthcareEntityPropertiesHelper.setCategory(healthcareEntity2, HealthcareEntityCategory.CONDITION_QUALIFIER); HealthcareEntityPropertiesHelper.setConfidenceScore(healthcareEntity2, 1.0); HealthcareEntityPropertiesHelper.setOffset(healthcareEntity2, 38); HealthcareEntityPropertiesHelper.setLength(healthcareEntity2, 7); HealthcareEntityPropertiesHelper.setDataSources(healthcareEntity2, IterableStream.of(Collections.emptyList())); final HealthcareEntity healthcareEntity3 = new HealthcareEntity(); HealthcareEntityPropertiesHelper.setText(healthcareEntity3, "ST depressions in the anterior lateral leads"); HealthcareEntityPropertiesHelper.setCategory(healthcareEntity3, HealthcareEntityCategory.SYMPTOM_OR_SIGN); HealthcareEntityPropertiesHelper.setConfidenceScore(healthcareEntity3, 1.0); HealthcareEntityPropertiesHelper.setOffset(healthcareEntity3, 46); HealthcareEntityPropertiesHelper.setLength(healthcareEntity3, 44); HealthcareEntityPropertiesHelper.setDataSources(healthcareEntity3, IterableStream.of(Collections.emptyList())); final HealthcareEntity healthcareEntity5 = new HealthcareEntity(); 
HealthcareEntityPropertiesHelper.setText(healthcareEntity5, "fatigue"); HealthcareEntityPropertiesHelper.setNormalizedText(healthcareEntity5, "Fatigue"); HealthcareEntityPropertiesHelper.setCategory(healthcareEntity5, HealthcareEntityCategory.SYMPTOM_OR_SIGN); HealthcareEntityPropertiesHelper.setConfidenceScore(healthcareEntity5, 1.0); HealthcareEntityPropertiesHelper.setOffset(healthcareEntity5, 108); HealthcareEntityPropertiesHelper.setLength(healthcareEntity5, 7); HealthcareEntityPropertiesHelper.setDataSources(healthcareEntity5, IterableStream.of(Collections.emptyList())); final HealthcareEntity healthcareEntity6 = new HealthcareEntity(); HealthcareEntityPropertiesHelper.setText(healthcareEntity6, "wrist pain"); HealthcareEntityPropertiesHelper.setNormalizedText(healthcareEntity6, "Pain in wrist"); HealthcareEntityPropertiesHelper.setCategory(healthcareEntity6, HealthcareEntityCategory.SYMPTOM_OR_SIGN); HealthcareEntityPropertiesHelper.setConfidenceScore(healthcareEntity6, 1.0); HealthcareEntityPropertiesHelper.setOffset(healthcareEntity6, 120); HealthcareEntityPropertiesHelper.setLength(healthcareEntity6, 10); HealthcareEntityPropertiesHelper.setDataSources(healthcareEntity6, IterableStream.of(Collections.emptyList())); final HealthcareEntity healthcareEntity7 = new HealthcareEntity(); HealthcareEntityPropertiesHelper.setText(healthcareEntity7, "anginal equivalent"); HealthcareEntityPropertiesHelper.setNormalizedText(healthcareEntity7, "Anginal equivalent"); HealthcareEntityPropertiesHelper.setCategory(healthcareEntity7, HealthcareEntityCategory.SYMPTOM_OR_SIGN); HealthcareEntityPropertiesHelper.setConfidenceScore(healthcareEntity7, 1.0); HealthcareEntityPropertiesHelper.setOffset(healthcareEntity7, 137); HealthcareEntityPropertiesHelper.setLength(healthcareEntity7, 18); HealthcareEntityPropertiesHelper.setDataSources(healthcareEntity7, IterableStream.of(Collections.emptyList())); final AnalyzeHealthcareEntitiesResult healthcareEntitiesResult = new 
AnalyzeHealthcareEntitiesResult("1", textDocumentStatistics, null); AnalyzeHealthcareEntitiesResultPropertiesHelper.setEntities(healthcareEntitiesResult, new IterableStream<>(asList(healthcareEntity1, healthcareEntity2, healthcareEntity3, healthcareEntity5, healthcareEntity6, healthcareEntity7))); final HealthcareEntityRelation healthcareEntityRelation1 = new HealthcareEntityRelation(); final HealthcareEntityRelationRole role1 = new HealthcareEntityRelationRole(); HealthcareEntityRelationRolePropertiesHelper.setName(role1, "Time"); HealthcareEntityRelationRolePropertiesHelper.setEntity(role1, healthcareEntity1); final HealthcareEntityRelationRole role2 = new HealthcareEntityRelationRole(); HealthcareEntityRelationRolePropertiesHelper.setName(role2, "Condition"); HealthcareEntityRelationRolePropertiesHelper.setEntity(role2, healthcareEntity3); HealthcareEntityRelationPropertiesHelper.setRelationType(healthcareEntityRelation1, HealthcareEntityRelationType.TIME_OF_CONDITION); HealthcareEntityRelationPropertiesHelper.setRoles(healthcareEntityRelation1, IterableStream.of(asList(role1, role2))); final HealthcareEntityRelation healthcareEntityRelation2 = new HealthcareEntityRelation(); final HealthcareEntityRelationRole role3 = new HealthcareEntityRelationRole(); HealthcareEntityRelationRolePropertiesHelper.setName(role3, "Qualifier"); HealthcareEntityRelationRolePropertiesHelper.setEntity(role3, healthcareEntity2); HealthcareEntityRelationPropertiesHelper.setRelationType(healthcareEntityRelation2, HealthcareEntityRelationType.QUALIFIER_OF_CONDITION); HealthcareEntityRelationPropertiesHelper.setRoles(healthcareEntityRelation2, IterableStream.of(asList(role3, role2))); AnalyzeHealthcareEntitiesResultPropertiesHelper.setEntityRelations(healthcareEntitiesResult, IterableStream.of(asList(healthcareEntityRelation1, healthcareEntityRelation2))); return healthcareEntitiesResult; }
class TestUtils { private static final String DEFAULT_MODEL_VERSION = "2019-10-01"; static final OffsetDateTime TIME_NOW = OffsetDateTime.now(); static final String INVALID_URL = "htttttttps: static final String VALID_HTTPS_LOCALHOST = "https: static final String FAKE_API_KEY = "1234567890"; static final String AZURE_TEXT_ANALYTICS_API_KEY = "AZURE_TEXT_ANALYTICS_API_KEY"; static final String CUSTOM_ACTION_NAME = "customActionName"; static final List<String> CUSTOM_ENTITIES_INPUT = asList( "David Schmidt, senior vice president--Food Safety, International Food Information Council (IFIC), Washington," + " D.C., discussed the physical activity component."); static final List<String> CUSTOM_SINGLE_CLASSIFICATION = asList( "A recent report by the Government Accountability Office (GAO) found that the dramatic increase in oil" + " and natural gas development on federal lands over the past six years has stretched the staff of " + "the BLM to a point that it has been unable to meet its environmental protection responsibilities."); static final List<String> CUSTOM_MULTI_CLASSIFICATION = asList( "I need a reservation for an indoor restaurant in China. Please don't stop the music. Play music and add" + " it to my playlist"); static final List<String> SUMMARY_INPUTS = asList( "At Microsoft, we have been on a quest to advance AI beyond existing techniques, by taking a more holistic," + " human-centric approach to learning and understanding. As Chief Technology Officer of Azure AI " + "Cognitive Services, I have been working with a team of amazing scientists and engineers to turn this" + " quest into a reality. In my role, I enjoy a unique perspective in viewing the relationship among " + "three attributes of human cognition: monolingual text (X), audio or visual sensory signals, (Y) and" + " multilingual (Z). 
At the intersection of all three, there’s magic—what we call XYZ-code as" + " illustrated in Figure 1—a joint representation to create more powerful AI that can speak, hear, see," + " and understand humans better. We believe XYZ-code will enable us to fulfill our long-term vision:" + " cross-domain transfer learning, spanning modalities and languages. The goal is to have pretrained" + " models that can jointly learn representations to support a broad range of downstream AI tasks, much" + " in the way humans do today. Over the past five years, we have achieved human performance on benchmarks" + " in conversational speech recognition, machine translation, conversational question answering, machine" + " reading comprehension, and image captioning. These five breakthroughs provided us with strong signals" + " toward our more ambitious aspiration to produce a leap in AI capabilities, achieving multisensory and" + " multilingual learning that is closer in line with how humans learn and understand. I believe the joint" + " XYZ-code is a foundational component of this aspiration, if grounded with external knowledge sources" + " in the downstream AI tasks." ); static final List<String> SENTIMENT_INPUTS = asList( "The hotel was dark and unclean. The restaurant had amazing gnocchi.", "The restaurant had amazing gnocchi. The hotel was dark and unclean."); static final List<String> CATEGORIZED_ENTITY_INPUTS = asList( "I had a wonderful trip to Seattle last week.", "I work at Microsoft."); static final List<String> PII_ENTITY_INPUTS = asList( "Microsoft employee with ssn 859-98-0987 is using our awesome API's.", "Your ABA number - 111000025 - is the first 9 digits in the lower left hand corner of your personal check."); static final List<String> LINKED_ENTITY_INPUTS = asList( "I had a wonderful trip to Seattle last week.", "I work at Microsoft."); static final List<String> KEY_PHRASE_INPUTS = asList( "Hello world. 
This is some input text that I love.", "Bonjour tout le monde"); static final String TOO_LONG_INPUT = "Thisisaveryveryverylongtextwhichgoesonforalongtimeandwhichalmostdoesn'tseemtostopatanygivenpointintime.ThereasonforthistestistotryandseewhathappenswhenwesubmitaveryveryverylongtexttoLanguage.Thisshouldworkjustfinebutjustincaseitisalwaysgoodtohaveatestcase.ThisallowsustotestwhathappensifitisnotOK.Ofcourseitisgoingtobeokbutthenagainitisalsobettertobesure!"; static final List<String> KEY_PHRASE_FRENCH_INPUTS = asList( "Bonjour tout le monde.", "Je m'appelle Mondly."); static final List<String> DETECT_LANGUAGE_INPUTS = asList( "This is written in English", "Este es un documento escrito en Español.", "~@!~:)"); static final String PII_ENTITY_OFFSET_INPUT = "SSN: 859-98-0987"; static final String SENTIMENT_OFFSET_INPUT = "The hotel was unclean."; static final String HEALTHCARE_ENTITY_OFFSET_INPUT = "The patient is a 54-year-old"; static final List<String> HEALTHCARE_INPUTS = asList( "The patient is a 54-year-old gentleman with a history of progressive angina over the past several months.", "The patient went for six minutes with minimal ST depressions in the anterior lateral leads , thought due to fatigue and wrist pain , his anginal equivalent."); static final List<String> SPANISH_SAME_AS_ENGLISH_INPUTS = asList("personal", "social"); static final DetectedLanguage DETECTED_LANGUAGE_SPANISH = new DetectedLanguage("Spanish", "es", 1.0, null); static final DetectedLanguage DETECTED_LANGUAGE_ENGLISH = new DetectedLanguage("English", "en", 1.0, null); static final List<DetectedLanguage> DETECT_SPANISH_LANGUAGE_RESULTS = asList( DETECTED_LANGUAGE_SPANISH, DETECTED_LANGUAGE_SPANISH); static final List<DetectedLanguage> DETECT_ENGLISH_LANGUAGE_RESULTS = asList( DETECTED_LANGUAGE_ENGLISH, DETECTED_LANGUAGE_ENGLISH); static final HttpResponseException HTTP_RESPONSE_EXCEPTION_CLASS = new HttpResponseException("", null); static final String DISPLAY_NAME_WITH_ARGUMENTS = 
"{displayName} with [{arguments}]"; private static final String AZURE_TEXT_ANALYTICS_TEST_SERVICE_VERSIONS = "AZURE_TEXT_ANALYTICS_TEST_SERVICE_VERSIONS"; static List<DetectLanguageInput> getDetectLanguageInputs() { return asList( new DetectLanguageInput("0", DETECT_LANGUAGE_INPUTS.get(0), "US"), new DetectLanguageInput("1", DETECT_LANGUAGE_INPUTS.get(1), "US"), new DetectLanguageInput("2", DETECT_LANGUAGE_INPUTS.get(2), "US") ); } static List<DetectLanguageInput> getDuplicateIdDetectLanguageInputs() { return asList( new DetectLanguageInput("0", DETECT_LANGUAGE_INPUTS.get(0), "US"), new DetectLanguageInput("0", DETECT_LANGUAGE_INPUTS.get(0), "US") ); } static List<TextDocumentInput> getDuplicateTextDocumentInputs() { return asList( new TextDocumentInput("0", CATEGORIZED_ENTITY_INPUTS.get(0)), new TextDocumentInput("0", CATEGORIZED_ENTITY_INPUTS.get(0)), new TextDocumentInput("0", CATEGORIZED_ENTITY_INPUTS.get(0)) ); } static List<TextDocumentInput> getWarningsTextDocumentInputs() { return asList( new TextDocumentInput("0", TOO_LONG_INPUT), new TextDocumentInput("1", CATEGORIZED_ENTITY_INPUTS.get(1)) ); } static List<TextDocumentInput> getTextDocumentInputs(List<String> inputs) { return IntStream.range(0, inputs.size()) .mapToObj(index -> new TextDocumentInput(String.valueOf(index), inputs.get(index))) .collect(Collectors.toList()); } /** * Helper method to get the expected Batch Detected Languages * * @return A {@link DetectLanguageResultCollection}. 
*/ static DetectLanguageResultCollection getExpectedBatchDetectedLanguages() { final TextDocumentBatchStatistics textDocumentBatchStatistics = new TextDocumentBatchStatistics(3, 3, 0, 3); final List<DetectLanguageResult> detectLanguageResultList = asList( new DetectLanguageResult("0", new TextDocumentStatistics(26, 1), null, getDetectedLanguageEnglish()), new DetectLanguageResult("1", new TextDocumentStatistics(40, 1), null, getDetectedLanguageSpanish()), new DetectLanguageResult("2", new TextDocumentStatistics(6, 1), null, getUnknownDetectedLanguage())); return new DetectLanguageResultCollection(detectLanguageResultList, DEFAULT_MODEL_VERSION, textDocumentBatchStatistics); } static DetectedLanguage getDetectedLanguageEnglish() { return new DetectedLanguage("English", "en", 0.0, null); } static DetectedLanguage getDetectedLanguageSpanish() { return new DetectedLanguage("Spanish", "es", 0.0, null); } static DetectedLanguage getUnknownDetectedLanguage() { return new DetectedLanguage("(Unknown)", "(Unknown)", 0.0, null); } /** * Helper method to get the expected Batch Categorized Entities * * @return A {@link RecognizeEntitiesResultCollection}. 
*/ static RecognizeEntitiesResultCollection getExpectedBatchCategorizedEntities() { return new RecognizeEntitiesResultCollection( asList(getExpectedBatchCategorizedEntities1(), getExpectedBatchCategorizedEntities2()), DEFAULT_MODEL_VERSION, new TextDocumentBatchStatistics(2, 2, 0, 2)); } /** * Helper method to get the expected Categorized Entities List 1 */ static List<CategorizedEntity> getCategorizedEntitiesList1() { CategorizedEntity categorizedEntity1 = new CategorizedEntity("trip", EntityCategory.EVENT, null, 0.0); CategorizedEntityPropertiesHelper.setOffset(categorizedEntity1, 18); CategorizedEntity categorizedEntity2 = new CategorizedEntity("Seattle", EntityCategory.LOCATION, "GPE", 0.0); CategorizedEntityPropertiesHelper.setOffset(categorizedEntity2, 26); CategorizedEntity categorizedEntity3 = new CategorizedEntity("last week", EntityCategory.DATE_TIME, "DateRange", 0.0); CategorizedEntityPropertiesHelper.setOffset(categorizedEntity3, 34); return asList(categorizedEntity1, categorizedEntity2, categorizedEntity3); } /** * Helper method to get the expected Categorized Entities List 2 */ static List<CategorizedEntity> getCategorizedEntitiesList2() { CategorizedEntity categorizedEntity1 = new CategorizedEntity("Microsoft", EntityCategory.ORGANIZATION, null, 0.0); CategorizedEntityPropertiesHelper.setOffset(categorizedEntity1, 10); return asList(categorizedEntity1); } /** * Helper method to get the expected Categorized entity result for PII document input. 
*/ static List<CategorizedEntity> getCategorizedEntitiesForPiiInput() { CategorizedEntity categorizedEntity1 = new CategorizedEntity("Microsoft", EntityCategory.ORGANIZATION, null, 0.0); CategorizedEntityPropertiesHelper.setOffset(categorizedEntity1, 0); CategorizedEntity categorizedEntity2 = new CategorizedEntity("employee", EntityCategory.PERSON_TYPE, null, 0.0); CategorizedEntityPropertiesHelper.setOffset(categorizedEntity2, 10); CategorizedEntity categorizedEntity3 = new CategorizedEntity("859", EntityCategory.QUANTITY, "Number", 0.0); CategorizedEntityPropertiesHelper.setOffset(categorizedEntity3, 28); CategorizedEntity categorizedEntity4 = new CategorizedEntity("98", EntityCategory.QUANTITY, "Number", 0.0); CategorizedEntityPropertiesHelper.setOffset(categorizedEntity4, 32); CategorizedEntity categorizedEntity5 = new CategorizedEntity("0987", EntityCategory.QUANTITY, "Number", 0.0); CategorizedEntityPropertiesHelper.setOffset(categorizedEntity5, 35); CategorizedEntity categorizedEntity6 = new CategorizedEntity("API", EntityCategory.SKILL, null, 0.0); CategorizedEntityPropertiesHelper.setOffset(categorizedEntity6, 61); return asList(categorizedEntity1, categorizedEntity2, categorizedEntity3, categorizedEntity4, categorizedEntity5, categorizedEntity6); } /** * Helper method to get the expected Batch Categorized Entities */ static RecognizeEntitiesResult getExpectedBatchCategorizedEntities1() { IterableStream<CategorizedEntity> categorizedEntityList1 = new IterableStream<>(getCategorizedEntitiesList1()); TextDocumentStatistics textDocumentStatistics1 = new TextDocumentStatistics(44, 1); RecognizeEntitiesResult recognizeEntitiesResult1 = new RecognizeEntitiesResult("0", textDocumentStatistics1, null, new CategorizedEntityCollection(categorizedEntityList1, null)); return recognizeEntitiesResult1; } /** * Helper method to get the expected Batch Categorized Entities */ static RecognizeEntitiesResult getExpectedBatchCategorizedEntities2() { 
IterableStream<CategorizedEntity> categorizedEntityList2 = new IterableStream<>(getCategorizedEntitiesList2()); TextDocumentStatistics textDocumentStatistics2 = new TextDocumentStatistics(20, 1); RecognizeEntitiesResult recognizeEntitiesResult2 = new RecognizeEntitiesResult("1", textDocumentStatistics2, null, new CategorizedEntityCollection(categorizedEntityList2, null)); return recognizeEntitiesResult2; } /** * Helper method to get the expected batch of Personally Identifiable Information entities */ static RecognizePiiEntitiesResultCollection getExpectedBatchPiiEntities() { PiiEntityCollection piiEntityCollection = new PiiEntityCollection(new IterableStream<>(getPiiEntitiesList1()), "********* ******** with ssn *********** is using our awesome API's.", null); PiiEntityCollection piiEntityCollection2 = new PiiEntityCollection(new IterableStream<>(getPiiEntitiesList2()), "Your ABA number - ********* - is the first 9 digits in the lower left hand corner of your personal check.", null); TextDocumentStatistics textDocumentStatistics1 = new TextDocumentStatistics(67, 1); TextDocumentStatistics textDocumentStatistics2 = new TextDocumentStatistics(105, 1); RecognizePiiEntitiesResult recognizeEntitiesResult1 = new RecognizePiiEntitiesResult("0", textDocumentStatistics1, null, piiEntityCollection); RecognizePiiEntitiesResult recognizeEntitiesResult2 = new RecognizePiiEntitiesResult("1", textDocumentStatistics2, null, piiEntityCollection2); return new RecognizePiiEntitiesResultCollection( asList(recognizeEntitiesResult1, recognizeEntitiesResult2), DEFAULT_MODEL_VERSION, new TextDocumentBatchStatistics(2, 2, 0, 2)); } /** * Helper method to get the expected batch of Personally Identifiable Information entities for domain filter */ static RecognizePiiEntitiesResultCollection getExpectedBatchPiiEntitiesForDomainFilter() { PiiEntityCollection piiEntityCollection = new PiiEntityCollection( new IterableStream<>(getPiiEntitiesList1ForDomainFilter()), "********* employee with ssn 
*********** is using our awesome API's.", null); PiiEntityCollection piiEntityCollection2 = new PiiEntityCollection( new IterableStream<>(Arrays.asList(getPiiEntitiesList2().get(0), getPiiEntitiesList2().get(1), getPiiEntitiesList2().get(2))), "Your ABA number - ********* - is the first 9 digits in the lower left hand corner of your personal check.", null); TextDocumentStatistics textDocumentStatistics1 = new TextDocumentStatistics(67, 1); TextDocumentStatistics textDocumentStatistics2 = new TextDocumentStatistics(105, 1); RecognizePiiEntitiesResult recognizeEntitiesResult1 = new RecognizePiiEntitiesResult("0", textDocumentStatistics1, null, piiEntityCollection); RecognizePiiEntitiesResult recognizeEntitiesResult2 = new RecognizePiiEntitiesResult("1", textDocumentStatistics2, null, piiEntityCollection2); return new RecognizePiiEntitiesResultCollection( asList(recognizeEntitiesResult1, recognizeEntitiesResult2), DEFAULT_MODEL_VERSION, new TextDocumentBatchStatistics(2, 2, 0, 2)); } /** * Helper method to get the expected Categorized Entities List 1 */ static List<PiiEntity> getPiiEntitiesList1() { final PiiEntity piiEntity0 = new PiiEntity(); PiiEntityPropertiesHelper.setText(piiEntity0, "Microsoft"); PiiEntityPropertiesHelper.setCategory(piiEntity0, PiiEntityCategory.ORGANIZATION); PiiEntityPropertiesHelper.setSubcategory(piiEntity0, null); PiiEntityPropertiesHelper.setOffset(piiEntity0, 0); final PiiEntity piiEntity1 = new PiiEntity(); PiiEntityPropertiesHelper.setText(piiEntity1, "employee"); PiiEntityPropertiesHelper.setCategory(piiEntity1, PiiEntityCategory.fromString("PersonType")); PiiEntityPropertiesHelper.setSubcategory(piiEntity1, null); PiiEntityPropertiesHelper.setOffset(piiEntity1, 10); final PiiEntity piiEntity2 = new PiiEntity(); PiiEntityPropertiesHelper.setText(piiEntity2, "859-98-0987"); PiiEntityPropertiesHelper.setCategory(piiEntity2, PiiEntityCategory.US_SOCIAL_SECURITY_NUMBER); PiiEntityPropertiesHelper.setSubcategory(piiEntity2, null); 
PiiEntityPropertiesHelper.setOffset(piiEntity2, 28); return asList(piiEntity0, piiEntity1, piiEntity2); } static List<PiiEntity> getPiiEntitiesList1ForDomainFilter() { return Arrays.asList(getPiiEntitiesList1().get(0), getPiiEntitiesList1().get(2)); } /** * Helper method to get the expected Categorized Entities List 2 */ static List<PiiEntity> getPiiEntitiesList2() { String expectedText = "111000025"; final PiiEntity piiEntity0 = new PiiEntity(); PiiEntityPropertiesHelper.setText(piiEntity0, expectedText); PiiEntityPropertiesHelper.setCategory(piiEntity0, PiiEntityCategory.PHONE_NUMBER); PiiEntityPropertiesHelper.setSubcategory(piiEntity0, null); PiiEntityPropertiesHelper.setConfidenceScore(piiEntity0, 0.8); PiiEntityPropertiesHelper.setOffset(piiEntity0, 18); final PiiEntity piiEntity1 = new PiiEntity(); PiiEntityPropertiesHelper.setText(piiEntity1, expectedText); PiiEntityPropertiesHelper.setCategory(piiEntity1, PiiEntityCategory.ABA_ROUTING_NUMBER); PiiEntityPropertiesHelper.setSubcategory(piiEntity1, null); PiiEntityPropertiesHelper.setConfidenceScore(piiEntity1, 0.75); PiiEntityPropertiesHelper.setOffset(piiEntity1, 18); final PiiEntity piiEntity2 = new PiiEntity(); PiiEntityPropertiesHelper.setText(piiEntity2, expectedText); PiiEntityPropertiesHelper.setCategory(piiEntity2, PiiEntityCategory.NZ_SOCIAL_WELFARE_NUMBER); PiiEntityPropertiesHelper.setSubcategory(piiEntity2, null); PiiEntityPropertiesHelper.setConfidenceScore(piiEntity2, 0.65); PiiEntityPropertiesHelper.setOffset(piiEntity2, 18); return asList(piiEntity0, piiEntity1, piiEntity2); } /** * Helper method to get the expected batch of Personally Identifiable Information entities for categories filter */ static RecognizePiiEntitiesResultCollection getExpectedBatchPiiEntitiesForCategoriesFilter() { PiiEntityCollection piiEntityCollection = new PiiEntityCollection( new IterableStream<>(asList(getPiiEntitiesList1().get(2))), "Microsoft employee with ssn *********** is using our awesome API's.", null); 
PiiEntityCollection piiEntityCollection2 = new PiiEntityCollection( new IterableStream<>(asList(getPiiEntitiesList2().get(1))), "Your ABA number - ********* - is the first 9 digits in the lower left hand corner of your personal check.", null); RecognizePiiEntitiesResult recognizeEntitiesResult1 = new RecognizePiiEntitiesResult("0", null, null, piiEntityCollection); RecognizePiiEntitiesResult recognizeEntitiesResult2 = new RecognizePiiEntitiesResult("1", null, null, piiEntityCollection2); return new RecognizePiiEntitiesResultCollection( asList(recognizeEntitiesResult1, recognizeEntitiesResult2), DEFAULT_MODEL_VERSION, new TextDocumentBatchStatistics(2, 2, 0, 2)); } /** * Helper method to get the expected Batch Linked Entities * @return A {@link RecognizeLinkedEntitiesResultCollection}. */ static RecognizeLinkedEntitiesResultCollection getExpectedBatchLinkedEntities() { final TextDocumentBatchStatistics textDocumentBatchStatistics = new TextDocumentBatchStatistics(2, 2, 0, 2); final List<RecognizeLinkedEntitiesResult> recognizeLinkedEntitiesResultList = asList( new RecognizeLinkedEntitiesResult( "0", new TextDocumentStatistics(44, 1), null, new LinkedEntityCollection(new IterableStream<>(getLinkedEntitiesList1()), null)), new RecognizeLinkedEntitiesResult( "1", new TextDocumentStatistics(20, 1), null, new LinkedEntityCollection(new IterableStream<>(getLinkedEntitiesList2()), null))); return new RecognizeLinkedEntitiesResultCollection(recognizeLinkedEntitiesResultList, DEFAULT_MODEL_VERSION, textDocumentBatchStatistics); } /** * Helper method to get the expected linked Entities List 1 */ static List<LinkedEntity> getLinkedEntitiesList1() { final LinkedEntityMatch linkedEntityMatch = new LinkedEntityMatch("Seattle", 0.0); LinkedEntityMatchPropertiesHelper.setOffset(linkedEntityMatch, 26); LinkedEntity linkedEntity = new LinkedEntity( "Seattle", new IterableStream<>(Collections.singletonList(linkedEntityMatch)), "en", "Seattle", "https: "Wikipedia"); 
LinkedEntityPropertiesHelper.setBingEntitySearchApiId(linkedEntity, "5fbba6b8-85e1-4d41-9444-d9055436e473"); return asList(linkedEntity); } /** * Helper method to get the expected linked Entities List 2 */ static List<LinkedEntity> getLinkedEntitiesList2() { LinkedEntityMatch linkedEntityMatch = new LinkedEntityMatch("Microsoft", 0.0); LinkedEntityMatchPropertiesHelper.setOffset(linkedEntityMatch, 10); LinkedEntity linkedEntity = new LinkedEntity( "Microsoft", new IterableStream<>(Collections.singletonList(linkedEntityMatch)), "en", "Microsoft", "https: "Wikipedia"); LinkedEntityPropertiesHelper.setBingEntitySearchApiId(linkedEntity, "a093e9b9-90f5-a3d5-c4b8-5855e1b01f85"); return asList(linkedEntity); } static List<LinkedEntity> getLinkedEntitiesList3() { LinkedEntityMatch linkedEntityMatch = new LinkedEntityMatch("Microsoft", 0.0); LinkedEntityMatchPropertiesHelper.setOffset(linkedEntityMatch, 0); LinkedEntityMatch linkedEntityMatch1 = new LinkedEntityMatch("API's", 0.0); LinkedEntityMatchPropertiesHelper.setOffset(linkedEntityMatch1, 61); LinkedEntity linkedEntity = new LinkedEntity( "Microsoft", new IterableStream<>(Collections.singletonList(linkedEntityMatch)), "en", "Microsoft", "https: "Wikipedia"); LinkedEntityPropertiesHelper.setBingEntitySearchApiId(linkedEntity, "a093e9b9-90f5-a3d5-c4b8-5855e1b01f85"); LinkedEntity linkedEntity1 = new LinkedEntity( "Application programming interface", new IterableStream<>(Collections.singletonList(linkedEntityMatch1)), "en", "Application programming interface", "https: "Wikipedia"); return asList(linkedEntity, linkedEntity1); } /** * Helper method to get the expected Batch Key Phrases. 
*/ static ExtractKeyPhrasesResultCollection getExpectedBatchKeyPhrases() { TextDocumentStatistics textDocumentStatistics1 = new TextDocumentStatistics(49, 1); TextDocumentStatistics textDocumentStatistics2 = new TextDocumentStatistics(21, 1); ExtractKeyPhraseResult extractKeyPhraseResult1 = new ExtractKeyPhraseResult("0", textDocumentStatistics1, null, new KeyPhrasesCollection(new IterableStream<>(asList("Hello world", "input text")), null)); ExtractKeyPhraseResult extractKeyPhraseResult2 = new ExtractKeyPhraseResult("1", textDocumentStatistics2, null, new KeyPhrasesCollection(new IterableStream<>(asList("Bonjour", "monde")), null)); TextDocumentBatchStatistics textDocumentBatchStatistics = new TextDocumentBatchStatistics(2, 2, 0, 2); List<ExtractKeyPhraseResult> extractKeyPhraseResultList = asList(extractKeyPhraseResult1, extractKeyPhraseResult2); return new ExtractKeyPhrasesResultCollection(extractKeyPhraseResultList, DEFAULT_MODEL_VERSION, textDocumentBatchStatistics); } /** * Helper method to get the expected Batch Text Sentiments */ static AnalyzeSentimentResultCollection getExpectedBatchTextSentiment() { final TextDocumentStatistics textDocumentStatistics = new TextDocumentStatistics(67, 1); final AnalyzeSentimentResult analyzeSentimentResult1 = new AnalyzeSentimentResult("0", textDocumentStatistics, null, getExpectedDocumentSentiment()); final AnalyzeSentimentResult analyzeSentimentResult2 = new AnalyzeSentimentResult("1", textDocumentStatistics, null, getExpectedDocumentSentiment2()); return new AnalyzeSentimentResultCollection( asList(analyzeSentimentResult1, analyzeSentimentResult2), DEFAULT_MODEL_VERSION, new TextDocumentBatchStatistics(2, 2, 0, 2)); } /** * Helper method that get the first expected DocumentSentiment result. 
*/ static DocumentSentiment getExpectedDocumentSentiment() { final AssessmentSentiment assessmentSentiment1 = new AssessmentSentiment(); AssessmentSentimentPropertiesHelper.setText(assessmentSentiment1, "dark"); AssessmentSentimentPropertiesHelper.setSentiment(assessmentSentiment1, TextSentiment.NEGATIVE); AssessmentSentimentPropertiesHelper.setConfidenceScores(assessmentSentiment1, new SentimentConfidenceScores(0.0, 0.0, 0.0)); AssessmentSentimentPropertiesHelper.setNegated(assessmentSentiment1, false); AssessmentSentimentPropertiesHelper.setOffset(assessmentSentiment1, 14); AssessmentSentimentPropertiesHelper.setLength(assessmentSentiment1, 0); final AssessmentSentiment assessmentSentiment2 = new AssessmentSentiment(); AssessmentSentimentPropertiesHelper.setText(assessmentSentiment2, "unclean"); AssessmentSentimentPropertiesHelper.setSentiment(assessmentSentiment2, TextSentiment.NEGATIVE); AssessmentSentimentPropertiesHelper.setConfidenceScores(assessmentSentiment2, new SentimentConfidenceScores(0.0, 0.0, 0.0)); AssessmentSentimentPropertiesHelper.setNegated(assessmentSentiment2, false); AssessmentSentimentPropertiesHelper.setOffset(assessmentSentiment2, 23); AssessmentSentimentPropertiesHelper.setLength(assessmentSentiment2, 0); final AssessmentSentiment assessmentSentiment3 = new AssessmentSentiment(); AssessmentSentimentPropertiesHelper.setText(assessmentSentiment3, "amazing"); AssessmentSentimentPropertiesHelper.setSentiment(assessmentSentiment3, TextSentiment.POSITIVE); AssessmentSentimentPropertiesHelper.setConfidenceScores(assessmentSentiment3, new SentimentConfidenceScores(0.0, 0.0, 0.0)); AssessmentSentimentPropertiesHelper.setNegated(assessmentSentiment3, false); AssessmentSentimentPropertiesHelper.setOffset(assessmentSentiment3, 51); AssessmentSentimentPropertiesHelper.setLength(assessmentSentiment3, 0); final TargetSentiment targetSentiment1 = new TargetSentiment(); TargetSentimentPropertiesHelper.setText(targetSentiment1, "hotel"); 
TargetSentimentPropertiesHelper.setSentiment(targetSentiment1, TextSentiment.NEGATIVE); TargetSentimentPropertiesHelper.setConfidenceScores(targetSentiment1, new SentimentConfidenceScores(0.0, 0.0, 0.0)); TargetSentimentPropertiesHelper.setOffset(targetSentiment1, 4); final SentenceOpinion sentenceOpinion1 = new SentenceOpinion(); SentenceOpinionPropertiesHelper.setTarget(sentenceOpinion1, targetSentiment1); SentenceOpinionPropertiesHelper.setAssessments(sentenceOpinion1, new IterableStream<>(asList(assessmentSentiment1, assessmentSentiment2))); final TargetSentiment targetSentiment2 = new TargetSentiment(); TargetSentimentPropertiesHelper.setText(targetSentiment2, "gnocchi"); TargetSentimentPropertiesHelper.setSentiment(targetSentiment2, TextSentiment.POSITIVE); TargetSentimentPropertiesHelper.setConfidenceScores(targetSentiment2, new SentimentConfidenceScores(0.0, 0.0, 0.0)); TargetSentimentPropertiesHelper.setOffset(targetSentiment2, 59); final SentenceOpinion sentenceOpinion2 = new SentenceOpinion(); SentenceOpinionPropertiesHelper.setTarget(sentenceOpinion2, targetSentiment2); SentenceOpinionPropertiesHelper.setAssessments(sentenceOpinion2, new IterableStream<>(asList(assessmentSentiment3))); final SentenceSentiment sentenceSentiment1 = new SentenceSentiment( "The hotel was dark and unclean.", TextSentiment.NEGATIVE, new SentimentConfidenceScores(0.0, 0.0, 0.0)); SentenceSentimentPropertiesHelper.setOpinions(sentenceSentiment1, new IterableStream<>(asList(sentenceOpinion1))); SentenceSentimentPropertiesHelper.setOffset(sentenceSentiment1, 0); SentenceSentimentPropertiesHelper.setLength(sentenceSentiment1, 31); final SentenceSentiment sentenceSentiment2 = new SentenceSentiment( "The restaurant had amazing gnocchi.", TextSentiment.POSITIVE, new SentimentConfidenceScores(0.0, 0.0, 0.0)); SentenceSentimentPropertiesHelper.setOpinions(sentenceSentiment2, new IterableStream<>(asList(sentenceOpinion2))); SentenceSentimentPropertiesHelper.setOffset(sentenceSentiment2, 
32); SentenceSentimentPropertiesHelper.setLength(sentenceSentiment2, 35); return new DocumentSentiment(TextSentiment.MIXED, new SentimentConfidenceScores(0.0, 0.0, 0.0), new IterableStream<>(asList(sentenceSentiment1, sentenceSentiment2)), null); } /** * Helper method that get the second expected DocumentSentiment result. */ static DocumentSentiment getExpectedDocumentSentiment2() { final AssessmentSentiment assessmentSentiment1 = new AssessmentSentiment(); AssessmentSentimentPropertiesHelper.setText(assessmentSentiment1, "dark"); AssessmentSentimentPropertiesHelper.setSentiment(assessmentSentiment1, TextSentiment.NEGATIVE); AssessmentSentimentPropertiesHelper.setConfidenceScores(assessmentSentiment1, new SentimentConfidenceScores(0.0, 0.0, 0.0)); AssessmentSentimentPropertiesHelper.setNegated(assessmentSentiment1, false); AssessmentSentimentPropertiesHelper.setOffset(assessmentSentiment1, 50); AssessmentSentimentPropertiesHelper.setLength(assessmentSentiment1, 0); final AssessmentSentiment assessmentSentiment2 = new AssessmentSentiment(); AssessmentSentimentPropertiesHelper.setText(assessmentSentiment2, "unclean"); AssessmentSentimentPropertiesHelper.setSentiment(assessmentSentiment2, TextSentiment.NEGATIVE); AssessmentSentimentPropertiesHelper.setConfidenceScores(assessmentSentiment2, new SentimentConfidenceScores(0.0, 0.0, 0.0)); AssessmentSentimentPropertiesHelper.setNegated(assessmentSentiment2, false); AssessmentSentimentPropertiesHelper.setOffset(assessmentSentiment2, 59); AssessmentSentimentPropertiesHelper.setLength(assessmentSentiment2, 0); final AssessmentSentiment assessmentSentiment3 = new AssessmentSentiment(); AssessmentSentimentPropertiesHelper.setText(assessmentSentiment3, "amazing"); AssessmentSentimentPropertiesHelper.setSentiment(assessmentSentiment3, TextSentiment.POSITIVE); AssessmentSentimentPropertiesHelper.setConfidenceScores(assessmentSentiment3, new SentimentConfidenceScores(0.0, 0.0, 0.0)); 
AssessmentSentimentPropertiesHelper.setNegated(assessmentSentiment3, false); AssessmentSentimentPropertiesHelper.setOffset(assessmentSentiment3, 19); AssessmentSentimentPropertiesHelper.setLength(assessmentSentiment3, 0); final TargetSentiment targetSentiment1 = new TargetSentiment(); TargetSentimentPropertiesHelper.setText(targetSentiment1, "gnocchi"); TargetSentimentPropertiesHelper.setSentiment(targetSentiment1, TextSentiment.POSITIVE); TargetSentimentPropertiesHelper.setConfidenceScores(targetSentiment1, new SentimentConfidenceScores(0.0, 0.0, 0.0)); TargetSentimentPropertiesHelper.setOffset(targetSentiment1, 27); final SentenceOpinion sentenceOpinion1 = new SentenceOpinion(); SentenceOpinionPropertiesHelper.setTarget(sentenceOpinion1, targetSentiment1); SentenceOpinionPropertiesHelper.setAssessments(sentenceOpinion1, new IterableStream<>(asList(assessmentSentiment3))); final TargetSentiment targetSentiment2 = new TargetSentiment(); TargetSentimentPropertiesHelper.setText(targetSentiment2, "hotel"); TargetSentimentPropertiesHelper.setSentiment(targetSentiment2, TextSentiment.NEGATIVE); TargetSentimentPropertiesHelper.setConfidenceScores(targetSentiment2, new SentimentConfidenceScores(0.0, 0.0, 0.0)); TargetSentimentPropertiesHelper.setOffset(targetSentiment2, 40); final SentenceOpinion sentenceOpinion2 = new SentenceOpinion(); SentenceOpinionPropertiesHelper.setTarget(sentenceOpinion2, targetSentiment2); SentenceOpinionPropertiesHelper.setAssessments(sentenceOpinion2, new IterableStream<>(asList(assessmentSentiment1, assessmentSentiment2))); final SentenceSentiment sentenceSentiment1 = new SentenceSentiment( "The restaurant had amazing gnocchi.", TextSentiment.POSITIVE, new SentimentConfidenceScores(0.0, 0.0, 0.0)); SentenceSentimentPropertiesHelper.setOpinions(sentenceSentiment1, new IterableStream<>(asList(sentenceOpinion1))); SentenceSentimentPropertiesHelper.setOffset(sentenceSentiment1, 0); SentenceSentimentPropertiesHelper.setLength(sentenceSentiment1, 
35); final SentenceSentiment sentenceSentiment2 = new SentenceSentiment( "The hotel was dark and unclean.", TextSentiment.NEGATIVE, new SentimentConfidenceScores(0.0, 0.0, 0.0)); SentenceSentimentPropertiesHelper.setOpinions(sentenceSentiment2, new IterableStream<>(asList(sentenceOpinion2))); SentenceSentimentPropertiesHelper.setOffset(sentenceSentiment2, 36); SentenceSentimentPropertiesHelper.setLength(sentenceSentiment2, 31); return new DocumentSentiment(TextSentiment.MIXED, new SentimentConfidenceScores(0.0, 0.0, 0.0), new IterableStream<>(asList(sentenceSentiment1, sentenceSentiment2)), null); } /* * This is the expected result for testing an input: * "I had a wonderful trip to Seattle last week." */ static DocumentSentiment getExpectedDocumentSentimentForActions() { final SentenceSentiment sentenceSentiment1 = new SentenceSentiment( "I had a wonderful trip to Seattle last week.", TextSentiment.POSITIVE, new SentimentConfidenceScores(0.0, 0.0, 0.0)); SentenceSentimentPropertiesHelper.setOpinions(sentenceSentiment1, null); SentenceSentimentPropertiesHelper.setOffset(sentenceSentiment1, 0); SentenceSentimentPropertiesHelper.setLength(sentenceSentiment1, 44); return new DocumentSentiment(TextSentiment.POSITIVE, new SentimentConfidenceScores(0.0, 0.0, 0.0), new IterableStream<>(asList(sentenceSentiment1)), null); } /* * This is the expected result for testing an input: * "Microsoft employee with ssn 859-98-0987 is using our awesome API's." 
*/
    static DocumentSentiment getExpectedDocumentSentimentForActions2() {
        // One positive sentence spanning the entire input (offset 0, length 67); no opinion mining.
        final SentenceSentiment sentenceSentiment1 = new SentenceSentiment(
            "Microsoft employee with ssn 859-98-0987 is using our awesome API's.",
            TextSentiment.POSITIVE, new SentimentConfidenceScores(0.0, 0.0, 0.0));
        SentenceSentimentPropertiesHelper.setOpinions(sentenceSentiment1, null);
        SentenceSentimentPropertiesHelper.setOffset(sentenceSentiment1, 0);
        SentenceSentimentPropertiesHelper.setLength(sentenceSentiment1, 67);
        // Document-level sentiment mirrors the single sentence; confidence scores are zeroed
        // because the tests do not assert on them.
        return new DocumentSentiment(TextSentiment.POSITIVE,
            new SentimentConfidenceScores(0.0, 0.0, 0.0),
            new IterableStream<>(asList(sentenceSentiment1)), null);
    }

    /**
     * Helper method that gets a single-page {@link AnalyzeHealthcareEntitiesResultCollection} list.
     */
    static List<AnalyzeHealthcareEntitiesResultCollection>
        getExpectedAnalyzeHealthcareEntitiesResultCollectionListForSinglePage() {
        // Single page holding both canned healthcare documents (id "0" plus the second fixture).
        return asList(
            getExpectedAnalyzeHealthcareEntitiesResultCollection(2,
                asList(getRecognizeHealthcareEntitiesResult1("0"), getRecognizeHealthcareEntitiesResult2())));
    }

    /**
     * Helper method that gets a multiple-pages {@link AnalyzeHealthcareEntitiesResultCollection} list.
     */
    static List<AnalyzeHealthcareEntitiesResultCollection>
        getExpectedAnalyzeHealthcareEntitiesResultCollectionListForMultiplePages(int startIndex,
        int firstPage, int secondPage) {
        // Document ids are consecutive integers starting at startIndex; the second page picks up
        // exactly where the first page stops (the loop index `i` is shared between both loops).
        List<AnalyzeHealthcareEntitiesResult> healthcareEntitiesResults1 = new ArrayList<>();
        int i = startIndex;
        for (; i < startIndex + firstPage; i++) {
            healthcareEntitiesResults1.add(getRecognizeHealthcareEntitiesResult1(Integer.toString(i)));
        }
        List<AnalyzeHealthcareEntitiesResult> healthcareEntitiesResults2 = new ArrayList<>();
        for (; i < startIndex + firstPage + secondPage; i++) {
            healthcareEntitiesResults2.add(getRecognizeHealthcareEntitiesResult1(Integer.toString(i)));
        }
        List<AnalyzeHealthcareEntitiesResultCollection> result = new ArrayList<>();
        result.add(getExpectedAnalyzeHealthcareEntitiesResultCollection(firstPage, healthcareEntitiesResults1));
        // A second page is only produced when it actually holds documents.
        if (secondPage != 0) {
            result.add(getExpectedAnalyzeHealthcareEntitiesResultCollection(secondPage, healthcareEntitiesResults2));
        }
        return result;
    }

    /**
     * Helper method that gets the expected {@link AnalyzeHealthcareEntitiesResultCollection} result.
     *
     * @param sizePerPage batch size per page.
     * @param healthcareEntitiesResults a collection of {@link AnalyzeHealthcareEntitiesResult}.
*/
    static AnalyzeHealthcareEntitiesResultCollection getExpectedAnalyzeHealthcareEntitiesResultCollection(
        int sizePerPage, List<AnalyzeHealthcareEntitiesResult> healthcareEntitiesResults) {
        // Every document in the page is counted as valid; zero erroneous documents.
        TextDocumentBatchStatistics textDocumentBatchStatistics = new TextDocumentBatchStatistics(
            sizePerPage, sizePerPage, 0, sizePerPage);
        final AnalyzeHealthcareEntitiesResultCollection analyzeHealthcareEntitiesResultCollection =
            new AnalyzeHealthcareEntitiesResultCollection(IterableStream.of(healthcareEntitiesResults));
        AnalyzeHealthcareEntitiesResultCollectionPropertiesHelper.setModelVersion(
            analyzeHealthcareEntitiesResultCollection, "2020-09-03");
        AnalyzeHealthcareEntitiesResultCollectionPropertiesHelper.setStatistics(
            analyzeHealthcareEntitiesResultCollection, textDocumentBatchStatistics);
        return analyzeHealthcareEntitiesResultCollection;
    }

    /**
     * Result for
     * "The patient is a 54-year-old gentleman with a history of progressive angina over the past several months.",
     */
    static AnalyzeHealthcareEntitiesResult getRecognizeHealthcareEntitiesResult1(String documentId) {
        TextDocumentStatistics textDocumentStatistics1 = new TextDocumentStatistics(105, 1);

        // "54-year-old" -> Age
        final HealthcareEntity healthcareEntity1 = new HealthcareEntity();
        HealthcareEntityPropertiesHelper.setText(healthcareEntity1, "54-year-old");
        HealthcareEntityPropertiesHelper.setCategory(healthcareEntity1, HealthcareEntityCategory.AGE);
        HealthcareEntityPropertiesHelper.setConfidenceScore(healthcareEntity1, 1.0);
        HealthcareEntityPropertiesHelper.setOffset(healthcareEntity1, 17);
        HealthcareEntityPropertiesHelper.setLength(healthcareEntity1, 11);
        HealthcareEntityPropertiesHelper.setDataSources(healthcareEntity1, IterableStream.of(Collections.emptyList()));

        // "gentleman" -> Gender
        final HealthcareEntity healthcareEntity2 = new HealthcareEntity();
        HealthcareEntityPropertiesHelper.setText(healthcareEntity2, "gentleman");
        HealthcareEntityPropertiesHelper.setNormalizedText(healthcareEntity2, "Male population group");
        HealthcareEntityPropertiesHelper.setCategory(healthcareEntity2, HealthcareEntityCategory.GENDER);
        HealthcareEntityPropertiesHelper.setConfidenceScore(healthcareEntity2, 1.0);
        HealthcareEntityPropertiesHelper.setOffset(healthcareEntity2, 29);
        HealthcareEntityPropertiesHelper.setLength(healthcareEntity2, 9);
        // Fix: setDataSources was previously invoked twice with identical arguments; once suffices.
        HealthcareEntityPropertiesHelper.setDataSources(healthcareEntity2, IterableStream.of(Collections.emptyList()));

        // "progressive" -> Course
        final HealthcareEntity healthcareEntity3 = new HealthcareEntity();
        HealthcareEntityPropertiesHelper.setText(healthcareEntity3, "progressive");
        HealthcareEntityPropertiesHelper.setCategory(healthcareEntity3, HealthcareEntityCategory.fromString("Course"));
        HealthcareEntityPropertiesHelper.setConfidenceScore(healthcareEntity3, 0.91);
        HealthcareEntityPropertiesHelper.setOffset(healthcareEntity3, 57);
        HealthcareEntityPropertiesHelper.setLength(healthcareEntity3, 11);
        HealthcareEntityPropertiesHelper.setDataSources(healthcareEntity3, IterableStream.of(Collections.emptyList()));

        // "angina" -> SymptomOrSign
        final HealthcareEntity healthcareEntity4 = new HealthcareEntity();
        HealthcareEntityPropertiesHelper.setText(healthcareEntity4, "angina");
        HealthcareEntityPropertiesHelper.setNormalizedText(healthcareEntity4, "Angina Pectoris");
        HealthcareEntityPropertiesHelper.setCategory(healthcareEntity4, HealthcareEntityCategory.SYMPTOM_OR_SIGN);
        HealthcareEntityPropertiesHelper.setConfidenceScore(healthcareEntity4, 0.81);
        HealthcareEntityPropertiesHelper.setOffset(healthcareEntity4, 69);
        HealthcareEntityPropertiesHelper.setLength(healthcareEntity4, 6);
        // Fix: duplicate setDataSources call removed here as well.
        HealthcareEntityPropertiesHelper.setDataSources(healthcareEntity4, IterableStream.of(Collections.emptyList()));

        // "past several months" -> Time
        final HealthcareEntity healthcareEntity5 = new HealthcareEntity();
        HealthcareEntityPropertiesHelper.setText(healthcareEntity5, "past several months");
        HealthcareEntityPropertiesHelper.setCategory(healthcareEntity5, HealthcareEntityCategory.TIME);
        HealthcareEntityPropertiesHelper.setConfidenceScore(healthcareEntity5, 1.0);
        HealthcareEntityPropertiesHelper.setOffset(healthcareEntity5, 85);
        HealthcareEntityPropertiesHelper.setLength(healthcareEntity5, 19);
        HealthcareEntityPropertiesHelper.setDataSources(healthcareEntity5, IterableStream.of(Collections.emptyList()));

        final AnalyzeHealthcareEntitiesResult healthcareEntitiesResult1 =
            new AnalyzeHealthcareEntitiesResult(documentId, textDocumentStatistics1, null);
        AnalyzeHealthcareEntitiesResultPropertiesHelper.setEntities(healthcareEntitiesResult1,
            new IterableStream<>(asList(healthcareEntity1, healthcareEntity2, healthcareEntity3,
                healthcareEntity4, healthcareEntity5)));

        // Relation 1: "progressive" (Course role) qualifies "angina" (Condition role).
        final HealthcareEntityRelation healthcareEntityRelation1 = new HealthcareEntityRelation();
        final HealthcareEntityRelationRole role1 = new HealthcareEntityRelationRole();
        HealthcareEntityRelationRolePropertiesHelper.setName(role1, "Course");
        HealthcareEntityRelationRolePropertiesHelper.setEntity(role1, healthcareEntity3);
        final HealthcareEntityRelationRole role2 = new HealthcareEntityRelationRole();
        HealthcareEntityRelationRolePropertiesHelper.setName(role2, "Condition");
        HealthcareEntityRelationRolePropertiesHelper.setEntity(role2, healthcareEntity4);
        HealthcareEntityRelationPropertiesHelper.setRelationType(healthcareEntityRelation1,
            HealthcareEntityRelationType.fromString("CourseOfCondition"));
        HealthcareEntityRelationPropertiesHelper.setRoles(healthcareEntityRelation1,
            IterableStream.of(asList(role1, role2)));

        // Relation 2: "past several months" (Time role) anchors the same Condition role.
        final HealthcareEntityRelation healthcareEntityRelation2 = new HealthcareEntityRelation();
        final HealthcareEntityRelationRole role3 = new HealthcareEntityRelationRole();
        HealthcareEntityRelationRolePropertiesHelper.setName(role3, "Time");
        HealthcareEntityRelationRolePropertiesHelper.setEntity(role3, healthcareEntity5);
        HealthcareEntityRelationPropertiesHelper.setRelationType(healthcareEntityRelation2,
            HealthcareEntityRelationType.TIME_OF_CONDITION);
        HealthcareEntityRelationPropertiesHelper.setRoles(healthcareEntityRelation2,
            IterableStream.of(asList(role2, role3)));
        AnalyzeHealthcareEntitiesResultPropertiesHelper.setEntityRelations(healthcareEntitiesResult1,
            IterableStream.of(asList(healthcareEntityRelation1, healthcareEntityRelation2)));
        return healthcareEntitiesResult1;
    }

    /**
     * Result for
     * "The patient went for six minutes with minimal ST depressions in the anterior lateral leads ,
     * thought due to fatigue and wrist pain , his anginal equivalent."
     */
    // NOTE(review): the javadoc above has no method following it in this region — presumably it
    // documents getRecognizeHealthcareEntitiesResult2 defined elsewhere; verify and re-attach it.
    /**
     * RecognizeEntitiesResultCollection result for
     * "I had a wonderful trip to Seattle last week."
     * "Microsoft employee with ssn 859-98-0987 is using our awesome API's."
     */
    static RecognizeEntitiesResultCollection getRecognizeEntitiesResultCollection() {
        return new RecognizeEntitiesResultCollection(
            asList(new RecognizeEntitiesResult("0", new TextDocumentStatistics(44, 1), null,
                    new CategorizedEntityCollection(new IterableStream<>(getCategorizedEntitiesList1()), null)),
                new RecognizeEntitiesResult("1", new TextDocumentStatistics(67, 1), null,
                    new CategorizedEntityCollection(new IterableStream<>(getCategorizedEntitiesForPiiInput()), null))
            ), "2020-04-01", new TextDocumentBatchStatistics(2, 2, 0, 2));
    }

    /**
     * RecognizePiiEntitiesResultCollection result for
     * "I had a wonderful trip to Seattle last week."
     * "Microsoft employee with ssn 859-98-0987 is using our awesome API's."
*/ static RecognizePiiEntitiesResultCollection getRecognizePiiEntitiesResultCollection() { final PiiEntity piiEntity0 = new PiiEntity(); PiiEntityPropertiesHelper.setText(piiEntity0, "last week"); PiiEntityPropertiesHelper.setCategory(piiEntity0, PiiEntityCategory.fromString("DateTime")); PiiEntityPropertiesHelper.setSubcategory(piiEntity0, "DateRange"); PiiEntityPropertiesHelper.setOffset(piiEntity0, 34); return new RecognizePiiEntitiesResultCollection( asList( new RecognizePiiEntitiesResult("0", new TextDocumentStatistics(44, 1), null, new PiiEntityCollection(new IterableStream<>(Arrays.asList(piiEntity0)), "I had a wonderful trip to Seattle *********.", null)), new RecognizePiiEntitiesResult("1", new TextDocumentStatistics(67, 1), null, new PiiEntityCollection(new IterableStream<>(getPiiEntitiesList1()), "********* ******** with ssn *********** is using our awesome API's.", null))), "2020-07-01", new TextDocumentBatchStatistics(2, 2, 0, 2) ); } /** * ExtractKeyPhrasesResultCollection result for * "I had a wonderful trip to Seattle last week." * "Microsoft employee with ssn 859-98-0987 is using our awesome API's." 
*/ static ExtractKeyPhrasesResultCollection getExtractKeyPhrasesResultCollection() { return new ExtractKeyPhrasesResultCollection( asList(new ExtractKeyPhraseResult("0", new TextDocumentStatistics(44, 1), null, new KeyPhrasesCollection(new IterableStream<>(asList("wonderful trip", "Seattle")), null)), new ExtractKeyPhraseResult("1", new TextDocumentStatistics(67, 1), null, new KeyPhrasesCollection(new IterableStream<>(asList("Microsoft employee", "ssn", "awesome API")), null))), DEFAULT_MODEL_VERSION, new TextDocumentBatchStatistics(2, 2, 0, 2)); } static RecognizeLinkedEntitiesResultCollection getRecognizeLinkedEntitiesResultCollection() { return new RecognizeLinkedEntitiesResultCollection( asList(new RecognizeLinkedEntitiesResult("0", new TextDocumentStatistics(44, 1), null, new LinkedEntityCollection(new IterableStream<>(getLinkedEntitiesList1()), null)), new RecognizeLinkedEntitiesResult("1", new TextDocumentStatistics(20, 1), null, new LinkedEntityCollection(new IterableStream<>(getLinkedEntitiesList2()), null)) ), DEFAULT_MODEL_VERSION, new TextDocumentBatchStatistics(2, 2, 0, 2)); } static RecognizeLinkedEntitiesResultCollection getRecognizeLinkedEntitiesResultCollectionForActions() { return new RecognizeLinkedEntitiesResultCollection( asList(new RecognizeLinkedEntitiesResult("0", new TextDocumentStatistics(44, 1), null, new LinkedEntityCollection(new IterableStream<>(getLinkedEntitiesList1()), null)), new RecognizeLinkedEntitiesResult("1", new TextDocumentStatistics(20, 1), null, new LinkedEntityCollection(new IterableStream<>(getLinkedEntitiesList3()), null)) ), DEFAULT_MODEL_VERSION, new TextDocumentBatchStatistics(2, 2, 0, 2)); } static AnalyzeSentimentResultCollection getAnalyzeSentimentResultCollectionForActions() { final AnalyzeSentimentResult analyzeSentimentResult1 = new AnalyzeSentimentResult("0", null, null, getExpectedDocumentSentimentForActions()); final AnalyzeSentimentResult analyzeSentimentResult2 = new AnalyzeSentimentResult("1", null, null, 
getExpectedDocumentSentimentForActions2()); return new AnalyzeSentimentResultCollection( asList(analyzeSentimentResult1, analyzeSentimentResult2), DEFAULT_MODEL_VERSION, new TextDocumentBatchStatistics(2, 2, 0, 2)); } static RecognizeEntitiesActionResult getExpectedRecognizeEntitiesActionResult(boolean isError, String actionName, OffsetDateTime completeAt, RecognizeEntitiesResultCollection resultCollection, TextAnalyticsError actionError) { RecognizeEntitiesActionResult actionResult = new RecognizeEntitiesActionResult(); RecognizeEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, resultCollection); TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, actionName); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, completeAt); TextAnalyticsActionResultPropertiesHelper.setIsError(actionResult, isError); TextAnalyticsActionResultPropertiesHelper.setError(actionResult, actionError); return actionResult; } static RecognizePiiEntitiesActionResult getExpectedRecognizePiiEntitiesActionResult(boolean isError, String actionName, OffsetDateTime completedAt, RecognizePiiEntitiesResultCollection resultCollection, TextAnalyticsError actionError) { RecognizePiiEntitiesActionResult actionResult = new RecognizePiiEntitiesActionResult(); RecognizePiiEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, resultCollection); TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, actionName); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, completedAt); TextAnalyticsActionResultPropertiesHelper.setIsError(actionResult, isError); TextAnalyticsActionResultPropertiesHelper.setError(actionResult, actionError); return actionResult; } static ExtractKeyPhrasesActionResult getExpectedExtractKeyPhrasesActionResult(boolean isError, String actionName, OffsetDateTime completedAt, ExtractKeyPhrasesResultCollection resultCollection, TextAnalyticsError actionError) { 
ExtractKeyPhrasesActionResult actionResult = new ExtractKeyPhrasesActionResult(); ExtractKeyPhrasesActionResultPropertiesHelper.setDocumentsResults(actionResult, resultCollection); TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, actionName); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, completedAt); TextAnalyticsActionResultPropertiesHelper.setIsError(actionResult, isError); TextAnalyticsActionResultPropertiesHelper.setError(actionResult, actionError); return actionResult; } static RecognizeLinkedEntitiesActionResult getExpectedRecognizeLinkedEntitiesActionResult(boolean isError, String actionName, OffsetDateTime completeAt, RecognizeLinkedEntitiesResultCollection resultCollection, TextAnalyticsError actionError) { RecognizeLinkedEntitiesActionResult actionResult = new RecognizeLinkedEntitiesActionResult(); RecognizeLinkedEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, resultCollection); TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, actionName); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, completeAt); TextAnalyticsActionResultPropertiesHelper.setIsError(actionResult, isError); TextAnalyticsActionResultPropertiesHelper.setError(actionResult, actionError); return actionResult; } static AnalyzeSentimentActionResult getExpectedAnalyzeSentimentActionResult(boolean isError, String actionName, OffsetDateTime completeAt, AnalyzeSentimentResultCollection resultCollection, TextAnalyticsError actionError) { AnalyzeSentimentActionResult actionResult = new AnalyzeSentimentActionResult(); AnalyzeSentimentActionResultPropertiesHelper.setDocumentsResults(actionResult, resultCollection); TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, actionName); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, completeAt); TextAnalyticsActionResultPropertiesHelper.setIsError(actionResult, isError); 
TextAnalyticsActionResultPropertiesHelper.setError(actionResult, actionError); return actionResult; } /** * Helper method that get the expected AnalyzeBatchActionsResult result. */ static AnalyzeActionsResult getExpectedAnalyzeBatchActionsResult( IterableStream<RecognizeEntitiesActionResult> recognizeEntitiesActionResults, IterableStream<RecognizeLinkedEntitiesActionResult> recognizeLinkedEntitiesActionResults, IterableStream<RecognizePiiEntitiesActionResult> recognizePiiEntitiesActionResults, IterableStream<ExtractKeyPhrasesActionResult> extractKeyPhrasesActionResults, IterableStream<AnalyzeSentimentActionResult> analyzeSentimentActionResults) { final AnalyzeActionsResult analyzeActionsResult = new AnalyzeActionsResult(); AnalyzeActionsResultPropertiesHelper.setRecognizeEntitiesResults(analyzeActionsResult, recognizeEntitiesActionResults); AnalyzeActionsResultPropertiesHelper.setRecognizePiiEntitiesResults(analyzeActionsResult, recognizePiiEntitiesActionResults); AnalyzeActionsResultPropertiesHelper.setExtractKeyPhrasesResults(analyzeActionsResult, extractKeyPhrasesActionResults); AnalyzeActionsResultPropertiesHelper.setRecognizeLinkedEntitiesResults(analyzeActionsResult, recognizeLinkedEntitiesActionResults); AnalyzeActionsResultPropertiesHelper.setAnalyzeSentimentResults(analyzeActionsResult, analyzeSentimentActionResults); return analyzeActionsResult; } /** * CategorizedEntityCollection result for * "Microsoft employee with ssn 859-98-0987 is using our awesome API's." 
*/
    static RecognizeEntitiesResultCollection getRecognizeEntitiesResultCollectionForPagination(int startIndex,
        int documentCount) {
        // Generates documentCount identical documents whose ids run from startIndex upward.
        List<RecognizeEntitiesResult> recognizeEntitiesResults = new ArrayList<>();
        for (int i = startIndex; i < startIndex + documentCount; i++) {
            recognizeEntitiesResults.add(new RecognizeEntitiesResult(Integer.toString(i), null, null,
                new CategorizedEntityCollection(new IterableStream<>(getCategorizedEntitiesForPiiInput()), null)));
        }
        return new RecognizeEntitiesResultCollection(recognizeEntitiesResults, "2020-04-01",
            new TextDocumentBatchStatistics(documentCount, documentCount, 0, documentCount));
    }

    /**
     * RecognizePiiEntitiesResultCollection result for
     * "Microsoft employee with ssn 859-98-0987 is using our awesome API's."
     */
    static RecognizePiiEntitiesResultCollection getRecognizePiiEntitiesResultCollectionForPagination(int startIndex,
        int documentCount) {
        // Same canned PII result repeated for each document id.
        List<RecognizePiiEntitiesResult> recognizePiiEntitiesResults = new ArrayList<>();
        for (int i = startIndex; i < startIndex + documentCount; i++) {
            recognizePiiEntitiesResults.add(new RecognizePiiEntitiesResult(Integer.toString(i), null, null,
                new PiiEntityCollection(new IterableStream<>(getPiiEntitiesList1()),
                    "********* ******** with ssn *********** is using our awesome API's.", null)));
        }
        return new RecognizePiiEntitiesResultCollection(recognizePiiEntitiesResults, "2020-07-01",
            new TextDocumentBatchStatistics(documentCount, documentCount, 0, documentCount)
        );
    }

    /**
     * ExtractKeyPhrasesResultCollection result for
     * "Microsoft employee with ssn 859-98-0987 is using our awesome API's."
     */
    static ExtractKeyPhrasesResultCollection getExtractKeyPhrasesResultCollectionForPagination(int startIndex,
        int documentCount) {
        List<ExtractKeyPhraseResult> extractKeyPhraseResults = new ArrayList<>();
        for (int i = startIndex; i < startIndex + documentCount; i++) {
            extractKeyPhraseResults.add(new ExtractKeyPhraseResult(Integer.toString(i), null, null,
                new KeyPhrasesCollection(new IterableStream<>(asList("Microsoft employee", "ssn", "awesome API")), null)));
        }
        return new ExtractKeyPhrasesResultCollection(extractKeyPhraseResults, "2020-07-01",
            new TextDocumentBatchStatistics(documentCount, documentCount, 0, documentCount));
    }

    /**
     * RecognizeLinkedEntitiesResultCollection result for
     * "Microsoft employee with ssn 859-98-0987 is using our awesome API's."
     */
    static RecognizeLinkedEntitiesResultCollection getRecognizeLinkedEntitiesResultCollectionForPagination(
        int startIndex, int documentCount) {
        List<RecognizeLinkedEntitiesResult> recognizeLinkedEntitiesResults = new ArrayList<>();
        for (int i = startIndex; i < startIndex + documentCount; i++) {
            recognizeLinkedEntitiesResults.add(new RecognizeLinkedEntitiesResult(Integer.toString(i), null, null,
                new LinkedEntityCollection(new IterableStream<>(getLinkedEntitiesList3()), null)));
        }
        // NOTE(review): model version is the empty string here (unlike the other pagination
        // helpers) — confirm callers do not assert on it.
        return new RecognizeLinkedEntitiesResultCollection(recognizeLinkedEntitiesResults, "",
            new TextDocumentBatchStatistics(documentCount, documentCount, 0, documentCount)
        );
    }

    /**
     * AnalyzeSentimentResultCollection result for
     * "Microsoft employee with ssn 859-98-0987 is using our awesome API's."
     */
    static AnalyzeSentimentResultCollection getAnalyzeSentimentResultCollectionForPagination(
        int startIndex, int documentCount) {
        List<AnalyzeSentimentResult> analyzeSentimentResults = new ArrayList<>();
        for (int i = startIndex; i < startIndex + documentCount; i++) {
            analyzeSentimentResults.add(new AnalyzeSentimentResult(Integer.toString(i), null, null,
                getExpectedDocumentSentimentForActions2()));
        }
        return new AnalyzeSentimentResultCollection(analyzeSentimentResults, "",
            new TextDocumentBatchStatistics(documentCount, documentCount, 0, documentCount)
        );
    }

    /**
     * Helper method that gets a multiple-pages (AnalyzeActionsResult) list.
     */
    static List<AnalyzeActionsResult> getExpectedAnalyzeActionsResultListForMultiplePages(int startIndex,
        int firstPage, int secondPage) {
        List<AnalyzeActionsResult> analyzeActionsResults = new ArrayList<>();
        // First page: one action result per action type, each carrying `firstPage` documents.
        analyzeActionsResults.add(getExpectedAnalyzeBatchActionsResult(
            IterableStream.of(asList(getExpectedRecognizeEntitiesActionResult(
                false, null, TIME_NOW,
                getRecognizeEntitiesResultCollectionForPagination(startIndex, firstPage), null))),
            IterableStream.of(asList(getExpectedRecognizeLinkedEntitiesActionResult(
                false, null, TIME_NOW,
                getRecognizeLinkedEntitiesResultCollectionForPagination(startIndex, firstPage), null))),
            IterableStream.of(asList(getExpectedRecognizePiiEntitiesActionResult(
                false, null, TIME_NOW,
                getRecognizePiiEntitiesResultCollectionForPagination(startIndex, firstPage), null))),
            IterableStream.of(asList(getExpectedExtractKeyPhrasesActionResult(
                false, null, TIME_NOW,
                getExtractKeyPhrasesResultCollectionForPagination(startIndex, firstPage), null))),
            IterableStream.of(asList(getExpectedAnalyzeSentimentActionResult(
                false, null, TIME_NOW,
                getAnalyzeSentimentResultCollectionForPagination(startIndex, firstPage), null)))
        ));
        // Second page continues the document ids exactly where the first page stopped.
        startIndex += firstPage;
        analyzeActionsResults.add(getExpectedAnalyzeBatchActionsResult(
            IterableStream.of(asList(getExpectedRecognizeEntitiesActionResult(
                false, null, TIME_NOW,
                getRecognizeEntitiesResultCollectionForPagination(startIndex, secondPage), null))),
            IterableStream.of(asList(getExpectedRecognizeLinkedEntitiesActionResult(
                false, null, TIME_NOW,
                getRecognizeLinkedEntitiesResultCollectionForPagination(startIndex, secondPage), null))),
            IterableStream.of(asList(getExpectedRecognizePiiEntitiesActionResult(
                false, null, TIME_NOW,
                getRecognizePiiEntitiesResultCollectionForPagination(startIndex, secondPage), null))),
            IterableStream.of(asList(getExpectedExtractKeyPhrasesActionResult(
                false, null, TIME_NOW,
                getExtractKeyPhrasesResultCollectionForPagination(startIndex, secondPage), null))),
            IterableStream.of(asList(getExpectedAnalyzeSentimentActionResult(
                false, null, TIME_NOW,
                getAnalyzeSentimentResultCollectionForPagination(startIndex, secondPage), null)))
        ));
        return analyzeActionsResults;
    }

    /**
     * Helper method that gets a customized TextAnalyticsError.
     */
    static TextAnalyticsError getActionError(TextAnalyticsErrorCode errorCode, String taskName, String index) {
        // NOTE(review): the third argument's string literal appears truncated in this copy of the
        // source (it presumably names the failing task path from taskName and index) — verify upstream.
        return new TextAnalyticsError(errorCode, "", "
    }

    /**
     * Returns a stream of arguments that includes all combinations of eligible {@link HttpClient HttpClients} and
     * service versions that should be tested.
     *
     * @return A stream of HttpClient and service version combinations to test.
     */
    static Stream<Arguments> getTestParameters() {
        List<Arguments> argumentsList = new ArrayList<>();
        // Cross product of every eligible HttpClient with every service version passing the filter.
        getHttpClients()
            .forEach(httpClient -> {
                Arrays.stream(TextAnalyticsServiceVersion.values()).filter(
                    TestUtils::shouldServiceVersionBeTested)
                    .forEach(serviceVersion -> argumentsList.add(Arguments.of(httpClient, serviceVersion)));
            });
        return argumentsList.stream();
    }

    /**
     * Returns whether the given service version match the rules of test framework.
* * <ul> * <li>Using latest service version as default if no environment variable is set.</li> * <li>If it's set to ALL, all Service versions in {@link TextAnalyticsServiceVersion} will be tested.</li> * <li>Otherwise, Service version string should match env variable.</li> * </ul> * * Environment values currently supported are: "ALL", "${version}". * Use comma to separate http clients want to test. * e.g. {@code set AZURE_TEST_SERVICE_VERSIONS = V1_0, V2_0} * * @param serviceVersion ServiceVersion needs to check * @return Boolean indicates whether filters out the service version or not. */ private static boolean shouldServiceVersionBeTested(TextAnalyticsServiceVersion serviceVersion) { String serviceVersionFromEnv = Configuration.getGlobalConfiguration().get(AZURE_TEXT_ANALYTICS_TEST_SERVICE_VERSIONS); if (CoreUtils.isNullOrEmpty(serviceVersionFromEnv)) { return TextAnalyticsServiceVersion.getLatest().equals(serviceVersion); } if (AZURE_TEST_SERVICE_VERSIONS_VALUE_ALL.equalsIgnoreCase(serviceVersionFromEnv)) { return true; } String[] configuredServiceVersionList = serviceVersionFromEnv.split(","); return Arrays.stream(configuredServiceVersionList).anyMatch(configuredServiceVersion -> serviceVersion.getVersion().equals(configuredServiceVersion.trim())); } private TestUtils() { } }
class TestUtils { private static final String DEFAULT_MODEL_VERSION = "2019-10-01"; static final OffsetDateTime TIME_NOW = OffsetDateTime.now(); static final String INVALID_URL = "htttttttps: static final String VALID_HTTPS_LOCALHOST = "https: static final String FAKE_API_KEY = "1234567890"; static final String AZURE_TEXT_ANALYTICS_API_KEY = "AZURE_TEXT_ANALYTICS_API_KEY"; static final String CUSTOM_ACTION_NAME = "customActionName"; static final List<String> CUSTOM_ENTITIES_INPUT = asList( "David Schmidt, senior vice president--Food Safety, International Food Information Council (IFIC), Washington," + " D.C., discussed the physical activity component."); static final List<String> CUSTOM_SINGLE_CLASSIFICATION = asList( "A recent report by the Government Accountability Office (GAO) found that the dramatic increase in oil" + " and natural gas development on federal lands over the past six years has stretched the staff of " + "the BLM to a point that it has been unable to meet its environmental protection responsibilities."); static final List<String> CUSTOM_MULTI_CLASSIFICATION = asList( "I need a reservation for an indoor restaurant in China. Please don't stop the music. Play music and add" + " it to my playlist"); static final List<String> SUMMARY_INPUTS = asList( "At Microsoft, we have been on a quest to advance AI beyond existing techniques, by taking a more holistic," + " human-centric approach to learning and understanding. As Chief Technology Officer of Azure AI " + "Cognitive Services, I have been working with a team of amazing scientists and engineers to turn this" + " quest into a reality. In my role, I enjoy a unique perspective in viewing the relationship among " + "three attributes of human cognition: monolingual text (X), audio or visual sensory signals, (Y) and" + " multilingual (Z). 
At the intersection of all three, there’s magic—what we call XYZ-code as" + " illustrated in Figure 1—a joint representation to create more powerful AI that can speak, hear, see," + " and understand humans better. We believe XYZ-code will enable us to fulfill our long-term vision:" + " cross-domain transfer learning, spanning modalities and languages. The goal is to have pretrained" + " models that can jointly learn representations to support a broad range of downstream AI tasks, much" + " in the way humans do today. Over the past five years, we have achieved human performance on benchmarks" + " in conversational speech recognition, machine translation, conversational question answering, machine" + " reading comprehension, and image captioning. These five breakthroughs provided us with strong signals" + " toward our more ambitious aspiration to produce a leap in AI capabilities, achieving multisensory and" + " multilingual learning that is closer in line with how humans learn and understand. I believe the joint" + " XYZ-code is a foundational component of this aspiration, if grounded with external knowledge sources" + " in the downstream AI tasks." ); static final List<String> SENTIMENT_INPUTS = asList( "The hotel was dark and unclean. The restaurant had amazing gnocchi.", "The restaurant had amazing gnocchi. The hotel was dark and unclean."); static final List<String> CATEGORIZED_ENTITY_INPUTS = asList( "I had a wonderful trip to Seattle last week.", "I work at Microsoft."); static final List<String> PII_ENTITY_INPUTS = asList( "Microsoft employee with ssn 859-98-0987 is using our awesome API's.", "Your ABA number - 111000025 - is the first 9 digits in the lower left hand corner of your personal check."); static final List<String> LINKED_ENTITY_INPUTS = asList( "I had a wonderful trip to Seattle last week.", "I work at Microsoft."); static final List<String> KEY_PHRASE_INPUTS = asList( "Hello world. 
This is some input text that I love.", "Bonjour tout le monde"); static final String TOO_LONG_INPUT = "Thisisaveryveryverylongtextwhichgoesonforalongtimeandwhichalmostdoesn'tseemtostopatanygivenpointintime.ThereasonforthistestistotryandseewhathappenswhenwesubmitaveryveryverylongtexttoLanguage.Thisshouldworkjustfinebutjustincaseitisalwaysgoodtohaveatestcase.ThisallowsustotestwhathappensifitisnotOK.Ofcourseitisgoingtobeokbutthenagainitisalsobettertobesure!"; static final List<String> KEY_PHRASE_FRENCH_INPUTS = asList( "Bonjour tout le monde.", "Je m'appelle Mondly."); static final List<String> DETECT_LANGUAGE_INPUTS = asList( "This is written in English", "Este es un documento escrito en Español.", "~@!~:)"); static final String PII_ENTITY_OFFSET_INPUT = "SSN: 859-98-0987"; static final String SENTIMENT_OFFSET_INPUT = "The hotel was unclean."; static final String HEALTHCARE_ENTITY_OFFSET_INPUT = "The patient is a 54-year-old"; static final List<String> HEALTHCARE_INPUTS = asList( "The patient is a 54-year-old gentleman with a history of progressive angina over the past several months.", "The patient went for six minutes with minimal ST depressions in the anterior lateral leads , thought due to fatigue and wrist pain , his anginal equivalent."); static final List<String> SPANISH_SAME_AS_ENGLISH_INPUTS = asList("personal", "social"); static final DetectedLanguage DETECTED_LANGUAGE_SPANISH = new DetectedLanguage("Spanish", "es", 1.0, null); static final DetectedLanguage DETECTED_LANGUAGE_ENGLISH = new DetectedLanguage("English", "en", 1.0, null); static final List<DetectedLanguage> DETECT_SPANISH_LANGUAGE_RESULTS = asList( DETECTED_LANGUAGE_SPANISH, DETECTED_LANGUAGE_SPANISH); static final List<DetectedLanguage> DETECT_ENGLISH_LANGUAGE_RESULTS = asList( DETECTED_LANGUAGE_ENGLISH, DETECTED_LANGUAGE_ENGLISH); static final HttpResponseException HTTP_RESPONSE_EXCEPTION_CLASS = new HttpResponseException("", null); static final String DISPLAY_NAME_WITH_ARGUMENTS = 
"{displayName} with [{arguments}]"; private static final String AZURE_TEXT_ANALYTICS_TEST_SERVICE_VERSIONS = "AZURE_TEXT_ANALYTICS_TEST_SERVICE_VERSIONS"; static List<DetectLanguageInput> getDetectLanguageInputs() { return asList( new DetectLanguageInput("0", DETECT_LANGUAGE_INPUTS.get(0), "US"), new DetectLanguageInput("1", DETECT_LANGUAGE_INPUTS.get(1), "US"), new DetectLanguageInput("2", DETECT_LANGUAGE_INPUTS.get(2), "US") ); } static List<DetectLanguageInput> getDuplicateIdDetectLanguageInputs() { return asList( new DetectLanguageInput("0", DETECT_LANGUAGE_INPUTS.get(0), "US"), new DetectLanguageInput("0", DETECT_LANGUAGE_INPUTS.get(0), "US") ); } static List<TextDocumentInput> getDuplicateTextDocumentInputs() { return asList( new TextDocumentInput("0", CATEGORIZED_ENTITY_INPUTS.get(0)), new TextDocumentInput("0", CATEGORIZED_ENTITY_INPUTS.get(0)), new TextDocumentInput("0", CATEGORIZED_ENTITY_INPUTS.get(0)) ); } static List<TextDocumentInput> getWarningsTextDocumentInputs() { return asList( new TextDocumentInput("0", TOO_LONG_INPUT), new TextDocumentInput("1", CATEGORIZED_ENTITY_INPUTS.get(1)) ); } static List<TextDocumentInput> getTextDocumentInputs(List<String> inputs) { return IntStream.range(0, inputs.size()) .mapToObj(index -> new TextDocumentInput(String.valueOf(index), inputs.get(index))) .collect(Collectors.toList()); } /** * Helper method to get the expected Batch Detected Languages * * @return A {@link DetectLanguageResultCollection}. 
*/
    static DetectLanguageResultCollection getExpectedBatchDetectedLanguages() {
        // Shared batch statistics for the three-document batch.
        // (Argument order assumed (documentCount, validCount, erroneousCount, transactionCount) —
        // confirm against the TextDocumentBatchStatistics constructor.)
        final TextDocumentBatchStatistics textDocumentBatchStatistics = new TextDocumentBatchStatistics(3, 3, 0, 3);
        // One expected result per document in DETECT_LANGUAGE_INPUTS, keyed by string ids "0".."2".
        final List<DetectLanguageResult> detectLanguageResultList = asList(
            new DetectLanguageResult("0", new TextDocumentStatistics(26, 1), null, getDetectedLanguageEnglish()),
            new DetectLanguageResult("1", new TextDocumentStatistics(40, 1), null, getDetectedLanguageSpanish()),
            new DetectLanguageResult("2", new TextDocumentStatistics(6, 1), null, getUnknownDetectedLanguage()));
        return new DetectLanguageResultCollection(detectLanguageResultList, DEFAULT_MODEL_VERSION,
            textDocumentBatchStatistics);
    }

    // Fixture: English ("en") detected language with a 0.0 confidence score.
    static DetectedLanguage getDetectedLanguageEnglish() {
        return new DetectedLanguage("English", "en", 0.0, null);
    }

    // Fixture: Spanish ("es") detected language with a 0.0 confidence score.
    static DetectedLanguage getDetectedLanguageSpanish() {
        return new DetectedLanguage("Spanish", "es", 0.0, null);
    }

    // Fixture: "(Unknown)" detected language with a 0.0 confidence score.
    static DetectedLanguage getUnknownDetectedLanguage() {
        return new DetectedLanguage("(Unknown)", "(Unknown)", 0.0, null);
    }

    /**
     * Helper method to get the expected Batch Categorized Entities
     *
     * @return A {@link RecognizeEntitiesResultCollection}.
*/
    static RecognizeEntitiesResultCollection getExpectedBatchCategorizedEntities() {
        // Two-document batch: results built by the sibling getExpectedBatchCategorizedEntities1/2 helpers.
        return new RecognizeEntitiesResultCollection(
            asList(getExpectedBatchCategorizedEntities1(), getExpectedBatchCategorizedEntities2()),
            DEFAULT_MODEL_VERSION, new TextDocumentBatchStatistics(2, 2, 0, 2));
    }

    /**
     * Helper method to get the expected Categorized Entities List 1:
     * "trip" (EVENT, offset 18), "Seattle" (LOCATION/GPE, offset 26) and "last week"
     * (DATE_TIME/DateRange, offset 34). Confidence scores are 0.0 placeholders; offsets are
     * set via the reflection helper because the setters are not public on the model.
     */
    static List<CategorizedEntity> getCategorizedEntitiesList1() {
        CategorizedEntity categorizedEntity1 = new CategorizedEntity("trip", EntityCategory.EVENT, null, 0.0);
        CategorizedEntityPropertiesHelper.setOffset(categorizedEntity1, 18);
        CategorizedEntity categorizedEntity2 = new CategorizedEntity("Seattle", EntityCategory.LOCATION, "GPE", 0.0);
        CategorizedEntityPropertiesHelper.setOffset(categorizedEntity2, 26);
        CategorizedEntity categorizedEntity3 = new CategorizedEntity("last week", EntityCategory.DATE_TIME, "DateRange", 0.0);
        CategorizedEntityPropertiesHelper.setOffset(categorizedEntity3, 34);
        return asList(categorizedEntity1, categorizedEntity2, categorizedEntity3);
    }

    /**
     * Helper method to get the expected Categorized Entities List 2:
     * a single "Microsoft" ORGANIZATION entity at offset 10.
     */
    static List<CategorizedEntity> getCategorizedEntitiesList2() {
        CategorizedEntity categorizedEntity1 = new CategorizedEntity("Microsoft", EntityCategory.ORGANIZATION, null, 0.0);
        CategorizedEntityPropertiesHelper.setOffset(categorizedEntity1, 10);
        return asList(categorizedEntity1);
    }

    /**
     * Helper method to get the expected Categorized entity result for PII document input.
*/ static List<CategorizedEntity> getCategorizedEntitiesForPiiInput() { CategorizedEntity categorizedEntity1 = new CategorizedEntity("Microsoft", EntityCategory.ORGANIZATION, null, 0.0); CategorizedEntityPropertiesHelper.setOffset(categorizedEntity1, 0); CategorizedEntity categorizedEntity2 = new CategorizedEntity("employee", EntityCategory.PERSON_TYPE, null, 0.0); CategorizedEntityPropertiesHelper.setOffset(categorizedEntity2, 10); CategorizedEntity categorizedEntity3 = new CategorizedEntity("859", EntityCategory.QUANTITY, "Number", 0.0); CategorizedEntityPropertiesHelper.setOffset(categorizedEntity3, 28); CategorizedEntity categorizedEntity4 = new CategorizedEntity("98", EntityCategory.QUANTITY, "Number", 0.0); CategorizedEntityPropertiesHelper.setOffset(categorizedEntity4, 32); CategorizedEntity categorizedEntity5 = new CategorizedEntity("0987", EntityCategory.QUANTITY, "Number", 0.0); CategorizedEntityPropertiesHelper.setOffset(categorizedEntity5, 35); CategorizedEntity categorizedEntity6 = new CategorizedEntity("API", EntityCategory.SKILL, null, 0.0); CategorizedEntityPropertiesHelper.setOffset(categorizedEntity6, 61); return asList(categorizedEntity1, categorizedEntity2, categorizedEntity3, categorizedEntity4, categorizedEntity5, categorizedEntity6); } /** * Helper method to get the expected Batch Categorized Entities */ static RecognizeEntitiesResult getExpectedBatchCategorizedEntities1() { IterableStream<CategorizedEntity> categorizedEntityList1 = new IterableStream<>(getCategorizedEntitiesList1()); TextDocumentStatistics textDocumentStatistics1 = new TextDocumentStatistics(44, 1); RecognizeEntitiesResult recognizeEntitiesResult1 = new RecognizeEntitiesResult("0", textDocumentStatistics1, null, new CategorizedEntityCollection(categorizedEntityList1, null)); return recognizeEntitiesResult1; } /** * Helper method to get the expected Batch Categorized Entities */ static RecognizeEntitiesResult getExpectedBatchCategorizedEntities2() { 
IterableStream<CategorizedEntity> categorizedEntityList2 = new IterableStream<>(getCategorizedEntitiesList2()); TextDocumentStatistics textDocumentStatistics2 = new TextDocumentStatistics(20, 1); RecognizeEntitiesResult recognizeEntitiesResult2 = new RecognizeEntitiesResult("1", textDocumentStatistics2, null, new CategorizedEntityCollection(categorizedEntityList2, null)); return recognizeEntitiesResult2; } /** * Helper method to get the expected batch of Personally Identifiable Information entities */ static RecognizePiiEntitiesResultCollection getExpectedBatchPiiEntities() { PiiEntityCollection piiEntityCollection = new PiiEntityCollection(new IterableStream<>(getPiiEntitiesList1()), "********* ******** with ssn *********** is using our awesome API's.", null); PiiEntityCollection piiEntityCollection2 = new PiiEntityCollection(new IterableStream<>(getPiiEntitiesList2()), "Your ABA number - ********* - is the first 9 digits in the lower left hand corner of your personal check.", null); TextDocumentStatistics textDocumentStatistics1 = new TextDocumentStatistics(67, 1); TextDocumentStatistics textDocumentStatistics2 = new TextDocumentStatistics(105, 1); RecognizePiiEntitiesResult recognizeEntitiesResult1 = new RecognizePiiEntitiesResult("0", textDocumentStatistics1, null, piiEntityCollection); RecognizePiiEntitiesResult recognizeEntitiesResult2 = new RecognizePiiEntitiesResult("1", textDocumentStatistics2, null, piiEntityCollection2); return new RecognizePiiEntitiesResultCollection( asList(recognizeEntitiesResult1, recognizeEntitiesResult2), DEFAULT_MODEL_VERSION, new TextDocumentBatchStatistics(2, 2, 0, 2)); } /** * Helper method to get the expected batch of Personally Identifiable Information entities for domain filter */ static RecognizePiiEntitiesResultCollection getExpectedBatchPiiEntitiesForDomainFilter() { PiiEntityCollection piiEntityCollection = new PiiEntityCollection( new IterableStream<>(getPiiEntitiesList1ForDomainFilter()), "********* employee with ssn 
*********** is using our awesome API's.", null); PiiEntityCollection piiEntityCollection2 = new PiiEntityCollection( new IterableStream<>(Arrays.asList(getPiiEntitiesList2().get(0), getPiiEntitiesList2().get(1), getPiiEntitiesList2().get(2))), "Your ABA number - ********* - is the first 9 digits in the lower left hand corner of your personal check.", null); TextDocumentStatistics textDocumentStatistics1 = new TextDocumentStatistics(67, 1); TextDocumentStatistics textDocumentStatistics2 = new TextDocumentStatistics(105, 1); RecognizePiiEntitiesResult recognizeEntitiesResult1 = new RecognizePiiEntitiesResult("0", textDocumentStatistics1, null, piiEntityCollection); RecognizePiiEntitiesResult recognizeEntitiesResult2 = new RecognizePiiEntitiesResult("1", textDocumentStatistics2, null, piiEntityCollection2); return new RecognizePiiEntitiesResultCollection( asList(recognizeEntitiesResult1, recognizeEntitiesResult2), DEFAULT_MODEL_VERSION, new TextDocumentBatchStatistics(2, 2, 0, 2)); } /** * Helper method to get the expected Categorized Entities List 1 */ static List<PiiEntity> getPiiEntitiesList1() { final PiiEntity piiEntity0 = new PiiEntity(); PiiEntityPropertiesHelper.setText(piiEntity0, "Microsoft"); PiiEntityPropertiesHelper.setCategory(piiEntity0, PiiEntityCategory.ORGANIZATION); PiiEntityPropertiesHelper.setSubcategory(piiEntity0, null); PiiEntityPropertiesHelper.setOffset(piiEntity0, 0); final PiiEntity piiEntity1 = new PiiEntity(); PiiEntityPropertiesHelper.setText(piiEntity1, "employee"); PiiEntityPropertiesHelper.setCategory(piiEntity1, PiiEntityCategory.fromString("PersonType")); PiiEntityPropertiesHelper.setSubcategory(piiEntity1, null); PiiEntityPropertiesHelper.setOffset(piiEntity1, 10); final PiiEntity piiEntity2 = new PiiEntity(); PiiEntityPropertiesHelper.setText(piiEntity2, "859-98-0987"); PiiEntityPropertiesHelper.setCategory(piiEntity2, PiiEntityCategory.US_SOCIAL_SECURITY_NUMBER); PiiEntityPropertiesHelper.setSubcategory(piiEntity2, null); 
PiiEntityPropertiesHelper.setOffset(piiEntity2, 28); return asList(piiEntity0, piiEntity1, piiEntity2); } static List<PiiEntity> getPiiEntitiesList1ForDomainFilter() { return Arrays.asList(getPiiEntitiesList1().get(0), getPiiEntitiesList1().get(2)); } /** * Helper method to get the expected Categorized Entities List 2 */ static List<PiiEntity> getPiiEntitiesList2() { String expectedText = "111000025"; final PiiEntity piiEntity0 = new PiiEntity(); PiiEntityPropertiesHelper.setText(piiEntity0, expectedText); PiiEntityPropertiesHelper.setCategory(piiEntity0, PiiEntityCategory.PHONE_NUMBER); PiiEntityPropertiesHelper.setSubcategory(piiEntity0, null); PiiEntityPropertiesHelper.setConfidenceScore(piiEntity0, 0.8); PiiEntityPropertiesHelper.setOffset(piiEntity0, 18); final PiiEntity piiEntity1 = new PiiEntity(); PiiEntityPropertiesHelper.setText(piiEntity1, expectedText); PiiEntityPropertiesHelper.setCategory(piiEntity1, PiiEntityCategory.ABA_ROUTING_NUMBER); PiiEntityPropertiesHelper.setSubcategory(piiEntity1, null); PiiEntityPropertiesHelper.setConfidenceScore(piiEntity1, 0.75); PiiEntityPropertiesHelper.setOffset(piiEntity1, 18); final PiiEntity piiEntity2 = new PiiEntity(); PiiEntityPropertiesHelper.setText(piiEntity2, expectedText); PiiEntityPropertiesHelper.setCategory(piiEntity2, PiiEntityCategory.NZ_SOCIAL_WELFARE_NUMBER); PiiEntityPropertiesHelper.setSubcategory(piiEntity2, null); PiiEntityPropertiesHelper.setConfidenceScore(piiEntity2, 0.65); PiiEntityPropertiesHelper.setOffset(piiEntity2, 18); return asList(piiEntity0, piiEntity1, piiEntity2); } /** * Helper method to get the expected batch of Personally Identifiable Information entities for categories filter */ static RecognizePiiEntitiesResultCollection getExpectedBatchPiiEntitiesForCategoriesFilter() { PiiEntityCollection piiEntityCollection = new PiiEntityCollection( new IterableStream<>(asList(getPiiEntitiesList1().get(2))), "Microsoft employee with ssn *********** is using our awesome API's.", null); 
PiiEntityCollection piiEntityCollection2 = new PiiEntityCollection( new IterableStream<>(asList(getPiiEntitiesList2().get(1))), "Your ABA number - ********* - is the first 9 digits in the lower left hand corner of your personal check.", null); RecognizePiiEntitiesResult recognizeEntitiesResult1 = new RecognizePiiEntitiesResult("0", null, null, piiEntityCollection); RecognizePiiEntitiesResult recognizeEntitiesResult2 = new RecognizePiiEntitiesResult("1", null, null, piiEntityCollection2); return new RecognizePiiEntitiesResultCollection( asList(recognizeEntitiesResult1, recognizeEntitiesResult2), DEFAULT_MODEL_VERSION, new TextDocumentBatchStatistics(2, 2, 0, 2)); } /** * Helper method to get the expected Batch Linked Entities * @return A {@link RecognizeLinkedEntitiesResultCollection}. */ static RecognizeLinkedEntitiesResultCollection getExpectedBatchLinkedEntities() { final TextDocumentBatchStatistics textDocumentBatchStatistics = new TextDocumentBatchStatistics(2, 2, 0, 2); final List<RecognizeLinkedEntitiesResult> recognizeLinkedEntitiesResultList = asList( new RecognizeLinkedEntitiesResult( "0", new TextDocumentStatistics(44, 1), null, new LinkedEntityCollection(new IterableStream<>(getLinkedEntitiesList1()), null)), new RecognizeLinkedEntitiesResult( "1", new TextDocumentStatistics(20, 1), null, new LinkedEntityCollection(new IterableStream<>(getLinkedEntitiesList2()), null))); return new RecognizeLinkedEntitiesResultCollection(recognizeLinkedEntitiesResultList, DEFAULT_MODEL_VERSION, textDocumentBatchStatistics); } /** * Helper method to get the expected linked Entities List 1 */ static List<LinkedEntity> getLinkedEntitiesList1() { final LinkedEntityMatch linkedEntityMatch = new LinkedEntityMatch("Seattle", 0.0); LinkedEntityMatchPropertiesHelper.setOffset(linkedEntityMatch, 26); LinkedEntity linkedEntity = new LinkedEntity( "Seattle", new IterableStream<>(Collections.singletonList(linkedEntityMatch)), "en", "Seattle", "https: "Wikipedia"); 
LinkedEntityPropertiesHelper.setBingEntitySearchApiId(linkedEntity, "5fbba6b8-85e1-4d41-9444-d9055436e473"); return asList(linkedEntity); } /** * Helper method to get the expected linked Entities List 2 */ static List<LinkedEntity> getLinkedEntitiesList2() { LinkedEntityMatch linkedEntityMatch = new LinkedEntityMatch("Microsoft", 0.0); LinkedEntityMatchPropertiesHelper.setOffset(linkedEntityMatch, 10); LinkedEntity linkedEntity = new LinkedEntity( "Microsoft", new IterableStream<>(Collections.singletonList(linkedEntityMatch)), "en", "Microsoft", "https: "Wikipedia"); LinkedEntityPropertiesHelper.setBingEntitySearchApiId(linkedEntity, "a093e9b9-90f5-a3d5-c4b8-5855e1b01f85"); return asList(linkedEntity); } static List<LinkedEntity> getLinkedEntitiesList3() { LinkedEntityMatch linkedEntityMatch = new LinkedEntityMatch("Microsoft", 0.0); LinkedEntityMatchPropertiesHelper.setOffset(linkedEntityMatch, 0); LinkedEntityMatch linkedEntityMatch1 = new LinkedEntityMatch("API's", 0.0); LinkedEntityMatchPropertiesHelper.setOffset(linkedEntityMatch1, 61); LinkedEntity linkedEntity = new LinkedEntity( "Microsoft", new IterableStream<>(Collections.singletonList(linkedEntityMatch)), "en", "Microsoft", "https: "Wikipedia"); LinkedEntityPropertiesHelper.setBingEntitySearchApiId(linkedEntity, "a093e9b9-90f5-a3d5-c4b8-5855e1b01f85"); LinkedEntity linkedEntity1 = new LinkedEntity( "Application programming interface", new IterableStream<>(Collections.singletonList(linkedEntityMatch1)), "en", "Application programming interface", "https: "Wikipedia"); return asList(linkedEntity, linkedEntity1); } /** * Helper method to get the expected Batch Key Phrases. 
*/
    static ExtractKeyPhrasesResultCollection getExpectedBatchKeyPhrases() {
        TextDocumentStatistics textDocumentStatistics1 = new TextDocumentStatistics(49, 1);
        TextDocumentStatistics textDocumentStatistics2 = new TextDocumentStatistics(21, 1);
        // Document "0": English key phrases; document "1": French key phrases.
        ExtractKeyPhraseResult extractKeyPhraseResult1 = new ExtractKeyPhraseResult("0", textDocumentStatistics1, null,
            new KeyPhrasesCollection(new IterableStream<>(asList("Hello world", "input text")), null));
        ExtractKeyPhraseResult extractKeyPhraseResult2 = new ExtractKeyPhraseResult("1", textDocumentStatistics2, null,
            new KeyPhrasesCollection(new IterableStream<>(asList("Bonjour", "monde")), null));
        TextDocumentBatchStatistics textDocumentBatchStatistics = new TextDocumentBatchStatistics(2, 2, 0, 2);
        List<ExtractKeyPhraseResult> extractKeyPhraseResultList = asList(extractKeyPhraseResult1, extractKeyPhraseResult2);
        return new ExtractKeyPhrasesResultCollection(extractKeyPhraseResultList, DEFAULT_MODEL_VERSION,
            textDocumentBatchStatistics);
    }

    /**
     * Helper method to get the expected Batch Text Sentiments.
     * Both documents share the same per-document statistics; the two expected
     * {@link DocumentSentiment} fixtures are built by the sibling helpers.
     */
    static AnalyzeSentimentResultCollection getExpectedBatchTextSentiment() {
        final TextDocumentStatistics textDocumentStatistics = new TextDocumentStatistics(67, 1);
        final AnalyzeSentimentResult analyzeSentimentResult1 = new AnalyzeSentimentResult("0",
            textDocumentStatistics, null, getExpectedDocumentSentiment());
        final AnalyzeSentimentResult analyzeSentimentResult2 = new AnalyzeSentimentResult("1",
            textDocumentStatistics, null, getExpectedDocumentSentiment2());
        return new AnalyzeSentimentResultCollection(
            asList(analyzeSentimentResult1, analyzeSentimentResult2),
            DEFAULT_MODEL_VERSION, new TextDocumentBatchStatistics(2, 2, 0, 2));
    }

    /**
     * Helper method that get the first expected DocumentSentiment result.
*/ static DocumentSentiment getExpectedDocumentSentiment() { final AssessmentSentiment assessmentSentiment1 = new AssessmentSentiment(); AssessmentSentimentPropertiesHelper.setText(assessmentSentiment1, "dark"); AssessmentSentimentPropertiesHelper.setSentiment(assessmentSentiment1, TextSentiment.NEGATIVE); AssessmentSentimentPropertiesHelper.setConfidenceScores(assessmentSentiment1, new SentimentConfidenceScores(0.0, 0.0, 0.0)); AssessmentSentimentPropertiesHelper.setNegated(assessmentSentiment1, false); AssessmentSentimentPropertiesHelper.setOffset(assessmentSentiment1, 14); AssessmentSentimentPropertiesHelper.setLength(assessmentSentiment1, 0); final AssessmentSentiment assessmentSentiment2 = new AssessmentSentiment(); AssessmentSentimentPropertiesHelper.setText(assessmentSentiment2, "unclean"); AssessmentSentimentPropertiesHelper.setSentiment(assessmentSentiment2, TextSentiment.NEGATIVE); AssessmentSentimentPropertiesHelper.setConfidenceScores(assessmentSentiment2, new SentimentConfidenceScores(0.0, 0.0, 0.0)); AssessmentSentimentPropertiesHelper.setNegated(assessmentSentiment2, false); AssessmentSentimentPropertiesHelper.setOffset(assessmentSentiment2, 23); AssessmentSentimentPropertiesHelper.setLength(assessmentSentiment2, 0); final AssessmentSentiment assessmentSentiment3 = new AssessmentSentiment(); AssessmentSentimentPropertiesHelper.setText(assessmentSentiment3, "amazing"); AssessmentSentimentPropertiesHelper.setSentiment(assessmentSentiment3, TextSentiment.POSITIVE); AssessmentSentimentPropertiesHelper.setConfidenceScores(assessmentSentiment3, new SentimentConfidenceScores(0.0, 0.0, 0.0)); AssessmentSentimentPropertiesHelper.setNegated(assessmentSentiment3, false); AssessmentSentimentPropertiesHelper.setOffset(assessmentSentiment3, 51); AssessmentSentimentPropertiesHelper.setLength(assessmentSentiment3, 0); final TargetSentiment targetSentiment1 = new TargetSentiment(); TargetSentimentPropertiesHelper.setText(targetSentiment1, "hotel"); 
TargetSentimentPropertiesHelper.setSentiment(targetSentiment1, TextSentiment.NEGATIVE); TargetSentimentPropertiesHelper.setConfidenceScores(targetSentiment1, new SentimentConfidenceScores(0.0, 0.0, 0.0)); TargetSentimentPropertiesHelper.setOffset(targetSentiment1, 4); final SentenceOpinion sentenceOpinion1 = new SentenceOpinion(); SentenceOpinionPropertiesHelper.setTarget(sentenceOpinion1, targetSentiment1); SentenceOpinionPropertiesHelper.setAssessments(sentenceOpinion1, new IterableStream<>(asList(assessmentSentiment1, assessmentSentiment2))); final TargetSentiment targetSentiment2 = new TargetSentiment(); TargetSentimentPropertiesHelper.setText(targetSentiment2, "gnocchi"); TargetSentimentPropertiesHelper.setSentiment(targetSentiment2, TextSentiment.POSITIVE); TargetSentimentPropertiesHelper.setConfidenceScores(targetSentiment2, new SentimentConfidenceScores(0.0, 0.0, 0.0)); TargetSentimentPropertiesHelper.setOffset(targetSentiment2, 59); final SentenceOpinion sentenceOpinion2 = new SentenceOpinion(); SentenceOpinionPropertiesHelper.setTarget(sentenceOpinion2, targetSentiment2); SentenceOpinionPropertiesHelper.setAssessments(sentenceOpinion2, new IterableStream<>(asList(assessmentSentiment3))); final SentenceSentiment sentenceSentiment1 = new SentenceSentiment( "The hotel was dark and unclean.", TextSentiment.NEGATIVE, new SentimentConfidenceScores(0.0, 0.0, 0.0)); SentenceSentimentPropertiesHelper.setOpinions(sentenceSentiment1, new IterableStream<>(asList(sentenceOpinion1))); SentenceSentimentPropertiesHelper.setOffset(sentenceSentiment1, 0); SentenceSentimentPropertiesHelper.setLength(sentenceSentiment1, 31); final SentenceSentiment sentenceSentiment2 = new SentenceSentiment( "The restaurant had amazing gnocchi.", TextSentiment.POSITIVE, new SentimentConfidenceScores(0.0, 0.0, 0.0)); SentenceSentimentPropertiesHelper.setOpinions(sentenceSentiment2, new IterableStream<>(asList(sentenceOpinion2))); SentenceSentimentPropertiesHelper.setOffset(sentenceSentiment2, 
32); SentenceSentimentPropertiesHelper.setLength(sentenceSentiment2, 35); return new DocumentSentiment(TextSentiment.MIXED, new SentimentConfidenceScores(0.0, 0.0, 0.0), new IterableStream<>(asList(sentenceSentiment1, sentenceSentiment2)), null); } /** * Helper method that get the second expected DocumentSentiment result. */ static DocumentSentiment getExpectedDocumentSentiment2() { final AssessmentSentiment assessmentSentiment1 = new AssessmentSentiment(); AssessmentSentimentPropertiesHelper.setText(assessmentSentiment1, "dark"); AssessmentSentimentPropertiesHelper.setSentiment(assessmentSentiment1, TextSentiment.NEGATIVE); AssessmentSentimentPropertiesHelper.setConfidenceScores(assessmentSentiment1, new SentimentConfidenceScores(0.0, 0.0, 0.0)); AssessmentSentimentPropertiesHelper.setNegated(assessmentSentiment1, false); AssessmentSentimentPropertiesHelper.setOffset(assessmentSentiment1, 50); AssessmentSentimentPropertiesHelper.setLength(assessmentSentiment1, 0); final AssessmentSentiment assessmentSentiment2 = new AssessmentSentiment(); AssessmentSentimentPropertiesHelper.setText(assessmentSentiment2, "unclean"); AssessmentSentimentPropertiesHelper.setSentiment(assessmentSentiment2, TextSentiment.NEGATIVE); AssessmentSentimentPropertiesHelper.setConfidenceScores(assessmentSentiment2, new SentimentConfidenceScores(0.0, 0.0, 0.0)); AssessmentSentimentPropertiesHelper.setNegated(assessmentSentiment2, false); AssessmentSentimentPropertiesHelper.setOffset(assessmentSentiment2, 59); AssessmentSentimentPropertiesHelper.setLength(assessmentSentiment2, 0); final AssessmentSentiment assessmentSentiment3 = new AssessmentSentiment(); AssessmentSentimentPropertiesHelper.setText(assessmentSentiment3, "amazing"); AssessmentSentimentPropertiesHelper.setSentiment(assessmentSentiment3, TextSentiment.POSITIVE); AssessmentSentimentPropertiesHelper.setConfidenceScores(assessmentSentiment3, new SentimentConfidenceScores(0.0, 0.0, 0.0)); 
AssessmentSentimentPropertiesHelper.setNegated(assessmentSentiment3, false); AssessmentSentimentPropertiesHelper.setOffset(assessmentSentiment3, 19); AssessmentSentimentPropertiesHelper.setLength(assessmentSentiment3, 0); final TargetSentiment targetSentiment1 = new TargetSentiment(); TargetSentimentPropertiesHelper.setText(targetSentiment1, "gnocchi"); TargetSentimentPropertiesHelper.setSentiment(targetSentiment1, TextSentiment.POSITIVE); TargetSentimentPropertiesHelper.setConfidenceScores(targetSentiment1, new SentimentConfidenceScores(0.0, 0.0, 0.0)); TargetSentimentPropertiesHelper.setOffset(targetSentiment1, 27); final SentenceOpinion sentenceOpinion1 = new SentenceOpinion(); SentenceOpinionPropertiesHelper.setTarget(sentenceOpinion1, targetSentiment1); SentenceOpinionPropertiesHelper.setAssessments(sentenceOpinion1, new IterableStream<>(asList(assessmentSentiment3))); final TargetSentiment targetSentiment2 = new TargetSentiment(); TargetSentimentPropertiesHelper.setText(targetSentiment2, "hotel"); TargetSentimentPropertiesHelper.setSentiment(targetSentiment2, TextSentiment.NEGATIVE); TargetSentimentPropertiesHelper.setConfidenceScores(targetSentiment2, new SentimentConfidenceScores(0.0, 0.0, 0.0)); TargetSentimentPropertiesHelper.setOffset(targetSentiment2, 40); final SentenceOpinion sentenceOpinion2 = new SentenceOpinion(); SentenceOpinionPropertiesHelper.setTarget(sentenceOpinion2, targetSentiment2); SentenceOpinionPropertiesHelper.setAssessments(sentenceOpinion2, new IterableStream<>(asList(assessmentSentiment1, assessmentSentiment2))); final SentenceSentiment sentenceSentiment1 = new SentenceSentiment( "The restaurant had amazing gnocchi.", TextSentiment.POSITIVE, new SentimentConfidenceScores(0.0, 0.0, 0.0)); SentenceSentimentPropertiesHelper.setOpinions(sentenceSentiment1, new IterableStream<>(asList(sentenceOpinion1))); SentenceSentimentPropertiesHelper.setOffset(sentenceSentiment1, 0); SentenceSentimentPropertiesHelper.setLength(sentenceSentiment1, 
35); final SentenceSentiment sentenceSentiment2 = new SentenceSentiment( "The hotel was dark and unclean.", TextSentiment.NEGATIVE, new SentimentConfidenceScores(0.0, 0.0, 0.0)); SentenceSentimentPropertiesHelper.setOpinions(sentenceSentiment2, new IterableStream<>(asList(sentenceOpinion2))); SentenceSentimentPropertiesHelper.setOffset(sentenceSentiment2, 36); SentenceSentimentPropertiesHelper.setLength(sentenceSentiment2, 31); return new DocumentSentiment(TextSentiment.MIXED, new SentimentConfidenceScores(0.0, 0.0, 0.0), new IterableStream<>(asList(sentenceSentiment1, sentenceSentiment2)), null); } /* * This is the expected result for testing an input: * "I had a wonderful trip to Seattle last week." */ static DocumentSentiment getExpectedDocumentSentimentForActions() { final SentenceSentiment sentenceSentiment1 = new SentenceSentiment( "I had a wonderful trip to Seattle last week.", TextSentiment.POSITIVE, new SentimentConfidenceScores(0.0, 0.0, 0.0)); SentenceSentimentPropertiesHelper.setOpinions(sentenceSentiment1, null); SentenceSentimentPropertiesHelper.setOffset(sentenceSentiment1, 0); SentenceSentimentPropertiesHelper.setLength(sentenceSentiment1, 44); return new DocumentSentiment(TextSentiment.POSITIVE, new SentimentConfidenceScores(0.0, 0.0, 0.0), new IterableStream<>(asList(sentenceSentiment1)), null); } /* * This is the expected result for testing an input: * "Microsoft employee with ssn 859-98-0987 is using our awesome API's." 
*/ static DocumentSentiment getExpectedDocumentSentimentForActions2() { final SentenceSentiment sentenceSentiment1 = new SentenceSentiment( "Microsoft employee with ssn 859-98-0987 is using our awesome API's.", TextSentiment.POSITIVE, new SentimentConfidenceScores(0.0, 0.0, 0.0)); SentenceSentimentPropertiesHelper.setOpinions(sentenceSentiment1, null); SentenceSentimentPropertiesHelper.setOffset(sentenceSentiment1, 0); SentenceSentimentPropertiesHelper.setLength(sentenceSentiment1, 67); return new DocumentSentiment(TextSentiment.POSITIVE, new SentimentConfidenceScores(0.0, 0.0, 0.0), new IterableStream<>(asList(sentenceSentiment1)), null); } /** * Helper method that get a single-page {@link AnalyzeHealthcareEntitiesResultCollection} list. */ static List<AnalyzeHealthcareEntitiesResultCollection> getExpectedAnalyzeHealthcareEntitiesResultCollectionListForSinglePage() { return asList( getExpectedAnalyzeHealthcareEntitiesResultCollection(2, asList(getRecognizeHealthcareEntitiesResult1("0"), getRecognizeHealthcareEntitiesResult2()))); } /** * Helper method that get a multiple-pages {@link AnalyzeHealthcareEntitiesResultCollection} list. 
*/ static List<AnalyzeHealthcareEntitiesResultCollection> getExpectedAnalyzeHealthcareEntitiesResultCollectionListForMultiplePages(int startIndex, int firstPage, int secondPage) { List<AnalyzeHealthcareEntitiesResult> healthcareEntitiesResults1 = new ArrayList<>(); int i = startIndex; for (; i < startIndex + firstPage; i++) { healthcareEntitiesResults1.add(getRecognizeHealthcareEntitiesResult1(Integer.toString(i))); } List<AnalyzeHealthcareEntitiesResult> healthcareEntitiesResults2 = new ArrayList<>(); for (; i < startIndex + firstPage + secondPage; i++) { healthcareEntitiesResults2.add(getRecognizeHealthcareEntitiesResult1(Integer.toString(i))); } List<AnalyzeHealthcareEntitiesResultCollection> result = new ArrayList<>(); result.add(getExpectedAnalyzeHealthcareEntitiesResultCollection(firstPage, healthcareEntitiesResults1)); if (secondPage != 0) { result.add(getExpectedAnalyzeHealthcareEntitiesResultCollection(secondPage, healthcareEntitiesResults2)); } return result; } /** * Helper method that get the expected {@link AnalyzeHealthcareEntitiesResultCollection} result. * * @param sizePerPage batch size per page. * @param healthcareEntitiesResults a collection of {@link AnalyzeHealthcareEntitiesResult}. 
*/ static AnalyzeHealthcareEntitiesResultCollection getExpectedAnalyzeHealthcareEntitiesResultCollection( int sizePerPage, List<AnalyzeHealthcareEntitiesResult> healthcareEntitiesResults) { TextDocumentBatchStatistics textDocumentBatchStatistics = new TextDocumentBatchStatistics( sizePerPage, sizePerPage, 0, sizePerPage); final AnalyzeHealthcareEntitiesResultCollection analyzeHealthcareEntitiesResultCollection = new AnalyzeHealthcareEntitiesResultCollection(IterableStream.of(healthcareEntitiesResults)); AnalyzeHealthcareEntitiesResultCollectionPropertiesHelper.setModelVersion(analyzeHealthcareEntitiesResultCollection, "2020-09-03"); AnalyzeHealthcareEntitiesResultCollectionPropertiesHelper.setStatistics(analyzeHealthcareEntitiesResultCollection, textDocumentBatchStatistics); return analyzeHealthcareEntitiesResultCollection; } /** * Result for * "The patient is a 54-year-old gentleman with a history of progressive angina over the past several months.", */ static AnalyzeHealthcareEntitiesResult getRecognizeHealthcareEntitiesResult1(String documentId) { TextDocumentStatistics textDocumentStatistics1 = new TextDocumentStatistics(105, 1); final HealthcareEntity healthcareEntity1 = new HealthcareEntity(); HealthcareEntityPropertiesHelper.setText(healthcareEntity1, "54-year-old"); HealthcareEntityPropertiesHelper.setCategory(healthcareEntity1, HealthcareEntityCategory.AGE); HealthcareEntityPropertiesHelper.setConfidenceScore(healthcareEntity1, 1.0); HealthcareEntityPropertiesHelper.setOffset(healthcareEntity1, 17); HealthcareEntityPropertiesHelper.setLength(healthcareEntity1, 11); HealthcareEntityPropertiesHelper.setDataSources(healthcareEntity1, IterableStream.of(Collections.emptyList())); final HealthcareEntity healthcareEntity2 = new HealthcareEntity(); HealthcareEntityPropertiesHelper.setText(healthcareEntity2, "gentleman"); HealthcareEntityPropertiesHelper.setNormalizedText(healthcareEntity2, "Male population group"); 
HealthcareEntityPropertiesHelper.setCategory(healthcareEntity2, HealthcareEntityCategory.GENDER); HealthcareEntityPropertiesHelper.setConfidenceScore(healthcareEntity2, 1.0); HealthcareEntityPropertiesHelper.setOffset(healthcareEntity2, 29); HealthcareEntityPropertiesHelper.setLength(healthcareEntity2, 9); HealthcareEntityPropertiesHelper.setDataSources(healthcareEntity2, IterableStream.of(Collections.emptyList())); HealthcareEntityPropertiesHelper.setDataSources(healthcareEntity2, IterableStream.of(Collections.emptyList())); final HealthcareEntity healthcareEntity3 = new HealthcareEntity(); HealthcareEntityPropertiesHelper.setText(healthcareEntity3, "progressive"); HealthcareEntityPropertiesHelper.setCategory(healthcareEntity3, HealthcareEntityCategory.fromString("Course")); HealthcareEntityPropertiesHelper.setConfidenceScore(healthcareEntity3, 0.91); HealthcareEntityPropertiesHelper.setOffset(healthcareEntity3, 57); HealthcareEntityPropertiesHelper.setLength(healthcareEntity3, 11); HealthcareEntityPropertiesHelper.setDataSources(healthcareEntity3, IterableStream.of(Collections.emptyList())); final HealthcareEntity healthcareEntity4 = new HealthcareEntity(); HealthcareEntityPropertiesHelper.setText(healthcareEntity4, "angina"); HealthcareEntityPropertiesHelper.setNormalizedText(healthcareEntity4, "Angina Pectoris"); HealthcareEntityPropertiesHelper.setCategory(healthcareEntity4, HealthcareEntityCategory.SYMPTOM_OR_SIGN); HealthcareEntityPropertiesHelper.setConfidenceScore(healthcareEntity4, 0.81); HealthcareEntityPropertiesHelper.setOffset(healthcareEntity4, 69); HealthcareEntityPropertiesHelper.setLength(healthcareEntity4, 6); HealthcareEntityPropertiesHelper.setDataSources(healthcareEntity4, IterableStream.of(Collections.emptyList())); HealthcareEntityPropertiesHelper.setDataSources(healthcareEntity4, IterableStream.of(Collections.emptyList())); final HealthcareEntity healthcareEntity5 = new HealthcareEntity(); 
HealthcareEntityPropertiesHelper.setText(healthcareEntity5, "past several months"); HealthcareEntityPropertiesHelper.setCategory(healthcareEntity5, HealthcareEntityCategory.TIME); HealthcareEntityPropertiesHelper.setConfidenceScore(healthcareEntity5, 1.0); HealthcareEntityPropertiesHelper.setOffset(healthcareEntity5, 85); HealthcareEntityPropertiesHelper.setLength(healthcareEntity5, 19); HealthcareEntityPropertiesHelper.setDataSources(healthcareEntity5, IterableStream.of(Collections.emptyList())); final AnalyzeHealthcareEntitiesResult healthcareEntitiesResult1 = new AnalyzeHealthcareEntitiesResult(documentId, textDocumentStatistics1, null); AnalyzeHealthcareEntitiesResultPropertiesHelper.setEntities(healthcareEntitiesResult1, new IterableStream<>(asList(healthcareEntity1, healthcareEntity2, healthcareEntity3, healthcareEntity4, healthcareEntity5))); final HealthcareEntityRelation healthcareEntityRelation1 = new HealthcareEntityRelation(); final HealthcareEntityRelationRole role1 = new HealthcareEntityRelationRole(); HealthcareEntityRelationRolePropertiesHelper.setName(role1, "Course"); HealthcareEntityRelationRolePropertiesHelper.setEntity(role1, healthcareEntity3); final HealthcareEntityRelationRole role2 = new HealthcareEntityRelationRole(); HealthcareEntityRelationRolePropertiesHelper.setName(role2, "Condition"); HealthcareEntityRelationRolePropertiesHelper.setEntity(role2, healthcareEntity4); HealthcareEntityRelationPropertiesHelper.setRelationType(healthcareEntityRelation1, HealthcareEntityRelationType.fromString("CourseOfCondition")); HealthcareEntityRelationPropertiesHelper.setRoles(healthcareEntityRelation1, IterableStream.of(asList(role1, role2))); final HealthcareEntityRelation healthcareEntityRelation2 = new HealthcareEntityRelation(); final HealthcareEntityRelationRole role3 = new HealthcareEntityRelationRole(); HealthcareEntityRelationRolePropertiesHelper.setName(role3, "Time"); HealthcareEntityRelationRolePropertiesHelper.setEntity(role3, 
healthcareEntity5); HealthcareEntityRelationPropertiesHelper.setRelationType(healthcareEntityRelation2, HealthcareEntityRelationType.TIME_OF_CONDITION); HealthcareEntityRelationPropertiesHelper.setRoles(healthcareEntityRelation2, IterableStream.of(asList(role2, role3))); AnalyzeHealthcareEntitiesResultPropertiesHelper.setEntityRelations(healthcareEntitiesResult1, IterableStream.of(asList(healthcareEntityRelation1, healthcareEntityRelation2))); return healthcareEntitiesResult1; } /** * Result for * "The patient went for six minutes with minimal ST depressions in the anterior lateral leads , * thought due to fatigue and wrist pain , his anginal equivalent." */ /** * RecognizeEntitiesResultCollection result for * "I had a wonderful trip to Seattle last week." * "Microsoft employee with ssn 859-98-0987 is using our awesome API's." */ static RecognizeEntitiesResultCollection getRecognizeEntitiesResultCollection() { return new RecognizeEntitiesResultCollection( asList(new RecognizeEntitiesResult("0", new TextDocumentStatistics(44, 1), null, new CategorizedEntityCollection(new IterableStream<>(getCategorizedEntitiesList1()), null)), new RecognizeEntitiesResult("1", new TextDocumentStatistics(67, 1), null, new CategorizedEntityCollection(new IterableStream<>(getCategorizedEntitiesForPiiInput()), null)) ), "2020-04-01", new TextDocumentBatchStatistics(2, 2, 0, 2)); } /** * RecognizePiiEntitiesResultCollection result for * "I had a wonderful trip to Seattle last week." * "Microsoft employee with ssn 859-98-0987 is using our awesome API's." 
*/ static RecognizePiiEntitiesResultCollection getRecognizePiiEntitiesResultCollection() { final PiiEntity piiEntity0 = new PiiEntity(); PiiEntityPropertiesHelper.setText(piiEntity0, "last week"); PiiEntityPropertiesHelper.setCategory(piiEntity0, PiiEntityCategory.fromString("DateTime")); PiiEntityPropertiesHelper.setSubcategory(piiEntity0, "DateRange"); PiiEntityPropertiesHelper.setOffset(piiEntity0, 34); return new RecognizePiiEntitiesResultCollection( asList( new RecognizePiiEntitiesResult("0", new TextDocumentStatistics(44, 1), null, new PiiEntityCollection(new IterableStream<>(Arrays.asList(piiEntity0)), "I had a wonderful trip to Seattle *********.", null)), new RecognizePiiEntitiesResult("1", new TextDocumentStatistics(67, 1), null, new PiiEntityCollection(new IterableStream<>(getPiiEntitiesList1()), "********* ******** with ssn *********** is using our awesome API's.", null))), "2020-07-01", new TextDocumentBatchStatistics(2, 2, 0, 2) ); } /** * ExtractKeyPhrasesResultCollection result for * "I had a wonderful trip to Seattle last week." * "Microsoft employee with ssn 859-98-0987 is using our awesome API's." 
*/ static ExtractKeyPhrasesResultCollection getExtractKeyPhrasesResultCollection() { return new ExtractKeyPhrasesResultCollection( asList(new ExtractKeyPhraseResult("0", new TextDocumentStatistics(44, 1), null, new KeyPhrasesCollection(new IterableStream<>(asList("wonderful trip", "Seattle")), null)), new ExtractKeyPhraseResult("1", new TextDocumentStatistics(67, 1), null, new KeyPhrasesCollection(new IterableStream<>(asList("Microsoft employee", "ssn", "awesome API")), null))), DEFAULT_MODEL_VERSION, new TextDocumentBatchStatistics(2, 2, 0, 2)); } static RecognizeLinkedEntitiesResultCollection getRecognizeLinkedEntitiesResultCollection() { return new RecognizeLinkedEntitiesResultCollection( asList(new RecognizeLinkedEntitiesResult("0", new TextDocumentStatistics(44, 1), null, new LinkedEntityCollection(new IterableStream<>(getLinkedEntitiesList1()), null)), new RecognizeLinkedEntitiesResult("1", new TextDocumentStatistics(20, 1), null, new LinkedEntityCollection(new IterableStream<>(getLinkedEntitiesList2()), null)) ), DEFAULT_MODEL_VERSION, new TextDocumentBatchStatistics(2, 2, 0, 2)); } static RecognizeLinkedEntitiesResultCollection getRecognizeLinkedEntitiesResultCollectionForActions() { return new RecognizeLinkedEntitiesResultCollection( asList(new RecognizeLinkedEntitiesResult("0", new TextDocumentStatistics(44, 1), null, new LinkedEntityCollection(new IterableStream<>(getLinkedEntitiesList1()), null)), new RecognizeLinkedEntitiesResult("1", new TextDocumentStatistics(20, 1), null, new LinkedEntityCollection(new IterableStream<>(getLinkedEntitiesList3()), null)) ), DEFAULT_MODEL_VERSION, new TextDocumentBatchStatistics(2, 2, 0, 2)); } static AnalyzeSentimentResultCollection getAnalyzeSentimentResultCollectionForActions() { final AnalyzeSentimentResult analyzeSentimentResult1 = new AnalyzeSentimentResult("0", null, null, getExpectedDocumentSentimentForActions()); final AnalyzeSentimentResult analyzeSentimentResult2 = new AnalyzeSentimentResult("1", null, null, 
getExpectedDocumentSentimentForActions2()); return new AnalyzeSentimentResultCollection( asList(analyzeSentimentResult1, analyzeSentimentResult2), DEFAULT_MODEL_VERSION, new TextDocumentBatchStatistics(2, 2, 0, 2)); } static RecognizeEntitiesActionResult getExpectedRecognizeEntitiesActionResult(boolean isError, String actionName, OffsetDateTime completeAt, RecognizeEntitiesResultCollection resultCollection, TextAnalyticsError actionError) { RecognizeEntitiesActionResult actionResult = new RecognizeEntitiesActionResult(); RecognizeEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, resultCollection); TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, actionName); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, completeAt); TextAnalyticsActionResultPropertiesHelper.setIsError(actionResult, isError); TextAnalyticsActionResultPropertiesHelper.setError(actionResult, actionError); return actionResult; } static RecognizePiiEntitiesActionResult getExpectedRecognizePiiEntitiesActionResult(boolean isError, String actionName, OffsetDateTime completedAt, RecognizePiiEntitiesResultCollection resultCollection, TextAnalyticsError actionError) { RecognizePiiEntitiesActionResult actionResult = new RecognizePiiEntitiesActionResult(); RecognizePiiEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, resultCollection); TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, actionName); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, completedAt); TextAnalyticsActionResultPropertiesHelper.setIsError(actionResult, isError); TextAnalyticsActionResultPropertiesHelper.setError(actionResult, actionError); return actionResult; } static ExtractKeyPhrasesActionResult getExpectedExtractKeyPhrasesActionResult(boolean isError, String actionName, OffsetDateTime completedAt, ExtractKeyPhrasesResultCollection resultCollection, TextAnalyticsError actionError) { 
ExtractKeyPhrasesActionResult actionResult = new ExtractKeyPhrasesActionResult(); ExtractKeyPhrasesActionResultPropertiesHelper.setDocumentsResults(actionResult, resultCollection); TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, actionName); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, completedAt); TextAnalyticsActionResultPropertiesHelper.setIsError(actionResult, isError); TextAnalyticsActionResultPropertiesHelper.setError(actionResult, actionError); return actionResult; } static RecognizeLinkedEntitiesActionResult getExpectedRecognizeLinkedEntitiesActionResult(boolean isError, String actionName, OffsetDateTime completeAt, RecognizeLinkedEntitiesResultCollection resultCollection, TextAnalyticsError actionError) { RecognizeLinkedEntitiesActionResult actionResult = new RecognizeLinkedEntitiesActionResult(); RecognizeLinkedEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, resultCollection); TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, actionName); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, completeAt); TextAnalyticsActionResultPropertiesHelper.setIsError(actionResult, isError); TextAnalyticsActionResultPropertiesHelper.setError(actionResult, actionError); return actionResult; } static AnalyzeSentimentActionResult getExpectedAnalyzeSentimentActionResult(boolean isError, String actionName, OffsetDateTime completeAt, AnalyzeSentimentResultCollection resultCollection, TextAnalyticsError actionError) { AnalyzeSentimentActionResult actionResult = new AnalyzeSentimentActionResult(); AnalyzeSentimentActionResultPropertiesHelper.setDocumentsResults(actionResult, resultCollection); TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, actionName); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, completeAt); TextAnalyticsActionResultPropertiesHelper.setIsError(actionResult, isError); 
TextAnalyticsActionResultPropertiesHelper.setError(actionResult, actionError); return actionResult; } /** * Helper method that get the expected AnalyzeBatchActionsResult result. */ static AnalyzeActionsResult getExpectedAnalyzeBatchActionsResult( IterableStream<RecognizeEntitiesActionResult> recognizeEntitiesActionResults, IterableStream<RecognizeLinkedEntitiesActionResult> recognizeLinkedEntitiesActionResults, IterableStream<RecognizePiiEntitiesActionResult> recognizePiiEntitiesActionResults, IterableStream<ExtractKeyPhrasesActionResult> extractKeyPhrasesActionResults, IterableStream<AnalyzeSentimentActionResult> analyzeSentimentActionResults) { final AnalyzeActionsResult analyzeActionsResult = new AnalyzeActionsResult(); AnalyzeActionsResultPropertiesHelper.setRecognizeEntitiesResults(analyzeActionsResult, recognizeEntitiesActionResults); AnalyzeActionsResultPropertiesHelper.setRecognizePiiEntitiesResults(analyzeActionsResult, recognizePiiEntitiesActionResults); AnalyzeActionsResultPropertiesHelper.setExtractKeyPhrasesResults(analyzeActionsResult, extractKeyPhrasesActionResults); AnalyzeActionsResultPropertiesHelper.setRecognizeLinkedEntitiesResults(analyzeActionsResult, recognizeLinkedEntitiesActionResults); AnalyzeActionsResultPropertiesHelper.setAnalyzeSentimentResults(analyzeActionsResult, analyzeSentimentActionResults); return analyzeActionsResult; } /** * CategorizedEntityCollection result for * "Microsoft employee with ssn 859-98-0987 is using our awesome API's." 
*/ static RecognizeEntitiesResultCollection getRecognizeEntitiesResultCollectionForPagination(int startIndex, int documentCount) { List<RecognizeEntitiesResult> recognizeEntitiesResults = new ArrayList<>(); for (int i = startIndex; i < startIndex + documentCount; i++) { recognizeEntitiesResults.add(new RecognizeEntitiesResult(Integer.toString(i), null, null, new CategorizedEntityCollection(new IterableStream<>(getCategorizedEntitiesForPiiInput()), null))); } return new RecognizeEntitiesResultCollection(recognizeEntitiesResults, "2020-04-01", new TextDocumentBatchStatistics(documentCount, documentCount, 0, documentCount)); } /** * RecognizePiiEntitiesResultCollection result for * "Microsoft employee with ssn 859-98-0987 is using our awesome API's." */ static RecognizePiiEntitiesResultCollection getRecognizePiiEntitiesResultCollectionForPagination(int startIndex, int documentCount) { List<RecognizePiiEntitiesResult> recognizePiiEntitiesResults = new ArrayList<>(); for (int i = startIndex; i < startIndex + documentCount; i++) { recognizePiiEntitiesResults.add(new RecognizePiiEntitiesResult(Integer.toString(i), null, null, new PiiEntityCollection(new IterableStream<>(getPiiEntitiesList1()), "********* ******** with ssn *********** is using our awesome API's.", null))); } return new RecognizePiiEntitiesResultCollection(recognizePiiEntitiesResults, "2020-07-01", new TextDocumentBatchStatistics(documentCount, documentCount, 0, documentCount) ); } /** * ExtractKeyPhrasesResultCollection result for * "Microsoft employee with ssn 859-98-0987 is using our awesome API's." 
*/ static ExtractKeyPhrasesResultCollection getExtractKeyPhrasesResultCollectionForPagination(int startIndex, int documentCount) { List<ExtractKeyPhraseResult> extractKeyPhraseResults = new ArrayList<>(); for (int i = startIndex; i < startIndex + documentCount; i++) { extractKeyPhraseResults.add(new ExtractKeyPhraseResult(Integer.toString(i), null, null, new KeyPhrasesCollection(new IterableStream<>(asList("Microsoft employee", "ssn", "awesome API")), null))); } return new ExtractKeyPhrasesResultCollection(extractKeyPhraseResults, "2020-07-01", new TextDocumentBatchStatistics(documentCount, documentCount, 0, documentCount)); } /** * RecognizeLinkedEntitiesResultCollection result for * "Microsoft employee with ssn 859-98-0987 is using our awesome API's." */ static RecognizeLinkedEntitiesResultCollection getRecognizeLinkedEntitiesResultCollectionForPagination( int startIndex, int documentCount) { List<RecognizeLinkedEntitiesResult> recognizeLinkedEntitiesResults = new ArrayList<>(); for (int i = startIndex; i < startIndex + documentCount; i++) { recognizeLinkedEntitiesResults.add(new RecognizeLinkedEntitiesResult(Integer.toString(i), null, null, new LinkedEntityCollection(new IterableStream<>(getLinkedEntitiesList3()), null))); } return new RecognizeLinkedEntitiesResultCollection(recognizeLinkedEntitiesResults, "", new TextDocumentBatchStatistics(documentCount, documentCount, 0, documentCount) ); } /** * AnalyzeSentimentResultCollection result for * "Microsoft employee with ssn 859-98-0987 is using our awesome API's." 
*/ static AnalyzeSentimentResultCollection getAnalyzeSentimentResultCollectionForPagination( int startIndex, int documentCount) { List<AnalyzeSentimentResult> analyzeSentimentResults = new ArrayList<>(); for (int i = startIndex; i < startIndex + documentCount; i++) { analyzeSentimentResults.add(new AnalyzeSentimentResult(Integer.toString(i), null, null, getExpectedDocumentSentimentForActions2())); } return new AnalyzeSentimentResultCollection(analyzeSentimentResults, "", new TextDocumentBatchStatistics(documentCount, documentCount, 0, documentCount) ); } /** * Helper method that get a multiple-pages (AnalyzeActionsResult) list. */ static List<AnalyzeActionsResult> getExpectedAnalyzeActionsResultListForMultiplePages(int startIndex, int firstPage, int secondPage) { List<AnalyzeActionsResult> analyzeActionsResults = new ArrayList<>(); analyzeActionsResults.add(getExpectedAnalyzeBatchActionsResult( IterableStream.of(asList(getExpectedRecognizeEntitiesActionResult( false, null, TIME_NOW, getRecognizeEntitiesResultCollectionForPagination(startIndex, firstPage), null))), IterableStream.of(asList(getExpectedRecognizeLinkedEntitiesActionResult( false, null, TIME_NOW, getRecognizeLinkedEntitiesResultCollectionForPagination(startIndex, firstPage), null))), IterableStream.of(asList(getExpectedRecognizePiiEntitiesActionResult( false, null, TIME_NOW, getRecognizePiiEntitiesResultCollectionForPagination(startIndex, firstPage), null))), IterableStream.of(asList(getExpectedExtractKeyPhrasesActionResult( false, null, TIME_NOW, getExtractKeyPhrasesResultCollectionForPagination(startIndex, firstPage), null))), IterableStream.of(asList(getExpectedAnalyzeSentimentActionResult( false, null, TIME_NOW, getAnalyzeSentimentResultCollectionForPagination(startIndex, firstPage), null))) )); startIndex += firstPage; analyzeActionsResults.add(getExpectedAnalyzeBatchActionsResult( IterableStream.of(asList(getExpectedRecognizeEntitiesActionResult( false, null, TIME_NOW, 
getRecognizeEntitiesResultCollectionForPagination(startIndex, secondPage), null))), IterableStream.of(asList(getExpectedRecognizeLinkedEntitiesActionResult( false, null, TIME_NOW, getRecognizeLinkedEntitiesResultCollectionForPagination(startIndex, secondPage), null))), IterableStream.of(asList(getExpectedRecognizePiiEntitiesActionResult( false, null, TIME_NOW, getRecognizePiiEntitiesResultCollectionForPagination(startIndex, secondPage), null))), IterableStream.of(asList(getExpectedExtractKeyPhrasesActionResult( false, null, TIME_NOW, getExtractKeyPhrasesResultCollectionForPagination(startIndex, secondPage), null))), IterableStream.of(asList(getExpectedAnalyzeSentimentActionResult( false, null, TIME_NOW, getAnalyzeSentimentResultCollectionForPagination(startIndex, secondPage), null))) )); return analyzeActionsResults; } /** * Helper method that get a customized TextAnalyticsError. */ static TextAnalyticsError getActionError(TextAnalyticsErrorCode errorCode, String taskName, String index) { return new TextAnalyticsError(errorCode, "", " } /** * Returns a stream of arguments that includes all combinations of eligible {@link HttpClient HttpClients} and * service versions that should be tested. * * @return A stream of HttpClient and service version combinations to test. */ static Stream<Arguments> getTestParameters() { List<Arguments> argumentsList = new ArrayList<>(); getHttpClients() .forEach(httpClient -> { Arrays.stream(TextAnalyticsServiceVersion.values()).filter( TestUtils::shouldServiceVersionBeTested) .forEach(serviceVersion -> argumentsList.add(Arguments.of(httpClient, serviceVersion))); }); return argumentsList.stream(); } /** * Returns whether the given service version match the rules of test framework. 
* * <ul> * <li>Using latest service version as default if no environment variable is set.</li> * <li>If it's set to ALL, all Service versions in {@link TextAnalyticsServiceVersion} will be tested.</li> * <li>Otherwise, Service version string should match env variable.</li> * </ul> * * Environment values currently supported are: "ALL", "${version}". * Use comma to separate http clients want to test. * e.g. {@code set AZURE_TEST_SERVICE_VERSIONS = V1_0, V2_0} * * @param serviceVersion ServiceVersion needs to check * @return Boolean indicates whether filters out the service version or not. */ private static boolean shouldServiceVersionBeTested(TextAnalyticsServiceVersion serviceVersion) { String serviceVersionFromEnv = Configuration.getGlobalConfiguration().get(AZURE_TEXT_ANALYTICS_TEST_SERVICE_VERSIONS); if (CoreUtils.isNullOrEmpty(serviceVersionFromEnv)) { return TextAnalyticsServiceVersion.getLatest().equals(serviceVersion); } if (AZURE_TEST_SERVICE_VERSIONS_VALUE_ALL.equalsIgnoreCase(serviceVersionFromEnv)) { return true; } String[] configuredServiceVersionList = serviceVersionFromEnv.split(","); return Arrays.stream(configuredServiceVersionList).anyMatch(configuredServiceVersion -> serviceVersion.getVersion().equals(configuredServiceVersion.trim())); } private TestUtils() { } }
Don't we need to start context for on fail clause?
private boolean isServiceDeclStart(ParserRuleContext currentContext, int lookahead) { switch (peek(lookahead + 1).kind) { case IDENTIFIER_TOKEN: SyntaxKind tokenAfterIdentifier = peek(lookahead + 2).kind; switch (tokenAfterIdentifier) { case ON_KEYWORD: case OPEN_BRACE_TOKEN: return true; case EQUAL_TOKEN: case SEMICOLON_TOKEN: case QUESTION_MARK_TOKEN: return false; default: return false; } case ON_KEYWORD: return true; default: return false; } } /** * Parse listener declaration, given the qualifier. * <p> * <code> * listener-decl := metadata [public] listener [type-descriptor] variable-name = expression ; * </code> * * @param metadata Metadata * @param qualifier Qualifier that precedes the listener declaration * @return Parsed node */ private STNode parseListenerDeclaration(STNode metadata, STNode qualifier) { startContext(ParserRuleContext.LISTENER_DECL); STNode listenerKeyword = parseListenerKeyword(); if (peek().kind == SyntaxKind.IDENTIFIER_TOKEN) { STNode listenerDecl = parseConstantOrListenerDeclWithOptionalType(metadata, qualifier, listenerKeyword, true); endContext(); return listenerDecl; } STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER); STNode variableName = parseVariableName(); STNode equalsToken = parseAssignOp(); STNode initializer = parseExpression(); STNode semicolonToken = parseSemicolon(); endContext(); return STNodeFactory.createListenerDeclarationNode(metadata, qualifier, listenerKeyword, typeDesc, variableName, equalsToken, initializer, semicolonToken); } /** * Parse listener keyword. * * @return Parsed node */ private STNode parseListenerKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.LISTENER_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.LISTENER_KEYWORD); return parseListenerKeyword(); } } /** * Parse constant declaration, given the qualifier. 
* <p> * <code>module-const-decl := metadata [public] const [type-descriptor] identifier = const-expr ;</code> * * @param metadata Metadata * @param qualifier Qualifier that precedes the listener declaration * @return Parsed node */ private STNode parseConstantDeclaration(STNode metadata, STNode qualifier) { startContext(ParserRuleContext.CONSTANT_DECL); STNode constKeyword = parseConstantKeyword(); STNode constDecl = parseConstDecl(metadata, qualifier, constKeyword); endContext(); return constDecl; } /** * Parse the components that follows after the const keyword of a constant declaration. * * @param metadata Metadata * @param qualifier Qualifier that precedes the constant decl * @param constKeyword Const keyword * @return Parsed node */ private STNode parseConstDecl(STNode metadata, STNode qualifier, STNode constKeyword) { STToken nextToken = peek(); switch (nextToken.kind) { case ANNOTATION_KEYWORD: return parseAnnotationDeclaration(metadata, qualifier, constKeyword); case IDENTIFIER_TOKEN: return parseConstantOrListenerDeclWithOptionalType(metadata, qualifier, constKeyword, false); default: if (isTypeStartingToken(nextToken.kind)) { break; } recover(peek(), ParserRuleContext.CONST_DECL_TYPE, metadata, qualifier, constKeyword); return parseConstDecl(metadata, qualifier, constKeyword); } STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER); STNode variableName = parseVariableName(); STNode equalsToken = parseAssignOp(); STNode initializer = parseExpression(); STNode semicolonToken = parseSemicolon(); return STNodeFactory.createConstantDeclarationNode(metadata, qualifier, constKeyword, typeDesc, variableName, equalsToken, initializer, semicolonToken); } private STNode parseConstantOrListenerDeclWithOptionalType(STNode metadata, STNode qualifier, STNode constKeyword, boolean isListener) { STNode varNameOrTypeName = parseStatementStartIdentifier(); STNode constDecl = parseConstantOrListenerDeclRhs(metadata, qualifier, constKeyword, 
varNameOrTypeName, isListener); return constDecl; } /** * Parse the component that follows the first identifier in a const decl. The identifier * can be either the type-name (a user defined type) or the var-name there the type-name * is not present. * * @param qualifier Qualifier that precedes the constant decl * @param keyword Keyword * @param typeOrVarName Identifier that follows the const-keywoord * @return Parsed node */ private STNode parseConstantOrListenerDeclRhs(STNode metadata, STNode qualifier, STNode keyword, STNode typeOrVarName, boolean isListener) { if (typeOrVarName.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) { STNode type = typeOrVarName; STNode variableName = parseVariableName(); return parseListenerOrConstRhs(metadata, qualifier, keyword, isListener, type, variableName); } STNode type; STNode variableName; switch (peek().kind) { case IDENTIFIER_TOKEN: type = typeOrVarName; variableName = parseVariableName(); break; case EQUAL_TOKEN: variableName = ((STSimpleNameReferenceNode) typeOrVarName).name; type = STNodeFactory.createEmptyNode(); break; default: recover(peek(), ParserRuleContext.CONST_DECL_RHS, metadata, qualifier, keyword, typeOrVarName, isListener); return parseConstantOrListenerDeclRhs(metadata, qualifier, keyword, typeOrVarName, isListener); } return parseListenerOrConstRhs(metadata, qualifier, keyword, isListener, type, variableName); } private STNode parseListenerOrConstRhs(STNode metadata, STNode qualifier, STNode keyword, boolean isListener, STNode type, STNode variableName) { STNode equalsToken = parseAssignOp(); STNode initializer = parseExpression(); STNode semicolonToken = parseSemicolon(); if (isListener) { return STNodeFactory.createListenerDeclarationNode(metadata, qualifier, keyword, type, variableName, equalsToken, initializer, semicolonToken); } return STNodeFactory.createConstantDeclarationNode(metadata, qualifier, keyword, type, variableName, equalsToken, initializer, semicolonToken); } /** * Parse const keyword. 
* * @return Parsed node */ private STNode parseConstantKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.CONST_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.CONST_KEYWORD); return parseConstantKeyword(); } } /** * Parse typeof expression. * <p> * <code> * typeof-expr := typeof expression * </code> * * @param isRhsExpr * @return Typeof expression node */ private STNode parseTypeofExpression(boolean isRhsExpr, boolean isInConditionalExpr) { STNode typeofKeyword = parseTypeofKeyword(); STNode expr = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false, isInConditionalExpr); return STNodeFactory.createTypeofExpressionNode(typeofKeyword, expr); } /** * Parse typeof-keyword. * * @return Typeof-keyword node */ private STNode parseTypeofKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.TYPEOF_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.TYPEOF_KEYWORD); return parseTypeofKeyword(); } } /** * Parse optional type descriptor. * <p> * <code>optional-type-descriptor := type-descriptor ? </code> * </p> * * @return Parsed node */ private STNode parseOptionalTypeDescriptor(STNode typeDescriptorNode) { startContext(ParserRuleContext.OPTIONAL_TYPE_DESCRIPTOR); STNode questionMarkToken = parseQuestionMark(); endContext(); typeDescriptorNode = validateForUsageOfVar(typeDescriptorNode); return STNodeFactory.createOptionalTypeDescriptorNode(typeDescriptorNode, questionMarkToken); } /** * Parse unary expression. * <p> * <code> * unary-expr := + expression | - expression | ~ expression | ! expression * </code> * * @param isRhsExpr * @return Unary expression node */ private STNode parseUnaryExpression(boolean isRhsExpr, boolean isInConditionalExpr) { STNode unaryOperator = parseUnaryOperator(); STNode expr = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false, isInConditionalExpr); return STNodeFactory.createUnaryExpressionNode(unaryOperator, expr); } /** * Parse unary operator. 
* <code>UnaryOperator := + | - | ~ | !</code> * * @return Parsed node */ private STNode parseUnaryOperator() { STToken token = peek(); if (isUnaryOperator(token.kind)) { return consume(); } else { recover(token, ParserRuleContext.UNARY_OPERATOR); return parseUnaryOperator(); } } /** * Check whether the given token kind is a unary operator. * * @param kind STToken kind * @return <code>true</code> if the token kind refers to a unary operator. <code>false</code> otherwise */ private boolean isUnaryOperator(SyntaxKind kind) { switch (kind) { case PLUS_TOKEN: case MINUS_TOKEN: case NEGATION_TOKEN: case EXCLAMATION_MARK_TOKEN: return true; default: return false; } } /** * Parse array type descriptor. * <p> * <code> * array-type-descriptor := member-type-descriptor [ [ array-length ] ] * member-type-descriptor := type-descriptor * array-length := * int-literal * | constant-reference-expr * | inferred-array-length * inferred-array-length := * * </code> * </p> * * @param memberTypeDesc * @return Parsed Node */ private STNode parseArrayTypeDescriptor(STNode memberTypeDesc) { startContext(ParserRuleContext.ARRAY_TYPE_DESCRIPTOR); STNode openBracketToken = parseOpenBracket(); STNode arrayLengthNode = parseArrayLength(); STNode closeBracketToken = parseCloseBracket(); endContext(); return createArrayTypeDesc(memberTypeDesc, openBracketToken, arrayLengthNode, closeBracketToken); } private STNode createArrayTypeDesc(STNode memberTypeDesc, STNode openBracketToken, STNode arrayLengthNode, STNode closeBracketToken) { memberTypeDesc = validateForUsageOfVar(memberTypeDesc); return STNodeFactory.createArrayTypeDescriptorNode(memberTypeDesc, openBracketToken, arrayLengthNode, closeBracketToken); } /** * Parse array length. 
* <p> * <code> * array-length := * int-literal * | constant-reference-expr * | inferred-array-length * constant-reference-expr := variable-reference-expr * </code> * </p> * * @return Parsed array length */ private STNode parseArrayLength() { STToken token = peek(); switch (token.kind) { case DECIMAL_INTEGER_LITERAL_TOKEN: case HEX_INTEGER_LITERAL_TOKEN: case ASTERISK_TOKEN: return parseBasicLiteral(); case CLOSE_BRACKET_TOKEN: return STNodeFactory.createEmptyNode(); case IDENTIFIER_TOKEN: return parseQualifiedIdentifier(ParserRuleContext.ARRAY_LENGTH); default: recover(token, ParserRuleContext.ARRAY_LENGTH); return parseArrayLength(); } } /** * Parse annotations. * <p> * <i>Note: In the ballerina spec ({@link https: * annotations-list is specified as one-or-more annotations. And the usage is marked as * optional annotations-list. However, for the consistency of the tree, here we make the * annotation-list as zero-or-more annotations, and the usage is not-optional.</i> * <p> * <code>annots := annotation*</code> * * @return Parsed node */ private STNode parseOptionalAnnotations() { startContext(ParserRuleContext.ANNOTATIONS); List<STNode> annotList = new ArrayList<>(); STToken nextToken = peek(); while (nextToken.kind == SyntaxKind.AT_TOKEN) { annotList.add(parseAnnotation()); nextToken = peek(); } endContext(); return STNodeFactory.createNodeList(annotList); } /** * Parse annotation list with at least one annotation. * * @return Annotation list */ private STNode parseAnnotations() { startContext(ParserRuleContext.ANNOTATIONS); List<STNode> annotList = new ArrayList<>(); annotList.add(parseAnnotation()); while (peek().kind == SyntaxKind.AT_TOKEN) { annotList.add(parseAnnotation()); } endContext(); return STNodeFactory.createNodeList(annotList); } /** * Parse annotation attachment. 
* <p> * <code>annotation := @ annot-tag-reference annot-value</code> * * @return Parsed node */ private STNode parseAnnotation() { STNode atToken = parseAtToken(); STNode annotReference; if (peek().kind != SyntaxKind.IDENTIFIER_TOKEN) { annotReference = STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN); } else { annotReference = parseQualifiedIdentifier(ParserRuleContext.ANNOT_REFERENCE); } STNode annotValue; if (peek().kind == SyntaxKind.OPEN_BRACE_TOKEN) { annotValue = parseMappingConstructorExpr(); } else { annotValue = STNodeFactory.createEmptyNode(); } return STNodeFactory.createAnnotationNode(atToken, annotReference, annotValue); } /** * Parse '@' token. * * @return Parsed node */ private STNode parseAtToken() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.AT_TOKEN) { return consume(); } else { recover(nextToken, ParserRuleContext.AT); return parseAtToken(); } } /** * Parse metadata. Meta data consist of optional doc string and * an annotations list. * <p> * <code>metadata := [DocumentationString] annots</code> * * @return Parse node */ private STNode parseMetaData() { STNode docString; STNode annotations; switch (peek().kind) { case DOCUMENTATION_STRING: docString = parseMarkdownDocumentation(); annotations = parseOptionalAnnotations(); break; case AT_TOKEN: docString = STNodeFactory.createEmptyNode(); annotations = parseOptionalAnnotations(); break; default: return STNodeFactory.createEmptyNode(); } return createMetadata(docString, annotations); } /** * Create metadata node. * * @return A metadata node */ private STNode createMetadata(STNode docString, STNode annotations) { if (annotations == null && docString == null) { return STNodeFactory.createEmptyNode(); } else { return STNodeFactory.createMetadataNode(docString, annotations); } } /** * Parse is expression. 
* <code> * is-expr := expression is type-descriptor * </code> * * @param lhsExpr Preceding expression of the is expression * @return Is expression node */ private STNode parseTypeTestExpression(STNode lhsExpr, boolean isInConditionalExpr) { STNode isKeyword = parseIsKeyword(); STNode typeDescriptor = parseTypeDescriptorInExpression(ParserRuleContext.TYPE_DESC_IN_EXPRESSION, isInConditionalExpr); return STNodeFactory.createTypeTestExpressionNode(lhsExpr, isKeyword, typeDescriptor); } /** * Parse is-keyword. * * @return Is-keyword node */ private STNode parseIsKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.IS_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.IS_KEYWORD); return parseIsKeyword(); } } /** * Parse local type definition statement statement. * <code>ocal-type-defn-stmt := [annots] type identifier type-descriptor ;</code> * * @return local type definition statement statement */ private STNode parseLocalTypeDefinitionStatement(STNode annots) { startContext(ParserRuleContext.LOCAL_TYPE_DEFINITION_STMT); STNode typeKeyword = parseTypeKeyword(); STNode typeName = parseTypeName(); STNode typeDescriptor = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_DEF); STNode semicolon = parseSemicolon(); endContext(); return STNodeFactory.createLocalTypeDefinitionStatementNode(annots, typeKeyword, typeName, typeDescriptor, semicolon); } /** * Parse statement which is only consists of an action or expression. * * @param annots Annotations * @return Statement node */ private STNode parseExpressionStatement(STNode annots) { startContext(ParserRuleContext.EXPRESSION_STATEMENT); STNode expression = parseActionOrExpressionInLhs(annots); return getExpressionAsStatement(expression); } /** * Parse statements that starts with an expression. 
* * @return Statement node */ private STNode parseStatementStartWithExpr(STNode annots) { startContext(ParserRuleContext.AMBIGUOUS_STMT); STNode expr = parseActionOrExpressionInLhs(annots); return parseStatementStartWithExprRhs(expr); } /** * Parse the component followed by the expression, at the beginning of a statement. * * @param expression Action or expression in LHS * @return Statement node */ private STNode parseStatementStartWithExprRhs(STNode expression) { STToken nextToken = peek(); switch (nextToken.kind) { case EQUAL_TOKEN: switchContext(ParserRuleContext.ASSIGNMENT_STMT); return parseAssignmentStmtRhs(expression); case SEMICOLON_TOKEN: return getExpressionAsStatement(expression); case IDENTIFIER_TOKEN: default: if (isCompoundBinaryOperator(nextToken.kind)) { return parseCompoundAssignmentStmtRhs(expression); } ParserRuleContext context; if (isPossibleExpressionStatement(expression)) { context = ParserRuleContext.EXPR_STMT_RHS; } else { context = ParserRuleContext.STMT_START_WITH_EXPR_RHS; } recover(peek(), context, expression); return parseStatementStartWithExprRhs(expression); } } private boolean isPossibleExpressionStatement(STNode expression) { switch (expression.kind) { case METHOD_CALL: case FUNCTION_CALL: case CHECK_EXPRESSION: case REMOTE_METHOD_CALL_ACTION: case CHECK_ACTION: case BRACED_ACTION: case START_ACTION: case TRAP_ACTION: case FLUSH_ACTION: case ASYNC_SEND_ACTION: case SYNC_SEND_ACTION: case RECEIVE_ACTION: case WAIT_ACTION: case QUERY_ACTION: case COMMIT_ACTION: return true; default: return false; } } private STNode getExpressionAsStatement(STNode expression) { switch (expression.kind) { case METHOD_CALL: case FUNCTION_CALL: case CHECK_EXPRESSION: return parseCallStatement(expression); case REMOTE_METHOD_CALL_ACTION: case CHECK_ACTION: case BRACED_ACTION: case START_ACTION: case TRAP_ACTION: case FLUSH_ACTION: case ASYNC_SEND_ACTION: case SYNC_SEND_ACTION: case RECEIVE_ACTION: case WAIT_ACTION: case QUERY_ACTION: case COMMIT_ACTION: 
return parseActionStatement(expression); default: STNode semicolon = parseSemicolon(); endContext(); STNode exprStmt = STNodeFactory.createExpressionStatementNode(SyntaxKind.INVALID_EXPRESSION_STATEMENT, expression, semicolon); exprStmt = SyntaxErrors.addDiagnostic(exprStmt, DiagnosticErrorCode.ERROR_INVALID_EXPRESSION_STATEMENT); return exprStmt; } } private STNode parseArrayTypeDescriptorNode(STIndexedExpressionNode indexedExpr) { STNode memberTypeDesc = getTypeDescFromExpr(indexedExpr.containerExpression); STNodeList lengthExprs = (STNodeList) indexedExpr.keyExpression; if (lengthExprs.isEmpty()) { return createArrayTypeDesc(memberTypeDesc, indexedExpr.openBracket, STNodeFactory.createEmptyNode(), indexedExpr.closeBracket); } STNode lengthExpr = lengthExprs.get(0); switch (lengthExpr.kind) { case ASTERISK_LITERAL: case SIMPLE_NAME_REFERENCE: case QUALIFIED_NAME_REFERENCE: break; case NUMERIC_LITERAL: SyntaxKind innerChildKind = lengthExpr.childInBucket(0).kind; if (innerChildKind == SyntaxKind.DECIMAL_INTEGER_LITERAL_TOKEN || innerChildKind == SyntaxKind.HEX_INTEGER_LITERAL_TOKEN) { break; } default: STNode newOpenBracketWithDiagnostics = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae( indexedExpr.openBracket, lengthExpr, DiagnosticErrorCode.ERROR_INVALID_ARRAY_LENGTH); indexedExpr = indexedExpr.replace(indexedExpr.openBracket, newOpenBracketWithDiagnostics); lengthExpr = STNodeFactory.createEmptyNode(); } return createArrayTypeDesc(memberTypeDesc, indexedExpr.openBracket, lengthExpr, indexedExpr.closeBracket); } /** * <p> * Parse call statement, given the call expression. 
* <p> * <code> * call-stmt := call-expr ; * <br/> * call-expr := function-call-expr | method-call-expr | checking-keyword call-expr * </code> * * @param expression Call expression associated with the call statement * @return Call statement node */ private STNode parseCallStatement(STNode expression) { STNode semicolon = parseSemicolon(); endContext(); return STNodeFactory.createExpressionStatementNode(SyntaxKind.CALL_STATEMENT, expression, semicolon); } private STNode parseActionStatement(STNode action) { STNode semicolon = parseSemicolon(); endContext(); return STNodeFactory.createExpressionStatementNode(SyntaxKind.ACTION_STATEMENT, action, semicolon); } /** * Parse remote method call action, given the starting expression. * <p> * <code> * remote-method-call-action := expression -> method-name ( arg-list ) * <br/> * async-send-action := expression -> peer-worker ; * </code> * * @param isRhsExpr Is this an RHS action * @param expression LHS expression * @return */ private STNode parseRemoteMethodCallOrAsyncSendAction(STNode expression, boolean isRhsExpr) { STNode rightArrow = parseRightArrow(); return parseRemoteCallOrAsyncSendActionRhs(expression, isRhsExpr, rightArrow); } private STNode parseRemoteCallOrAsyncSendActionRhs(STNode expression, boolean isRhsExpr, STNode rightArrow) { STNode name; STToken nextToken = peek(); switch (nextToken.kind) { case DEFAULT_KEYWORD: STNode defaultKeyword = parseDefaultKeyword(); name = STNodeFactory.createSimpleNameReferenceNode(defaultKeyword); return parseAsyncSendAction(expression, rightArrow, name); case IDENTIFIER_TOKEN: name = STNodeFactory.createSimpleNameReferenceNode(parseFunctionName()); break; case CONTINUE_KEYWORD: case COMMIT_KEYWORD: name = getKeywordAsSimpleNameRef(); break; default: STToken token = peek(); recover(token, ParserRuleContext.REMOTE_CALL_OR_ASYNC_SEND_RHS, expression, isRhsExpr, rightArrow); return parseRemoteCallOrAsyncSendActionRhs(expression, isRhsExpr, rightArrow); } return 
parseRemoteCallOrAsyncSendEnd(expression, rightArrow, name); } private STNode parseRemoteCallOrAsyncSendEnd(STNode expression, STNode rightArrow, STNode name) { STToken nextToken = peek(); switch (nextToken.kind) { case OPEN_PAREN_TOKEN: return parseRemoteMethodCallAction(expression, rightArrow, name); case SEMICOLON_TOKEN: return parseAsyncSendAction(expression, rightArrow, name); default: recover(peek(), ParserRuleContext.REMOTE_CALL_OR_ASYNC_SEND_END, expression, rightArrow, name); return parseRemoteCallOrAsyncSendEnd(expression, rightArrow, name); } } /** * Parse default keyword. * * @return default keyword node */ private STNode parseDefaultKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.DEFAULT_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.DEFAULT_KEYWORD); return parseDefaultKeyword(); } } private STNode parseAsyncSendAction(STNode expression, STNode rightArrow, STNode peerWorker) { return STNodeFactory.createAsyncSendActionNode(expression, rightArrow, peerWorker); } private STNode parseRemoteMethodCallAction(STNode expression, STNode rightArrow, STNode name) { STNode openParenToken = parseOpenParenthesis(ParserRuleContext.ARG_LIST_START); STNode arguments = parseArgsList(); STNode closeParenToken = parseCloseParenthesis(); return STNodeFactory.createRemoteMethodCallActionNode(expression, rightArrow, name, openParenToken, arguments, closeParenToken); } /** * Parse right arrow (<code>-></code>) token. * * @return Parsed node */ private STNode parseRightArrow() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.RIGHT_ARROW_TOKEN) { return consume(); } else { recover(nextToken, ParserRuleContext.RIGHT_ARROW); return parseRightArrow(); } } /** * Parse parameterized type descriptor. 
* parameterized-type-descriptor := map type-parameter | future type-parameter | typedesc type-parameter * * @return Parsed node */ private STNode parseParameterizedTypeDescriptor() { STNode parameterizedTypeKeyword = parseParameterizedTypeKeyword(); STNode typeParameter = parseTypeParameter(); return STNodeFactory.createParameterizedTypeDescriptorNode(parameterizedTypeKeyword, typeParameter); } /** * Parse <code>map</code> or <code>future</code> keyword token. * * @return Parsed node */ private STNode parseParameterizedTypeKeyword() { STToken nextToken = peek(); switch (nextToken.kind) { case MAP_KEYWORD: case FUTURE_KEYWORD: return consume(); default: recover(nextToken, ParserRuleContext.PARAMETERIZED_TYPE); return parseParameterizedTypeKeyword(); } } /** * Parse <code> < </code> token. * * @return Parsed node */ private STNode parseGTToken() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.GT_TOKEN) { return consume(); } else { recover(nextToken, ParserRuleContext.GT); return parseGTToken(); } } /** * Parse <code> > </code> token. * * @return Parsed node */ private STNode parseLTToken() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.LT_TOKEN) { return consume(); } else { recover(nextToken, ParserRuleContext.LT); return parseLTToken(); } } /** * Parse nil literal. Here nil literal is only referred to ( ). * * @return Parsed node */ private STNode parseNilLiteral() { startContext(ParserRuleContext.NIL_LITERAL); STNode openParenthesisToken = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS); STNode closeParenthesisToken = parseCloseParenthesis(); endContext(); return STNodeFactory.createNilLiteralNode(openParenthesisToken, closeParenthesisToken); } /** * Parse annotation declaration, given the qualifier. 
* * @param metadata Metadata * @param qualifier Qualifier that precedes the listener declaration * @param constKeyword Const keyword * @return Parsed node */ private STNode parseAnnotationDeclaration(STNode metadata, STNode qualifier, STNode constKeyword) { startContext(ParserRuleContext.ANNOTATION_DECL); STNode annotationKeyword = parseAnnotationKeyword(); STNode annotDecl = parseAnnotationDeclFromType(metadata, qualifier, constKeyword, annotationKeyword); endContext(); return annotDecl; } /** * Parse annotation keyword. * * @return Parsed node */ private STNode parseAnnotationKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.ANNOTATION_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.ANNOTATION_KEYWORD); return parseAnnotationKeyword(); } } /** * Parse the components that follows after the annotation keyword of a annotation declaration. * * @param metadata Metadata * @param qualifier Qualifier that precedes the constant decl * @param constKeyword Const keyword * @param annotationKeyword * @return Parsed node */ private STNode parseAnnotationDeclFromType(STNode metadata, STNode qualifier, STNode constKeyword, STNode annotationKeyword) { STToken nextToken = peek(); switch (nextToken.kind) { case IDENTIFIER_TOKEN: return parseAnnotationDeclWithOptionalType(metadata, qualifier, constKeyword, annotationKeyword); default: if (isTypeStartingToken(nextToken.kind)) { break; } recover(peek(), ParserRuleContext.ANNOT_DECL_OPTIONAL_TYPE, metadata, qualifier, constKeyword, annotationKeyword); return parseAnnotationDeclFromType(metadata, qualifier, constKeyword, annotationKeyword); } STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANNOTATION_DECL); STNode annotTag = parseAnnotationTag(); return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc, annotTag); } /** * Parse annotation tag. 
* <p> * <code>annot-tag := identifier</code> * * @return */ private STNode parseAnnotationTag() { STToken token = peek(); if (token.kind == SyntaxKind.IDENTIFIER_TOKEN) { return consume(); } else { recover(peek(), ParserRuleContext.ANNOTATION_TAG); return parseAnnotationTag(); } } private STNode parseAnnotationDeclWithOptionalType(STNode metadata, STNode qualifier, STNode constKeyword, STNode annotationKeyword) { STNode typeDescOrAnnotTag = parseQualifiedIdentifier(ParserRuleContext.ANNOT_DECL_OPTIONAL_TYPE); if (typeDescOrAnnotTag.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) { STNode annotTag = parseAnnotationTag(); return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDescOrAnnotTag, annotTag); } STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN || isValidTypeContinuationToken(nextToken)) { STNode typeDesc = parseComplexTypeDescriptor(typeDescOrAnnotTag, ParserRuleContext.TYPE_DESC_IN_ANNOTATION_DECL, false); STNode annotTag = parseAnnotationTag(); return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc, annotTag); } STNode annotTag = ((STSimpleNameReferenceNode) typeDescOrAnnotTag).name; return parseAnnotationDeclRhs(metadata, qualifier, constKeyword, annotationKeyword, annotTag); } /** * Parse the component that follows the first identifier in an annotation decl. The identifier * can be either the type-name (a user defined type) or the annot-tag, where the type-name * is not present. 
* * @param metadata Metadata * @param qualifier Qualifier that precedes the annotation decl * @param constKeyword Const keyword * @param annotationKeyword Annotation keyword * @param typeDescOrAnnotTag Identifier that follows the annotation-keyword * @return Parsed node */ private STNode parseAnnotationDeclRhs(STNode metadata, STNode qualifier, STNode constKeyword, STNode annotationKeyword, STNode typeDescOrAnnotTag) { STToken nextToken = peek(); STNode typeDesc; STNode annotTag; switch (nextToken.kind) { case IDENTIFIER_TOKEN: typeDesc = typeDescOrAnnotTag; annotTag = parseAnnotationTag(); break; case SEMICOLON_TOKEN: case ON_KEYWORD: typeDesc = STNodeFactory.createEmptyNode(); annotTag = typeDescOrAnnotTag; break; default: recover(peek(), ParserRuleContext.ANNOT_DECL_RHS, metadata, qualifier, constKeyword, annotationKeyword, typeDescOrAnnotTag); return parseAnnotationDeclRhs(metadata, qualifier, constKeyword, annotationKeyword, typeDescOrAnnotTag); } return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc, annotTag); } private STNode parseAnnotationDeclAttachPoints(STNode metadata, STNode qualifier, STNode constKeyword, STNode annotationKeyword, STNode typeDesc, STNode annotTag) { STNode onKeyword; STNode attachPoints; STToken nextToken = peek(); switch (nextToken.kind) { case SEMICOLON_TOKEN: onKeyword = STNodeFactory.createEmptyNode(); attachPoints = STNodeFactory.createEmptyNodeList(); break; case ON_KEYWORD: onKeyword = parseOnKeyword(); attachPoints = parseAnnotationAttachPoints(); onKeyword = cloneWithDiagnosticIfListEmpty(attachPoints, onKeyword, DiagnosticErrorCode.ERROR_MISSING_ANNOTATION_ATTACH_POINT); break; default: recover(peek(), ParserRuleContext.ANNOT_OPTIONAL_ATTACH_POINTS, metadata, qualifier, constKeyword, annotationKeyword, typeDesc, annotTag); return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc, annotTag); } STNode semicolonToken = 
parseSemicolon(); return STNodeFactory.createAnnotationDeclarationNode(metadata, qualifier, constKeyword, annotationKeyword, typeDesc, annotTag, onKeyword, attachPoints, semicolonToken); } /** * Parse annotation attach points. * <p> * <code> * annot-attach-points := annot-attach-point (, annot-attach-point)* * <br/><br/> * annot-attach-point := dual-attach-point | source-only-attach-point * <br/><br/> * dual-attach-point := [source] dual-attach-point-ident * <br/><br/> * dual-attach-point-ident := * [object] type * | [object|resource] function * | parameter * | return * | service * | [object|record] field * <br/><br/> * source-only-attach-point := source source-only-attach-point-ident * <br/><br/> * source-only-attach-point-ident := * annotation * | external * | var * | const * | listener * | worker * </code> * * @return Parsed node */ private STNode parseAnnotationAttachPoints() { startContext(ParserRuleContext.ANNOT_ATTACH_POINTS_LIST); List<STNode> attachPoints = new ArrayList<>(); STToken nextToken = peek(); if (isEndAnnotAttachPointList(nextToken.kind)) { endContext(); return STNodeFactory.createEmptyNodeList(); } STNode attachPoint = parseAnnotationAttachPoint(); attachPoints.add(attachPoint); nextToken = peek(); STNode leadingComma; while (!isEndAnnotAttachPointList(nextToken.kind)) { leadingComma = parseAttachPointEnd(); if (leadingComma == null) { break; } attachPoints.add(leadingComma); attachPoint = parseAnnotationAttachPoint(); if (attachPoint == null) { attachPoint = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN, DiagnosticErrorCode.ERROR_MISSING_ANNOTATION_ATTACH_POINT); attachPoints.add(attachPoint); break; } attachPoints.add(attachPoint); nextToken = peek(); } endContext(); return STNodeFactory.createNodeList(attachPoints); } /** * Parse annotation attach point end. 
* * @return Parsed node */ private STNode parseAttachPointEnd() { switch (peek().kind) { case SEMICOLON_TOKEN: return null; case COMMA_TOKEN: return consume(); default: recover(peek(), ParserRuleContext.ATTACH_POINT_END); return parseAttachPointEnd(); } } private boolean isEndAnnotAttachPointList(SyntaxKind tokenKind) { switch (tokenKind) { case EOF_TOKEN: case SEMICOLON_TOKEN: return true; default: return false; } } /** * Parse annotation attach point. * * @return Parsed node */ private STNode parseAnnotationAttachPoint() { switch (peek().kind) { case EOF_TOKEN: return null; case ANNOTATION_KEYWORD: case EXTERNAL_KEYWORD: case VAR_KEYWORD: case CONST_KEYWORD: case LISTENER_KEYWORD: case WORKER_KEYWORD: case SOURCE_KEYWORD: STNode sourceKeyword = parseSourceKeyword(); return parseAttachPointIdent(sourceKeyword); case OBJECT_KEYWORD: case TYPE_KEYWORD: case RESOURCE_KEYWORD: case FUNCTION_KEYWORD: case PARAMETER_KEYWORD: case RETURN_KEYWORD: case SERVICE_KEYWORD: case FIELD_KEYWORD: case RECORD_KEYWORD: sourceKeyword = STNodeFactory.createEmptyNode(); STNode firstIdent = consume(); return parseDualAttachPointIdent(sourceKeyword, firstIdent); default: recover(peek(), ParserRuleContext.ATTACH_POINT); return parseAnnotationAttachPoint(); } } /** * Parse source keyword. * * @return Parsed node */ private STNode parseSourceKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.SOURCE_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.SOURCE_KEYWORD); return parseSourceKeyword(); } } /** * Parse attach point ident gievn. 
     * <p>
     * <code>
     * source-only-attach-point-ident := annotation | external | var | const | listener | worker
     * <br/><br/>
     * dual-attach-point-ident := [object] type | [object|resource] function | parameter
     * | return | service | [object|record] field
     * </code>
     *
     * @param sourceKeyword Source keyword
     * @return Parsed node
     */
    private STNode parseAttachPointIdent(STNode sourceKeyword) {
        switch (peek().kind) {
            // Source-only attach points: the single ident completes the attach point.
            case ANNOTATION_KEYWORD:
            case EXTERNAL_KEYWORD:
            case VAR_KEYWORD:
            case CONST_KEYWORD:
            case LISTENER_KEYWORD:
            case WORKER_KEYWORD:
                STNode firstIdent = consume();
                STNode secondIdent = STNodeFactory.createEmptyNode();
                return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, firstIdent, secondIdent);

            // Dual attach points: a second ident may follow (e.g. "object function").
            case OBJECT_KEYWORD:
            case RESOURCE_KEYWORD:
            case RECORD_KEYWORD:
            case TYPE_KEYWORD:
            case FUNCTION_KEYWORD:
            case PARAMETER_KEYWORD:
            case RETURN_KEYWORD:
            case SERVICE_KEYWORD:
            case FIELD_KEYWORD:
                firstIdent = consume();
                return parseDualAttachPointIdent(sourceKeyword, firstIdent);
            default:
                recover(peek(), ParserRuleContext.ATTACH_POINT_IDENT, sourceKeyword);
                return parseAttachPointIdent(sourceKeyword);
        }
    }

    /**
     * Parse dual-attach-point ident.
     *
     * @param sourceKeyword Source keyword
     * @param firstIdent first part of the dual attach-point
     * @return Parsed node
     */
    private STNode parseDualAttachPointIdent(STNode sourceKeyword, STNode firstIdent) {
        STNode secondIdent;
        switch (firstIdent.kind) {
            case OBJECT_KEYWORD:
                // "object" may be followed by: type | function | field
                secondIdent = parseIdentAfterObjectIdent();
                break;
            case RESOURCE_KEYWORD:
                // "resource" must be followed by: function
                secondIdent = parseFunctionIdent();
                break;
            case RECORD_KEYWORD:
                // "record" must be followed by: field
                secondIdent = parseFieldIdent();
                break;
            case TYPE_KEYWORD:
            case FUNCTION_KEYWORD:
            case PARAMETER_KEYWORD:
            case RETURN_KEYWORD:
            case SERVICE_KEYWORD:
            case FIELD_KEYWORD:
            default:
                // Single-ident attach point; there is no second ident.
                secondIdent = STNodeFactory.createEmptyNode();
                break;
        }

        return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, firstIdent, secondIdent);
    }

    /**
     * Parse the idents that are supported after object-ident.
     *
     * @return Parsed node
     */
    private STNode parseIdentAfterObjectIdent() {
        STToken token = peek();
        switch (token.kind) {
            case TYPE_KEYWORD:
            case FUNCTION_KEYWORD:
            case FIELD_KEYWORD:
                return consume();
            default:
                recover(token, ParserRuleContext.IDENT_AFTER_OBJECT_IDENT);
                return parseIdentAfterObjectIdent();
        }
    }

    /**
     * Parse function ident.
     *
     * @return Parsed node
     */
    private STNode parseFunctionIdent() {
        STToken token = peek();
        if (token.kind == SyntaxKind.FUNCTION_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.FUNCTION_IDENT);
            return parseFunctionIdent();
        }
    }

    /**
     * Parse field ident.
     *
     * @return Parsed node
     */
    private STNode parseFieldIdent() {
        STToken token = peek();
        if (token.kind == SyntaxKind.FIELD_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.FIELD_IDENT);
            return parseFieldIdent();
        }
    }

    /**
     * Parse XML namespace declaration.
     * <p>
     * <code>xmlns-decl := xmlns xml-namespace-uri [ as xml-namespace-prefix ] ;
     * <br/>
     * xml-namespace-uri := simple-const-expr
     * <br/>
     * xml-namespace-prefix := identifier
     * </code>
     *
     * @param isModuleVar Whether this is a module-level xmlns declaration
     * @return Parsed node
     */
    private STNode parseXMLNamespaceDeclaration(boolean isModuleVar) {
        startContext(ParserRuleContext.XML_NAMESPACE_DECLARATION);
        STNode xmlnsKeyword = parseXMLNSKeyword();

        STNode namespaceUri = parseSimpleConstExpr();
        while (!isValidXMLNameSpaceURI(namespaceUri)) {
            // Recovery: attach the invalid expression to the xmlns keyword as trailing
            // minutiae and try parsing the URI again.
            xmlnsKeyword = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(xmlnsKeyword, namespaceUri,
                    DiagnosticErrorCode.ERROR_INVALID_XML_NAMESPACE_URI);
            namespaceUri = parseSimpleConstExpr();
        }

        STNode xmlnsDecl = parseXMLDeclRhs(xmlnsKeyword, namespaceUri, isModuleVar);
        endContext();
        return xmlnsDecl;
    }

    /**
     * Parse xmlns keyword.
     *
     * @return Parsed node
     */
    private STNode parseXMLNSKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.XMLNS_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.XMLNS_KEYWORD);
            return parseXMLNSKeyword();
        }
    }

    // Only string literals and (qualified) name references are acceptable namespace URIs.
    private boolean isValidXMLNameSpaceURI(STNode expr) {
        switch (expr.kind) {
            case STRING_LITERAL:
            case QUALIFIED_NAME_REFERENCE:
            case SIMPLE_NAME_REFERENCE:
                return true;
            case IDENTIFIER_TOKEN:
            default:
                return false;
        }
    }

    private STNode parseSimpleConstExpr() {
        startContext(ParserRuleContext.CONSTANT_EXPRESSION);
        STNode expr = parseSimpleConstExprInternal();
        endContext();
        return expr;
    }

    /**
     * Parse simple constants expr.
     *
     * @return Parsed node
     */
    private STNode parseSimpleConstExprInternal() {
        switch (peek().kind) {
            case STRING_LITERAL_TOKEN:
            case DECIMAL_INTEGER_LITERAL_TOKEN:
            case HEX_INTEGER_LITERAL_TOKEN:
            case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
            case HEX_FLOATING_POINT_LITERAL_TOKEN:
            case TRUE_KEYWORD:
            case FALSE_KEYWORD:
            case NULL_KEYWORD:
                return parseBasicLiteral();
            case IDENTIFIER_TOKEN:
                return parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);
            case PLUS_TOKEN:
            case MINUS_TOKEN:
                // Unary sign followed by a numeric literal.
                return parseSignedIntOrFloat();
            case OPEN_PAREN_TOKEN:
                // Nil literal: ()
                return parseNilLiteral();
            default:
                STToken token = peek();
                recover(token, ParserRuleContext.CONSTANT_EXPRESSION_START);
                return parseSimpleConstExprInternal();
        }
    }

    /**
     * Parse the portion after the namespace-uri of an XML declaration.
     *
     * @param xmlnsKeyword XMLNS keyword
     * @param namespaceUri Namespace URI
     * @return Parsed node
     */
    private STNode parseXMLDeclRhs(STNode xmlnsKeyword, STNode namespaceUri, boolean isModuleVar) {
        STNode asKeyword = STNodeFactory.createEmptyNode();
        STNode namespacePrefix = STNodeFactory.createEmptyNode();

        switch (peek().kind) {
            case AS_KEYWORD:
                // Optional "as <prefix>" clause.
                asKeyword = parseAsKeyword();
                namespacePrefix = parseNamespacePrefix();
                break;
            case SEMICOLON_TOKEN:
                break;
            default:
                recover(peek(), ParserRuleContext.XML_NAMESPACE_PREFIX_DECL, xmlnsKeyword, namespaceUri,
                        isModuleVar);
                return parseXMLDeclRhs(xmlnsKeyword, namespaceUri, isModuleVar);
        }

        STNode semicolon = parseSemicolon();
        if (isModuleVar) {
            return STNodeFactory.createModuleXMLNamespaceDeclarationNode(xmlnsKeyword, namespaceUri, asKeyword,
                    namespacePrefix, semicolon);
        }
        return STNodeFactory.createXMLNamespaceDeclarationNode(xmlnsKeyword, namespaceUri, asKeyword,
                namespacePrefix, semicolon);
    }

    /**
     * Parse namespace prefix.
     *
     * @return Parsed node
     */
    private STNode parseNamespacePrefix() {
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {
            return consume();
        } else {
            recover(peek(), ParserRuleContext.NAMESPACE_PREFIX);
            return parseNamespacePrefix();
        }
    }

    /**
     * Parse named worker declaration.
     * <p>
     * <code>named-worker-decl := [annots] worker worker-name return-type-descriptor { sequence-stmt }</code>
     *
     * @param annots Annotations attached to the worker decl
     * @return Parsed node
     */
    private STNode parseNamedWorkerDeclaration(STNode annots) {
        startContext(ParserRuleContext.NAMED_WORKER_DECL);
        STNode workerKeyword = parseWorkerKeyword();
        STNode workerName = parseWorkerName();
        STNode returnTypeDesc = parseReturnTypeDescriptor();
        STNode workerBody = parseBlockNode();
        endContext();
        return STNodeFactory.createNamedWorkerDeclarationNode(annots, workerKeyword, workerName, returnTypeDesc,
                workerBody);
    }

    // Returns an empty node when there is no "returns" keyword: the return type
    // descriptor is optional.
    private STNode parseReturnTypeDescriptor() {
        STToken token = peek();
        if (token.kind != SyntaxKind.RETURNS_KEYWORD) {
            return STNodeFactory.createEmptyNode();
        }

        STNode returnsKeyword = consume();
        STNode annot = parseOptionalAnnotations();
        STNode type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_RETURN_TYPE_DESC);
        return STNodeFactory.createReturnTypeDescriptorNode(returnsKeyword, annot, type);
    }

    /**
     * Parse worker keyword.
     *
     * @return Parsed node
     */
    private STNode parseWorkerKeyword() {
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.WORKER_KEYWORD) {
            return consume();
        } else {
            recover(peek(), ParserRuleContext.WORKER_KEYWORD);
            return parseWorkerKeyword();
        }
    }

    /**
     * Parse worker name.
     * <p>
     * <code>worker-name := identifier</code>
     *
     * @return Parsed node
     */
    private STNode parseWorkerName() {
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {
            return consume();
        } else {
            recover(peek(), ParserRuleContext.WORKER_NAME);
            return parseWorkerName();
        }
    }

    /**
     * Parse lock statement.
     * <code>lock-stmt := lock block-stmt [on-fail-clause];</code>
     *
     * @return Lock statement
     */
    private STNode parseLockStatement() {
        startContext(ParserRuleContext.LOCK_STMT);
        STNode lockKeyword = parseLockKeyword();
        STNode blockStatement = parseBlockNode();
        endContext();

        // Optional on-fail clause after the block.
        STNode onFailClause;
        if (peek().kind == SyntaxKind.ON_KEYWORD) {
            onFailClause = parseOnFailClause();
        } else {
            onFailClause = STNodeFactory.createEmptyNode();
        }
        return STNodeFactory.createLockStatementNode(lockKeyword, blockStatement, onFailClause);
    }

    /**
     * Parse lock-keyword.
     *
     * @return lock-keyword node
     */
    private STNode parseLockKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.LOCK_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.LOCK_KEYWORD);
            return parseLockKeyword();
        }
    }

    /**
     * Parse union type descriptor.
     * union-type-descriptor := type-descriptor | type-descriptor
     *
     * @param leftTypeDesc Type desc in the LHS os the union type desc.
     * @param context Current context.
     * @return parsed union type desc node
     */
    private STNode parseUnionTypeDescriptor(STNode leftTypeDesc, ParserRuleContext context,
                                            boolean isTypedBindingPattern) {
        STNode pipeToken = parsePipeToken();
        STNode rightTypeDesc = parseTypeDescriptor(context, isTypedBindingPattern, false);
        return createUnionTypeDesc(leftTypeDesc, pipeToken, rightTypeDesc);
    }

    // Validates both operands (rejecting "var") before building the union node.
    private STNode createUnionTypeDesc(STNode leftTypeDesc, STNode pipeToken, STNode rightTypeDesc) {
        leftTypeDesc = validateForUsageOfVar(leftTypeDesc);
        rightTypeDesc = validateForUsageOfVar(rightTypeDesc);
        return STNodeFactory.createUnionTypeDescriptorNode(leftTypeDesc, pipeToken, rightTypeDesc);
    }

    /**
     * Parse pipe token.
     *
     * @return parsed pipe token node
     */
    private STNode parsePipeToken() {
        STToken token = peek();
        if (token.kind == SyntaxKind.PIPE_TOKEN) {
            return consume();
        } else {
            recover(token, ParserRuleContext.PIPE);
            return parsePipeToken();
        }
    }

    // Whether the given token kind can start a type descriptor.
    private boolean isTypeStartingToken(SyntaxKind nodeKind) {
        switch (nodeKind) {
            case IDENTIFIER_TOKEN:
            case SERVICE_KEYWORD:
            case RECORD_KEYWORD:
            case OBJECT_KEYWORD:
            case ABSTRACT_KEYWORD:
            case CLIENT_KEYWORD:
            case OPEN_PAREN_TOKEN: // nil type descriptor '()'
            case MAP_KEYWORD: // map type desc
            case FUTURE_KEYWORD: // future type desc
            case TYPEDESC_KEYWORD: // typedesc type desc
            case ERROR_KEYWORD: // error type desc
            case STREAM_KEYWORD: // stream type desc
            case TABLE_KEYWORD: // table type
            case FUNCTION_KEYWORD:
            case OPEN_BRACKET_TOKEN:
            case DISTINCT_KEYWORD:
                return true;
            default:
                // Singleton type descriptors (literal types) also start a type.
                if (isSingletonTypeDescStart(nodeKind, true)) {
                    return true;
                }
                return isSimpleType(nodeKind);
        }
    }

    // Whether the given token kind is a built-in simple type keyword.
    static boolean isSimpleType(SyntaxKind nodeKind) {
        switch (nodeKind) {
            case INT_KEYWORD:
            case FLOAT_KEYWORD:
            case DECIMAL_KEYWORD:
            case BOOLEAN_KEYWORD:
            case STRING_KEYWORD:
            case BYTE_KEYWORD:
            case XML_KEYWORD:
            case JSON_KEYWORD:
            case HANDLE_KEYWORD:
            case ANY_KEYWORD:
            case ANYDATA_KEYWORD:
            case NEVER_KEYWORD:
            case SERVICE_KEYWORD:
            case VAR_KEYWORD:
            case ERROR_KEYWORD:
            case STREAM_KEYWORD:
            case TYPEDESC_KEYWORD:
            case READONLY_KEYWORD:
            case DISTINCT_KEYWORD:
                return true;
            default:
                return false;
        }
    }

    // Maps a built-in type keyword to the syntax kind of its type-descriptor node.
    private SyntaxKind getTypeSyntaxKind(SyntaxKind typeKeyword) {
        switch (typeKeyword) {
            case INT_KEYWORD:
                return SyntaxKind.INT_TYPE_DESC;
            case FLOAT_KEYWORD:
                return SyntaxKind.FLOAT_TYPE_DESC;
            case DECIMAL_KEYWORD:
                return SyntaxKind.DECIMAL_TYPE_DESC;
            case BOOLEAN_KEYWORD:
                return SyntaxKind.BOOLEAN_TYPE_DESC;
            case STRING_KEYWORD:
                return SyntaxKind.STRING_TYPE_DESC;
            case BYTE_KEYWORD:
                return SyntaxKind.BYTE_TYPE_DESC;
            case XML_KEYWORD:
                return SyntaxKind.XML_TYPE_DESC;
            case JSON_KEYWORD:
                return SyntaxKind.JSON_TYPE_DESC;
            case HANDLE_KEYWORD:
                return SyntaxKind.HANDLE_TYPE_DESC;
            case ANY_KEYWORD:
                return SyntaxKind.ANY_TYPE_DESC;
            case ANYDATA_KEYWORD:
                return SyntaxKind.ANYDATA_TYPE_DESC;
            case READONLY_KEYWORD:
                return SyntaxKind.READONLY_TYPE_DESC;
            case NEVER_KEYWORD:
                return SyntaxKind.NEVER_TYPE_DESC;
            case SERVICE_KEYWORD:
                return SyntaxKind.SERVICE_TYPE_DESC;
            case VAR_KEYWORD:
                return SyntaxKind.VAR_TYPE_DESC;
            case ERROR_KEYWORD:
                return SyntaxKind.ERROR_TYPE_DESC;
            default:
                return SyntaxKind.TYPE_REFERENCE;
        }
    }

    /**
     * Parse fork-keyword.
     *
     * @return Fork-keyword node
     */
    private STNode parseForkKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.FORK_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.FORK_KEYWORD);
            return parseForkKeyword();
        }
    }

    /**
     * Parse fork statement.
     * <code>fork-stmt := fork { named-worker-decl+ }</code>
     *
     * @return Fork statement
     */
    private STNode parseForkStatement() {
        startContext(ParserRuleContext.FORK_STMT);
        STNode forkKeyword = parseForkKeyword();
        STNode openBrace = parseOpenBrace();

        // Only named-worker declarations are allowed inside a fork block. Anything else
        // is attached as invalid-node minutiae to the nearest preceding node.
        ArrayList<STNode> workers = new ArrayList<>();
        while (!isEndOfStatements()) {
            STNode stmt = parseStatement();
            if (stmt == null) {
                break;
            }

            switch (stmt.kind) {
                case NAMED_WORKER_DECLARATION:
                    workers.add(stmt);
                    break;
                default:
                    if (workers.isEmpty()) {
                        openBrace = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(openBrace, stmt,
                                DiagnosticErrorCode.ERROR_ONLY_NAMED_WORKERS_ALLOWED_HERE);
                    } else {
                        updateLastNodeInListWithInvalidNode(workers, stmt,
                                DiagnosticErrorCode.ERROR_ONLY_NAMED_WORKERS_ALLOWED_HERE);
                    }
            }
        }

        STNode namedWorkerDeclarations = STNodeFactory.createNodeList(workers);
        STNode closeBrace = parseCloseBrace();
        endContext();

        STNode forkStmt =
                STNodeFactory.createForkStatementNode(forkKeyword, openBrace, namedWorkerDeclarations, closeBrace);
        if (isNodeListEmpty(namedWorkerDeclarations)) {
            // A fork statement must contain at least one named worker declaration.
            return SyntaxErrors.addDiagnostic(forkStmt,
                    DiagnosticErrorCode.ERROR_MISSING_NAMED_WORKER_DECLARATION_IN_FORK_STMT);
        }

        return forkStmt;
    }

    /**
     * Parse trap expression.
     * <p>
     * <code>
     * trap-expr := trap expression
     * </code>
     *
     * @param allowActions Allow actions
     * @param isRhsExpr Whether this is a RHS expression or not
     * @param isInConditionalExpr Whether this is inside a conditional expression
     * @return Trap expression node
     */
    private STNode parseTrapExpression(boolean isRhsExpr, boolean allowActions, boolean isInConditionalExpr) {
        STNode trapKeyword = parseTrapKeyword();
        STNode expr =
                parseExpression(OperatorPrecedence.EXPRESSION_ACTION, isRhsExpr, allowActions, isInConditionalExpr);
        // "trap" over an action produces a trap-action; over an expression, a trap-expression.
        if (isAction(expr)) {
            return STNodeFactory.createTrapExpressionNode(SyntaxKind.TRAP_ACTION, trapKeyword, expr);
        }

        return STNodeFactory.createTrapExpressionNode(SyntaxKind.TRAP_EXPRESSION, trapKeyword, expr);
    }

    /**
     * Parse trap-keyword.
     *
     * @return Trap-keyword node
     */
    private STNode parseTrapKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.TRAP_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.TRAP_KEYWORD);
            return parseTrapKeyword();
        }
    }

    /**
     * Parse list constructor expression.
     * <p>
     * <code>
     * list-constructor-expr := [ [ expr-list ] ]
     * <br/>
     * expr-list := expression (, expression)*
     * </code>
     *
     * @return Parsed node
     */
    private STNode parseListConstructorExpr() {
        startContext(ParserRuleContext.LIST_CONSTRUCTOR);
        STNode openBracket = parseOpenBracket();
        STNode expressions = parseOptionalExpressionsList();
        STNode closeBracket = parseCloseBracket();
        endContext();
        return STNodeFactory.createListConstructorExpressionNode(openBracket, expressions, closeBracket);
    }

    /**
     * Parse optional expression list.
     *
     * @return Parsed node
     */
    private STNode parseOptionalExpressionsList() {
        List<STNode> expressions = new ArrayList<>();
        // Empty list constructor: "[]".
        if (isEndOfListConstructor(peek().kind)) {
            return STNodeFactory.createEmptyNodeList();
        }

        STNode expr = parseExpression();
        expressions.add(expr);
        return parseOptionalExpressionsList(expressions);
    }

    // Continues a comma-separated expression list that already has its first member.
    private STNode parseOptionalExpressionsList(List<STNode> expressions) {
        // Parse the remaining expressions
        STNode listConstructorMemberEnd;
        while (!isEndOfListConstructor(peek().kind)) {
            listConstructorMemberEnd = parseListConstructorMemberEnd();
            if (listConstructorMemberEnd == null) {
                break;
            }
            expressions.add(listConstructorMemberEnd);
            STNode expr = parseExpression();
            expressions.add(expr);
        }

        return STNodeFactory.createNodeList(expressions);
    }

    private boolean isEndOfListConstructor(SyntaxKind tokenKind) {
        switch (tokenKind) {
            case EOF_TOKEN:
            case CLOSE_BRACKET_TOKEN:
                return true;
            default:
                return false;
        }
    }

    private STNode parseListConstructorMemberEnd() {
        switch (peek().kind) {
            case COMMA_TOKEN:
                return parseComma();
            case CLOSE_BRACKET_TOKEN:
                // null signals the end of the member list to the caller.
                return null;
            default:
                recover(peek(), ParserRuleContext.LIST_CONSTRUCTOR_MEMBER_END);
                return parseListConstructorMemberEnd();
        }
    }

    /**
     * Parse foreach statement.
     * <code>foreach-stmt := foreach typed-binding-pattern in action-or-expr block-stmt [on-fail-clause]</code>
     *
     * @return foreach statement
     */
    private STNode parseForEachStatement() {
        startContext(ParserRuleContext.FOREACH_STMT);
        STNode forEachKeyword = parseForEachKeyword();
        STNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.FOREACH_STMT);
        STNode inKeyword = parseInKeyword();
        STNode actionOrExpr = parseActionOrExpression();
        STNode blockStatement = parseBlockNode();
        endContext();

        // Optional on-fail clause after the block.
        STNode onFailClause;
        if (peek().kind == SyntaxKind.ON_KEYWORD) {
            onFailClause = parseOnFailClause();
        } else {
            onFailClause = STNodeFactory.createEmptyNode();
        }
        return STNodeFactory.createForEachStatementNode(forEachKeyword, typedBindingPattern, inKeyword, actionOrExpr,
                blockStatement, onFailClause);
    }

    /**
     * Parse foreach-keyword.
     *
     * @return ForEach-keyword node
     */
    private STNode parseForEachKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.FOREACH_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.FOREACH_KEYWORD);
            return parseForEachKeyword();
        }
    }

    /**
     * Parse in-keyword.
     *
     * @return In-keyword node
     */
    private STNode parseInKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.IN_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.IN_KEYWORD);
            return parseInKeyword();
        }
    }

    /**
     * Parse type cast expression.
     * <p>
     * <code>
     * type-cast-expr := < type-cast-param > expression
     * <br/>
     * type-cast-param := [annots] type-descriptor | annots
     * </code>
     *
     * @return Parsed node
     */
    private STNode parseTypeCastExpr(boolean isRhsExpr, boolean allowActions, boolean isInConditionalExpr) {
        startContext(ParserRuleContext.TYPE_CAST);
        STNode ltToken = parseLTToken();
        STNode typeCastParam = parseTypeCastParam();
        STNode gtToken = parseGTToken();
        endContext();

        // The expression being cast follows the closing '>'.
        STNode expression =
                parseExpression(OperatorPrecedence.EXPRESSION_ACTION, isRhsExpr, allowActions, isInConditionalExpr);
        return STNodeFactory.createTypeCastExpressionNode(ltToken, typeCastParam, gtToken, expression);
    }

    // The cast param is annotations, a type descriptor, or both.
    private STNode parseTypeCastParam() {
        STNode annot;
        STNode type;
        STToken token = peek();

        switch (token.kind) {
            case AT_TOKEN:
                annot = parseOptionalAnnotations();
                token = peek();
                // After annotations the type descriptor itself is optional.
                if (isTypeStartingToken(token.kind)) {
                    type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);
                } else {
                    type = STNodeFactory.createEmptyNode();
                }
                break;
            default:
                annot = STNodeFactory.createEmptyNode();
                type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);
                break;
        }

        return STNodeFactory.createTypeCastParamNode(getAnnotations(annot), type);
    }

    /**
     * Parse table constructor expression.
     * <p>
     * <code>
     * table-constructor-expr-rhs := [ [row-list] ]
     * </code>
     *
     * @param tableKeyword tableKeyword that precedes this rhs
     * @param keySpecifier keySpecifier that precedes this rhs
     * @return Parsed node
     */
    private STNode parseTableConstructorExprRhs(STNode tableKeyword, STNode keySpecifier) {
        switchContext(ParserRuleContext.TABLE_CONSTRUCTOR);
        STNode openBracket = parseOpenBracket();
        STNode rowList = parseRowList();
        STNode closeBracket = parseCloseBracket();
        return STNodeFactory.createTableConstructorExpressionNode(tableKeyword, keySpecifier, openBracket, rowList,
                closeBracket);
    }

    /**
     * Parse table-keyword.
     *
     * @return Table-keyword node
     */
    private STNode parseTableKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.TABLE_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.TABLE_KEYWORD);
            return parseTableKeyword();
        }
    }

    /**
     * Parse table rows.
     * <p>
     * <code>row-list := [ mapping-constructor-expr (, mapping-constructor-expr)* ]</code>
     *
     * @return Parsed node
     */
    private STNode parseRowList() {
        STToken nextToken = peek();
        // Empty row list: "[]".
        if (isEndOfTableRowList(nextToken.kind)) {
            return STNodeFactory.createEmptyNodeList();
        }

        // Parse the first row, then alternating (comma, row) pairs.
        List<STNode> mappings = new ArrayList<>();
        STNode mapExpr = parseMappingConstructorExpr();
        mappings.add(mapExpr);

        nextToken = peek();
        STNode rowEnd;
        while (!isEndOfTableRowList(nextToken.kind)) {
            rowEnd = parseTableRowEnd();
            if (rowEnd == null) {
                break;
            }

            mappings.add(rowEnd);
            mapExpr = parseMappingConstructorExpr();
            mappings.add(mapExpr);
            nextToken = peek();
        }

        return STNodeFactory.createNodeList(mappings);
    }

    private boolean isEndOfTableRowList(SyntaxKind tokenKind) {
        switch (tokenKind) {
            case EOF_TOKEN:
            case CLOSE_BRACKET_TOKEN:
                return true;
            case COMMA_TOKEN:
            case OPEN_BRACE_TOKEN:
                return false;
            default:
                return isEndOfMappingConstructor(tokenKind);
        }
    }

    private STNode parseTableRowEnd() {
        switch (peek().kind) {
            case COMMA_TOKEN:
                return parseComma();
            case CLOSE_BRACKET_TOKEN:
            case EOF_TOKEN:
                // null signals the end of the row list to the caller.
                return null;
            default:
                recover(peek(), ParserRuleContext.TABLE_ROW_END);
                return parseTableRowEnd();
        }
    }

    /**
     * Parse key specifier.
     * <p>
     * <code>key-specifier := key ( [ field-name (, field-name)* ] )</code>
     *
     * @return Parsed node
     */
    private STNode parseKeySpecifier() {
        startContext(ParserRuleContext.KEY_SPECIFIER);
        STNode keyKeyword = parseKeyKeyword();
        STNode openParen = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS);
        STNode fieldNames = parseFieldNames();
        STNode closeParen = parseCloseParenthesis();
        endContext();
        return STNodeFactory.createKeySpecifierNode(keyKeyword, openParen, fieldNames, closeParen);
    }

    /**
     * Parse key-keyword.
     *
     * @return Key-keyword node
     */
    private STNode parseKeyKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.KEY_KEYWORD) {
            return consume();
        }

        // "key" is a contextual keyword: it may have been lexed as a plain identifier.
        if (isKeyKeyword(token)) {
            return getKeyKeyword(consume());
        }

        recover(token, ParserRuleContext.KEY_KEYWORD);
        return parseKeyKeyword();
    }

    // Whether the token is the identifier "key" (the contextual keyword form).
    static boolean isKeyKeyword(STToken token) {
        return token.kind == SyntaxKind.IDENTIFIER_TOKEN && LexerTerminals.KEY.equals(token.text());
    }

    // Re-wraps an identifier token as a KEY_KEYWORD token, preserving minutiae and diagnostics.
    private STNode getKeyKeyword(STToken token) {
        return STNodeFactory.createToken(SyntaxKind.KEY_KEYWORD, token.leadingMinutiae(), token.trailingMinutiae(),
                token.diagnostics());
    }

    /**
     * Parse field names.
     * <p>
     * <code>field-name-list := [ field-name (, field-name)* ]</code>
     *
     * @return Parsed node
     */
    private STNode parseFieldNames() {
        STToken nextToken = peek();
        // Empty field-name list.
        if (isEndOfFieldNamesList(nextToken.kind)) {
            return STNodeFactory.createEmptyNodeList();
        }

        // Parse the first field name, then alternating (comma, field-name) pairs.
        List<STNode> fieldNames = new ArrayList<>();
        STNode fieldName = parseVariableName();
        fieldNames.add(fieldName);

        nextToken = peek();
        STNode leadingComma;
        while (!isEndOfFieldNamesList(nextToken.kind)) {
            leadingComma = parseComma();
            fieldNames.add(leadingComma);
            fieldName = parseVariableName();
            fieldNames.add(fieldName);
            nextToken = peek();
        }

        return STNodeFactory.createNodeList(fieldNames);
    }

    private boolean isEndOfFieldNamesList(SyntaxKind tokenKind) {
        switch (tokenKind) {
            case COMMA_TOKEN:
            case IDENTIFIER_TOKEN:
                return false;
            default:
                return true;
        }
    }

    /**
     * Parse error type descriptor.
     * <p>
     * error-type-descriptor := error [error-type-param]
     * error-type-param := < (detail-type-descriptor | inferred-type-descriptor) >
     * detail-type-descriptor := type-descriptor
     * inferred-type-descriptor := *
     * </p>
     *
     * @return Parsed node
     */
    private STNode parseErrorTypeDescriptor() {
        STNode errorKeywordToken = parseErrorKeyword();
        return parseErrorTypeDescriptor(errorKeywordToken);
    }

    // Builds the error type descriptor; the type-param in angle brackets is optional.
    private STNode parseErrorTypeDescriptor(STNode errorKeywordToken) {
        STNode errorTypeParamsNode;
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.LT_TOKEN) {
            errorTypeParamsNode = parseErrorTypeParamsNode();
        } else {
            errorTypeParamsNode = STNodeFactory.createEmptyNode();
        }
        return STNodeFactory.createErrorTypeDescriptorNode(errorKeywordToken, errorTypeParamsNode);
    }

    /**
     * Parse error type param node.
     * <p>
     * error-type-param := < (detail-type-descriptor | inferred-type-descriptor) >
     * detail-type-descriptor := type-descriptor
     * inferred-type-descriptor := *
     * </p>
     *
     * @return Parsed node
     */
    private STNode parseErrorTypeParamsNode() {
        STNode ltToken = parseLTToken();
        STNode parameter;
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.ASTERISK_TOKEN) {
            // Inferred type descriptor: error<*>
            parameter = consume();
        } else {
            parameter = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);
        }
        STNode gtToken = parseGTToken();
        return STNodeFactory.createErrorTypeParamsNode(ltToken, parameter, gtToken);
    }

    /**
     * Parse error-keyword.
     *
     * @return Parsed error-keyword node
     */
    private STNode parseErrorKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.ERROR_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.ERROR_KEYWORD);
            return parseErrorKeyword();
        }
    }

    /**
     * Parse typedesc type descriptor.
     * typedesc-type-descriptor := typedesc type-parameter
     *
     * @return Parsed typedesc type node
     */
    private STNode parseTypedescTypeDescriptor() {
        STNode typedescKeywordToken = parseTypedescKeyword();
        STNode typedescTypeParamsNode;
        STToken nextToken = peek();
        // The type parameter in angle brackets is optional.
        if (nextToken.kind == SyntaxKind.LT_TOKEN) {
            typedescTypeParamsNode = parseTypeParameter();
        } else {
            typedescTypeParamsNode = STNodeFactory.createEmptyNode();
        }
        return STNodeFactory.createTypedescTypeDescriptorNode(typedescKeywordToken, typedescTypeParamsNode);
    }

    /**
     * Parse typedesc-keyword.
     *
     * @return Parsed typedesc-keyword node
     */
    private STNode parseTypedescKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.TYPEDESC_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.TYPEDESC_KEYWORD);
            return parseTypedescKeyword();
        }
    }

    /**
     * Parse stream type descriptor.
     * <p>
     * stream-type-descriptor := stream [stream-type-parameters]
     * stream-type-parameters := < type-descriptor [, type-descriptor]>
     * </p>
     *
     * @return Parsed stream type descriptor node
     */
    private STNode parseStreamTypeDescriptor() {
        STNode streamKeywordToken = parseStreamKeyword();
        STNode streamTypeParamsNode;
        STToken nextToken = peek();
        // The stream type parameters in angle brackets are optional.
        if (nextToken.kind == SyntaxKind.LT_TOKEN) {
            streamTypeParamsNode = parseStreamTypeParamsNode();
        } else {
            streamTypeParamsNode = STNodeFactory.createEmptyNode();
        }
        return STNodeFactory.createStreamTypeDescriptorNode(streamKeywordToken, streamTypeParamsNode);
    }

    /**
     * Parse xml type descriptor.
     * xml-type-descriptor := xml type-parameter
     *
     * @return Parsed typedesc type node
     */
    private STNode parseXmlTypeDescriptor() {
        STNode xmlKeywordToken = parseXMLKeyword();
        STNode typedescTypeParamsNode;
        STToken nextToken = peek();
        // The type parameter in angle brackets is optional.
        if (nextToken.kind == SyntaxKind.LT_TOKEN) {
            typedescTypeParamsNode = parseTypeParameter();
        } else {
            typedescTypeParamsNode = STNodeFactory.createEmptyNode();
        }
        return STNodeFactory.createXmlTypeDescriptorNode(xmlKeywordToken, typedescTypeParamsNode);
    }

    /**
     * Parse stream type params node.
     * <p>
     * stream-type-parameters := < type-descriptor [, type-descriptor]>
     * </p>
     *
     * @return Parsed stream type params node
     */
    private STNode parseStreamTypeParamsNode() {
        STNode ltToken = parseLTToken();
        startContext(ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC);
        STNode leftTypeDescNode = parseTypeDescriptorWithoutContext(ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC,
                false);
        STNode streamTypedesc = parseStreamTypeParamsNode(ltToken, leftTypeDescNode);
        endContext();
        return streamTypedesc;
    }

    // Parses the optional second type parameter (after a comma) and the closing '>'.
    private STNode parseStreamTypeParamsNode(STNode ltToken, STNode leftTypeDescNode) {
        STNode commaToken, rightTypeDescNode, gtToken;
        switch (peek().kind) {
            case COMMA_TOKEN:
                commaToken = parseComma();
                rightTypeDescNode = parseTypeDescriptorWithoutContext(ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC,
                        false);
                break;
            case GT_TOKEN:
                commaToken = STNodeFactory.createEmptyNode();
                rightTypeDescNode = STNodeFactory.createEmptyNode();
                break;
            default:
                recover(peek(), ParserRuleContext.STREAM_TYPE_FIRST_PARAM_RHS, ltToken, leftTypeDescNode);
                return parseStreamTypeParamsNode(ltToken, leftTypeDescNode);
        }

        gtToken = parseGTToken();
        return STNodeFactory.createStreamTypeParamsNode(ltToken, leftTypeDescNode, commaToken, rightTypeDescNode,
                gtToken);
    }

    /**
     * Parse stream-keyword.
     *
     * @return Parsed stream-keyword node
     */
    private STNode parseStreamKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.STREAM_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.STREAM_KEYWORD);
            return parseStreamKeyword();
        }
    }

    /**
     * Parse let expression.
     * <p>
     * <code>
     * let-expr := let let-var-decl [, let-var-decl]* in expression
     * </code>
     *
     * @return Parsed node
     */
    private STNode parseLetExpression(boolean isRhsExpr) {
        STNode letKeyword = parseLetKeyword();
        STNode letVarDeclarations = parseLetVarDeclarations(ParserRuleContext.LET_EXPR_LET_VAR_DECL, isRhsExpr);
        STNode inKeyword = parseInKeyword();

        // At least one let-var declaration is required; attach a diagnostic to the
        // "let" keyword if the list came back empty.
        letKeyword = cloneWithDiagnosticIfListEmpty(letVarDeclarations, letKeyword,
                DiagnosticErrorCode.ERROR_MISSING_LET_VARIABLE_DECLARATION);

        STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
        return STNodeFactory.createLetExpressionNode(letKeyword, letVarDeclarations, inKeyword, expression);
    }

    /**
     * Parse let-keyword.
     *
     * @return Let-keyword node
     */
    private STNode parseLetKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.LET_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.LET_KEYWORD);
            return parseLetKeyword();
        }
    }

    /**
     * Parse let variable declarations.
     * <p>
     * <code>let-var-decl-list := let-var-decl [, let-var-decl]*</code>
     *
     * @return Parsed node
     */
    private STNode parseLetVarDeclarations(ParserRuleContext context, boolean isRhsExpr) {
        startContext(context);
        List<STNode> varDecls = new ArrayList<>();
        STToken nextToken = peek();

        // Empty declaration list (an error; the caller attaches the diagnostic).
        if (isEndOfLetVarDeclarations(nextToken.kind)) {
            endContext();
            return STNodeFactory.createEmptyNodeList();
        }

        // Parse the first declaration, then alternating (comma, declaration) pairs.
        STNode varDec = parseLetVarDecl(isRhsExpr);
        varDecls.add(varDec);

        nextToken = peek();
        STNode leadingComma;
        while (!isEndOfLetVarDeclarations(nextToken.kind)) {
            leadingComma = parseComma();
            varDecls.add(leadingComma);
            varDec = parseLetVarDecl(isRhsExpr);
            varDecls.add(varDec);
            nextToken = peek();
        }

        endContext();
        return STNodeFactory.createNodeList(varDecls);
    }

    private boolean isEndOfLetVarDeclarations(SyntaxKind tokenKind) {
        switch (tokenKind) {
            case COMMA_TOKEN:
            case AT_TOKEN:
                return false;
            case IN_KEYWORD:
                return true;
            default:
                // Any token that cannot start a type also ends the declaration list.
                return !isTypeStartingToken(tokenKind);
        }
    }

    /**
     * Parse let variable declaration.
     * <p>
     * <code>let-var-decl := [annots] typed-binding-pattern = expression</code>
     *
     * @return Parsed node
     */
    private STNode parseLetVarDecl(boolean isRhsExpr) {
        STNode annot = parseOptionalAnnotations();
        STNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.LET_EXPR_LET_VAR_DECL);
        STNode assign = parseAssignOp();

        // Allow any-action or an expression in the RHS.
        STNode expression = parseExpression(OperatorPrecedence.ANON_FUNC_OR_LET, isRhsExpr, false);
        return STNodeFactory.createLetVariableDeclarationNode(annot, typedBindingPattern, assign, expression);
    }

    /**
     * Parse raw backtick string template expression.
* <p> * <code>BacktickString := `expression`</code> * * @return Template expression node */ private STNode parseTemplateExpression() { STNode type = STNodeFactory.createEmptyNode(); STNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START); STNode content = parseTemplateContent(); STNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START); return STNodeFactory.createTemplateExpressionNode(SyntaxKind.RAW_TEMPLATE_EXPRESSION, type, startingBackTick, content, endingBackTick); } private STNode parseTemplateContent() { List<STNode> items = new ArrayList<>(); STToken nextToken = peek(); while (!isEndOfBacktickContent(nextToken.kind)) { STNode contentItem = parseTemplateItem(); items.add(contentItem); nextToken = peek(); } return STNodeFactory.createNodeList(items); } private boolean isEndOfBacktickContent(SyntaxKind kind) { switch (kind) { case EOF_TOKEN: case BACKTICK_TOKEN: return true; default: return false; } } private STNode parseTemplateItem() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.INTERPOLATION_START_TOKEN) { return parseInterpolation(); } return consume(); } /** * Parse string template expression. * <p> * <code>string-template-expr := string ` expression `</code> * * @return String template expression node */ private STNode parseStringTemplateExpression() { STNode type = parseStringKeyword(); STNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START); STNode content = parseTemplateContent(); STNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_END); return STNodeFactory.createTemplateExpressionNode(SyntaxKind.STRING_TEMPLATE_EXPRESSION, type, startingBackTick, content, endingBackTick); } /** * Parse <code>string</code> keyword. 
* * @return string keyword node */ private STNode parseStringKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.STRING_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.STRING_KEYWORD); return parseStringKeyword(); } } /** * Parse XML template expression. * <p> * <code>xml-template-expr := xml BacktickString</code> * * @return XML template expression */ private STNode parseXMLTemplateExpression() { STNode xmlKeyword = parseXMLKeyword(); STNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START); STNode content = parseTemplateContentAsXML(); STNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_END); return STNodeFactory.createTemplateExpressionNode(SyntaxKind.XML_TEMPLATE_EXPRESSION, xmlKeyword, startingBackTick, content, endingBackTick); } /** * Parse <code>xml</code> keyword. * * @return xml keyword node */ private STNode parseXMLKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.XML_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.XML_KEYWORD); return parseXMLKeyword(); } } /** * Parse the content of the template string as XML. This method first read the * input in the same way as the raw-backtick-template (BacktickString). Then * it parses the content as XML. 
 *
 * @return XML node
 */
private STNode parseTemplateContentAsXML() {
    // Interpolated expressions are collected in order; in the raw text each one is
    // replaced by a "${}" placeholder so the XML lexer sees the interpolation positions.
    ArrayDeque<STNode> expressions = new ArrayDeque<>();
    StringBuilder xmlStringBuilder = new StringBuilder();
    STToken nextToken = peek();
    while (!isEndOfBacktickContent(nextToken.kind)) {
        STNode contentItem = parseTemplateItem();
        if (contentItem.kind == SyntaxKind.TEMPLATE_STRING) {
            xmlStringBuilder.append(((STToken) contentItem).text());
        } else {
            xmlStringBuilder.append("${}");
            expressions.add(contentItem);
        }
        nextToken = peek();
    }

    // Re-lex the assembled text as XML; the XML parser receives the queued expressions,
    // presumably to splice them back at each "${}" placeholder -- confirm in XMLParser.
    CharReader charReader = CharReader.from(xmlStringBuilder.toString());
    AbstractTokenReader tokenReader = new TokenReader(new XMLLexer(charReader));
    XMLParser xmlParser = new XMLParser(tokenReader, expressions);
    return xmlParser.parse();
}

/**
 * Parse interpolation of a back-tick string.
 * <p>
 * <code>
 * interpolation := ${ expression }
 * </code>
 *
 * @return Interpolation node
 */
private STNode parseInterpolation() {
    startContext(ParserRuleContext.INTERPOLATION);
    STNode interpolStart = parseInterpolationStart();
    STNode expr = parseExpression();

    // Consume any stray tokens between the expression and the closing brace, attaching
    // each to the expression as trailing invalid-node minutiae with an invalid-token
    // diagnostic, so the interpolation still closes cleanly at `}` or EOF.
    while (true) {
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.EOF_TOKEN || nextToken.kind == SyntaxKind.CLOSE_BRACE_TOKEN) {
            break;
        } else {
            nextToken = consume();
            expr = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(expr, nextToken,
                    DiagnosticErrorCode.ERROR_INVALID_TOKEN, nextToken.text());
        }
    }

    STNode closeBrace = parseCloseBrace();
    endContext();
    return STNodeFactory.createInterpolationNode(interpolStart, expr, closeBrace);
}

/**
 * Parse interpolation start token.
 * <p>
 * <code>interpolation-start := ${</code>
 *
 * @return Interpolation start token
 */
private STNode parseInterpolationStart() {
    STToken token = peek();
    if (token.kind == SyntaxKind.INTERPOLATION_START_TOKEN) {
        return consume();
    } else {
        recover(token, ParserRuleContext.INTERPOLATION_START_TOKEN);
        return parseInterpolationStart();
    }
}

/**
 * Parse back-tick token.
 *
 * @param ctx Recovery context (distinguishes an opening vs. a closing backtick)
 * @return Back-tick token
 */
private STNode parseBacktickToken(ParserRuleContext ctx) {
    STToken token = peek();
    if (token.kind == SyntaxKind.BACKTICK_TOKEN) {
        return consume();
    } else {
        recover(token, ctx);
        return parseBacktickToken(ctx);
    }
}

/**
 * Parse table type descriptor.
 * <p>
 * table-type-descriptor := table row-type-parameter [key-constraint]
 * row-type-parameter := type-parameter
 * key-constraint := key-specifier | key-type-constraint
 * key-specifier := key ( [ field-name (, field-name)* ] )
 * key-type-constraint := key type-parameter
 * </p>
 *
 * @return Parsed table type desc node.
 */
private STNode parseTableTypeDescriptor() {
    STNode tableKeywordToken = parseTableKeyword();
    STNode rowTypeParameterNode = parseRowTypeParameter();
    STNode keyConstraintNode;
    STToken nextToken = peek();
    // `key` may arrive as a plain identifier token (contextual keyword), hence the
    // isKeyKeyword() check and the getKeyKeyword() conversion of the consumed token.
    if (isKeyKeyword(nextToken)) {
        STNode keyKeywordToken = getKeyKeyword(consume());
        keyConstraintNode = parseKeyConstraint(keyKeywordToken);
    } else {
        // The key constraint is optional.
        keyConstraintNode = STNodeFactory.createEmptyNode();
    }
    return STNodeFactory.createTableTypeDescriptorNode(tableKeywordToken, rowTypeParameterNode, keyConstraintNode);
}

/**
 * Parse row type parameter node.
 * <p>
 * row-type-parameter := type-parameter
 * </p>
 *
 * @return Parsed node.
 */
private STNode parseRowTypeParameter() {
    startContext(ParserRuleContext.ROW_TYPE_PARAM);
    STNode rowTypeParameterNode = parseTypeParameter();
    endContext();
    return rowTypeParameterNode;
}

/**
 * Parse type parameter node.
 * <p>
 * type-parameter := < type-descriptor >
 * </p>
 *
 * @return Parsed node
 */
private STNode parseTypeParameter() {
    STNode ltToken = parseLTToken();
    STNode typeNode = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);
    STNode gtToken = parseGTToken();
    return STNodeFactory.createTypeParameterNode(ltToken, typeNode, gtToken);
}

/**
 * Parse key constraint.
 * <p>
 * key-constraint := key-specifier | key-type-constraint
 * </p>
 *
 * @return Parsed node.
*/ private STNode parseKeyConstraint(STNode keyKeywordToken) { switch (peek().kind) { case OPEN_PAREN_TOKEN: return parseKeySpecifier(keyKeywordToken); case LT_TOKEN: return parseKeyTypeConstraint(keyKeywordToken); default: recover(peek(), ParserRuleContext.KEY_CONSTRAINTS_RHS, keyKeywordToken); return parseKeyConstraint(keyKeywordToken); } } /** * Parse key specifier given parsed key keyword token. * <p> * <code>key-specifier := key ( [ field-name (, field-name)* ] )</code> * * @return Parsed node */ private STNode parseKeySpecifier(STNode keyKeywordToken) { startContext(ParserRuleContext.KEY_SPECIFIER); STNode openParenToken = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS); STNode fieldNamesNode = parseFieldNames(); STNode closeParenToken = parseCloseParenthesis(); endContext(); return STNodeFactory.createKeySpecifierNode(keyKeywordToken, openParenToken, fieldNamesNode, closeParenToken); } /** * Parse key type constraint. * <p> * key-type-constraint := key type-parameter * </p> * * @return Parsed node */ private STNode parseKeyTypeConstraint(STNode keyKeywordToken) { STNode typeParameterNode = parseTypeParameter(); return STNodeFactory.createKeyTypeConstraintNode(keyKeywordToken, typeParameterNode); } /** * Parse function type descriptor. * <p> * <code>function-type-descriptor := function function-signature</code> * * @return Function type descriptor node */ private STNode parseFunctionTypeDesc() { startContext(ParserRuleContext.FUNC_TYPE_DESC); STNode functionKeyword = parseFunctionKeyword(); STNode signature = parseFuncSignature(true); endContext(); return STNodeFactory.createFunctionTypeDescriptorNode(functionKeyword, signature); } /** * Parse explicit anonymous function expression. * <p> * <code>explicit-anonymous-function-expr := [annots] function function-signature anon-func-body</code> * * @param annots Annotations. 
* @param isRhsExpr Is expression in rhs context * @return Anonymous function expression node */ private STNode parseExplicitFunctionExpression(STNode annots, boolean isRhsExpr) { startContext(ParserRuleContext.ANON_FUNC_EXPRESSION); STNode funcKeyword = parseFunctionKeyword(); STNode funcSignature = parseFuncSignature(false); STNode funcBody = parseAnonFuncBody(isRhsExpr); return STNodeFactory.createExplicitAnonymousFunctionExpressionNode(annots, funcKeyword, funcSignature, funcBody); } /** * Parse anonymous function body. * <p> * <code>anon-func-body := block-function-body | expr-function-body</code> * * @param isRhsExpr Is expression in rhs context * @return Anon function body node */ private STNode parseAnonFuncBody(boolean isRhsExpr) { switch (peek().kind) { case OPEN_BRACE_TOKEN: case EOF_TOKEN: STNode body = parseFunctionBodyBlock(true); endContext(); return body; case RIGHT_DOUBLE_ARROW_TOKEN: endContext(); return parseExpressionFuncBody(true, isRhsExpr); default: recover(peek(), ParserRuleContext.ANON_FUNC_BODY, isRhsExpr); return parseAnonFuncBody(isRhsExpr); } } /** * Parse expression function body. * <p> * <code>expr-function-body := => expression</code> * * @param isAnon Is anonymous function. * @param isRhsExpr Is expression in rhs context * @return Expression function body node */ private STNode parseExpressionFuncBody(boolean isAnon, boolean isRhsExpr) { STNode rightDoubleArrow = parseDoubleRightArrow(); STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false); STNode semiColon; if (isAnon) { semiColon = STNodeFactory.createEmptyNode(); } else { semiColon = parseSemicolon(); } return STNodeFactory.createExpressionFunctionBodyNode(rightDoubleArrow, expression, semiColon); } /** * Parse '=>' token. 
* * @return Double right arrow token */ private STNode parseDoubleRightArrow() { STToken token = peek(); if (token.kind == SyntaxKind.RIGHT_DOUBLE_ARROW_TOKEN) { return consume(); } else { recover(token, ParserRuleContext.EXPR_FUNC_BODY_START); return parseDoubleRightArrow(); } } private STNode parseImplicitAnonFunc(STNode params, boolean isRhsExpr) { switch (params.kind) { case SIMPLE_NAME_REFERENCE: case INFER_PARAM_LIST: break; case BRACED_EXPRESSION: params = getAnonFuncParam((STBracedExpressionNode) params); break; default: STToken syntheticParam = STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN); syntheticParam = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(syntheticParam, params, DiagnosticErrorCode.ERROR_INVALID_PARAM_LIST_IN_INFER_ANONYMOUS_FUNCTION_EXPR); params = STNodeFactory.createSimpleNameReferenceNode(syntheticParam); } STNode rightDoubleArrow = parseDoubleRightArrow(); STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false); return STNodeFactory.createImplicitAnonymousFunctionExpressionNode(params, rightDoubleArrow, expression); } /** * Create a new anon-func-param node from a braced expression. * * @param params Braced expression * @return Anon-func param node */ private STNode getAnonFuncParam(STBracedExpressionNode params) { List<STNode> paramList = new ArrayList<>(); paramList.add(params.expression); return STNodeFactory.createImplicitAnonymousFunctionParameters(params.openParen, STNodeFactory.createNodeList(paramList), params.closeParen); } /** * Parse implicit anon function expression. 
* * @param openParen Open parenthesis token * @param firstParam First parameter * @param isRhsExpr Is expression in rhs context * @return Implicit anon function expression node */ private STNode parseImplicitAnonFunc(STNode openParen, STNode firstParam, boolean isRhsExpr) { List<STNode> paramList = new ArrayList<>(); paramList.add(firstParam); STToken nextToken = peek(); STNode paramEnd; STNode param; while (!isEndOfAnonFuncParametersList(nextToken.kind)) { paramEnd = parseImplicitAnonFuncParamEnd(); if (paramEnd == null) { break; } paramList.add(paramEnd); param = parseIdentifier(ParserRuleContext.IMPLICIT_ANON_FUNC_PARAM); param = STNodeFactory.createSimpleNameReferenceNode(param); paramList.add(param); nextToken = peek(); } STNode params = STNodeFactory.createNodeList(paramList); STNode closeParen = parseCloseParenthesis(); endContext(); STNode inferedParams = STNodeFactory.createImplicitAnonymousFunctionParameters(openParen, params, closeParen); return parseImplicitAnonFunc(inferedParams, isRhsExpr); } private STNode parseImplicitAnonFuncParamEnd() { switch (peek().kind) { case COMMA_TOKEN: return parseComma(); case CLOSE_PAREN_TOKEN: return null; default: recover(peek(), ParserRuleContext.ANON_FUNC_PARAM_RHS); return parseImplicitAnonFuncParamEnd(); } } private boolean isEndOfAnonFuncParametersList(SyntaxKind tokenKind) { switch (tokenKind) { case EOF_TOKEN: case CLOSE_BRACE_TOKEN: case CLOSE_PAREN_TOKEN: case CLOSE_BRACKET_TOKEN: case SEMICOLON_TOKEN: case RETURNS_KEYWORD: case TYPE_KEYWORD: case LISTENER_KEYWORD: case IF_KEYWORD: case WHILE_KEYWORD: case DO_KEYWORD: case OPEN_BRACE_TOKEN: case RIGHT_DOUBLE_ARROW_TOKEN: return true; default: return false; } } /** * Parse tuple type descriptor. 
* <p> * <code>tuple-type-descriptor := [ tuple-member-type-descriptors ] * <br/><br/> * tuple-member-type-descriptors := member-type-descriptor (, member-type-descriptor)* [, tuple-rest-descriptor] * | [ tuple-rest-descriptor ] * <br/><br/> * tuple-rest-descriptor := type-descriptor ... * </code> * * @return */ private STNode parseTupleTypeDesc() { STNode openBracket = parseOpenBracket(); startContext(ParserRuleContext.TYPE_DESC_IN_TUPLE); STNode memberTypeDesc = parseTupleMemberTypeDescList(); STNode closeBracket = parseCloseBracket(); endContext(); openBracket = cloneWithDiagnosticIfListEmpty(memberTypeDesc, openBracket, DiagnosticErrorCode.ERROR_MISSING_TYPE_DESC); return STNodeFactory.createTupleTypeDescriptorNode(openBracket, memberTypeDesc, closeBracket); } /** * Parse tuple member type descriptors. * * @return Parsed node */ private STNode parseTupleMemberTypeDescList() { List<STNode> typeDescList = new ArrayList<>(); STToken nextToken = peek(); if (isEndOfTypeList(nextToken.kind)) { return STNodeFactory.createEmptyNodeList(); } STNode typeDesc = parseTypeDescriptorWithoutContext(ParserRuleContext.TYPE_DESC_IN_TUPLE, false); return parseTupleTypeMembers(typeDesc, typeDescList); } private STNode parseTupleTypeMembers(STNode typeDesc, List<STNode> typeDescList) { STToken nextToken; nextToken = peek(); STNode tupleMemberRhs; while (!isEndOfTypeList(nextToken.kind)) { tupleMemberRhs = parseTupleMemberRhs(); if (tupleMemberRhs == null) { break; } if (tupleMemberRhs.kind == SyntaxKind.ELLIPSIS_TOKEN) { typeDesc = STNodeFactory.createRestDescriptorNode(typeDesc, tupleMemberRhs); break; } typeDescList.add(typeDesc); typeDescList.add(tupleMemberRhs); typeDesc = parseTypeDescriptorWithoutContext(ParserRuleContext.TYPE_DESC_IN_TUPLE, false); nextToken = peek(); } typeDescList.add(typeDesc); return STNodeFactory.createNodeList(typeDescList); } private STNode parseTupleMemberRhs() { switch (peek().kind) { case COMMA_TOKEN: return parseComma(); case CLOSE_BRACKET_TOKEN: 
return null; case ELLIPSIS_TOKEN: return parseEllipsis(); default: recover(peek(), ParserRuleContext.TYPE_DESC_IN_TUPLE_RHS); return parseTupleMemberRhs(); } } private boolean isEndOfTypeList(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case CLOSE_BRACKET_TOKEN: case CLOSE_BRACE_TOKEN: case CLOSE_PAREN_TOKEN: case EOF_TOKEN: case EQUAL_TOKEN: case SEMICOLON_TOKEN: return true; default: return false; } } /** * Parse table constructor or query expression. * <p> * <code> * table-constructor-or-query-expr := table-constructor-expr | query-expr * <br/> * table-constructor-expr := table [key-specifier] [ [row-list] ] * <br/> * query-expr := [query-construct-type] query-pipeline select-clause * [query-construct-type] query-pipeline select-clause on-conflict-clause? limit-clause? * <br/> * query-construct-type := table key-specifier | stream * </code> * * @return Parsed node */ private STNode parseTableConstructorOrQuery(boolean isRhsExpr) { startContext(ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_EXPRESSION); STNode tableOrQueryExpr = parseTableConstructorOrQueryInternal(isRhsExpr); endContext(); return tableOrQueryExpr; } private STNode parseTableConstructorOrQueryInternal(boolean isRhsExpr) { STNode queryConstructType; switch (peek().kind) { case FROM_KEYWORD: queryConstructType = STNodeFactory.createEmptyNode(); return parseQueryExprRhs(queryConstructType, isRhsExpr); case STREAM_KEYWORD: queryConstructType = parseQueryConstructType(parseStreamKeyword(), null); return parseQueryExprRhs(queryConstructType, isRhsExpr); case TABLE_KEYWORD: STNode tableKeyword = parseTableKeyword(); return parseTableConstructorOrQuery(tableKeyword, isRhsExpr); default: recover(peek(), ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_START, isRhsExpr); return parseTableConstructorOrQueryInternal(isRhsExpr); } } private STNode parseTableConstructorOrQuery(STNode tableKeyword, boolean isRhsExpr) { STNode keySpecifier; STToken nextToken = peek(); switch (nextToken.kind) { case 
OPEN_BRACKET_TOKEN: keySpecifier = STNodeFactory.createEmptyNode(); return parseTableConstructorExprRhs(tableKeyword, keySpecifier); case KEY_KEYWORD: keySpecifier = parseKeySpecifier(); return parseTableConstructorOrQueryRhs(tableKeyword, keySpecifier, isRhsExpr); case IDENTIFIER_TOKEN: if (isKeyKeyword(nextToken)) { keySpecifier = parseKeySpecifier(); return parseTableConstructorOrQueryRhs(tableKeyword, keySpecifier, isRhsExpr); } break; default: break; } recover(peek(), ParserRuleContext.TABLE_KEYWORD_RHS, tableKeyword, isRhsExpr); return parseTableConstructorOrQuery(tableKeyword, isRhsExpr); } private STNode parseTableConstructorOrQueryRhs(STNode tableKeyword, STNode keySpecifier, boolean isRhsExpr) { switch (peek().kind) { case FROM_KEYWORD: return parseQueryExprRhs(parseQueryConstructType(tableKeyword, keySpecifier), isRhsExpr); case OPEN_BRACKET_TOKEN: return parseTableConstructorExprRhs(tableKeyword, keySpecifier); default: recover(peek(), ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_RHS, tableKeyword, keySpecifier, isRhsExpr); return parseTableConstructorOrQueryRhs(tableKeyword, keySpecifier, isRhsExpr); } } /** * Parse query construct type. * <p> * <code>query-construct-type := table key-specifier | stream</code> * * @return Parsed node */ private STNode parseQueryConstructType(STNode keyword, STNode keySpecifier) { return STNodeFactory.createQueryConstructTypeNode(keyword, keySpecifier); } /** * Parse query action or expression. * <p> * <code> * query-expr-rhs := query-pipeline select-clause * query-pipeline select-clause on-conflict-clause? limit-clause? 
* <br/> * query-pipeline := from-clause intermediate-clause* * </code> * * @param queryConstructType queryConstructType that precedes this rhs * @return Parsed node */ private STNode parseQueryExprRhs(STNode queryConstructType, boolean isRhsExpr) { switchContext(ParserRuleContext.QUERY_EXPRESSION); STNode fromClause = parseFromClause(isRhsExpr); List<STNode> clauses = new ArrayList<>(); STNode intermediateClause; STNode selectClause = null; while (!isEndOfIntermediateClause(peek().kind)) { intermediateClause = parseIntermediateClause(isRhsExpr); if (intermediateClause == null) { break; } if (selectClause != null) { selectClause = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(selectClause, intermediateClause, DiagnosticErrorCode.ERROR_MORE_CLAUSES_AFTER_SELECT_CLAUSE); continue; } if (intermediateClause.kind == SyntaxKind.SELECT_CLAUSE) { selectClause = intermediateClause; } else { clauses.add(intermediateClause); } } if (peek().kind == SyntaxKind.DO_KEYWORD) { STNode intermediateClauses = STNodeFactory.createNodeList(clauses); STNode queryPipeline = STNodeFactory.createQueryPipelineNode(fromClause, intermediateClauses); return parseQueryAction(queryConstructType, queryPipeline, selectClause, isRhsExpr); } if (selectClause == null) { STNode selectKeyword = SyntaxErrors.createMissingToken(SyntaxKind.SELECT_KEYWORD); STNode expr = STNodeFactory .createSimpleNameReferenceNode(SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN)); selectClause = STNodeFactory.createSelectClauseNode(selectKeyword, expr); if (clauses.isEmpty()) { fromClause = SyntaxErrors.addDiagnostic(fromClause, DiagnosticErrorCode.ERROR_MISSING_SELECT_CLAUSE); } else { int lastIndex = clauses.size() - 1; STNode intClauseWithDiagnostic = SyntaxErrors.addDiagnostic(clauses.get(lastIndex), DiagnosticErrorCode.ERROR_MISSING_SELECT_CLAUSE); clauses.set(lastIndex, intClauseWithDiagnostic); } } STNode intermediateClauses = STNodeFactory.createNodeList(clauses); STNode queryPipeline = 
STNodeFactory.createQueryPipelineNode(fromClause, intermediateClauses); STNode onConflictClause = parseOnConflictClause(isRhsExpr); STNode limitClause = parseLimitClause(isRhsExpr); return STNodeFactory.createQueryExpressionNode(queryConstructType, queryPipeline, selectClause, onConflictClause, limitClause); } /** * Parse an intermediate clause. * <p> * <code> * intermediate-clause := from-clause | where-clause | let-clause | join-clause | order-by-clause * </code> * * @return Parsed node */ private STNode parseIntermediateClause(boolean isRhsExpr) { switch (peek().kind) { case FROM_KEYWORD: return parseFromClause(isRhsExpr); case WHERE_KEYWORD: return parseWhereClause(isRhsExpr); case LET_KEYWORD: return parseLetClause(isRhsExpr); case SELECT_KEYWORD: return parseSelectClause(isRhsExpr); case JOIN_KEYWORD: case OUTER_KEYWORD: return parseJoinClause(isRhsExpr); case ORDER_KEYWORD: case BY_KEYWORD: case ASCENDING_KEYWORD: case DESCENDING_KEYWORD: return parseOrderByClause(isRhsExpr); case DO_KEYWORD: case SEMICOLON_TOKEN: case ON_KEYWORD: case CONFLICT_KEYWORD: case LIMIT_KEYWORD: return null; default: recover(peek(), ParserRuleContext.QUERY_PIPELINE_RHS, isRhsExpr); return parseIntermediateClause(isRhsExpr); } } /** * Parse join-keyword. * * @return Join-keyword node */ private STNode parseJoinKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.JOIN_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.JOIN_KEYWORD); return parseJoinKeyword(); } } /** * Parse equals keyword. 
* * @return Parsed node */ private STNode parseEqualsKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.EQUALS_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.EQUALS_KEYWORD); return parseEqualsKeyword(); } } private boolean isEndOfIntermediateClause(SyntaxKind tokenKind) { switch (tokenKind) { case CLOSE_BRACE_TOKEN: case CLOSE_PAREN_TOKEN: case CLOSE_BRACKET_TOKEN: case OPEN_BRACE_TOKEN: case SEMICOLON_TOKEN: case PUBLIC_KEYWORD: case FUNCTION_KEYWORD: case EOF_TOKEN: case RESOURCE_KEYWORD: case LISTENER_KEYWORD: case DOCUMENTATION_STRING: case PRIVATE_KEYWORD: case RETURNS_KEYWORD: case SERVICE_KEYWORD: case TYPE_KEYWORD: case CONST_KEYWORD: case FINAL_KEYWORD: case DO_KEYWORD: return true; default: return isValidExprRhsStart(tokenKind, SyntaxKind.NONE); } } /** * Parse from clause. * <p> * <code>from-clause := from typed-binding-pattern in expression</code> * * @return Parsed node */ private STNode parseFromClause(boolean isRhsExpr) { STNode fromKeyword = parseFromKeyword(); STNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.FROM_CLAUSE); STNode inKeyword = parseInKeyword(); STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false); return STNodeFactory.createFromClauseNode(fromKeyword, typedBindingPattern, inKeyword, expression); } /** * Parse from-keyword. * * @return From-keyword node */ private STNode parseFromKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.FROM_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.FROM_KEYWORD); return parseFromKeyword(); } } /** * Parse where clause. * <p> * <code>where-clause := where expression</code> * * @return Parsed node */ private STNode parseWhereClause(boolean isRhsExpr) { STNode whereKeyword = parseWhereKeyword(); STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false); return STNodeFactory.createWhereClauseNode(whereKeyword, expression); } /** * Parse where-keyword. 
* * @return Where-keyword node */ private STNode parseWhereKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.WHERE_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.WHERE_KEYWORD); return parseWhereKeyword(); } } /** * Parse let clause. * <p> * <code>let-clause := let let-var-decl [, let-var-decl]* </code> * * @return Parsed node */ private STNode parseLetClause(boolean isRhsExpr) { STNode letKeyword = parseLetKeyword(); STNode letVarDeclarations = parseLetVarDeclarations(ParserRuleContext.LET_CLAUSE_LET_VAR_DECL, isRhsExpr); letKeyword = cloneWithDiagnosticIfListEmpty(letVarDeclarations, letKeyword, DiagnosticErrorCode.ERROR_MISSING_LET_VARIABLE_DECLARATION); return STNodeFactory.createLetClauseNode(letKeyword, letVarDeclarations); } /** * Parse order-keyword. * * @return Order-keyword node */ private STNode parseOrderKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.ORDER_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.ORDER_KEYWORD); return parseOrderKeyword(); } } /** * Parse by-keyword. * * @return By-keyword node */ private STNode parseByKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.BY_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.BY_KEYWORD); return parseByKeyword(); } } /** * Parse order by clause. * <p> * <code>order-by-clause := order by order-key-list * </code> * * @return Parsed node */ private STNode parseOrderByClause(boolean isRhsExpr) { STNode orderKeyword = parseOrderKeyword(); STNode byKeyword = parseByKeyword(); STNode orderKeys = parseOrderKeyList(isRhsExpr); byKeyword = cloneWithDiagnosticIfListEmpty(orderKeys, byKeyword, DiagnosticErrorCode.ERROR_MISSING_ORDER_KEY); return STNodeFactory.createOrderByClauseNode(orderKeyword, byKeyword, orderKeys); } /** * Parse order key. 
* <p> * <code>order-key-list := order-key [, order-key]*</code> * * @return Parsed node */ private STNode parseOrderKeyList(boolean isRhsExpr) { startContext(ParserRuleContext.ORDER_KEY_LIST); List<STNode> orderKeys = new ArrayList<>(); STToken nextToken = peek(); if (isEndOfOrderKeys(nextToken.kind)) { endContext(); return STNodeFactory.createEmptyNodeList(); } STNode orderKey = parseOrderKey(isRhsExpr); orderKeys.add(orderKey); nextToken = peek(); STNode orderKeyListMemberEnd; while (!isEndOfOrderKeys(nextToken.kind)) { orderKeyListMemberEnd = parseOrderKeyListMemberEnd(); if (orderKeyListMemberEnd == null) { break; } orderKeys.add(orderKeyListMemberEnd); orderKey = parseOrderKey(isRhsExpr); orderKeys.add(orderKey); nextToken = peek(); } endContext(); return STNodeFactory.createNodeList(orderKeys); } private boolean isEndOfOrderKeys(SyntaxKind tokenKind) { switch (tokenKind) { case COMMA_TOKEN: case ASCENDING_KEYWORD: case DESCENDING_KEYWORD: return false; case SEMICOLON_TOKEN: case EOF_TOKEN: return true; default: return isQueryClauseStartToken(tokenKind); } } private boolean isQueryClauseStartToken(SyntaxKind tokenKind) { switch (tokenKind) { case SELECT_KEYWORD: case LET_KEYWORD: case WHERE_KEYWORD: case OUTER_KEYWORD: case JOIN_KEYWORD: case ORDER_KEYWORD: case DO_KEYWORD: case FROM_KEYWORD: case LIMIT_KEYWORD: return true; default: return false; } } private STNode parseOrderKeyListMemberEnd() { STToken nextToken = peek(); switch (nextToken.kind) { case COMMA_TOKEN: return parseComma(); case EOF_TOKEN: return null; default: if (isQueryClauseStartToken(nextToken.kind)) { return null; } recover(peek(), ParserRuleContext.ORDER_KEY_LIST_END); return parseOrderKeyListMemberEnd(); } } /** * Parse order key. 
* <p> * <code>order-key := expression (ascending | descending)?</code> * * @return Parsed node */ private STNode parseOrderKey(boolean isRhsExpr) { STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false); STNode orderDirection; STToken nextToken = peek(); switch (nextToken.kind) { case ASCENDING_KEYWORD: case DESCENDING_KEYWORD: orderDirection = consume(); break; default: orderDirection = STNodeFactory.createEmptyNode(); } return STNodeFactory.createOrderKeyNode(expression, orderDirection); } /** * Parse select clause. * <p> * <code>select-clause := select expression</code> * * @return Parsed node */ private STNode parseSelectClause(boolean isRhsExpr) { startContext(ParserRuleContext.SELECT_CLAUSE); STNode selectKeyword = parseSelectKeyword(); STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false); endContext(); return STNodeFactory.createSelectClauseNode(selectKeyword, expression); } /** * Parse select-keyword. * * @return Select-keyword node */ private STNode parseSelectKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.SELECT_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.SELECT_KEYWORD); return parseSelectKeyword(); } } /** * Parse on-conflict clause. * <p> * <code> * onConflictClause := on conflict expression * </code> * * @return On conflict clause node */ private STNode parseOnConflictClause(boolean isRhsExpr) { STToken nextToken = peek(); if (nextToken.kind != SyntaxKind.ON_KEYWORD && nextToken.kind != SyntaxKind.CONFLICT_KEYWORD) { return STNodeFactory.createEmptyNode(); } startContext(ParserRuleContext.ON_CONFLICT_CLAUSE); STNode onKeyword = parseOnKeyword(); STNode conflictKeyword = parseConflictKeyword(); STNode expr = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false); endContext(); return STNodeFactory.createOnConflictClauseNode(onKeyword, conflictKeyword, expr); } /** * Parse conflict keyword. 
 *
 * @return Conflict keyword node
 */
private STNode parseConflictKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.CONFLICT_KEYWORD) {
        return consume();
    } else {
        recover(token, ParserRuleContext.CONFLICT_KEYWORD);
        return parseConflictKeyword();
    }
}

/**
 * Parse limit clause.
 * <p>
 * <code>limitClause := limit expression</code>
 *
 * @param isRhsExpr Whether the limit expression is parsed in an expression-RHS context
 * @return Limit expression node, or an empty node when no <code>limit</code> keyword follows
 */
private STNode parseLimitClause(boolean isRhsExpr) {
    STToken nextToken = peek();
    // The limit clause is optional: absence is represented by an empty node.
    if (nextToken.kind != SyntaxKind.LIMIT_KEYWORD) {
        return STNodeFactory.createEmptyNode();
    }

    STNode limitKeyword = consume();
    STNode expr = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
    return STNodeFactory.createLimitClauseNode(limitKeyword, expr);
}

/**
 * Parse join clause.
 * <p>
 * <code>
 * join-clause := (join-var-decl | outer-join-var-decl) in expression on-clause
 * <br/>
 * join-var-decl := join (typeName | var) bindingPattern
 * <br/>
 * outer-join-var-decl := outer join var binding-pattern
 * </code>
 *
 * @param isRhsExpr Whether the join expressions are parsed in an expression-RHS context
 * @return Join clause
 */
private STNode parseJoinClause(boolean isRhsExpr) {
    startContext(ParserRuleContext.JOIN_CLAUSE);
    STNode outerKeyword;
    STToken nextToken = peek();
    // `outer` is optional; when absent an empty node takes its place.
    if (nextToken.kind == SyntaxKind.OUTER_KEYWORD) {
        outerKeyword = consume();
    } else {
        outerKeyword = STNodeFactory.createEmptyNode();
    }

    STNode joinKeyword = parseJoinKeyword();
    STNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.JOIN_CLAUSE);
    STNode inKeyword = parseInKeyword();
    STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
    endContext();
    // Parsed outside the JOIN_CLAUSE context; a missing on-clause is synthesized with
    // diagnostics inside parseOnClause().
    STNode onCondition = parseOnClause(isRhsExpr);
    return STNodeFactory.createJoinClauseNode(outerKeyword, joinKeyword, typedBindingPattern, inKeyword, expression,
            onCondition);
}

/**
 * Parse on clause.
 * <p>
 * <code>on clause := `on` expression `equals` expression</code>
 *
 * @param isRhsExpr Whether the clause expressions are parsed in an expression-RHS context
 * @return On clause node
 */
private STNode parseOnClause(boolean isRhsExpr) {
    STToken nextToken = peek();
    // If the next token already starts another query clause, the on-clause is absent:
    // synthesize one made of missing tokens (with diagnostics) instead of recovering.
    if (isQueryClauseStartToken(nextToken.kind)) {
        return createMissingOnClauseNode();
    }

    startContext(ParserRuleContext.ON_CLAUSE);
    STNode onKeyword = parseOnKeyword();
    STNode lhsExpression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
    STNode equalsKeyword = parseEqualsKeyword();
    // Context ends before the RHS expression is parsed.
    endContext();
    STNode rhsExpression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
    return STNodeFactory.createOnClauseNode(onKeyword, lhsExpression, equalsKeyword, rhsExpression);
}

/**
 * Create an on-clause node in which every part is a missing token carrying a
 * missing-token diagnostic. The same missing identifier backs both expressions.
 *
 * @return Synthesized on-clause node
 */
private STNode createMissingOnClauseNode() {
    STNode onKeyword = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.ON_KEYWORD,
            DiagnosticErrorCode.ERROR_MISSING_ON_KEYWORD);
    STNode identifier = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,
            DiagnosticErrorCode.ERROR_MISSING_IDENTIFIER);
    STNode equalsKeyword = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.EQUALS_KEYWORD,
            DiagnosticErrorCode.ERROR_MISSING_EQUALS_KEYWORD);

    STNode lhsExpression = STNodeFactory.createSimpleNameReferenceNode(identifier);
    STNode rhsExpression = STNodeFactory.createSimpleNameReferenceNode(identifier);
    return STNodeFactory.createOnClauseNode(onKeyword, lhsExpression, equalsKeyword, rhsExpression);
}

/**
 * Parse start action.
 * <p>
 * <code>start-action := [annots] start (function-call-expr|method-call-expr|remote-method-call-action)</code>
 *
 * @param annots Annotations preceding the action (may be empty)
 * @return Start action node
 */
private STNode parseStartAction(STNode annots) {
    STNode startKeyword = parseStartKeyword();
    STNode expr = parseActionOrExpression();

    // Validate the parsed expression: only invocations may follow `start`.
    switch (expr.kind) {
        case FUNCTION_CALL:
        case METHOD_CALL:
        case REMOTE_METHOD_CALL_ACTION:
            break;
        case SIMPLE_NAME_REFERENCE:
        case QUALIFIED_NAME_REFERENCE:
            // A bare name: wrap it as a call with missing parentheses, each carrying
            // a missing-token diagnostic.
            STNode openParenToken = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.OPEN_PAREN_TOKEN,
                    DiagnosticErrorCode.ERROR_MISSING_OPEN_PAREN_TOKEN);
            STNode arguments = STNodeFactory.createEmptyNodeList();
            STNode closeParenToken = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.CLOSE_PAREN_TOKEN,
                    DiagnosticErrorCode.ERROR_MISSING_CLOSE_PAREN_TOKEN);
            expr = STNodeFactory.createFunctionCallExpressionNode(expr, openParenToken, arguments, closeParenToken);
            break;
        default:
            // Anything else is invalid: attach the whole expression to `start` as invalid
            // minutiae and substitute an entirely-missing function call. Note: the locals
            // declared in the previous case label are deliberately reused here (same
            // switch-block scope).
            startKeyword = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startKeyword, expr,
                    DiagnosticErrorCode.ERROR_INVALID_EXPRESSION_IN_START_ACTION);
            STNode funcName = SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
            funcName = STNodeFactory.createSimpleNameReferenceNode(funcName);
            openParenToken = SyntaxErrors.createMissingToken(SyntaxKind.OPEN_PAREN_TOKEN);
            arguments = STNodeFactory.createEmptyNodeList();
            closeParenToken = SyntaxErrors.createMissingToken(SyntaxKind.CLOSE_PAREN_TOKEN);
            expr = STNodeFactory.createFunctionCallExpressionNode(funcName, openParenToken, arguments,
                    closeParenToken);
            break;
    }

    return STNodeFactory.createStartActionNode(getAnnotations(annots), startKeyword, expr);
}

/**
 * Parse start keyword.
 *
 * @return Start keyword node
 */
private STNode parseStartKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.START_KEYWORD) {
        return consume();
    } else {
        recover(token, ParserRuleContext.START_KEYWORD);
        return parseStartKeyword();
    }
}

/**
 * Parse flush action.
 * <p>
 * <code>flush-action := flush [peer-worker]</code>
 *
 * @return flush action node
 */
private STNode parseFlushAction() {
    STNode flushKeyword = parseFlushKeyword();
    STNode peerWorker = parseOptionalPeerWorkerName();
    return STNodeFactory.createFlushActionNode(flushKeyword, peerWorker);
}

/**
 * Parse flush keyword.
 *
 * @return flush keyword node
 */
private STNode parseFlushKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.FLUSH_KEYWORD) {
        return consume();
    } else {
        recover(token, ParserRuleContext.FLUSH_KEYWORD);
        return parseFlushKeyword();
    }
}

/**
 * Parse peer worker.
 * <p>
 * <code>peer-worker := worker-name | default</code>
 *
 * @return peer worker name node, or an empty node if no worker name follows
 */
private STNode parseOptionalPeerWorkerName() {
    STToken token = peek();
    switch (token.kind) {
        case IDENTIFIER_TOKEN:
        case DEFAULT_KEYWORD:
            return STNodeFactory.createSimpleNameReferenceNode(consume());
        default:
            // The peer worker is optional; absence is not an error.
            return STNodeFactory.createEmptyNode();
    }
}

/**
 * Parse intersection type descriptor.
 * <p>
 * intersection-type-descriptor := type-descriptor & type-descriptor
 * </p>
 *
 * @return Parsed node
 */
private STNode parseIntersectionTypeDescriptor(STNode leftTypeDesc, ParserRuleContext context,
                                               boolean isTypedBindingPattern) {
    // We only reach here after seeing the `&` token, hence the direct consume.
    STNode bitwiseAndToken = consume();
    STNode rightTypeDesc = parseTypeDescriptor(context, isTypedBindingPattern, false);
    return createIntersectionTypeDesc(leftTypeDesc, bitwiseAndToken, rightTypeDesc);
}

// Builds the intersection node, flagging `var` if used on either side.
private STNode createIntersectionTypeDesc(STNode leftTypeDesc, STNode bitwiseAndToken, STNode rightTypeDesc) {
    leftTypeDesc = validateForUsageOfVar(leftTypeDesc);
    rightTypeDesc = validateForUsageOfVar(rightTypeDesc);
    return STNodeFactory.createIntersectionTypeDescriptorNode(leftTypeDesc, bitwiseAndToken, rightTypeDesc);
}

/**
 * Parse singleton type descriptor.
 * <p>
 * singleton-type-descriptor := simple-const-expr
 * simple-const-expr :=
 * nil-literal
 * | boolean-literal
 * | [Sign] int-literal
 * | [Sign] floating-point-literal
 * | string-literal
 * | constant-reference-expr
 * </p>
 */
private STNode parseSingletonTypeDesc() {
    STNode simpleContExpr = parseSimpleConstExpr();
    return STNodeFactory.createSingletonTypeDescriptorNode(simpleContExpr);
}

// Parses a signed numeric literal (e.g. `-5`, `+2.0`) as a unary expression.
private STNode parseSignedIntOrFloat() {
    STNode operator = parseUnaryOperator();
    STNode literal;
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case HEX_INTEGER_LITERAL_TOKEN:
        case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
        case HEX_FLOATING_POINT_LITERAL_TOKEN:
            literal = parseBasicLiteral();
            break;
        default: // decimal integer literal
            literal = parseDecimalIntLiteral(ParserRuleContext.DECIMAL_INTEGER_LITERAL_TOKEN);
            literal = STNodeFactory.createBasicLiteralNode(SyntaxKind.NUMERIC_LITERAL, literal);
    }
    return STNodeFactory.createUnaryExpressionNode(operator, literal);
}

// Checks whether the given token can start a singleton type descriptor,
// looking one token ahead to disambiguate from plain expressions.
private boolean isSingletonTypeDescStart(SyntaxKind tokenKind, boolean inTypeDescCtx) {
    STToken nextNextToken = getNextNextToken(tokenKind);
    switch (tokenKind) {
        case STRING_LITERAL_TOKEN:
        case DECIMAL_INTEGER_LITERAL_TOKEN:
        case HEX_INTEGER_LITERAL_TOKEN:
        case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
        case HEX_FLOATING_POINT_LITERAL_TOKEN:
        case TRUE_KEYWORD:
        case FALSE_KEYWORD:
        case NULL_KEYWORD:
            if (inTypeDescCtx || isValidTypeDescRHSOutSideTypeDescCtx(nextNextToken)) {
                return true;
            }
            return false;
        case PLUS_TOKEN:
        case MINUS_TOKEN:
            // A sign starts a singleton type only when followed by a number.
            return isIntOrFloat(nextNextToken);
        default:
            return false;
    }
}

static boolean isIntOrFloat(STToken token) {
    switch (token.kind) {
        case DECIMAL_INTEGER_LITERAL_TOKEN:
        case HEX_INTEGER_LITERAL_TOKEN:
        case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
        case HEX_FLOATING_POINT_LITERAL_TOKEN:
            return true;
        default:
            return false;
    }
}

// Checks whether the token may legally follow a type descriptor when we are
// not already inside a type-descriptor context.
private boolean isValidTypeDescRHSOutSideTypeDescCtx(STToken token) {
    switch (token.kind) {
        case IDENTIFIER_TOKEN:
        case QUESTION_MARK_TOKEN:
        case OPEN_PAREN_TOKEN:
        case OPEN_BRACKET_TOKEN:
        case PIPE_TOKEN:
        case BITWISE_AND_TOKEN:
        case OPEN_BRACE_TOKEN:
        case ERROR_KEYWORD:
            return true;
        default:
            return false;
    }
}

/**
 * Check whether the parser reached to a valid expression start.
 *
 * @param nextTokenKind Kind of the next immediate token.
 * @param nextTokenIndex Index to the next token.
 * @return <code>true</code> if this is a start of a valid expression. <code>false</code> otherwise
 */
private boolean isValidExpressionStart(SyntaxKind nextTokenKind, int nextTokenIndex) {
    nextTokenIndex++;
    switch (nextTokenKind) {
        case DECIMAL_INTEGER_LITERAL_TOKEN:
        case HEX_INTEGER_LITERAL_TOKEN:
        case STRING_LITERAL_TOKEN:
        case NULL_KEYWORD:
        case TRUE_KEYWORD:
        case FALSE_KEYWORD:
        case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
        case HEX_FLOATING_POINT_LITERAL_TOKEN:
            // A literal starts an expression only if what follows it can
            // legally follow an expression.
            SyntaxKind nextNextTokenKind = peek(nextTokenIndex).kind;
            return nextNextTokenKind == SyntaxKind.SEMICOLON_TOKEN || nextNextTokenKind == SyntaxKind.COMMA_TOKEN ||
                    nextNextTokenKind == SyntaxKind.CLOSE_BRACKET_TOKEN ||
                    isValidExprRhsStart(nextNextTokenKind, SyntaxKind.SIMPLE_NAME_REFERENCE);
        case IDENTIFIER_TOKEN:
            return isValidExprRhsStart(peek(nextTokenIndex).kind, SyntaxKind.SIMPLE_NAME_REFERENCE);
        case OPEN_PAREN_TOKEN:
        case CHECK_KEYWORD:
        case CHECKPANIC_KEYWORD:
        case OPEN_BRACE_TOKEN:
        case TYPEOF_KEYWORD:
        case NEGATION_TOKEN:
        case EXCLAMATION_MARK_TOKEN:
        case TRAP_KEYWORD:
        case OPEN_BRACKET_TOKEN:
        case LT_TOKEN:
        case FROM_KEYWORD:
        case LET_KEYWORD:
        case BACKTICK_TOKEN:
        case NEW_KEYWORD:
        case LEFT_ARROW_TOKEN:
            return true;
        case PLUS_TOKEN:
        case MINUS_TOKEN:
            // A sign starts an expression if whatever follows it does.
            return isValidExpressionStart(peek(nextTokenIndex).kind, nextTokenIndex);
        case FUNCTION_KEYWORD:
        case TABLE_KEYWORD:
            return peek(nextTokenIndex).kind == SyntaxKind.FROM_KEYWORD;
        case STREAM_KEYWORD:
            STToken nextNextToken = peek(nextTokenIndex);
            return nextNextToken.kind == SyntaxKind.KEY_KEYWORD ||
                    nextNextToken.kind == SyntaxKind.OPEN_BRACKET_TOKEN ||
                    nextNextToken.kind == SyntaxKind.FROM_KEYWORD;
        case ERROR_KEYWORD:
            return peek(nextTokenIndex).kind == SyntaxKind.OPEN_PAREN_TOKEN;
        case SERVICE_KEYWORD:
            return peek(nextTokenIndex).kind == SyntaxKind.OPEN_BRACE_TOKEN;
        case XML_KEYWORD:
        case STRING_KEYWORD:
            return peek(nextTokenIndex).kind == SyntaxKind.BACKTICK_TOKEN;
        case START_KEYWORD:
        case FLUSH_KEYWORD:
        case WAIT_KEYWORD:
            // Explicitly listed; these fall through to false.
        default:
            return false;
    }
}

/**
 * Parse sync send action.
 * <p>
 * <code>sync-send-action := expression ->> peer-worker</code>
 *
 * @param expression LHS expression of the sync send action
 * @return Sync send action node
 */
private STNode parseSyncSendAction(STNode expression) {
    STNode syncSendToken = parseSyncSendToken();
    STNode peerWorker = parsePeerWorkerName();
    return STNodeFactory.createSyncSendActionNode(expression, syncSendToken, peerWorker);
}

/**
 * Parse peer worker.
 * <p>
 * <code>peer-worker := worker-name | default</code>
 *
 * @return peer worker name node
 */
private STNode parsePeerWorkerName() {
    STToken token = peek();
    switch (token.kind) {
        case IDENTIFIER_TOKEN:
        case DEFAULT_KEYWORD:
            return STNodeFactory.createSimpleNameReferenceNode(consume());
        default:
            recover(token, ParserRuleContext.PEER_WORKER_NAME);
            return parsePeerWorkerName();
    }
}

/**
 * Parse sync send token.
 * <p>
 * <code>sync-send-token := ->> </code>
 *
 * @return sync send token
 */
private STNode parseSyncSendToken() {
    STToken token = peek();
    if (token.kind == SyntaxKind.SYNC_SEND_TOKEN) {
        return consume();
    } else {
        recover(token, ParserRuleContext.SYNC_SEND_TOKEN);
        return parseSyncSendToken();
    }
}

/**
 * Parse receive action.
 * <p>
 * <code>receive-action := single-receive-action | multiple-receive-action</code>
 *
 * @return Receive action
 */
private STNode parseReceiveAction() {
    STNode leftArrow = parseLeftArrowToken();
    STNode receiveWorkers = parseReceiveWorkers();
    return STNodeFactory.createReceiveActionNode(leftArrow, receiveWorkers);
}

// Dispatches between a single receiver (`<- worker`) and a multiple-receive
// block (`<- { ... }`).
private STNode parseReceiveWorkers() {
    switch (peek().kind) {
        case DEFAULT_KEYWORD:
        case IDENTIFIER_TOKEN:
            return parsePeerWorkerName();
        case OPEN_BRACE_TOKEN:
            return parseMultipleReceiveWorkers();
        default:
            recover(peek(), ParserRuleContext.RECEIVE_WORKERS);
            return parseReceiveWorkers();
    }
}

/**
 * Parse multiple worker receivers.
 * <p>
 * <code>{ receive-field (, receive-field)* }</code>
 *
 * @return Multiple worker receiver node
 */
private STNode parseMultipleReceiveWorkers() {
    startContext(ParserRuleContext.MULTI_RECEIVE_WORKERS);
    STNode openBrace = parseOpenBrace();
    STNode receiveFields = parseReceiveFields();
    STNode closeBrace = parseCloseBrace();
    endContext();

    // An empty field list is an error; attach the diagnostic to the brace.
    openBrace = cloneWithDiagnosticIfListEmpty(receiveFields, openBrace,
            DiagnosticErrorCode.ERROR_MISSING_RECEIVE_FIELD_IN_RECEIVE_ACTION);
    return STNodeFactory.createReceiveFieldsNode(openBrace, receiveFields, closeBrace);
}

private STNode parseReceiveFields() {
    List<STNode> receiveFields = new ArrayList<>();
    STToken nextToken = peek();

    // Return an empty node list if no fields are present.
    if (isEndOfReceiveFields(nextToken.kind)) {
        return STNodeFactory.createEmptyNodeList();
    }

    // First receive field has no leading comma.
    STNode receiveField = parseReceiveField();
    receiveFields.add(receiveField);

    // Parse the remaining fields, each preceded by a comma separator.
    nextToken = peek();
    STNode recieveFieldEnd;
    while (!isEndOfReceiveFields(nextToken.kind)) {
        recieveFieldEnd = parseReceiveFieldEnd();
        if (recieveFieldEnd == null) {
            break;
        }

        receiveFields.add(recieveFieldEnd);
        receiveField = parseReceiveField();
        receiveFields.add(receiveField);
        nextToken = peek();
    }

    return STNodeFactory.createNodeList(receiveFields);
}

private boolean isEndOfReceiveFields(SyntaxKind nextTokenKind) {
    switch (nextTokenKind) {
        case EOF_TOKEN:
        case CLOSE_BRACE_TOKEN:
            return true;
        default:
            return false;
    }
}

private STNode parseReceiveFieldEnd() {
    switch (peek().kind) {
        case COMMA_TOKEN:
            return parseComma();
        case CLOSE_BRACE_TOKEN:
            // End of the field list; no separator to consume.
            return null;
        default:
            recover(peek(), ParserRuleContext.RECEIVE_FIELD_END);
            return parseReceiveFieldEnd();
    }
}

/**
 * Parse receive field.
 * <p>
 * <code>receive-field := peer-worker | field-name : peer-worker</code>
 *
 * @return Receiver field node
 */
private STNode parseReceiveField() {
    switch (peek().kind) {
        case DEFAULT_KEYWORD:
            STNode defaultKeyword = parseDefaultKeyword();
            return STNodeFactory.createSimpleNameReferenceNode(defaultKeyword);
        case IDENTIFIER_TOKEN:
            STNode identifier = parseIdentifier(ParserRuleContext.RECEIVE_FIELD_NAME);
            return createQualifiedReceiveField(identifier);
        default:
            recover(peek(), ParserRuleContext.RECEIVE_FIELD);
            return parseReceiveField();
    }
}

// Extends the identifier to `field-name : peer-worker` when a colon follows.
private STNode createQualifiedReceiveField(STNode identifier) {
    if (peek().kind != SyntaxKind.COLON_TOKEN) {
        return identifier;
    }

    STNode colon = parseColon();
    STNode peerWorker = parsePeerWorkerName();
    return STNodeFactory.createQualifiedNameReferenceNode(identifier, colon, peerWorker);
}

/**
 * Parse left arrow (<-) token.
 *
 * @return left arrow token
 */
private STNode parseLeftArrowToken() {
    STToken token = peek();
    if (token.kind == SyntaxKind.LEFT_ARROW_TOKEN) {
        return consume();
    } else {
        recover(token, ParserRuleContext.LEFT_ARROW_TOKEN);
        return parseLeftArrowToken();
    }
}

/**
 * Parse signed right shift token (>>).
 *
 * @return Parsed node
 */
private STNode parseSignedRightShiftToken() {
    STNode openGTToken = consume();
    STToken endLGToken = consume();
    // Merge the two `>` tokens into a single `>>` token, keeping the outermost minutiae.
    STNode doubleGTToken = STNodeFactory.createToken(SyntaxKind.DOUBLE_GT_TOKEN, openGTToken.leadingMinutiae(),
            endLGToken.trailingMinutiae());

    // Whitespace between the two `>` tokens is not allowed.
    if (hasTrailingMinutiae(openGTToken)) {
        doubleGTToken = SyntaxErrors.addDiagnostic(doubleGTToken,
                DiagnosticErrorCode.ERROR_NO_WHITESPACES_ALLOWED_IN_RIGHT_SHIFT_OP);
    }
    return doubleGTToken;
}

/**
 * Parse unsigned right shift token (>>>).
 *
 * @return Parsed node
 */
private STNode parseUnsignedRightShiftToken() {
    STNode openGTToken = consume();
    STNode middleGTToken = consume();
    STNode endLGToken = consume();
    // Merge the three `>` tokens into one `>>>` token, keeping the outermost minutiae.
    STNode unsignedRightShiftToken = STNodeFactory.createToken(SyntaxKind.TRIPPLE_GT_TOKEN,
            openGTToken.leadingMinutiae(), endLGToken.trailingMinutiae());

    // Whitespace between any of the `>` tokens is not allowed.
    boolean validOpenGTToken = !hasTrailingMinutiae(openGTToken);
    boolean validMiddleGTToken = !hasTrailingMinutiae(middleGTToken);
    if (validOpenGTToken && validMiddleGTToken) {
        return unsignedRightShiftToken;
    }

    unsignedRightShiftToken = SyntaxErrors.addDiagnostic(unsignedRightShiftToken,
            DiagnosticErrorCode.ERROR_NO_WHITESPACES_ALLOWED_IN_UNSIGNED_RIGHT_SHIFT_OP);
    return unsignedRightShiftToken;
}

/**
 * Parse wait action.
 * <p>
 * <code>wait-action := single-wait-action | multiple-wait-action | alternate-wait-action </code>
 *
 * @return Wait action node
 */
private STNode parseWaitAction() {
    STNode waitKeyword = parseWaitKeyword();
    // An open brace means a multiple-wait action; otherwise single/alternate.
    if (peek().kind == SyntaxKind.OPEN_BRACE_TOKEN) {
        return parseMultiWaitAction(waitKeyword);
    }

    return parseSingleOrAlternateWaitAction(waitKeyword);
}

/**
 * Parse wait keyword.
 *
 * @return wait keyword
 */
private STNode parseWaitKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.WAIT_KEYWORD) {
        return consume();
    } else {
        recover(token, ParserRuleContext.WAIT_KEYWORD);
        return parseWaitKeyword();
    }
}

/**
 * Parse single or alternate wait actions.
 * <p>
 * <code>
 * alternate-or-single-wait-action := wait wait-future-expr (| wait-future-expr)+
 * <br/>
 * wait-future-expr := expression but not mapping-constructor-expr
 * </code>
 *
 * @param waitKeyword wait keyword
 * @return Single or alternate wait action node
 */
private STNode parseSingleOrAlternateWaitAction(STNode waitKeyword) {
    startContext(ParserRuleContext.ALTERNATE_WAIT_EXPRS);
    STToken nextToken = peek();

    // No expression follows the wait keyword: report a missing wait-future-expr.
    if (isEndOfWaitFutureExprList(nextToken.kind)) {
        endContext();
        STNode waitFutureExprs = STNodeFactory
                .createSimpleNameReferenceNode(STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN));
        waitFutureExprs = SyntaxErrors.addDiagnostic(waitFutureExprs,
                DiagnosticErrorCode.ERROR_MISSING_WAIT_FUTURE_EXPRESSION);
        return STNodeFactory.createWaitActionNode(waitKeyword, waitFutureExprs);
    }

    // First wait-future-expr has no leading `|`.
    List<STNode> waitFutureExprList = new ArrayList<>();
    STNode waitField = parseWaitFutureExpr();
    waitFutureExprList.add(waitField);

    // Parse remaining wait-future expressions, separated by `|`.
    nextToken = peek();
    STNode waitFutureExprEnd;
    while (!isEndOfWaitFutureExprList(nextToken.kind)) {
        waitFutureExprEnd = parseWaitFutureExprEnd();
        if (waitFutureExprEnd == null) {
            break;
        }

        waitFutureExprList.add(waitFutureExprEnd);
        waitField = parseWaitFutureExpr();
        waitFutureExprList.add(waitField);
        nextToken = peek();
    }

    endContext();
    // NOTE(review): only the first wait-future-expr is kept; alternates
    // collected above are parsed but dropped — presumably intentional until
    // alternate wait actions are fully supported. TODO confirm.
    return STNodeFactory.createWaitActionNode(waitKeyword, waitFutureExprList.get(0));
}

private boolean isEndOfWaitFutureExprList(SyntaxKind nextTokenKind) {
    switch (nextTokenKind) {
        case EOF_TOKEN:
        case CLOSE_BRACE_TOKEN:
        case SEMICOLON_TOKEN:
        case OPEN_BRACE_TOKEN:
            return true;
        case PIPE_TOKEN:
        default:
            return false;
    }
}

private STNode parseWaitFutureExpr() {
    STNode waitFutureExpr = parseActionOrExpression();
    if (waitFutureExpr.kind == SyntaxKind.MAPPING_CONSTRUCTOR) {
        // A mapping constructor is not allowed as a wait-future-expr.
        waitFutureExpr = SyntaxErrors.addDiagnostic(waitFutureExpr,
                DiagnosticErrorCode.ERROR_MAPPING_CONSTRUCTOR_EXPR_AS_A_WAIT_EXPR);
    } else if (isAction(waitFutureExpr)) {
        // Neither is an action.
        waitFutureExpr = SyntaxErrors.addDiagnostic(waitFutureExpr,
                DiagnosticErrorCode.ERROR_ACTION_AS_A_WAIT_EXPR);
    }
    return waitFutureExpr;
}

private STNode parseWaitFutureExprEnd() {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case PIPE_TOKEN:
            return parsePipeToken();
        default:
            if (isEndOfWaitFutureExprList(nextToken.kind) || !isValidExpressionStart(nextToken.kind, 1)) {
                return null;
            }

            recover(peek(), ParserRuleContext.WAIT_FUTURE_EXPR_END);
            return parseWaitFutureExprEnd();
    }
}

/**
 * Parse multiple wait action.
 * <p>
 * <code>multiple-wait-action := wait { wait-field (, wait-field)* }</code>
 *
 * @param waitKeyword Wait keyword
 * @return Multiple wait action node
 */
private STNode parseMultiWaitAction(STNode waitKeyword) {
    startContext(ParserRuleContext.MULTI_WAIT_FIELDS);
    STNode openBrace = parseOpenBrace();
    STNode waitFields = parseWaitFields();
    STNode closeBrace = parseCloseBrace();
    endContext();

    // An empty wait-field list is an error; attach the diagnostic to the brace.
    openBrace = cloneWithDiagnosticIfListEmpty(waitFields, openBrace,
            DiagnosticErrorCode.ERROR_MISSING_WAIT_FIELD_IN_WAIT_ACTION);
    STNode waitFieldsNode = STNodeFactory.createWaitFieldsListNode(openBrace, waitFields, closeBrace);
    return STNodeFactory.createWaitActionNode(waitKeyword, waitFieldsNode);
}

private STNode parseWaitFields() {
    List<STNode> waitFields = new ArrayList<>();
    STToken nextToken = peek();

    // Return an empty node list if no fields are present.
    if (isEndOfWaitFields(nextToken.kind)) {
        return STNodeFactory.createEmptyNodeList();
    }

    // First wait field has no leading comma.
    STNode waitField = parseWaitField();
    waitFields.add(waitField);

    // Parse the remaining fields, each preceded by a comma separator.
    nextToken = peek();
    STNode waitFieldEnd;
    while (!isEndOfWaitFields(nextToken.kind)) {
        waitFieldEnd = parseWaitFieldEnd();
        if (waitFieldEnd == null) {
            break;
        }

        waitFields.add(waitFieldEnd);
        waitField = parseWaitField();
        waitFields.add(waitField);
        nextToken = peek();
    }

    return STNodeFactory.createNodeList(waitFields);
}

private boolean isEndOfWaitFields(SyntaxKind nextTokenKind) {
    switch (nextTokenKind) {
        case EOF_TOKEN:
        case CLOSE_BRACE_TOKEN:
            return true;
        default:
            return false;
    }
}

private STNode parseWaitFieldEnd() {
    switch (peek().kind) {
        case COMMA_TOKEN:
            return parseComma();
        case CLOSE_BRACE_TOKEN:
            // End of the field list; no separator to consume.
            return null;
        default:
            recover(peek(), ParserRuleContext.WAIT_FIELD_END);
            return parseWaitFieldEnd();
    }
}

/**
 * Parse wait field.
 * <p>
 * <code>wait-field := variable-name | field-name : wait-future-expr</code>
 *
 * @return Receiver field node
 */
private STNode parseWaitField() {
    switch (peek().kind) {
        case IDENTIFIER_TOKEN:
            STNode identifier = parseIdentifier(ParserRuleContext.WAIT_FIELD_NAME);
            identifier = STNodeFactory.createSimpleNameReferenceNode(identifier);
            return createQualifiedWaitField(identifier);
        default:
            recover(peek(), ParserRuleContext.WAIT_FIELD_NAME);
            return parseWaitField();
    }
}

// Extends the identifier to `field-name : wait-future-expr` when a colon follows.
private STNode createQualifiedWaitField(STNode identifier) {
    if (peek().kind != SyntaxKind.COLON_TOKEN) {
        return identifier;
    }

    STNode colon = parseColon();
    STNode waitFutureExpr = parseWaitFutureExpr();
    return STNodeFactory.createWaitFieldNode(identifier, colon, waitFutureExpr);
}

/**
 * Parse annot access expression.
 * <p>
 * <code>
 * annot-access-expr := expression .@ annot-tag-reference
 * <br/>
 * annot-tag-reference := qualified-identifier | identifier
 * </code>
 *
 * @param lhsExpr Preceding expression of the annot access
 * @return Parsed node
 */
private STNode parseAnnotAccessExpression(STNode lhsExpr, boolean isInConditionalExpr) {
    STNode annotAccessToken = parseAnnotChainingToken();
    STNode annotTagReference = parseFieldAccessIdentifier(isInConditionalExpr);
    return STNodeFactory.createAnnotAccessExpressionNode(lhsExpr, annotAccessToken, annotTagReference);
}

/**
 * Parse annot-chaining-token.
 *
 * @return Parsed node
 */
private STNode parseAnnotChainingToken() {
    STToken token = peek();
    if (token.kind == SyntaxKind.ANNOT_CHAINING_TOKEN) {
        return consume();
    } else {
        recover(token, ParserRuleContext.ANNOT_CHAINING_TOKEN);
        return parseAnnotChainingToken();
    }
}

/**
 * Parse field access identifier.
 * <p>
 * <code>field-access-identifier := qualified-identifier | identifier</code>
 *
 * @return Parsed node
 */
private STNode parseFieldAccessIdentifier(boolean isInConditionalExpr) {
    return parseQualifiedIdentifier(ParserRuleContext.FIELD_ACCESS_IDENTIFIER, isInConditionalExpr);
}

/**
 * Parse query action.
 * <p>
 * <code>query-action := query-pipeline do-clause limit-clause?
 * <br/>
 * do-clause := do block-stmt
 * </code>
 *
 * @param queryConstructType Query construct type. This is only for validation
 * @param queryPipeline Query pipeline
 * @param selectClause Select clause if any This is only for validation.
 * @return Query action node
 */
private STNode parseQueryAction(STNode queryConstructType, STNode queryPipeline, STNode selectClause,
                                boolean isRhsExpr) {
    if (queryConstructType != null) {
        // A construct type is not allowed in a query action; attach it to the
        // pipeline as invalid node minutiae with a diagnostic.
        queryPipeline = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(queryPipeline, queryConstructType,
                DiagnosticErrorCode.ERROR_QUERY_CONSTRUCT_TYPE_IN_QUERY_ACTION);
    }
    if (selectClause != null) {
        // Likewise, a select clause is not allowed in a query action.
        queryPipeline = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(queryPipeline, selectClause,
                DiagnosticErrorCode.ERROR_SELECT_CLAUSE_IN_QUERY_ACTION);
    }

    startContext(ParserRuleContext.DO_CLAUSE);
    STNode doKeyword = parseDoKeyword();
    STNode blockStmt = parseBlockNode();
    endContext();

    STNode limitClause = parseLimitClause(isRhsExpr);
    return STNodeFactory.createQueryActionNode(queryPipeline, doKeyword, blockStmt, limitClause);
}

/**
 * Parse 'do' keyword.
 *
 * @return do keyword node
 */
private STNode parseDoKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.DO_KEYWORD) {
        return consume();
    } else {
        recover(token, ParserRuleContext.DO_KEYWORD);
        return parseDoKeyword();
    }
}

/**
 * Parse optional field access or xml optional attribute access expression.
 * <p>
 * <code>
 * optional-field-access-expr := expression ?. field-name
 * <br/>
 * xml-optional-attribute-access-expr := expression ?. xml-attribute-name
 * <br/>
 * xml-attribute-name := xml-qualified-name | qualified-identifier | identifier
 * <br/>
 * xml-qualified-name := xml-namespace-prefix : identifier
 * <br/>
 * xml-namespace-prefix := identifier
 * </code>
 *
 * @param lhsExpr Preceding expression of the optional access
 * @return Parsed node
 */
private STNode parseOptionalFieldAccessExpression(STNode lhsExpr, boolean isInConditionalExpr) {
    STNode optionalFieldAccessToken = parseOptionalChainingToken();
    STNode fieldName = parseFieldAccessIdentifier(isInConditionalExpr);
    return STNodeFactory.createOptionalFieldAccessExpressionNode(lhsExpr, optionalFieldAccessToken, fieldName);
}

/**
 * Parse optional chaining token.
 *
 * @return parsed node
 */
private STNode parseOptionalChainingToken() {
    STToken token = peek();
    if (token.kind == SyntaxKind.OPTIONAL_CHAINING_TOKEN) {
        return consume();
    } else {
        recover(token, ParserRuleContext.OPTIONAL_CHAINING_TOKEN);
        return parseOptionalChainingToken();
    }
}

/**
 * Parse conditional expression.
 * <p>
 * <code>conditional-expr := expression ? expression : expression</code>
 *
 * @param lhsExpr Preceding expression of the question mark
 * @return Parsed node
 */
private STNode parseConditionalExpression(STNode lhsExpr) {
    startContext(ParserRuleContext.CONDITIONAL_EXPRESSION);
    STNode questionMark = parseQuestionMark();
    STNode middleExpr = parseExpression(OperatorPrecedence.ANON_FUNC_OR_LET, true, false, true);

    STNode nextToken = peek();
    STNode endExpr;
    STNode colon;
    if (nextToken.kind != SyntaxKind.COLON_TOKEN && middleExpr.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {
        // In `a ? b : c`, `b:c` may have been parsed as a single qualified
        // name reference; split it back into middle-expr, colon and end-expr.
        STQualifiedNameReferenceNode qualifiedNameRef = (STQualifiedNameReferenceNode) middleExpr;
        middleExpr = STNodeFactory.createSimpleNameReferenceNode(qualifiedNameRef.modulePrefix);
        colon = qualifiedNameRef.colon;
        endContext();
        endExpr = STNodeFactory.createSimpleNameReferenceNode(qualifiedNameRef.identifier);
    } else {
        colon = parseColon();
        endContext();
        endExpr = parseExpression(OperatorPrecedence.ANON_FUNC_OR_LET, true, false);
    }
    return STNodeFactory.createConditionalExpressionNode(lhsExpr, questionMark, middleExpr, colon, endExpr);
}

/**
 * Parse enum declaration.
 * <p>
 * module-enum-decl :=
 * metadata
 * [public] enum identifier { enum-member (, enum-member)* }
 * enum-member := metadata identifier [= const-expr]
 * </p>
 *
 * @param metadata Metadata of the enum declaration
 * @param qualifier Visibility qualifier, if any
 * @return Parsed enum node.
 */
private STNode parseEnumDeclaration(STNode metadata, STNode qualifier) {
    startContext(ParserRuleContext.MODULE_ENUM_DECLARATION);
    STNode enumKeywordToken = parseEnumKeyword();
    STNode identifier = parseIdentifier(ParserRuleContext.MODULE_ENUM_NAME);
    STNode openBraceToken = parseOpenBrace();
    STNode enumMemberList = parseEnumMemberList();
    STNode closeBraceToken = parseCloseBrace();
    endContext();

    // An enum must declare at least one member.
    openBraceToken = cloneWithDiagnosticIfListEmpty(enumMemberList, openBraceToken,
            DiagnosticErrorCode.ERROR_MISSING_ENUM_MEMBER);
    return STNodeFactory.createEnumDeclarationNode(metadata, qualifier, enumKeywordToken, identifier,
            openBraceToken, enumMemberList, closeBraceToken);
}

/**
 * Parse 'enum' keyword.
 *
 * @return enum keyword node
 */
private STNode parseEnumKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.ENUM_KEYWORD) {
        return consume();
    } else {
        recover(token, ParserRuleContext.ENUM_KEYWORD);
        return parseEnumKeyword();
    }
}

/**
 * Parse enum member list.
 * <p>
 * enum-member := metadata identifier [= const-expr]
 * </p>
 *
 * @return enum member list node.
 */
private STNode parseEnumMemberList() {
    startContext(ParserRuleContext.ENUM_MEMBER_LIST);

    // Empty member list: return an empty node list.
    // NOTE(review): this early return skips endContext(); verify the
    // ENUM_MEMBER_LIST context is closed elsewhere in this case.
    if (peek().kind == SyntaxKind.CLOSE_BRACE_TOKEN) {
        return STNodeFactory.createEmptyNodeList();
    }

    // First member has no leading comma.
    List<STNode> enumMemberList = new ArrayList<>();
    STNode enumMember = parseEnumMember();

    // Parse the remaining members, each preceded by a comma separator.
    STNode enumMemberRhs;
    while (peek().kind != SyntaxKind.CLOSE_BRACE_TOKEN) {
        enumMemberRhs = parseEnumMemberEnd();
        if (enumMemberRhs == null) {
            break;
        }
        enumMemberList.add(enumMember);
        enumMemberList.add(enumMemberRhs);
        enumMember = parseEnumMember();
    }

    enumMemberList.add(enumMember);
    endContext();
    return STNodeFactory.createNodeList(enumMemberList);
}

/**
 * Parse enum member.
 * <p>
 * enum-member := metadata identifier [= const-expr]
 * </p>
 *
 * @return Parsed enum member node.
 */
private STNode parseEnumMember() {
    STNode metadata;
    switch (peek().kind) {
        case DOCUMENTATION_STRING:
        case AT_TOKEN:
            metadata = parseMetaData();
            break;
        default:
            metadata = STNodeFactory.createEmptyNode();
    }

    STNode identifierNode = parseIdentifier(ParserRuleContext.ENUM_MEMBER_NAME);
    return parseEnumMemberRhs(metadata, identifierNode);
}

// Parses the optional `= const-expr` part of an enum member.
private STNode parseEnumMemberRhs(STNode metadata, STNode identifierNode) {
    STNode equalToken, constExprNode;
    switch (peek().kind) {
        case EQUAL_TOKEN:
            equalToken = parseAssignOp();
            constExprNode = parseExpression();
            break;
        case COMMA_TOKEN:
        case CLOSE_BRACE_TOKEN:
            // No initializer; both parts are empty nodes.
            equalToken = STNodeFactory.createEmptyNode();
            constExprNode = STNodeFactory.createEmptyNode();
            break;
        default:
            recover(peek(), ParserRuleContext.ENUM_MEMBER_RHS, metadata, identifierNode);
            return parseEnumMemberRhs(metadata, identifierNode);
    }

    return STNodeFactory.createEnumMemberNode(metadata, identifierNode, equalToken, constExprNode);
}

private STNode parseEnumMemberEnd() {
    switch (peek().kind) {
        case COMMA_TOKEN:
            return parseComma();
        case CLOSE_BRACE_TOKEN:
            // End of the member list; no separator to consume.
            return null;
        default:
            recover(peek(), ParserRuleContext.ENUM_MEMBER_END);
            return parseEnumMemberEnd();
    }
}

/**
 * Parse transaction statement.
 * <p>
 * <code>transaction-stmt := "transaction" block-stmt [on-fail-clause];</code>
 *
 * @return Transaction statement node
 */
private STNode parseTransactionStatement() {
    startContext(ParserRuleContext.TRANSACTION_STMT);
    STNode transactionKeyword = parseTransactionKeyword();
    STNode blockStmt = parseBlockNode();
    endContext();

    // Parse the optional on-fail clause.
    STNode onFailClause;
    if (peek().kind == SyntaxKind.ON_KEYWORD) {
        onFailClause = parseOnFailClause();
    } else {
        onFailClause = STNodeFactory.createEmptyNode();
    }
    return STNodeFactory.createTransactionStatementNode(transactionKeyword, blockStmt, onFailClause);
}

/**
 * Parse transaction keyword.
 *
 * @return parsed node
 */
private STNode parseTransactionKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.TRANSACTION_KEYWORD) {
        return consume();
    } else {
        recover(token, ParserRuleContext.TRANSACTION_KEYWORD);
        return parseTransactionKeyword();
    }
}

/**
 * Parse commit action.
 * <p>
 * <code>commit-action := "commit"</code>
 *
 * @return Commit action node
 */
private STNode parseCommitAction() {
    STNode commitKeyword = parseCommitKeyword();
    return STNodeFactory.createCommitActionNode(commitKeyword);
}

/**
 * Parse commit keyword.
 *
 * @return parsed node
 */
private STNode parseCommitKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.COMMIT_KEYWORD) {
        return consume();
    } else {
        recover(token, ParserRuleContext.COMMIT_KEYWORD);
        return parseCommitKeyword();
    }
}

/**
 * Parse retry statement.
 * <p>
 * <code>
 * retry-stmt := "retry" retry-spec block-stmt
 * <br/>
 * retry-spec := [type-parameter] [ "(" arg-list ")" ]
 * </code>
 *
 * @return Retry statement node
 */
private STNode parseRetryStatement() {
    startContext(ParserRuleContext.RETRY_STMT);
    STNode retryKeyword = parseRetryKeyword();
    // Parse the remainder: optional type-parameter, optional arg list, body.
    STNode retryStmt = parseRetryKeywordRhs(retryKeyword);
    endContext();
    return retryStmt;
}

private STNode parseRetryKeywordRhs(STNode retryKeyword) {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case LT_TOKEN:
            // e.g. `retry<MyRetryManager> ...`
            STNode typeParam = parseTypeParameter();
            return parseRetryTypeParamRhs(retryKeyword, typeParam);
        case OPEN_PAREN_TOKEN:
        case OPEN_BRACE_TOKEN:
        case TRANSACTION_KEYWORD:
            // No type parameter present.
            typeParam = STNodeFactory.createEmptyNode();
            return parseRetryTypeParamRhs(retryKeyword, typeParam);
        default:
            recover(peek(), ParserRuleContext.RETRY_KEYWORD_RHS, retryKeyword);
            return parseRetryKeywordRhs(retryKeyword);
    }
}

private STNode parseRetryTypeParamRhs(STNode retryKeyword, STNode typeParam) {
    STNode args;
    switch (peek().kind) {
        case OPEN_PAREN_TOKEN:
            args = parseParenthesizedArgList();
            break;
        case OPEN_BRACE_TOKEN:
        case TRANSACTION_KEYWORD:
            // No argument list present.
            args = STNodeFactory.createEmptyNode();
            break;
        default:
            recover(peek(), ParserRuleContext.RETRY_TYPE_PARAM_RHS, retryKeyword, typeParam);
            return parseRetryTypeParamRhs(retryKeyword, typeParam);
    }

    STNode blockStmt = parseRetryBody();

    // Parse the optional on-fail clause.
    STNode onFailClause;
    if (peek().kind == SyntaxKind.ON_KEYWORD) {
        onFailClause = parseOnFailClause();
    } else {
        onFailClause = STNodeFactory.createEmptyNode();
    }
    return STNodeFactory.createRetryStatementNode(retryKeyword, typeParam, args, blockStmt, onFailClause);
}

// A retry body is either a plain block or a transaction statement.
private STNode parseRetryBody() {
    switch (peek().kind) {
        case OPEN_BRACE_TOKEN:
            return parseBlockNode();
        case TRANSACTION_KEYWORD:
            return parseTransactionStatement();
        default:
            recover(peek(), ParserRuleContext.RETRY_BODY);
            return parseRetryBody();
    }
}

/**
 * Parse on fail clause.
 * <code>on-fail-clause := on fail typed-binding-pattern block-stmt</code>
 *
 * @return On fail clause node
 */
private STNode parseOnFailClause() {
    STNode onKeyword = parseOnKeyword();
    STNode failKeyword = parseFailKeyword();
    STNode typeDescriptorNode = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true,
            false);
    STNode identifierNode = parseIdentifier(ParserRuleContext.VARIABLE_REF);
    STNode blockStatement = parseBlockNode();
    return STNodeFactory.createOnFailClauseNode(onKeyword, failKeyword, typeDescriptorNode, identifierNode,
            blockStatement);
}

/**
 * Parse retry keyword.
 *
 * @return parsed node
 */
private STNode parseRetryKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.RETRY_KEYWORD) {
        return consume();
    } else {
        recover(token, ParserRuleContext.RETRY_KEYWORD);
        return parseRetryKeyword();
    }
}

/**
 * Parse rollback statement.
 * <p>
 * <code>rollback-stmt := "rollback" [expression] ";"</code>
 *
 * @return Rollback statement node
 */
private STNode parseRollbackStatement() {
    startContext(ParserRuleContext.ROLLBACK_STMT);
    STNode rollbackKeyword = parseRollbackKeyword();

    // The expression is optional; a semicolon may directly follow the keyword.
    STNode expression;
    if (peek().kind == SyntaxKind.SEMICOLON_TOKEN) {
        expression = STNodeFactory.createEmptyNode();
    } else {
        expression = parseExpression();
    }

    STNode semicolon = parseSemicolon();
    endContext();
    return STNodeFactory.createRollbackStatementNode(rollbackKeyword, expression, semicolon);
}

/**
 * Parse rollback keyword.
 *
 * @return Rollback keyword node
 */
private STNode parseRollbackKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.ROLLBACK_KEYWORD) {
        return consume();
    } else {
        recover(token, ParserRuleContext.ROLLBACK_KEYWORD);
        return parseRollbackKeyword();
    }
}

/**
 * Parse transactional expression.
 * <p>
 * <code>transactional-expr := "transactional"</code>
 *
 * @return Transactional expression node
 */
private STNode parseTransactionalExpression() {
    STNode transactionalKeyword = parseTransactionalKeyword();
    return STNodeFactory.createTransactionalExpressionNode(transactionalKeyword);
}

/**
 * Parse transactional keyword.
 *
 * @return Transactional keyword node
 */
private STNode parseTransactionalKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.TRANSACTIONAL_KEYWORD) {
        return consume();
    } else {
        recover(token, ParserRuleContext.TRANSACTIONAL_KEYWORD);
        return parseTransactionalKeyword();
    }
}

/**
 * Parse service-constructor-expr.
* <p> * <code> * service-constructor-expr := [annots] service service-body-block * <br/> * service-body-block := { service-method-defn* } * <br/> * service-method-defn := metadata [resource] function identifier function-signature method-defn-body * </code> * * @param annots Annotations * @return Service constructor expression node */ private STNode parseServiceConstructorExpression(STNode annots) { startContext(ParserRuleContext.SERVICE_CONSTRUCTOR_EXPRESSION); STNode serviceKeyword = parseServiceKeyword(); STNode serviceBody = parseServiceBody(); endContext(); return STNodeFactory.createServiceConstructorExpressionNode(annots, serviceKeyword, serviceBody); } /** * Parse base16 literal. * <p> * <code> * byte-array-literal := Base16Literal | Base64Literal * <br/> * Base16Literal := base16 WS ` HexGroup* WS ` * <br/> * Base64Literal := base64 WS ` Base64Group* [PaddedBase64Group] WS ` * </code> * * @return parsed node */ private STNode parseByteArrayLiteral() { STNode type; if (peek().kind == SyntaxKind.BASE16_KEYWORD) { type = parseBase16Keyword(); } else { type = parseBase64Keyword(); } STNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START); if (startingBackTick.isMissing()) { startingBackTick = SyntaxErrors.createMissingToken(SyntaxKind.BACKTICK_TOKEN); STNode endingBackTick = SyntaxErrors.createMissingToken(SyntaxKind.BACKTICK_TOKEN); STNode content = STNodeFactory.createEmptyNode(); STNode byteArrayLiteral = STNodeFactory.createByteArrayLiteralNode(type, startingBackTick, content, endingBackTick); byteArrayLiteral = SyntaxErrors.addDiagnostic(byteArrayLiteral, DiagnosticErrorCode.ERROR_MISSING_BYTE_ARRAY_CONTENT); return byteArrayLiteral; } STNode content = parseByteArrayContent(); return parseByteArrayLiteral(type, startingBackTick, content); } /** * Parse byte array literal. 
 *
 * @param typeKeyword keyword token, possible values are `base16` and `base64`
 * @param startingBackTick starting backtick token
 * @param byteArrayContent byte array literal content to be validated
 * @return parsed byte array literal node
 */
private STNode parseByteArrayLiteral(STNode typeKeyword, STNode startingBackTick, STNode byteArrayContent) {
    STNode content = STNodeFactory.createEmptyNode();
    STNode newStartingBackTick = startingBackTick;
    STNodeList items = (STNodeList) byteArrayContent;
    if (items.size() == 1) {
        // Exactly one content item: validate it against the declared base.
        // Invalid content is attached to the opening backtick as invalid-node
        // minutiae together with an appropriate diagnostic, so nothing is lost.
        STNode item = items.get(0);
        if (typeKeyword.kind == SyntaxKind.BASE16_KEYWORD && !isValidBase16LiteralContent(item.toString())) {
            newStartingBackTick = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startingBackTick, item,
                    DiagnosticErrorCode.ERROR_INVALID_BASE16_CONTENT_IN_BYTE_ARRAY_LITERAL);
        } else if (typeKeyword.kind == SyntaxKind.BASE64_KEYWORD && !isValidBase64LiteralContent(item.toString())) {
            newStartingBackTick = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startingBackTick, item,
                    DiagnosticErrorCode.ERROR_INVALID_BASE64_CONTENT_IN_BYTE_ARRAY_LITERAL);
        } else if (item.kind != SyntaxKind.TEMPLATE_STRING) {
            newStartingBackTick = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startingBackTick, item,
                    DiagnosticErrorCode.ERROR_INVALID_CONTENT_IN_BYTE_ARRAY_LITERAL);
        } else {
            content = item;
        }
    } else if (items.size() > 1) {
        // Multiple items (e.g. interpolations) are never valid byte-array
        // content: fold every item into the opening backtick as invalid
        // minutiae and report a single diagnostic.
        STNode clonedStartingBackTick = startingBackTick;
        for (int index = 0; index < items.size(); index++) {
            STNode item = items.get(index);
            clonedStartingBackTick =
                    SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(clonedStartingBackTick, item);
        }
        newStartingBackTick = SyntaxErrors.addDiagnostic(clonedStartingBackTick,
                DiagnosticErrorCode.ERROR_INVALID_CONTENT_IN_BYTE_ARRAY_LITERAL);
    }

    STNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_END);
    return STNodeFactory.createByteArrayLiteralNode(typeKeyword, newStartingBackTick, content, endingBackTick);
}

/**
 * Parse <code>base16</code> keyword.
 *
 * @return base16 keyword node
 */
private STNode parseBase16Keyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.BASE16_KEYWORD) {
        return consume();
    } else {
        // Unexpected token: run error recovery, then retry this rule.
        recover(token, ParserRuleContext.BASE16_KEYWORD);
        return parseBase16Keyword();
    }
}

/**
 * Parse <code>base64</code> keyword.
 *
 * @return base64 keyword node
 */
private STNode parseBase64Keyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.BASE64_KEYWORD) {
        return consume();
    } else {
        // Unexpected token: run error recovery, then retry this rule.
        recover(token, ParserRuleContext.BASE64_KEYWORD);
        return parseBase64Keyword();
    }
}

/**
 * Validate and parse byte array literal content.
 * An error is reported, if the content is invalid.
 *
 * @return parsed node
 */
private STNode parseByteArrayContent() {
    STToken nextToken = peek();

    // Collect template items until the closing backtick (or EOF) is reached.
    List<STNode> items = new ArrayList<>();
    while (!isEndOfBacktickContent(nextToken.kind)) {
        STNode content = parseTemplateItem();
        items.add(content);
        nextToken = peek();
    }
    return STNodeFactory.createNodeList(items);
}

/**
 * Validate base16 literal content.
 * <p>
 * <code>
 * Base16Literal := base16 WS ` HexGroup* WS `
 * <br/>
 * HexGroup := WS HexDigit WS HexDigit
 * <br/>
 * WS := WhiteSpaceChar*
 * <br/>
 * WhiteSpaceChar := 0x9 | 0xA | 0xD | 0x20
 * </code>
 *
 * @param content the string surrounded by the backticks
 * @return <code>true</code>, if the string content is valid. <code>false</code> otherwise.
 */
static boolean isValidBase16LiteralContent(String content) {
    char[] charArray = content.toCharArray();
    int hexDigitCount = 0;

    for (char c : charArray) {
        switch (c) {
            // Whitespace may appear anywhere between hex digits.
            case LexerTerminals.TAB:
            case LexerTerminals.NEWLINE:
            case LexerTerminals.CARRIAGE_RETURN:
            case LexerTerminals.SPACE:
                break;
            default:
                if (isHexDigit(c)) {
                    hexDigitCount++;
                } else {
                    return false;
                }
                break;
        }
    }

    // Hex digits must pair up into full bytes.
    return hexDigitCount % 2 == 0;
}

/**
 * Validate base64 literal content.
 * <p>
 * <code>
 * Base64Literal := base64 WS ` Base64Group* [PaddedBase64Group] WS `
 * <br/>
 * Base64Group := WS Base64Char WS Base64Char WS Base64Char WS Base64Char
 * <br/>
 * PaddedBase64Group :=
 *    WS Base64Char WS Base64Char WS Base64Char WS PaddingChar
 *    | WS Base64Char WS Base64Char WS PaddingChar WS PaddingChar
 * <br/>
 * Base64Char := A .. Z | a .. z | 0 .. 9 | + | /
 * <br/>
 * PaddingChar := =
 * <br/>
 * WS := WhiteSpaceChar*
 * <br/>
 * WhiteSpaceChar := 0x9 | 0xA | 0xD | 0x20
 * </code>
 *
 * @param content the string surrounded by the backticks
 * @return <code>true</code>, if the string content is valid. <code>false</code> otherwise.
 */
static boolean isValidBase64LiteralContent(String content) {
    char[] charArray = content.toCharArray();
    int base64CharCount = 0;
    int paddingCharCount = 0;

    for (char c : charArray) {
        switch (c) {
            // Whitespace may appear anywhere between base64 chars.
            case LexerTerminals.TAB:
            case LexerTerminals.NEWLINE:
            case LexerTerminals.CARRIAGE_RETURN:
            case LexerTerminals.SPACE:
                break;
            case LexerTerminals.EQUAL:
                paddingCharCount++;
                break;
            default:
                if (isBase64Char(c)) {
                    if (paddingCharCount == 0) {
                        base64CharCount++;
                    } else {
                        // A base64 char after padding started is invalid.
                        return false;
                    }
                } else {
                    return false;
                }
                break;
        }
    }

    if (paddingCharCount > 2) {
        // At most two '=' padding chars are allowed.
        return false;
    } else if (paddingCharCount == 0) {
        return base64CharCount % 4 == 0;
    } else {
        // With k padding chars the final group has 4-k base64 chars,
        // so the total char count modulo 4 must equal 4-k.
        return base64CharCount % 4 == 4 - paddingCharCount;
    }
}

/**
 * <p>
 * Check whether a given char is a base64 char.
 * </p>
 * <code>Base64Char := A .. Z | a .. z | 0 .. 9 | + | /</code>
 *
 * @param c character to check
 * @return <code>true</code>, if the character represents a base64 char. <code>false</code> otherwise.
 */
static boolean isBase64Char(int c) {
    if ('a' <= c && c <= 'z') {
        return true;
    }

    if ('A' <= c && c <= 'Z') {
        return true;
    }

    if (c == '+' || c == '/') {
        return true;
    }

    return isDigit(c);
}

// Returns true for [0-9a-fA-F].
static boolean isHexDigit(int c) {
    if ('a' <= c && c <= 'f') {
        return true;
    }

    if ('A' <= c && c <= 'F') {
        return true;
    }

    return isDigit(c);
}

// Returns true for [0-9].
static boolean isDigit(int c) {
    return ('0' <= c && c <= '9');
}

/**
 * Parse xml filter expression.
 * <p>
 * <code>xml-filter-expr := expression .&lt; xml-name-pattern &gt;</code>
 *
 * @param lhsExpr Preceding expression of .&lt; token
 * @return Parsed node
 */
private STNode parseXMLFilterExpression(STNode lhsExpr) {
    STNode xmlNamePatternChain = parseXMLFilterExpressionRhs();
    return STNodeFactory.createXMLFilterExpressionNode(lhsExpr, xmlNamePatternChain);
}

/**
 * Parse xml filter expression rhs.
 * <p>
 * <code>filer-expression-rhs := .&lt; xml-name-pattern &gt;</code>
 *
 * @return Parsed node
 */
private STNode parseXMLFilterExpressionRhs() {
    STNode dotLTToken = parseDotLTToken();
    return parseXMLNamePatternChain(dotLTToken);
}

/**
 * Parse xml name pattern chain.
 * <p>
 * <code>
 * xml-name-pattern-chain := filer-expression-rhs | xml-element-children-step | xml-element-descendants-step
 * <br/>
 * filer-expression-rhs := .&lt; xml-name-pattern &gt;
 * <br/>
 * xml-element-children-step := /&lt; xml-name-pattern &gt;
 * <br/>
 * xml-element-descendants-step := /**\/&lt;xml-name-pattern &gt;
 * </code>
 *
 * @param startToken Preceding token of xml name pattern
 * @return Parsed node
 */
private STNode parseXMLNamePatternChain(STNode startToken) {
    startContext(ParserRuleContext.XML_NAME_PATTERN);
    STNode xmlNamePattern = parseXMLNamePattern();
    STNode gtToken = parseGTToken();
    endContext();

    // An empty name-pattern list is an error; the diagnostic is attached
    // to the start token.
    startToken = cloneWithDiagnosticIfListEmpty(xmlNamePattern, startToken,
            DiagnosticErrorCode.ERROR_MISSING_XML_ATOMIC_NAME_PATTERN);
    return STNodeFactory.createXMLNamePatternChainingNode(startToken, xmlNamePattern, gtToken);
}

/**
 * Parse <code> .&lt; </code> token.
 *
 * @return Parsed node
 */
private STNode parseDotLTToken() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.DOT_LT_TOKEN) {
        return consume();
    } else {
        // Unexpected token: run error recovery, then retry this rule.
        recover(nextToken, ParserRuleContext.DOT_LT_TOKEN);
        return parseDotLTToken();
    }
}

/**
 * Parse xml name pattern.
 * <p>
 * <code>xml-name-pattern := xml-atomic-name-pattern [| xml-atomic-name-pattern]*</code>
 *
 * @return Parsed node
 */
private STNode parseXMLNamePattern() {
    List<STNode> xmlAtomicNamePatternList = new ArrayList<>();
    STToken nextToken = peek();

    // Return an empty list if the pattern ends immediately (e.g. `.<>`).
    if (isEndOfXMLNamePattern(nextToken.kind)) {
        return STNodeFactory.createNodeList(xmlAtomicNamePatternList);
    }

    // Parse the first atomic pattern, then alternate separator/pattern pairs.
    STNode xmlAtomicNamePattern = parseXMLAtomicNamePattern();
    xmlAtomicNamePatternList.add(xmlAtomicNamePattern);

    STNode separator;
    while (!isEndOfXMLNamePattern(peek().kind)) {
        separator = parseXMLNamePatternSeparator();
        if (separator == null) {
            break;
        }
        xmlAtomicNamePatternList.add(separator);

        xmlAtomicNamePattern = parseXMLAtomicNamePattern();
        xmlAtomicNamePatternList.add(xmlAtomicNamePattern);
    }

    return STNodeFactory.createNodeList(xmlAtomicNamePatternList);
}

private boolean isEndOfXMLNamePattern(SyntaxKind tokenKind) {
    switch (tokenKind) {
        case GT_TOKEN:
        case EOF_TOKEN:
            return true;
        case IDENTIFIER_TOKEN:
        case ASTERISK_TOKEN:
        case COLON_TOKEN:
        default:
            return false;
    }
}

// Returns the pipe separator, or null when the pattern list has ended.
private STNode parseXMLNamePatternSeparator() {
    STToken token = peek();
    switch (token.kind) {
        case PIPE_TOKEN:
            return consume();
        case GT_TOKEN:
        case EOF_TOKEN:
            return null;
        default:
            recover(token, ParserRuleContext.XML_NAME_PATTERN_RHS);
            return parseXMLNamePatternSeparator();
    }
}

/**
 * Parse xml atomic name pattern.
 * <p>
 * <code>
 * xml-atomic-name-pattern :=
 *   *
 *   | identifier
 *   | xml-namespace-prefix : identifier
 *   | xml-namespace-prefix : *
 * </code>
 *
 * @return Parsed node
 */
private STNode parseXMLAtomicNamePattern() {
    startContext(ParserRuleContext.XML_ATOMIC_NAME_PATTERN);
    STNode atomicNamePattern = parseXMLAtomicNamePatternBody();
    endContext();
    return atomicNamePattern;
}

private STNode parseXMLAtomicNamePatternBody() {
    STToken token = peek();
    STNode identifier;
    switch (token.kind) {
        case ASTERISK_TOKEN:
            // A bare `*` is a complete atomic pattern.
            return consume();
        case IDENTIFIER_TOKEN:
            identifier = consume();
            break;
        default:
            recover(token, ParserRuleContext.XML_ATOMIC_NAME_PATTERN_START);
            return parseXMLAtomicNamePatternBody();
    }

    return parseXMLAtomicNameIdentifier(identifier);
}

// After an identifier, a `:` may extend it into `prefix:name` or `prefix:*`.
// Otherwise the identifier alone is the pattern.
private STNode parseXMLAtomicNameIdentifier(STNode identifier) {
    STToken token = peek();
    if (token.kind == SyntaxKind.COLON_TOKEN) {
        STNode colon = consume();
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN || nextToken.kind == SyntaxKind.ASTERISK_TOKEN) {
            STToken endToken = consume();
            return STNodeFactory.createXMLAtomicNamePatternNode(identifier, colon, endToken);
        }
    }
    return STNodeFactory.createSimpleNameReferenceNode(identifier);
}

/**
 * Parse xml step expression.
 * <p>
 * <code>xml-step-expr := expression xml-step-start</code>
 *
 * @param lhsExpr Preceding expression of /*, /&lt;, or /**\/&lt; token
 * @return Parsed node
 */
private STNode parseXMLStepExpression(STNode lhsExpr) {
    STNode xmlStepStart = parseXMLStepStart();
    return STNodeFactory.createXMLStepExpressionNode(lhsExpr, xmlStepStart);
}

/**
 * Parse xml filter expression rhs.
 * <p>
 * <code>
 * xml-step-start :=
 *   xml-all-children-step
 *   | xml-element-children-step
 *   | xml-element-descendants-step
 * <br/>
 * xml-all-children-step := /*
 * </code>
 *
 * @return Parsed node
 */
private STNode parseXMLStepStart() {
    STToken token = peek();
    STNode startToken;

    switch (token.kind) {
        case SLASH_ASTERISK_TOKEN:
            // `/*` is complete by itself; no name-pattern chain follows.
            return consume();
        case DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN:
            startToken = parseDoubleSlashDoubleAsteriskLTToken();
            break;
        case SLASH_LT_TOKEN:
        default:
            // Default to `/<` recovery so a name-pattern chain is still parsed.
            startToken = parseSlashLTToken();
            break;
    }
    return parseXMLNamePatternChain(startToken);
}

/**
 * Parse <code> /&lt; </code> token.
 *
 * @return Parsed node
 */
private STNode parseSlashLTToken() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.SLASH_LT_TOKEN) {
        return consume();
    } else {
        // Unexpected token: run error recovery, then retry this rule.
        recover(nextToken, ParserRuleContext.SLASH_LT_TOKEN);
        return parseSlashLTToken();
    }
}

/**
 * Parse <code> /**\/&lt; </code> token.
 *
 * @return Parsed node
 */
private STNode parseDoubleSlashDoubleAsteriskLTToken() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN) {
        return consume();
    } else {
        // Unexpected token: run error recovery, then retry this rule.
        recover(nextToken, ParserRuleContext.DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN);
        return parseDoubleSlashDoubleAsteriskLTToken();
    }
}

/**
 * Parse match statement.
 * <p>
 * <code>match-stmt := match action-or-expr { match-clause+ } [on-fail-clause]</code>
 *
 * @return Match statement
 */
private STNode parseMatchStatement() {
    startContext(ParserRuleContext.MATCH_STMT);
    STNode matchKeyword = parseMatchKeyword();
    STNode actionOrExpr = parseActionOrExpression();

    // The braced clause list has its own context so recovery inside the
    // body does not escape the match statement.
    startContext(ParserRuleContext.MATCH_BODY);
    STNode openBrace = parseOpenBrace();
    STNode matchClauses = parseMatchClauses();
    STNode closeBrace = parseCloseBrace();
    endContext();
    endContext();

    // The on-fail clause is optional.
    STNode onFailClause;
    if (peek().kind == SyntaxKind.ON_KEYWORD) {
        onFailClause = parseOnFailClause();
    } else {
        onFailClause = STNodeFactory.createEmptyNode();
    }

    return STNodeFactory.createMatchStatementNode(matchKeyword, actionOrExpr, openBrace, matchClauses, closeBrace,
            onFailClause);
}

/**
 * Parse match keyword.
 *
 * @return Match keyword node
 */
private STNode parseMatchKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.MATCH_KEYWORD) {
        return consume();
    } else {
        // Unexpected token: run error recovery, then retry this rule.
        recover(nextToken, ParserRuleContext.MATCH_KEYWORD);
        return parseMatchKeyword();
    }
}

/**
 * Parse match clauses list.
 *
 * @return Match clauses list
 */
private STNode parseMatchClauses() {
    List<STNode> matchClauses = new ArrayList<>();
    while (!isEndOfMatchClauses(peek().kind)) {
        STNode clause = parseMatchClause();
        matchClauses.add(clause);
    }
    return STNodeFactory.createNodeList(matchClauses);
}

private boolean isEndOfMatchClauses(SyntaxKind nextTokenKind) {
    switch (nextTokenKind) {
        case EOF_TOKEN:
        case CLOSE_BRACE_TOKEN:
            return true;
        default:
            return false;
    }
}

/**
 * Parse a single match clause.
 * <p>
 * <code>
 * match-clause := match-pattern-list [match-guard] =&gt; block-stmt
 * <br/>
 * match-guard := if expression
 * </code>
 *
 * @return A match clause
 */
private STNode parseMatchClause() {
    STNode matchPatterns = parseMatchPatternList();
    STNode matchGuard = parseMatchGuard();
    STNode rightDoubleArrow = parseDoubleRightArrow();
    STNode blockStmt = parseBlockNode();
    return STNodeFactory.createMatchClauseNode(matchPatterns, matchGuard, rightDoubleArrow, blockStmt);
}

/**
 * Parse match guard.
 * <p>
 * <code>match-guard := if expression</code>
 *
 * @return Match guard
 */
private STNode parseMatchGuard() {
    switch (peek().kind) {
        case IF_KEYWORD:
            STNode ifKeyword = parseIfKeyword();
            STNode expr = parseExpression(DEFAULT_OP_PRECEDENCE, true, false, true, false);
            return STNodeFactory.createMatchGuardNode(ifKeyword, expr);
        case RIGHT_DOUBLE_ARROW_TOKEN:
            // No guard: the clause goes straight to `=>`.
            return STNodeFactory.createEmptyNode();
        default:
            recover(peek(), ParserRuleContext.OPTIONAL_MATCH_GUARD);
            return parseMatchGuard();
    }
}

/**
 * Parse match patterns list.
 * <p>
 * <code>match-pattern-list := match-pattern (| match-pattern)*</code>
 *
 * @return Match patterns list
 */
private STNode parseMatchPatternList() {
    startContext(ParserRuleContext.MATCH_PATTERN);
    List<STNode> matchClauses = new ArrayList<>();
    while (!isEndOfMatchPattern(peek().kind)) {
        STNode clause = parseMatchPattern();
        if (clause == null) {
            break;
        }
        matchClauses.add(clause);

        STNode seperator = parseMatchPatternEnd();
        if (seperator == null) {
            break;
        }
        matchClauses.add(seperator);
    }

    endContext();
    return STNodeFactory.createNodeList(matchClauses);
}

private boolean isEndOfMatchPattern(SyntaxKind nextTokenKind) {
    switch (nextTokenKind) {
        case PIPE_TOKEN:
        case IF_KEYWORD:
        // NOTE(review): parseMatchPatternEnd terminates on RIGHT_DOUBLE_ARROW_TOKEN,
        // but this check uses RIGHT_ARROW_TOKEN — confirm which token is intended here.
        case RIGHT_ARROW_TOKEN:
            return true;
        default:
            return false;
    }
}

/**
 * Parse match pattern.
 * <p>
 * <code>
 * match-pattern := var binding-pattern
 *                  | wildcard-match-pattern
 *                  | const-pattern
 *                  | list-match-pattern
 *                  | mapping-match-pattern
 *                  | functional-match-pattern
 * </code>
 *
 * @return Match pattern
 */
private STNode parseMatchPattern() {
    switch (peek().kind) {
        // Constant patterns: literals, signed numerics, parenthesized consts.
        case OPEN_PAREN_TOKEN:
        case NULL_KEYWORD:
        case TRUE_KEYWORD:
        case FALSE_KEYWORD:
        case PLUS_TOKEN:
        case MINUS_TOKEN:
        case DECIMAL_INTEGER_LITERAL_TOKEN:
        case HEX_INTEGER_LITERAL_TOKEN:
        case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
        case HEX_FLOATING_POINT_LITERAL_TOKEN:
        case STRING_LITERAL_TOKEN:
            return parseSimpleConstExpr();
        case IDENTIFIER_TOKEN:
            // An identifier may start either a const-pattern or a functional
            // match pattern; disambiguate after reading the qualified name.
            STNode typeRefOrConstExpr = parseQualifiedIdentifier(ParserRuleContext.MATCH_PATTERN);
            return parseFunctionalMatchPatternOrConsPattern(typeRefOrConstExpr);
        case VAR_KEYWORD:
            return parseVarTypedBindingPattern();
        case OPEN_BRACKET_TOKEN:
            return parseListMatchPattern();
        case OPEN_BRACE_TOKEN:
            return parseMappingMatchPattern();
        case ERROR_KEYWORD:
            return parseFunctionalMatchPattern(consume());
        default:
            recover(peek(), ParserRuleContext.MATCH_PATTERN_START);
            return parseMatchPattern();
    }
}

// Returns the `|` separator, or null when the pattern list has ended.
private STNode parseMatchPatternEnd() {
    switch (peek().kind) {
        case PIPE_TOKEN:
            return parsePipeToken();
        case IF_KEYWORD:
        case RIGHT_DOUBLE_ARROW_TOKEN:
            return null;
        default:
            recover(peek(), ParserRuleContext.MATCH_PATTERN_RHS);
            return parseMatchPatternEnd();
    }
}

/**
 * Parse var typed binding pattern.
 * <p>
 * <code>var binding-pattern</code>
 * </p>
 *
 * @return Parsed typed binding pattern node
 */
private STNode parseVarTypedBindingPattern() {
    STNode varKeyword = parseVarKeyword();
    STNode bindingPattern = parseBindingPattern();
    return STNodeFactory.createTypedBindingPatternNode(varKeyword, bindingPattern);
}

/**
 * Parse var keyword.
 *
 * @return Var keyword node
 */
private STNode parseVarKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.VAR_KEYWORD) {
        return consume();
    } else {
        // Unexpected token: run error recovery, then retry this rule.
        recover(nextToken, ParserRuleContext.VAR_KEYWORD);
        return parseVarKeyword();
    }
}

/**
 * Parse list match pattern.
 * <p>
 * <code>
 * list-match-pattern := [ list-member-match-patterns ]
 * list-member-match-patterns :=
 *    match-pattern (, match-pattern)* [, rest-match-pattern]
 *    | [ rest-match-pattern ]
 * </code>
 * </p>
 *
 * @return Parsed list match pattern node
 */
private STNode parseListMatchPattern() {
    startContext(ParserRuleContext.LIST_MATCH_PATTERN);
    STNode openBracketToken = parseOpenBracket();
    List<STNode> matchPatternList = new ArrayList<>();
    STNode restMatchPattern = null;
    STNode listMatchPatternMemberRhs = null;
    boolean isEndOfFields = false;

    while (!isEndOfListMatchPattern()) {
        STNode listMatchPatternMember = parseListMatchPatternMember();
        if (listMatchPatternMember.kind == SyntaxKind.REST_MATCH_PATTERN) {
            // The rest pattern must be last; stop collecting regular members.
            restMatchPattern = listMatchPatternMember;
            listMatchPatternMemberRhs = parseListMatchPatternMemberRhs();
            isEndOfFields = true;
            break;
        }
        matchPatternList.add(listMatchPatternMember);
        listMatchPatternMemberRhs = parseListMatchPatternMemberRhs();

        if (listMatchPatternMemberRhs != null) {
            matchPatternList.add(listMatchPatternMemberRhs);
        } else {
            break;
        }
    }

    // Anything that follows the rest pattern is invalid: attach those members
    // (and their separators) to the rest pattern as invalid-node minutiae.
    while (isEndOfFields && listMatchPatternMemberRhs != null) {
        STNode invalidField = parseListMatchPatternMember();
        restMatchPattern =
                SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(restMatchPattern, listMatchPatternMemberRhs);
        restMatchPattern = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(restMatchPattern, invalidField);
        restMatchPattern = SyntaxErrors.addDiagnostic(restMatchPattern,
                DiagnosticErrorCode.ERROR_MORE_MATCH_PATTERNS_AFTER_REST_MATCH_PATTERN);
        listMatchPatternMemberRhs = parseListMatchPatternMemberRhs();
    }

    if (restMatchPattern == null) {
        restMatchPattern = STNodeFactory.createEmptyNode();
    }

    STNode matchPatternListNode = STNodeFactory.createNodeList(matchPatternList);
    STNode closeBracketToken = parseCloseBracket();
    endContext();

    return STNodeFactory.createListMatchPatternNode(openBracketToken, matchPatternListNode, restMatchPattern,
            closeBracketToken);
}

public boolean isEndOfListMatchPattern() {
    switch (peek().kind) {
        case CLOSE_BRACKET_TOKEN:
        case EOF_TOKEN:
            return true;
        default:
            return false;
    }
}

private STNode parseListMatchPatternMember() {
    STNode nextToken = peek();
    switch (nextToken.kind) {
        case ELLIPSIS_TOKEN:
            return parseRestMatchPattern();
        default:
            // Any other member is an ordinary match pattern.
            return parseMatchPattern();
    }
}

/**
 * Parse rest match pattern.
 * <p>
 * <code>
 * rest-match-pattern := ... var variable-name
 * </code>
 * </p>
 *
 * @return Parsed rest match pattern node
 */
private STNode parseRestMatchPattern() {
    startContext(ParserRuleContext.REST_MATCH_PATTERN);
    STNode ellipsisToken = parseEllipsis();
    STNode varKeywordToken = parseVarKeyword();
    STNode variableName = parseVariableName();
    endContext();

    STSimpleNameReferenceNode simpleNameReferenceNode =
            (STSimpleNameReferenceNode) STNodeFactory.createSimpleNameReferenceNode(variableName);
    return STNodeFactory.createRestMatchPatternNode(ellipsisToken, varKeywordToken, simpleNameReferenceNode);
}

// Returns the comma separator, or null when the member list has ended.
private STNode parseListMatchPatternMemberRhs() {
    switch (peek().kind) {
        case COMMA_TOKEN:
            return parseComma();
        case CLOSE_BRACKET_TOKEN:
        case EOF_TOKEN:
            return null;
        default:
            recover(peek(), ParserRuleContext.LIST_MATCH_PATTERN_MEMBER_RHS);
            return parseListMatchPatternMemberRhs();
    }
}

/**
 * Parse mapping match pattern.
 * <p>
 * mapping-match-pattern := { field-match-patterns }
 * <br/>
 * field-match-patterns := field-match-pattern (, field-match-pattern)* [, rest-match-pattern]
 *                         | [ rest-match-pattern ]
 * <br/>
 * field-match-pattern := field-name : match-pattern
 * <br/>
 * rest-match-pattern := ... var variable-name
 * </p>
 *
 * @return Parsed Node.
 */
private STNode parseMappingMatchPattern() {
    startContext(ParserRuleContext.MAPPING_MATCH_PATTERN);
    STNode openBraceToken = parseOpenBrace();
    List<STNode> fieldMatchPatternList = new ArrayList<>();
    STNode restMatchPattern = null;
    boolean isEndOfFields = false;

    while (!isEndOfMappingMatchPattern()) {
        STNode fieldMatchPatternMember = parseFieldMatchPatternMember();
        if (fieldMatchPatternMember.kind == SyntaxKind.REST_MATCH_PATTERN) {
            // The rest pattern must be last; stop collecting regular fields.
            restMatchPattern = fieldMatchPatternMember;
            isEndOfFields = true;
            break;
        }
        fieldMatchPatternList.add(fieldMatchPatternMember);
        STNode fieldMatchPatternRhs = parseFieldMatchPatternRhs();

        if (fieldMatchPatternRhs != null) {
            fieldMatchPatternList.add(fieldMatchPatternRhs);
        } else {
            break;
        }
    }

    // Fetch the separator after the rest pattern (returns null at `}`/EOF,
    // so this is a no-op when the loop ended normally). Any fields that
    // follow the rest pattern are invalid and are folded into it as
    // invalid-node minutiae with a diagnostic.
    STNode fieldMatchPatternRhs = parseFieldMatchPatternRhs();
    while (isEndOfFields && fieldMatchPatternRhs != null) {
        STNode invalidField = parseFieldMatchPatternMember();
        restMatchPattern =
                SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(restMatchPattern, fieldMatchPatternRhs);
        restMatchPattern = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(restMatchPattern, invalidField);
        restMatchPattern = SyntaxErrors.addDiagnostic(restMatchPattern,
                DiagnosticErrorCode.ERROR_MORE_FIELD_MATCH_PATTERNS_AFTER_REST_FIELD);
        fieldMatchPatternRhs = parseFieldMatchPatternRhs();
    }

    if (restMatchPattern == null) {
        restMatchPattern = STNodeFactory.createEmptyNode();
    }

    STNode fieldMatchPatterns = STNodeFactory.createNodeList(fieldMatchPatternList);
    STNode closeBraceToken = parseCloseBrace();
    endContext();

    return STNodeFactory.createMappingMatchPatternNode(openBraceToken, fieldMatchPatterns, restMatchPattern,
            closeBraceToken);
}

private STNode parseFieldMatchPatternMember() {
    switch (peek().kind) {
        case IDENTIFIER_TOKEN:
            return parseFieldMatchPattern();
        case ELLIPSIS_TOKEN:
            return parseRestMatchPattern();
        default:
            recover(peek(), ParserRuleContext.FIELD_MATCH_PATTERN_MEMBER);
            return parseFieldMatchPatternMember();
    }
}

/**
 * Parse field match pattern.
 * <p>
 * field-match-pattern := field-name : match-pattern
 * </p>
 *
 * @return Parsed field match pattern node
 */
public STNode parseFieldMatchPattern() {
    STNode fieldNameNode = parseVariableName();
    STNode colonToken = parseColon();
    STNode matchPattern = parseMatchPattern();
    return STNodeFactory.createFieldMatchPatternNode(fieldNameNode, colonToken, matchPattern);
}

public boolean isEndOfMappingMatchPattern() {
    switch (peek().kind) {
        case CLOSE_BRACE_TOKEN:
        case EOF_TOKEN:
            return true;
        default:
            return false;
    }
}

// Returns the comma separator, or null when the field list has ended.
private STNode parseFieldMatchPatternRhs() {
    switch (peek().kind) {
        case COMMA_TOKEN:
            return parseComma();
        case CLOSE_BRACE_TOKEN:
        case EOF_TOKEN:
            return null;
        default:
            recover(peek(), ParserRuleContext.FIELD_MATCH_PATTERN_MEMBER_RHS);
            return parseFieldMatchPatternRhs();
    }
}

private STNode parseFunctionalMatchPatternOrConsPattern(STNode typeRefOrConstExpr) {
    return parseFunctionalMatchPatternOrConsPattern(peek().kind, typeRefOrConstExpr);
}

// Disambiguates an identifier-started pattern: `(` makes it a functional
// match pattern; a pattern terminator leaves it as a const-pattern.
private STNode parseFunctionalMatchPatternOrConsPattern(SyntaxKind nextToken, STNode typeRefOrConstExpr) {
    switch (nextToken) {
        case OPEN_PAREN_TOKEN:
            return parseFunctionalMatchPattern(typeRefOrConstExpr);
        default:
            if (isMatchPatternEnd(peek().kind)) {
                return typeRefOrConstExpr;
            }
            Solution solution = recover(peek(), ParserRuleContext.FUNC_MATCH_PATTERN_OR_CONST_PATTERN,
                    typeRefOrConstExpr);
            return parseFunctionalMatchPatternOrConsPattern(solution.tokenKind, typeRefOrConstExpr);
    }
}

private boolean isMatchPatternEnd(SyntaxKind tokenKind) {
    switch (tokenKind) {
        case RIGHT_DOUBLE_ARROW_TOKEN:
        case COMMA_TOKEN:
        case CLOSE_BRACE_TOKEN:
        case CLOSE_BRACKET_TOKEN:
        case CLOSE_PAREN_TOKEN:
        case PIPE_TOKEN:
        case IF_KEYWORD:
        case EOF_TOKEN:
            return true;
        default:
            return false;
    }
}

/**
 * Parse functional match pattern.
 * <p>
 * functional-match-pattern := functionally-constructible-type-reference ( arg-list-match-pattern )
 * <br/>
 * functionally-constructible-type-reference := error | type-reference
 * <br/>
 * type-reference := identifier | qualified-identifier
 * <br/>
 * arg-list-match-pattern := positional-arg-match-patterns [, other-arg-match-patterns]
 *                           | other-arg-match-patterns
 * </p>
 *
 * @return Parsed functional match pattern node.
 */
private STNode parseFunctionalMatchPattern(STNode typeRef) {
    startContext(ParserRuleContext.FUNCTIONAL_MATCH_PATTERN);
    STNode openParenthesisToken = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS);
    STNode argListMatchPatternNode = parseArgListMatchPatterns();
    STNode closeParenthesisToken = parseCloseParenthesis();
    endContext();
    return STNodeFactory.createFunctionalMatchPatternNode(typeRef, openParenthesisToken, argListMatchPatternNode,
            closeParenthesisToken);
}

private STNode parseArgListMatchPatterns() {
    List<STNode> argListMatchPatterns = new ArrayList<>();
    // Tracks the kind of the last valid arg, to enforce the ordering
    // positional -> named -> rest. IDENTIFIER_TOKEN is the neutral start value.
    SyntaxKind lastValidArgKind = SyntaxKind.IDENTIFIER_TOKEN;

    while (!isEndOfFunctionalMatchPattern()) {
        STNode currentArg = parseArgMatchPattern();
        DiagnosticErrorCode errorCode = validateArgMatchPatternOrder(lastValidArgKind, currentArg.kind);
        if (errorCode == null) {
            argListMatchPatterns.add(currentArg);
            lastValidArgKind = currentArg.kind;
        } else {
            // Out-of-order arg: fold it into the previous list entry as
            // invalid-node minutiae with the ordering diagnostic.
            updateLastNodeInListWithInvalidNode(argListMatchPatterns, currentArg, errorCode);
        }

        STNode argRhs = parseArgMatchPatternRhs();

        if (argRhs == null) {
            break;
        }

        if (errorCode == null) {
            argListMatchPatterns.add(argRhs);
        } else {
            // Separator after an invalid arg is also folded in (no extra diagnostic).
            updateLastNodeInListWithInvalidNode(argListMatchPatterns, argRhs, null);
        }
    }

    return STNodeFactory.createNodeList(argListMatchPatterns);
}

private boolean isEndOfFunctionalMatchPattern() {
    switch (peek().kind) {
        case CLOSE_PAREN_TOKEN:
        case EOF_TOKEN:
            return true;
        default:
            return false;
    }
}

/**
 * Parse arg match patterns.
 * <code>
 * arg-match-pattern := match-pattern |   named-arg-match-pattern | rest-match-pattern
 * </code>
 * <br/>
 * <br/>
 *
 * @return parsed arg match pattern node.
 */
private STNode parseArgMatchPattern() {
    switch (peek().kind) {
        case IDENTIFIER_TOKEN:
            // May be positional (const/functional) or named (`name = pattern`).
            return parseNamedOrPositionalArgMatchPattern();
        case ELLIPSIS_TOKEN:
            return parseRestMatchPattern();
        // All remaining match-pattern starts are positional args.
        case OPEN_PAREN_TOKEN:
        case NULL_KEYWORD:
        case TRUE_KEYWORD:
        case FALSE_KEYWORD:
        case PLUS_TOKEN:
        case MINUS_TOKEN:
        case DECIMAL_INTEGER_LITERAL_TOKEN:
        case HEX_INTEGER_LITERAL_TOKEN:
        case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
        case HEX_FLOATING_POINT_LITERAL_TOKEN:
        case STRING_LITERAL_TOKEN:
        case VAR_KEYWORD:
        case OPEN_BRACKET_TOKEN:
        case OPEN_BRACE_TOKEN:
        case ERROR_KEYWORD:
            return parseMatchPattern();
        default:
            recover(peek(), ParserRuleContext.ARG_MATCH_PATTERN);
            return parseArgMatchPattern();
    }
}

// Disambiguates after an identifier: `=` => named arg, `(` => nested
// functional pattern, otherwise the identifier itself is the positional arg.
private STNode parseNamedOrPositionalArgMatchPattern() {
    STNode identifier = parseIdentifier(ParserRuleContext.MATCH_PATTERN_START);
    switch (peek().kind) {
        case EQUAL_TOKEN:
            return parseNamedArgMatchPattern(identifier);
        case OPEN_PAREN_TOKEN:
            return parseFunctionalMatchPattern(identifier);
        case COMMA_TOKEN:
        case CLOSE_PAREN_TOKEN:
        default:
            return identifier;
    }
}

/**
 * Parses the next named arg match pattern.
 * <br/>
 * <code>named-arg-match-pattern := arg-name = match-pattern</code>
 * <br/>
 * <br/>
 *
 * @return arg match pattern list node added the new arg match pattern
 */
private STNode parseNamedArgMatchPattern(STNode identifier) {
    startContext(ParserRuleContext.NAMED_ARG_MATCH_PATTERN);
    STNode equalToken = parseAssignOp();
    STNode matchPattern = parseMatchPattern();
    endContext();
    return STNodeFactory.createNamedArgMatchPatternNode(identifier, equalToken, matchPattern);
}

// Returns the comma separator, or null when the arg list has ended.
private STNode parseArgMatchPatternRhs() {
    switch (peek().kind) {
        case COMMA_TOKEN:
            return parseComma();
        case CLOSE_PAREN_TOKEN:
        case EOF_TOKEN:
            return null;
        default:
            recover(peek(), ParserRuleContext.ARG_MATCH_PATTERN_RHS);
            return parseArgMatchPatternRhs();
    }
}

// Enforces arg ordering (positional before named, rest last).
// Returns null when the current arg may follow the previous one,
// otherwise the diagnostic to report.
private DiagnosticErrorCode validateArgMatchPatternOrder(SyntaxKind prevArgKind, SyntaxKind currentArgKind) {
    DiagnosticErrorCode errorCode = null;
    switch (prevArgKind) {
        case NAMED_ARG_MATCH_PATTERN:
            // Only named or rest args may follow a named arg.
            if (currentArgKind != SyntaxKind.NAMED_ARG_MATCH_PATTERN &&
                    currentArgKind != SyntaxKind.REST_MATCH_PATTERN) {
                errorCode = DiagnosticErrorCode.ERROR_NAMED_ARG_FOLLOWED_BY_POSITIONAL_ARG;
            }
            break;
        case REST_MATCH_PATTERN:
            // Nothing may follow the rest arg.
            errorCode = DiagnosticErrorCode.ERROR_ARG_FOLLOWED_BY_REST_ARG;
            break;
        default:
            break;
    }
    return errorCode;
}

/**
 * Parse markdown documentation.
 *
 * @return markdown documentation node
 */
private STNode parseMarkdownDocumentation() {
    List<STNode> markdownDocLineList = new ArrayList<>();

    // Consecutive documentation-string tokens form one markdown doc node.
    STToken nextToken = peek();
    while (nextToken.kind == SyntaxKind.DOCUMENTATION_STRING) {
        STToken documentationString = consume();
        STNode parsedDocLines = parseDocumentationString(documentationString);
        appendParsedDocumentationLines(markdownDocLineList, parsedDocLines);
        nextToken = peek();
    }

    STNode markdownDocLines = STNodeFactory.createNodeList(markdownDocLineList);
    return STNodeFactory.createMarkdownDocumentationNode(markdownDocLines);
}

/**
 * Parse documentation string.
 *
 * @return markdown documentation line list node
 */
private STNode parseDocumentationString(STToken documentationStringToken) {
    // Re-lex the documentation token's text with a dedicated documentation
    // lexer/parser, carrying over the token's leading trivia.
    List<STNode> leadingTriviaList = getLeadingTriviaList(documentationStringToken.leadingMinutiae());
    CharReader charReader = CharReader.from(documentationStringToken.text());
    DocumentationLexer documentationLexer = new DocumentationLexer(charReader, leadingTriviaList);
    AbstractTokenReader tokenReader = new TokenReader(documentationLexer);
    DocumentationParser documentationParser = new DocumentationParser(tokenReader);
    return documentationParser.parse();
}

// Copies the children of a minutiae node into a plain list.
private List<STNode> getLeadingTriviaList(STNode leadingMinutiaeNode) {
    List<STNode> leadingTriviaList = new ArrayList<>();
    int bucketCount = leadingMinutiaeNode.bucketCount();
    for (int i = 0; i < bucketCount; i++) {
        leadingTriviaList.add(leadingMinutiaeNode.childInBucket(i));
    }
    return leadingTriviaList;
}

// Appends every parsed documentation line to the accumulating list.
private void appendParsedDocumentationLines(List<STNode> markdownDocLineList, STNode parsedDocLines) {
    int bucketCount = parsedDocLines.bucketCount();
    for (int i = 0; i < bucketCount; i++) {
        STNode markdownDocLine = parsedDocLines.childInBucket(i);
        markdownDocLineList.add(markdownDocLine);
    }
}

/**
 * Parse any statement that starts with a token that has ambiguity between being
 * a type-desc or an expression.
 *
 * @param annots Annotations
 * @return Statement node
 */
private STNode parseStmtStartsWithTypeOrExpr(STNode annots) {
    startContext(ParserRuleContext.AMBIGUOUS_STMT);
    STNode typeOrExpr = parseTypedBindingPatternOrExpr(true);
    return parseStmtStartsWithTypedBPOrExprRhs(annots, typeOrExpr);
}

private STNode parseStmtStartsWithTypedBPOrExprRhs(STNode annots, STNode typedBindingPatternOrExpr) {
    if (typedBindingPatternOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) {
        // Ambiguity resolved to a typed binding pattern: continue as a
        // var-decl statement (no `final` keyword at this point).
        STNode finalKeyword = STNodeFactory.createEmptyNode();
        switchContext(ParserRuleContext.VAR_DECL_STMT);
        return parseVarDeclRhs(annots, finalKeyword, typedBindingPatternOrExpr, false);
    }

    // Otherwise treat it as an expression statement.
    STNode expr = getExpression(typedBindingPatternOrExpr);
    expr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, expr, false, true);
    return parseStatementStartWithExprRhs(expr);
}

private STNode parseTypedBindingPatternOrExpr(boolean allowAssignment) {
    STToken nextToken = peek();
    STNode typeOrExpr;
    switch (nextToken.kind) {
        case OPEN_PAREN_TOKEN:
            return parseTypedBPOrExprStartsWithOpenParenthesis();
        case FUNCTION_KEYWORD:
            return parseAnonFuncExprOrTypedBPWithFuncType();
        case IDENTIFIER_TOKEN:
            typeOrExpr = parseQualifiedIdentifier(ParserRuleContext.TYPE_NAME_OR_VAR_NAME);
            return parseTypedBindingPatternOrExprRhs(typeOrExpr, allowAssignment);
        case OPEN_BRACKET_TOKEN:
            typeOrExpr = parseTypedDescOrExprStartsWithOpenBracket();
            return parseTypedBindingPatternOrExprRhs(typeOrExpr, allowAssignment);

        // Can be a singleton type or expression.
        case DECIMAL_INTEGER_LITERAL_TOKEN:
        case HEX_INTEGER_LITERAL_TOKEN:
        case STRING_LITERAL_TOKEN:
        case NULL_KEYWORD:
        case TRUE_KEYWORD:
        case FALSE_KEYWORD:
        case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
        case HEX_FLOATING_POINT_LITERAL_TOKEN:
            STNode basicLiteral = parseBasicLiteral();
            return parseTypedBindingPatternOrExprRhs(basicLiteral, allowAssignment);
        default:
            if (isValidExpressionStart(nextToken.kind, 1)) {
                return parseActionOrExpressionInLhs(null);
            }
            return parseTypedBindingPattern(ParserRuleContext.VAR_DECL_STMT);
    }
}

/**
 * Parse the component
 * after the ambiguous starting node. Ambiguous node could be either an expr
 * or a type-desc. The component followed by this ambiguous node could be the binding-pattern or
 * the expression-rhs.
 *
 * @param typeOrExpr      Type desc or the expression
 * @param allowAssignment Flag indicating whether to allow assignment. i.e.: whether this is a
 *                        valid lvalue expression
 * @return Typed-binding-pattern node or an expression node
 */
private STNode parseTypedBindingPatternOrExprRhs(STNode typeOrExpr, boolean allowAssignment) {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case PIPE_TOKEN:
            // "a | b = ..." — the pipe belongs to the expression, not to a union type.
            STToken nextNextToken = peek(2);
            if (nextNextToken.kind == SyntaxKind.EQUAL_TOKEN) {
                return typeOrExpr;
            }
            STNode pipe = parsePipeToken();
            STNode rhsTypedBPOrExpr = parseTypedBindingPatternOrExpr(allowAssignment);
            if (rhsTypedBPOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) {
                // The rhs resolved the ambiguity: fold the lhs into a union type-desc.
                STTypedBindingPatternNode typedBP = (STTypedBindingPatternNode) rhsTypedBPOrExpr;
                typeOrExpr = getTypeDescFromExpr(typeOrExpr);
                STNode newTypeDesc = createUnionTypeDesc(typeOrExpr, pipe, typedBP.typeDescriptor);
                return STNodeFactory.createTypedBindingPatternNode(newTypeDesc, typedBP.bindingPattern);
            }
            return STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, typeOrExpr, pipe,
                    rhsTypedBPOrExpr);
        case BITWISE_AND_TOKEN:
            nextNextToken = peek(2);
            if (nextNextToken.kind == SyntaxKind.EQUAL_TOKEN) {
                return typeOrExpr;
            }
            STNode ampersand = parseBinaryOperator();
            rhsTypedBPOrExpr = parseTypedBindingPatternOrExpr(allowAssignment);
            if (rhsTypedBPOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) {
                // Same resolution as the pipe case, but for an intersection type-desc.
                STTypedBindingPatternNode typedBP = (STTypedBindingPatternNode) rhsTypedBPOrExpr;
                typeOrExpr = getTypeDescFromExpr(typeOrExpr);
                STNode newTypeDesc = createIntersectionTypeDesc(typeOrExpr, ampersand, typedBP.typeDescriptor);
                return STNodeFactory.createTypedBindingPatternNode(newTypeDesc, typedBP.bindingPattern);
            }
            return STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, typeOrExpr, ampersand,
                    rhsTypedBPOrExpr);
        case SEMICOLON_TOKEN:
            if (isDefiniteExpr(typeOrExpr.kind)) {
                return typeOrExpr;
            }
            if (isDefiniteTypeDesc(typeOrExpr.kind) || !isAllBasicLiterals(typeOrExpr)) {
                // Treat as a type-desc and continue as a typed-binding-pattern.
                STNode typeDesc = getTypeDescFromExpr(typeOrExpr);
                return parseTypeBindingPatternStartsWithAmbiguousNode(typeDesc);
            }
            return typeOrExpr;
        case IDENTIFIER_TOKEN:
        case QUESTION_MARK_TOKEN:
            if (isAmbiguous(typeOrExpr) || isDefiniteTypeDesc(typeOrExpr.kind)) {
                // A following identifier/'?' means the prefix was a type-desc.
                STNode typeDesc = getTypeDescFromExpr(typeOrExpr);
                return parseTypeBindingPatternStartsWithAmbiguousNode(typeDesc);
            }
            return typeOrExpr;
        case EQUAL_TOKEN:
            return typeOrExpr;
        case OPEN_BRACKET_TOKEN:
            return parseTypedBindingPatternOrMemberAccess(typeOrExpr, false, allowAssignment,
                    ParserRuleContext.AMBIGUOUS_STMT);
        case OPEN_BRACE_TOKEN:
        case ERROR_KEYWORD:
            STNode typeDesc = getTypeDescFromExpr(typeOrExpr);
            return parseTypeBindingPatternStartsWithAmbiguousNode(typeDesc);
        default:
            if (isCompoundBinaryOperator(nextToken.kind)) {
                return typeOrExpr;
            }
            if (isValidExprRhsStart(nextToken.kind, typeOrExpr.kind)) {
                return typeOrExpr;
            }
            // Token recovery: insert/remove a token, then retry this same rule.
            STToken token = peek();
            recover(token, ParserRuleContext.BINDING_PATTERN_OR_EXPR_RHS, typeOrExpr, allowAssignment);
            return parseTypedBindingPatternOrExprRhs(typeOrExpr, allowAssignment);
    }
}

/**
 * Continue an ambiguous node that has been resolved to a type-desc: finish the
 * (possibly complex) type descriptor and then parse the binding pattern after it.
 *
 * @param typeDesc Type descriptor derived from the ambiguous prefix
 * @return Typed-binding-pattern node
 */
private STNode parseTypeBindingPatternStartsWithAmbiguousNode(STNode typeDesc) {
    startContext(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN);
    typeDesc = parseComplexTypeDescriptor(typeDesc, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, false);
    endContext();
    return parseTypedBindingPatternTypeRhs(typeDesc, ParserRuleContext.VAR_DECL_STMT);
}

/**
 * Parse a typed-binding-pattern or an expression that starts with an open parenthesis.
 *
 * @return Typed-binding-pattern node or expression node
 */
private STNode parseTypedBPOrExprStartsWithOpenParenthesis() {
    STNode exprOrTypeDesc = parseTypedDescOrExprStartsWithOpenParenthesis();
    if (isDefiniteTypeDesc(exprOrTypeDesc.kind)) {
        return parseTypeBindingPatternStartsWithAmbiguousNode(exprOrTypeDesc);
    }
    return parseTypedBindingPatternOrExprRhs(exprOrTypeDesc, false);
}

/**
 * Whether the kind is unambiguously a type descriptor.
 * Relies on RECORD_TYPE_DESC..SINGLETON_TYPE_DESC being a contiguous range
 * in the SyntaxKind enum declaration order.
 */
private boolean isDefiniteTypeDesc(SyntaxKind kind) {
    return kind.compareTo(SyntaxKind.RECORD_TYPE_DESC) >= 0 && kind.compareTo(SyntaxKind.SINGLETON_TYPE_DESC) <= 0;
}

/**
 * Whether the kind is unambiguously an expression. Name references are excluded
 * because they could still resolve to a type reference.
 * Relies on BINARY_EXPRESSION..XML_ATOMIC_NAME_PATTERN being a contiguous
 * range in the SyntaxKind enum declaration order.
 */
private boolean isDefiniteExpr(SyntaxKind kind) {
    if (kind == SyntaxKind.QUALIFIED_NAME_REFERENCE || kind == SyntaxKind.SIMPLE_NAME_REFERENCE) {
        return false;
    }
    return kind.compareTo(SyntaxKind.BINARY_EXPRESSION) >= 0 &&
            kind.compareTo(SyntaxKind.XML_ATOMIC_NAME_PATTERN) <= 0;
}

/**
 * Parse type or expression that starts with open parenthesis. Possible options are:
 * 1) () - nil type-desc or nil-literal
 * 2) (T) - Parenthesized type-desc
 * 3) (expr) - Parenthesized expression
 * 4) (param, param, ..) - Anon function params
 *
 * @return Type-desc or expression node
 */
private STNode parseTypedDescOrExprStartsWithOpenParenthesis() {
    STNode openParen = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS);
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.CLOSE_PAREN_TOKEN) {
        STNode closeParen = parseCloseParenthesis();
        return parseTypeOrExprStartWithEmptyParenthesis(openParen, closeParen);
    }
    STNode typeOrExpr = parseTypeDescOrExpr();
    if (isAction(typeOrExpr)) {
        STNode closeParen = parseCloseParenthesis();
        return STNodeFactory.createBracedExpressionNode(SyntaxKind.BRACED_ACTION, openParen, typeOrExpr,
                closeParen);
    }
    if (isExpression(typeOrExpr.kind)) {
        // Could still be "(expr)" or the params of an implicit anon-func.
        startContext(ParserRuleContext.BRACED_EXPR_OR_ANON_FUNC_PARAMS);
        return parseBracedExprOrAnonFuncParamRhs(openParen, typeOrExpr, false);
    }
    STNode closeParen = parseCloseParenthesis();
    return STNodeFactory.createParenthesisedTypeDescriptorNode(openParen, typeOrExpr, closeParen);
}

/**
 * Parse type-desc or expression. This method does not handle binding patterns.
 * @return Type-desc node or expression node
 */
private STNode parseTypeDescOrExpr() {
    STToken nextToken = peek();
    STNode typeOrExpr;
    switch (nextToken.kind) {
        case OPEN_PAREN_TOKEN:
            typeOrExpr = parseTypedDescOrExprStartsWithOpenParenthesis();
            break;
        case FUNCTION_KEYWORD:
            typeOrExpr = parseAnonFuncExprOrFuncTypeDesc();
            break;
        case IDENTIFIER_TOKEN:
            typeOrExpr = parseQualifiedIdentifier(ParserRuleContext.TYPE_NAME_OR_VAR_NAME);
            return parseTypeDescOrExprRhs(typeOrExpr);
        case OPEN_BRACKET_TOKEN:
            typeOrExpr = parseTypedDescOrExprStartsWithOpenBracket();
            break;
        // Basic literals may start a singleton type-desc or an expression.
        case DECIMAL_INTEGER_LITERAL_TOKEN:
        case HEX_INTEGER_LITERAL_TOKEN:
        case STRING_LITERAL_TOKEN:
        case NULL_KEYWORD:
        case TRUE_KEYWORD:
        case FALSE_KEYWORD:
        case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
        case HEX_FLOATING_POINT_LITERAL_TOKEN:
            STNode basicLiteral = parseBasicLiteral();
            return parseTypeDescOrExprRhs(basicLiteral);
        default:
            if (isValidExpressionStart(nextToken.kind, 1)) {
                return parseActionOrExpressionInLhs(null);
            }
            return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN);
    }
    // The break-ing cases fall out here: a definite type-desc continues as a
    // (possibly complex) type descriptor; otherwise keep resolving via the rhs.
    if (isDefiniteTypeDesc(typeOrExpr.kind)) {
        return parseComplexTypeDescriptor(typeOrExpr, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);
    }
    return parseTypeDescOrExprRhs(typeOrExpr);
}

/**
 * Whether the kind denotes an expression node. Uses the contiguous
 * BINARY_EXPRESSION..XML_ATOMIC_NAME_PATTERN range of the SyntaxKind enum,
 * plus the literal kinds listed explicitly.
 */
private boolean isExpression(SyntaxKind kind) {
    switch (kind) {
        case NUMERIC_LITERAL:
        case STRING_LITERAL_TOKEN:
        case NIL_LITERAL:
        case NULL_LITERAL:
        case BOOLEAN_LITERAL:
            return true;
        default:
            return kind.compareTo(SyntaxKind.BINARY_EXPRESSION) >= 0 &&
                    kind.compareTo(SyntaxKind.XML_ATOMIC_NAME_PATTERN) <= 0;
    }
}

/**
 * Parse statement that starts with an empty parenthesis.
 * Empty parenthesis can be
 * 1) Nil literal
 * 2) Nil type-desc
 * 3) Anon-function params
 *
 * @param openParen  Open parenthesis
 * @param closeParen Close parenthesis
 * @return Parsed node
 */
private STNode parseTypeOrExprStartWithEmptyParenthesis(STNode openParen, STNode closeParen) {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case RIGHT_DOUBLE_ARROW_TOKEN:
            // "() => ..." — the empty parens are the params of an implicit anon-func.
            STNode params = STNodeFactory.createEmptyNodeList();
            STNode anonFuncParam =
                    STNodeFactory.createImplicitAnonymousFunctionParameters(openParen, params, closeParen);
            endContext();
            return anonFuncParam;
        default:
            return STNodeFactory.createNilLiteralNode(openParen, closeParen);
    }
}

/**
 * Parse an anon-func-expr, or a typed-binding-pattern whose type is a function-type-desc,
 * both of which start with the 'function' keyword.
 *
 * @return Anon-func expression node or typed-binding-pattern node
 */
private STNode parseAnonFuncExprOrTypedBPWithFuncType() {
    STNode exprOrTypeDesc = parseAnonFuncExprOrFuncTypeDesc();
    if (isAction(exprOrTypeDesc) || isExpression(exprOrTypeDesc.kind)) {
        return exprOrTypeDesc;
    }
    return parseTypedBindingPatternTypeRhs(exprOrTypeDesc, ParserRuleContext.VAR_DECL_STMT);
}

/**
 * Parse anon-func-expr or function-type-desc, by resolving the ambiguity.
 * @return Anon-func-expr or function-type-desc
 */
private STNode parseAnonFuncExprOrFuncTypeDesc() {
    startContext(ParserRuleContext.FUNC_TYPE_DESC_OR_ANON_FUNC);
    STNode functionKeyword = parseFunctionKeyword();
    STNode funcSignature = parseFuncSignature(true);
    endContext();
    switch (peek().kind) {
        case OPEN_BRACE_TOKEN:
        case RIGHT_DOUBLE_ARROW_TOKEN:
            // A body follows: this is an explicit anonymous function expression.
            switchContext(ParserRuleContext.EXPRESSION_STATEMENT);
            startContext(ParserRuleContext.ANON_FUNC_EXPRESSION);
            funcSignature = validateAndGetFuncParams((STFunctionSignatureNode) funcSignature);
            STNode funcBody = parseAnonFuncBody(false);
            STNode annots = STNodeFactory.createEmptyNodeList();
            STNode anonFunc = STNodeFactory.createExplicitAnonymousFunctionExpressionNode(annots, functionKeyword,
                    funcSignature, funcBody);
            return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, anonFunc, false, true);
        case IDENTIFIER_TOKEN:
            // Intentional fall-through: an identifier after the signature means this
            // was a function-type-desc followed by a binding pattern.
        default:
            switchContext(ParserRuleContext.VAR_DECL_STMT);
            STNode funcTypeDesc = STNodeFactory.createFunctionTypeDescriptorNode(functionKeyword, funcSignature);
            return parseComplexTypeDescriptor(funcTypeDesc, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN,
                    true);
    }
}

/**
 * Resolve the continuation of an ambiguous type-desc/expression prefix,
 * without considering binding patterns.
 *
 * @param typeOrExpr Ambiguous prefix node
 * @return Type-desc node or expression node
 */
private STNode parseTypeDescOrExprRhs(STNode typeOrExpr) {
    STToken nextToken = peek();
    STNode typeDesc;
    switch (nextToken.kind) {
        case PIPE_TOKEN:
            // "a | b = ..." — pipe belongs to the expression, not to a union type.
            STToken nextNextToken = peek(2);
            if (nextNextToken.kind == SyntaxKind.EQUAL_TOKEN) {
                return typeOrExpr;
            }
            STNode pipe = parsePipeToken();
            STNode rhsTypeDescOrExpr = parseTypeDescOrExpr();
            if (isExpression(rhsTypeDescOrExpr.kind)) {
                return STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, typeOrExpr, pipe,
                        rhsTypeDescOrExpr);
            }
            typeDesc = getTypeDescFromExpr(typeOrExpr);
            rhsTypeDescOrExpr = getTypeDescFromExpr(rhsTypeDescOrExpr);
            return createUnionTypeDesc(typeDesc, pipe, rhsTypeDescOrExpr);
        case BITWISE_AND_TOKEN:
            nextNextToken = peek(2);
            if (nextNextToken.kind == SyntaxKind.EQUAL_TOKEN) {
                return typeOrExpr;
            }
            STNode ampersand = parseBinaryOperator();
            rhsTypeDescOrExpr = parseTypeDescOrExpr();
            if (isExpression(rhsTypeDescOrExpr.kind)) {
                return STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, typeOrExpr, ampersand,
                        rhsTypeDescOrExpr);
            }
            typeDesc = getTypeDescFromExpr(typeOrExpr);
            rhsTypeDescOrExpr = getTypeDescFromExpr(rhsTypeDescOrExpr);
            return createIntersectionTypeDesc(typeDesc, ampersand, rhsTypeDescOrExpr);
        case IDENTIFIER_TOKEN:
        case QUESTION_MARK_TOKEN:
            // A following identifier/'?' resolves the prefix to a type-desc.
            startContext(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN);
            typeDesc = parseComplexTypeDescriptor(typeOrExpr, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN,
                    false);
            endContext();
            return typeDesc;
        case SEMICOLON_TOKEN:
            return getTypeDescFromExpr(typeOrExpr);
        case EQUAL_TOKEN:
        case CLOSE_PAREN_TOKEN:
        case CLOSE_BRACE_TOKEN:
        case CLOSE_BRACKET_TOKEN:
        case EOF_TOKEN:
        case COMMA_TOKEN:
            return typeOrExpr;
        case OPEN_BRACKET_TOKEN:
            return parseTypedBindingPatternOrMemberAccess(typeOrExpr, false, true,
                    ParserRuleContext.AMBIGUOUS_STMT);
        case ELLIPSIS_TOKEN:
            STNode ellipsis = parseEllipsis();
            typeOrExpr = getTypeDescFromExpr(typeOrExpr);
            return STNodeFactory.createRestDescriptorNode(typeOrExpr, ellipsis);
        default:
            if (isCompoundBinaryOperator(nextToken.kind)) {
                return typeOrExpr;
            }
            if (isValidExprRhsStart(nextToken.kind, typeOrExpr.kind)) {
                return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, typeOrExpr, false, false, false, false);
            }
            // Token recovery, then retry this same rule.
            recover(peek(), ParserRuleContext.TYPE_DESC_OR_EXPR_RHS, typeOrExpr);
            return parseTypeDescOrExprRhs(typeOrExpr);
    }
}

/**
 * Whether the node is still ambiguous between being a type-desc and an expression
 * (e.g. name references, literals usable as singleton types, bracketed lists).
 */
private boolean isAmbiguous(STNode node) {
    switch (node.kind) {
        case SIMPLE_NAME_REFERENCE:
        case QUALIFIED_NAME_REFERENCE:
        case NIL_LITERAL:
        case NULL_LITERAL:
        case NUMERIC_LITERAL:
        case STRING_LITERAL:
        case BOOLEAN_LITERAL:
        case BRACKETED_LIST:
            return true;
        case BINARY_EXPRESSION:
            STBinaryExpressionNode binaryExpr = (STBinaryExpressionNode) node;
            // NOTE(review): the second disjunct is unreachable — if the operator were
            // BITWISE_AND_TOKEN the first disjunct (!= PIPE_TOKEN) is already true.
            // As written, only '|' expressions are ever considered ambiguous; the
            // condition was presumably meant to be "&& != BITWISE_AND_TOKEN" to also
            // admit '&' (intersection types). TODO confirm against intended behavior.
            if (binaryExpr.operator.kind != SyntaxKind.PIPE_TOKEN ||
                    binaryExpr.operator.kind == SyntaxKind.BITWISE_AND_TOKEN) {
                return false;
            }
            return isAmbiguous(binaryExpr.lhsExpr) && isAmbiguous(binaryExpr.rhsExpr);
        case BRACED_EXPRESSION:
            return isAmbiguous(((STBracedExpressionNode) node).expression);
        case INDEXED_EXPRESSION:
            // "T[x]" is ambiguous only if both the container and every key are ambiguous.
            STIndexedExpressionNode indexExpr = (STIndexedExpressionNode) node;
            if (!isAmbiguous(indexExpr.containerExpression)) {
                return false;
            }
            STNode keys = indexExpr.keyExpression;
            for (int i = 0; i < keys.bucketCount(); i++) {
                STNode item = keys.childInBucket(i);
                if (item.kind == SyntaxKind.COMMA_TOKEN) {
                    continue;
                }
                if (!isAmbiguous(item)) {
                    return false;
                }
            }
            return true;
        default:
            return false;
    }
}

/**
 * Whether the node is composed entirely of basic literals (optionally signed),
 * i.e. could be interpreted as a singleton type.
 */
private boolean isAllBasicLiterals(STNode node) {
    switch (node.kind) {
        case NIL_LITERAL:
        case NULL_LITERAL:
        case NUMERIC_LITERAL:
        case STRING_LITERAL:
        case BOOLEAN_LITERAL:
            return true;
        case BINARY_EXPRESSION:
            STBinaryExpressionNode binaryExpr = (STBinaryExpressionNode) node;
            // NOTE(review): same unreachable-disjunct pattern as in isAmbiguous above.
            // Also note the operands are checked with isAmbiguous rather than
            // isAllBasicLiterals — looks inconsistent with this method's name;
            // TODO confirm whether that is intentional.
            if (binaryExpr.operator.kind != SyntaxKind.PIPE_TOKEN ||
                    binaryExpr.operator.kind == SyntaxKind.BITWISE_AND_TOKEN) {
                return false;
            }
            return isAmbiguous(binaryExpr.lhsExpr) && isAmbiguous(binaryExpr.rhsExpr);
        case BRACED_EXPRESSION:
            // NOTE(review): delegates to isAmbiguous, not isAllBasicLiterals — TODO confirm.
            return isAmbiguous(((STBracedExpressionNode) node).expression);
        case BRACKETED_LIST:
            STAmbiguousCollectionNode list = (STAmbiguousCollectionNode) node;
            for (STNode member : list.members) {
                if (member.kind == SyntaxKind.COMMA_TOKEN) {
                    continue;
                }
                if (!isAllBasicLiterals(member)) {
                    return false;
                }
            }
            return true;
        case UNARY_EXPRESSION:
            // Only "+literal" / "-literal" on a numeric literal qualifies.
            STUnaryExpressionNode unaryExpr = (STUnaryExpressionNode) node;
            if (unaryExpr.unaryOperator.kind != SyntaxKind.PLUS_TOKEN &&
                    unaryExpr.unaryOperator.kind != SyntaxKind.MINUS_TOKEN) {
                return false;
            }
            return isNumericLiteral(unaryExpr.expression);
        default:
            return false;
    }
}

/** Whether the node is a numeric literal. */
private boolean isNumericLiteral(STNode node) {
    switch (node.kind) {
        case NUMERIC_LITERAL:
            return true;
        default:
            return false;
    }
}

/**
 * Parse an ambiguous construct that starts with an open bracket; the members are
 * parsed as type-desc-or-expr and the whole is materialized as a tuple type-desc.
 *
 * @return Tuple type descriptor node
 */
private STNode parseTypedDescOrExprStartsWithOpenBracket() {
    startContext(ParserRuleContext.BRACKETED_LIST);
    STNode openBracket = parseOpenBracket();
    List<STNode> members = new ArrayList<>();
    STNode memberEnd;
    while (!isEndOfListConstructor(peek().kind)) {
        STNode expr = parseTypeDescOrExpr();
        members.add(expr);
        memberEnd = parseBracketedListMemberEnd();
        if (memberEnd == null) {
            break;
        }
        members.add(memberEnd);
    }
    STNode memberNodes = STNodeFactory.createNodeList(members);
    STNode closeBracket = parseCloseBracket();
    endContext();
    return STNodeFactory.createTupleTypeDescriptorNode(openBracket, memberNodes, closeBracket);
}

/**
 * Parse binding-patterns.
 * <p>
 * <code>
 * binding-pattern := capture-binding-pattern
 *                    | wildcard-binding-pattern
 *                    | list-binding-pattern
 *                    | mapping-binding-pattern
 *                    | functional-binding-pattern
 * <br/><br/>
 * <p>
 * capture-binding-pattern := variable-name
 * variable-name := identifier
 * <br/><br/>
 * <p>
 * wildcard-binding-pattern := _
 * list-binding-pattern := [ list-member-binding-patterns ]
 * <br/>
 * list-member-binding-patterns := binding-pattern (, binding-pattern)* [, rest-binding-pattern]
 *                                 | [ rest-binding-pattern ]
 * <br/><br/>
 * <p>
 * mapping-binding-pattern := { field-binding-patterns }
 * field-binding-patterns := field-binding-pattern (, field-binding-pattern)* [, rest-binding-pattern]
 *                           | [ rest-binding-pattern ]
 * <br/>
 * field-binding-pattern := field-name : binding-pattern | variable-name
 * <br/>
 * rest-binding-pattern := ...
 * variable-name
 * <p>
 * <br/><br/>
 * functional-binding-pattern := functionally-constructible-type-reference ( arg-list-binding-pattern )
 * <br/>
 * arg-list-binding-pattern := positional-arg-binding-patterns [, other-arg-binding-patterns]
 *                             | other-arg-binding-patterns
 * <br/>
 * positional-arg-binding-patterns := positional-arg-binding-pattern (, positional-arg-binding-pattern)*
 * <br/>
 * positional-arg-binding-pattern := binding-pattern
 * <br/>
 * other-arg-binding-patterns := named-arg-binding-patterns [, rest-binding-pattern]
 *                               | [rest-binding-pattern]
 * <br/>
 * named-arg-binding-patterns := named-arg-binding-pattern (, named-arg-binding-pattern)*
 * <br/>
 * named-arg-binding-pattern := arg-name = binding-pattern
 * </code>
 *
 * @return binding-pattern node
 */
private STNode parseBindingPattern() {
    switch (peek().kind) {
        case OPEN_BRACKET_TOKEN:
            return parseListBindingPattern();
        case IDENTIFIER_TOKEN:
            return parseBindingPatternStartsWithIdentifier();
        case OPEN_BRACE_TOKEN:
            return parseMappingBindingPattern();
        case ERROR_KEYWORD:
            return parseErrorBindingPattern();
        default:
            // Token recovery, then retry this same rule.
            recover(peek(), ParserRuleContext.BINDING_PATTERN);
            return parseBindingPattern();
    }
}

/**
 * Parse a binding pattern that starts with an identifier: either a
 * capture/wildcard binding pattern, or an error binding pattern whose
 * 'error' keyword is missing (identifier followed by an open paren).
 *
 * @return Binding pattern node
 */
private STNode parseBindingPatternStartsWithIdentifier() {
    STNode argNameOrBindingPattern =
            parseQualifiedIdentifier(ParserRuleContext.BINDING_PATTERN_STARTING_IDENTIFIER);
    STToken secondToken = peek();
    if (secondToken.kind == SyntaxKind.OPEN_PAREN_TOKEN) {
        // "Foo(...)" in binding-pattern position: treat as an error binding
        // pattern with a synthesized missing 'error' keyword.
        startContext(ParserRuleContext.ERROR_BINDING_PATTERN);
        STNode errorKeyword = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.ERROR_KEYWORD);
        return parseErrorBindingPattern(errorKeyword, argNameOrBindingPattern);
    }
    if (argNameOrBindingPattern.kind != SyntaxKind.SIMPLE_NAME_REFERENCE) {
        // Qualified names cannot be capture patterns: replace with a missing
        // identifier and attach the original as invalid-node minutiae.
        STNode identifier = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN);
        identifier = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(identifier, argNameOrBindingPattern);
        return createCaptureOrWildcardBP(identifier);
    }
    return createCaptureOrWildcardBP(((STSimpleNameReferenceNode) argNameOrBindingPattern).name);
}

/**
 * Create a capture binding pattern, or a wildcard binding pattern when the
 * variable name is '_'.
 *
 * @param varName Variable name token
 * @return Capture or wildcard binding pattern node
 */
private STNode createCaptureOrWildcardBP(STNode varName) {
    STNode bindingPattern;
    if (isWildcardBP(varName)) {
        bindingPattern = getWildcardBindingPattern(varName);
    } else {
        bindingPattern = STNodeFactory.createCaptureBindingPatternNode(varName);
    }
    return bindingPattern;
}

/**
 * Parse list-binding-patterns.
 * <p>
 * <code>
 * list-binding-pattern := [ list-member-binding-patterns ]
 * <br/>
 * list-member-binding-patterns := binding-pattern (, binding-pattern)* [, rest-binding-pattern]
 *                                 | [ rest-binding-pattern ]
 * </code>
 *
 * @return list-binding-pattern node
 */
private STNode parseListBindingPattern() {
    startContext(ParserRuleContext.LIST_BINDING_PATTERN);
    STNode openBracket = parseOpenBracket();
    List<STNode> bindingPatternsList = new ArrayList<>();
    STNode listBindingPattern = parseListBindingPattern(openBracket, bindingPatternsList);
    endContext();
    return listBindingPattern;
}

/**
 * Parse the members of a list binding pattern after the open bracket,
 * handling the empty "[]" case first.
 *
 * @param openBracket         Already-parsed open bracket
 * @param bindingPatternsList Accumulator for member nodes (mutated in place)
 * @return list-binding-pattern node
 */
private STNode parseListBindingPattern(STNode openBracket, List<STNode> bindingPatternsList) {
    if (isEndOfListBindingPattern(peek().kind) && bindingPatternsList.size() == 0) {
        // Empty list binding pattern: "[]".
        STNode closeBracket = parseCloseBracket();
        STNode restBindingPattern = STNodeFactory.createEmptyNode();
        STNode bindingPatternsNode = STNodeFactory.createNodeList(bindingPatternsList);
        return STNodeFactory.createListBindingPatternNode(openBracket, bindingPatternsNode, restBindingPattern,
                closeBracket);
    }
    STNode listBindingPatternMember = parseListBindingPatternMember();
    bindingPatternsList.add(listBindingPatternMember);
    STNode listBindingPattern = parseListBindingPattern(openBracket, listBindingPatternMember, bindingPatternsList);
    return listBindingPattern;
}

/**
 * Parse the remaining members of a list binding pattern, given the first member.
 * A rest-binding-pattern terminates the member list and is pulled out of the
 * member list into its own slot of the resulting node.
 *
 * @param openBracket     Already-parsed open bracket
 * @param firstMember     First member already parsed
 * @param bindingPatterns Accumulator holding members and separators (mutated in place)
 * @return list-binding-pattern node
 */
private STNode parseListBindingPattern(STNode openBracket, STNode firstMember, List<STNode> bindingPatterns) {
    STNode member = firstMember;
    STToken token = peek();
    STNode listBindingPatternRhs = null;
    while (!isEndOfListBindingPattern(token.kind) && member.kind != SyntaxKind.REST_BINDING_PATTERN) {
        listBindingPatternRhs = parseListBindingPatternMemberRhs();
        if (listBindingPatternRhs == null) {
            break;
        }
        bindingPatterns.add(listBindingPatternRhs);
        member = parseListBindingPatternMember();
        bindingPatterns.add(member);
        token = peek();
    }
    STNode restBindingPattern;
    if (member.kind == SyntaxKind.REST_BINDING_PATTERN) {
        // The rest pattern was appended last; move it out of the member list.
        restBindingPattern = bindingPatterns.remove(bindingPatterns.size() - 1);
    } else {
        restBindingPattern = STNodeFactory.createEmptyNode();
    }
    STNode closeBracket = parseCloseBracket();
    STNode bindingPatternsNode = STNodeFactory.createNodeList(bindingPatterns);
    return STNodeFactory.createListBindingPatternNode(openBracket, bindingPatternsNode, restBindingPattern,
            closeBracket);
}

/**
 * Parse the separator after a list-binding-pattern member: a comma to continue,
 * or null when the close bracket ends the list.
 *
 * @return Comma token, or null at the end of the list
 */
private STNode parseListBindingPatternMemberRhs() {
    switch (peek().kind) {
        case COMMA_TOKEN:
            return parseComma();
        case CLOSE_BRACKET_TOKEN:
            return null;
        default:
            recover(peek(), ParserRuleContext.LIST_BINDING_PATTERN_MEMBER_END);
            return parseListBindingPatternMemberRhs();
    }
}

/** Whether the token ends a list binding pattern. */
private boolean isEndOfListBindingPattern(SyntaxKind nextTokenKind) {
    switch (nextTokenKind) {
        case CLOSE_BRACKET_TOKEN:
        case EOF_TOKEN:
            return true;
        default:
            return false;
    }
}

/**
 * Parse list-binding-pattern member.
 * <p>
 * <code>
 * list-binding-pattern := [ list-member-binding-patterns ]
 * <br/>
 * list-member-binding-patterns := binding-pattern (, binding-pattern)* [, rest-binding-pattern]
 *                                 | [ rest-binding-pattern ]
 * </code>
 *
 * @return List binding pattern member
 */
private STNode parseListBindingPatternMember() {
    switch (peek().kind) {
        case ELLIPSIS_TOKEN:
            return parseRestBindingPattern();
        case OPEN_BRACKET_TOKEN:
        case IDENTIFIER_TOKEN:
        case OPEN_BRACE_TOKEN:
        case ERROR_KEYWORD:
            return parseBindingPattern();
        default:
            recover(peek(), ParserRuleContext.LIST_BINDING_PATTERN_MEMBER);
            return parseListBindingPatternMember();
    }
}

/**
 * Parse rest binding pattern.
 * <p>
 * <code>
 * rest-binding-pattern := ...
 * variable-name
 * </code>
 *
 * @return Rest binding pattern node
 */
private STNode parseRestBindingPattern() {
    startContext(ParserRuleContext.REST_BINDING_PATTERN);
    STNode ellipsis = parseEllipsis();
    STNode varName = parseVariableName();
    endContext();
    STSimpleNameReferenceNode simpleNameReferenceNode =
            (STSimpleNameReferenceNode) STNodeFactory.createSimpleNameReferenceNode(varName);
    return STNodeFactory.createRestBindingPatternNode(ellipsis, simpleNameReferenceNode);
}

/**
 * Parse Typed-binding-pattern.
 * <p>
 * <code>
 * typed-binding-pattern := inferable-type-descriptor binding-pattern
 * <br/><br/>
 * inferable-type-descriptor := type-descriptor | var
 * </code>
 *
 * @param context Parser rule context in which this typed-binding-pattern occurs
 * @return Typed binding pattern node
 */
private STNode parseTypedBindingPattern(ParserRuleContext context) {
    STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true, false);
    STNode typeBindingPattern = parseTypedBindingPatternTypeRhs(typeDesc, context);
    return typeBindingPattern;
}

/**
 * Parse mapping-binding-patterns.
 * <p>
 * <code>
 * mapping-binding-pattern := { field-binding-patterns }
 * <br/><br/>
 * field-binding-patterns := field-binding-pattern (, field-binding-pattern)* [, rest-binding-pattern]
 *                           | [ rest-binding-pattern ]
 * <br/><br/>
 * field-binding-pattern := field-name : binding-pattern | variable-name
 * </code>
 *
 * @return mapping-binding-pattern node
 */
private STNode parseMappingBindingPattern() {
    startContext(ParserRuleContext.MAPPING_BINDING_PATTERN);
    STNode openBrace = parseOpenBrace();
    STToken token = peek();
    if (isEndOfMappingBindingPattern(token.kind)) {
        // Empty mapping binding pattern: "{}".
        STNode closeBrace = parseCloseBrace();
        STNode bindingPatternsNode = STNodeFactory.createEmptyNodeList();
        STNode restBindingPattern = STNodeFactory.createEmptyNode();
        endContext();
        return STNodeFactory.createMappingBindingPatternNode(openBrace, bindingPatternsNode, restBindingPattern,
                closeBrace);
    }
    List<STNode> bindingPatterns = new ArrayList<>();
    STNode prevMember = parseMappingBindingPatternMember();
    if (prevMember.kind != SyntaxKind.REST_BINDING_PATTERN) {
        // A leading rest pattern is held back; it goes into its own slot below.
        bindingPatterns.add(prevMember);
    }
    return parseMappingBindingPattern(openBrace, bindingPatterns, prevMember);
}

/**
 * Parse the remaining field-binding-patterns of a mapping binding pattern.
 * A rest-binding-pattern terminates the member list and is placed into the
 * dedicated rest slot of the resulting node.
 *
 * @param openBrace       Already-parsed open brace
 * @param bindingPatterns Accumulator holding members and separators (mutated in place)
 * @param prevMember      Most recently parsed member
 * @return mapping-binding-pattern node
 */
private STNode parseMappingBindingPattern(STNode openBrace, List<STNode> bindingPatterns, STNode prevMember) {
    STToken token = peek();
    STNode mappingBindingPatternRhs = null;
    while (!isEndOfMappingBindingPattern(token.kind) && prevMember.kind != SyntaxKind.REST_BINDING_PATTERN) {
        mappingBindingPatternRhs = parseMappingBindingPatternEnd();
        if (mappingBindingPatternRhs == null) {
            break;
        }
        bindingPatterns.add(mappingBindingPatternRhs);
        prevMember = parseMappingBindingPatternMember();
        if (prevMember.kind == SyntaxKind.REST_BINDING_PATTERN) {
            break;
        }
        bindingPatterns.add(prevMember);
        token = peek();
    }
    STNode restBindingPattern;
    if (prevMember.kind == SyntaxKind.REST_BINDING_PATTERN) {
        restBindingPattern = prevMember;
    } else {
        restBindingPattern = STNodeFactory.createEmptyNode();
    }
    STNode closeBrace = parseCloseBrace();
    STNode bindingPatternsNode = STNodeFactory.createNodeList(bindingPatterns);
    endContext();
    return STNodeFactory.createMappingBindingPatternNode(openBrace, bindingPatternsNode, restBindingPattern,
            closeBrace);
}

/**
 * Parse mapping-binding-pattern entry.
 * <p>
 * <code>
 * mapping-binding-pattern := { field-binding-patterns }
 * <br/><br/>
 * field-binding-patterns := field-binding-pattern (, field-binding-pattern)* [, rest-binding-pattern]
 *                           | [ rest-binding-pattern ]
 * <br/><br/>
 * field-binding-pattern := field-name : binding-pattern
 *                          | variable-name
 * </code>
 *
 * @return mapping-binding-pattern node
 */
private STNode parseMappingBindingPatternMember() {
    STToken token = peek();
    switch (token.kind) {
        case ELLIPSIS_TOKEN:
            return parseRestBindingPattern();
        default:
            return parseFieldBindingPattern();
    }
}

/**
 * Parse the separator after a mapping-binding-pattern member: a comma to
 * continue, or null when the close brace ends the pattern.
 *
 * @return Comma token, or null at the end of the pattern
 */
private STNode parseMappingBindingPatternEnd() {
    switch (peek().kind) {
        case COMMA_TOKEN:
            return parseComma();
        case CLOSE_BRACE_TOKEN:
            return null;
        default:
            recover(peek(), ParserRuleContext.MAPPING_BINDING_PATTERN_END);
            return parseMappingBindingPatternEnd();
    }
}

/**
 * Parse field-binding-pattern.
 * <code>field-binding-pattern := field-name : binding-pattern | varname</code>
 *
 * @return field-binding-pattern node
 */
private STNode parseFieldBindingPattern() {
    switch (peek().kind) {
        case IDENTIFIER_TOKEN:
            STNode identifier = parseIdentifier(ParserRuleContext.FIELD_BINDING_PATTERN_NAME);
            STNode fieldBindingPattern = parseFieldBindingPattern(identifier);
            return fieldBindingPattern;
        default:
            recover(peek(), ParserRuleContext.FIELD_BINDING_PATTERN_NAME);
            return parseFieldBindingPattern();
    }
}

/**
 * Parse the rest of a field-binding-pattern after the field name: either a
 * varname-only form, or "name : binding-pattern".
 *
 * @param identifier Already-parsed field name
 * @return field-binding-pattern node
 */
private STNode parseFieldBindingPattern(STNode identifier) {
    STNode simpleNameReference = STNodeFactory.createSimpleNameReferenceNode(identifier);
    if (peek().kind != SyntaxKind.COLON_TOKEN) {
        return STNodeFactory.createFieldBindingPatternVarnameNode(simpleNameReference);
    }
    STNode colon = parseColon();
    STNode bindingPattern = parseBindingPattern();
    return STNodeFactory.createFieldBindingPatternFullNode(simpleNameReference, colon, bindingPattern);
}

/** Whether the token ends a mapping binding pattern. */
private boolean isEndOfMappingBindingPattern(SyntaxKind nextTokenKind) {
    return nextTokenKind == SyntaxKind.CLOSE_BRACE_TOKEN;
}

/**
 * Disambiguate a construct that starts with the 'error' keyword: either an
 * error binding pattern (assignment lhs) or an error type-desc (var-decl),
 * by looking ahead at the second/third tokens.
 *
 * @param annots Annotations, used only when parsing as a type-desc
 * @return Assignment statement or variable declaration node
 */
private STNode parseErrorTypeDescOrErrorBP(STNode annots) {
    STToken nextNextToken = peek(2);
    switch (nextNextToken.kind) {
        case OPEN_PAREN_TOKEN:
            return parseAsErrorBindingPattern();
        case LT_TOKEN:
            return parseAsErrorTypeDesc(annots);
        case IDENTIFIER_TOKEN:
            SyntaxKind nextNextNextTokenKind = peek(3).kind;
            if (nextNextNextTokenKind == SyntaxKind.COLON_TOKEN ||
                    nextNextNextTokenKind == SyntaxKind.OPEN_PAREN_TOKEN) {
                return parseAsErrorBindingPattern();
            }
            // Intentional fall-through: "error Identifier" without ':' or '(' is a type-desc.
        default:
            return parseAsErrorTypeDesc(annots);
    }
}

/** Parse as an error binding pattern followed by an assignment rhs. */
private STNode parseAsErrorBindingPattern() {
    startContext(ParserRuleContext.ASSIGNMENT_STMT);
    return parseAssignmentStmtRhs(parseErrorBindingPattern());
}

/** Parse as a variable declaration whose type starts with 'error'. */
private STNode parseAsErrorTypeDesc(STNode annots) {
    STNode finalKeyword = STNodeFactory.createEmptyNode();
    return parseVariableDecl(getAnnotations(annots), finalKeyword, false);
}

/**
 * Parse error binding pattern node.
 * <p>
 * <code>error-binding-pattern := error [error-type-reference] ( error-arg-list-binding-pattern )</code>
 * <br/><br/>
 * error-arg-list-binding-pattern :=
 *     error-message-binding-pattern [, error-cause-binding-pattern] [, error-field-binding-patterns]
 *     | [error-field-binding-patterns]
 * <br/><br/>
 * error-message-binding-pattern := simple-binding-pattern
 * <br/><br/>
 * error-cause-binding-pattern := simple-binding-pattern | error-binding-pattern
 * <br/><br/>
 * simple-binding-pattern := capture-binding-pattern | wildcard-binding-pattern
 * <br/><br/>
 * error-field-binding-patterns :=
 *     named-arg-binding-pattern (, named-arg-binding-pattern)* [, rest-binding-pattern]
 *     | rest-binding-pattern
 * <br/><br/>
 * named-arg-binding-pattern := arg-name = binding-pattern
 *
 * @return Error binding pattern node.
 */
private STNode parseErrorBindingPattern() {
    startContext(ParserRuleContext.ERROR_BINDING_PATTERN);
    STNode errorKeyword = parseErrorKeyword();
    return parseErrorBindingPattern(errorKeyword);
}

/**
 * Parse the optional error-type-reference after the 'error' keyword,
 * recovering when neither an identifier nor an open paren follows.
 *
 * @param errorKeyword Already-parsed 'error' keyword
 * @return Error binding pattern node
 */
private STNode parseErrorBindingPattern(STNode errorKeyword) {
    STToken nextToken = peek();
    STNode typeRef;
    switch (nextToken.kind) {
        case IDENTIFIER_TOKEN:
            typeRef = parseTypeReference();
            break;
        case OPEN_PAREN_TOKEN:
            typeRef = STNodeFactory.createEmptyNode();
            break;
        default:
            recover(peek(), ParserRuleContext.ERROR_BINDING_PATTERN_ERROR_KEYWORD_RHS);
            return parseErrorBindingPattern(errorKeyword);
    }
    return parseErrorBindingPattern(errorKeyword, typeRef);
}

/**
 * Parse the parenthesized arg-list of an error binding pattern.
 *
 * @param errorKeyword Already-parsed 'error' keyword (possibly synthesized)
 * @param typeRef      Error type reference, or an empty node
 * @return Error binding pattern node
 */
private STNode parseErrorBindingPattern(STNode errorKeyword, STNode typeRef) {
    STNode openParenthesis = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS);
    STNode argListBindingPatterns = parseErrorArgListBindingPatterns();
    STNode closeParenthesis = parseCloseParenthesis();
    endContext();
    return STNodeFactory.createErrorBindingPatternNode(errorKeyword, typeRef, openParenthesis,
            argListBindingPatterns, closeParenthesis);
}

/**
 * Parse error arg list binding pattern.
 * <p>
 * <code>
 * error-arg-list-binding-pattern :=
 *     error-message-binding-pattern [, error-cause-binding-pattern] [, error-field-binding-patterns]
 *     | [error-field-binding-patterns]
 * <br/><br/>
 * <p>
 * error-message-binding-pattern := simple-binding-pattern
 * <br/><br/>
 * <p>
 * error-cause-binding-pattern := simple-binding-pattern | error-binding-pattern
 * <br/><br/>
 * <p>
 * simple-binding-pattern := capture-binding-pattern | wildcard-binding-pattern
 * <br/><br/>
 * <p>
 * error-field-binding-patterns :=
 *     named-arg-binding-pattern (, named-arg-binding-pattern)* [, rest-binding-pattern]
 *     | rest-binding-pattern
 * <br/><br/>
 * <p>
 * named-arg-binding-pattern := arg-name = binding-pattern
 * </code>
 *
 * @return Error arg list binding patterns.
 */
private STNode parseErrorArgListBindingPatterns() {
    List<STNode> argListBindingPatterns = new ArrayList<>();
    if (isEndOfErrorFieldBindingPatterns()) {
        return STNodeFactory.createNodeList(argListBindingPatterns);
    }
    STNode firstArg = parseErrorArgListBindingPattern(ParserRuleContext.ERROR_ARG_LIST_BINDING_PATTERN_START);
    if (firstArg.kind == SyntaxKind.CAPTURE_BINDING_PATTERN ||
            firstArg.kind == SyntaxKind.WILDCARD_BINDING_PATTERN) {
        // First arg is a message binding pattern; an optional cause may follow.
        argListBindingPatterns.add(firstArg);
        STNode argEnd = parseErrorArgsBindingPatternEnd(ParserRuleContext.ERROR_MESSAGE_BINDING_PATTERN_END);
        if (argEnd != null) {
            STNode secondArg = parseErrorArgListBindingPattern(ParserRuleContext.ERROR_MESSAGE_BINDING_PATTERN_RHS);
            if (isValidSecondArgBindingPattern(secondArg.kind)) {
                argListBindingPatterns.add(argEnd);
                argListBindingPatterns.add(secondArg);
            } else {
                // Invalid cause: attach both the separator and the arg as invalid minutiae.
                updateLastNodeInListWithInvalidNode(argListBindingPatterns, argEnd, null);
                updateLastNodeInListWithInvalidNode(argListBindingPatterns, secondArg,
                        DiagnosticErrorCode.ERROR_BINDING_PATTERN_NOT_ALLOWED);
            }
        }
    } else {
        if (firstArg.kind != SyntaxKind.NAMED_ARG_BINDING_PATTERN &&
                firstArg.kind != SyntaxKind.REST_BINDING_PATTERN) {
            addInvalidNodeToNextToken(firstArg, DiagnosticErrorCode.ERROR_BINDING_PATTERN_NOT_ALLOWED);
        } else {
            argListBindingPatterns.add(firstArg);
        }
    }
    parseErrorFieldBindingPatterns(argListBindingPatterns);
    return STNodeFactory.createNodeList(argListBindingPatterns);
}

/** Whether the kind is allowed as the second (cause) arg of an error binding pattern. */
private boolean isValidSecondArgBindingPattern(SyntaxKind syntaxKind) {
    switch (syntaxKind) {
        case CAPTURE_BINDING_PATTERN:
        case WILDCARD_BINDING_PATTERN:
        case ERROR_BINDING_PATTERN:
        case NAMED_ARG_BINDING_PATTERN:
        case REST_BINDING_PATTERN:
            return true;
        default:
            return false;
    }
}

/**
 * Parse the trailing error-field-binding-patterns, enforcing ordering rules
 * (named args, then at most one rest pattern) and attaching out-of-order args
 * as invalid-node minutiae rather than dropping them.
 *
 * @param argListBindingPatterns Accumulator for valid args (mutated in place)
 */
private void parseErrorFieldBindingPatterns(List<STNode> argListBindingPatterns) {
    SyntaxKind lastValidArgKind = SyntaxKind.NAMED_ARG_BINDING_PATTERN;
    while (!isEndOfErrorFieldBindingPatterns()) {
        STNode argEnd = parseErrorArgsBindingPatternEnd(ParserRuleContext.ERROR_FIELD_BINDING_PATTERN_END);
        if (argEnd == null) {
            break;
        }
        STNode currentArg = parseErrorArgListBindingPattern(ParserRuleContext.ERROR_FIELD_BINDING_PATTERN);
        DiagnosticErrorCode errorCode = validateErrorFieldBindingPatternOrder(lastValidArgKind, currentArg.kind);
        if (errorCode == null) {
            argListBindingPatterns.add(argEnd);
            argListBindingPatterns.add(currentArg);
            lastValidArgKind = currentArg.kind;
        } else if (argListBindingPatterns.size() == 0) {
            // No valid node to attach the invalid arg to; attach to the next token.
            addInvalidNodeToNextToken(argEnd, null);
            addInvalidNodeToNextToken(currentArg, errorCode);
        } else {
            updateLastNodeInListWithInvalidNode(argListBindingPatterns, argEnd, null);
            updateLastNodeInListWithInvalidNode(argListBindingPatterns, currentArg, errorCode);
        }
    }
}

/** Whether the token ends the error-field-binding-pattern list. */
private boolean isEndOfErrorFieldBindingPatterns() {
    SyntaxKind nextTokenKind = peek().kind;
    switch (nextTokenKind) {
        case CLOSE_PAREN_TOKEN:
        case EOF_TOKEN:
            return true;
        default:
            return false;
    }
}

/**
 * Parse the separator after an error arg: a comma to continue, or null when
 * the close paren ends the list.
 *
 * @param currentCtx Context to use for recovery
 * @return Comma token, or null at the end of the list
 */
private STNode parseErrorArgsBindingPatternEnd(ParserRuleContext currentCtx) {
    switch (peek().kind) {
        case COMMA_TOKEN:
            return consume();
        case CLOSE_PAREN_TOKEN:
            return null;
        default:
            recover(peek(), currentCtx);
            return parseErrorArgsBindingPatternEnd(currentCtx);
    }
}

/**
 * Parse a single error arg-list binding pattern (rest, named/simple, or a
 * nested binding pattern).
 *
 * @param context Context to use for recovery
 * @return Error arg-list binding pattern node
 */
private STNode parseErrorArgListBindingPattern(ParserRuleContext context) {
    switch (peek().kind) {
        case ELLIPSIS_TOKEN:
            return parseRestBindingPattern();
        case IDENTIFIER_TOKEN:
            // Either a named arg "name = bp" or a simple (capture/wildcard) pattern.
            return parseNamedOrSimpleArgBindingPattern();
        case OPEN_BRACKET_TOKEN:
        case OPEN_BRACE_TOKEN:
        case ERROR_KEYWORD:
            return parseBindingPattern();
        default:
            recover(peek(), context);
            return parseErrorArgListBindingPattern(context);
    }
}

/**
 * Parse an identifier-led error arg: "name = binding-pattern" becomes a named
 * arg binding pattern; otherwise the identifier is a capture/wildcard pattern.
 *
 * @return Named-arg or capture/wildcard binding pattern node
 */
private STNode parseNamedOrSimpleArgBindingPattern() {
    STNode argNameOrSimpleBindingPattern = consume(); // identifier
    STToken secondToken = peek();
    switch (secondToken.kind) {
        case EQUAL_TOKEN:
            STNode equal = consume(); // '='
            STNode bindingPattern = parseBindingPattern();
            return STNodeFactory.createNamedArgBindingPatternNode(argNameOrSimpleBindingPattern, equal,
                    bindingPattern);
        case COMMA_TOKEN:
        case CLOSE_PAREN_TOKEN:
        default:
            return createCaptureOrWildcardBP(argNameOrSimpleBindingPattern);
    }
}

/**
 * Validate the ordering of error-field-binding-patterns: only named args and a
 * single trailing rest pattern are allowed; anything after a rest pattern is
 * an error.
 *
 * @param prevArgKind    Kind of the previous valid arg
 * @param currentArgKind Kind of the arg being validated
 * @return null when valid; otherwise the diagnostic code to attach
 */
private DiagnosticErrorCode validateErrorFieldBindingPatternOrder(SyntaxKind prevArgKind,
                                                                  SyntaxKind currentArgKind) {
    switch (currentArgKind) {
        case NAMED_ARG_BINDING_PATTERN:
        case REST_BINDING_PATTERN:
            if (prevArgKind == SyntaxKind.REST_BINDING_PATTERN) {
                return DiagnosticErrorCode.ERROR_ARG_FOLLOWED_BY_REST_ARG;
            }
            return null;
        case CAPTURE_BINDING_PATTERN:
        case WILDCARD_BINDING_PATTERN:
        case ERROR_BINDING_PATTERN:
        case LIST_BINDING_PATTERN:
        case MAPPING_BINDING_PATTERN:
        default:
            return DiagnosticErrorCode.ERROR_BINDING_PATTERN_NOT_ALLOWED;
    }
}

/*
 * This parses Typed binding patterns and deals with ambiguity between types,
 * and binding patterns. An example is 'T[a]'.
 * The ambiguity lies in between:
 * 1) Array Type
 * 2) List binding pattern
 * 3) Member access expression.
 */

/**
 * Parse the component after the type-desc, of a typed-binding-pattern.
* * @param typeDesc Starting type-desc of the typed-binding-pattern * @return Typed-binding pattern */ private STNode parseTypedBindingPatternTypeRhs(STNode typeDesc, ParserRuleContext context) { return parseTypedBindingPatternTypeRhs(typeDesc, context, true); } private STNode parseTypedBindingPatternTypeRhs(STNode typeDesc, ParserRuleContext context, boolean isRoot) { switch (peek().kind) { case IDENTIFIER_TOKEN: case OPEN_BRACE_TOKEN: case ERROR_KEYWORD: STNode bindingPattern = parseBindingPattern(); return STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern); case OPEN_BRACKET_TOKEN: STNode typedBindingPattern = parseTypedBindingPatternOrMemberAccess(typeDesc, true, true, context); assert typedBindingPattern.kind == SyntaxKind.TYPED_BINDING_PATTERN; return typedBindingPattern; case CLOSE_PAREN_TOKEN: case COMMA_TOKEN: case CLOSE_BRACKET_TOKEN: case CLOSE_BRACE_TOKEN: if (!isRoot) { return typeDesc; } default: recover(peek(), ParserRuleContext.TYPED_BINDING_PATTERN_TYPE_RHS, typeDesc, context, isRoot); return parseTypedBindingPatternTypeRhs(typeDesc, context, isRoot); } } /** * Parse typed-binding pattern with list, array-type-desc, or member-access-expr. * * @param typeDescOrExpr Type desc or the expression at the start * @param isTypedBindingPattern Is this is a typed-binding-pattern. 
* @return Parsed node */ private STNode parseTypedBindingPatternOrMemberAccess(STNode typeDescOrExpr, boolean isTypedBindingPattern, boolean allowAssignment, ParserRuleContext context) { startContext(ParserRuleContext.BRACKETED_LIST); STNode openBracket = parseOpenBracket(); if (isBracketedListEnd(peek().kind)) { return parseAsArrayTypeDesc(typeDescOrExpr, openBracket, STNodeFactory.createEmptyNode(), context); } STNode member = parseBracketedListMember(isTypedBindingPattern); SyntaxKind currentNodeType = getBracketedListNodeType(member); switch (currentNodeType) { case ARRAY_TYPE_DESC: STNode typedBindingPattern = parseAsArrayTypeDesc(typeDescOrExpr, openBracket, member, context); return typedBindingPattern; case LIST_BINDING_PATTERN: STNode bindingPattern = parseAsListBindingPattern(openBracket, new ArrayList<>(), member, false); STNode typeDesc = getTypeDescFromExpr(typeDescOrExpr); return STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern); case INDEXED_EXPRESSION: return parseAsMemberAccessExpr(typeDescOrExpr, openBracket, member); case NONE: default: break; } STNode memberEnd = parseBracketedListMemberEnd(); if (memberEnd != null) { List<STNode> memberList = new ArrayList<>(); memberList.add(member); memberList.add(memberEnd); STNode bindingPattern = parseAsListBindingPattern(openBracket, memberList); STNode typeDesc = getTypeDescFromExpr(typeDescOrExpr); return STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern); } STNode closeBracket = parseCloseBracket(); endContext(); return parseTypedBindingPatternOrMemberAccessRhs(typeDescOrExpr, openBracket, member, closeBracket, isTypedBindingPattern, allowAssignment, context); } private STNode parseAsMemberAccessExpr(STNode typeNameOrExpr, STNode openBracket, STNode member) { member = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, member, false, true); STNode closeBracket = parseCloseBracket(); endContext(); STNode keyExpr = STNodeFactory.createNodeList(member); STNode memberAccessExpr 
= STNodeFactory.createIndexedExpressionNode(typeNameOrExpr, openBracket, keyExpr, closeBracket); return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, memberAccessExpr, false, false); } private boolean isBracketedListEnd(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case EOF_TOKEN: case CLOSE_BRACKET_TOKEN: return true; default: return false; } } /** * Parse a member of an ambiguous bracketed list. This member could be: * 1) Array length * 2) Key expression of a member-access-expr * 3) A member-binding pattern of a list-binding-pattern. * * @param isTypedBindingPattern Is this in a definite typed-binding pattern * @return Parsed member node */ private STNode parseBracketedListMember(boolean isTypedBindingPattern) { STToken nextToken = peek(); switch (nextToken.kind) { case DECIMAL_INTEGER_LITERAL_TOKEN: case HEX_INTEGER_LITERAL_TOKEN: case ASTERISK_TOKEN: case STRING_LITERAL_TOKEN: return parseBasicLiteral(); case CLOSE_BRACKET_TOKEN: return STNodeFactory.createEmptyNode(); case OPEN_BRACE_TOKEN: case ERROR_KEYWORD: case ELLIPSIS_TOKEN: case OPEN_BRACKET_TOKEN: return parseStatementStartBracketedListMember(); case IDENTIFIER_TOKEN: if (isTypedBindingPattern) { return parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF); } break; default: if (!isTypedBindingPattern && isValidExpressionStart(nextToken.kind, 1)) { break; } ParserRuleContext recoverContext = isTypedBindingPattern ? ParserRuleContext.LIST_BINDING_MEMBER_OR_ARRAY_LENGTH : ParserRuleContext.BRACKETED_LIST_MEMBER; recover(peek(), recoverContext, isTypedBindingPattern); return parseBracketedListMember(isTypedBindingPattern); } STNode expr = parseExpression(); if (isWildcardBP(expr)) { return getWildcardBindingPattern(expr); } return expr; } /** * Treat the current node as an array, and parse the remainder of the binding pattern. 
* * @param typeDesc Type-desc * @param openBracket Open bracket * @param member Member * @return Parsed node */ private STNode parseAsArrayTypeDesc(STNode typeDesc, STNode openBracket, STNode member, ParserRuleContext context) { typeDesc = getTypeDescFromExpr(typeDesc); typeDesc = validateForUsageOfVar(typeDesc); STNode closeBracket = parseCloseBracket(); endContext(); return parseTypedBindingPatternOrMemberAccessRhs(typeDesc, openBracket, member, closeBracket, true, true, context); } private STNode parseBracketedListMemberEnd() { switch (peek().kind) { case COMMA_TOKEN: return parseComma(); case CLOSE_BRACKET_TOKEN: return null; default: recover(peek(), ParserRuleContext.BRACKETED_LIST_MEMBER_END); return parseBracketedListMemberEnd(); } } /** * We reach here to break ambiguity of T[a]. This could be: * 1) Array Type Desc * 2) Member access on LHS * 3) Typed-binding-pattern * * @param typeDescOrExpr Type name or the expr that precede the open-bracket. * @param openBracket Open bracket * @param member Member * @param closeBracket Open bracket * @param isTypedBindingPattern Is this is a typed-binding-pattern. * @return Specific node that matches to T[a], after solving ambiguity. 
*/ private STNode parseTypedBindingPatternOrMemberAccessRhs(STNode typeDescOrExpr, STNode openBracket, STNode member, STNode closeBracket, boolean isTypedBindingPattern, boolean allowAssignment, ParserRuleContext context) { STToken nextToken = peek(); switch (nextToken.kind) { case IDENTIFIER_TOKEN: case OPEN_BRACE_TOKEN: case ERROR_KEYWORD: STNode typeDesc = getTypeDescFromExpr(typeDescOrExpr); STNode arrayTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, typeDesc); return parseTypedBindingPatternTypeRhs(arrayTypeDesc, context); case OPEN_BRACKET_TOKEN: if (isTypedBindingPattern) { typeDesc = getTypeDescFromExpr(typeDescOrExpr); arrayTypeDesc = createArrayTypeDesc(typeDesc, openBracket, member, closeBracket); return parseTypedBindingPatternTypeRhs(arrayTypeDesc, context); } STNode keyExpr = STNodeFactory.createNodeList(member); STNode expr = STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr, closeBracket); return parseTypedBindingPatternOrMemberAccess(expr, false, allowAssignment, context); case QUESTION_MARK_TOKEN: typeDesc = getTypeDescFromExpr(typeDescOrExpr); arrayTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, typeDesc); typeDesc = parseComplexTypeDescriptor(arrayTypeDesc, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true); return parseTypedBindingPatternTypeRhs(typeDesc, context); case PIPE_TOKEN: case BITWISE_AND_TOKEN: return parseComplexTypeDescInTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket, context, isTypedBindingPattern); case IN_KEYWORD: if (context != ParserRuleContext.FOREACH_STMT && context != ParserRuleContext.FROM_CLAUSE) { break; } return createTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket); case EQUAL_TOKEN: if (context == ParserRuleContext.FOREACH_STMT || context == ParserRuleContext.FROM_CLAUSE) { break; } if (isTypedBindingPattern || !allowAssignment || !isValidLVExpr(typeDescOrExpr)) { return 
createTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket); } keyExpr = STNodeFactory.createNodeList(member); typeDescOrExpr = getExpression(typeDescOrExpr); return STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr, closeBracket); case SEMICOLON_TOKEN: if (context == ParserRuleContext.FOREACH_STMT || context == ParserRuleContext.FROM_CLAUSE) { break; } return createTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket); case CLOSE_BRACE_TOKEN: case COMMA_TOKEN: if (context == ParserRuleContext.AMBIGUOUS_STMT) { keyExpr = STNodeFactory.createNodeList(member); return STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr, closeBracket); } default: if (isValidExprRhsStart(nextToken.kind, closeBracket.kind)) { keyExpr = STNodeFactory.createNodeList(member); typeDescOrExpr = getExpression(typeDescOrExpr); return STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr, closeBracket); } break; } recover(peek(), ParserRuleContext.BRACKETED_LIST_RHS, typeDescOrExpr, openBracket, member, closeBracket, isTypedBindingPattern, allowAssignment, context); return parseTypedBindingPatternOrMemberAccessRhs(typeDescOrExpr, openBracket, member, closeBracket, isTypedBindingPattern, allowAssignment, context); } private STNode createTypedBindingPattern(STNode typeDescOrExpr, STNode openBracket, STNode member, STNode closeBracket) { STNode bindingPatterns; if (isEmpty(member)) { bindingPatterns = STNodeFactory.createEmptyNodeList(); } else { STNode bindingPattern = getBindingPattern(member); bindingPatterns = STNodeFactory.createNodeList(bindingPattern); } STNode restBindingPattern = STNodeFactory.createEmptyNode(); STNode bindingPattern = STNodeFactory.createListBindingPatternNode(openBracket, bindingPatterns, restBindingPattern, closeBracket); STNode typeDesc = getTypeDescFromExpr(typeDescOrExpr); return STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern); } 
/**
 * Parse a union or intersection type-desc/binary-expression that involves ambiguous
 * bracketed list in lhs.
 * <p>
 * e.g: <code>(T[a] &amp; R..)</code> or <code>(T[a] | R.. )</code>
 * <p>
 * Complexity occurs in scenarios such as <code>T[a] |/&amp; R[b]</code>. If the token after this
 * is another binding-pattern, then <code>(T[a] |/&amp; R[b])</code> becomes the type-desc. However,
 * if the token follows this is an equal or semicolon, then <code>(T[a] |/&amp; R)</code> becomes
 * the type-desc, and <code>[b]</code> becomes the binding pattern.
 *
 * @param typeDescOrExpr Type desc or the expression
 * @param openBracket Open bracket
 * @param member Member
 * @param closeBracket Close bracket
 * @param context Context in which the typed binding pattern occurs
 * @param isTypedBindingPattern Whether this is a definite typed-binding-pattern
 * @return Parsed node
 */
private STNode parseComplexTypeDescInTypedBindingPattern(STNode typeDescOrExpr, STNode openBracket, STNode member,
                                                         STNode closeBracket, ParserRuleContext context,
                                                         boolean isTypedBindingPattern) {
    STNode pipeOrAndToken = parseUnionOrIntersectionToken();
    // Parse the RHS first; its shape decides how the LHS is interpreted.
    STNode typedBindingPatternOrExpr = parseTypedBindingPatternOrExpr(false);
    if (isTypedBindingPattern || typedBindingPatternOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) {
        // RHS is a typed-binding-pattern: LHS 'T[a]' is an array type desc, and the
        // combined union/intersection type-desc takes the RHS's binding pattern.
        STNode lhsTypeDesc = getTypeDescFromExpr(typeDescOrExpr);
        lhsTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, lhsTypeDesc);
        STTypedBindingPatternNode rhsTypedBindingPattern = (STTypedBindingPatternNode) typedBindingPatternOrExpr;
        STNode newTypeDesc;
        if (pipeOrAndToken.kind == SyntaxKind.PIPE_TOKEN) {
            newTypeDesc = createUnionTypeDesc(lhsTypeDesc, pipeOrAndToken, rhsTypedBindingPattern.typeDescriptor);
        } else {
            newTypeDesc = createIntersectionTypeDesc(lhsTypeDesc, pipeOrAndToken,
                    rhsTypedBindingPattern.typeDescriptor);
        }
        return STNodeFactory.createTypedBindingPatternNode(newTypeDesc, rhsTypedBindingPattern.bindingPattern);
    } else {
        // RHS is an expression: LHS 'T[a]' is a member access, and the whole thing
        // is a binary expression (| or & operator).
        STNode keyExpr = getExpression(member);
        STNode containerExpr = getExpression(typeDescOrExpr);
        STNode lhsExpr =
                STNodeFactory.createIndexedExpressionNode(containerExpr, openBracket, keyExpr, closeBracket);
        return STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, lhsExpr, pipeOrAndToken,
                typedBindingPatternOrExpr);
    }
}

/**
 * Apply '[member]' as an array dimension to the given type-desc. For union and
 * intersection type-descs, the dimension binds to the right-most operand
 * (recursively), since array types bind tighter than | and &amp;.
 *
 * @param openBracket Open bracket
 * @param member Array length member (possibly empty)
 * @param closeBracket Close bracket
 * @param lhsTypeDesc Type-desc to apply the dimension to
 * @return Resulting type-desc
 */
private STNode getArrayTypeDesc(STNode openBracket, STNode member, STNode closeBracket, STNode lhsTypeDesc) {
    if (lhsTypeDesc.kind == SyntaxKind.UNION_TYPE_DESC) {
        STUnionTypeDescriptorNode unionTypeDesc = (STUnionTypeDescriptorNode) lhsTypeDesc;
        STNode middleTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, unionTypeDesc.rightTypeDesc);
        lhsTypeDesc = createUnionTypeDesc(unionTypeDesc.leftTypeDesc, unionTypeDesc.pipeToken, middleTypeDesc);
    } else if (lhsTypeDesc.kind == SyntaxKind.INTERSECTION_TYPE_DESC) {
        STIntersectionTypeDescriptorNode intersectionTypeDesc = (STIntersectionTypeDescriptorNode) lhsTypeDesc;
        STNode middleTypeDesc =
                getArrayTypeDesc(openBracket, member, closeBracket, intersectionTypeDesc.rightTypeDesc);
        lhsTypeDesc = createIntersectionTypeDesc(intersectionTypeDesc.leftTypeDesc,
                intersectionTypeDesc.bitwiseAndToken, middleTypeDesc);
    } else {
        lhsTypeDesc = createArrayTypeDesc(lhsTypeDesc, openBracket, member, closeBracket);
    }
    return lhsTypeDesc;
}

/**
 * Parse union (|) or intersection (&amp;) type operator.
 *
 * @return pipe or bitwise and token
 */
private STNode parseUnionOrIntersectionToken() {
    STToken token = peek();
    if (token.kind == SyntaxKind.PIPE_TOKEN || token.kind == SyntaxKind.BITWISE_AND_TOKEN) {
        return consume();
    } else {
        recover(token, ParserRuleContext.UNION_OR_INTERSECTION_TOKEN);
        return parseUnionOrIntersectionToken();
    }
}

/**
 * Infer the type of the ambiguous bracketed list, based on the type of the member.
 *
 * @param memberNode Member node
 * @return Inferred type of the bracketed list
 */
private SyntaxKind getBracketedListNodeType(STNode memberNode) {
    if (isEmpty(memberNode)) {
        // Empty brackets: still ambiguous (e.g. array dim vs empty list BP).
        return SyntaxKind.NONE;
    }
    if (isDefiniteTypeDesc(memberNode.kind)) {
        return SyntaxKind.TUPLE_TYPE_DESC;
    }
    switch (memberNode.kind) {
        case ASTERISK_LITERAL: // '[*]' — inferred-length array
            return SyntaxKind.ARRAY_TYPE_DESC;
        case CAPTURE_BINDING_PATTERN:
        case LIST_BINDING_PATTERN:
        case REST_BINDING_PATTERN:
        case MAPPING_BINDING_PATTERN:
        case WILDCARD_BINDING_PATTERN:
            return SyntaxKind.LIST_BINDING_PATTERN;
        case QUALIFIED_NAME_REFERENCE: // a qualified-ref can only be a type-ref here
        case REST_TYPE:
            return SyntaxKind.TUPLE_TYPE_DESC;
        case NUMERIC_LITERAL: // member is a const expression; could be array length or key expr
        case SIMPLE_NAME_REFERENCE: // member is a simple type-ref or expr
        case BRACKETED_LIST: // member is again ambiguous
        case MAPPING_BP_OR_MAPPING_CONSTRUCTOR:
            return SyntaxKind.NONE;
        default:
            return SyntaxKind.INDEXED_EXPRESSION;
    }
}

/*
 * This section tries to break the ambiguity in parsing a statement that starts with a open-bracket.
 * The ambiguity lies in between:
 * 1) Assignment that starts with list binding pattern
 * 2) Var-decl statement that starts with tuple type
 * 3) Statement that starts with list constructor, such as sync-send, etc.
 */

/**
 * Parse any statement that starts with an open-bracket.
 *
 * @param annots Annotations attached to the statement
 * @param possibleMappingField Whether this could be a mapping field
 * @return Parsed node
 */
private STNode parseStatementStartsWithOpenBracket(STNode annots, boolean possibleMappingField) {
    startContext(ParserRuleContext.ASSIGNMENT_OR_VAR_DECL_STMT);
    return parseStatementStartsWithOpenBracket(annots, true, possibleMappingField);
}

/**
 * Parse a nested (non-root) bracketed list, with no annotations attached.
 *
 * @param possibleMappingField Whether this could be a mapping field
 * @return Parsed node
 */
private STNode parseMemberBracketedList(boolean possibleMappingField) {
    STNode annots = STNodeFactory.createEmptyNodeList();
    return parseStatementStartsWithOpenBracket(annots, false, possibleMappingField);
}

/**
 * The bracketed list at the start of a statement can be one of the following.
 * 1) List binding pattern
 * 2) Tuple type
 * 3) List constructor
 *
 * @param annots Annotations attached to the statement
 * @param isRoot Whether this is the root of the list
 * @param possibleMappingField Whether this could be a mapping field
 * @return Parsed node
 */
private STNode parseStatementStartsWithOpenBracket(STNode annots, boolean isRoot, boolean possibleMappingField) {
    startContext(ParserRuleContext.STMT_START_BRACKETED_LIST);
    STNode openBracket = parseOpenBracket();
    List<STNode> memberList = new ArrayList<>();
    while (!isBracketedListEnd(peek().kind)) {
        // Parse the next member; if it is decisive, switch to the concrete parse.
        STNode member = parseStatementStartBracketedListMember();
        SyntaxKind currentNodeType = getStmtStartBracketedListType(member);
        switch (currentNodeType) {
            case TUPLE_TYPE_DESC:
                return parseAsTupleTypeDesc(annots, openBracket, memberList, member, isRoot);
            case LIST_BINDING_PATTERN:
                return parseAsListBindingPattern(openBracket, memberList, member, isRoot);
            case LIST_CONSTRUCTOR:
                return parseAsListConstructor(openBracket, memberList, member, isRoot);
            case LIST_BP_OR_LIST_CONSTRUCTOR:
                return parseAsListBindingPatternOrListConstructor(openBracket, memberList, member, isRoot);
            case TUPLE_TYPE_DESC_OR_LIST_CONST:
                return parseAsTupleTypeDescOrListConstructor(annots, openBracket, memberList, member, isRoot);
            case NONE:
            default:
                // Still ambiguous: accumulate and continue.
                memberList.add(member);
                break;
        }
        STNode memberEnd = parseBracketedListMemberEnd();
        if (memberEnd == null) {
            break;
        }
        memberList.add(memberEnd);
    }
    // Reached the close bracket with the construct still ambiguous.
    STNode closeBracket = parseCloseBracket();
    STNode bracketedList = parseStatementStartBracketedList(annots, openBracket, memberList, closeBracket, isRoot,
            possibleMappingField);
    return bracketedList;
}

/**
 * Parse a member of a list-binding-pattern, tuple-type-desc, or
 * list-constructor-expr, when the parent is ambiguous.
 *
 * @return Parsed node
 */
private STNode parseStatementStartBracketedListMember() {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case OPEN_BRACKET_TOKEN:
            // Nested bracketed list: recurse, keeping the ambiguity.
            return parseMemberBracketedList(false);
        case IDENTIFIER_TOKEN:
            STNode identifier = parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);
            if (isWildcardBP(identifier)) {
                // '_' — wildcard binding pattern.
                STNode varName = ((STSimpleNameReferenceNode) identifier).name;
                return getWildcardBindingPattern(varName);
            }
            if (peek().kind == SyntaxKind.ELLIPSIS_TOKEN) {
                // 'T...' — rest type descriptor.
                STNode ellipsis = parseEllipsis();
                return STNodeFactory.createRestDescriptorNode(identifier, ellipsis);
            }
            // Not a binding pattern start: continue as an expression.
            return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, identifier, false, true);
        case OPEN_BRACE_TOKEN:
            // Ambiguous between mapping binding pattern and mapping constructor.
            return parseMappingBindingPatterOrMappingConstructor();
        case ERROR_KEYWORD:
            if (getNextNextToken(nextToken.kind).kind == SyntaxKind.OPEN_PAREN_TOKEN) {
                return parseErrorConstructorExpr();
            }
            if (peek(2).kind == SyntaxKind.IDENTIFIER_TOKEN) {
                // 'error <identifier>' — error binding pattern.
                return parseErrorBindingPattern();
            }
            // 'error' alone: the error type descriptor.
            return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
        case ELLIPSIS_TOKEN:
            return parseListBindingPatternMember();
        case XML_KEYWORD:
        case STRING_KEYWORD:
            if (getNextNextToken(nextToken.kind).kind == SyntaxKind.BACKTICK_TOKEN) {
                // xml`..` / string`.. ` — template expression.
                return parseExpression(false);
            }
            return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
        case TABLE_KEYWORD:
        case STREAM_KEYWORD:
            if (getNextNextToken(nextToken.kind).kind == SyntaxKind.LT_TOKEN) {
                // table<..> / stream<..> — type descriptor.
                return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
            }
            return parseExpression(false);
        case OPEN_PAREN_TOKEN:
            return parseTypeDescOrExpr();
        default:
            if (isValidExpressionStart(nextToken.kind, 1)) {
                return parseExpression(false);
            }
            if (isTypeStartingToken(nextToken.kind)) {
                return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
            }
            recover(nextToken, ParserRuleContext.STMT_START_BRACKETED_LIST_MEMBER);
            return parseStatementStartBracketedListMember();
    }
}

/**
 * Continue parsing an ambiguous bracketed list as either a tuple-type-desc or a
 * list-constructor, after a member that narrowed it down to those two options.
 *
 * @param annots Annotations attached to the statement
 * @param openBracket Open bracket
 * @param memberList Members parsed so far (mutated in place)
 * @param member Current member
 * @param isRoot Whether this is the root of the list
 * @return Parsed node
 */
private STNode parseAsTupleTypeDescOrListConstructor(STNode annots, STNode openBracket, List<STNode> memberList,
                                                     STNode member, boolean isRoot) {
    memberList.add(member);
    STNode memberEnd = parseBracketedListMemberEnd();
    STNode tupleTypeDescOrListCons;
    if (memberEnd == null) {
        // End of list: decide based on what follows the close bracket.
        STNode closeBracket = parseCloseBracket();
        tupleTypeDescOrListCons =
                parseTupleTypeDescOrListConstructorRhs(openBracket, memberList, closeBracket, isRoot);
    } else {
        memberList.add(memberEnd);
        tupleTypeDescOrListCons = parseTupleTypeDescOrListConstructor(annots, openBracket, memberList, isRoot);
    }
    return tupleTypeDescOrListCons;
}

/**
 * Parse tuple type desc or list constructor.
 *
 * @param annots Annotations attached to the statement
 * @return Parsed node
 */
private STNode parseTupleTypeDescOrListConstructor(STNode annots) {
    startContext(ParserRuleContext.BRACKETED_LIST);
    STNode openBracket = parseOpenBracket();
    List<STNode> memberList = new ArrayList<>();
    return parseTupleTypeDescOrListConstructor(annots, openBracket, memberList, false);
}

/**
 * Parse the members of an ambiguous tuple-type-desc/list-constructor, switching
 * to the concrete parse as soon as a member is decisive.
 *
 * @param annots Annotations attached to the statement
 * @param openBracket Open bracket
 * @param memberList Members parsed so far (mutated in place)
 * @param isRoot Whether this is the root of the list
 * @return Parsed node
 */
private STNode parseTupleTypeDescOrListConstructor(STNode annots, STNode openBracket, List<STNode> memberList,
                                                   boolean isRoot) {
    STToken nextToken = peek();
    while (!isBracketedListEnd(nextToken.kind)) {
        STNode member = parseTupleTypeDescOrListConstructorMember(annots);
        SyntaxKind currentNodeType = getParsingNodeTypeOfTupleTypeOrListCons(member);
        switch (currentNodeType) {
            case LIST_CONSTRUCTOR:
                return parseAsListConstructor(openBracket, memberList, member, isRoot);
            case TUPLE_TYPE_DESC:
                return parseAsTupleTypeDesc(annots, openBracket, memberList, member, isRoot);
            case TUPLE_TYPE_DESC_OR_LIST_CONST:
            default:
                // Still ambiguous: accumulate and continue.
                memberList.add(member);
                break;
        }
        STNode memberEnd = parseBracketedListMemberEnd();
        if (memberEnd == null) {
            break;
        }
        memberList.add(memberEnd);
        nextToken = peek();
    }
    STNode closeBracket = parseCloseBracket();
    return parseTupleTypeDescOrListConstructorRhs(openBracket, memberList, closeBracket, isRoot);
}

/**
 * Parse a single member of an ambiguous tuple-type-desc/list-constructor.
 *
 * @param annots Annotations attached to the statement
 * @return Parsed node
 */
private STNode parseTupleTypeDescOrListConstructorMember(STNode annots) {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case OPEN_BRACKET_TOKEN:
            // Nested ambiguous list.
            return parseTupleTypeDescOrListConstructor(annots);
        case IDENTIFIER_TOKEN:
            STNode identifier = parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);
            if (peek().kind == SyntaxKind.ELLIPSIS_TOKEN) {
                // 'T...' — rest type descriptor.
                STNode ellipsis = parseEllipsis();
                return STNodeFactory.createRestDescriptorNode(identifier, ellipsis);
            }
            return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, identifier, false, false);
        case OPEN_BRACE_TOKEN:
            // No binding patterns possible here, so '{' must be a mapping constructor.
            return parseMappingConstructorExpr();
        case ERROR_KEYWORD:
            if (getNextNextToken(nextToken.kind).kind == SyntaxKind.OPEN_PAREN_TOKEN) {
                return parseErrorConstructorExpr();
            }
            return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
        case XML_KEYWORD:
        case STRING_KEYWORD:
            if (getNextNextToken(nextToken.kind).kind == SyntaxKind.BACKTICK_TOKEN) {
                return parseExpression(false);
            }
            return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
        case TABLE_KEYWORD:
        case STREAM_KEYWORD:
            if (getNextNextToken(nextToken.kind).kind == SyntaxKind.LT_TOKEN) {
                return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
            }
            return parseExpression(false);
        case OPEN_PAREN_TOKEN:
            return parseTypeDescOrExpr();
        default:
            if (isValidExpressionStart(nextToken.kind, 1)) {
                return parseExpression(false);
            }
            if (isTypeStartingToken(nextToken.kind)) {
                return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
            }
            recover(peek(), ParserRuleContext.TUPLE_TYPE_DESC_OR_LIST_CONST_MEMBER, annots);
            return parseTupleTypeDescOrListConstructorMember(annots);
    }
}

/**
 * Classify a parsed member of an ambiguous tuple-type-desc/list-constructor.
 * Delegates to the statement-start bracketed-list classifier.
 *
 * @param memberNode Member node
 * @return Inferred kind of the enclosing list
 */
private SyntaxKind getParsingNodeTypeOfTupleTypeOrListCons(STNode memberNode) {
    return getStmtStartBracketedListType(memberNode);
}

/**
 * Decide between tuple-type-desc and list-constructor once the close bracket has
 * been consumed, based on the token that follows.
 *
 * @param openBracket Open bracket
 * @param members Members of the list
 * @param closeBracket Close bracket
 * @param isRoot Whether this is the root of the list
 * @return Parsed node
 */
private STNode parseTupleTypeDescOrListConstructorRhs(STNode openBracket, List<STNode> members,
                                                      STNode closeBracket, boolean isRoot) {
    STNode tupleTypeOrListConst;
    switch (peek().kind) {
        case COMMA_TOKEN: // [a, b],
        case CLOSE_BRACE_TOKEN: // [a, b]}
        case CLOSE_BRACKET_TOKEN: // [a, b]]
            if (!isRoot) {
                // Nested list still ambiguous: defer the decision to the parent.
                endContext();
                return new STAmbiguousCollectionNode(SyntaxKind.TUPLE_TYPE_DESC_OR_LIST_CONST, openBracket,
                        members, closeBracket);
            }
            // else fall through
        default:
            if (isValidExprRhsStart(peek().kind, closeBracket.kind) ||
                    (isRoot && peek().kind == SyntaxKind.EQUAL_TOKEN)) {
                // Followed by an operator or '=': it was a list constructor.
                members = getExpressionList(members);
                STNode memberExpressions = STNodeFactory.createNodeList(members);
                tupleTypeOrListConst = STNodeFactory.createListConstructorExpressionNode(openBracket,
                        memberExpressions, closeBracket);
                break;
            }
            // Otherwise treat it as a tuple type desc, possibly followed by more
            // type-desc syntax (array dims, '?', '|', '&', ...).
            STNode memberTypeDescs = STNodeFactory.createNodeList(getTypeDescList(members));
            STNode tupleTypeDesc =
                    STNodeFactory.createTupleTypeDescriptorNode(openBracket, memberTypeDescs, closeBracket);
            tupleTypeOrListConst =
                    parseComplexTypeDescriptor(tupleTypeDesc, ParserRuleContext.TYPE_DESC_IN_TUPLE, false);
    }
    endContext();
    if (!isRoot) {
        return tupleTypeOrListConst;
    }
    STNode annots = STNodeFactory.createEmptyNodeList();
    return parseStmtStartsWithTupleTypeOrExprRhs(annots, tupleTypeOrListConst, isRoot);
}
STNode typeDescriptorNode = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN,
private boolean isServiceDeclStart(ParserRuleContext currentContext, int lookahead) { switch (peek(lookahead + 1).kind) { case IDENTIFIER_TOKEN: SyntaxKind tokenAfterIdentifier = peek(lookahead + 2).kind; switch (tokenAfterIdentifier) { case ON_KEYWORD: case OPEN_BRACE_TOKEN: return true; case EQUAL_TOKEN: case SEMICOLON_TOKEN: case QUESTION_MARK_TOKEN: return false; default: return false; } case ON_KEYWORD: return true; default: return false; } } /** * Parse listener declaration, given the qualifier. * <p> * <code> * listener-decl := metadata [public] listener [type-descriptor] variable-name = expression ; * </code> * * @param metadata Metadata * @param qualifier Qualifier that precedes the listener declaration * @return Parsed node */ private STNode parseListenerDeclaration(STNode metadata, STNode qualifier) { startContext(ParserRuleContext.LISTENER_DECL); STNode listenerKeyword = parseListenerKeyword(); if (peek().kind == SyntaxKind.IDENTIFIER_TOKEN) { STNode listenerDecl = parseConstantOrListenerDeclWithOptionalType(metadata, qualifier, listenerKeyword, true); endContext(); return listenerDecl; } STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER); STNode variableName = parseVariableName(); STNode equalsToken = parseAssignOp(); STNode initializer = parseExpression(); STNode semicolonToken = parseSemicolon(); endContext(); return STNodeFactory.createListenerDeclarationNode(metadata, qualifier, listenerKeyword, typeDesc, variableName, equalsToken, initializer, semicolonToken); } /** * Parse listener keyword. * * @return Parsed node */ private STNode parseListenerKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.LISTENER_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.LISTENER_KEYWORD); return parseListenerKeyword(); } } /** * Parse constant declaration, given the qualifier. 
 * <p>
     * <code>module-const-decl := metadata [public] const [type-descriptor] identifier = const-expr ;</code>
     *
     * @param metadata Metadata
     * @param qualifier Qualifier that precedes the const declaration
     * @return Parsed node
     */
    private STNode parseConstantDeclaration(STNode metadata, STNode qualifier) {
        startContext(ParserRuleContext.CONSTANT_DECL);
        STNode constKeyword = parseConstantKeyword();
        STNode constDecl = parseConstDecl(metadata, qualifier, constKeyword);
        endContext();
        return constDecl;
    }

    /**
     * Parse the components that follows after the const keyword of a constant declaration.
     *
     * @param metadata Metadata
     * @param qualifier Qualifier that precedes the constant decl
     * @param constKeyword Const keyword
     * @return Parsed node
     */
    private STNode parseConstDecl(STNode metadata, STNode qualifier, STNode constKeyword) {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case ANNOTATION_KEYWORD:
                // "const annotation ..." is an annotation declaration, not a const decl.
                return parseAnnotationDeclaration(metadata, qualifier, constKeyword);
            case IDENTIFIER_TOKEN:
                // Identifier may be the (optional) type-name or the variable-name.
                return parseConstantOrListenerDeclWithOptionalType(metadata, qualifier, constKeyword, false);
            default:
                if (isTypeStartingToken(nextToken.kind)) {
                    break;
                }
                recover(peek(), ParserRuleContext.CONST_DECL_TYPE, metadata, qualifier, constKeyword);
                return parseConstDecl(metadata, qualifier, constKeyword);
        }

        STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER);
        STNode variableName = parseVariableName();
        STNode equalsToken = parseAssignOp();
        STNode initializer = parseExpression();
        STNode semicolonToken = parseSemicolon();
        return STNodeFactory.createConstantDeclarationNode(metadata, qualifier, constKeyword, typeDesc, variableName,
                equalsToken, initializer, semicolonToken);
    }

    /**
     * Parse a const/listener declaration whose type-descriptor may be absent
     * (the first identifier is ambiguous between type-name and variable-name).
     *
     * @param metadata Metadata
     * @param qualifier Qualifier that precedes the declaration
     * @param constKeyword const/listener keyword already consumed
     * @param isListener <code>true</code> for a listener declaration, <code>false</code> for a const
     * @return Parsed node
     */
    private STNode parseConstantOrListenerDeclWithOptionalType(STNode metadata, STNode qualifier, STNode constKeyword,
                                                               boolean isListener) {
        STNode varNameOrTypeName = parseStatementStartIdentifier();
        STNode constDecl =
                parseConstantOrListenerDeclRhs(metadata, qualifier, constKeyword, varNameOrTypeName, isListener);
        return constDecl;
    }

    /**
     * Parse the component that follows the first identifier in a const decl. The identifier
     * can be either the type-name (a user defined type) or the var-name where the type-name
     * is not present.
     *
     * @param metadata Metadata
     * @param qualifier Qualifier that precedes the constant decl
     * @param keyword Keyword
     * @param typeOrVarName Identifier that follows the const-keyword
     * @param isListener Whether this is a listener declaration
     * @return Parsed node
     */
    private STNode parseConstantOrListenerDeclRhs(STNode metadata, STNode qualifier, STNode keyword,
                                                  STNode typeOrVarName, boolean isListener) {
        if (typeOrVarName.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {
            // A qualified name (mod:Type) can only be the type; the var-name must follow.
            STNode type = typeOrVarName;
            STNode variableName = parseVariableName();
            return parseListenerOrConstRhs(metadata, qualifier, keyword, isListener, type, variableName);
        }

        STNode type;
        STNode variableName;
        switch (peek().kind) {
            case IDENTIFIER_TOKEN:
                // Two identifiers: first was the type, second is the var-name.
                type = typeOrVarName;
                variableName = parseVariableName();
                break;
            case EQUAL_TOKEN:
                // "=" right after the identifier: the identifier was the var-name, no type.
                variableName = ((STSimpleNameReferenceNode) typeOrVarName).name;
                type = STNodeFactory.createEmptyNode();
                break;
            default:
                recover(peek(), ParserRuleContext.CONST_DECL_RHS, metadata, qualifier, keyword, typeOrVarName,
                        isListener);
                return parseConstantOrListenerDeclRhs(metadata, qualifier, keyword, typeOrVarName, isListener);
        }

        return parseListenerOrConstRhs(metadata, qualifier, keyword, isListener, type, variableName);
    }

    // Parse "= expression ;" and assemble the const or listener declaration node.
    private STNode parseListenerOrConstRhs(STNode metadata, STNode qualifier, STNode keyword, boolean isListener,
                                           STNode type, STNode variableName) {
        STNode equalsToken = parseAssignOp();
        STNode initializer = parseExpression();
        STNode semicolonToken = parseSemicolon();

        if (isListener) {
            return STNodeFactory.createListenerDeclarationNode(metadata, qualifier, keyword, type, variableName,
                    equalsToken, initializer, semicolonToken);
        }

        return STNodeFactory.createConstantDeclarationNode(metadata, qualifier, keyword, type, variableName,
                equalsToken, initializer, semicolonToken);
    }

    /**
     * Parse const keyword.
* * @return Parsed node */ private STNode parseConstantKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.CONST_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.CONST_KEYWORD); return parseConstantKeyword(); } } /** * Parse typeof expression. * <p> * <code> * typeof-expr := typeof expression * </code> * * @param isRhsExpr * @return Typeof expression node */ private STNode parseTypeofExpression(boolean isRhsExpr, boolean isInConditionalExpr) { STNode typeofKeyword = parseTypeofKeyword(); STNode expr = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false, isInConditionalExpr); return STNodeFactory.createTypeofExpressionNode(typeofKeyword, expr); } /** * Parse typeof-keyword. * * @return Typeof-keyword node */ private STNode parseTypeofKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.TYPEOF_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.TYPEOF_KEYWORD); return parseTypeofKeyword(); } } /** * Parse optional type descriptor. * <p> * <code>optional-type-descriptor := type-descriptor ? </code> * </p> * * @return Parsed node */ private STNode parseOptionalTypeDescriptor(STNode typeDescriptorNode) { startContext(ParserRuleContext.OPTIONAL_TYPE_DESCRIPTOR); STNode questionMarkToken = parseQuestionMark(); endContext(); typeDescriptorNode = validateForUsageOfVar(typeDescriptorNode); return STNodeFactory.createOptionalTypeDescriptorNode(typeDescriptorNode, questionMarkToken); } /** * Parse unary expression. * <p> * <code> * unary-expr := + expression | - expression | ~ expression | ! expression * </code> * * @param isRhsExpr * @return Unary expression node */ private STNode parseUnaryExpression(boolean isRhsExpr, boolean isInConditionalExpr) { STNode unaryOperator = parseUnaryOperator(); STNode expr = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false, isInConditionalExpr); return STNodeFactory.createUnaryExpressionNode(unaryOperator, expr); } /** * Parse unary operator. 
* <code>UnaryOperator := + | - | ~ | !</code> * * @return Parsed node */ private STNode parseUnaryOperator() { STToken token = peek(); if (isUnaryOperator(token.kind)) { return consume(); } else { recover(token, ParserRuleContext.UNARY_OPERATOR); return parseUnaryOperator(); } } /** * Check whether the given token kind is a unary operator. * * @param kind STToken kind * @return <code>true</code> if the token kind refers to a unary operator. <code>false</code> otherwise */ private boolean isUnaryOperator(SyntaxKind kind) { switch (kind) { case PLUS_TOKEN: case MINUS_TOKEN: case NEGATION_TOKEN: case EXCLAMATION_MARK_TOKEN: return true; default: return false; } } /** * Parse array type descriptor. * <p> * <code> * array-type-descriptor := member-type-descriptor [ [ array-length ] ] * member-type-descriptor := type-descriptor * array-length := * int-literal * | constant-reference-expr * | inferred-array-length * inferred-array-length := * * </code> * </p> * * @param memberTypeDesc * @return Parsed Node */ private STNode parseArrayTypeDescriptor(STNode memberTypeDesc) { startContext(ParserRuleContext.ARRAY_TYPE_DESCRIPTOR); STNode openBracketToken = parseOpenBracket(); STNode arrayLengthNode = parseArrayLength(); STNode closeBracketToken = parseCloseBracket(); endContext(); return createArrayTypeDesc(memberTypeDesc, openBracketToken, arrayLengthNode, closeBracketToken); } private STNode createArrayTypeDesc(STNode memberTypeDesc, STNode openBracketToken, STNode arrayLengthNode, STNode closeBracketToken) { memberTypeDesc = validateForUsageOfVar(memberTypeDesc); return STNodeFactory.createArrayTypeDescriptorNode(memberTypeDesc, openBracketToken, arrayLengthNode, closeBracketToken); } /** * Parse array length. 
* <p> * <code> * array-length := * int-literal * | constant-reference-expr * | inferred-array-length * constant-reference-expr := variable-reference-expr * </code> * </p> * * @return Parsed array length */ private STNode parseArrayLength() { STToken token = peek(); switch (token.kind) { case DECIMAL_INTEGER_LITERAL_TOKEN: case HEX_INTEGER_LITERAL_TOKEN: case ASTERISK_TOKEN: return parseBasicLiteral(); case CLOSE_BRACKET_TOKEN: return STNodeFactory.createEmptyNode(); case IDENTIFIER_TOKEN: return parseQualifiedIdentifier(ParserRuleContext.ARRAY_LENGTH); default: recover(token, ParserRuleContext.ARRAY_LENGTH); return parseArrayLength(); } } /** * Parse annotations. * <p> * <i>Note: In the ballerina spec ({@link https: * annotations-list is specified as one-or-more annotations. And the usage is marked as * optional annotations-list. However, for the consistency of the tree, here we make the * annotation-list as zero-or-more annotations, and the usage is not-optional.</i> * <p> * <code>annots := annotation*</code> * * @return Parsed node */ private STNode parseOptionalAnnotations() { startContext(ParserRuleContext.ANNOTATIONS); List<STNode> annotList = new ArrayList<>(); STToken nextToken = peek(); while (nextToken.kind == SyntaxKind.AT_TOKEN) { annotList.add(parseAnnotation()); nextToken = peek(); } endContext(); return STNodeFactory.createNodeList(annotList); } /** * Parse annotation list with at least one annotation. * * @return Annotation list */ private STNode parseAnnotations() { startContext(ParserRuleContext.ANNOTATIONS); List<STNode> annotList = new ArrayList<>(); annotList.add(parseAnnotation()); while (peek().kind == SyntaxKind.AT_TOKEN) { annotList.add(parseAnnotation()); } endContext(); return STNodeFactory.createNodeList(annotList); } /** * Parse annotation attachment. 
* <p> * <code>annotation := @ annot-tag-reference annot-value</code> * * @return Parsed node */ private STNode parseAnnotation() { STNode atToken = parseAtToken(); STNode annotReference; if (peek().kind != SyntaxKind.IDENTIFIER_TOKEN) { annotReference = STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN); } else { annotReference = parseQualifiedIdentifier(ParserRuleContext.ANNOT_REFERENCE); } STNode annotValue; if (peek().kind == SyntaxKind.OPEN_BRACE_TOKEN) { annotValue = parseMappingConstructorExpr(); } else { annotValue = STNodeFactory.createEmptyNode(); } return STNodeFactory.createAnnotationNode(atToken, annotReference, annotValue); } /** * Parse '@' token. * * @return Parsed node */ private STNode parseAtToken() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.AT_TOKEN) { return consume(); } else { recover(nextToken, ParserRuleContext.AT); return parseAtToken(); } } /** * Parse metadata. Meta data consist of optional doc string and * an annotations list. * <p> * <code>metadata := [DocumentationString] annots</code> * * @return Parse node */ private STNode parseMetaData() { STNode docString; STNode annotations; switch (peek().kind) { case DOCUMENTATION_STRING: docString = parseMarkdownDocumentation(); annotations = parseOptionalAnnotations(); break; case AT_TOKEN: docString = STNodeFactory.createEmptyNode(); annotations = parseOptionalAnnotations(); break; default: return STNodeFactory.createEmptyNode(); } return createMetadata(docString, annotations); } /** * Create metadata node. * * @return A metadata node */ private STNode createMetadata(STNode docString, STNode annotations) { if (annotations == null && docString == null) { return STNodeFactory.createEmptyNode(); } else { return STNodeFactory.createMetadataNode(docString, annotations); } } /** * Parse is expression. 
* <code> * is-expr := expression is type-descriptor * </code> * * @param lhsExpr Preceding expression of the is expression * @return Is expression node */ private STNode parseTypeTestExpression(STNode lhsExpr, boolean isInConditionalExpr) { STNode isKeyword = parseIsKeyword(); STNode typeDescriptor = parseTypeDescriptorInExpression(ParserRuleContext.TYPE_DESC_IN_EXPRESSION, isInConditionalExpr); return STNodeFactory.createTypeTestExpressionNode(lhsExpr, isKeyword, typeDescriptor); } /** * Parse is-keyword. * * @return Is-keyword node */ private STNode parseIsKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.IS_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.IS_KEYWORD); return parseIsKeyword(); } } /** * Parse local type definition statement statement. * <code>ocal-type-defn-stmt := [annots] type identifier type-descriptor ;</code> * * @return local type definition statement statement */ private STNode parseLocalTypeDefinitionStatement(STNode annots) { startContext(ParserRuleContext.LOCAL_TYPE_DEFINITION_STMT); STNode typeKeyword = parseTypeKeyword(); STNode typeName = parseTypeName(); STNode typeDescriptor = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_DEF); STNode semicolon = parseSemicolon(); endContext(); return STNodeFactory.createLocalTypeDefinitionStatementNode(annots, typeKeyword, typeName, typeDescriptor, semicolon); } /** * Parse statement which is only consists of an action or expression. * * @param annots Annotations * @return Statement node */ private STNode parseExpressionStatement(STNode annots) { startContext(ParserRuleContext.EXPRESSION_STATEMENT); STNode expression = parseActionOrExpressionInLhs(annots); return getExpressionAsStatement(expression); } /** * Parse statements that starts with an expression. 
 *
     * @param annots Annotations
     * @return Statement node
     */
    private STNode parseStatementStartWithExpr(STNode annots) {
        startContext(ParserRuleContext.AMBIGUOUS_STMT);
        STNode expr = parseActionOrExpressionInLhs(annots);
        return parseStatementStartWithExprRhs(expr);
    }

    /**
     * Parse the component followed by the expression, at the beginning of a statement.
     *
     * @param expression Action or expression in LHS
     * @return Statement node
     */
    private STNode parseStatementStartWithExprRhs(STNode expression) {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case EQUAL_TOKEN:
                // "expr = ..." is an assignment statement.
                switchContext(ParserRuleContext.ASSIGNMENT_STMT);
                return parseAssignmentStmtRhs(expression);
            case SEMICOLON_TOKEN:
                return getExpressionAsStatement(expression);
            case IDENTIFIER_TOKEN:
                // NOTE: deliberate fall-through -- an identifier here is handled by recovery below.
            default:
                if (isCompoundBinaryOperator(nextToken.kind)) {
                    // e.g. "expr += ..." -- compound assignment.
                    return parseCompoundAssignmentStmtRhs(expression);
                }

                // Choose the recovery context based on whether the LHS can itself be a statement.
                ParserRuleContext context;
                if (isPossibleExpressionStatement(expression)) {
                    context = ParserRuleContext.EXPR_STMT_RHS;
                } else {
                    context = ParserRuleContext.STMT_START_WITH_EXPR_RHS;
                }
                recover(peek(), context, expression);
                return parseStatementStartWithExprRhs(expression);
        }
    }

    // Whether the given node kind can stand alone as an expression/action statement.
    private boolean isPossibleExpressionStatement(STNode expression) {
        switch (expression.kind) {
            case METHOD_CALL:
            case FUNCTION_CALL:
            case CHECK_EXPRESSION:
            case REMOTE_METHOD_CALL_ACTION:
            case CHECK_ACTION:
            case BRACED_ACTION:
            case START_ACTION:
            case TRAP_ACTION:
            case FLUSH_ACTION:
            case ASYNC_SEND_ACTION:
            case SYNC_SEND_ACTION:
            case RECEIVE_ACTION:
            case WAIT_ACTION:
            case QUERY_ACTION:
            case COMMIT_ACTION:
                return true;
            default:
                return false;
        }
    }

    // Wrap an already-parsed expression/action into the appropriate statement node.
    // Anything that is not a valid expression-statement gets an INVALID_EXPRESSION_STATEMENT
    // node with a diagnostic attached.
    private STNode getExpressionAsStatement(STNode expression) {
        switch (expression.kind) {
            case METHOD_CALL:
            case FUNCTION_CALL:
            case CHECK_EXPRESSION:
                return parseCallStatement(expression);
            case REMOTE_METHOD_CALL_ACTION:
            case CHECK_ACTION:
            case BRACED_ACTION:
            case START_ACTION:
            case TRAP_ACTION:
            case FLUSH_ACTION:
            case ASYNC_SEND_ACTION:
            case SYNC_SEND_ACTION:
            case RECEIVE_ACTION:
            case WAIT_ACTION:
            case QUERY_ACTION:
            case COMMIT_ACTION:
                return parseActionStatement(expression);
            default:
                STNode semicolon = parseSemicolon();
                endContext();
                STNode exprStmt = STNodeFactory.createExpressionStatementNode(SyntaxKind.INVALID_EXPRESSION_STATEMENT,
                        expression, semicolon);
                exprStmt = SyntaxErrors.addDiagnostic(exprStmt, DiagnosticErrorCode.ERROR_INVALID_EXPRESSION_STATEMENT);
                return exprStmt;
        }
    }

    // Re-interpret an indexed-expression (expr[...]) as an array-type-descriptor,
    // validating that the "index" is a legal array-length.
    private STNode parseArrayTypeDescriptorNode(STIndexedExpressionNode indexedExpr) {
        STNode memberTypeDesc = getTypeDescFromExpr(indexedExpr.containerExpression);
        STNodeList lengthExprs = (STNodeList) indexedExpr.keyExpression;
        if (lengthExprs.isEmpty()) {
            return createArrayTypeDesc(memberTypeDesc, indexedExpr.openBracket, STNodeFactory.createEmptyNode(),
                    indexedExpr.closeBracket);
        }

        // Validate the array length expression: only *, identifiers, and integer literals are legal.
        STNode lengthExpr = lengthExprs.get(0);
        switch (lengthExpr.kind) {
            case ASTERISK_LITERAL:
            case SIMPLE_NAME_REFERENCE:
            case QUALIFIED_NAME_REFERENCE:
                break;
            case NUMERIC_LITERAL:
                SyntaxKind innerChildKind = lengthExpr.childInBucket(0).kind;
                if (innerChildKind == SyntaxKind.DECIMAL_INTEGER_LITERAL_TOKEN ||
                        innerChildKind == SyntaxKind.HEX_INTEGER_LITERAL_TOKEN) {
                    break;
                }
                // NOTE: deliberate fall-through for non-integer numeric literals.
            default:
                // Invalid length: attach it to the open bracket as invalid-node minutiae with a diagnostic.
                STNode newOpenBracketWithDiagnostics = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(
                        indexedExpr.openBracket, lengthExpr, DiagnosticErrorCode.ERROR_INVALID_ARRAY_LENGTH);
                indexedExpr = indexedExpr.replace(indexedExpr.openBracket, newOpenBracketWithDiagnostics);
                lengthExpr = STNodeFactory.createEmptyNode();
        }

        return createArrayTypeDesc(memberTypeDesc, indexedExpr.openBracket, lengthExpr, indexedExpr.closeBracket);
    }

    /**
     * <p>
     * Parse call statement, given the call expression.
 * </p>
     * <code>
     * call-stmt := call-expr ;
     * <br/>
     * call-expr := function-call-expr | method-call-expr | checking-keyword call-expr
     * </code>
     *
     * @param expression Call expression associated with the call statement
     * @return Call statement node
     */
    private STNode parseCallStatement(STNode expression) {
        STNode semicolon = parseSemicolon();
        endContext();
        return STNodeFactory.createExpressionStatementNode(SyntaxKind.CALL_STATEMENT, expression, semicolon);
    }

    // Wrap an action into an action-statement by consuming the terminating semicolon.
    private STNode parseActionStatement(STNode action) {
        STNode semicolon = parseSemicolon();
        endContext();
        return STNodeFactory.createExpressionStatementNode(SyntaxKind.ACTION_STATEMENT, action, semicolon);
    }

    /**
     * Parse remote method call action, given the starting expression.
     * <p>
     * <code>
     * remote-method-call-action := expression -> method-name ( arg-list )
     * <br/>
     * async-send-action := expression -> peer-worker ;
     * </code>
     *
     * @param expression LHS expression
     * @param isRhsExpr Is this an RHS action
     * @return Remote method call action or async send action node
     */
    private STNode parseRemoteMethodCallOrAsyncSendAction(STNode expression, boolean isRhsExpr) {
        STNode rightArrow = parseRightArrow();
        return parseRemoteCallOrAsyncSendActionRhs(expression, isRhsExpr, rightArrow);
    }

    // Parse the name that follows "->": either a peer-worker (default/identifier) for an
    // async-send, or a method name for a remote method call. Disambiguation happens in
    // parseRemoteCallOrAsyncSendEnd based on the token that follows the name.
    private STNode parseRemoteCallOrAsyncSendActionRhs(STNode expression, boolean isRhsExpr, STNode rightArrow) {
        STNode name;
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case DEFAULT_KEYWORD:
                // "expr -> default" is always an async send to the default worker.
                STNode defaultKeyword = parseDefaultKeyword();
                name = STNodeFactory.createSimpleNameReferenceNode(defaultKeyword);
                return parseAsyncSendAction(expression, rightArrow, name);
            case IDENTIFIER_TOKEN:
                name = STNodeFactory.createSimpleNameReferenceNode(parseFunctionName());
                break;
            case CONTINUE_KEYWORD:
            case COMMIT_KEYWORD:
                // These keywords are valid worker names in this position.
                name = getKeywordAsSimpleNameRef();
                break;
            default:
                STToken token = peek();
                recover(token, ParserRuleContext.REMOTE_CALL_OR_ASYNC_SEND_RHS, expression, isRhsExpr, rightArrow);
                return parseRemoteCallOrAsyncSendActionRhs(expression, isRhsExpr, rightArrow);
        }

        return parseRemoteCallOrAsyncSendEnd(expression, rightArrow, name);
    }

    // "(" after the name means a remote method call; ";" means an async send.
    private STNode parseRemoteCallOrAsyncSendEnd(STNode expression, STNode rightArrow, STNode name) {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case OPEN_PAREN_TOKEN:
                return parseRemoteMethodCallAction(expression, rightArrow, name);
            case SEMICOLON_TOKEN:
                return parseAsyncSendAction(expression, rightArrow, name);
            default:
                recover(peek(), ParserRuleContext.REMOTE_CALL_OR_ASYNC_SEND_END, expression, rightArrow, name);
                return parseRemoteCallOrAsyncSendEnd(expression, rightArrow, name);
        }
    }

    /**
     * Parse default keyword.
     *
     * @return default keyword node
     */
    private STNode parseDefaultKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.DEFAULT_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.DEFAULT_KEYWORD);
            return parseDefaultKeyword();
        }
    }

    // Assemble an async-send action node: expression -> peer-worker.
    private STNode parseAsyncSendAction(STNode expression, STNode rightArrow, STNode peerWorker) {
        return STNodeFactory.createAsyncSendActionNode(expression, rightArrow, peerWorker);
    }

    // Parse "( arg-list )" and assemble a remote-method-call action node.
    private STNode parseRemoteMethodCallAction(STNode expression, STNode rightArrow, STNode name) {
        STNode openParenToken = parseOpenParenthesis(ParserRuleContext.ARG_LIST_START);
        STNode arguments = parseArgsList();
        STNode closeParenToken = parseCloseParenthesis();
        return STNodeFactory.createRemoteMethodCallActionNode(expression, rightArrow, name, openParenToken, arguments,
                closeParenToken);
    }

    /**
     * Parse right arrow (<code>-></code>) token.
     *
     * @return Parsed node
     */
    private STNode parseRightArrow() {
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.RIGHT_ARROW_TOKEN) {
            return consume();
        } else {
            recover(nextToken, ParserRuleContext.RIGHT_ARROW);
            return parseRightArrow();
        }
    }

    /**
     * Parse parameterized type descriptor.
* parameterized-type-descriptor := map type-parameter | future type-parameter | typedesc type-parameter * * @return Parsed node */ private STNode parseParameterizedTypeDescriptor() { STNode parameterizedTypeKeyword = parseParameterizedTypeKeyword(); STNode typeParameter = parseTypeParameter(); return STNodeFactory.createParameterizedTypeDescriptorNode(parameterizedTypeKeyword, typeParameter); } /** * Parse <code>map</code> or <code>future</code> keyword token. * * @return Parsed node */ private STNode parseParameterizedTypeKeyword() { STToken nextToken = peek(); switch (nextToken.kind) { case MAP_KEYWORD: case FUTURE_KEYWORD: return consume(); default: recover(nextToken, ParserRuleContext.PARAMETERIZED_TYPE); return parseParameterizedTypeKeyword(); } } /** * Parse <code> < </code> token. * * @return Parsed node */ private STNode parseGTToken() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.GT_TOKEN) { return consume(); } else { recover(nextToken, ParserRuleContext.GT); return parseGTToken(); } } /** * Parse <code> > </code> token. * * @return Parsed node */ private STNode parseLTToken() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.LT_TOKEN) { return consume(); } else { recover(nextToken, ParserRuleContext.LT); return parseLTToken(); } } /** * Parse nil literal. Here nil literal is only referred to ( ). * * @return Parsed node */ private STNode parseNilLiteral() { startContext(ParserRuleContext.NIL_LITERAL); STNode openParenthesisToken = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS); STNode closeParenthesisToken = parseCloseParenthesis(); endContext(); return STNodeFactory.createNilLiteralNode(openParenthesisToken, closeParenthesisToken); } /** * Parse annotation declaration, given the qualifier. 
* * @param metadata Metadata * @param qualifier Qualifier that precedes the listener declaration * @param constKeyword Const keyword * @return Parsed node */ private STNode parseAnnotationDeclaration(STNode metadata, STNode qualifier, STNode constKeyword) { startContext(ParserRuleContext.ANNOTATION_DECL); STNode annotationKeyword = parseAnnotationKeyword(); STNode annotDecl = parseAnnotationDeclFromType(metadata, qualifier, constKeyword, annotationKeyword); endContext(); return annotDecl; } /** * Parse annotation keyword. * * @return Parsed node */ private STNode parseAnnotationKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.ANNOTATION_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.ANNOTATION_KEYWORD); return parseAnnotationKeyword(); } } /** * Parse the components that follows after the annotation keyword of a annotation declaration. * * @param metadata Metadata * @param qualifier Qualifier that precedes the constant decl * @param constKeyword Const keyword * @param annotationKeyword * @return Parsed node */ private STNode parseAnnotationDeclFromType(STNode metadata, STNode qualifier, STNode constKeyword, STNode annotationKeyword) { STToken nextToken = peek(); switch (nextToken.kind) { case IDENTIFIER_TOKEN: return parseAnnotationDeclWithOptionalType(metadata, qualifier, constKeyword, annotationKeyword); default: if (isTypeStartingToken(nextToken.kind)) { break; } recover(peek(), ParserRuleContext.ANNOT_DECL_OPTIONAL_TYPE, metadata, qualifier, constKeyword, annotationKeyword); return parseAnnotationDeclFromType(metadata, qualifier, constKeyword, annotationKeyword); } STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANNOTATION_DECL); STNode annotTag = parseAnnotationTag(); return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc, annotTag); } /** * Parse annotation tag. 
* <p> * <code>annot-tag := identifier</code> * * @return */ private STNode parseAnnotationTag() { STToken token = peek(); if (token.kind == SyntaxKind.IDENTIFIER_TOKEN) { return consume(); } else { recover(peek(), ParserRuleContext.ANNOTATION_TAG); return parseAnnotationTag(); } } private STNode parseAnnotationDeclWithOptionalType(STNode metadata, STNode qualifier, STNode constKeyword, STNode annotationKeyword) { STNode typeDescOrAnnotTag = parseQualifiedIdentifier(ParserRuleContext.ANNOT_DECL_OPTIONAL_TYPE); if (typeDescOrAnnotTag.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) { STNode annotTag = parseAnnotationTag(); return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDescOrAnnotTag, annotTag); } STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN || isValidTypeContinuationToken(nextToken)) { STNode typeDesc = parseComplexTypeDescriptor(typeDescOrAnnotTag, ParserRuleContext.TYPE_DESC_IN_ANNOTATION_DECL, false); STNode annotTag = parseAnnotationTag(); return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc, annotTag); } STNode annotTag = ((STSimpleNameReferenceNode) typeDescOrAnnotTag).name; return parseAnnotationDeclRhs(metadata, qualifier, constKeyword, annotationKeyword, annotTag); } /** * Parse the component that follows the first identifier in an annotation decl. The identifier * can be either the type-name (a user defined type) or the annot-tag, where the type-name * is not present. 
 *
     * @param metadata Metadata
     * @param qualifier Qualifier that precedes the annotation decl
     * @param constKeyword Const keyword
     * @param annotationKeyword Annotation keyword
     * @param typeDescOrAnnotTag Identifier that follows the annotation-keyword
     * @return Parsed node
     */
    private STNode parseAnnotationDeclRhs(STNode metadata, STNode qualifier, STNode constKeyword,
                                          STNode annotationKeyword, STNode typeDescOrAnnotTag) {
        STToken nextToken = peek();
        STNode typeDesc;
        STNode annotTag;
        switch (nextToken.kind) {
            case IDENTIFIER_TOKEN:
                // Two identifiers: the first was the type, the second is the annot-tag.
                typeDesc = typeDescOrAnnotTag;
                annotTag = parseAnnotationTag();
                break;
            case SEMICOLON_TOKEN:
            case ON_KEYWORD:
                // Declaration ends (or attach-points begin) right away: no type was given.
                typeDesc = STNodeFactory.createEmptyNode();
                annotTag = typeDescOrAnnotTag;
                break;
            default:
                recover(peek(), ParserRuleContext.ANNOT_DECL_RHS, metadata, qualifier, constKeyword, annotationKeyword,
                        typeDescOrAnnotTag);
                return parseAnnotationDeclRhs(metadata, qualifier, constKeyword, annotationKeyword, typeDescOrAnnotTag);
        }

        return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc,
                annotTag);
    }

    // Parse the optional "on attach-point (, attach-point)*" clause and the terminating
    // semicolon, then assemble the annotation declaration node.
    private STNode parseAnnotationDeclAttachPoints(STNode metadata, STNode qualifier, STNode constKeyword,
                                                   STNode annotationKeyword, STNode typeDesc, STNode annotTag) {
        STNode onKeyword;
        STNode attachPoints;
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case SEMICOLON_TOKEN:
                onKeyword = STNodeFactory.createEmptyNode();
                attachPoints = STNodeFactory.createEmptyNodeList();
                break;
            case ON_KEYWORD:
                onKeyword = parseOnKeyword();
                attachPoints = parseAnnotationAttachPoints();
                // "on" with an empty attach-point list is an error.
                onKeyword = cloneWithDiagnosticIfListEmpty(attachPoints, onKeyword,
                        DiagnosticErrorCode.ERROR_MISSING_ANNOTATION_ATTACH_POINT);
                break;
            default:
                recover(peek(), ParserRuleContext.ANNOT_OPTIONAL_ATTACH_POINTS, metadata, qualifier, constKeyword,
                        annotationKeyword, typeDesc, annotTag);
                return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc,
                        annotTag);
        }

        STNode semicolonToken = parseSemicolon();
        return STNodeFactory.createAnnotationDeclarationNode(metadata, qualifier, constKeyword, annotationKeyword,
                typeDesc, annotTag, onKeyword, attachPoints, semicolonToken);
    }

    /**
     * Parse annotation attach points.
     * <p>
     * <code>
     * annot-attach-points := annot-attach-point (, annot-attach-point)*
     * <br/><br/>
     * annot-attach-point := dual-attach-point | source-only-attach-point
     * <br/><br/>
     * dual-attach-point := [source] dual-attach-point-ident
     * <br/><br/>
     * dual-attach-point-ident :=
     *     type
     *     | class
     *     | [object|resource] function
     *     | parameter
     *     | return
     *     | service
     *     | [object|record] field
     * <br/><br/>
     * source-only-attach-point := source source-only-attach-point-ident
     * <br/><br/>
     * source-only-attach-point-ident :=
     *     annotation
     *     | external
     *     | var
     *     | const
     *     | listener
     *     | worker
     * </code>
     *
     * @return Parsed node
     */
    private STNode parseAnnotationAttachPoints() {
        startContext(ParserRuleContext.ANNOT_ATTACH_POINTS_LIST);
        List<STNode> attachPoints = new ArrayList<>();

        STToken nextToken = peek();
        if (isEndAnnotAttachPointList(nextToken.kind)) {
            endContext();
            return STNodeFactory.createEmptyNodeList();
        }

        // First attach-point, then ", attach-point" repetitions.
        STNode attachPoint = parseAnnotationAttachPoint();
        attachPoints.add(attachPoint);

        nextToken = peek();
        STNode leadingComma;
        while (!isEndAnnotAttachPointList(nextToken.kind)) {
            leadingComma = parseAttachPointEnd();
            if (leadingComma == null) {
                // Semicolon reached: list is complete.
                break;
            }
            attachPoints.add(leadingComma);

            attachPoint = parseAnnotationAttachPoint();
            if (attachPoint == null) {
                // EOF after a comma: fabricate a missing attach-point with a diagnostic.
                attachPoint = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,
                        DiagnosticErrorCode.ERROR_MISSING_ANNOTATION_ATTACH_POINT);
                attachPoints.add(attachPoint);
                break;
            }

            attachPoints.add(attachPoint);
            nextToken = peek();
        }

        endContext();
        return STNodeFactory.createNodeList(attachPoints);
    }

    /**
     * Parse annotation attach point end.
 *
 * @return Parsed node — the separating comma token, or {@code null} when the attach-point list ends
 */
private STNode parseAttachPointEnd() {
    switch (peek().kind) {
        case SEMICOLON_TOKEN:
            // null marks the end of the attach-point list; the semicolon itself is consumed by the caller
            return null;
        case COMMA_TOKEN:
            return consume();
        default:
            recover(peek(), ParserRuleContext.ATTACH_POINT_END);
            return parseAttachPointEnd();
    }
}

// An annotation attach-point list is terminated by the declaration's semicolon (or EOF on bad input).
private boolean isEndAnnotAttachPointList(SyntaxKind tokenKind) {
    switch (tokenKind) {
        case EOF_TOKEN:
        case SEMICOLON_TOKEN:
            return true;
        default:
            return false;
    }
}

/**
 * Parse annotation attach point.
 *
 * @return Parsed node, or {@code null} on reaching EOF
 */
private STNode parseAnnotationAttachPoint() {
    switch (peek().kind) {
        case EOF_TOKEN:
            return null;

        // Source-only attach-point idents. The mandatory `source` keyword is parsed first;
        // if it is missing, parseSourceKeyword() recovers (inserting the keyword).
        case ANNOTATION_KEYWORD:
        case EXTERNAL_KEYWORD:
        case VAR_KEYWORD:
        case CONST_KEYWORD:
        case LISTENER_KEYWORD:
        case WORKER_KEYWORD:
        case SOURCE_KEYWORD:
            STNode sourceKeyword = parseSourceKeyword();
            return parseAttachPointIdent(sourceKeyword);

        // Dual attach-point idents: no `source` keyword present.
        case OBJECT_KEYWORD:
        case TYPE_KEYWORD:
        case RESOURCE_KEYWORD:
        case FUNCTION_KEYWORD:
        case PARAMETER_KEYWORD:
        case RETURN_KEYWORD:
        case SERVICE_KEYWORD:
        case FIELD_KEYWORD:
        case RECORD_KEYWORD:
        case CLASS_KEYWORD:
            sourceKeyword = STNodeFactory.createEmptyNode();
            STNode firstIdent = consume();
            return parseDualAttachPointIdent(sourceKeyword, firstIdent);
        default:
            recover(peek(), ParserRuleContext.ATTACH_POINT);
            return parseAnnotationAttachPoint();
    }
}

/**
 * Parse source keyword.
 *
 * @return Parsed node
 */
private STNode parseSourceKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.SOURCE_KEYWORD) {
        return consume();
    } else {
        recover(token, ParserRuleContext.SOURCE_KEYWORD);
        return parseSourceKeyword();
    }
}

/**
 * Parse attach point ident given the (possibly empty) source keyword.
 * <p>
 * <code>
 * source-only-attach-point-ident := annotation | external | var | const | listener | worker
 * <br/><br/>
 * dual-attach-point-ident := [object] type | [object|resource] function | parameter
 * | return | service | [object|record] field
 * </code>
 *
 * @param sourceKeyword Source keyword
 * @return Parsed node
 */
private STNode parseAttachPointIdent(STNode sourceKeyword) {
    switch (peek().kind) {
        // Source-only idents: a single keyword token, no second ident.
        case ANNOTATION_KEYWORD:
        case EXTERNAL_KEYWORD:
        case VAR_KEYWORD:
        case CONST_KEYWORD:
        case LISTENER_KEYWORD:
        case WORKER_KEYWORD:
            STNode firstIdent = consume();
            STNode secondIdent = STNodeFactory.createEmptyNode();
            return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, firstIdent, secondIdent);

        // Dual idents: may be followed by a second ident (e.g. `object function`).
        case OBJECT_KEYWORD:
        case RESOURCE_KEYWORD:
        case RECORD_KEYWORD:
        case TYPE_KEYWORD:
        case FUNCTION_KEYWORD:
        case PARAMETER_KEYWORD:
        case RETURN_KEYWORD:
        case SERVICE_KEYWORD:
        case FIELD_KEYWORD:
        case CLASS_KEYWORD:
            firstIdent = consume();
            return parseDualAttachPointIdent(sourceKeyword, firstIdent);
        default:
            recover(peek(), ParserRuleContext.ATTACH_POINT_IDENT, sourceKeyword);
            return parseAttachPointIdent(sourceKeyword);
    }
}

/**
 * Parse dual-attach-point ident.
 *
 * @param sourceKeyword Source keyword
 * @param firstIdent first part of the dual attach-point
 * @return Parsed node
 */
private STNode parseDualAttachPointIdent(STNode sourceKeyword, STNode firstIdent) {
    STNode secondIdent;
    switch (firstIdent.kind) {
        case OBJECT_KEYWORD:
            // `object` may be followed by `function` or `field`
            secondIdent = parseIdentAfterObjectIdent();
            break;
        case RESOURCE_KEYWORD:
            // `resource` must be followed by `function`
            secondIdent = parseFunctionIdent();
            break;
        case RECORD_KEYWORD:
            // `record` must be followed by `field`
            secondIdent = parseFieldIdent();
            break;
        case TYPE_KEYWORD:
        case FUNCTION_KEYWORD:
        case PARAMETER_KEYWORD:
        case RETURN_KEYWORD:
        case SERVICE_KEYWORD:
        case FIELD_KEYWORD:
        case CLASS_KEYWORD:
        default:
            // Standalone idents take no second part.
            secondIdent = STNodeFactory.createEmptyNode();
            break;
    }

    return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, firstIdent, secondIdent);
}

/**
 * Parse the idents that are supported after object-ident.
 *
 * @return Parsed node
 */
private STNode parseIdentAfterObjectIdent() {
    STToken token = peek();
    switch (token.kind) {
        case FUNCTION_KEYWORD:
        case FIELD_KEYWORD:
            return consume();
        default:
            recover(token, ParserRuleContext.IDENT_AFTER_OBJECT_IDENT);
            return parseIdentAfterObjectIdent();
    }
}

/**
 * Parse function ident.
 *
 * @return Parsed node
 */
private STNode parseFunctionIdent() {
    STToken token = peek();
    if (token.kind == SyntaxKind.FUNCTION_KEYWORD) {
        return consume();
    } else {
        recover(token, ParserRuleContext.FUNCTION_IDENT);
        return parseFunctionIdent();
    }
}

/**
 * Parse field ident.
 *
 * @return Parsed node
 */
private STNode parseFieldIdent() {
    STToken token = peek();
    if (token.kind == SyntaxKind.FIELD_KEYWORD) {
        return consume();
    } else {
        recover(token, ParserRuleContext.FIELD_IDENT);
        return parseFieldIdent();
    }
}

/**
 * Parse XML namespace declaration.
* <p> * <code>xmlns-decl := xmlns xml-namespace-uri [ as xml-namespace-prefix ] ; * <br/> * xml-namespace-uri := simple-const-expr * <br/> * xml-namespace-prefix := identifier * </code> * * @return */ private STNode parseXMLNamespaceDeclaration(boolean isModuleVar) { startContext(ParserRuleContext.XML_NAMESPACE_DECLARATION); STNode xmlnsKeyword = parseXMLNSKeyword(); STNode namespaceUri = parseSimpleConstExpr(); while (!isValidXMLNameSpaceURI(namespaceUri)) { xmlnsKeyword = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(xmlnsKeyword, namespaceUri, DiagnosticErrorCode.ERROR_INVALID_XML_NAMESPACE_URI); namespaceUri = parseSimpleConstExpr(); } STNode xmlnsDecl = parseXMLDeclRhs(xmlnsKeyword, namespaceUri, isModuleVar); endContext(); return xmlnsDecl; } /** * Parse xmlns keyword. * * @return Parsed node */ private STNode parseXMLNSKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.XMLNS_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.XMLNS_KEYWORD); return parseXMLNSKeyword(); } } private boolean isValidXMLNameSpaceURI(STNode expr) { switch (expr.kind) { case STRING_LITERAL: case QUALIFIED_NAME_REFERENCE: case SIMPLE_NAME_REFERENCE: return true; case IDENTIFIER_TOKEN: default: return false; } } private STNode parseSimpleConstExpr() { startContext(ParserRuleContext.CONSTANT_EXPRESSION); STNode expr = parseSimpleConstExprInternal(); endContext(); return expr; } /** * Parse simple constants expr. 
* * @return Parsed node */ private STNode parseSimpleConstExprInternal() { switch (peek().kind) { case STRING_LITERAL_TOKEN: case DECIMAL_INTEGER_LITERAL_TOKEN: case HEX_INTEGER_LITERAL_TOKEN: case DECIMAL_FLOATING_POINT_LITERAL_TOKEN: case HEX_FLOATING_POINT_LITERAL_TOKEN: case TRUE_KEYWORD: case FALSE_KEYWORD: case NULL_KEYWORD: return parseBasicLiteral(); case IDENTIFIER_TOKEN: return parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF); case PLUS_TOKEN: case MINUS_TOKEN: return parseSignedIntOrFloat(); case OPEN_PAREN_TOKEN: return parseNilLiteral(); default: STToken token = peek(); recover(token, ParserRuleContext.CONSTANT_EXPRESSION_START); return parseSimpleConstExprInternal(); } } /** * Parse the portion after the namsepsace-uri of an XML declaration. * * @param xmlnsKeyword XMLNS keyword * @param namespaceUri Namespace URI * @return Parsed node */ private STNode parseXMLDeclRhs(STNode xmlnsKeyword, STNode namespaceUri, boolean isModuleVar) { STNode asKeyword = STNodeFactory.createEmptyNode(); STNode namespacePrefix = STNodeFactory.createEmptyNode(); switch (peek().kind) { case AS_KEYWORD: asKeyword = parseAsKeyword(); namespacePrefix = parseNamespacePrefix(); break; case SEMICOLON_TOKEN: break; default: recover(peek(), ParserRuleContext.XML_NAMESPACE_PREFIX_DECL, xmlnsKeyword, namespaceUri, isModuleVar); return parseXMLDeclRhs(xmlnsKeyword, namespaceUri, isModuleVar); } STNode semicolon = parseSemicolon(); if (isModuleVar) { return STNodeFactory.createModuleXMLNamespaceDeclarationNode(xmlnsKeyword, namespaceUri, asKeyword, namespacePrefix, semicolon); } return STNodeFactory.createXMLNamespaceDeclarationNode(xmlnsKeyword, namespaceUri, asKeyword, namespacePrefix, semicolon); } /** * Parse import prefix. 
* * @return Parsed node */ private STNode parseNamespacePrefix() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) { return consume(); } else { recover(peek(), ParserRuleContext.NAMESPACE_PREFIX); return parseNamespacePrefix(); } } /** * Parse named worker declaration. * <p> * <code>named-worker-decl := [annots] worker worker-name return-type-descriptor { sequence-stmt }</code> * * @param annots Annotations attached to the worker decl * @return Parsed node */ private STNode parseNamedWorkerDeclaration(STNode annots) { startContext(ParserRuleContext.NAMED_WORKER_DECL); STNode workerKeyword = parseWorkerKeyword(); STNode workerName = parseWorkerName(); STNode returnTypeDesc = parseReturnTypeDescriptor(); STNode workerBody = parseBlockNode(); endContext(); return STNodeFactory.createNamedWorkerDeclarationNode(annots, workerKeyword, workerName, returnTypeDesc, workerBody); } private STNode parseReturnTypeDescriptor() { STToken token = peek(); if (token.kind != SyntaxKind.RETURNS_KEYWORD) { return STNodeFactory.createEmptyNode(); } STNode returnsKeyword = consume(); STNode annot = parseOptionalAnnotations(); STNode type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_RETURN_TYPE_DESC); return STNodeFactory.createReturnTypeDescriptorNode(returnsKeyword, annot, type); } /** * Parse worker keyword. * * @return Parsed node */ private STNode parseWorkerKeyword() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.WORKER_KEYWORD) { return consume(); } else { recover(peek(), ParserRuleContext.WORKER_KEYWORD); return parseWorkerKeyword(); } } /** * Parse worker name. * <p> * <code>worker-name := identifier</code> * * @return Parsed node */ private STNode parseWorkerName() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) { return consume(); } else { recover(peek(), ParserRuleContext.WORKER_NAME); return parseWorkerName(); } } /** * Parse lock statement. 
 * <code>lock-stmt := lock block-stmt [on-fail-clause]</code>
 *
 * @return Lock statement
 */
private STNode parseLockStatement() {
    startContext(ParserRuleContext.LOCK_STMT);
    STNode lockKeyword = parseLockKeyword();
    STNode blockStatement = parseBlockNode();
    endContext();

    // The optional on-fail clause is parsed outside the lock-stmt context.
    STNode onFailClause = parseOptionalOnFailClause();
    return STNodeFactory.createLockStatementNode(lockKeyword, blockStatement, onFailClause);
}

/**
 * Parse lock-keyword.
 *
 * @return lock-keyword node
 */
private STNode parseLockKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.LOCK_KEYWORD) {
        return consume();
    } else {
        recover(token, ParserRuleContext.LOCK_KEYWORD);
        return parseLockKeyword();
    }
}

/**
 * Parse union type descriptor.
 * union-type-descriptor := type-descriptor | type-descriptor
 *
 * @param leftTypeDesc Type desc in the LHS of the union type desc.
 * @param context Current context.
 * @param isTypedBindingPattern Whether parsing within a typed-binding-pattern — TODO confirm exact semantics
 * @return parsed union type desc node
 */
private STNode parseUnionTypeDescriptor(STNode leftTypeDesc, ParserRuleContext context,
                                        boolean isTypedBindingPattern) {
    STNode pipeToken = parsePipeToken();
    STNode rightTypeDesc = parseTypeDescriptor(context, isTypedBindingPattern, false);
    return createUnionTypeDesc(leftTypeDesc, pipeToken, rightTypeDesc);
}

private STNode createUnionTypeDesc(STNode leftTypeDesc, STNode pipeToken, STNode rightTypeDesc) {
    // NOTE(review): both members are validated — presumably this rejects `var` inside a
    // union type; confirm against validateForUsageOfVar's implementation.
    leftTypeDesc = validateForUsageOfVar(leftTypeDesc);
    rightTypeDesc = validateForUsageOfVar(rightTypeDesc);
    return STNodeFactory.createUnionTypeDescriptorNode(leftTypeDesc, pipeToken, rightTypeDesc);
}

/**
 * Parse pipe token.
* * @return parsed pipe token node */ private STNode parsePipeToken() { STToken token = peek(); if (token.kind == SyntaxKind.PIPE_TOKEN) { return consume(); } else { recover(token, ParserRuleContext.PIPE); return parsePipeToken(); } } private boolean isTypeStartingToken(SyntaxKind nodeKind) { switch (nodeKind) { case IDENTIFIER_TOKEN: case SERVICE_KEYWORD: case RECORD_KEYWORD: case OBJECT_KEYWORD: case ABSTRACT_KEYWORD: case CLIENT_KEYWORD: case OPEN_PAREN_TOKEN: case MAP_KEYWORD: case FUTURE_KEYWORD: case TYPEDESC_KEYWORD: case ERROR_KEYWORD: case STREAM_KEYWORD: case TABLE_KEYWORD: case FUNCTION_KEYWORD: case OPEN_BRACKET_TOKEN: case DISTINCT_KEYWORD: return true; default: if (isSingletonTypeDescStart(nodeKind, true)) { return true; } return isSimpleType(nodeKind); } } static boolean isSimpleType(SyntaxKind nodeKind) { switch (nodeKind) { case INT_KEYWORD: case FLOAT_KEYWORD: case DECIMAL_KEYWORD: case BOOLEAN_KEYWORD: case STRING_KEYWORD: case BYTE_KEYWORD: case XML_KEYWORD: case JSON_KEYWORD: case HANDLE_KEYWORD: case ANY_KEYWORD: case ANYDATA_KEYWORD: case NEVER_KEYWORD: case SERVICE_KEYWORD: case VAR_KEYWORD: case ERROR_KEYWORD: case STREAM_KEYWORD: case TYPEDESC_KEYWORD: case READONLY_KEYWORD: case DISTINCT_KEYWORD: return true; default: return false; } } private SyntaxKind getTypeSyntaxKind(SyntaxKind typeKeyword) { switch (typeKeyword) { case INT_KEYWORD: return SyntaxKind.INT_TYPE_DESC; case FLOAT_KEYWORD: return SyntaxKind.FLOAT_TYPE_DESC; case DECIMAL_KEYWORD: return SyntaxKind.DECIMAL_TYPE_DESC; case BOOLEAN_KEYWORD: return SyntaxKind.BOOLEAN_TYPE_DESC; case STRING_KEYWORD: return SyntaxKind.STRING_TYPE_DESC; case BYTE_KEYWORD: return SyntaxKind.BYTE_TYPE_DESC; case XML_KEYWORD: return SyntaxKind.XML_TYPE_DESC; case JSON_KEYWORD: return SyntaxKind.JSON_TYPE_DESC; case HANDLE_KEYWORD: return SyntaxKind.HANDLE_TYPE_DESC; case ANY_KEYWORD: return SyntaxKind.ANY_TYPE_DESC; case ANYDATA_KEYWORD: return SyntaxKind.ANYDATA_TYPE_DESC; case READONLY_KEYWORD: 
return SyntaxKind.READONLY_TYPE_DESC; case NEVER_KEYWORD: return SyntaxKind.NEVER_TYPE_DESC; case SERVICE_KEYWORD: return SyntaxKind.SERVICE_TYPE_DESC; case VAR_KEYWORD: return SyntaxKind.VAR_TYPE_DESC; case ERROR_KEYWORD: return SyntaxKind.ERROR_TYPE_DESC; default: return SyntaxKind.TYPE_REFERENCE; } } /** * Parse fork-keyword. * * @return Fork-keyword node */ private STNode parseForkKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.FORK_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.FORK_KEYWORD); return parseForkKeyword(); } } /** * Parse fork statement. * <code>fork-stmt := fork { named-worker-decl+ }</code> * * @return Fork statement */ private STNode parseForkStatement() { startContext(ParserRuleContext.FORK_STMT); STNode forkKeyword = parseForkKeyword(); STNode openBrace = parseOpenBrace(); ArrayList<STNode> workers = new ArrayList<>(); while (!isEndOfStatements()) { STNode stmt = parseStatement(); if (stmt == null) { break; } switch (stmt.kind) { case NAMED_WORKER_DECLARATION: workers.add(stmt); break; default: if (workers.isEmpty()) { openBrace = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(openBrace, stmt, DiagnosticErrorCode.ERROR_ONLY_NAMED_WORKERS_ALLOWED_HERE); } else { updateLastNodeInListWithInvalidNode(workers, stmt, DiagnosticErrorCode.ERROR_ONLY_NAMED_WORKERS_ALLOWED_HERE); } } } STNode namedWorkerDeclarations = STNodeFactory.createNodeList(workers); STNode closeBrace = parseCloseBrace(); endContext(); STNode forkStmt = STNodeFactory.createForkStatementNode(forkKeyword, openBrace, namedWorkerDeclarations, closeBrace); if (isNodeListEmpty(namedWorkerDeclarations)) { return SyntaxErrors.addDiagnostic(forkStmt, DiagnosticErrorCode.ERROR_MISSING_NAMED_WORKER_DECLARATION_IN_FORK_STMT); } return forkStmt; } /** * Parse trap expression. 
* <p> * <code> * trap-expr := trap expression * </code> * * @param allowActions Allow actions * @param isRhsExpr Whether this is a RHS expression or not * @return Trap expression node */ private STNode parseTrapExpression(boolean isRhsExpr, boolean allowActions, boolean isInConditionalExpr) { STNode trapKeyword = parseTrapKeyword(); STNode expr = parseExpression(OperatorPrecedence.EXPRESSION_ACTION, isRhsExpr, allowActions, isInConditionalExpr); if (isAction(expr)) { return STNodeFactory.createTrapExpressionNode(SyntaxKind.TRAP_ACTION, trapKeyword, expr); } return STNodeFactory.createTrapExpressionNode(SyntaxKind.TRAP_EXPRESSION, trapKeyword, expr); } /** * Parse trap-keyword. * * @return Trap-keyword node */ private STNode parseTrapKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.TRAP_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.TRAP_KEYWORD); return parseTrapKeyword(); } } /** * Parse list constructor expression. * <p> * <code> * list-constructor-expr := [ [ expr-list ] ] * <br/> * expr-list := expression (, expression)* * </code> * * @return Parsed node */ private STNode parseListConstructorExpr() { startContext(ParserRuleContext.LIST_CONSTRUCTOR); STNode openBracket = parseOpenBracket(); STNode expressions = parseOptionalExpressionsList(); STNode closeBracket = parseCloseBracket(); endContext(); return STNodeFactory.createListConstructorExpressionNode(openBracket, expressions, closeBracket); } /** * Parse optional expression list. 
* * @return Parsed node */ private STNode parseOptionalExpressionsList() { List<STNode> expressions = new ArrayList<>(); if (isEndOfListConstructor(peek().kind)) { return STNodeFactory.createEmptyNodeList(); } STNode expr = parseExpression(); expressions.add(expr); return parseOptionalExpressionsList(expressions); } private STNode parseOptionalExpressionsList(List<STNode> expressions) { STNode listConstructorMemberEnd; while (!isEndOfListConstructor(peek().kind)) { listConstructorMemberEnd = parseListConstructorMemberEnd(); if (listConstructorMemberEnd == null) { break; } expressions.add(listConstructorMemberEnd); STNode expr = parseExpression(); expressions.add(expr); } return STNodeFactory.createNodeList(expressions); } private boolean isEndOfListConstructor(SyntaxKind tokenKind) { switch (tokenKind) { case EOF_TOKEN: case CLOSE_BRACKET_TOKEN: return true; default: return false; } } private STNode parseListConstructorMemberEnd() { switch (peek().kind) { case COMMA_TOKEN: return parseComma(); case CLOSE_BRACKET_TOKEN: return null; default: recover(peek(), ParserRuleContext.LIST_CONSTRUCTOR_MEMBER_END); return parseListConstructorMemberEnd(); } } /** * Parse foreach statement. * <code>foreach-stmt := foreach typed-binding-pattern in action-or-expr block-stmt [on-fail-clause]</code> * * @return foreach statement */ private STNode parseForEachStatement() { startContext(ParserRuleContext.FOREACH_STMT); STNode forEachKeyword = parseForEachKeyword(); STNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.FOREACH_STMT); STNode inKeyword = parseInKeyword(); STNode actionOrExpr = parseActionOrExpression(); STNode blockStatement = parseBlockNode(); endContext(); STNode onFailClause = parseOptionalOnFailClause(); return STNodeFactory.createForEachStatementNode(forEachKeyword, typedBindingPattern, inKeyword, actionOrExpr, blockStatement, onFailClause); } /** * Parse foreach-keyword. 
* * @return ForEach-keyword node */ private STNode parseForEachKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.FOREACH_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.FOREACH_KEYWORD); return parseForEachKeyword(); } } /** * Parse in-keyword. * * @return In-keyword node */ private STNode parseInKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.IN_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.IN_KEYWORD); return parseInKeyword(); } } /** * Parse type cast expression. * <p> * <code> * type-cast-expr := < type-cast-param > expression * <br/> * type-cast-param := [annots] type-descriptor | annots * </code> * * @return Parsed node */ private STNode parseTypeCastExpr(boolean isRhsExpr, boolean allowActions, boolean isInConditionalExpr) { startContext(ParserRuleContext.TYPE_CAST); STNode ltToken = parseLTToken(); STNode typeCastParam = parseTypeCastParam(); STNode gtToken = parseGTToken(); endContext(); STNode expression = parseExpression(OperatorPrecedence.EXPRESSION_ACTION, isRhsExpr, allowActions, isInConditionalExpr); return STNodeFactory.createTypeCastExpressionNode(ltToken, typeCastParam, gtToken, expression); } private STNode parseTypeCastParam() { STNode annot; STNode type; STToken token = peek(); switch (token.kind) { case AT_TOKEN: annot = parseOptionalAnnotations(); token = peek(); if (isTypeStartingToken(token.kind)) { type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS); } else { type = STNodeFactory.createEmptyNode(); } break; default: annot = STNodeFactory.createEmptyNode(); type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS); break; } return STNodeFactory.createTypeCastParamNode(getAnnotations(annot), type); } /** * Parse table constructor expression. 
* <p> * <code> * table-constructor-expr-rhs := [ [row-list] ] * </code> * * @param tableKeyword tableKeyword that precedes this rhs * @param keySpecifier keySpecifier that precedes this rhs * @return Parsed node */ private STNode parseTableConstructorExprRhs(STNode tableKeyword, STNode keySpecifier) { switchContext(ParserRuleContext.TABLE_CONSTRUCTOR); STNode openBracket = parseOpenBracket(); STNode rowList = parseRowList(); STNode closeBracket = parseCloseBracket(); return STNodeFactory.createTableConstructorExpressionNode(tableKeyword, keySpecifier, openBracket, rowList, closeBracket); } /** * Parse table-keyword. * * @return Table-keyword node */ private STNode parseTableKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.TABLE_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.TABLE_KEYWORD); return parseTableKeyword(); } } /** * Parse table rows. * <p> * <code>row-list := [ mapping-constructor-expr (, mapping-constructor-expr)* ]</code> * * @return Parsed node */ private STNode parseRowList() { STToken nextToken = peek(); if (isEndOfTableRowList(nextToken.kind)) { return STNodeFactory.createEmptyNodeList(); } List<STNode> mappings = new ArrayList<>(); STNode mapExpr = parseMappingConstructorExpr(); mappings.add(mapExpr); nextToken = peek(); STNode rowEnd; while (!isEndOfTableRowList(nextToken.kind)) { rowEnd = parseTableRowEnd(); if (rowEnd == null) { break; } mappings.add(rowEnd); mapExpr = parseMappingConstructorExpr(); mappings.add(mapExpr); nextToken = peek(); } return STNodeFactory.createNodeList(mappings); } private boolean isEndOfTableRowList(SyntaxKind tokenKind) { switch (tokenKind) { case EOF_TOKEN: case CLOSE_BRACKET_TOKEN: return true; case COMMA_TOKEN: case OPEN_BRACE_TOKEN: return false; default: return isEndOfMappingConstructor(tokenKind); } } private STNode parseTableRowEnd() { switch (peek().kind) { case COMMA_TOKEN: return parseComma(); case CLOSE_BRACKET_TOKEN: case EOF_TOKEN: return null; default: 
recover(peek(), ParserRuleContext.TABLE_ROW_END); return parseTableRowEnd(); } } /** * Parse key specifier. * <p> * <code>key-specifier := key ( [ field-name (, field-name)* ] )</code> * * @return Parsed node */ private STNode parseKeySpecifier() { startContext(ParserRuleContext.KEY_SPECIFIER); STNode keyKeyword = parseKeyKeyword(); STNode openParen = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS); STNode fieldNames = parseFieldNames(); STNode closeParen = parseCloseParenthesis(); endContext(); return STNodeFactory.createKeySpecifierNode(keyKeyword, openParen, fieldNames, closeParen); } /** * Parse key-keyword. * * @return Key-keyword node */ private STNode parseKeyKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.KEY_KEYWORD) { return consume(); } if (isKeyKeyword(token)) { return getKeyKeyword(consume()); } recover(token, ParserRuleContext.KEY_KEYWORD); return parseKeyKeyword(); } static boolean isKeyKeyword(STToken token) { return token.kind == SyntaxKind.IDENTIFIER_TOKEN && LexerTerminals.KEY.equals(token.text()); } private STNode getKeyKeyword(STToken token) { return STNodeFactory.createToken(SyntaxKind.KEY_KEYWORD, token.leadingMinutiae(), token.trailingMinutiae(), token.diagnostics()); } /** * Parse field names. 
 * <p>
 * <code>field-name-list := [ field-name (, field-name)* ]</code>
 *
 * @return Parsed node
 */
private STNode parseFieldNames() {
    STToken nextToken = peek();
    if (isEndOfFieldNamesList(nextToken.kind)) {
        // Empty field-name list: `key()`.
        return STNodeFactory.createEmptyNodeList();
    }

    // Parse the first field name, then alternating (comma, field-name) pairs.
    List<STNode> fieldNames = new ArrayList<>();
    STNode fieldName = parseVariableName();
    fieldNames.add(fieldName);

    nextToken = peek();
    STNode leadingComma;
    while (!isEndOfFieldNamesList(nextToken.kind)) {
        leadingComma = parseComma();
        fieldNames.add(leadingComma);
        fieldName = parseVariableName();
        fieldNames.add(fieldName);
        nextToken = peek();
    }

    return STNodeFactory.createNodeList(fieldNames);
}

// A field-name list continues only while commas or identifiers are seen;
// any other token terminates it.
private boolean isEndOfFieldNamesList(SyntaxKind tokenKind) {
    switch (tokenKind) {
        case COMMA_TOKEN:
        case IDENTIFIER_TOKEN:
            return false;
        default:
            return true;
    }
}

/**
 * Parse error type descriptor.
 * <p>
 * error-type-descriptor := error [error-type-param]
 * error-type-param := < (detail-type-descriptor | inferred-type-descriptor) >
 * detail-type-descriptor := type-descriptor
 * inferred-type-descriptor := *
 * </p>
 *
 * @return Parsed node
 */
private STNode parseErrorTypeDescriptor() {
    STNode errorKeywordToken = parseErrorKeyword();
    return parseErrorTypeDescriptor(errorKeywordToken);
}

// Overload used when the `error` keyword has already been consumed by the caller.
private STNode parseErrorTypeDescriptor(STNode errorKeywordToken) {
    STNode errorTypeParamsNode;
    STToken nextToken = peek();
    // The error-type-param is optional: present only when followed by `<`.
    if (nextToken.kind == SyntaxKind.LT_TOKEN) {
        errorTypeParamsNode = parseErrorTypeParamsNode();
    } else {
        errorTypeParamsNode = STNodeFactory.createEmptyNode();
    }
    return STNodeFactory.createErrorTypeDescriptorNode(errorKeywordToken, errorTypeParamsNode);
}

/**
 * Parse error type param node.
* <p> * error-type-param := < (detail-type-descriptor | inferred-type-descriptor) > * detail-type-descriptor := type-descriptor * inferred-type-descriptor := * * </p> * * @return Parsed node */ private STNode parseErrorTypeParamsNode() { STNode ltToken = parseLTToken(); STNode parameter; STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.ASTERISK_TOKEN) { parameter = consume(); } else { parameter = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS); } STNode gtToken = parseGTToken(); return STNodeFactory.createErrorTypeParamsNode(ltToken, parameter, gtToken); } /** * Parse error-keyword. * * @return Parsed error-keyword node */ private STNode parseErrorKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.ERROR_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.ERROR_KEYWORD); return parseErrorKeyword(); } } /** * Parse typedesc type descriptor. * typedesc-type-descriptor := typedesc type-parameter * * @return Parsed typedesc type node */ private STNode parseTypedescTypeDescriptor() { STNode typedescKeywordToken = parseTypedescKeyword(); STNode typedescTypeParamsNode; STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.LT_TOKEN) { typedescTypeParamsNode = parseTypeParameter(); } else { typedescTypeParamsNode = STNodeFactory.createEmptyNode(); } return STNodeFactory.createTypedescTypeDescriptorNode(typedescKeywordToken, typedescTypeParamsNode); } /** * Parse typedesc-keyword. * * @return Parsed typedesc-keyword node */ private STNode parseTypedescKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.TYPEDESC_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.TYPEDESC_KEYWORD); return parseTypedescKeyword(); } } /** * Parse stream type descriptor. 
* <p> * stream-type-descriptor := stream [stream-type-parameters] * stream-type-parameters := < type-descriptor [, type-descriptor]> * </p> * * @return Parsed stream type descriptor node */ private STNode parseStreamTypeDescriptor() { STNode streamKeywordToken = parseStreamKeyword(); STNode streamTypeParamsNode; STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.LT_TOKEN) { streamTypeParamsNode = parseStreamTypeParamsNode(); } else { streamTypeParamsNode = STNodeFactory.createEmptyNode(); } return STNodeFactory.createStreamTypeDescriptorNode(streamKeywordToken, streamTypeParamsNode); } /** * Parse xml type descriptor. * xml-type-descriptor := xml type-parameter * * @return Parsed typedesc type node */ private STNode parseXmlTypeDescriptor() { STNode xmlKeywordToken = parseXMLKeyword(); STNode typedescTypeParamsNode; STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.LT_TOKEN) { typedescTypeParamsNode = parseTypeParameter(); } else { typedescTypeParamsNode = STNodeFactory.createEmptyNode(); } return STNodeFactory.createXmlTypeDescriptorNode(xmlKeywordToken, typedescTypeParamsNode); } /** * Parse stream type params node. 
* <p> * stream-type-parameters := < type-descriptor [, type-descriptor]> * </p> * * @return Parsed stream type params node */ private STNode parseStreamTypeParamsNode() { STNode ltToken = parseLTToken(); startContext(ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC); STNode leftTypeDescNode = parseTypeDescriptorWithoutContext(ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC, false); STNode streamTypedesc = parseStreamTypeParamsNode(ltToken, leftTypeDescNode); endContext(); return streamTypedesc; } private STNode parseStreamTypeParamsNode(STNode ltToken, STNode leftTypeDescNode) { STNode commaToken, rightTypeDescNode, gtToken; switch (peek().kind) { case COMMA_TOKEN: commaToken = parseComma(); rightTypeDescNode = parseTypeDescriptorWithoutContext(ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC, false); break; case GT_TOKEN: commaToken = STNodeFactory.createEmptyNode(); rightTypeDescNode = STNodeFactory.createEmptyNode(); break; default: recover(peek(), ParserRuleContext.STREAM_TYPE_FIRST_PARAM_RHS, ltToken, leftTypeDescNode); return parseStreamTypeParamsNode(ltToken, leftTypeDescNode); } gtToken = parseGTToken(); return STNodeFactory.createStreamTypeParamsNode(ltToken, leftTypeDescNode, commaToken, rightTypeDescNode, gtToken); } /** * Parse stream-keyword. * * @return Parsed stream-keyword node */ private STNode parseStreamKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.STREAM_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.STREAM_KEYWORD); return parseStreamKeyword(); } } /** * Parse let expression. 
* <p> * <code> * let-expr := let let-var-decl [, let-var-decl]* in expression * </code> * * @return Parsed node */ private STNode parseLetExpression(boolean isRhsExpr) { STNode letKeyword = parseLetKeyword(); STNode letVarDeclarations = parseLetVarDeclarations(ParserRuleContext.LET_EXPR_LET_VAR_DECL, isRhsExpr); STNode inKeyword = parseInKeyword(); letKeyword = cloneWithDiagnosticIfListEmpty(letVarDeclarations, letKeyword, DiagnosticErrorCode.ERROR_MISSING_LET_VARIABLE_DECLARATION); STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false); return STNodeFactory.createLetExpressionNode(letKeyword, letVarDeclarations, inKeyword, expression); } /** * Parse let-keyword. * * @return Let-keyword node */ private STNode parseLetKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.LET_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.LET_KEYWORD); return parseLetKeyword(); } } /** * Parse let variable declarations. * <p> * <code>let-var-decl-list := let-var-decl [, let-var-decl]*</code> * * @return Parsed node */ private STNode parseLetVarDeclarations(ParserRuleContext context, boolean isRhsExpr) { startContext(context); List<STNode> varDecls = new ArrayList<>(); STToken nextToken = peek(); if (isEndOfLetVarDeclarations(nextToken.kind)) { endContext(); return STNodeFactory.createEmptyNodeList(); } STNode varDec = parseLetVarDecl(isRhsExpr); varDecls.add(varDec); nextToken = peek(); STNode leadingComma; while (!isEndOfLetVarDeclarations(nextToken.kind)) { leadingComma = parseComma(); varDecls.add(leadingComma); varDec = parseLetVarDecl(isRhsExpr); varDecls.add(varDec); nextToken = peek(); } endContext(); return STNodeFactory.createNodeList(varDecls); } private boolean isEndOfLetVarDeclarations(SyntaxKind tokenKind) { switch (tokenKind) { case COMMA_TOKEN: case AT_TOKEN: return false; case IN_KEYWORD: return true; default: return !isTypeStartingToken(tokenKind); } } /** * Parse let variable declaration. 
* <p> * <code>let-var-decl := [annots] typed-binding-pattern = expression</code> * * @return Parsed node */ private STNode parseLetVarDecl(boolean isRhsExpr) { STNode annot = parseOptionalAnnotations(); STNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.LET_EXPR_LET_VAR_DECL); STNode assign = parseAssignOp(); STNode expression = parseExpression(OperatorPrecedence.ANON_FUNC_OR_LET, isRhsExpr, false); return STNodeFactory.createLetVariableDeclarationNode(annot, typedBindingPattern, assign, expression); } /** * Parse raw backtick string template expression. * <p> * <code>BacktickString := `expression`</code> * * @return Template expression node */ private STNode parseTemplateExpression() { STNode type = STNodeFactory.createEmptyNode(); STNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START); STNode content = parseTemplateContent(); STNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START); return STNodeFactory.createTemplateExpressionNode(SyntaxKind.RAW_TEMPLATE_EXPRESSION, type, startingBackTick, content, endingBackTick); } private STNode parseTemplateContent() { List<STNode> items = new ArrayList<>(); STToken nextToken = peek(); while (!isEndOfBacktickContent(nextToken.kind)) { STNode contentItem = parseTemplateItem(); items.add(contentItem); nextToken = peek(); } return STNodeFactory.createNodeList(items); } private boolean isEndOfBacktickContent(SyntaxKind kind) { switch (kind) { case EOF_TOKEN: case BACKTICK_TOKEN: return true; default: return false; } } private STNode parseTemplateItem() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.INTERPOLATION_START_TOKEN) { return parseInterpolation(); } return consume(); } /** * Parse string template expression. 
* <p> * <code>string-template-expr := string ` expression `</code> * * @return String template expression node */ private STNode parseStringTemplateExpression() { STNode type = parseStringKeyword(); STNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START); STNode content = parseTemplateContent(); STNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_END); return STNodeFactory.createTemplateExpressionNode(SyntaxKind.STRING_TEMPLATE_EXPRESSION, type, startingBackTick, content, endingBackTick); } /** * Parse <code>string</code> keyword. * * @return string keyword node */ private STNode parseStringKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.STRING_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.STRING_KEYWORD); return parseStringKeyword(); } } /** * Parse XML template expression. * <p> * <code>xml-template-expr := xml BacktickString</code> * * @return XML template expression */ private STNode parseXMLTemplateExpression() { STNode xmlKeyword = parseXMLKeyword(); STNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START); STNode content = parseTemplateContentAsXML(); STNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_END); return STNodeFactory.createTemplateExpressionNode(SyntaxKind.XML_TEMPLATE_EXPRESSION, xmlKeyword, startingBackTick, content, endingBackTick); } /** * Parse <code>xml</code> keyword. * * @return xml keyword node */ private STNode parseXMLKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.XML_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.XML_KEYWORD); return parseXMLKeyword(); } } /** * Parse the content of the template string as XML. This method first read the * input in the same way as the raw-backtick-template (BacktickString). Then * it parses the content as XML. 
*
 * @return XML node
 */
private STNode parseTemplateContentAsXML() {
    // Phase 1: consume the backtick content as ordinary template items, flattening the
    // string pieces into one buffer. Each interpolation is replaced by the placeholder
    // "${}" in the buffer and its expression node is queued, in order, for the XML parser
    // to splice back in.
    ArrayDeque<STNode> expressions = new ArrayDeque<>();
    StringBuilder xmlStringBuilder = new StringBuilder();
    STToken nextToken = peek();
    while (!isEndOfBacktickContent(nextToken.kind)) {
        STNode contentItem = parseTemplateItem();
        if (contentItem.kind == SyntaxKind.TEMPLATE_STRING) {
            xmlStringBuilder.append(((STToken) contentItem).text());
        } else {
            xmlStringBuilder.append("${}");
            expressions.add(contentItem);
        }
        nextToken = peek();
    }

    // Phase 2: re-lex and parse the flattened text with the dedicated XML lexer/parser.
    CharReader charReader = CharReader.from(xmlStringBuilder.toString());
    AbstractTokenReader tokenReader = new TokenReader(new XMLLexer(charReader));
    XMLParser xmlParser = new XMLParser(tokenReader, expressions);
    return xmlParser.parse();
}

/**
 * Parse interpolation of a back-tick string.
 * <p>
 * <code>
 * interpolation := ${ expression }
 * </code>
 *
 * @return Interpolation node
 */
private STNode parseInterpolation() {
    startContext(ParserRuleContext.INTERPOLATION);
    STNode interpolStart = parseInterpolationStart();
    STNode expr = parseExpression();
    // Absorb any stray tokens between the expression and the closing brace as invalid
    // minutiae on the expression, so the interpolation node stays well-formed.
    while (true) {
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.EOF_TOKEN || nextToken.kind == SyntaxKind.CLOSE_BRACE_TOKEN) {
            break;
        } else {
            nextToken = consume();
            expr = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(expr, nextToken,
                    DiagnosticErrorCode.ERROR_INVALID_TOKEN, nextToken.text());
        }
    }
    STNode closeBrace = parseCloseBrace();
    endContext();
    return STNodeFactory.createInterpolationNode(interpolStart, expr, closeBrace);
}

/**
 * Parse interpolation start token.
 * <p>
 * <code>interpolation-start := ${</code>
 *
 * @return Interpolation start token
 */
private STNode parseInterpolationStart() {
    STToken token = peek();
    if (token.kind == SyntaxKind.INTERPOLATION_START_TOKEN) {
        return consume();
    } else {
        recover(token, ParserRuleContext.INTERPOLATION_START_TOKEN);
        return parseInterpolationStart();
    }
}

/**
 * Parse back-tick token.
*
 * @return Back-tick token
 */
private STNode parseBacktickToken(ParserRuleContext ctx) {
    STToken token = peek();
    if (token.kind == SyntaxKind.BACKTICK_TOKEN) {
        return consume();
    } else {
        // ctx is either TEMPLATE_START or TEMPLATE_END so recovery reports the right position.
        recover(token, ctx);
        return parseBacktickToken(ctx);
    }
}

/**
 * Parse table type descriptor.
 * <p>
 * table-type-descriptor := table row-type-parameter [key-constraint]
 * row-type-parameter := type-parameter
 * key-constraint := key-specifier | key-type-constraint
 * key-specifier := key ( [ field-name (, field-name)* ] )
 * key-type-constraint := key type-parameter
 * </p>
 *
 * @return Parsed table type desc node.
 */
private STNode parseTableTypeDescriptor() {
    STNode tableKeywordToken = parseTableKeyword();
    STNode rowTypeParameterNode = parseRowTypeParameter();
    STNode keyConstraintNode;
    STToken nextToken = peek();
    // 'key' is a contextual keyword here: it may arrive as an identifier token,
    // hence the isKeyKeyword(...) check instead of a plain kind comparison.
    if (isKeyKeyword(nextToken)) {
        STNode keyKeywordToken = getKeyKeyword(consume());
        keyConstraintNode = parseKeyConstraint(keyKeywordToken);
    } else {
        keyConstraintNode = STNodeFactory.createEmptyNode();
    }
    return STNodeFactory.createTableTypeDescriptorNode(tableKeywordToken, rowTypeParameterNode, keyConstraintNode);
}

/**
 * Parse row type parameter node.
 * <p>
 * row-type-parameter := type-parameter
 * </p>
 *
 * @return Parsed node.
 */
private STNode parseRowTypeParameter() {
    startContext(ParserRuleContext.ROW_TYPE_PARAM);
    STNode rowTypeParameterNode = parseTypeParameter();
    endContext();
    return rowTypeParameterNode;
}

/**
 * Parse type parameter node.
 * <p>
 * type-parameter := < type-descriptor >
 * </p>
 *
 * @return Parsed node
 */
private STNode parseTypeParameter() {
    STNode ltToken = parseLTToken();
    STNode typeNode = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);
    STNode gtToken = parseGTToken();
    return STNodeFactory.createTypeParameterNode(ltToken, typeNode, gtToken);
}

/**
 * Parse key constraint.
 * <p>
 * key-constraint := key-specifier | key-type-constraint
 * </p>
 *
 * @return Parsed node.
*/
private STNode parseKeyConstraint(STNode keyKeywordToken) {
    // The token after 'key' disambiguates the two productions:
    // '(' starts a key-specifier, '<' starts a key-type-constraint.
    switch (peek().kind) {
        case OPEN_PAREN_TOKEN:
            return parseKeySpecifier(keyKeywordToken);
        case LT_TOKEN:
            return parseKeyTypeConstraint(keyKeywordToken);
        default:
            recover(peek(), ParserRuleContext.KEY_CONSTRAINTS_RHS, keyKeywordToken);
            return parseKeyConstraint(keyKeywordToken);
    }
}

/**
 * Parse key specifier given parsed key keyword token.
 * <p>
 * <code>key-specifier := key ( [ field-name (, field-name)* ] )</code>
 *
 * @return Parsed node
 */
private STNode parseKeySpecifier(STNode keyKeywordToken) {
    startContext(ParserRuleContext.KEY_SPECIFIER);
    STNode openParenToken = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS);
    STNode fieldNamesNode = parseFieldNames();
    STNode closeParenToken = parseCloseParenthesis();
    endContext();
    return STNodeFactory.createKeySpecifierNode(keyKeywordToken, openParenToken, fieldNamesNode, closeParenToken);
}

/**
 * Parse key type constraint.
 * <p>
 * key-type-constraint := key type-parameter
 * </p>
 *
 * @return Parsed node
 */
private STNode parseKeyTypeConstraint(STNode keyKeywordToken) {
    STNode typeParameterNode = parseTypeParameter();
    return STNodeFactory.createKeyTypeConstraintNode(keyKeywordToken, typeParameterNode);
}

/**
 * Parse function type descriptor.
 * <p>
 * <code>function-type-descriptor := function function-signature</code>
 *
 * @return Function type descriptor node
 */
private STNode parseFunctionTypeDesc() {
    startContext(ParserRuleContext.FUNC_TYPE_DESC);
    STNode functionKeyword = parseFunctionKeyword();
    // NOTE(review): the boolean flag presumably marks type-descriptor (vs. definition)
    // context for the signature parse — confirm against parseFuncSignature.
    STNode signature = parseFuncSignature(true);
    endContext();
    return STNodeFactory.createFunctionTypeDescriptorNode(functionKeyword, signature);
}

/**
 * Parse explicit anonymous function expression.
 * <p>
 * <code>explicit-anonymous-function-expr := [annots] function function-signature anon-func-body</code>
 *
 * @param annots Annotations.
* @param isRhsExpr Is expression in rhs context
 * @return Anonymous function expression node
 */
private STNode parseExplicitFunctionExpression(STNode annots, boolean isRhsExpr) {
    // The ANON_FUNC_EXPRESSION context opened here is closed inside parseAnonFuncBody,
    // whichever branch it takes.
    startContext(ParserRuleContext.ANON_FUNC_EXPRESSION);
    STNode funcKeyword = parseFunctionKeyword();
    STNode funcSignature = parseFuncSignature(false);
    STNode funcBody = parseAnonFuncBody(isRhsExpr);
    return STNodeFactory.createExplicitAnonymousFunctionExpressionNode(annots, funcKeyword, funcSignature,
            funcBody);
}

/**
 * Parse anonymous function body.
 * <p>
 * <code>anon-func-body := block-function-body | expr-function-body</code>
 *
 * @param isRhsExpr Is expression in rhs context
 * @return Anon function body node
 */
private STNode parseAnonFuncBody(boolean isRhsExpr) {
    switch (peek().kind) {
        case OPEN_BRACE_TOKEN:
        case EOF_TOKEN:
            // EOF is handled as a block body so recovery can still produce a node.
            STNode body = parseFunctionBodyBlock(true);
            endContext();
            return body;
        case RIGHT_DOUBLE_ARROW_TOKEN:
            // Context must be closed before the expr-body parse takes over.
            endContext();
            return parseExpressionFuncBody(true, isRhsExpr);
        default:
            recover(peek(), ParserRuleContext.ANON_FUNC_BODY, isRhsExpr);
            return parseAnonFuncBody(isRhsExpr);
    }
}

/**
 * Parse expression function body.
 * <p>
 * <code>expr-function-body := => expression</code>
 *
 * @param isAnon Is anonymous function.
 * @param isRhsExpr Is expression in rhs context
 * @return Expression function body node
 */
private STNode parseExpressionFuncBody(boolean isAnon, boolean isRhsExpr) {
    STNode rightDoubleArrow = parseDoubleRightArrow();
    STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
    // Only named (non-anonymous) expression-bodied functions are terminated by a semicolon.
    STNode semiColon;
    if (isAnon) {
        semiColon = STNodeFactory.createEmptyNode();
    } else {
        semiColon = parseSemicolon();
    }
    return STNodeFactory.createExpressionFunctionBodyNode(rightDoubleArrow, expression, semiColon);
}

/**
 * Parse '=>' token.
*
 * @return Double right arrow token
 */
private STNode parseDoubleRightArrow() {
    STToken token = peek();
    if (token.kind == SyntaxKind.RIGHT_DOUBLE_ARROW_TOKEN) {
        return consume();
    } else {
        recover(token, ParserRuleContext.EXPR_FUNC_BODY_START);
        return parseDoubleRightArrow();
    }
}

/**
 * Parse an implicit anonymous function expression given its already-parsed parameter part.
 * The params node is normalized first: a single braced expression becomes a one-element
 * param list, and anything else is replaced by a synthetic missing identifier carrying
 * the original node as invalid minutiae.
 */
private STNode parseImplicitAnonFunc(STNode params, boolean isRhsExpr) {
    switch (params.kind) {
        case SIMPLE_NAME_REFERENCE:
        case INFER_PARAM_LIST:
            break;
        case BRACED_EXPRESSION:
            params = getAnonFuncParam((STBracedExpressionNode) params);
            break;
        default:
            STToken syntheticParam = STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
            syntheticParam = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(syntheticParam, params,
                    DiagnosticErrorCode.ERROR_INVALID_PARAM_LIST_IN_INFER_ANONYMOUS_FUNCTION_EXPR);
            params = STNodeFactory.createSimpleNameReferenceNode(syntheticParam);
    }
    STNode rightDoubleArrow = parseDoubleRightArrow();
    STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
    return STNodeFactory.createImplicitAnonymousFunctionExpressionNode(params, rightDoubleArrow, expression);
}

/**
 * Create a new anon-func-param node from a braced expression.
 *
 * @param params Braced expression
 * @return Anon-func param node
 */
private STNode getAnonFuncParam(STBracedExpressionNode params) {
    // Reuse the braced expression's own parentheses as the param-list delimiters.
    List<STNode> paramList = new ArrayList<>();
    paramList.add(params.expression);
    return STNodeFactory.createImplicitAnonymousFunctionParameters(params.openParen,
            STNodeFactory.createNodeList(paramList), params.closeParen);
}

/**
 * Parse implicit anon function expression.
*
 * @param openParen Open parenthesis token
 * @param firstParam First parameter
 * @param isRhsExpr Is expression in rhs context
 * @return Implicit anon function expression node
 */
private STNode parseImplicitAnonFunc(STNode openParen, STNode firstParam, boolean isRhsExpr) {
    // Collect the remaining comma-separated inferred params; the list alternates
    // param / separator because createNodeList expects separators inline.
    List<STNode> paramList = new ArrayList<>();
    paramList.add(firstParam);
    STToken nextToken = peek();
    STNode paramEnd;
    STNode param;
    while (!isEndOfAnonFuncParametersList(nextToken.kind)) {
        paramEnd = parseImplicitAnonFuncParamEnd();
        if (paramEnd == null) {
            break;
        }
        paramList.add(paramEnd);
        param = parseIdentifier(ParserRuleContext.IMPLICIT_ANON_FUNC_PARAM);
        param = STNodeFactory.createSimpleNameReferenceNode(param);
        paramList.add(param);
        nextToken = peek();
    }
    STNode params = STNodeFactory.createNodeList(paramList);
    STNode closeParen = parseCloseParenthesis();
    // NOTE(review): endContext() closes a context opened by the caller before this
    // method was invoked — verify against the call site.
    endContext();
    STNode inferedParams = STNodeFactory.createImplicitAnonymousFunctionParameters(openParen, params, closeParen);
    return parseImplicitAnonFunc(inferedParams, isRhsExpr);
}

// Returns the comma separating two params, or null when the param list has ended.
private STNode parseImplicitAnonFuncParamEnd() {
    switch (peek().kind) {
        case COMMA_TOKEN:
            return parseComma();
        case CLOSE_PAREN_TOKEN:
            return null;
        default:
            recover(peek(), ParserRuleContext.ANON_FUNC_PARAM_RHS);
            return parseImplicitAnonFuncParamEnd();
    }
}

// Tokens that can only follow (never be part of) an anon-func parameter list.
private boolean isEndOfAnonFuncParametersList(SyntaxKind tokenKind) {
    switch (tokenKind) {
        case EOF_TOKEN:
        case CLOSE_BRACE_TOKEN:
        case CLOSE_PAREN_TOKEN:
        case CLOSE_BRACKET_TOKEN:
        case SEMICOLON_TOKEN:
        case RETURNS_KEYWORD:
        case TYPE_KEYWORD:
        case LISTENER_KEYWORD:
        case IF_KEYWORD:
        case WHILE_KEYWORD:
        case DO_KEYWORD:
        case OPEN_BRACE_TOKEN:
        case RIGHT_DOUBLE_ARROW_TOKEN:
            return true;
        default:
            return false;
    }
}

/**
 * Parse tuple type descriptor.
* <p> * <code>tuple-type-descriptor := [ tuple-member-type-descriptors ] * <br/><br/> * tuple-member-type-descriptors := member-type-descriptor (, member-type-descriptor)* [, tuple-rest-descriptor] * | [ tuple-rest-descriptor ] * <br/><br/> * tuple-rest-descriptor := type-descriptor ... * </code> * * @return */ private STNode parseTupleTypeDesc() { STNode openBracket = parseOpenBracket(); startContext(ParserRuleContext.TYPE_DESC_IN_TUPLE); STNode memberTypeDesc = parseTupleMemberTypeDescList(); STNode closeBracket = parseCloseBracket(); endContext(); openBracket = cloneWithDiagnosticIfListEmpty(memberTypeDesc, openBracket, DiagnosticErrorCode.ERROR_MISSING_TYPE_DESC); return STNodeFactory.createTupleTypeDescriptorNode(openBracket, memberTypeDesc, closeBracket); } /** * Parse tuple member type descriptors. * * @return Parsed node */ private STNode parseTupleMemberTypeDescList() { List<STNode> typeDescList = new ArrayList<>(); STToken nextToken = peek(); if (isEndOfTypeList(nextToken.kind)) { return STNodeFactory.createEmptyNodeList(); } STNode typeDesc = parseTypeDescriptorWithoutContext(ParserRuleContext.TYPE_DESC_IN_TUPLE, false); return parseTupleTypeMembers(typeDesc, typeDescList); } private STNode parseTupleTypeMembers(STNode typeDesc, List<STNode> typeDescList) { STToken nextToken; nextToken = peek(); STNode tupleMemberRhs; while (!isEndOfTypeList(nextToken.kind)) { tupleMemberRhs = parseTupleMemberRhs(); if (tupleMemberRhs == null) { break; } if (tupleMemberRhs.kind == SyntaxKind.ELLIPSIS_TOKEN) { typeDesc = STNodeFactory.createRestDescriptorNode(typeDesc, tupleMemberRhs); break; } typeDescList.add(typeDesc); typeDescList.add(tupleMemberRhs); typeDesc = parseTypeDescriptorWithoutContext(ParserRuleContext.TYPE_DESC_IN_TUPLE, false); nextToken = peek(); } typeDescList.add(typeDesc); return STNodeFactory.createNodeList(typeDescList); } private STNode parseTupleMemberRhs() { switch (peek().kind) { case COMMA_TOKEN: return parseComma(); case CLOSE_BRACKET_TOKEN: 
return null; case ELLIPSIS_TOKEN: return parseEllipsis(); default: recover(peek(), ParserRuleContext.TYPE_DESC_IN_TUPLE_RHS); return parseTupleMemberRhs(); } } private boolean isEndOfTypeList(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case CLOSE_BRACKET_TOKEN: case CLOSE_BRACE_TOKEN: case CLOSE_PAREN_TOKEN: case EOF_TOKEN: case EQUAL_TOKEN: case SEMICOLON_TOKEN: return true; default: return false; } } /** * Parse table constructor or query expression. * <p> * <code> * table-constructor-or-query-expr := table-constructor-expr | query-expr * <br/> * table-constructor-expr := table [key-specifier] [ [row-list] ] * <br/> * query-expr := [query-construct-type] query-pipeline select-clause * [query-construct-type] query-pipeline select-clause on-conflict-clause? * <br/> * query-construct-type := table key-specifier | stream * </code> * * @return Parsed node */ private STNode parseTableConstructorOrQuery(boolean isRhsExpr) { startContext(ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_EXPRESSION); STNode tableOrQueryExpr = parseTableConstructorOrQueryInternal(isRhsExpr); endContext(); return tableOrQueryExpr; } private STNode parseTableConstructorOrQueryInternal(boolean isRhsExpr) { STNode queryConstructType; switch (peek().kind) { case FROM_KEYWORD: queryConstructType = STNodeFactory.createEmptyNode(); return parseQueryExprRhs(queryConstructType, isRhsExpr); case STREAM_KEYWORD: queryConstructType = parseQueryConstructType(parseStreamKeyword(), null); return parseQueryExprRhs(queryConstructType, isRhsExpr); case TABLE_KEYWORD: STNode tableKeyword = parseTableKeyword(); return parseTableConstructorOrQuery(tableKeyword, isRhsExpr); default: recover(peek(), ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_START, isRhsExpr); return parseTableConstructorOrQueryInternal(isRhsExpr); } } private STNode parseTableConstructorOrQuery(STNode tableKeyword, boolean isRhsExpr) { STNode keySpecifier; STToken nextToken = peek(); switch (nextToken.kind) { case OPEN_BRACKET_TOKEN: 
keySpecifier = STNodeFactory.createEmptyNode(); return parseTableConstructorExprRhs(tableKeyword, keySpecifier); case KEY_KEYWORD: keySpecifier = parseKeySpecifier(); return parseTableConstructorOrQueryRhs(tableKeyword, keySpecifier, isRhsExpr); case IDENTIFIER_TOKEN: if (isKeyKeyword(nextToken)) { keySpecifier = parseKeySpecifier(); return parseTableConstructorOrQueryRhs(tableKeyword, keySpecifier, isRhsExpr); } break; default: break; } recover(peek(), ParserRuleContext.TABLE_KEYWORD_RHS, tableKeyword, isRhsExpr); return parseTableConstructorOrQuery(tableKeyword, isRhsExpr); } private STNode parseTableConstructorOrQueryRhs(STNode tableKeyword, STNode keySpecifier, boolean isRhsExpr) { switch (peek().kind) { case FROM_KEYWORD: return parseQueryExprRhs(parseQueryConstructType(tableKeyword, keySpecifier), isRhsExpr); case OPEN_BRACKET_TOKEN: return parseTableConstructorExprRhs(tableKeyword, keySpecifier); default: recover(peek(), ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_RHS, tableKeyword, keySpecifier, isRhsExpr); return parseTableConstructorOrQueryRhs(tableKeyword, keySpecifier, isRhsExpr); } } /** * Parse query construct type. * <p> * <code>query-construct-type := table key-specifier | stream</code> * * @return Parsed node */ private STNode parseQueryConstructType(STNode keyword, STNode keySpecifier) { return STNodeFactory.createQueryConstructTypeNode(keyword, keySpecifier); } /** * Parse query action or expression. * <p> * <code> * query-expr-rhs := query-pipeline select-clause * query-pipeline select-clause on-conflict-clause? 
* <br/> * query-pipeline := from-clause intermediate-clause* * </code> * * @param queryConstructType queryConstructType that precedes this rhs * @return Parsed node */ private STNode parseQueryExprRhs(STNode queryConstructType, boolean isRhsExpr) { switchContext(ParserRuleContext.QUERY_EXPRESSION); STNode fromClause = parseFromClause(isRhsExpr); List<STNode> clauses = new ArrayList<>(); STNode intermediateClause; STNode selectClause = null; while (!isEndOfIntermediateClause(peek().kind)) { intermediateClause = parseIntermediateClause(isRhsExpr); if (intermediateClause == null) { break; } if (selectClause != null) { selectClause = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(selectClause, intermediateClause, DiagnosticErrorCode.ERROR_MORE_CLAUSES_AFTER_SELECT_CLAUSE); continue; } if (intermediateClause.kind == SyntaxKind.SELECT_CLAUSE) { selectClause = intermediateClause; } else { clauses.add(intermediateClause); } } if (peek().kind == SyntaxKind.DO_KEYWORD) { STNode intermediateClauses = STNodeFactory.createNodeList(clauses); STNode queryPipeline = STNodeFactory.createQueryPipelineNode(fromClause, intermediateClauses); return parseQueryAction(queryConstructType, queryPipeline, selectClause, isRhsExpr); } if (selectClause == null) { STNode selectKeyword = SyntaxErrors.createMissingToken(SyntaxKind.SELECT_KEYWORD); STNode expr = STNodeFactory .createSimpleNameReferenceNode(SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN)); selectClause = STNodeFactory.createSelectClauseNode(selectKeyword, expr); if (clauses.isEmpty()) { fromClause = SyntaxErrors.addDiagnostic(fromClause, DiagnosticErrorCode.ERROR_MISSING_SELECT_CLAUSE); } else { int lastIndex = clauses.size() - 1; STNode intClauseWithDiagnostic = SyntaxErrors.addDiagnostic(clauses.get(lastIndex), DiagnosticErrorCode.ERROR_MISSING_SELECT_CLAUSE); clauses.set(lastIndex, intClauseWithDiagnostic); } } STNode intermediateClauses = STNodeFactory.createNodeList(clauses); STNode queryPipeline = 
STNodeFactory.createQueryPipelineNode(fromClause, intermediateClauses); STNode onConflictClause = parseOnConflictClause(isRhsExpr); return STNodeFactory.createQueryExpressionNode(queryConstructType, queryPipeline, selectClause, onConflictClause); } /** * Parse an intermediate clause. * <p> * <code> * intermediate-clause := from-clause | where-clause | let-clause | join-clause | limit-clause | order-by-clause * </code> * * @return Parsed node */ private STNode parseIntermediateClause(boolean isRhsExpr) { switch (peek().kind) { case FROM_KEYWORD: return parseFromClause(isRhsExpr); case WHERE_KEYWORD: return parseWhereClause(isRhsExpr); case LET_KEYWORD: return parseLetClause(isRhsExpr); case SELECT_KEYWORD: return parseSelectClause(isRhsExpr); case JOIN_KEYWORD: case OUTER_KEYWORD: return parseJoinClause(isRhsExpr); case ORDER_KEYWORD: case BY_KEYWORD: case ASCENDING_KEYWORD: case DESCENDING_KEYWORD: return parseOrderByClause(isRhsExpr); case LIMIT_KEYWORD: return parseLimitClause(isRhsExpr); case DO_KEYWORD: case SEMICOLON_TOKEN: case ON_KEYWORD: case CONFLICT_KEYWORD: return null; default: recover(peek(), ParserRuleContext.QUERY_PIPELINE_RHS, isRhsExpr); return parseIntermediateClause(isRhsExpr); } } /** * Parse join-keyword. * * @return Join-keyword node */ private STNode parseJoinKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.JOIN_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.JOIN_KEYWORD); return parseJoinKeyword(); } } /** * Parse equals keyword. 
*
 * @return Parsed node
 */
private STNode parseEqualsKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.EQUALS_KEYWORD) {
        return consume();
    } else {
        recover(token, ParserRuleContext.EQUALS_KEYWORD);
        return parseEqualsKeyword();
    }
}

// A token that terminates the intermediate-clause list of a query pipeline.
// Falls through to isValidExprRhsStart so binary-operator continuations also end the list.
private boolean isEndOfIntermediateClause(SyntaxKind tokenKind) {
    switch (tokenKind) {
        case CLOSE_BRACE_TOKEN:
        case CLOSE_PAREN_TOKEN:
        case CLOSE_BRACKET_TOKEN:
        case OPEN_BRACE_TOKEN:
        case SEMICOLON_TOKEN:
        case PUBLIC_KEYWORD:
        case FUNCTION_KEYWORD:
        case EOF_TOKEN:
        case RESOURCE_KEYWORD:
        case LISTENER_KEYWORD:
        case DOCUMENTATION_STRING:
        case PRIVATE_KEYWORD:
        case RETURNS_KEYWORD:
        case SERVICE_KEYWORD:
        case TYPE_KEYWORD:
        case CONST_KEYWORD:
        case FINAL_KEYWORD:
        case DO_KEYWORD:
            return true;
        default:
            return isValidExprRhsStart(tokenKind, SyntaxKind.NONE);
    }
}

/**
 * Parse from clause.
 * <p>
 * <code>from-clause := from typed-binding-pattern in expression</code>
 *
 * @return Parsed node
 */
private STNode parseFromClause(boolean isRhsExpr) {
    STNode fromKeyword = parseFromKeyword();
    STNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.FROM_CLAUSE);
    STNode inKeyword = parseInKeyword();
    // Clause expressions parse at QUERY precedence so nested query keywords terminate them.
    STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
    return STNodeFactory.createFromClauseNode(fromKeyword, typedBindingPattern, inKeyword, expression);
}

/**
 * Parse from-keyword.
 *
 * @return From-keyword node
 */
private STNode parseFromKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.FROM_KEYWORD) {
        return consume();
    } else {
        recover(token, ParserRuleContext.FROM_KEYWORD);
        return parseFromKeyword();
    }
}

/**
 * Parse where clause.
 * <p>
 * <code>where-clause := where expression</code>
 *
 * @return Parsed node
 */
private STNode parseWhereClause(boolean isRhsExpr) {
    STNode whereKeyword = parseWhereKeyword();
    STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
    return STNodeFactory.createWhereClauseNode(whereKeyword, expression);
}

/**
 * Parse where-keyword.
* * @return Where-keyword node */ private STNode parseWhereKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.WHERE_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.WHERE_KEYWORD); return parseWhereKeyword(); } } /** * Parse limit-keyword. * * @return limit-keyword node */ private STNode parseLimitKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.LIMIT_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.LIMIT_KEYWORD); return parseLimitKeyword(); } } /** * Parse let clause. * <p> * <code>let-clause := let let-var-decl [, let-var-decl]* </code> * * @return Parsed node */ private STNode parseLetClause(boolean isRhsExpr) { STNode letKeyword = parseLetKeyword(); STNode letVarDeclarations = parseLetVarDeclarations(ParserRuleContext.LET_CLAUSE_LET_VAR_DECL, isRhsExpr); letKeyword = cloneWithDiagnosticIfListEmpty(letVarDeclarations, letKeyword, DiagnosticErrorCode.ERROR_MISSING_LET_VARIABLE_DECLARATION); return STNodeFactory.createLetClauseNode(letKeyword, letVarDeclarations); } /** * Parse order-keyword. * * @return Order-keyword node */ private STNode parseOrderKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.ORDER_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.ORDER_KEYWORD); return parseOrderKeyword(); } } /** * Parse by-keyword. * * @return By-keyword node */ private STNode parseByKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.BY_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.BY_KEYWORD); return parseByKeyword(); } } /** * Parse order by clause. 
* <p> * <code>order-by-clause := order by order-key-list * </code> * * @return Parsed node */ private STNode parseOrderByClause(boolean isRhsExpr) { STNode orderKeyword = parseOrderKeyword(); STNode byKeyword = parseByKeyword(); STNode orderKeys = parseOrderKeyList(isRhsExpr); byKeyword = cloneWithDiagnosticIfListEmpty(orderKeys, byKeyword, DiagnosticErrorCode.ERROR_MISSING_ORDER_KEY); return STNodeFactory.createOrderByClauseNode(orderKeyword, byKeyword, orderKeys); } /** * Parse order key. * <p> * <code>order-key-list := order-key [, order-key]*</code> * * @return Parsed node */ private STNode parseOrderKeyList(boolean isRhsExpr) { startContext(ParserRuleContext.ORDER_KEY_LIST); List<STNode> orderKeys = new ArrayList<>(); STToken nextToken = peek(); if (isEndOfOrderKeys(nextToken.kind)) { endContext(); return STNodeFactory.createEmptyNodeList(); } STNode orderKey = parseOrderKey(isRhsExpr); orderKeys.add(orderKey); nextToken = peek(); STNode orderKeyListMemberEnd; while (!isEndOfOrderKeys(nextToken.kind)) { orderKeyListMemberEnd = parseOrderKeyListMemberEnd(); if (orderKeyListMemberEnd == null) { break; } orderKeys.add(orderKeyListMemberEnd); orderKey = parseOrderKey(isRhsExpr); orderKeys.add(orderKey); nextToken = peek(); } endContext(); return STNodeFactory.createNodeList(orderKeys); } private boolean isEndOfOrderKeys(SyntaxKind tokenKind) { switch (tokenKind) { case COMMA_TOKEN: case ASCENDING_KEYWORD: case DESCENDING_KEYWORD: return false; case SEMICOLON_TOKEN: case EOF_TOKEN: return true; default: return isQueryClauseStartToken(tokenKind); } } private boolean isQueryClauseStartToken(SyntaxKind tokenKind) { switch (tokenKind) { case SELECT_KEYWORD: case LET_KEYWORD: case WHERE_KEYWORD: case OUTER_KEYWORD: case JOIN_KEYWORD: case ORDER_KEYWORD: case DO_KEYWORD: case FROM_KEYWORD: case LIMIT_KEYWORD: return true; default: return false; } } private STNode parseOrderKeyListMemberEnd() { STToken nextToken = peek(); switch (nextToken.kind) { case COMMA_TOKEN: return 
parseComma(); case EOF_TOKEN: return null; default: if (isQueryClauseStartToken(nextToken.kind)) { return null; } recover(peek(), ParserRuleContext.ORDER_KEY_LIST_END); return parseOrderKeyListMemberEnd(); } } /** * Parse order key. * <p> * <code>order-key := expression (ascending | descending)?</code> * * @return Parsed node */ private STNode parseOrderKey(boolean isRhsExpr) { STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false); STNode orderDirection; STToken nextToken = peek(); switch (nextToken.kind) { case ASCENDING_KEYWORD: case DESCENDING_KEYWORD: orderDirection = consume(); break; default: orderDirection = STNodeFactory.createEmptyNode(); } return STNodeFactory.createOrderKeyNode(expression, orderDirection); } /** * Parse select clause. * <p> * <code>select-clause := select expression</code> * * @return Parsed node */ private STNode parseSelectClause(boolean isRhsExpr) { startContext(ParserRuleContext.SELECT_CLAUSE); STNode selectKeyword = parseSelectKeyword(); STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false); endContext(); return STNodeFactory.createSelectClauseNode(selectKeyword, expression); } /** * Parse select-keyword. * * @return Select-keyword node */ private STNode parseSelectKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.SELECT_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.SELECT_KEYWORD); return parseSelectKeyword(); } } /** * Parse on-conflict clause. 
* <p>
 * <code>
 * onConflictClause := on conflict expression
 * </code>
 *
 * @return On conflict clause node
 */
private STNode parseOnConflictClause(boolean isRhsExpr) {
    // The clause is optional: bail out with an empty node unless the next token could
    // start it ('on', or a bare 'conflict' that recovery can complete).
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.ON_KEYWORD && nextToken.kind != SyntaxKind.CONFLICT_KEYWORD) {
        return STNodeFactory.createEmptyNode();
    }
    startContext(ParserRuleContext.ON_CONFLICT_CLAUSE);
    STNode onKeyword = parseOnKeyword();
    STNode conflictKeyword = parseConflictKeyword();
    endContext();
    STNode expr = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
    return STNodeFactory.createOnConflictClauseNode(onKeyword, conflictKeyword, expr);
}

/**
 * Parse conflict keyword.
 *
 * @return Conflict keyword node
 */
private STNode parseConflictKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.CONFLICT_KEYWORD) {
        return consume();
    } else {
        recover(token, ParserRuleContext.CONFLICT_KEYWORD);
        return parseConflictKeyword();
    }
}

/**
 * Parse limit clause.
 * <p>
 * <code>limitClause := limit expression</code>
 *
 * @return Limit expression node
 */
private STNode parseLimitClause(boolean isRhsExpr) {
    STNode limitKeyword = parseLimitKeyword();
    STNode expr = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
    return STNodeFactory.createLimitClauseNode(limitKeyword, expr);
}

/**
 * Parse join clause.
     * <p>
     * <code>
     * join-clause := (join-var-decl | outer-join-var-decl) in expression on-clause
     * <br/>
     * join-var-decl := join (typeName | var) bindingPattern
     * <br/>
     * outer-join-var-decl := outer join var binding-pattern
     * </code>
     *
     * @return Join clause
     */
    private STNode parseJoinClause(boolean isRhsExpr) {
        startContext(ParserRuleContext.JOIN_CLAUSE);
        STNode outerKeyword;
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.OUTER_KEYWORD) {
            outerKeyword = consume();
        } else {
            // Plain `join`: the `outer` qualifier is optional.
            outerKeyword = STNodeFactory.createEmptyNode();
        }

        STNode joinKeyword = parseJoinKeyword();
        STNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.JOIN_CLAUSE);
        STNode inKeyword = parseInKeyword();
        // Expressions inside query clauses are parsed at QUERY precedence.
        STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
        endContext();
        // The on-clause is parsed outside the JOIN_CLAUSE context.
        STNode onCondition = parseOnClause(isRhsExpr);
        return STNodeFactory.createJoinClauseNode(outerKeyword, joinKeyword, typedBindingPattern, inKeyword,
                expression, onCondition);
    }

    /**
     * Parse on clause.
     * <p>
     * <code>on clause := `on` expression `equals` expression</code>
     *
     * @return On clause node
     */
    private STNode parseOnClause(boolean isRhsExpr) {
        STToken nextToken = peek();
        // If the next token already starts another query clause, the on-clause is
        // absent: synthesize a fully-missing on-clause with diagnostics instead.
        if (isQueryClauseStartToken(nextToken.kind)) {
            return createMissingOnClauseNode();
        }

        startContext(ParserRuleContext.ON_CLAUSE);
        STNode onKeyword = parseOnKeyword();
        STNode lhsExpression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
        STNode equalsKeyword = parseEqualsKeyword();
        endContext();
        STNode rhsExpression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
        return STNodeFactory.createOnClauseNode(onKeyword, lhsExpression, equalsKeyword, rhsExpression);
    }

    /**
     * Builds an on-clause in which every token is a missing token carrying a
     * "missing X" diagnostic. Both operand expressions reuse the same missing
     * identifier token.
     *
     * @return Synthetic on-clause node
     */
    private STNode createMissingOnClauseNode() {
        STNode onKeyword = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.ON_KEYWORD,
                DiagnosticErrorCode.ERROR_MISSING_ON_KEYWORD);
        STNode identifier = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,
                DiagnosticErrorCode.ERROR_MISSING_IDENTIFIER);
        STNode equalsKeyword = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.EQUALS_KEYWORD,
                DiagnosticErrorCode.ERROR_MISSING_EQUALS_KEYWORD);

        STNode lhsExpression = STNodeFactory.createSimpleNameReferenceNode(identifier);
        STNode rhsExpression = STNodeFactory.createSimpleNameReferenceNode(identifier);
        return STNodeFactory.createOnClauseNode(onKeyword, lhsExpression, equalsKeyword, rhsExpression);
    }

    /**
     * Parse start action.
     * <p>
     * <code>start-action := [annots] start (function-call-expr|method-call-expr|remote-method-call-action)</code>
     *
     * @return Start action node
     */
    private STNode parseStartAction(STNode annots) {
        STNode startKeyword = parseStartKeyword();
        STNode expr = parseActionOrExpression();

        switch (expr.kind) {
            case FUNCTION_CALL:
            case METHOD_CALL:
            case REMOTE_METHOD_CALL_ACTION:
                // Already one of the allowed forms; use as-is.
                break;
            case SIMPLE_NAME_REFERENCE:
            case QUALIFIED_NAME_REFERENCE:
                // A bare name after `start`: treat it as a call with missing parens.
                STNode openParenToken = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.OPEN_PAREN_TOKEN,
                        DiagnosticErrorCode.ERROR_MISSING_OPEN_PAREN_TOKEN);
                STNode arguments = STNodeFactory.createEmptyNodeList();
                STNode closeParenToken = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.CLOSE_PAREN_TOKEN,
                        DiagnosticErrorCode.ERROR_MISSING_CLOSE_PAREN_TOKEN);
                expr = STNodeFactory.createFunctionCallExpressionNode(expr, openParenToken, arguments,
                        closeParenToken);
                break;
            default:
                // Any other expression is invalid here: attach it to the `start`
                // keyword as invalid minutiae and substitute a missing function call.
                startKeyword = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startKeyword, expr,
                        DiagnosticErrorCode.ERROR_INVALID_EXPRESSION_IN_START_ACTION);
                STNode funcName = SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
                funcName = STNodeFactory.createSimpleNameReferenceNode(funcName);
                openParenToken = SyntaxErrors.createMissingToken(SyntaxKind.OPEN_PAREN_TOKEN);
                arguments = STNodeFactory.createEmptyNodeList();
                closeParenToken = SyntaxErrors.createMissingToken(SyntaxKind.CLOSE_PAREN_TOKEN);
                expr = STNodeFactory.createFunctionCallExpressionNode(funcName, openParenToken, arguments,
                        closeParenToken);
                break;
        }

        return STNodeFactory.createStartActionNode(getAnnotations(annots), startKeyword, expr);
    }

    /**
     * Parse start keyword.
     *
     * @return Start keyword node
     */
    private STNode parseStartKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.START_KEYWORD) {
            return consume();
        } else {
            // Standard recover-and-retry pattern used throughout this parser.
            recover(token, ParserRuleContext.START_KEYWORD);
            return parseStartKeyword();
        }
    }

    /**
     * Parse flush action.
     * <p>
     * <code>flush-action := flush [peer-worker]</code>
     *
     * @return flush action node
     */
    private STNode parseFlushAction() {
        STNode flushKeyword = parseFlushKeyword();
        STNode peerWorker = parseOptionalPeerWorkerName();
        return STNodeFactory.createFlushActionNode(flushKeyword, peerWorker);
    }

    /**
     * Parse flush keyword.
     *
     * @return flush keyword node
     */
    private STNode parseFlushKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.FLUSH_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.FLUSH_KEYWORD);
            return parseFlushKeyword();
        }
    }

    /**
     * Parse peer worker.
     * <p>
     * <code>peer-worker := worker-name | default</code>
     *
     * @return peer worker name node
     */
    private STNode parseOptionalPeerWorkerName() {
        STToken token = peek();
        switch (token.kind) {
            case IDENTIFIER_TOKEN:
            case DEFAULT_KEYWORD:
                return STNodeFactory.createSimpleNameReferenceNode(consume());
            default:
                // The peer worker is optional here; no recovery is attempted.
                return STNodeFactory.createEmptyNode();
        }
    }

    /**
     * Parse intersection type descriptor.
     * <p>
     * intersection-type-descriptor := type-descriptor & type-descriptor
     * </p>
     *
     * @return Parsed node
     */
    private STNode parseIntersectionTypeDescriptor(STNode leftTypeDesc, ParserRuleContext context,
                                                   boolean isTypedBindingPattern) {
        // The `&` token has already been validated by the caller.
        STNode bitwiseAndToken = consume();
        STNode rightTypeDesc = parseTypeDescriptor(context, isTypedBindingPattern, false);
        return createIntersectionTypeDesc(leftTypeDesc, bitwiseAndToken, rightTypeDesc);
    }

    private STNode createIntersectionTypeDesc(STNode leftTypeDesc, STNode bitwiseAndToken, STNode rightTypeDesc) {
        // `var` is not allowed as an operand of an intersection type.
        leftTypeDesc = validateForUsageOfVar(leftTypeDesc);
        rightTypeDesc = validateForUsageOfVar(rightTypeDesc);
        return STNodeFactory.createIntersectionTypeDescriptorNode(leftTypeDesc, bitwiseAndToken, rightTypeDesc);
    }

    /**
     * Parse singleton type descriptor.
     * <p>
     * singleton-type-descriptor := simple-const-expr
     * simple-const-expr :=
     * nil-literal
     * | boolean-literal
     * | [Sign] int-literal
     * | [Sign] floating-point-literal
     * | string-literal
     * | constant-reference-expr
     * </p>
     *
     * @return Singleton type descriptor node
     */
    private STNode parseSingletonTypeDesc() {
        STNode simpleContExpr = parseSimpleConstExpr();
        return STNodeFactory.createSingletonTypeDescriptorNode(simpleContExpr);
    }

    // Parses a signed numeric literal (e.g. `-1`, `+2.0`) as a unary expression.
    private STNode parseSignedIntOrFloat() {
        STNode operator = parseUnaryOperator();
        STNode literal;
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case HEX_INTEGER_LITERAL_TOKEN:
            case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
            case HEX_FLOATING_POINT_LITERAL_TOKEN:
                literal = parseBasicLiteral();
                break;
            default:
                // Remaining case: decimal integer literal.
                literal = parseDecimalIntLiteral(ParserRuleContext.DECIMAL_INTEGER_LITERAL_TOKEN);
                literal = STNodeFactory.createBasicLiteralNode(SyntaxKind.NUMERIC_LITERAL, literal);
        }
        return STNodeFactory.createUnaryExpressionNode(operator, literal);
    }

    private boolean isSingletonTypeDescStart(SyntaxKind tokenKind, boolean inTypeDescCtx) {
        STToken nextNextToken = getNextNextToken(tokenKind);
        switch (tokenKind) {
            case STRING_LITERAL_TOKEN:
            case DECIMAL_INTEGER_LITERAL_TOKEN:
            case HEX_INTEGER_LITERAL_TOKEN:
            case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
            case HEX_FLOATING_POINT_LITERAL_TOKEN:
            case TRUE_KEYWORD:
            case FALSE_KEYWORD:
            case NULL_KEYWORD:
                // Outside a type-desc context a literal only starts a singleton type
                // if what follows could continue a type descriptor.
                if (inTypeDescCtx || isValidTypeDescRHSOutSideTypeDescCtx(nextNextToken)) {
                    return true;
                }
                return false;
            case PLUS_TOKEN:
            case MINUS_TOKEN:
                // A sign must be followed by a numeric literal.
                return isIntOrFloat(nextNextToken);
            default:
                return false;
        }
    }

    static boolean isIntOrFloat(STToken token) {
        switch (token.kind) {
            case DECIMAL_INTEGER_LITERAL_TOKEN:
            case HEX_INTEGER_LITERAL_TOKEN:
            case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
            case HEX_FLOATING_POINT_LITERAL_TOKEN:
                return true;
            default:
                return false;
        }
    }

    // Tokens that may legitimately follow a type descriptor when we are not in a
    // type-descriptor parsing context.
    private boolean isValidTypeDescRHSOutSideTypeDescCtx(STToken token) {
        switch (token.kind) {
            case IDENTIFIER_TOKEN:
            case QUESTION_MARK_TOKEN:
            case OPEN_PAREN_TOKEN:
            case OPEN_BRACKET_TOKEN:
            case PIPE_TOKEN:
            case BITWISE_AND_TOKEN:
            case OPEN_BRACE_TOKEN:
            case ERROR_KEYWORD:
                return true;
            default:
                return false;
        }
    }

    /**
     * Check whether the parser reached to a valid expression start.
     *
     * @param nextTokenKind Kind of the next immediate token.
     * @param nextTokenIndex Index to the next token.
     * @return <code>true</code> if this is a start of a valid expression. <code>false</code> otherwise
     */
    private boolean isValidExpressionStart(SyntaxKind nextTokenKind, int nextTokenIndex) {
        nextTokenIndex++;
        switch (nextTokenKind) {
            case DECIMAL_INTEGER_LITERAL_TOKEN:
            case HEX_INTEGER_LITERAL_TOKEN:
            case STRING_LITERAL_TOKEN:
            case NULL_KEYWORD:
            case TRUE_KEYWORD:
            case FALSE_KEYWORD:
            case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
            case HEX_FLOATING_POINT_LITERAL_TOKEN:
                // A literal starts an expression only if the token after it can end
                // or continue one.
                SyntaxKind nextNextTokenKind = peek(nextTokenIndex).kind;
                return nextNextTokenKind == SyntaxKind.SEMICOLON_TOKEN || nextNextTokenKind == SyntaxKind.COMMA_TOKEN ||
                        nextNextTokenKind == SyntaxKind.CLOSE_BRACKET_TOKEN ||
                        isValidExprRhsStart(nextNextTokenKind, SyntaxKind.SIMPLE_NAME_REFERENCE);
            case IDENTIFIER_TOKEN:
                return isValidExprRhsStart(peek(nextTokenIndex).kind, SyntaxKind.SIMPLE_NAME_REFERENCE);
            case OPEN_PAREN_TOKEN:
            case CHECK_KEYWORD:
            case CHECKPANIC_KEYWORD:
            case OPEN_BRACE_TOKEN:
            case TYPEOF_KEYWORD:
            case NEGATION_TOKEN:
            case EXCLAMATION_MARK_TOKEN:
            case TRAP_KEYWORD:
            case OPEN_BRACKET_TOKEN:
            case LT_TOKEN:
            case FROM_KEYWORD:
            case LET_KEYWORD:
            case BACKTICK_TOKEN:
            case NEW_KEYWORD:
            case LEFT_ARROW_TOKEN:
                return true;
            case PLUS_TOKEN:
            case MINUS_TOKEN:
                // A sign must be followed by something that itself starts an expr.
                return isValidExpressionStart(peek(nextTokenIndex).kind, nextTokenIndex);
            case FUNCTION_KEYWORD:
            case TABLE_KEYWORD:
                return peek(nextTokenIndex).kind == SyntaxKind.FROM_KEYWORD;
            case STREAM_KEYWORD:
                STToken nextNextToken = peek(nextTokenIndex);
                return nextNextToken.kind == SyntaxKind.KEY_KEYWORD ||
                        nextNextToken.kind == SyntaxKind.OPEN_BRACKET_TOKEN ||
                        nextNextToken.kind == SyntaxKind.FROM_KEYWORD;
            case ERROR_KEYWORD:
                return peek(nextTokenIndex).kind == SyntaxKind.OPEN_PAREN_TOKEN;
            case SERVICE_KEYWORD:
                return peek(nextTokenIndex).kind == SyntaxKind.OPEN_BRACE_TOKEN;
            case XML_KEYWORD:
            case STRING_KEYWORD:
                return peek(nextTokenIndex).kind == SyntaxKind.BACKTICK_TOKEN;
            case START_KEYWORD:
            case FLUSH_KEYWORD:
            case WAIT_KEYWORD:
            default:
                return false;
        }
    }

    /**
     * Parse sync send action.
     * <p>
     * <code>sync-send-action := expression ->> peer-worker</code>
     *
     * @param expression LHS expression of the sync send action
     * @return Sync send action node
     */
    private STNode parseSyncSendAction(STNode expression) {
        STNode syncSendToken = parseSyncSendToken();
        STNode peerWorker = parsePeerWorkerName();
        return STNodeFactory.createSyncSendActionNode(expression, syncSendToken, peerWorker);
    }

    /**
     * Parse peer worker.
     * <p>
     * <code>peer-worker := worker-name | default</code>
     *
     * @return peer worker name node
     */
    private STNode parsePeerWorkerName() {
        STToken token = peek();
        switch (token.kind) {
            case IDENTIFIER_TOKEN:
            case DEFAULT_KEYWORD:
                return STNodeFactory.createSimpleNameReferenceNode(consume());
            default:
                // Unlike parseOptionalPeerWorkerName, the worker name is mandatory.
                recover(token, ParserRuleContext.PEER_WORKER_NAME);
                return parsePeerWorkerName();
        }
    }

    /**
     * Parse sync send token.
     * <p>
     * <code>sync-send-token := ->> </code>
     *
     * @return sync send token
     */
    private STNode parseSyncSendToken() {
        STToken token = peek();
        if (token.kind == SyntaxKind.SYNC_SEND_TOKEN) {
            return consume();
        } else {
            recover(token, ParserRuleContext.SYNC_SEND_TOKEN);
            return parseSyncSendToken();
        }
    }

    /**
     * Parse receive action.
     * <p>
     * <code>receive-action := single-receive-action | multiple-receive-action</code>
     *
     * @return Receive action
     */
    private STNode parseReceiveAction() {
        STNode leftArrow = parseLeftArrowToken();
        STNode receiveWorkers = parseReceiveWorkers();
        return STNodeFactory.createReceiveActionNode(leftArrow, receiveWorkers);
    }

    private STNode parseReceiveWorkers() {
        switch (peek().kind) {
            case DEFAULT_KEYWORD:
            case IDENTIFIER_TOKEN:
                // Single receive: `<- worker`
                return parsePeerWorkerName();
            case OPEN_BRACE_TOKEN:
                // Multiple receive: `<- { ... }`
                return parseMultipleReceiveWorkers();
            default:
                recover(peek(), ParserRuleContext.RECEIVE_WORKERS);
                return parseReceiveWorkers();
        }
    }

    /**
     * Parse multiple worker receivers.
     * <p>
     * <code>{ receive-field (, receive-field)* }</code>
     *
     * @return Multiple worker receiver node
     */
    private STNode parseMultipleReceiveWorkers() {
        startContext(ParserRuleContext.MULTI_RECEIVE_WORKERS);
        STNode openBrace = parseOpenBrace();
        STNode receiveFields = parseReceiveFields();
        STNode closeBrace = parseCloseBrace();
        endContext();

        // An empty `{}` is invalid; attach the diagnostic to the open brace.
        openBrace = cloneWithDiagnosticIfListEmpty(receiveFields, openBrace,
                DiagnosticErrorCode.ERROR_MISSING_RECEIVE_FIELD_IN_RECEIVE_ACTION);
        return STNodeFactory.createReceiveFieldsNode(openBrace, receiveFields, closeBrace);
    }

    private STNode parseReceiveFields() {
        List<STNode> receiveFields = new ArrayList<>();
        STToken nextToken = peek();

        if (isEndOfReceiveFields(nextToken.kind)) {
            return STNodeFactory.createEmptyNodeList();
        }

        // First field, then (separator, field) pairs.
        STNode receiveField = parseReceiveField();
        receiveFields.add(receiveField);

        nextToken = peek();
        STNode recieveFieldEnd;
        while (!isEndOfReceiveFields(nextToken.kind)) {
            recieveFieldEnd = parseReceiveFieldEnd();
            if (recieveFieldEnd == null) {
                break;
            }

            receiveFields.add(recieveFieldEnd);
            receiveField = parseReceiveField();
            receiveFields.add(receiveField);
            nextToken = peek();
        }

        return STNodeFactory.createNodeList(receiveFields);
    }

    private boolean isEndOfReceiveFields(SyntaxKind nextTokenKind) {
        switch (nextTokenKind) {
            case EOF_TOKEN:
            case CLOSE_BRACE_TOKEN:
                return true;
            default:
                return false;
        }
    }

    private STNode parseReceiveFieldEnd() {
        switch (peek().kind) {
            case COMMA_TOKEN:
                return parseComma();
            case CLOSE_BRACE_TOKEN:
                // End of the field list.
                return null;
            default:
                recover(peek(), ParserRuleContext.RECEIVE_FIELD_END);
                return parseReceiveFieldEnd();
        }
    }

    /**
     * Parse receive field.
     * <p>
     * <code>receive-field := peer-worker | field-name : peer-worker</code>
     *
     * @return Receiver field node
     */
    private STNode parseReceiveField() {
        switch (peek().kind) {
            case DEFAULT_KEYWORD:
                STNode defaultKeyword = parseDefaultKeyword();
                return STNodeFactory.createSimpleNameReferenceNode(defaultKeyword);
            case IDENTIFIER_TOKEN:
                STNode identifier = parseIdentifier(ParserRuleContext.RECEIVE_FIELD_NAME);
                return createQualifiedReceiveField(identifier);
            default:
                recover(peek(), ParserRuleContext.RECEIVE_FIELD);
                return parseReceiveField();
        }
    }

    private STNode createQualifiedReceiveField(STNode identifier) {
        // No colon: the identifier itself is the peer worker.
        if (peek().kind != SyntaxKind.COLON_TOKEN) {
            return identifier;
        }

        STNode colon = parseColon();
        STNode peerWorker = parsePeerWorkerName();
        return STNodeFactory.createQualifiedNameReferenceNode(identifier, colon, peerWorker);
    }

    /**
     * Parse left arrow (<-) token.
     *
     * @return left arrow token
     */
    private STNode parseLeftArrowToken() {
        STToken token = peek();
        if (token.kind == SyntaxKind.LEFT_ARROW_TOKEN) {
            return consume();
        } else {
            recover(token, ParserRuleContext.LEFT_ARROW_TOKEN);
            return parseLeftArrowToken();
        }
    }

    /**
     * Parse signed right shift token (>>).
     *
     * @return Parsed node
     */
    private STNode parseSignedRightShiftToken() {
        // Merge two adjacent `>` tokens into one `>>` token, keeping the outer
        // minutiae of the pair.
        STNode openGTToken = consume();
        STToken endLGToken = consume();
        STNode doubleGTToken = STNodeFactory.createToken(SyntaxKind.DOUBLE_GT_TOKEN, openGTToken.leadingMinutiae(),
                endLGToken.trailingMinutiae());

        // Whitespace between the two `>` tokens is not allowed.
        if (hasTrailingMinutiae(openGTToken)) {
            doubleGTToken = SyntaxErrors.addDiagnostic(doubleGTToken,
                    DiagnosticErrorCode.ERROR_NO_WHITESPACES_ALLOWED_IN_RIGHT_SHIFT_OP);
        }
        return doubleGTToken;
    }

    /**
     * Parse unsigned right shift token (>>>).
     *
     * @return Parsed node
     */
    private STNode parseUnsignedRightShiftToken() {
        // Merge three adjacent `>` tokens into one `>>>` token.
        STNode openGTToken = consume();
        STNode middleGTToken = consume();
        STNode endLGToken = consume();

        STNode unsignedRightShiftToken = STNodeFactory.createToken(SyntaxKind.TRIPPLE_GT_TOKEN,
                openGTToken.leadingMinutiae(), endLGToken.trailingMinutiae());

        // Whitespace between any of the three `>` tokens is not allowed.
        boolean validOpenGTToken = !hasTrailingMinutiae(openGTToken);
        boolean validMiddleGTToken = !hasTrailingMinutiae(middleGTToken);
        if (validOpenGTToken && validMiddleGTToken) {
            return unsignedRightShiftToken;
        }

        unsignedRightShiftToken = SyntaxErrors.addDiagnostic(unsignedRightShiftToken,
                DiagnosticErrorCode.ERROR_NO_WHITESPACES_ALLOWED_IN_UNSIGNED_RIGHT_SHIFT_OP);
        return unsignedRightShiftToken;
    }

    /**
     * Parse wait action.
     * <p>
     * <code>wait-action := single-wait-action | multiple-wait-action | alternate-wait-action </code>
     *
     * @return Wait action node
     */
    private STNode parseWaitAction() {
        STNode waitKeyword = parseWaitKeyword();
        if (peek().kind == SyntaxKind.OPEN_BRACE_TOKEN) {
            return parseMultiWaitAction(waitKeyword);
        }

        return parseSingleOrAlternateWaitAction(waitKeyword);
    }

    /**
     * Parse wait keyword.
     *
     * @return wait keyword
     */
    private STNode parseWaitKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.WAIT_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.WAIT_KEYWORD);
            return parseWaitKeyword();
        }
    }

    /**
     * Parse single or alternate wait actions.
     * <p>
     * <code>
     * alternate-or-single-wait-action := wait wait-future-expr (| wait-future-expr)+
     * <br/>
     * wait-future-expr := expression but not mapping-constructor-expr
     * </code>
     *
     * @param waitKeyword wait keyword
     * @return Single or alternate wait action node
     */
    private STNode parseSingleOrAlternateWaitAction(STNode waitKeyword) {
        startContext(ParserRuleContext.ALTERNATE_WAIT_EXPRS);
        STToken nextToken = peek();

        // No wait-future-expr at all: synthesize a missing identifier.
        if (isEndOfWaitFutureExprList(nextToken.kind)) {
            endContext();
            STNode waitFutureExprs = STNodeFactory
                    .createSimpleNameReferenceNode(STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN));
            waitFutureExprs = SyntaxErrors.addDiagnostic(waitFutureExprs,
                    DiagnosticErrorCode.ERROR_MISSING_WAIT_FUTURE_EXPRESSION);
            return STNodeFactory.createWaitActionNode(waitKeyword, waitFutureExprs);
        }

        // First expression, then (pipe, expression) pairs.
        List<STNode> waitFutureExprList = new ArrayList<>();
        STNode waitField = parseWaitFutureExpr();
        waitFutureExprList.add(waitField);

        nextToken = peek();
        STNode waitFutureExprEnd;
        while (!isEndOfWaitFutureExprList(nextToken.kind)) {
            waitFutureExprEnd = parseWaitFutureExprEnd();
            if (waitFutureExprEnd == null) {
                break;
            }

            waitFutureExprList.add(waitFutureExprEnd);
            waitField = parseWaitFutureExpr();
            waitFutureExprList.add(waitField);
            nextToken = peek();
        }

        endContext();
        // NOTE(review): only the first element of waitFutureExprList ends up in the
        // resulting node — the rest of the accumulated list is discarded. Presumably
        // `a | b` is already consumed as one (binary) expression by
        // parseWaitFutureExpr, so the list rarely grows — confirm.
        return STNodeFactory.createWaitActionNode(waitKeyword, waitFutureExprList.get(0));
    }

    private boolean isEndOfWaitFutureExprList(SyntaxKind nextTokenKind) {
        switch (nextTokenKind) {
            case EOF_TOKEN:
            case CLOSE_BRACE_TOKEN:
            case SEMICOLON_TOKEN:
            case OPEN_BRACE_TOKEN:
                return true;
            case PIPE_TOKEN: // a pipe continues the alternate-wait list
            default:
                return false;
        }
    }

    private STNode parseWaitFutureExpr() {
        STNode waitFutureExpr = parseActionOrExpression();
        if (waitFutureExpr.kind == SyntaxKind.MAPPING_CONSTRUCTOR) {
            // Mapping constructors belong to multiple-wait actions, not here.
            waitFutureExpr = SyntaxErrors.addDiagnostic(waitFutureExpr,
                    DiagnosticErrorCode.ERROR_MAPPING_CONSTRUCTOR_EXPR_AS_A_WAIT_EXPR);
        } else if (isAction(waitFutureExpr)) {
            waitFutureExpr = SyntaxErrors.addDiagnostic(waitFutureExpr,
                    DiagnosticErrorCode.ERROR_ACTION_AS_A_WAIT_EXPR);
        }
        return waitFutureExpr;
    }

    private STNode parseWaitFutureExprEnd() {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case PIPE_TOKEN:
                return parsePipeToken();
            default:
                // Stop if the list has ended or the next token cannot start an
                // expression; otherwise recover and retry.
                if (isEndOfWaitFutureExprList(nextToken.kind) || !isValidExpressionStart(nextToken.kind, 1)) {
                    return null;
                }

                recover(peek(), ParserRuleContext.WAIT_FUTURE_EXPR_END);
                return parseWaitFutureExprEnd();
        }
    }

    /**
     * Parse multiple wait action.
     * <p>
     * <code>multiple-wait-action := wait { wait-field (, wait-field)* }</code>
     *
     * @param waitKeyword Wait keyword
     * @return Multiple wait action node
     */
    private STNode parseMultiWaitAction(STNode waitKeyword) {
        startContext(ParserRuleContext.MULTI_WAIT_FIELDS);
        STNode openBrace = parseOpenBrace();
        STNode waitFields = parseWaitFields();
        STNode closeBrace = parseCloseBrace();
        endContext();

        // An empty `{}` is invalid; attach the diagnostic to the open brace.
        openBrace = cloneWithDiagnosticIfListEmpty(waitFields, openBrace,
                DiagnosticErrorCode.ERROR_MISSING_WAIT_FIELD_IN_WAIT_ACTION);
        STNode waitFieldsNode = STNodeFactory.createWaitFieldsListNode(openBrace, waitFields, closeBrace);
        return STNodeFactory.createWaitActionNode(waitKeyword, waitFieldsNode);
    }

    private STNode parseWaitFields() {
        List<STNode> waitFields = new ArrayList<>();
        STToken nextToken = peek();

        if (isEndOfWaitFields(nextToken.kind)) {
            return STNodeFactory.createEmptyNodeList();
        }

        // First field, then (separator, field) pairs.
        STNode waitField = parseWaitField();
        waitFields.add(waitField);

        nextToken = peek();
        STNode waitFieldEnd;
        while (!isEndOfWaitFields(nextToken.kind)) {
            waitFieldEnd = parseWaitFieldEnd();
            if (waitFieldEnd == null) {
                break;
            }

            waitFields.add(waitFieldEnd);
            waitField = parseWaitField();
            waitFields.add(waitField);
            nextToken = peek();
        }

        return STNodeFactory.createNodeList(waitFields);
    }

    private boolean isEndOfWaitFields(SyntaxKind nextTokenKind) {
        switch (nextTokenKind) {
            case EOF_TOKEN:
            case CLOSE_BRACE_TOKEN:
                return true;
            default:
                return false;
        }
    }

    private STNode parseWaitFieldEnd() {
        switch (peek().kind) {
            case COMMA_TOKEN:
                return parseComma();
            case CLOSE_BRACE_TOKEN:
                // End of the field list.
                return null;
            default:
                recover(peek(), ParserRuleContext.WAIT_FIELD_END);
                return parseWaitFieldEnd();
        }
    }

    /**
     * Parse wait field.
     * <p>
     * <code>wait-field := variable-name | field-name : wait-future-expr</code>
     *
     * @return Receiver field node
     */
    private STNode parseWaitField() {
        switch (peek().kind) {
            case IDENTIFIER_TOKEN:
                STNode identifier = parseIdentifier(ParserRuleContext.WAIT_FIELD_NAME);
                identifier = STNodeFactory.createSimpleNameReferenceNode(identifier);
                return createQualifiedWaitField(identifier);
            default:
                recover(peek(), ParserRuleContext.WAIT_FIELD_NAME);
                return parseWaitField();
        }
    }

    private STNode createQualifiedWaitField(STNode identifier) {
        // No colon: plain variable-name wait field.
        if (peek().kind != SyntaxKind.COLON_TOKEN) {
            return identifier;
        }

        STNode colon = parseColon();
        STNode waitFutureExpr = parseWaitFutureExpr();
        return STNodeFactory.createWaitFieldNode(identifier, colon, waitFutureExpr);
    }

    /**
     * Parse annot access expression.
     * <p>
     * <code>
     * annot-access-expr := expression .@ annot-tag-reference
     * <br/>
     * annot-tag-reference := qualified-identifier | identifier
     * </code>
     *
     * @param lhsExpr Preceding expression of the annot access access
     * @return Parsed node
     */
    private STNode parseAnnotAccessExpression(STNode lhsExpr, boolean isInConditionalExpr) {
        STNode annotAccessToken = parseAnnotChainingToken();
        STNode annotTagReference = parseFieldAccessIdentifier(isInConditionalExpr);
        return STNodeFactory.createAnnotAccessExpressionNode(lhsExpr, annotAccessToken, annotTagReference);
    }

    /**
     * Parse annot-chaining-token.
     *
     * @return Parsed node
     */
    private STNode parseAnnotChainingToken() {
        STToken token = peek();
        if (token.kind == SyntaxKind.ANNOT_CHAINING_TOKEN) {
            return consume();
        } else {
            recover(token, ParserRuleContext.ANNOT_CHAINING_TOKEN);
            return parseAnnotChainingToken();
        }
    }

    /**
     * Parse field access identifier.
     * <p>
     * <code>field-access-identifier := qualified-identifier | identifier</code>
     *
     * @return Parsed node
     */
    private STNode parseFieldAccessIdentifier(boolean isInConditionalExpr) {
        return parseQualifiedIdentifier(ParserRuleContext.FIELD_ACCESS_IDENTIFIER, isInConditionalExpr);
    }

    /**
     * Parse query action.
     * <p>
     * <code>query-action := query-pipeline do-clause
     * <br/>
     * do-clause := do block-stmt
     * </code>
     *
     * @param queryConstructType Query construct type. This is only for validation
     * @param queryPipeline Query pipeline
     * @param selectClause Select clause if any This is only for validation.
     * @return Query action node
     */
    private STNode parseQueryAction(STNode queryConstructType, STNode queryPipeline, STNode selectClause,
                                    boolean isRhsExpr) {
        // A query action may not have a construct type or a select clause; if the
        // caller parsed them anyway, attach them to the pipeline as invalid
        // minutiae with diagnostics.
        if (queryConstructType != null) {
            queryPipeline = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(queryPipeline, queryConstructType,
                    DiagnosticErrorCode.ERROR_QUERY_CONSTRUCT_TYPE_IN_QUERY_ACTION);
        }
        if (selectClause != null) {
            queryPipeline = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(queryPipeline, selectClause,
                    DiagnosticErrorCode.ERROR_SELECT_CLAUSE_IN_QUERY_ACTION);
        }

        startContext(ParserRuleContext.DO_CLAUSE);
        STNode doKeyword = parseDoKeyword();
        STNode blockStmt = parseBlockNode();
        endContext();
        return STNodeFactory.createQueryActionNode(queryPipeline, doKeyword, blockStmt);
    }

    /**
     * Parse 'do' keyword.
     *
     * @return do keyword node
     */
    private STNode parseDoKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.DO_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.DO_KEYWORD);
            return parseDoKeyword();
        }
    }

    /**
     * Parse optional field access or xml optional attribute access expression.
     * <p>
     * <code>
     * optional-field-access-expr := expression ?. field-name
     * <br/>
     * xml-optional-attribute-access-expr := expression ?. xml-attribute-name
     * <br/>
     * xml-attribute-name := xml-qualified-name | qualified-identifier | identifier
     * <br/>
     * xml-qualified-name := xml-namespace-prefix : identifier
     * <br/>
     * xml-namespace-prefix := identifier
     * </code>
     *
     * @param lhsExpr Preceding expression of the optional access
     * @return Parsed node
     */
    private STNode parseOptionalFieldAccessExpression(STNode lhsExpr, boolean isInConditionalExpr) {
        STNode optionalFieldAccessToken = parseOptionalChainingToken();
        STNode fieldName = parseFieldAccessIdentifier(isInConditionalExpr);
        return STNodeFactory.createOptionalFieldAccessExpressionNode(lhsExpr, optionalFieldAccessToken, fieldName);
    }

    /**
     * Parse optional chaining token.
     *
     * @return parsed node
     */
    private STNode parseOptionalChainingToken() {
        STToken token = peek();
        if (token.kind == SyntaxKind.OPTIONAL_CHAINING_TOKEN) {
            return consume();
        } else {
            recover(token, ParserRuleContext.OPTIONAL_CHAINING_TOKEN);
            return parseOptionalChainingToken();
        }
    }

    /**
     * Parse conditional expression.
     * <p>
     * <code>conditional-expr := expression ? expression : expression</code>
     *
     * @param lhsExpr Preceding expression of the question mark
     * @return Parsed node
     */
    private STNode parseConditionalExpression(STNode lhsExpr) {
        startContext(ParserRuleContext.CONDITIONAL_EXPRESSION);
        STNode questionMark = parseQuestionMark();
        // Middle expression is parsed in conditional-expr mode (last flag) so that
        // an ambiguous `a ? b:c` can be re-split below.
        STNode middleExpr = parseExpression(OperatorPrecedence.ANON_FUNC_OR_LET, true, false, true);

        STNode nextToken = peek();
        STNode endExpr;
        STNode colon;
        if (nextToken.kind != SyntaxKind.COLON_TOKEN && middleExpr.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {
            // No explicit colon, but the middle expr parsed as `prefix:identifier`:
            // reinterpret the qualified name as middle-expr `:` end-expr.
            STQualifiedNameReferenceNode qualifiedNameRef = (STQualifiedNameReferenceNode) middleExpr;
            middleExpr = STNodeFactory.createSimpleNameReferenceNode(qualifiedNameRef.modulePrefix);
            colon = qualifiedNameRef.colon;
            endContext();
            endExpr = STNodeFactory.createSimpleNameReferenceNode(qualifiedNameRef.identifier);
        } else {
            colon = parseColon();
            endContext();
            // End expression at lowest precedence, to capture the whole RHS.
            endExpr = parseExpression(OperatorPrecedence.ANON_FUNC_OR_LET, true, false);
        }
        return STNodeFactory.createConditionalExpressionNode(lhsExpr, questionMark, middleExpr, colon, endExpr);
    }

    /**
     * Parse enum declaration.
     * <p>
     * module-enum-decl :=
     * metadata
     * [public] enum identifier { enum-member (, enum-member)* }
     * enum-member := metadata identifier [= const-expr]
     * </p>
     *
     * @param metadata Metadata of the enum declaration
     * @param qualifier Visibility qualifier, if any
     * @return Parsed enum node.
     */
    private STNode parseEnumDeclaration(STNode metadata, STNode qualifier) {
        startContext(ParserRuleContext.MODULE_ENUM_DECLARATION);
        STNode enumKeywordToken = parseEnumKeyword();
        STNode identifier = parseIdentifier(ParserRuleContext.MODULE_ENUM_NAME);
        STNode openBraceToken = parseOpenBrace();
        STNode enumMemberList = parseEnumMemberList();
        STNode closeBraceToken = parseCloseBrace();

        endContext();
        // An enum with no members is invalid; attach the diagnostic to the brace.
        openBraceToken = cloneWithDiagnosticIfListEmpty(enumMemberList, openBraceToken,
                DiagnosticErrorCode.ERROR_MISSING_ENUM_MEMBER);
        return STNodeFactory.createEnumDeclarationNode(metadata, qualifier, enumKeywordToken, identifier,
                openBraceToken, enumMemberList, closeBraceToken);
    }

    /**
     * Parse 'enum' keyword.
     *
     * @return enum keyword node
     */
    private STNode parseEnumKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.ENUM_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.ENUM_KEYWORD);
            return parseEnumKeyword();
        }
    }

    /**
     * Parse enum member list.
     * <p>
     * enum-member := metadata identifier [= const-expr]
     * </p>
     *
     * @return enum member list node.
     */
    private STNode parseEnumMemberList() {
        startContext(ParserRuleContext.ENUM_MEMBER_LIST);

        // NOTE(review): this early return does not call endContext(), unlike other
        // early returns in this parser (e.g. parseSingleOrAlternateWaitAction) —
        // confirm the context stack stays balanced for an empty enum body.
        if (peek().kind == SyntaxKind.CLOSE_BRACE_TOKEN) {
            return STNodeFactory.createEmptyNodeList();
        }

        // First member, then (separator, member) pairs.
        List<STNode> enumMemberList = new ArrayList<>();
        STNode enumMember = parseEnumMember();

        STNode enumMemberRhs;
        while (peek().kind != SyntaxKind.CLOSE_BRACE_TOKEN) {
            enumMemberRhs = parseEnumMemberEnd();
            if (enumMemberRhs == null) {
                break;
            }
            enumMemberList.add(enumMember);
            enumMemberList.add(enumMemberRhs);
            enumMember = parseEnumMember();
        }

        enumMemberList.add(enumMember);
        endContext();
        return STNodeFactory.createNodeList(enumMemberList);
    }

    /**
     * Parse enum member.
     * <p>
     * enum-member := metadata identifier [= const-expr]
     * </p>
     *
     * @return Parsed enum member node.
     */
    private STNode parseEnumMember() {
        STNode metadata;
        switch (peek().kind) {
            case DOCUMENTATION_STRING:
            case AT_TOKEN:
                metadata = parseMetaData();
                break;
            default:
                metadata = STNodeFactory.createEmptyNode();
        }

        STNode identifierNode = parseIdentifier(ParserRuleContext.ENUM_MEMBER_NAME);
        return parseEnumMemberRhs(metadata, identifierNode);
    }

    private STNode parseEnumMemberRhs(STNode metadata, STNode identifierNode) {
        STNode equalToken, constExprNode;
        switch (peek().kind) {
            case EQUAL_TOKEN:
                // Member with an explicit constant value: `name = const-expr`.
                equalToken = parseAssignOp();
                constExprNode = parseExpression();
                break;
            case COMMA_TOKEN:
            case CLOSE_BRACE_TOKEN:
                // Plain member without a value.
                equalToken = STNodeFactory.createEmptyNode();
                constExprNode = STNodeFactory.createEmptyNode();
                break;
            default:
                recover(peek(), ParserRuleContext.ENUM_MEMBER_RHS, metadata, identifierNode);
                return parseEnumMemberRhs(metadata, identifierNode);
        }

        return STNodeFactory.createEnumMemberNode(metadata, identifierNode, equalToken, constExprNode);
    }

    private STNode parseEnumMemberEnd() {
        switch (peek().kind) {
            case COMMA_TOKEN:
                return parseComma();
            case CLOSE_BRACE_TOKEN:
                // End of the member list.
                return null;
            default:
                recover(peek(), ParserRuleContext.ENUM_MEMBER_END);
                return parseEnumMemberEnd();
        }
    }

    /**
     * Parse transaction statement.
     * <p>
     * <code>transaction-stmt := `transaction` block-stmt [on-fail-clause]</code>
     *
     * @return Transaction statement node
     */
    private STNode parseTransactionStatement() {
        startContext(ParserRuleContext.TRANSACTION_STMT);
        STNode transactionKeyword = parseTransactionKeyword();
        STNode blockStmt = parseBlockNode();
        endContext();
        // The on-fail clause, if any, is parsed outside the TRANSACTION_STMT context.
        STNode onFailClause = parseOptionalOnFailClause();
        return STNodeFactory.createTransactionStatementNode(transactionKeyword, blockStmt, onFailClause);
    }

    /**
     * Parse transaction keyword.
     *
     * @return parsed node
     */
    private STNode parseTransactionKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.TRANSACTION_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.TRANSACTION_KEYWORD);
            return parseTransactionKeyword();
        }
    }

    /**
     * Parse commit action.
     * <p>
     * <code>commit-action := "commit"</code>
     *
     * @return Commit action node
     */
    private STNode parseCommitAction() {
        STNode commitKeyword = parseCommitKeyword();
        return STNodeFactory.createCommitActionNode(commitKeyword);
    }

    /**
     * Parse commit keyword.
     *
     * @return parsed node
     */
    private STNode parseCommitKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.COMMIT_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.COMMIT_KEYWORD);
            return parseCommitKeyword();
        }
    }

    /**
     * Parse retry statement.
     * <p>
     * <code>
     * retry-stmt := `retry` retry-spec block-stmt [on-fail-clause]
     * <br/>
     * retry-spec := [type-parameter] [ `(` arg-list `)` ]
     * </code>
     *
     * @return Retry statement node
     */
    private STNode parseRetryStatement() {
        // The context opened here is closed in parseRetryTypeParamRhs(), after the
        // retry body has been parsed.
        startContext(ParserRuleContext.RETRY_STMT);
        STNode retryKeyword = parseRetryKeyword();
        STNode retryStmt = parseRetryKeywordRhs(retryKeyword);
        return retryStmt;
    }

    private STNode parseRetryKeywordRhs(STNode retryKeyword) {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case LT_TOKEN:
                // `retry<Algorithm> ...`
                STNode typeParam = parseTypeParameter();
                return parseRetryTypeParamRhs(retryKeyword, typeParam);
            case OPEN_PAREN_TOKEN:
            case OPEN_BRACE_TOKEN:
            case TRANSACTION_KEYWORD:
                typeParam = STNodeFactory.createEmptyNode();
                return parseRetryTypeParamRhs(retryKeyword, typeParam);
            default:
                recover(peek(), ParserRuleContext.RETRY_KEYWORD_RHS, retryKeyword);
                return parseRetryKeywordRhs(retryKeyword);
        }
    }

    private STNode parseRetryTypeParamRhs(STNode retryKeyword, STNode typeParam) {
        STNode args;
        switch (peek().kind) {
            case OPEN_PAREN_TOKEN:
                args = parseParenthesizedArgList();
                break;
            case OPEN_BRACE_TOKEN:
            case TRANSACTION_KEYWORD:
                // The arg list is optional.
                args = STNodeFactory.createEmptyNode();
                break;
            default:
                recover(peek(), ParserRuleContext.RETRY_TYPE_PARAM_RHS, retryKeyword, typeParam);
                return parseRetryTypeParamRhs(retryKeyword, typeParam);
        }

        STNode blockStmt = parseRetryBody();
        endContext(); // end RETRY_STMT context opened in parseRetryStatement()
        STNode onFailClause = parseOptionalOnFailClause();
        return STNodeFactory.createRetryStatementNode(retryKeyword, typeParam, args, blockStmt, onFailClause);
    }

    private STNode parseRetryBody() {
        switch (peek().kind) {
            case OPEN_BRACE_TOKEN:
                return parseBlockNode();
            case TRANSACTION_KEYWORD:
                // `retry transaction { ... }`
                return parseTransactionStatement();
            default:
                recover(peek(), ParserRuleContext.RETRY_BODY);
                return parseRetryBody();
        }
    }

    /**
     * Parse optional on fail clause.
     *
     * @return Parsed node
     */
    private STNode parseOptionalOnFailClause() {
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.ON_KEYWORD) {
            return parseOnFailClause();
        }

        // The clause is optional; anything that ends a compound statement means
        // "no on-fail clause here".
        if (isEndOfRegularCompoundStmt(nextToken.kind)) {
            return STNodeFactory.createEmptyNode();
        }

        recover(nextToken, ParserRuleContext.REGULAR_COMPOUND_STMT_RHS);
        return parseOptionalOnFailClause();
    }

    private boolean isEndOfRegularCompoundStmt(SyntaxKind nodeKind) {
        switch (nodeKind) {
            case CLOSE_BRACE_TOKEN:
            case SEMICOLON_TOKEN:
            case AT_TOKEN:
            case EOF_TOKEN:
                return true;
            default:
                // Anything that can start a new statement also ends this one.
                return isStatementStartingToken(nodeKind);
        }
    }

    private boolean isStatementStartingToken(SyntaxKind nodeKind) {
        switch (nodeKind) {
            case FINAL_KEYWORD:
            case IF_KEYWORD:
            case WHILE_KEYWORD:
            case DO_KEYWORD:
            case PANIC_KEYWORD:
            case CONTINUE_KEYWORD:
            case BREAK_KEYWORD:
            case RETURN_KEYWORD:
            case TYPE_KEYWORD:
            case LOCK_KEYWORD:
            case OPEN_BRACE_TOKEN:
            case FORK_KEYWORD:
            case FOREACH_KEYWORD:
            case XMLNS_KEYWORD:
            case TRANSACTION_KEYWORD:
            case RETRY_KEYWORD:
            case ROLLBACK_KEYWORD:
            case MATCH_KEYWORD:
            case FAIL_KEYWORD:
            case CHECK_KEYWORD:
            case CHECKPANIC_KEYWORD:
            case TRAP_KEYWORD:
            case START_KEYWORD:
            case FLUSH_KEYWORD:
            case LEFT_ARROW_TOKEN:
            case WAIT_KEYWORD:
            case COMMIT_KEYWORD:
            case WORKER_KEYWORD:
                return true;
            default:
                // A type descriptor or an expression can also start a statement.
                if (isTypeStartingToken(nodeKind)) {
                    return true;
                }

                if (isValidExpressionStart(nodeKind, 1)) {
                    return true;
                }

                return false;
        }
    }

    /**
     * Parse on fail clause.
     * <p>
     * <code>
     * on-fail-clause := on fail typed-binding-pattern statement-block
     * </code>
     *
     * @return On fail clause node
     */
    private STNode parseOnFailClause() {
        startContext(ParserRuleContext.ON_FAIL_CLAUSE);
        STNode onKeyword = parseOnKeyword();
        STNode failKeyword = parseFailKeyword();
        STNode typeDescriptor = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true,
                false);
        STNode identifier = parseIdentifier(ParserRuleContext.VARIABLE_REF);
        STNode blockStatement = parseBlockNode();
        endContext();
        return STNodeFactory.createOnFailClauseNode(onKeyword, failKeyword, typeDescriptor, identifier,
                blockStatement);
    }

    /**
     * Parse retry keyword.
     *
     * @return parsed node
     */
    private STNode parseRetryKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.RETRY_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.RETRY_KEYWORD);
            return parseRetryKeyword();
        }
    }

    /**
     * Parse rollback statement.
     * <p>
     * <code>rollback-stmt := "rollback" [expression] ";"</code>
     *
     * @return Rollback statement node
     */
    private STNode parseRollbackStatement() {
        startContext(ParserRuleContext.ROLLBACK_STMT);
        STNode rollbackKeyword = parseRollbackKeyword();
        STNode expression;
        if (peek().kind == SyntaxKind.SEMICOLON_TOKEN) {
            // `rollback;` — the expression is optional.
            expression = STNodeFactory.createEmptyNode();
        } else {
            expression = parseExpression();
        }

        STNode semicolon = parseSemicolon();
        endContext();
        return STNodeFactory.createRollbackStatementNode(rollbackKeyword, expression, semicolon);
    }

    /**
     * Parse rollback keyword.
     *
     * @return Rollback keyword node
     */
    private STNode parseRollbackKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.ROLLBACK_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.ROLLBACK_KEYWORD);
            return parseRollbackKeyword();
        }
    }

    /**
     * Parse transactional expression.
* <p> * <code>transactional-expr := "transactional"</code> * * @return Transactional expression node */ private STNode parseTransactionalExpression() { STNode transactionalKeyword = parseTransactionalKeyword(); return STNodeFactory.createTransactionalExpressionNode(transactionalKeyword); } /** * Parse transactional keyword. * * @return Transactional keyword node */ private STNode parseTransactionalKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.TRANSACTIONAL_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.TRANSACTIONAL_KEYWORD); return parseTransactionalKeyword(); } } /** * Parse service-constructor-expr. * <p> * <code> * service-constructor-expr := [annots] service service-body-block * <br/> * service-body-block := { service-method-defn* } * <br/> * service-method-defn := metadata [resource] function identifier function-signature method-defn-body * </code> * * @param annots Annotations * @return Service constructor expression node */ private STNode parseServiceConstructorExpression(STNode annots) { startContext(ParserRuleContext.SERVICE_CONSTRUCTOR_EXPRESSION); STNode serviceKeyword = parseServiceKeyword(); STNode serviceBody = parseServiceBody(); endContext(); return STNodeFactory.createServiceConstructorExpressionNode(annots, serviceKeyword, serviceBody); } /** * Parse base16 literal. 
* <p> * <code> * byte-array-literal := Base16Literal | Base64Literal * <br/> * Base16Literal := base16 WS ` HexGroup* WS ` * <br/> * Base64Literal := base64 WS ` Base64Group* [PaddedBase64Group] WS ` * </code> * * @return parsed node */ private STNode parseByteArrayLiteral() { STNode type; if (peek().kind == SyntaxKind.BASE16_KEYWORD) { type = parseBase16Keyword(); } else { type = parseBase64Keyword(); } STNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START); if (startingBackTick.isMissing()) { startingBackTick = SyntaxErrors.createMissingToken(SyntaxKind.BACKTICK_TOKEN); STNode endingBackTick = SyntaxErrors.createMissingToken(SyntaxKind.BACKTICK_TOKEN); STNode content = STNodeFactory.createEmptyNode(); STNode byteArrayLiteral = STNodeFactory.createByteArrayLiteralNode(type, startingBackTick, content, endingBackTick); byteArrayLiteral = SyntaxErrors.addDiagnostic(byteArrayLiteral, DiagnosticErrorCode.ERROR_MISSING_BYTE_ARRAY_CONTENT); return byteArrayLiteral; } STNode content = parseByteArrayContent(); return parseByteArrayLiteral(type, startingBackTick, content); } /** * Parse byte array literal. 
* * @param typeKeyword keyword token, possible values are `base16` and `base64` * @param startingBackTick starting backtick token * @param byteArrayContent byte array literal content to be validated * @return parsed byte array literal node */ private STNode parseByteArrayLiteral(STNode typeKeyword, STNode startingBackTick, STNode byteArrayContent) { STNode content = STNodeFactory.createEmptyNode(); STNode newStartingBackTick = startingBackTick; STNodeList items = (STNodeList) byteArrayContent; if (items.size() == 1) { STNode item = items.get(0); if (typeKeyword.kind == SyntaxKind.BASE16_KEYWORD && !isValidBase16LiteralContent(item.toString())) { newStartingBackTick = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startingBackTick, item, DiagnosticErrorCode.ERROR_INVALID_BASE16_CONTENT_IN_BYTE_ARRAY_LITERAL); } else if (typeKeyword.kind == SyntaxKind.BASE64_KEYWORD && !isValidBase64LiteralContent(item.toString())) { newStartingBackTick = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startingBackTick, item, DiagnosticErrorCode.ERROR_INVALID_BASE64_CONTENT_IN_BYTE_ARRAY_LITERAL); } else if (item.kind != SyntaxKind.TEMPLATE_STRING) { newStartingBackTick = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startingBackTick, item, DiagnosticErrorCode.ERROR_INVALID_CONTENT_IN_BYTE_ARRAY_LITERAL); } else { content = item; } } else if (items.size() > 1) { STNode clonedStartingBackTick = startingBackTick; for (int index = 0; index < items.size(); index++) { STNode item = items.get(index); clonedStartingBackTick = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(clonedStartingBackTick, item); } newStartingBackTick = SyntaxErrors.addDiagnostic(clonedStartingBackTick, DiagnosticErrorCode.ERROR_INVALID_CONTENT_IN_BYTE_ARRAY_LITERAL); } STNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_END); return STNodeFactory.createByteArrayLiteralNode(typeKeyword, newStartingBackTick, content, endingBackTick); } /** * Parse <code>base16</code> keyword. 
* * @return base16 keyword node */ private STNode parseBase16Keyword() { STToken token = peek(); if (token.kind == SyntaxKind.BASE16_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.BASE16_KEYWORD); return parseBase16Keyword(); } } /** * Parse <code>base64</code> keyword. * * @return base64 keyword node */ private STNode parseBase64Keyword() { STToken token = peek(); if (token.kind == SyntaxKind.BASE64_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.BASE64_KEYWORD); return parseBase64Keyword(); } } /** * Validate and parse byte array literal content. * An error is reported, if the content is invalid. * * @return parsed node */ private STNode parseByteArrayContent() { STToken nextToken = peek(); List<STNode> items = new ArrayList<>(); while (!isEndOfBacktickContent(nextToken.kind)) { STNode content = parseTemplateItem(); items.add(content); nextToken = peek(); } return STNodeFactory.createNodeList(items); } /** * Validate base16 literal content. * <p> * <code> * Base16Literal := base16 WS ` HexGroup* WS ` * <br/> * HexGroup := WS HexDigit WS HexDigit * <br/> * WS := WhiteSpaceChar* * <br/> * WhiteSpaceChar := 0x9 | 0xA | 0xD | 0x20 * </code> * * @param content the string surrounded by the backticks * @return <code>true</code>, if the string content is valid. <code>false</code> otherwise. */ static boolean isValidBase16LiteralContent(String content) { char[] charArray = content.toCharArray(); int hexDigitCount = 0; for (char c : charArray) { switch (c) { case LexerTerminals.TAB: case LexerTerminals.NEWLINE: case LexerTerminals.CARRIAGE_RETURN: case LexerTerminals.SPACE: break; default: if (isHexDigit(c)) { hexDigitCount++; } else { return false; } break; } } return hexDigitCount % 2 == 0; } /** * Validate base64 literal content. 
* <p> * <code> * Base64Literal := base64 WS ` Base64Group* [PaddedBase64Group] WS ` * <br/> * Base64Group := WS Base64Char WS Base64Char WS Base64Char WS Base64Char * <br/> * PaddedBase64Group := * WS Base64Char WS Base64Char WS Base64Char WS PaddingChar * | WS Base64Char WS Base64Char WS PaddingChar WS PaddingChar * <br/> * Base64Char := A .. Z | a .. z | 0 .. 9 | + | / * <br/> * PaddingChar := = * <br/> * WS := WhiteSpaceChar* * <br/> * WhiteSpaceChar := 0x9 | 0xA | 0xD | 0x20 * </code> * * @param content the string surrounded by the backticks * @return <code>true</code>, if the string content is valid. <code>false</code> otherwise. */ static boolean isValidBase64LiteralContent(String content) { char[] charArray = content.toCharArray(); int base64CharCount = 0; int paddingCharCount = 0; for (char c : charArray) { switch (c) { case LexerTerminals.TAB: case LexerTerminals.NEWLINE: case LexerTerminals.CARRIAGE_RETURN: case LexerTerminals.SPACE: break; case LexerTerminals.EQUAL: paddingCharCount++; break; default: if (isBase64Char(c)) { if (paddingCharCount == 0) { base64CharCount++; } else { return false; } } else { return false; } break; } } if (paddingCharCount > 2) { return false; } else if (paddingCharCount == 0) { return base64CharCount % 4 == 0; } else { return base64CharCount % 4 == 4 - paddingCharCount; } } /** * <p> * Check whether a given char is a base64 char. * </p> * <code>Base64Char := A .. Z | a .. z | 0 .. 9 | + | /</code> * * @param c character to check * @return <code>true</code>, if the character represents a base64 char. <code>false</code> otherwise. 
*/ static boolean isBase64Char(int c) { if ('a' <= c && c <= 'z') { return true; } if ('A' <= c && c <= 'Z') { return true; } if (c == '+' || c == '/') { return true; } return isDigit(c); } static boolean isHexDigit(int c) { if ('a' <= c && c <= 'f') { return true; } if ('A' <= c && c <= 'F') { return true; } return isDigit(c); } static boolean isDigit(int c) { return ('0' <= c && c <= '9'); } /** * Parse xml filter expression. * <p> * <code>xml-filter-expr := expression .< xml-name-pattern ></code> * * @param lhsExpr Preceding expression of .< token * @return Parsed node */ private STNode parseXMLFilterExpression(STNode lhsExpr) { STNode xmlNamePatternChain = parseXMLFilterExpressionRhs(); return STNodeFactory.createXMLFilterExpressionNode(lhsExpr, xmlNamePatternChain); } /** * Parse xml filter expression rhs. * <p> * <code>filer-expression-rhs := .< xml-name-pattern ></code> * * @return Parsed node */ private STNode parseXMLFilterExpressionRhs() { STNode dotLTToken = parseDotLTToken(); return parseXMLNamePatternChain(dotLTToken); } /** * Parse xml name pattern chain. * <p> * <code> * xml-name-pattern-chain := filer-expression-rhs | xml-element-children-step | xml-element-descendants-step * <br/> * filer-expression-rhs := .< xml-name-pattern > * <br/> * xml-element-children-step := /< xml-name-pattern > * <br/> * xml-element-descendants-step := /**\/<xml-name-pattern > * </code> * * @param startToken Preceding token of xml name pattern * @return Parsed node */ private STNode parseXMLNamePatternChain(STNode startToken) { startContext(ParserRuleContext.XML_NAME_PATTERN); STNode xmlNamePattern = parseXMLNamePattern(); STNode gtToken = parseGTToken(); endContext(); startToken = cloneWithDiagnosticIfListEmpty(xmlNamePattern, startToken, DiagnosticErrorCode.ERROR_MISSING_XML_ATOMIC_NAME_PATTERN); return STNodeFactory.createXMLNamePatternChainingNode(startToken, xmlNamePattern, gtToken); } /** * Parse <code> .< </code> token. 
* * @return Parsed node */ private STNode parseDotLTToken() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.DOT_LT_TOKEN) { return consume(); } else { recover(nextToken, ParserRuleContext.DOT_LT_TOKEN); return parseDotLTToken(); } } /** * Parse xml name pattern. * <p> * <code>xml-name-pattern := xml-atomic-name-pattern [| xml-atomic-name-pattern]*</code> * * @return Parsed node */ private STNode parseXMLNamePattern() { List<STNode> xmlAtomicNamePatternList = new ArrayList<>(); STToken nextToken = peek(); if (isEndOfXMLNamePattern(nextToken.kind)) { return STNodeFactory.createNodeList(xmlAtomicNamePatternList); } STNode xmlAtomicNamePattern = parseXMLAtomicNamePattern(); xmlAtomicNamePatternList.add(xmlAtomicNamePattern); STNode separator; while (!isEndOfXMLNamePattern(peek().kind)) { separator = parseXMLNamePatternSeparator(); if (separator == null) { break; } xmlAtomicNamePatternList.add(separator); xmlAtomicNamePattern = parseXMLAtomicNamePattern(); xmlAtomicNamePatternList.add(xmlAtomicNamePattern); } return STNodeFactory.createNodeList(xmlAtomicNamePatternList); } private boolean isEndOfXMLNamePattern(SyntaxKind tokenKind) { switch (tokenKind) { case GT_TOKEN: case EOF_TOKEN: return true; case IDENTIFIER_TOKEN: case ASTERISK_TOKEN: case COLON_TOKEN: default: return false; } } private STNode parseXMLNamePatternSeparator() { STToken token = peek(); switch (token.kind) { case PIPE_TOKEN: return consume(); case GT_TOKEN: case EOF_TOKEN: return null; default: recover(token, ParserRuleContext.XML_NAME_PATTERN_RHS); return parseXMLNamePatternSeparator(); } } /** * Parse xml atomic name pattern. 
* <p> * <code> * xml-atomic-name-pattern := * * * | identifier * | xml-namespace-prefix : identifier * | xml-namespace-prefix : * * </code> * * @return Parsed node */ private STNode parseXMLAtomicNamePattern() { startContext(ParserRuleContext.XML_ATOMIC_NAME_PATTERN); STNode atomicNamePattern = parseXMLAtomicNamePatternBody(); endContext(); return atomicNamePattern; } private STNode parseXMLAtomicNamePatternBody() { STToken token = peek(); STNode identifier; switch (token.kind) { case ASTERISK_TOKEN: return consume(); case IDENTIFIER_TOKEN: identifier = consume(); break; default: recover(token, ParserRuleContext.XML_ATOMIC_NAME_PATTERN_START); return parseXMLAtomicNamePatternBody(); } return parseXMLAtomicNameIdentifier(identifier); } private STNode parseXMLAtomicNameIdentifier(STNode identifier) { STToken token = peek(); if (token.kind == SyntaxKind.COLON_TOKEN) { STNode colon = consume(); STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN || nextToken.kind == SyntaxKind.ASTERISK_TOKEN) { STToken endToken = consume(); return STNodeFactory.createXMLAtomicNamePatternNode(identifier, colon, endToken); } } return STNodeFactory.createSimpleNameReferenceNode(identifier); } /** * Parse xml step expression. * <p> * <code>xml-step-expr := expression xml-step-start</code> * * @param lhsExpr Preceding expression of /*, /<, or /**\/< token * @return Parsed node */ private STNode parseXMLStepExpression(STNode lhsExpr) { STNode xmlStepStart = parseXMLStepStart(); return STNodeFactory.createXMLStepExpressionNode(lhsExpr, xmlStepStart); } /** * Parse xml filter expression rhs. 
* <p> * <code> * xml-step-start := * xml-all-children-step * | xml-element-children-step * | xml-element-descendants-step * <br/> * xml-all-children-step := /* * </code> * * @return Parsed node */ private STNode parseXMLStepStart() { STToken token = peek(); STNode startToken; switch (token.kind) { case SLASH_ASTERISK_TOKEN: return consume(); case DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN: startToken = parseDoubleSlashDoubleAsteriskLTToken(); break; case SLASH_LT_TOKEN: default: startToken = parseSlashLTToken(); break; } return parseXMLNamePatternChain(startToken); } /** * Parse <code> /< </code> token. * * @return Parsed node */ private STNode parseSlashLTToken() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.SLASH_LT_TOKEN) { return consume(); } else { recover(nextToken, ParserRuleContext.SLASH_LT_TOKEN); return parseSlashLTToken(); } } /** * Parse <code> /< </code> token. * * @return Parsed node */ private STNode parseDoubleSlashDoubleAsteriskLTToken() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN) { return consume(); } else { recover(nextToken, ParserRuleContext.DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN); return parseDoubleSlashDoubleAsteriskLTToken(); } } /** * Parse match statement. * <p> * <code>match-stmt := match action-or-expr { match-clause+ } [on-fail-clause]</code> * * @return Match statement */ private STNode parseMatchStatement() { startContext(ParserRuleContext.MATCH_STMT); STNode matchKeyword = parseMatchKeyword(); STNode actionOrExpr = parseActionOrExpression(); startContext(ParserRuleContext.MATCH_BODY); STNode openBrace = parseOpenBrace(); STNode matchClauses = parseMatchClauses(); STNode closeBrace = parseCloseBrace(); endContext(); endContext(); STNode onFailClause = parseOptionalOnFailClause(); return STNodeFactory.createMatchStatementNode(matchKeyword, actionOrExpr, openBrace, matchClauses, closeBrace, onFailClause); } /** * Parse match keyword. 
* * @return Match keyword node */ private STNode parseMatchKeyword() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.MATCH_KEYWORD) { return consume(); } else { recover(nextToken, ParserRuleContext.MATCH_KEYWORD); return parseMatchKeyword(); } } /** * Parse match clauses list. * * @return Match clauses list */ private STNode parseMatchClauses() { List<STNode> matchClauses = new ArrayList<>(); while (!isEndOfMatchClauses(peek().kind)) { STNode clause = parseMatchClause(); matchClauses.add(clause); } return STNodeFactory.createNodeList(matchClauses); } private boolean isEndOfMatchClauses(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case EOF_TOKEN: case CLOSE_BRACE_TOKEN: return true; default: return false; } } /** * Parse a single match match clause. * <p> * <code> * match-clause := match-pattern-list [match-guard] => block-stmt * <br/> * match-guard := if expression * </code> * * @return A match clause */ private STNode parseMatchClause() { STNode matchPatterns = parseMatchPatternList(); STNode matchGuard = parseMatchGuard(); STNode rightDoubleArrow = parseDoubleRightArrow(); STNode blockStmt = parseBlockNode(); return STNodeFactory.createMatchClauseNode(matchPatterns, matchGuard, rightDoubleArrow, blockStmt); } /** * Parse match guard. * <p> * <code>match-guard := if expression</code> * * @return Match guard */ private STNode parseMatchGuard() { switch (peek().kind) { case IF_KEYWORD: STNode ifKeyword = parseIfKeyword(); STNode expr = parseExpression(DEFAULT_OP_PRECEDENCE, true, false, true, false); return STNodeFactory.createMatchGuardNode(ifKeyword, expr); case RIGHT_DOUBLE_ARROW_TOKEN: return STNodeFactory.createEmptyNode(); default: recover(peek(), ParserRuleContext.OPTIONAL_MATCH_GUARD); return parseMatchGuard(); } } /** * Parse match patterns list. 
* <p>
 * <code>match-pattern-list := match-pattern (| match-pattern)*</code>
 *
 * @return Match patterns list
 */
private STNode parseMatchPatternList() {
    startContext(ParserRuleContext.MATCH_PATTERN);
    List<STNode> matchPatterns = new ArrayList<>();
    while (!isEndOfMatchPattern(peek().kind)) {
        STNode matchPattern = parseMatchPattern();
        if (matchPattern == null) {
            break;
        }
        matchPatterns.add(matchPattern);
        STNode separator = parseMatchPatternEnd();
        if (separator == null) {
            break;
        }
        matchPatterns.add(separator);
    }
    endContext();
    return STNodeFactory.createNodeList(matchPatterns);
}

// Returns true when the token terminates a match-pattern-list.
private boolean isEndOfMatchPattern(SyntaxKind nextTokenKind) {
    switch (nextTokenKind) {
        case PIPE_TOKEN:
        case IF_KEYWORD:
        // A match clause continues with `=>` after the pattern list, so the
        // right-double-arrow terminates the list. The previous
        // RIGHT_ARROW_TOKEN (`->`) could never legally follow a match
        // pattern and failed to stop at `=>`; this now matches
        // isMatchPatternEnd() and parseMatchPatternEnd().
        case RIGHT_DOUBLE_ARROW_TOKEN:
            return true;
        default:
            return false;
    }
}

/**
 * Parse match pattern.
 * <p>
 * <code>
 * match-pattern := var binding-pattern
 *                | wildcard-match-pattern
 *                | const-pattern
 *                | list-match-pattern
 *                | mapping-match-pattern
 *                | functional-match-pattern
 * </code>
 *
 * @return Match pattern
 */
private STNode parseMatchPattern() {
    switch (peek().kind) {
        case OPEN_PAREN_TOKEN:
        case NULL_KEYWORD:
        case TRUE_KEYWORD:
        case FALSE_KEYWORD:
        case PLUS_TOKEN:
        case MINUS_TOKEN:
        case DECIMAL_INTEGER_LITERAL_TOKEN:
        case HEX_INTEGER_LITERAL_TOKEN:
        case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
        case HEX_FLOATING_POINT_LITERAL_TOKEN:
        case STRING_LITERAL_TOKEN:
            return parseSimpleConstExpr();
        case IDENTIFIER_TOKEN:
            // Could be a const-pattern or a functional match pattern;
            // disambiguated by the token following the identifier.
            STNode typeRefOrConstExpr = parseQualifiedIdentifier(ParserRuleContext.MATCH_PATTERN);
            return parseFunctionalMatchPatternOrConsPattern(typeRefOrConstExpr);
        case VAR_KEYWORD:
            return parseVarTypedBindingPattern();
        case OPEN_BRACKET_TOKEN:
            return parseListMatchPattern();
        case OPEN_BRACE_TOKEN:
            return parseMappingMatchPattern();
        case ERROR_KEYWORD:
            return parseFunctionalMatchPattern(consume());
        default:
            recover(peek(), ParserRuleContext.MATCH_PATTERN_START);
            return parseMatchPattern();
    }
}

private STNode parseMatchPatternEnd() {
    switch (peek().kind) {
        case PIPE_TOKEN:
            return parsePipeToken();
        case
IF_KEYWORD: case RIGHT_DOUBLE_ARROW_TOKEN: return null; default: recover(peek(), ParserRuleContext.MATCH_PATTERN_RHS); return parseMatchPatternEnd(); } } /** * Parse var typed binding pattern. * <p> * <code>var binding-pattern</code> * </p> * * @return Parsed typed binding pattern node */ private STNode parseVarTypedBindingPattern() { STNode varKeyword = parseVarKeyword(); STNode bindingPattern = parseBindingPattern(); return STNodeFactory.createTypedBindingPatternNode(varKeyword, bindingPattern); } /** * Parse var keyword. * * @return Var keyword node */ private STNode parseVarKeyword() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.VAR_KEYWORD) { return consume(); } else { recover(nextToken, ParserRuleContext.VAR_KEYWORD); return parseVarKeyword(); } } /** * Parse list match pattern. * <p> * <code> * list-match-pattern := [ list-member-match-patterns ] * list-member-match-patterns := * match-pattern (, match-pattern)* [, rest-match-pattern] * | [ rest-match-pattern ] * </code> * </p> * * @return Parsed list match pattern node */ private STNode parseListMatchPattern() { startContext(ParserRuleContext.LIST_MATCH_PATTERN); STNode openBracketToken = parseOpenBracket(); List<STNode> matchPatternList = new ArrayList<>(); STNode restMatchPattern = null; STNode listMatchPatternMemberRhs = null; boolean isEndOfFields = false; while (!isEndOfListMatchPattern()) { STNode listMatchPatternMember = parseListMatchPatternMember(); if (listMatchPatternMember.kind == SyntaxKind.REST_MATCH_PATTERN) { restMatchPattern = listMatchPatternMember; listMatchPatternMemberRhs = parseListMatchPatternMemberRhs(); isEndOfFields = true; break; } matchPatternList.add(listMatchPatternMember); listMatchPatternMemberRhs = parseListMatchPatternMemberRhs(); if (listMatchPatternMemberRhs != null) { matchPatternList.add(listMatchPatternMemberRhs); } else { break; } } while (isEndOfFields && listMatchPatternMemberRhs != null) { STNode invalidField = parseListMatchPatternMember(); 
restMatchPattern = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(restMatchPattern, listMatchPatternMemberRhs); restMatchPattern = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(restMatchPattern, invalidField); restMatchPattern = SyntaxErrors.addDiagnostic(restMatchPattern, DiagnosticErrorCode.ERROR_MORE_MATCH_PATTERNS_AFTER_REST_MATCH_PATTERN); listMatchPatternMemberRhs = parseListMatchPatternMemberRhs(); } if (restMatchPattern == null) { restMatchPattern = STNodeFactory.createEmptyNode(); } STNode matchPatternListNode = STNodeFactory.createNodeList(matchPatternList); STNode closeBracketToken = parseCloseBracket(); endContext(); return STNodeFactory.createListMatchPatternNode(openBracketToken, matchPatternListNode, restMatchPattern, closeBracketToken); } public boolean isEndOfListMatchPattern() { switch (peek().kind) { case CLOSE_BRACKET_TOKEN: case EOF_TOKEN: return true; default: return false; } } private STNode parseListMatchPatternMember() { STNode nextToken = peek(); switch (nextToken.kind) { case ELLIPSIS_TOKEN: return parseRestMatchPattern(); default: return parseMatchPattern(); } } /** * Parse rest match pattern. * <p> * <code> * rest-match-pattern := ... 
var variable-name * </code> * </p> * * @return Parsed rest match pattern node */ private STNode parseRestMatchPattern() { startContext(ParserRuleContext.REST_MATCH_PATTERN); STNode ellipsisToken = parseEllipsis(); STNode varKeywordToken = parseVarKeyword(); STNode variableName = parseVariableName(); endContext(); STSimpleNameReferenceNode simpleNameReferenceNode = (STSimpleNameReferenceNode) STNodeFactory.createSimpleNameReferenceNode(variableName); return STNodeFactory.createRestMatchPatternNode(ellipsisToken, varKeywordToken, simpleNameReferenceNode); } private STNode parseListMatchPatternMemberRhs() { switch (peek().kind) { case COMMA_TOKEN: return parseComma(); case CLOSE_BRACKET_TOKEN: case EOF_TOKEN: return null; default: recover(peek(), ParserRuleContext.LIST_MATCH_PATTERN_MEMBER_RHS); return parseListMatchPatternMemberRhs(); } } /** * Parse mapping match pattern. * <p> * mapping-match-pattern := { field-match-patterns } * <br/> * field-match-patterns := field-match-pattern (, field-match-pattern)* [, rest-match-pattern] * | [ rest-match-pattern ] * <br/> * field-match-pattern := field-name : match-pattern * <br/> * rest-match-pattern := ... var variable-name * </p> * * @return Parsed Node. 
*/ private STNode parseMappingMatchPattern() { startContext(ParserRuleContext.MAPPING_MATCH_PATTERN); STNode openBraceToken = parseOpenBrace(); List<STNode> fieldMatchPatternList = new ArrayList<>(); STNode restMatchPattern = null; boolean isEndOfFields = false; while (!isEndOfMappingMatchPattern()) { STNode fieldMatchPatternMember = parseFieldMatchPatternMember(); if (fieldMatchPatternMember.kind == SyntaxKind.REST_MATCH_PATTERN) { restMatchPattern = fieldMatchPatternMember; isEndOfFields = true; break; } fieldMatchPatternList.add(fieldMatchPatternMember); STNode fieldMatchPatternRhs = parseFieldMatchPatternRhs(); if (fieldMatchPatternRhs != null) { fieldMatchPatternList.add(fieldMatchPatternRhs); } else { break; } } STNode fieldMatchPatternRhs = parseFieldMatchPatternRhs(); while (isEndOfFields && fieldMatchPatternRhs != null) { STNode invalidField = parseFieldMatchPatternMember(); restMatchPattern = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(restMatchPattern, fieldMatchPatternRhs); restMatchPattern = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(restMatchPattern, invalidField); restMatchPattern = SyntaxErrors.addDiagnostic(restMatchPattern, DiagnosticErrorCode.ERROR_MORE_FIELD_MATCH_PATTERNS_AFTER_REST_FIELD); fieldMatchPatternRhs = parseFieldMatchPatternRhs(); } if (restMatchPattern == null) { restMatchPattern = STNodeFactory.createEmptyNode(); } STNode fieldMatchPatterns = STNodeFactory.createNodeList(fieldMatchPatternList); STNode closeBraceToken = parseCloseBrace(); endContext(); return STNodeFactory.createMappingMatchPatternNode(openBraceToken, fieldMatchPatterns, restMatchPattern, closeBraceToken); } private STNode parseFieldMatchPatternMember() { switch (peek().kind) { case IDENTIFIER_TOKEN: return parseFieldMatchPattern(); case ELLIPSIS_TOKEN: return parseRestMatchPattern(); default: recover(peek(), ParserRuleContext.FIELD_MATCH_PATTERN_MEMBER); return parseFieldMatchPatternMember(); } } /** * Parse filed match pattern. 
* <p> * field-match-pattern := field-name : match-pattern * </p> * * @return Parsed field match pattern node */ public STNode parseFieldMatchPattern() { STNode fieldNameNode = parseVariableName(); STNode colonToken = parseColon(); STNode matchPattern = parseMatchPattern(); return STNodeFactory.createFieldMatchPatternNode(fieldNameNode, colonToken, matchPattern); } public boolean isEndOfMappingMatchPattern() { switch (peek().kind) { case CLOSE_BRACE_TOKEN: case EOF_TOKEN: return true; default: return false; } } private STNode parseFieldMatchPatternRhs() { switch (peek().kind) { case COMMA_TOKEN: return parseComma(); case CLOSE_BRACE_TOKEN: case EOF_TOKEN: return null; default: recover(peek(), ParserRuleContext.FIELD_MATCH_PATTERN_MEMBER_RHS); return parseFieldMatchPatternRhs(); } } private STNode parseFunctionalMatchPatternOrConsPattern(STNode typeRefOrConstExpr) { return parseFunctionalMatchPatternOrConsPattern(peek().kind, typeRefOrConstExpr); } private STNode parseFunctionalMatchPatternOrConsPattern(SyntaxKind nextToken, STNode typeRefOrConstExpr) { switch (nextToken) { case OPEN_PAREN_TOKEN: return parseFunctionalMatchPattern(typeRefOrConstExpr); default: if (isMatchPatternEnd(peek().kind)) { return typeRefOrConstExpr; } Solution solution = recover(peek(), ParserRuleContext.FUNC_MATCH_PATTERN_OR_CONST_PATTERN, typeRefOrConstExpr); return parseFunctionalMatchPatternOrConsPattern(solution.tokenKind, typeRefOrConstExpr); } } private boolean isMatchPatternEnd(SyntaxKind tokenKind) { switch (tokenKind) { case RIGHT_DOUBLE_ARROW_TOKEN: case COMMA_TOKEN: case CLOSE_BRACE_TOKEN: case CLOSE_BRACKET_TOKEN: case CLOSE_PAREN_TOKEN: case PIPE_TOKEN: case IF_KEYWORD: case EOF_TOKEN: return true; default: return false; } } /** * Parse functional match pattern. 
* <p> * functional-match-pattern := functionally-constructible-type-reference ( arg-list-match-pattern ) * <br/> * functionally-constructible-type-reference := error | type-reference * <br/> * type-reference := identifier | qualified-identifier * <br/> * arg-list-match-pattern := positional-arg-match-patterns [, other-arg-match-patterns] * | other-arg-match-patterns * </p> * * @return Parsed functional match pattern node. */ private STNode parseFunctionalMatchPattern(STNode typeRef) { startContext(ParserRuleContext.FUNCTIONAL_MATCH_PATTERN); STNode openParenthesisToken = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS); STNode argListMatchPatternNode = parseArgListMatchPatterns(); STNode closeParenthesisToken = parseCloseParenthesis(); endContext(); return STNodeFactory.createFunctionalMatchPatternNode(typeRef, openParenthesisToken, argListMatchPatternNode, closeParenthesisToken); } private STNode parseArgListMatchPatterns() { List<STNode> argListMatchPatterns = new ArrayList<>(); SyntaxKind lastValidArgKind = SyntaxKind.IDENTIFIER_TOKEN; while (!isEndOfFunctionalMatchPattern()) { STNode currentArg = parseArgMatchPattern(); DiagnosticErrorCode errorCode = validateArgMatchPatternOrder(lastValidArgKind, currentArg.kind); if (errorCode == null) { argListMatchPatterns.add(currentArg); lastValidArgKind = currentArg.kind; } else { updateLastNodeInListWithInvalidNode(argListMatchPatterns, currentArg, errorCode); } STNode argRhs = parseArgMatchPatternRhs(); if (argRhs == null) { break; } if (errorCode == null) { argListMatchPatterns.add(argRhs); } else { updateLastNodeInListWithInvalidNode(argListMatchPatterns, argRhs, null); } } return STNodeFactory.createNodeList(argListMatchPatterns); } private boolean isEndOfFunctionalMatchPattern() { switch (peek().kind) { case CLOSE_PAREN_TOKEN: case EOF_TOKEN: return true; default: return false; } } /** * Parse arg match patterns. 
* <code>
 * arg-match-pattern := match-pattern | named-arg-match-pattern | rest-match-pattern
 * </code>
 * <br/>
 * <br/>
 *
 * @return parsed arg match pattern node.
 */
private STNode parseArgMatchPattern() {
    switch (peek().kind) {
        case IDENTIFIER_TOKEN:
            // An identifier may start a positional arg, a named arg, or a functional match pattern.
            return parseNamedOrPositionalArgMatchPattern();
        case ELLIPSIS_TOKEN:
            return parseRestMatchPattern();
        case OPEN_PAREN_TOKEN:
        case NULL_KEYWORD:
        case TRUE_KEYWORD:
        case FALSE_KEYWORD:
        case PLUS_TOKEN:
        case MINUS_TOKEN:
        case DECIMAL_INTEGER_LITERAL_TOKEN:
        case HEX_INTEGER_LITERAL_TOKEN:
        case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
        case HEX_FLOATING_POINT_LITERAL_TOKEN:
        case STRING_LITERAL_TOKEN:
        case VAR_KEYWORD:
        case OPEN_BRACKET_TOKEN:
        case OPEN_BRACE_TOKEN:
        case ERROR_KEYWORD:
            return parseMatchPattern();
        default:
            // Unexpected token: run error recovery, then retry.
            recover(peek(), ParserRuleContext.ARG_MATCH_PATTERN);
            return parseArgMatchPattern();
    }
}

// Disambiguates an identifier-started arg match pattern: `name = pattern` is a named arg,
// `name(...)` is a functional match pattern, otherwise the identifier itself is the pattern.
private STNode parseNamedOrPositionalArgMatchPattern() {
    STNode identifier = parseIdentifier(ParserRuleContext.MATCH_PATTERN_START);
    switch (peek().kind) {
        case EQUAL_TOKEN:
            return parseNamedArgMatchPattern(identifier);
        case OPEN_PAREN_TOKEN:
            return parseFunctionalMatchPattern(identifier);
        case COMMA_TOKEN:
        case CLOSE_PAREN_TOKEN:
        default:
            return identifier;
    }
}

/**
 * Parses the next named arg match pattern.
 * <br/>
 * <code>named-arg-match-pattern := arg-name = match-pattern</code>
 * <br/>
 * <br/>
 *
 * @return arg match pattern list node added the new arg match pattern
 */
private STNode parseNamedArgMatchPattern(STNode identifier) {
    startContext(ParserRuleContext.NAMED_ARG_MATCH_PATTERN);
    STNode equalToken = parseAssignOp();
    STNode matchPattern = parseMatchPattern();
    endContext();
    return STNodeFactory.createNamedArgMatchPatternNode(identifier, equalToken, matchPattern);
}

// Parses the separator after an arg match pattern: a comma, or null when the arg list ends.
private STNode parseArgMatchPatternRhs() {
    switch (peek().kind) {
        case COMMA_TOKEN:
            return parseComma();
        case CLOSE_PAREN_TOKEN:
        case EOF_TOKEN:
            // End of the arg list.
            return null;
        default:
            recover(peek(), ParserRuleContext.ARG_MATCH_PATTERN_RHS);
            return parseArgMatchPatternRhs();
    }
}

// Validates arg ordering: a positional arg may not follow a named arg, and nothing may follow
// a rest arg. Returns the diagnostic to attach, or null when the order is valid.
private DiagnosticErrorCode validateArgMatchPatternOrder(SyntaxKind prevArgKind, SyntaxKind currentArgKind) {
    DiagnosticErrorCode errorCode = null;
    switch (prevArgKind) {
        case NAMED_ARG_MATCH_PATTERN:
            if (currentArgKind != SyntaxKind.NAMED_ARG_MATCH_PATTERN &&
                    currentArgKind != SyntaxKind.REST_MATCH_PATTERN) {
                errorCode = DiagnosticErrorCode.ERROR_NAMED_ARG_FOLLOWED_BY_POSITIONAL_ARG;
            }
            break;
        case REST_MATCH_PATTERN:
            errorCode = DiagnosticErrorCode.ERROR_ARG_FOLLOWED_BY_REST_ARG;
            break;
        default:
            break;
    }
    return errorCode;
}

/**
 * Parse markdown documentation.
 *
 * @return markdown documentation node
 */
private STNode parseMarkdownDocumentation() {
    List<STNode> markdownDocLineList = new ArrayList<>();
    STToken nextToken = peek();
    // Consecutive documentation-string tokens are merged into one markdown documentation node.
    while (nextToken.kind == SyntaxKind.DOCUMENTATION_STRING) {
        STToken documentationString = consume();
        STNode parsedDocLines = parseDocumentationString(documentationString);
        appendParsedDocumentationLines(markdownDocLineList, parsedDocLines);
        nextToken = peek();
    }
    STNode markdownDocLines = STNodeFactory.createNodeList(markdownDocLineList);
    return STNodeFactory.createMarkdownDocumentationNode(markdownDocLines);
}

/**
 * Parse documentation string.
 *
 * @return markdown documentation line list node
 */
private STNode parseDocumentationString(STToken documentationStringToken) {
    // Re-lex and re-parse the raw documentation token text with the dedicated
    // documentation lexer/parser, preserving the token's leading minutiae.
    List<STNode> leadingTriviaList = getLeadingTriviaList(documentationStringToken.leadingMinutiae());
    CharReader charReader = CharReader.from(documentationStringToken.text());
    DocumentationLexer documentationLexer = new DocumentationLexer(charReader, leadingTriviaList);
    AbstractTokenReader tokenReader = new TokenReader(documentationLexer);
    DocumentationParser documentationParser = new DocumentationParser(tokenReader);
    return documentationParser.parse();
}

// Flattens the buckets of a leading-minutiae node into a list.
private List<STNode> getLeadingTriviaList(STNode leadingMinutiaeNode) {
    List<STNode> leadingTriviaList = new ArrayList<>();
    int bucketCount = leadingMinutiaeNode.bucketCount();
    for (int i = 0; i < bucketCount; i++) {
        leadingTriviaList.add(leadingMinutiaeNode.childInBucket(i));
    }
    return leadingTriviaList;
}

// Appends each parsed documentation line child to the accumulating list.
private void appendParsedDocumentationLines(List<STNode> markdownDocLineList, STNode parsedDocLines) {
    int bucketCount = parsedDocLines.bucketCount();
    for (int i = 0; i < bucketCount; i++) {
        STNode markdownDocLine = parsedDocLines.childInBucket(i);
        markdownDocLineList.add(markdownDocLine);
    }
}

/**
 * Parse any statement that starts with a token that has ambiguity between being
 * a type-desc or an expression.
*
 * @param annots Annotations
 * @return Statement node
 */
private STNode parseStmtStartsWithTypeOrExpr(STNode annots) {
    startContext(ParserRuleContext.AMBIGUOUS_STMT);
    STNode typeOrExpr = parseTypedBindingPatternOrExpr(true);
    return parseStmtStartsWithTypedBPOrExprRhs(annots, typeOrExpr);
}

// Continues an ambiguous statement once the prefix is resolved: a typed-binding-pattern
// becomes a var-decl statement; anything else is treated as an expression statement.
private STNode parseStmtStartsWithTypedBPOrExprRhs(STNode annots, STNode typedBindingPatternOrExpr) {
    if (typedBindingPatternOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) {
        STNode finalKeyword = STNodeFactory.createEmptyNode();
        switchContext(ParserRuleContext.VAR_DECL_STMT);
        return parseVarDeclRhs(annots, finalKeyword, typedBindingPatternOrExpr, false);
    }
    STNode expr = getExpression(typedBindingPatternOrExpr);
    expr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, expr, false, true);
    return parseStatementStartWithExprRhs(expr);
}

// Parses a construct that could be either a typed-binding-pattern or an expression,
// dispatching on the first token.
private STNode parseTypedBindingPatternOrExpr(boolean allowAssignment) {
    STToken nextToken = peek();
    STNode typeOrExpr;
    switch (nextToken.kind) {
        case OPEN_PAREN_TOKEN:
            return parseTypedBPOrExprStartsWithOpenParenthesis();
        case FUNCTION_KEYWORD:
            return parseAnonFuncExprOrTypedBPWithFuncType();
        case IDENTIFIER_TOKEN:
            typeOrExpr = parseQualifiedIdentifier(ParserRuleContext.TYPE_NAME_OR_VAR_NAME);
            return parseTypedBindingPatternOrExprRhs(typeOrExpr, allowAssignment);
        case OPEN_BRACKET_TOKEN:
            typeOrExpr = parseTypedDescOrExprStartsWithOpenBracket();
            return parseTypedBindingPatternOrExprRhs(typeOrExpr, allowAssignment);
        // Can be a singleton type or expression.
        case DECIMAL_INTEGER_LITERAL_TOKEN:
        case HEX_INTEGER_LITERAL_TOKEN:
        case STRING_LITERAL_TOKEN:
        case NULL_KEYWORD:
        case TRUE_KEYWORD:
        case FALSE_KEYWORD:
        case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
        case HEX_FLOATING_POINT_LITERAL_TOKEN:
            STNode basicLiteral = parseBasicLiteral();
            return parseTypedBindingPatternOrExprRhs(basicLiteral, allowAssignment);
        default:
            if (isValidExpressionStart(nextToken.kind, 1)) {
                return parseActionOrExpressionInLhs(null);
            }
            return parseTypedBindingPattern(ParserRuleContext.VAR_DECL_STMT);
    }
}

/**
 * Parse the component after the ambiguous starting node. Ambiguous node could be either an expr
 * or a type-desc. The component followed by this ambiguous node could be the binding-pattern or
 * the expression-rhs.
 *
 * @param typeOrExpr Type desc or the expression
 * @param allowAssignment Flag indicating whether to allow assignment. i.e.: whether this is a
 * valid lvalue expression
 * @return Typed-binding-pattern node or an expression node
 */
private STNode parseTypedBindingPatternOrExprRhs(STNode typeOrExpr, boolean allowAssignment) {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case PIPE_TOKEN:
            STToken nextNextToken = peek(2);
            if (nextNextToken.kind == SyntaxKind.EQUAL_TOKEN) {
                // `X | =` : the pipe cannot continue a union type here; stop and return.
                return typeOrExpr;
            }
            STNode pipe = parsePipeToken();
            STNode rhsTypedBPOrExpr = parseTypedBindingPatternOrExpr(allowAssignment);
            if (rhsTypedBPOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) {
                // RHS resolved as a typed BP, so the whole LHS must be a type-desc of a union.
                STTypedBindingPatternNode typedBP = (STTypedBindingPatternNode) rhsTypedBPOrExpr;
                typeOrExpr = getTypeDescFromExpr(typeOrExpr);
                STNode newTypeDesc = createUnionTypeDesc(typeOrExpr, pipe, typedBP.typeDescriptor);
                return STNodeFactory.createTypedBindingPatternNode(newTypeDesc, typedBP.bindingPattern);
            }
            return STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, typeOrExpr, pipe,
                    rhsTypedBPOrExpr);
        case BITWISE_AND_TOKEN:
            nextNextToken = peek(2);
            if (nextNextToken.kind == SyntaxKind.EQUAL_TOKEN) {
                return typeOrExpr;
            }
            STNode ampersand = parseBinaryOperator();
            rhsTypedBPOrExpr = parseTypedBindingPatternOrExpr(allowAssignment);
            if (rhsTypedBPOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) {
                // Same as the pipe case, but producing an intersection type-desc.
                STTypedBindingPatternNode typedBP = (STTypedBindingPatternNode) rhsTypedBPOrExpr;
                typeOrExpr = getTypeDescFromExpr(typeOrExpr);
                STNode newTypeDesc = createIntersectionTypeDesc(typeOrExpr, ampersand, typedBP.typeDescriptor);
                return STNodeFactory.createTypedBindingPatternNode(newTypeDesc, typedBP.bindingPattern);
            }
            return STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, typeOrExpr, ampersand,
                    rhsTypedBPOrExpr);
        case SEMICOLON_TOKEN:
            if (isDefiniteExpr(typeOrExpr.kind)) {
                return typeOrExpr;
            }
            if (isDefiniteTypeDesc(typeOrExpr.kind) || !isAllBasicLiterals(typeOrExpr)) {
                // Treat as a type-desc and continue as a typed binding pattern.
                STNode typeDesc = getTypeDescFromExpr(typeOrExpr);
                return parseTypeBindingPatternStartsWithAmbiguousNode(typeDesc);
            }
            return typeOrExpr;
        case IDENTIFIER_TOKEN:
        case QUESTION_MARK_TOKEN:
            if (isAmbiguous(typeOrExpr) || isDefiniteTypeDesc(typeOrExpr.kind)) {
                // A name or `?` following means the prefix must have been a type-desc.
                STNode typeDesc = getTypeDescFromExpr(typeOrExpr);
                return parseTypeBindingPatternStartsWithAmbiguousNode(typeDesc);
            }
            return typeOrExpr;
        case EQUAL_TOKEN:
            return typeOrExpr;
        case OPEN_BRACKET_TOKEN:
            return parseTypedBindingPatternOrMemberAccess(typeOrExpr, false, allowAssignment,
                    ParserRuleContext.AMBIGUOUS_STMT);
        case OPEN_BRACE_TOKEN: // mapping binding pattern
        case ERROR_KEYWORD: // error binding pattern
            STNode typeDesc = getTypeDescFromExpr(typeOrExpr);
            return parseTypeBindingPatternStartsWithAmbiguousNode(typeDesc);
        default:
            if (isCompoundBinaryOperator(nextToken.kind)) {
                return typeOrExpr;
            }
            if (isValidExprRhsStart(nextToken.kind, typeOrExpr.kind)) {
                return typeOrExpr;
            }
            STToken token = peek();
            recover(token, ParserRuleContext.BINDING_PATTERN_OR_EXPR_RHS, typeOrExpr, allowAssignment);
            return parseTypedBindingPatternOrExprRhs(typeOrExpr, allowAssignment);
    }
}

// Completes a var-decl whose type-desc began as an ambiguous node: finishes parsing the
// (possibly complex) type-desc, then parses the binding pattern that follows it.
private STNode parseTypeBindingPatternStartsWithAmbiguousNode(STNode typeDesc) {
    startContext(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN);
    typeDesc = parseComplexTypeDescriptor(typeDesc, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, false);
    endContext();
    return parseTypedBindingPatternTypeRhs(typeDesc, ParserRuleContext.VAR_DECL_STMT);
}

// Resolves an open-paren start: if it turns out to be a definite type-desc, continue as a
// typed binding pattern; otherwise keep disambiguating as expr-or-typed-BP.
private STNode parseTypedBPOrExprStartsWithOpenParenthesis() {
    STNode exprOrTypeDesc = parseTypedDescOrExprStartsWithOpenParenthesis();
    if (isDefiniteTypeDesc(exprOrTypeDesc.kind)) {
        return parseTypeBindingPatternStartsWithAmbiguousNode(exprOrTypeDesc);
    }
    return parseTypedBindingPatternOrExprRhs(exprOrTypeDesc, false);
}

// True when the node kind is unambiguously a type descriptor.
// Relies on the SyntaxKind enum ordering: all type-desc kinds lie in the
// [RECORD_TYPE_DESC, SINGLETON_TYPE_DESC] range.
private boolean isDefiniteTypeDesc(SyntaxKind kind) {
    return kind.compareTo(SyntaxKind.RECORD_TYPE_DESC) >= 0 && kind.compareTo(SyntaxKind.SINGLETON_TYPE_DESC) <= 0;
}

// True when the node kind is unambiguously an expression. Name references are excluded since
// they could still be type references. Also relies on SyntaxKind enum ordering.
private boolean isDefiniteExpr(SyntaxKind kind) {
    if (kind == SyntaxKind.QUALIFIED_NAME_REFERENCE || kind == SyntaxKind.SIMPLE_NAME_REFERENCE) {
        return false;
    }
    return kind.compareTo(SyntaxKind.BINARY_EXPRESSION) >= 0 &&
            kind.compareTo(SyntaxKind.XML_ATOMIC_NAME_PATTERN) <= 0;
}

/**
 * Parse type or expression that starts with open parenthesis. Possible options are:
 * 1) () - nil type-desc or nil-literal
 * 2) (T) - Parenthesized type-desc
 * 3) (expr) - Parenthesized expression
 * 4) (param, param, ..) - Anon function params
 *
 * @return Type-desc or expression node
 */
private STNode parseTypedDescOrExprStartsWithOpenParenthesis() {
    STNode openParen = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS);
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.CLOSE_PAREN_TOKEN) {
        STNode closeParen = parseCloseParenthesis();
        return parseTypeOrExprStartWithEmptyParenthesis(openParen, closeParen);
    }
    STNode typeOrExpr = parseTypeDescOrExpr();
    if (isAction(typeOrExpr)) {
        STNode closeParen = parseCloseParenthesis();
        return STNodeFactory.createBracedExpressionNode(SyntaxKind.BRACED_ACTION, openParen, typeOrExpr,
                closeParen);
    }
    if (isExpression(typeOrExpr.kind)) {
        startContext(ParserRuleContext.BRACED_EXPR_OR_ANON_FUNC_PARAMS);
        return parseBracedExprOrAnonFuncParamRhs(openParen, typeOrExpr, false);
    }
    STNode closeParen = parseCloseParenthesis();
    return STNodeFactory.createParenthesisedTypeDescriptorNode(openParen, typeOrExpr, closeParen);
}

/**
 * Parse type-desc or expression. This method does not handle binding patterns.
*
 * @return Type-desc node or expression node
 */
private STNode parseTypeDescOrExpr() {
    STToken nextToken = peek();
    STNode typeOrExpr;
    switch (nextToken.kind) {
        case OPEN_PAREN_TOKEN:
            typeOrExpr = parseTypedDescOrExprStartsWithOpenParenthesis();
            break;
        case FUNCTION_KEYWORD:
            typeOrExpr = parseAnonFuncExprOrFuncTypeDesc();
            break;
        case IDENTIFIER_TOKEN:
            typeOrExpr = parseQualifiedIdentifier(ParserRuleContext.TYPE_NAME_OR_VAR_NAME);
            return parseTypeDescOrExprRhs(typeOrExpr);
        case OPEN_BRACKET_TOKEN:
            typeOrExpr = parseTypedDescOrExprStartsWithOpenBracket();
            break;
        // Can be a singleton type or expression.
        case DECIMAL_INTEGER_LITERAL_TOKEN:
        case HEX_INTEGER_LITERAL_TOKEN:
        case STRING_LITERAL_TOKEN:
        case NULL_KEYWORD:
        case TRUE_KEYWORD:
        case FALSE_KEYWORD:
        case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
        case HEX_FLOATING_POINT_LITERAL_TOKEN:
            STNode basicLiteral = parseBasicLiteral();
            return parseTypeDescOrExprRhs(basicLiteral);
        default:
            if (isValidExpressionStart(nextToken.kind, 1)) {
                return parseActionOrExpressionInLhs(null);
            }
            return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN);
    }
    if (isDefiniteTypeDesc(typeOrExpr.kind)) {
        // Already resolved to a type-desc: continue parsing any array/optional/etc. suffixes.
        return parseComplexTypeDescriptor(typeOrExpr, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);
    }
    return parseTypeDescOrExprRhs(typeOrExpr);
}

// True when the kind is an expression. Relies on the SyntaxKind enum ordering:
// expression kinds lie in the [BINARY_EXPRESSION, XML_ATOMIC_NAME_PATTERN] range.
private boolean isExpression(SyntaxKind kind) {
    switch (kind) {
        case NUMERIC_LITERAL:
        case STRING_LITERAL_TOKEN:
        case NIL_LITERAL:
        case NULL_LITERAL:
        case BOOLEAN_LITERAL:
            return true;
        default:
            return kind.compareTo(SyntaxKind.BINARY_EXPRESSION) >= 0 &&
                    kind.compareTo(SyntaxKind.XML_ATOMIC_NAME_PATTERN) <= 0;
    }
}

/**
 * Parse statement that starts with an empty parenthesis. Empty parenthesis can be
 * 1) Nil literal
 * 2) Nil type-desc
 * 3) Anon-function params
 *
 * @param openParen Open parenthesis
 * @param closeParen Close parenthesis
 * @return Parsed node
 */
private STNode parseTypeOrExprStartWithEmptyParenthesis(STNode openParen, STNode closeParen) {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case RIGHT_DOUBLE_ARROW_TOKEN:
            // `() =>` : the parens were an empty anon-function parameter list.
            STNode params = STNodeFactory.createEmptyNodeList();
            STNode anonFuncParam =
                    STNodeFactory.createImplicitAnonymousFunctionParameters(openParen, params, closeParen);
            endContext();
            return anonFuncParam;
        default:
            return STNodeFactory.createNilLiteralNode(openParen, closeParen);
    }
}

// Resolves a `function`-started construct: an anon-func expression stays an expression;
// a function-type-desc continues as a typed binding pattern.
private STNode parseAnonFuncExprOrTypedBPWithFuncType() {
    STNode exprOrTypeDesc = parseAnonFuncExprOrFuncTypeDesc();
    if (isAction(exprOrTypeDesc) || isExpression(exprOrTypeDesc.kind)) {
        return exprOrTypeDesc;
    }
    return parseTypedBindingPatternTypeRhs(exprOrTypeDesc, ParserRuleContext.VAR_DECL_STMT);
}

/**
 * Parse anon-func-expr or function-type-desc, by resolving the ambiguity.
 *
 * @return Anon-func-expr or function-type-desc
 */
private STNode parseAnonFuncExprOrFuncTypeDesc() {
    startContext(ParserRuleContext.FUNC_TYPE_DESC_OR_ANON_FUNC);
    STNode functionKeyword = parseFunctionKeyword();
    STNode funcSignature = parseFuncSignature(true);
    endContext();
    switch (peek().kind) {
        case OPEN_BRACE_TOKEN:
        case RIGHT_DOUBLE_ARROW_TOKEN:
            // A body follows, so this is an explicit anonymous function expression.
            switchContext(ParserRuleContext.EXPRESSION_STATEMENT);
            startContext(ParserRuleContext.ANON_FUNC_EXPRESSION);
            funcSignature = validateAndGetFuncParams((STFunctionSignatureNode) funcSignature);
            STNode funcBody = parseAnonFuncBody(false);
            STNode annots = STNodeFactory.createEmptyNodeList();
            STNode anonFunc = STNodeFactory.createExplicitAnonymousFunctionExpressionNode(annots, functionKeyword,
                    funcSignature, funcBody);
            return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, anonFunc, false, true);
        case IDENTIFIER_TOKEN:
        default:
            // No body: treat it as a function-type descriptor in a var-decl.
            switchContext(ParserRuleContext.VAR_DECL_STMT);
            STNode funcTypeDesc = STNodeFactory.createFunctionTypeDescriptorNode(functionKeyword, funcSignature);
            return parseComplexTypeDescriptor(funcTypeDesc, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN,
                    true);
    }
}

// Continues an ambiguous type-desc-or-expression based on the next token, resolving to a
// union/intersection type, a binary expression, a complex type-desc, or leaving it as-is.
private STNode parseTypeDescOrExprRhs(STNode typeOrExpr) {
    STToken nextToken = peek();
    STNode typeDesc;
    switch (nextToken.kind) {
        case PIPE_TOKEN:
            STToken nextNextToken = peek(2);
            if (nextNextToken.kind == SyntaxKind.EQUAL_TOKEN) {
                return typeOrExpr;
            }
            STNode pipe = parsePipeToken();
            STNode rhsTypeDescOrExpr = parseTypeDescOrExpr();
            if (isExpression(rhsTypeDescOrExpr.kind)) {
                return STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, typeOrExpr, pipe,
                        rhsTypeDescOrExpr);
            }
            typeDesc = getTypeDescFromExpr(typeOrExpr);
            rhsTypeDescOrExpr = getTypeDescFromExpr(rhsTypeDescOrExpr);
            return createUnionTypeDesc(typeDesc, pipe, rhsTypeDescOrExpr);
        case BITWISE_AND_TOKEN:
            nextNextToken = peek(2);
            if (nextNextToken.kind == SyntaxKind.EQUAL_TOKEN) {
                return typeOrExpr;
            }
            STNode ampersand = parseBinaryOperator();
            rhsTypeDescOrExpr = parseTypeDescOrExpr();
            if (isExpression(rhsTypeDescOrExpr.kind)) {
                return STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, typeOrExpr,
                        ampersand, rhsTypeDescOrExpr);
            }
            typeDesc = getTypeDescFromExpr(typeOrExpr);
            rhsTypeDescOrExpr = getTypeDescFromExpr(rhsTypeDescOrExpr);
            return createIntersectionTypeDesc(typeDesc, ampersand, rhsTypeDescOrExpr);
        case IDENTIFIER_TOKEN:
        case QUESTION_MARK_TOKEN:
            // A name or `?` after means the prefix was a type-desc.
            startContext(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN);
            typeDesc = parseComplexTypeDescriptor(typeOrExpr, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN,
                    false);
            endContext();
            return typeDesc;
        case SEMICOLON_TOKEN:
            return getTypeDescFromExpr(typeOrExpr);
        case EQUAL_TOKEN:
        case CLOSE_PAREN_TOKEN:
        case CLOSE_BRACE_TOKEN:
        case CLOSE_BRACKET_TOKEN:
        case EOF_TOKEN:
        case COMMA_TOKEN:
            return typeOrExpr;
        case OPEN_BRACKET_TOKEN:
            return parseTypedBindingPatternOrMemberAccess(typeOrExpr, false, true,
                    ParserRuleContext.AMBIGUOUS_STMT);
        case ELLIPSIS_TOKEN:
            STNode ellipsis = parseEllipsis();
            typeOrExpr = getTypeDescFromExpr(typeOrExpr);
            return STNodeFactory.createRestDescriptorNode(typeOrExpr, ellipsis);
        default:
            if (isCompoundBinaryOperator(nextToken.kind)) {
                return typeOrExpr;
            }
            if (isValidExprRhsStart(nextToken.kind, typeOrExpr.kind)) {
                return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, typeOrExpr, false, false, false, false);
            }
            recover(peek(), ParserRuleContext.TYPE_DESC_OR_EXPR_RHS, typeOrExpr);
            return parseTypeDescOrExprRhs(typeOrExpr);
    }
}

// True when the node could still be interpreted as either a type-desc or an expression.
private boolean isAmbiguous(STNode node) {
    switch (node.kind) {
        case SIMPLE_NAME_REFERENCE:
        case QUALIFIED_NAME_REFERENCE:
        case NIL_LITERAL:
        case NULL_LITERAL:
        case NUMERIC_LITERAL:
        case STRING_LITERAL:
        case BOOLEAN_LITERAL:
        case BRACKETED_LIST:
            return true;
        case BINARY_EXPRESSION:
            STBinaryExpressionNode binaryExpr = (STBinaryExpressionNode) node;
            // NOTE(review): the `|| == BITWISE_AND_TOKEN` disjunct is unreachable — a
            // BITWISE_AND operator already satisfies `!= PIPE_TOKEN`. Possibly
            // `&& != BITWISE_AND_TOKEN` was intended (i.e. treat both `|` and `&` as
            // ambiguous); confirm against the grammar before changing.
            if (binaryExpr.operator.kind != SyntaxKind.PIPE_TOKEN ||
                    binaryExpr.operator.kind == SyntaxKind.BITWISE_AND_TOKEN) {
                return false;
            }
            return
isAmbiguous(binaryExpr.lhsExpr) && isAmbiguous(binaryExpr.rhsExpr);
        case BRACED_EXPRESSION:
            return isAmbiguous(((STBracedExpressionNode) node).expression);
        case INDEXED_EXPRESSION:
            // Ambiguous only when both the container and every key (ignoring commas) are ambiguous.
            STIndexedExpressionNode indexExpr = (STIndexedExpressionNode) node;
            if (!isAmbiguous(indexExpr.containerExpression)) {
                return false;
            }
            STNode keys = indexExpr.keyExpression;
            for (int i = 0; i < keys.bucketCount(); i++) {
                STNode item = keys.childInBucket(i);
                if (item.kind == SyntaxKind.COMMA_TOKEN) {
                    continue;
                }
                if (!isAmbiguous(item)) {
                    return false;
                }
            }
            return true;
        default:
            return false;
    }
}

// True when the node consists only of basic literals (possibly combined with `|`, braces,
// bracketed lists, or unary +/- on numerics).
private boolean isAllBasicLiterals(STNode node) {
    switch (node.kind) {
        case NIL_LITERAL:
        case NULL_LITERAL:
        case NUMERIC_LITERAL:
        case STRING_LITERAL:
        case BOOLEAN_LITERAL:
            return true;
        case BINARY_EXPRESSION:
            STBinaryExpressionNode binaryExpr = (STBinaryExpressionNode) node;
            // NOTE(review): same redundant `|| == BITWISE_AND_TOKEN` disjunct as in
            // isAmbiguous; also the recursion below goes through isAmbiguous rather than
            // isAllBasicLiterals — verify both are intentional.
            if (binaryExpr.operator.kind != SyntaxKind.PIPE_TOKEN ||
                    binaryExpr.operator.kind == SyntaxKind.BITWISE_AND_TOKEN) {
                return false;
            }
            return isAmbiguous(binaryExpr.lhsExpr) && isAmbiguous(binaryExpr.rhsExpr);
        case BRACED_EXPRESSION:
            return isAmbiguous(((STBracedExpressionNode) node).expression);
        case BRACKETED_LIST:
            STAmbiguousCollectionNode list = (STAmbiguousCollectionNode) node;
            for (STNode member : list.members) {
                if (member.kind == SyntaxKind.COMMA_TOKEN) {
                    continue;
                }
                if (!isAllBasicLiterals(member)) {
                    return false;
                }
            }
            return true;
        case UNARY_EXPRESSION:
            // Only signed numeric literals count as basic literals.
            STUnaryExpressionNode unaryExpr = (STUnaryExpressionNode) node;
            if (unaryExpr.unaryOperator.kind != SyntaxKind.PLUS_TOKEN &&
                    unaryExpr.unaryOperator.kind != SyntaxKind.MINUS_TOKEN) {
                return false;
            }
            return isNumericLiteral(unaryExpr.expression);
        default:
            return false;
    }
}

// True when the node is a numeric literal.
private boolean isNumericLiteral(STNode node) {
    switch (node.kind) {
        case NUMERIC_LITERAL:
            return true;
        default:
            return false;
    }
}

// Parses a bracket-started construct whose members are themselves ambiguous
// type-desc-or-expressions, producing a tuple type descriptor.
private STNode parseTypedDescOrExprStartsWithOpenBracket() {
    startContext(ParserRuleContext.BRACKETED_LIST);
    STNode openBracket = parseOpenBracket();
    List<STNode> members = new ArrayList<>();
    STNode memberEnd;
    while (!isEndOfListConstructor(peek().kind)) {
        STNode expr = parseTypeDescOrExpr();
        members.add(expr);
        memberEnd = parseBracketedListMemberEnd();
        if (memberEnd == null) {
            break;
        }
        members.add(memberEnd);
    }
    STNode memberNodes = STNodeFactory.createNodeList(members);
    STNode closeBracket = parseCloseBracket();
    endContext();
    return STNodeFactory.createTupleTypeDescriptorNode(openBracket, memberNodes, closeBracket);
}

/**
 * Parse binding-patterns.
 * <p>
 * <code>
 * binding-pattern := capture-binding-pattern
 * | wildcard-binding-pattern
 * | list-binding-pattern
 * | mapping-binding-pattern
 * | functional-binding-pattern
 * <br/><br/>
 * <p>
 * capture-binding-pattern := variable-name
 * variable-name := identifier
 * <br/><br/>
 * <p>
 * wildcard-binding-pattern := _
 * list-binding-pattern := [ list-member-binding-patterns ]
 * <br/>
 * list-member-binding-patterns := binding-pattern (, binding-pattern)* [, rest-binding-pattern]
 * | [ rest-binding-pattern ]
 * <br/><br/>
 * <p>
 * mapping-binding-pattern := { field-binding-patterns }
 * field-binding-patterns := field-binding-pattern (, field-binding-pattern)* [, rest-binding-pattern]
 * | [ rest-binding-pattern ]
 * <br/>
 * field-binding-pattern := field-name : binding-pattern | variable-name
 * <br/>
 * rest-binding-pattern := ... variable-name
 * <p>
 * <br/><br/>
 * functional-binding-pattern := functionally-constructible-type-reference ( arg-list-binding-pattern )
 * <br/>
 * arg-list-binding-pattern := positional-arg-binding-patterns [, other-arg-binding-patterns]
 * | other-arg-binding-patterns
 * <br/>
 * positional-arg-binding-patterns := positional-arg-binding-pattern (, positional-arg-binding-pattern)*
 * <br/>
 * positional-arg-binding-pattern := binding-pattern
 * <br/>
 * other-arg-binding-patterns := named-arg-binding-patterns [, rest-binding-pattern]
 * | [rest-binding-pattern]
 * <br/>
 * named-arg-binding-patterns := named-arg-binding-pattern (, named-arg-binding-pattern)*
 * <br/>
 * named-arg-binding-pattern := arg-name = binding-pattern
 * </code>
 *
 * @return binding-pattern node
 */
private STNode parseBindingPattern() {
    switch (peek().kind) {
        case OPEN_BRACKET_TOKEN:
            return parseListBindingPattern();
        case IDENTIFIER_TOKEN:
            return parseBindingPatternStartsWithIdentifier();
        case OPEN_BRACE_TOKEN:
            return parseMappingBindingPattern();
        case ERROR_KEYWORD:
            return parseErrorBindingPattern();
        default:
            recover(peek(), ParserRuleContext.BINDING_PATTERN);
            return parseBindingPattern();
    }
}

// Parses an identifier-started binding pattern: `name(...)` is an error binding pattern
// (with a synthesized missing `error` keyword); otherwise it is a capture/wildcard pattern.
private STNode parseBindingPatternStartsWithIdentifier() {
    STNode argNameOrBindingPattern =
            parseQualifiedIdentifier(ParserRuleContext.BINDING_PATTERN_STARTING_IDENTIFIER);
    STToken secondToken = peek();
    if (secondToken.kind == SyntaxKind.OPEN_PAREN_TOKEN) {
        startContext(ParserRuleContext.ERROR_BINDING_PATTERN);
        STNode errorKeyword = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.ERROR_KEYWORD);
        return parseErrorBindingPattern(errorKeyword, argNameOrBindingPattern);
    }
    if (argNameOrBindingPattern.kind != SyntaxKind.SIMPLE_NAME_REFERENCE) {
        // A qualified name cannot be a capture pattern: synthesize a missing identifier and
        // attach the invalid qualified name as leading minutiae.
        STNode identifier = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN);
        identifier = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(identifier, argNameOrBindingPattern);
        return createCaptureOrWildcardBP(identifier);
    }
    return
createCaptureOrWildcardBP(((STSimpleNameReferenceNode) argNameOrBindingPattern).name);
}

// Wraps a variable name as a wildcard binding pattern (for `_`) or a capture binding pattern.
private STNode createCaptureOrWildcardBP(STNode varName) {
    STNode bindingPattern;
    if (isWildcardBP(varName)) {
        bindingPattern = getWildcardBindingPattern(varName);
    } else {
        bindingPattern = STNodeFactory.createCaptureBindingPatternNode(varName);
    }
    return bindingPattern;
}

/**
 * Parse list-binding-patterns.
 * <p>
 * <code>
 * list-binding-pattern := [ list-member-binding-patterns ]
 * <br/>
 * list-member-binding-patterns := binding-pattern (, binding-pattern)* [, rest-binding-pattern]
 * | [ rest-binding-pattern ]
 * </code>
 *
 * @return list-binding-pattern node
 */
private STNode parseListBindingPattern() {
    startContext(ParserRuleContext.LIST_BINDING_PATTERN);
    STNode openBracket = parseOpenBracket();
    List<STNode> bindingPatternsList = new ArrayList<>();
    STNode listBindingPattern = parseListBindingPattern(openBracket, bindingPatternsList);
    endContext();
    return listBindingPattern;
}

// Handles the empty `[]` case, otherwise parses the first member and delegates to the
// member-loop overload below.
private STNode parseListBindingPattern(STNode openBracket, List<STNode> bindingPatternsList) {
    if (isEndOfListBindingPattern(peek().kind) && bindingPatternsList.size() == 0) {
        // Empty list binding pattern: [ ]
        STNode closeBracket = parseCloseBracket();
        STNode restBindingPattern = STNodeFactory.createEmptyNode();
        STNode bindingPatternsNode = STNodeFactory.createNodeList(bindingPatternsList);
        return STNodeFactory.createListBindingPatternNode(openBracket, bindingPatternsNode, restBindingPattern,
                closeBracket);
    }
    STNode listBindingPatternMember = parseListBindingPatternMember();
    bindingPatternsList.add(listBindingPatternMember);
    STNode listBindingPattern = parseListBindingPattern(openBracket, listBindingPatternMember, bindingPatternsList);
    return listBindingPattern;
}

// Parses the remaining comma-separated members; a rest-binding-pattern terminates the loop
// and is pulled out of the member list into the dedicated rest slot of the node.
private STNode parseListBindingPattern(STNode openBracket, STNode firstMember, List<STNode> bindingPatterns) {
    STNode member = firstMember;
    STToken token = peek();
    STNode listBindingPatternRhs = null;
    while (!isEndOfListBindingPattern(token.kind) && member.kind != SyntaxKind.REST_BINDING_PATTERN) {
        listBindingPatternRhs = parseListBindingPatternMemberRhs();
        if (listBindingPatternRhs == null) {
            break;
        }
        bindingPatterns.add(listBindingPatternRhs);
        member = parseListBindingPatternMember();
        bindingPatterns.add(member);
        token = peek();
    }
    // A rest-binding-pattern, if present, must be the last member; move it to its own slot.
    STNode restBindingPattern;
    if (member.kind == SyntaxKind.REST_BINDING_PATTERN) {
        restBindingPattern = bindingPatterns.remove(bindingPatterns.size() - 1);
    } else {
        restBindingPattern = STNodeFactory.createEmptyNode();
    }
    STNode closeBracket = parseCloseBracket();
    STNode bindingPatternsNode = STNodeFactory.createNodeList(bindingPatterns);
    return STNodeFactory.createListBindingPatternNode(openBracket, bindingPatternsNode, restBindingPattern,
            closeBracket);
}

// Parses the separator after a list-binding-pattern member: a comma, or null at `]`.
private STNode parseListBindingPatternMemberRhs() {
    switch (peek().kind) {
        case COMMA_TOKEN:
            return parseComma();
        case CLOSE_BRACKET_TOKEN:
            return null;
        default:
            recover(peek(), ParserRuleContext.LIST_BINDING_PATTERN_MEMBER_END);
            return parseListBindingPatternMemberRhs();
    }
}

// True when the next token terminates a list binding pattern.
private boolean isEndOfListBindingPattern(SyntaxKind nextTokenKind) {
    switch (nextTokenKind) {
        case CLOSE_BRACKET_TOKEN:
        case EOF_TOKEN:
            return true;
        default:
            return false;
    }
}

/**
 * Parse list-binding-pattern member.
 * <p>
 * <code>
 * list-binding-pattern := [ list-member-binding-patterns ]
 * <br/>
 * list-member-binding-patterns := binding-pattern (, binding-pattern)* [, rest-binding-pattern]
 * | [ rest-binding-pattern ]
 * </code>
 *
 * @return List binding pattern member
 */
private STNode parseListBindingPatternMember() {
    switch (peek().kind) {
        case ELLIPSIS_TOKEN:
            return parseRestBindingPattern();
        case OPEN_BRACKET_TOKEN:
        case IDENTIFIER_TOKEN:
        case OPEN_BRACE_TOKEN:
        case ERROR_KEYWORD:
            return parseBindingPattern();
        default:
            recover(peek(), ParserRuleContext.LIST_BINDING_PATTERN_MEMBER);
            return parseListBindingPatternMember();
    }
}

/**
 * Parse rest binding pattern.
 * <p>
 * <code>
 * rest-binding-pattern := ... variable-name
 * </code>
 *
 * @return Rest binding pattern node
 */
private STNode parseRestBindingPattern() {
    startContext(ParserRuleContext.REST_BINDING_PATTERN);
    STNode ellipsis = parseEllipsis();
    STNode varName = parseVariableName();
    endContext();
    STSimpleNameReferenceNode simpleNameReferenceNode =
            (STSimpleNameReferenceNode) STNodeFactory.createSimpleNameReferenceNode(varName);
    return STNodeFactory.createRestBindingPatternNode(ellipsis, simpleNameReferenceNode);
}

/**
 * Parse Typed-binding-pattern.
 * <p>
 * <code>
 * typed-binding-pattern := inferable-type-descriptor binding-pattern
 * <br/><br/>
 * inferable-type-descriptor := type-descriptor | var
 * </code>
 *
 * @return Typed binding pattern node
 */
private STNode parseTypedBindingPattern(ParserRuleContext context) {
    STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true, false);
    STNode typeBindingPattern = parseTypedBindingPatternTypeRhs(typeDesc, context);
    return typeBindingPattern;
}

/**
 * Parse mapping-binding-patterns.
* <p>
 * <code>
 * mapping-binding-pattern := { field-binding-patterns }
 * <br/><br/>
 * field-binding-patterns := field-binding-pattern (, field-binding-pattern)* [, rest-binding-pattern]
 * | [ rest-binding-pattern ]
 * <br/><br/>
 * field-binding-pattern := field-name : binding-pattern | variable-name
 * </code>
 *
 * @return mapping-binding-pattern node
 */
private STNode parseMappingBindingPattern() {
    startContext(ParserRuleContext.MAPPING_BINDING_PATTERN);
    STNode openBrace = parseOpenBrace();
    STToken token = peek();
    if (isEndOfMappingBindingPattern(token.kind)) {
        // Empty mapping binding pattern: { }
        STNode closeBrace = parseCloseBrace();
        STNode bindingPatternsNode = STNodeFactory.createEmptyNodeList();
        STNode restBindingPattern = STNodeFactory.createEmptyNode();
        endContext();
        return STNodeFactory.createMappingBindingPatternNode(openBrace, bindingPatternsNode, restBindingPattern,
                closeBrace);
    }
    List<STNode> bindingPatterns = new ArrayList<>();
    STNode prevMember = parseMappingBindingPatternMember();
    // A rest-binding-pattern is kept out of the member list; it goes into its own slot later.
    if (prevMember.kind != SyntaxKind.REST_BINDING_PATTERN) {
        bindingPatterns.add(prevMember);
    }
    return parseMappingBindingPattern(openBrace, bindingPatterns, prevMember);
}

// Parses the remaining comma-separated field binding patterns; a rest-binding-pattern
// terminates the loop and becomes the node's rest slot.
private STNode parseMappingBindingPattern(STNode openBrace, List<STNode> bindingPatterns, STNode prevMember) {
    STToken token = peek();
    STNode mappingBindingPatternRhs = null;
    while (!isEndOfMappingBindingPattern(token.kind) && prevMember.kind != SyntaxKind.REST_BINDING_PATTERN) {
        mappingBindingPatternRhs = parseMappingBindingPatternEnd();
        if (mappingBindingPatternRhs == null) {
            break;
        }
        bindingPatterns.add(mappingBindingPatternRhs);
        prevMember = parseMappingBindingPatternMember();
        if (prevMember.kind == SyntaxKind.REST_BINDING_PATTERN) {
            break;
        }
        bindingPatterns.add(prevMember);
        token = peek();
    }
    STNode restBindingPattern;
    if (prevMember.kind == SyntaxKind.REST_BINDING_PATTERN) {
        restBindingPattern = prevMember;
    } else {
        restBindingPattern = STNodeFactory.createEmptyNode();
    }
    STNode closeBrace = parseCloseBrace();
    STNode bindingPatternsNode = STNodeFactory.createNodeList(bindingPatterns);
    endContext();
    return STNodeFactory.createMappingBindingPatternNode(openBrace, bindingPatternsNode, restBindingPattern,
            closeBrace);
}

/**
 * Parse mapping-binding-pattern entry.
 * <p>
 * <code>
 * mapping-binding-pattern := { field-binding-patterns }
 * <br/><br/>
 * field-binding-patterns := field-binding-pattern (, field-binding-pattern)* [, rest-binding-pattern]
 * | [ rest-binding-pattern ]
 * <br/><br/>
 * field-binding-pattern := field-name : binding-pattern
 * | variable-name
 * </code>
 *
 * @return mapping-binding-pattern node
 */
private STNode parseMappingBindingPatternMember() {
    STToken token = peek();
    switch (token.kind) {
        case ELLIPSIS_TOKEN:
            return parseRestBindingPattern();
        default:
            return parseFieldBindingPattern();
    }
}

// Parses the separator after a field binding pattern: a comma, or null at `}`.
private STNode parseMappingBindingPatternEnd() {
    switch (peek().kind) {
        case COMMA_TOKEN:
            return parseComma();
        case CLOSE_BRACE_TOKEN:
            return null;
        default:
            recover(peek(), ParserRuleContext.MAPPING_BINDING_PATTERN_END);
            return parseMappingBindingPatternEnd();
    }
}

/**
 * Parse field-binding-pattern.
 * <code>field-binding-pattern := field-name : binding-pattern | varname</code>
 *
 * @return field-binding-pattern node
 */
private STNode parseFieldBindingPattern() {
    switch (peek().kind) {
        case IDENTIFIER_TOKEN:
            STNode identifier = parseIdentifier(ParserRuleContext.FIELD_BINDING_PATTERN_NAME);
            STNode fieldBindingPattern = parseFieldBindingPattern(identifier);
            return fieldBindingPattern;
        default:
            recover(peek(), ParserRuleContext.FIELD_BINDING_PATTERN_NAME);
            return parseFieldBindingPattern();
    }
}

// Completes a field binding pattern: `name : pattern` (full form) or just `name` (varname form).
private STNode parseFieldBindingPattern(STNode identifier) {
    STNode simpleNameReference = STNodeFactory.createSimpleNameReferenceNode(identifier);
    if (peek().kind != SyntaxKind.COLON_TOKEN) {
        return STNodeFactory.createFieldBindingPatternVarnameNode(simpleNameReference);
    }
    STNode colon = parseColon();
    STNode bindingPattern = parseBindingPattern();
    return STNodeFactory.createFieldBindingPatternFullNode(simpleNameReference, colon, bindingPattern);
}

// True when the next token terminates a mapping binding pattern.
private boolean isEndOfMappingBindingPattern(SyntaxKind nextTokenKind) {
    return nextTokenKind == SyntaxKind.CLOSE_BRACE_TOKEN;
}

// Disambiguates an `error`-started statement by look-ahead: `error(` is a binding pattern,
// `error<` is a type-desc, `error id(`/`error id:` is a binding pattern, else a type-desc.
private STNode parseErrorTypeDescOrErrorBP(STNode annots) {
    STToken nextNextToken = peek(2);
    switch (nextNextToken.kind) {
        case OPEN_PAREN_TOKEN:
            return parseAsErrorBindingPattern();
        case LT_TOKEN:
            return parseAsErrorTypeDesc(annots);
        case IDENTIFIER_TOKEN:
            SyntaxKind nextNextNextTokenKind = peek(3).kind;
            if (nextNextNextTokenKind == SyntaxKind.COLON_TOKEN ||
                    nextNextNextTokenKind == SyntaxKind.OPEN_PAREN_TOKEN) {
                return parseAsErrorBindingPattern();
            }
            // Intentional fall-through: `error id` without `:`/`(` is parsed as a type-desc.
        default:
            return parseAsErrorTypeDesc(annots);
    }
}

// Parses an error binding pattern followed by an assignment statement rhs.
private STNode parseAsErrorBindingPattern() {
    startContext(ParserRuleContext.ASSIGNMENT_STMT);
    return parseAssignmentStmtRhs(parseErrorBindingPattern());
}

// Parses the `error` start as a type-desc in a variable declaration.
private STNode parseAsErrorTypeDesc(STNode annots) {
    STNode finalKeyword = STNodeFactory.createEmptyNode();
    return parseVariableDecl(getAnnotations(annots), finalKeyword, false);
}

/**
 * Parse error binding pattern node.
* <p> * <code>error-binding-pattern := error [error-type-reference] ( error-arg-list-binding-pattern )</code> * <br/><br/> * error-arg-list-binding-pattern := * error-message-binding-pattern [, error-cause-binding-pattern] [, error-field-binding-patterns] * | [error-field-binding-patterns] * <br/><br/> * error-message-binding-pattern := simple-binding-pattern * <br/><br/> * error-cause-binding-pattern := simple-binding-pattern | error-binding-pattern * <br/><br/> * simple-binding-pattern := capture-binding-pattern | wildcard-binding-pattern * <br/><br/> * error-field-binding-patterns := * named-arg-binding-pattern (, named-arg-binding-pattern)* [, rest-binding-pattern] * | rest-binding-pattern * <br/><br/> * named-arg-binding-pattern := arg-name = binding-pattern * * @return Error binding pattern node. */ private STNode parseErrorBindingPattern() { startContext(ParserRuleContext.ERROR_BINDING_PATTERN); STNode errorKeyword = parseErrorKeyword(); return parseErrorBindingPattern(errorKeyword); } private STNode parseErrorBindingPattern(STNode errorKeyword) { STToken nextToken = peek(); STNode typeRef; switch (nextToken.kind) { case IDENTIFIER_TOKEN: typeRef = parseTypeReference(); break; case OPEN_PAREN_TOKEN: typeRef = STNodeFactory.createEmptyNode(); break; default: recover(peek(), ParserRuleContext.ERROR_BINDING_PATTERN_ERROR_KEYWORD_RHS); return parseErrorBindingPattern(errorKeyword); } return parseErrorBindingPattern(errorKeyword, typeRef); } private STNode parseErrorBindingPattern(STNode errorKeyword, STNode typeRef) { STNode openParenthesis = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS); STNode argListBindingPatterns = parseErrorArgListBindingPatterns(); STNode closeParenthesis = parseCloseParenthesis(); endContext(); return STNodeFactory.createErrorBindingPatternNode(errorKeyword, typeRef, openParenthesis, argListBindingPatterns, closeParenthesis); } /** * Parse error arg list binding pattern. 
     * <p>
     * <code>
     * error-arg-list-binding-pattern :=
     * error-message-binding-pattern [, error-cause-binding-pattern] [, error-field-binding-patterns]
     * | [error-field-binding-patterns]
     * <br/><br/>
     * <p>
     * error-message-binding-pattern := simple-binding-pattern
     * <br/><br/>
     * <p>
     * error-cause-binding-pattern := simple-binding-pattern | error-binding-pattern
     * <br/><br/>
     * <p>
     * simple-binding-pattern := capture-binding-pattern | wildcard-binding-pattern
     * <br/><br/>
     * <p>
     * error-field-binding-patterns :=
     * named-arg-binding-pattern (, named-arg-binding-pattern)* [, rest-binding-pattern]
     * | rest-binding-pattern
     * <br/><br/>
     * <p>
     * named-arg-binding-pattern := arg-name = binding-pattern
     * </code>
     *
     * @return Error arg list binding patterns.
     */
    private STNode parseErrorArgListBindingPatterns() {
        List<STNode> argListBindingPatterns = new ArrayList<>();
        if (isEndOfErrorFieldBindingPatterns()) {
            return STNodeFactory.createNodeList(argListBindingPatterns);
        }

        STNode firstArg = parseErrorArgListBindingPattern(ParserRuleContext.ERROR_ARG_LIST_BINDING_PATTERN_START);
        if (firstArg.kind == SyntaxKind.CAPTURE_BINDING_PATTERN ||
                firstArg.kind == SyntaxKind.WILDCARD_BINDING_PATTERN) {
            // First arg is the error-message-binding-pattern; an optional second
            // positional arg (the error-cause) may follow.
            argListBindingPatterns.add(firstArg);
            STNode argEnd = parseErrorArgsBindingPatternEnd(ParserRuleContext.ERROR_MESSAGE_BINDING_PATTERN_END);
            if (argEnd != null) {
                STNode secondArg = parseErrorArgListBindingPattern(ParserRuleContext.ERROR_MESSAGE_BINDING_PATTERN_RHS);
                if (isValidSecondArgBindingPattern(secondArg.kind)) {
                    argListBindingPatterns.add(argEnd);
                    argListBindingPatterns.add(secondArg);
                } else {
                    // Invalid second arg: attach both the separator and the arg to the
                    // previous node as invalid nodes carrying a diagnostic.
                    updateLastNodeInListWithInvalidNode(argListBindingPatterns, argEnd, null);
                    updateLastNodeInListWithInvalidNode(argListBindingPatterns, secondArg,
                            DiagnosticErrorCode.ERROR_BINDING_PATTERN_NOT_ALLOWED);
                }
            }
        } else {
            if (firstArg.kind != SyntaxKind.NAMED_ARG_BINDING_PATTERN &&
                    firstArg.kind != SyntaxKind.REST_BINDING_PATTERN) {
                addInvalidNodeToNextToken(firstArg, DiagnosticErrorCode.ERROR_BINDING_PATTERN_NOT_ALLOWED);
            } else {
                argListBindingPatterns.add(firstArg);
            }
        }

        parseErrorFieldBindingPatterns(argListBindingPatterns);
        return STNodeFactory.createNodeList(argListBindingPatterns);
    }

    // Kinds allowed as the second positional arg (the error-cause-binding-pattern slot).
    private boolean isValidSecondArgBindingPattern(SyntaxKind syntaxKind) {
        switch (syntaxKind) {
            case CAPTURE_BINDING_PATTERN:
            case WILDCARD_BINDING_PATTERN:
            case ERROR_BINDING_PATTERN:
            case NAMED_ARG_BINDING_PATTERN:
            case REST_BINDING_PATTERN:
                return true;
            default:
                return false;
        }
    }

    /**
     * Parse the remaining error-field-binding-patterns, appending valid members to the
     * given list and attaching invalid ones to the previous node as diagnostics.
     *
     * @param argListBindingPatterns Accumulated arg-list members (mutated in place)
     */
    private void parseErrorFieldBindingPatterns(List<STNode> argListBindingPatterns) {
        SyntaxKind lastValidArgKind = SyntaxKind.NAMED_ARG_BINDING_PATTERN;
        while (!isEndOfErrorFieldBindingPatterns()) {
            STNode argEnd = parseErrorArgsBindingPatternEnd(ParserRuleContext.ERROR_FIELD_BINDING_PATTERN_END);
            if (argEnd == null) {
                // A null separator means the close-paren was reached.
                break;
            }
            STNode currentArg = parseErrorArgListBindingPattern(ParserRuleContext.ERROR_FIELD_BINDING_PATTERN);
            DiagnosticErrorCode errorCode = validateErrorFieldBindingPatternOrder(lastValidArgKind, currentArg.kind);
            if (errorCode == null) {
                argListBindingPatterns.add(argEnd);
                argListBindingPatterns.add(currentArg);
                lastValidArgKind = currentArg.kind;
            } else if (argListBindingPatterns.size() == 0) {
                // Nothing valid collected yet: attach to the upcoming token instead.
                addInvalidNodeToNextToken(argEnd, null);
                addInvalidNodeToNextToken(currentArg, errorCode);
            } else {
                updateLastNodeInListWithInvalidNode(argListBindingPatterns, argEnd, null);
                updateLastNodeInListWithInvalidNode(argListBindingPatterns, currentArg, errorCode);
            }
        }
    }

    // The arg list ends at the closing paren (or at EOF during recovery).
    private boolean isEndOfErrorFieldBindingPatterns() {
        SyntaxKind nextTokenKind = peek().kind;
        switch (nextTokenKind) {
            case CLOSE_PAREN_TOKEN:
            case EOF_TOKEN:
                return true;
            default:
                return false;
        }
    }

    /**
     * Parse the separator between error arg-list members.
     *
     * @param currentCtx Context used for error recovery
     * @return The comma token, or <code>null</code> when the close-paren ends the list
     */
    private STNode parseErrorArgsBindingPatternEnd(ParserRuleContext currentCtx) {
        switch (peek().kind) {
            case COMMA_TOKEN:
                return consume();
            case CLOSE_PAREN_TOKEN:
                return null;
            default:
                recover(peek(), currentCtx);
                return parseErrorArgsBindingPatternEnd(currentCtx);
        }
    }

    /**
     * Parse a single member of an error arg-list-binding-pattern.
     *
     * @param context Context used for error recovery
     * @return Parsed member node
     */
    private STNode parseErrorArgListBindingPattern(ParserRuleContext context) {
        switch (peek().kind) {
            case ELLIPSIS_TOKEN:
                return parseRestBindingPattern();
            case IDENTIFIER_TOKEN:
                // Either a named arg, or a capture/wildcard binding pattern.
                return parseNamedOrSimpleArgBindingPattern();
            case OPEN_BRACKET_TOKEN:
            case OPEN_BRACE_TOKEN:
            case ERROR_KEYWORD:
                return parseBindingPattern();
            default:
                recover(peek(), context);
                return parseErrorArgListBindingPattern(context);
        }
    }

    /**
     * Parse an identifier-led arg as either a named-arg-binding-pattern
     * (<code>arg-name = binding-pattern</code>) or a capture/wildcard binding pattern.
     *
     * @return Parsed node
     */
    private STNode parseNamedOrSimpleArgBindingPattern() {
        STNode argNameOrSimpleBindingPattern = consume(); // identifier
        STToken secondToken = peek();
        switch (secondToken.kind) {
            case EQUAL_TOKEN:
                STNode equal = consume(); // =
                STNode bindingPattern = parseBindingPattern();
                return STNodeFactory.createNamedArgBindingPatternNode(argNameOrSimpleBindingPattern,
                        equal, bindingPattern);
            case COMMA_TOKEN:
            case CLOSE_PAREN_TOKEN:
            default:
                return createCaptureOrWildcardBP(argNameOrSimpleBindingPattern);
        }
    }

    /**
     * Validate the ordering of error-field-binding-patterns: only named-arg and rest
     * binding patterns are allowed here, and nothing may follow a rest-binding-pattern.
     *
     * @param prevArgKind Kind of the previous valid arg
     * @param currentArgKind Kind of the current arg
     * @return A diagnostic code when the ordering is invalid, otherwise <code>null</code>
     */
    private DiagnosticErrorCode validateErrorFieldBindingPatternOrder(SyntaxKind prevArgKind,
                                                                      SyntaxKind currentArgKind) {
        switch (currentArgKind) {
            case NAMED_ARG_BINDING_PATTERN:
            case REST_BINDING_PATTERN:
                if (prevArgKind == SyntaxKind.REST_BINDING_PATTERN) {
                    return DiagnosticErrorCode.ERROR_ARG_FOLLOWED_BY_REST_ARG;
                }
                return null;
            case CAPTURE_BINDING_PATTERN:
            case WILDCARD_BINDING_PATTERN:
            case ERROR_BINDING_PATTERN:
            case LIST_BINDING_PATTERN:
            case MAPPING_BINDING_PATTERN:
            default:
                return DiagnosticErrorCode.ERROR_BINDING_PATTERN_NOT_ALLOWED;
        }
    }

    /*
     * This parses Typed binding patterns and deals with ambiguity between types,
     * and binding patterns. An example is 'T[a]'.
     * The ambiguity lies in between:
     * 1) Array Type
     * 2) List binding pattern
     * 3) Member access expression.
     */

    /**
     * Parse the component after the type-desc, of a typed-binding-pattern.
     *
     * @param typeDesc Starting type-desc of the typed-binding-pattern
     * @return Typed-binding pattern
     */
    private STNode parseTypedBindingPatternTypeRhs(STNode typeDesc, ParserRuleContext context) {
        return parseTypedBindingPatternTypeRhs(typeDesc, context, true);
    }

    /**
     * Parse the component after the type-desc of a typed-binding-pattern.
     *
     * @param typeDesc Starting type-desc of the typed-binding-pattern
     * @param context Context in which the typed-binding-pattern occurs
     * @param isRoot Whether this is the root of the binding pattern
     * @return Typed-binding pattern; at a non-root position a terminating token yields
     *         the bare type-desc instead
     */
    private STNode parseTypedBindingPatternTypeRhs(STNode typeDesc, ParserRuleContext context, boolean isRoot) {
        switch (peek().kind) {
            case IDENTIFIER_TOKEN: // capture binding pattern
            case OPEN_BRACE_TOKEN: // mapping binding pattern
            case ERROR_KEYWORD: // error binding pattern
                STNode bindingPattern = parseBindingPattern();
                return STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);
            case OPEN_BRACKET_TOKEN:
                // T[..] is ambiguous (array-type vs. list-binding-pattern); disambiguate.
                STNode typedBindingPattern = parseTypedBindingPatternOrMemberAccess(typeDesc, true, true, context);
                assert typedBindingPattern.kind == SyntaxKind.TYPED_BINDING_PATTERN;
                return typedBindingPattern;
            case CLOSE_PAREN_TOKEN:
            case COMMA_TOKEN:
            case CLOSE_BRACKET_TOKEN:
            case CLOSE_BRACE_TOKEN:
                if (!isRoot) {
                    return typeDesc;
                }
                // Intentional fall-through at the root: recover below.
            default:
                recover(peek(), ParserRuleContext.TYPED_BINDING_PATTERN_TYPE_RHS, typeDesc, context, isRoot);
                return parseTypedBindingPatternTypeRhs(typeDesc, context, isRoot);
        }
    }

    /**
     * Parse typed-binding pattern with list, array-type-desc, or member-access-expr.
     *
     * @param typeDescOrExpr Type desc or the expression at the start
     * @param isTypedBindingPattern Whether this is a definite typed-binding-pattern.
     * @return Parsed node
     */
    private STNode parseTypedBindingPatternOrMemberAccess(STNode typeDescOrExpr, boolean isTypedBindingPattern,
                                                          boolean allowAssignment, ParserRuleContext context) {
        startContext(ParserRuleContext.BRACKETED_LIST);
        STNode openBracket = parseOpenBracket();

        // T[] — empty brackets are handled as an array-type-desc.
        if (isBracketedListEnd(peek().kind)) {
            return parseAsArrayTypeDesc(typeDescOrExpr, openBracket, STNodeFactory.createEmptyNode(), context);
        }

        // The first member may already make the construct unambiguous.
        STNode member = parseBracketedListMember(isTypedBindingPattern);
        SyntaxKind currentNodeType = getBracketedListNodeType(member);
        switch (currentNodeType) {
            case ARRAY_TYPE_DESC:
                STNode typedBindingPattern = parseAsArrayTypeDesc(typeDescOrExpr, openBracket, member, context);
                return typedBindingPattern;
            case LIST_BINDING_PATTERN:
                STNode bindingPattern = parseAsListBindingPattern(openBracket, new ArrayList<>(), member, false);
                STNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);
                return STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);
            case INDEXED_EXPRESSION:
                return parseAsMemberAccessExpr(typeDescOrExpr, openBracket, member);
            case NONE:
            default:
                // Still ambiguous; continue below.
                break;
        }

        // A second member rules out array-length and member-access-key readings:
        // continue as a list-binding-pattern.
        STNode memberEnd = parseBracketedListMemberEnd();
        if (memberEnd != null) {
            List<STNode> memberList = new ArrayList<>();
            memberList.add(member);
            memberList.add(memberEnd);
            STNode bindingPattern = parseAsListBindingPattern(openBracket, memberList);
            STNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);
            return STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);
        }

        // Single ambiguous member: decide based on what follows the close-bracket.
        STNode closeBracket = parseCloseBracket();
        endContext();
        return parseTypedBindingPatternOrMemberAccessRhs(typeDescOrExpr, openBracket, member, closeBracket,
                isTypedBindingPattern, allowAssignment, context);
    }

    // Continue parsing the bracketed construct as a member-access expression.
    private STNode parseAsMemberAccessExpr(STNode typeNameOrExpr, STNode openBracket, STNode member) {
        member = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, member, false, true);
        STNode closeBracket = parseCloseBracket();
        endContext();
        STNode keyExpr = STNodeFactory.createNodeList(member);
        STNode memberAccessExpr =
                STNodeFactory.createIndexedExpressionNode(typeNameOrExpr, openBracket, keyExpr, closeBracket);
        return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, memberAccessExpr, false, false);
    }

    // The bracketed list ends at the close-bracket (or at EOF during recovery).
    private boolean isBracketedListEnd(SyntaxKind nextTokenKind) {
        switch (nextTokenKind) {
            case EOF_TOKEN:
            case CLOSE_BRACKET_TOKEN:
                return true;
            default:
                return false;
        }
    }

    /**
     * Parse a member of an ambiguous bracketed list. This member could be:
     * 1) Array length
     * 2) Key expression of a member-access-expr
     * 3) A member-binding pattern of a list-binding-pattern.
     *
     * @param isTypedBindingPattern Is this in a definite typed-binding pattern
     * @return Parsed member node
     */
    private STNode parseBracketedListMember(boolean isTypedBindingPattern) {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case DECIMAL_INTEGER_LITERAL_TOKEN:
            case HEX_INTEGER_LITERAL_TOKEN:
            case ASTERISK_TOKEN:
            case STRING_LITERAL_TOKEN:
                return parseBasicLiteral();
            case CLOSE_BRACKET_TOKEN:
                return STNodeFactory.createEmptyNode();
            case OPEN_BRACE_TOKEN:
            case ERROR_KEYWORD:
            case ELLIPSIS_TOKEN:
            case OPEN_BRACKET_TOKEN:
                return parseStatementStartBracketedListMember();
            case IDENTIFIER_TOKEN:
                if (isTypedBindingPattern) {
                    return parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);
                }
                break;
            default:
                if (!isTypedBindingPattern && isValidExpressionStart(nextToken.kind, 1)) {
                    break;
                }
                ParserRuleContext recoverContext =
                        isTypedBindingPattern ? ParserRuleContext.LIST_BINDING_MEMBER_OR_ARRAY_LENGTH
                                : ParserRuleContext.BRACKETED_LIST_MEMBER;
                recover(peek(), recoverContext, isTypedBindingPattern);
                return parseBracketedListMember(isTypedBindingPattern);
        }

        STNode expr = parseExpression();
        if (isWildcardBP(expr)) {
            return getWildcardBindingPattern(expr);
        }
        return expr;
    }

    /**
     * Treat the current node as an array, and parse the remainder of the binding pattern.
     *
     * @param typeDesc Type-desc
     * @param openBracket Open bracket
     * @param member Member
     * @return Parsed node
     */
    private STNode parseAsArrayTypeDesc(STNode typeDesc, STNode openBracket, STNode member,
                                        ParserRuleContext context) {
        typeDesc = getTypeDescFromExpr(typeDesc);
        typeDesc = validateForUsageOfVar(typeDesc);
        STNode closeBracket = parseCloseBracket();
        endContext();
        return parseTypedBindingPatternOrMemberAccessRhs(typeDesc, openBracket, member, closeBracket, true, true,
                context);
    }

    /**
     * Parse the separator between bracketed-list members.
     *
     * @return The comma token, or <code>null</code> when the close-bracket ends the list
     */
    private STNode parseBracketedListMemberEnd() {
        switch (peek().kind) {
            case COMMA_TOKEN:
                return parseComma();
            case CLOSE_BRACKET_TOKEN:
                return null;
            default:
                recover(peek(), ParserRuleContext.BRACKETED_LIST_MEMBER_END);
                return parseBracketedListMemberEnd();
        }
    }

    /**
     * We reach here to break ambiguity of T[a]. This could be:
     * 1) Array Type Desc
     * 2) Member access on LHS
     * 3) Typed-binding-pattern
     *
     * @param typeDescOrExpr Type name or the expr that precedes the open-bracket.
     * @param openBracket Open bracket
     * @param member Member
     * @param closeBracket Close bracket
     * @param isTypedBindingPattern Whether this is a definite typed-binding-pattern.
     * @return Specific node that matches to T[a], after solving ambiguity.
     */
    private STNode parseTypedBindingPatternOrMemberAccessRhs(STNode typeDescOrExpr, STNode openBracket,
                                                             STNode member, STNode closeBracket,
                                                             boolean isTypedBindingPattern,
                                                             boolean allowAssignment, ParserRuleContext context) {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case IDENTIFIER_TOKEN: // capture binding pattern follows
            case OPEN_BRACE_TOKEN: // mapping binding pattern follows
            case ERROR_KEYWORD: // error binding pattern follows
                // A binding pattern follows, so T[a] must be an array-type-desc.
                STNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);
                STNode arrayTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, typeDesc);
                return parseTypedBindingPatternTypeRhs(arrayTypeDesc, context);
            case OPEN_BRACKET_TOKEN: // T[a][..]
                if (isTypedBindingPattern) {
                    typeDesc = getTypeDescFromExpr(typeDescOrExpr);
                    arrayTypeDesc = createArrayTypeDesc(typeDesc, openBracket, member, closeBracket);
                    return parseTypedBindingPatternTypeRhs(arrayTypeDesc, context);
                }
                // Still ambiguous: continue as a (possibly chained) member access.
                STNode keyExpr = STNodeFactory.createNodeList(member);
                STNode expr =
                        STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr, closeBracket);
                return parseTypedBindingPatternOrMemberAccess(expr, false, allowAssignment, context);
            case QUESTION_MARK_TOKEN:
                // T[a]? — the array-type-desc becomes optional.
                typeDesc = getTypeDescFromExpr(typeDescOrExpr);
                arrayTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, typeDesc);
                typeDesc = parseComplexTypeDescriptor(arrayTypeDesc,
                        ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);
                return parseTypedBindingPatternTypeRhs(typeDesc, context);
            case PIPE_TOKEN:
            case BITWISE_AND_TOKEN:
                // T[a] | .. or T[a] & ..
                return parseComplexTypeDescInTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket,
                        context, isTypedBindingPattern);
            case IN_KEYWORD:
                // "in" is only meaningful inside for-each statements and from-clauses.
                if (context != ParserRuleContext.FOREACH_STMT && context != ParserRuleContext.FROM_CLAUSE) {
                    break;
                }
                return createTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket);
            case EQUAL_TOKEN: // T[a] =
                if (context == ParserRuleContext.FOREACH_STMT || context == ParserRuleContext.FROM_CLAUSE) {
                    break;
                }
                if (isTypedBindingPattern || !allowAssignment || !isValidLVExpr(typeDescOrExpr)) {
                    return createTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket);
                }
                // Otherwise T[a] is the LHS of an assignment: a member access.
                keyExpr = STNodeFactory.createNodeList(member);
                typeDescOrExpr = getExpression(typeDescOrExpr);
                return STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr, closeBracket);
            case SEMICOLON_TOKEN: // T[a];
                if (context == ParserRuleContext.FOREACH_STMT || context == ParserRuleContext.FROM_CLAUSE) {
                    break;
                }
                return createTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket);
            case CLOSE_BRACE_TOKEN: // T[a]}
            case COMMA_TOKEN: // T[a],
                if (context == ParserRuleContext.AMBIGUOUS_STMT) {
                    keyExpr = STNodeFactory.createNodeList(member);
                    return STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr,
                            closeBracket);
                }
                // Intentional fall-through.
            default:
                if (isValidExprRhsStart(nextToken.kind, closeBracket.kind)) {
                    // An expression continues, so T[a] is a member access.
                    keyExpr = STNodeFactory.createNodeList(member);
                    typeDescOrExpr = getExpression(typeDescOrExpr);
                    return STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr,
                            closeBracket);
                }
                break;
        }

        recover(peek(), ParserRuleContext.BRACKETED_LIST_RHS, typeDescOrExpr, openBracket, member, closeBracket,
                isTypedBindingPattern, allowAssignment, context);
        return parseTypedBindingPatternOrMemberAccessRhs(typeDescOrExpr, openBracket, member, closeBracket,
                isTypedBindingPattern, allowAssignment, context);
    }

    /**
     * Build a typed-binding-pattern out of T[a]: T becomes the type-desc and [a]
     * becomes a list-binding-pattern.
     *
     * @param typeDescOrExpr Type-desc or expression preceding the open-bracket
     * @param openBracket Open bracket
     * @param member Single member inside the brackets (may be empty)
     * @param closeBracket Close bracket
     * @return Typed-binding-pattern node
     */
    private STNode createTypedBindingPattern(STNode typeDescOrExpr, STNode openBracket, STNode member,
                                             STNode closeBracket) {
        STNode bindingPatterns;
        if (isEmpty(member)) {
            bindingPatterns = STNodeFactory.createEmptyNodeList();
        } else {
            STNode bindingPattern = getBindingPattern(member);
            bindingPatterns = STNodeFactory.createNodeList(bindingPattern);
        }
        STNode restBindingPattern = STNodeFactory.createEmptyNode();
        STNode bindingPattern = STNodeFactory.createListBindingPatternNode(openBracket, bindingPatterns,
                restBindingPattern, closeBracket);
        STNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);
        return STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);
    }
    /**
     * Parse a union or intersection type-desc/binary-expression that involves ambiguous
     * bracketed list in lhs.
     * <p>
     * e.g: <code>(T[a] &amp; R..)</code> or <code>(T[a] | R.. )</code>
     * <p>
     * Complexity occurs in scenarios such as <code>T[a] |/&amp; R[b]</code>. If the token after this
     * is another binding-pattern, then <code>(T[a] |/&amp; R[b])</code> becomes the type-desc. However,
     * if the token follows this is an equal or semicolon, then <code>(T[a] |/&amp; R)</code> becomes
     * the type-desc, and <code>[b]</code> becomes the binding pattern.
     *
     * @param typeDescOrExpr Type desc or the expression
     * @param openBracket Open bracket
     * @param member Member
     * @param closeBracket Close bracket
     * @param context Context in which the typed binding pattern occurs
     * @param isTypedBindingPattern Whether this is a definite typed-binding-pattern
     * @return Parsed node
     */
    private STNode parseComplexTypeDescInTypedBindingPattern(STNode typeDescOrExpr, STNode openBracket,
                                                             STNode member, STNode closeBracket,
                                                             ParserRuleContext context,
                                                             boolean isTypedBindingPattern) {
        STNode pipeOrAndToken = parseUnionOrIntersectionToken();
        STNode typedBindingPatternOrExpr = parseTypedBindingPatternOrExpr(false);

        if (isTypedBindingPattern || typedBindingPatternOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) {
            // Treat T[a] as an array-type-desc and combine it with the RHS type-desc.
            STNode lhsTypeDesc = getTypeDescFromExpr(typeDescOrExpr);
            lhsTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, lhsTypeDesc);
            STTypedBindingPatternNode rhsTypedBindingPattern =
                    (STTypedBindingPatternNode) typedBindingPatternOrExpr;
            STNode newTypeDesc;
            if (pipeOrAndToken.kind == SyntaxKind.PIPE_TOKEN) {
                newTypeDesc = createUnionTypeDesc(lhsTypeDesc, pipeOrAndToken,
                        rhsTypedBindingPattern.typeDescriptor);
            } else {
                newTypeDesc = createIntersectionTypeDesc(lhsTypeDesc, pipeOrAndToken,
                        rhsTypedBindingPattern.typeDescriptor);
            }
            return STNodeFactory.createTypedBindingPatternNode(newTypeDesc,
                    rhsTypedBindingPattern.bindingPattern);
        } else {
            // Treat T[a] as a member access and build a binary expression.
            STNode keyExpr = getExpression(member);
            STNode containerExpr = getExpression(typeDescOrExpr);
            STNode lhsExpr =
                    STNodeFactory.createIndexedExpressionNode(containerExpr, openBracket, keyExpr, closeBracket);
            return STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, lhsExpr, pipeOrAndToken,
                    typedBindingPatternOrExpr);
        }
    }

    /**
     * Apply the bracketed dimension to the right-most component of a (possibly union or
     * intersection) type-desc, preserving the operator structure.
     *
     * @param openBracket Open bracket
     * @param member Array length member (may be empty)
     * @param closeBracket Close bracket
     * @param lhsTypeDesc Type-desc to apply the dimension to
     * @return Resulting type-desc
     */
    private STNode getArrayTypeDesc(STNode openBracket, STNode member, STNode closeBracket, STNode lhsTypeDesc) {
        if (lhsTypeDesc.kind == SyntaxKind.UNION_TYPE_DESC) {
            STUnionTypeDescriptorNode unionTypeDesc = (STUnionTypeDescriptorNode) lhsTypeDesc;
            STNode middleTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, unionTypeDesc.rightTypeDesc);
            lhsTypeDesc = createUnionTypeDesc(unionTypeDesc.leftTypeDesc, unionTypeDesc.pipeToken, middleTypeDesc);
        } else if (lhsTypeDesc.kind == SyntaxKind.INTERSECTION_TYPE_DESC) {
            STIntersectionTypeDescriptorNode intersectionTypeDesc = (STIntersectionTypeDescriptorNode) lhsTypeDesc;
            STNode middleTypeDesc =
                    getArrayTypeDesc(openBracket, member, closeBracket, intersectionTypeDesc.rightTypeDesc);
            lhsTypeDesc = createIntersectionTypeDesc(intersectionTypeDesc.leftTypeDesc,
                    intersectionTypeDesc.bitwiseAndToken, middleTypeDesc);
        } else {
            lhsTypeDesc = createArrayTypeDesc(lhsTypeDesc, openBracket, member, closeBracket);
        }
        return lhsTypeDesc;
    }

    /**
     * Parse union (|) or intersection (&amp;) type operator.
     *
     * @return pipe or bitwise and token
     */
    private STNode parseUnionOrIntersectionToken() {
        STToken token = peek();
        if (token.kind == SyntaxKind.PIPE_TOKEN || token.kind == SyntaxKind.BITWISE_AND_TOKEN) {
            return consume();
        } else {
            recover(token, ParserRuleContext.UNION_OR_INTERSECTION_TOKEN);
            return parseUnionOrIntersectionToken();
        }
    }

    /**
     * Infer the type of the ambiguous bracketed list, based on the type of the member.
     *
     * @param memberNode Member node
     * @return Inferred type of the bracketed list
     */
    private SyntaxKind getBracketedListNodeType(STNode memberNode) {
        if (isEmpty(memberNode)) {
            // Empty brackets: still ambiguous.
            return SyntaxKind.NONE;
        }
        if (isDefiniteTypeDesc(memberNode.kind)) {
            return SyntaxKind.TUPLE_TYPE_DESC;
        }
        switch (memberNode.kind) {
            case ASTERISK_LITERAL: // T[*]
                return SyntaxKind.ARRAY_TYPE_DESC;
            case CAPTURE_BINDING_PATTERN:
            case LIST_BINDING_PATTERN:
            case REST_BINDING_PATTERN:
            case MAPPING_BINDING_PATTERN:
            case WILDCARD_BINDING_PATTERN:
                return SyntaxKind.LIST_BINDING_PATTERN;
            case QUALIFIED_NAME_REFERENCE:
            case REST_TYPE:
                return SyntaxKind.TUPLE_TYPE_DESC;
            case NUMERIC_LITERAL:
            case SIMPLE_NAME_REFERENCE:
            case BRACKETED_LIST: // member is itself still ambiguous
            case MAPPING_BP_OR_MAPPING_CONSTRUCTOR:
                return SyntaxKind.NONE;
            default:
                return SyntaxKind.INDEXED_EXPRESSION;
        }
    }

    /*
     * This section tries to break the ambiguity in parsing a statement that starts with a open-bracket.
     * The ambiguity lies in between:
     * 1) Assignment that starts with list binding pattern
     * 2) Var-decl statement that starts with tuple type
     * 3) Statement that starts with list constructor, such as sync-send, etc.
     */

    /**
     * Parse any statement that starts with an open-bracket.
     *
     * @param annots Annotations attached to the statement.
     * @return Parsed node
     */
    private STNode parseStatementStartsWithOpenBracket(STNode annots, boolean possibleMappingField) {
        startContext(ParserRuleContext.ASSIGNMENT_OR_VAR_DECL_STMT);
        return parseStatementStartsWithOpenBracket(annots, true, possibleMappingField);
    }

    // Parse a nested (non-root) ambiguous bracketed list occurring as a member.
    private STNode parseMemberBracketedList(boolean possibleMappingField) {
        STNode annots = STNodeFactory.createEmptyNodeList();
        return parseStatementStartsWithOpenBracket(annots, false, possibleMappingField);
    }

    /**
     * The bracketed list at the start of a statement can be one of the following.
     * 1) List binding pattern
     * 2) Tuple type
     * 3) List constructor
     *
     * @param isRoot Is this the root of the list
     * @return Parsed node
     */
    private STNode parseStatementStartsWithOpenBracket(STNode annots, boolean isRoot, boolean possibleMappingField) {
        startContext(ParserRuleContext.STMT_START_BRACKETED_LIST);
        STNode openBracket = parseOpenBracket();
        List<STNode> memberList = new ArrayList<>();
        while (!isBracketedListEnd(peek().kind)) {
            // Parse a member; switch to a specific parsing path as soon as the member
            // makes the kind of the list unambiguous.
            STNode member = parseStatementStartBracketedListMember();
            SyntaxKind currentNodeType = getStmtStartBracketedListType(member);
            switch (currentNodeType) {
                case TUPLE_TYPE_DESC:
                    return parseAsTupleTypeDesc(annots, openBracket, memberList, member, isRoot);
                case LIST_BINDING_PATTERN:
                    return parseAsListBindingPattern(openBracket, memberList, member, isRoot);
                case LIST_CONSTRUCTOR:
                    return parseAsListConstructor(openBracket, memberList, member, isRoot);
                case LIST_BP_OR_LIST_CONSTRUCTOR:
                    return parseAsListBindingPatternOrListConstructor(openBracket, memberList, member, isRoot);
                case TUPLE_TYPE_DESC_OR_LIST_CONST:
                    return parseAsTupleTypeDescOrListConstructor(annots, openBracket, memberList, member, isRoot);
                case NONE:
                default:
                    memberList.add(member);
                    break;
            }

            STNode memberEnd = parseBracketedListMemberEnd();
            if (memberEnd == null) {
                break;
            }
            memberList.add(memberEnd);
        }

        // No member resolved the ambiguity; decide from what follows the close-bracket.
        STNode closeBracket = parseCloseBracket();
        STNode bracketedList = parseStatementStartBracketedList(annots, openBracket, memberList, closeBracket,
                isRoot, possibleMappingField);
        return bracketedList;
    }

    /**
     * Parse a member of a list-binding-pattern, tuple-type-desc, or
     * list-constructor-expr, when the parent is ambiguous.
     *
     * @return Parsed node
     */
    private STNode parseStatementStartBracketedListMember() {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case OPEN_BRACKET_TOKEN:
                // Nested bracketed list: still ambiguous.
                return parseMemberBracketedList(false);
            case IDENTIFIER_TOKEN:
                STNode identifier = parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);
                if (isWildcardBP(identifier)) {
                    STNode varName = ((STSimpleNameReferenceNode) identifier).name;
                    return getWildcardBindingPattern(varName);
                }
                if (peek().kind == SyntaxKind.ELLIPSIS_TOKEN) {
                    STNode ellipsis = parseEllipsis();
                    return STNodeFactory.createRestDescriptorNode(identifier, ellipsis);
                }
                return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, identifier, false, true);
            case OPEN_BRACE_TOKEN:
                // Could be a mapping-binding-pattern or a mapping-constructor.
                return parseMappingBindingPatterOrMappingConstructor();
            case ERROR_KEYWORD:
                if (getNextNextToken(nextToken.kind).kind == SyntaxKind.OPEN_PAREN_TOKEN) {
                    return parseErrorConstructorExpr();
                }
                if (peek(2).kind == SyntaxKind.IDENTIFIER_TOKEN) {
                    return parseErrorBindingPattern();
                }
                return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
            case ELLIPSIS_TOKEN:
                return parseListBindingPatternMember();
            case XML_KEYWORD:
            case STRING_KEYWORD:
                // Backtick means a template expression; otherwise the xml/string type.
                if (getNextNextToken(nextToken.kind).kind == SyntaxKind.BACKTICK_TOKEN) {
                    return parseExpression(false);
                }
                return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
            case TABLE_KEYWORD:
            case STREAM_KEYWORD:
                // '<' means a table/stream type parameter; otherwise an expression.
                if (getNextNextToken(nextToken.kind).kind == SyntaxKind.LT_TOKEN) {
                    return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
                }
                return parseExpression(false);
            case OPEN_PAREN_TOKEN:
                return parseTypeDescOrExpr();
            default:
                if (isValidExpressionStart(nextToken.kind, 1)) {
                    return parseExpression(false);
                }
                if (isTypeStartingToken(nextToken.kind)) {
                    return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
                }
                recover(nextToken, ParserRuleContext.STMT_START_BRACKETED_LIST_MEMBER);
                return parseStatementStartBracketedListMember();
        }
    }

    /**
     * Continue parsing a construct that is either a tuple-type-desc or a list-constructor,
     * given its first (still ambiguous) member.
     *
     * @param annots Annotations attached to the statement
     * @param openBracket Open bracket
     * @param memberList Members parsed so far
     * @param member Current member
     * @param isRoot Is this the root of the list
     * @return Parsed node
     */
    private STNode parseAsTupleTypeDescOrListConstructor(STNode annots, STNode openBracket, List<STNode> memberList,
                                                         STNode member, boolean isRoot) {
        memberList.add(member);
        STNode memberEnd = parseBracketedListMemberEnd();

        STNode tupleTypeDescOrListCons;
        if (memberEnd == null) {
            STNode closeBracket = parseCloseBracket();
            tupleTypeDescOrListCons =
                    parseTupleTypeDescOrListConstructorRhs(openBracket, memberList, closeBracket, isRoot);
        } else {
            memberList.add(memberEnd);
            tupleTypeDescOrListCons = parseTupleTypeDescOrListConstructor(annots, openBracket, memberList, isRoot);
        }

        return tupleTypeDescOrListCons;
    }

    /**
     * Parse tuple type desc or list constructor.
     *
     * @return Parsed node
     */
    private STNode parseTupleTypeDescOrListConstructor(STNode annots) {
        startContext(ParserRuleContext.BRACKETED_LIST);
        STNode openBracket = parseOpenBracket();
        List<STNode> memberList = new ArrayList<>();
        return parseTupleTypeDescOrListConstructor(annots, openBracket, memberList, false);
    }

    // Parse the remaining members of a tuple-type-desc/list-constructor, switching to a
    // specific parsing path once a member resolves the ambiguity.
    private STNode parseTupleTypeDescOrListConstructor(STNode annots, STNode openBracket, List<STNode> memberList,
                                                       boolean isRoot) {
        STToken nextToken = peek();
        while (!isBracketedListEnd(nextToken.kind)) {
            STNode member = parseTupleTypeDescOrListConstructorMember(annots);
            SyntaxKind currentNodeType = getParsingNodeTypeOfTupleTypeOrListCons(member);
            switch (currentNodeType) {
                case LIST_CONSTRUCTOR:
                    return parseAsListConstructor(openBracket, memberList, member, isRoot);
                case TUPLE_TYPE_DESC:
                    return parseAsTupleTypeDesc(annots, openBracket, memberList, member, isRoot);
                case TUPLE_TYPE_DESC_OR_LIST_CONST:
                default:
                    memberList.add(member);
                    break;
            }
            STNode memberEnd = parseBracketedListMemberEnd();
            if (memberEnd == null) {
                break;
            }
            memberList.add(memberEnd);
            nextToken = peek();
        }

        STNode closeBracket = parseCloseBracket();
        return parseTupleTypeDescOrListConstructorRhs(openBracket, memberList, closeBracket, isRoot);
    }

    // Parse one member that could belong to either a tuple-type-desc or a list-constructor.
    private STNode parseTupleTypeDescOrListConstructorMember(STNode annots) {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case OPEN_BRACKET_TOKEN:
                // Nested: again either a tuple-type-desc or a list-constructor.
                return parseTupleTypeDescOrListConstructor(annots);
            case IDENTIFIER_TOKEN:
                STNode identifier = parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);
                if (peek().kind == SyntaxKind.ELLIPSIS_TOKEN) {
                    STNode ellipsis = parseEllipsis();
                    return STNodeFactory.createRestDescriptorNode(identifier, ellipsis);
                }
                return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, identifier, false, false);
            case OPEN_BRACE_TOKEN:
                return parseMappingConstructorExpr();
            case ERROR_KEYWORD:
                if (getNextNextToken(nextToken.kind).kind == SyntaxKind.OPEN_PAREN_TOKEN) {
                    return parseErrorConstructorExpr();
                }
                return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
            case XML_KEYWORD:
            case STRING_KEYWORD:
                if (getNextNextToken(nextToken.kind).kind == SyntaxKind.BACKTICK_TOKEN) {
                    return parseExpression(false);
                }
                return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
            case TABLE_KEYWORD:
            case STREAM_KEYWORD:
                if (getNextNextToken(nextToken.kind).kind == SyntaxKind.LT_TOKEN) {
                    return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
                }
                return parseExpression(false);
            case OPEN_PAREN_TOKEN:
                return parseTypeDescOrExpr();
            default:
                if (isValidExpressionStart(nextToken.kind, 1)) {
                    return parseExpression(false);
                }
                if (isTypeStartingToken(nextToken.kind)) {
                    return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
                }
                recover(peek(), ParserRuleContext.TUPLE_TYPE_DESC_OR_LIST_CONST_MEMBER, annots);
                return parseTupleTypeDescOrListConstructorMember(annots);
        }
    }

    // Same classification logic as for statement-start bracketed lists.
    private SyntaxKind getParsingNodeTypeOfTupleTypeOrListCons(STNode memberNode) {
        return getStmtStartBracketedListType(memberNode);
    }

    /**
     * Decide, from the token after the close-bracket, whether the collected members form
     * a list-constructor or a tuple-type-desc, and build the matching node.
     *
     * @param openBracket Open bracket
     * @param members Collected members
     * @param closeBracket Close bracket
     * @param isRoot Is this the root of the list
     * @return Parsed node
     */
    private STNode parseTupleTypeDescOrListConstructorRhs(STNode openBracket, List<STNode> members,
                                                          STNode closeBracket, boolean isRoot) {
        STNode tupleTypeOrListConst;
        switch (peek().kind) {
            case COMMA_TOKEN:
            case CLOSE_BRACE_TOKEN:
            case CLOSE_BRACKET_TOKEN:
                if (!isRoot) {
                    // Still ambiguous at a nested level: let the parent decide.
                    endContext();
                    return new STAmbiguousCollectionNode(SyntaxKind.TUPLE_TYPE_DESC_OR_LIST_CONST, openBracket,
                            members, closeBracket);
                }
                // Intentional fall-through at the root.
            default:
                if (isValidExprRhsStart(peek().kind, closeBracket.kind) ||
                        (isRoot && peek().kind == SyntaxKind.EQUAL_TOKEN)) {
                    members = getExpressionList(members);
                    STNode memberExpressions = STNodeFactory.createNodeList(members);
                    tupleTypeOrListConst = STNodeFactory.createListConstructorExpressionNode(openBracket,
                            memberExpressions, closeBracket);
                    break;
                }

                // Otherwise treat the construct as a tuple-type-desc.
                STNode memberTypeDescs = STNodeFactory.createNodeList(getTypeDescList(members));
                STNode tupleTypeDesc =
                        STNodeFactory.createTupleTypeDescriptorNode(openBracket, memberTypeDescs, closeBracket);
                tupleTypeOrListConst =
                        parseComplexTypeDescriptor(tupleTypeDesc, ParserRuleContext.TYPE_DESC_IN_TUPLE, false);
        }

        endContext();

        if (!isRoot) {
            return tupleTypeOrListConst;
        }

        STNode annots = STNodeFactory.createEmptyNodeList();
        return parseStmtStartsWithTupleTypeOrExprRhs(annots, tupleTypeOrListConst, isRoot);
    }
class BallerinaParser extends AbstractParser { private static final OperatorPrecedence DEFAULT_OP_PRECEDENCE = OperatorPrecedence.DEFAULT; protected BallerinaParser(AbstractTokenReader tokenReader) { super(tokenReader, new BallerinaParserErrorHandler(tokenReader)); } /** * Start parsing the given input. * * @return Parsed node */ @Override public STNode parse() { return parseCompUnit(); } /** * Start parsing the input from a given context. Supported starting points are: * <ul> * <li>Module part (a file)</li> * <li>Top level node</li> * <li>Statement</li> * <li>Expression</li> * </ul> * * @param context Context to start parsing * @return Parsed node */ public STNode parse(ParserRuleContext context) { switch (context) { case COMP_UNIT: return parseCompUnit(); case TOP_LEVEL_NODE: startContext(ParserRuleContext.COMP_UNIT); return parseTopLevelNode(); case STATEMENT: startContext(ParserRuleContext.COMP_UNIT); startContext(ParserRuleContext.FUNC_BODY_BLOCK); return parseStatement(); case EXPRESSION: startContext(ParserRuleContext.COMP_UNIT); startContext(ParserRuleContext.FUNC_BODY_BLOCK); startContext(ParserRuleContext.STATEMENT); return parseExpression(); default: throw new UnsupportedOperationException("Cannot start parsing from: " + context); } } /* * Private methods. */ /** * Parse a given input and returns the AST. Starts parsing from the top of a compilation unit. 
* * @return Parsed node */ private STNode parseCompUnit() { startContext(ParserRuleContext.COMP_UNIT); List<STNode> otherDecls = new ArrayList<>(); List<STNode> importDecls = new ArrayList<>(); boolean processImports = true; STToken token = peek(); while (token.kind != SyntaxKind.EOF_TOKEN) { STNode decl = parseTopLevelNode(); if (decl == null) { break; } if (decl.kind == SyntaxKind.IMPORT_DECLARATION) { if (processImports) { importDecls.add(decl); } else { updateLastNodeInListWithInvalidNode(otherDecls, decl, DiagnosticErrorCode.ERROR_IMPORT_DECLARATION_AFTER_OTHER_DECLARATIONS); } } else { if (processImports) { processImports = false; } otherDecls.add(decl); } token = peek(); } STToken eof = consume(); endContext(); return STNodeFactory.createModulePartNode(STNodeFactory.createNodeList(importDecls), STNodeFactory.createNodeList(otherDecls), eof); } /** * Parse top level node having an optional modifier preceding it. * * @return Parsed node */ protected STNode parseTopLevelNode() { STToken nextToken = peek(); STNode metadata; switch (nextToken.kind) { case EOF_TOKEN: return null; case DOCUMENTATION_STRING: case AT_TOKEN: metadata = parseMetaData(); return parseTopLevelNode(metadata); case IMPORT_KEYWORD: case FINAL_KEYWORD: case PUBLIC_KEYWORD: case FUNCTION_KEYWORD: case TYPE_KEYWORD: case LISTENER_KEYWORD: case CONST_KEYWORD: case ANNOTATION_KEYWORD: case XMLNS_KEYWORD: case SERVICE_KEYWORD: case ENUM_KEYWORD: case TRANSACTIONAL_KEYWORD: metadata = STNodeFactory.createEmptyNode(); break; case IDENTIFIER_TOKEN: if (isModuleVarDeclStart(1)) { return parseModuleVarDecl(STNodeFactory.createEmptyNode(), null); } default: if (isTypeStartingToken(nextToken.kind) && nextToken.kind != SyntaxKind.IDENTIFIER_TOKEN) { metadata = STNodeFactory.createEmptyNode(); break; } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.TOP_LEVEL_NODE); if (solution.action == Action.KEEP) { metadata = STNodeFactory.createEmptyNodeList(); break; } return 
parseTopLevelNode(); } return parseTopLevelNode(metadata); } /** * Parse top level node having an optional modifier preceding it, given the next token kind. * * @param metadata Next token kind * @return Parsed node */ private STNode parseTopLevelNode(STNode metadata) { STToken nextToken = peek(); STNode qualifier = null; switch (nextToken.kind) { case EOF_TOKEN: if (metadata != null) { addInvalidNodeToNextToken(metadata, DiagnosticErrorCode.ERROR_INVALID_METADATA); } return null; case PUBLIC_KEYWORD: qualifier = parseQualifier(); break; case FUNCTION_KEYWORD: case TYPE_KEYWORD: case LISTENER_KEYWORD: case CONST_KEYWORD: case FINAL_KEYWORD: case IMPORT_KEYWORD: case ANNOTATION_KEYWORD: case XMLNS_KEYWORD: case ENUM_KEYWORD: case TRANSACTIONAL_KEYWORD: break; case IDENTIFIER_TOKEN: if (isModuleVarDeclStart(1)) { return parseModuleVarDecl(metadata, null); } default: if (isTypeStartingToken(nextToken.kind) && nextToken.kind != SyntaxKind.IDENTIFIER_TOKEN) { break; } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.TOP_LEVEL_NODE_WITHOUT_METADATA, metadata); if (solution.action == Action.KEEP) { qualifier = STNodeFactory.createEmptyNode(); break; } return parseTopLevelNode(metadata); } return parseTopLevelNode(metadata, qualifier); } /** * Check whether the cursor is at the start of a module level var-decl. * * @param lookahead Offset of the token to to check * @return <code>true</code> if the cursor is at the start of a module level var-decl. * <code>false</code> otherwise. 
*/ private boolean isModuleVarDeclStart(int lookahead) { STToken nextToken = peek(lookahead + 1); switch (nextToken.kind) { case EQUAL_TOKEN: case OPEN_BRACKET_TOKEN: case QUESTION_MARK_TOKEN: case PIPE_TOKEN: case BITWISE_AND_TOKEN: case OPEN_BRACE_TOKEN: case ERROR_KEYWORD: case EOF_TOKEN: return true; case IDENTIFIER_TOKEN: switch (peek(lookahead + 2).kind) { case EQUAL_TOKEN: case SEMICOLON_TOKEN: case EOF_TOKEN: return true; default: return false; } case COLON_TOKEN: if (lookahead > 1) { return false; } switch (peek(lookahead + 2).kind) { case IDENTIFIER_TOKEN: return isModuleVarDeclStart(lookahead + 2); case EOF_TOKEN: return true; default: return false; } default: return false; } } /** * Parse import declaration. * <p> * <code>import-decl := import [org-name /] module-name [version sem-ver] [as import-prefix] ;</code> * * @return Parsed node */ private STNode parseImportDecl() { startContext(ParserRuleContext.IMPORT_DECL); this.tokenReader.startMode(ParserMode.IMPORT); STNode importKeyword = parseImportKeyword(); STNode identifier = parseIdentifier(ParserRuleContext.IMPORT_ORG_OR_MODULE_NAME); STNode importDecl = parseImportDecl(importKeyword, identifier); this.tokenReader.endMode(); endContext(); return importDecl; } /** * Parse import keyword. * * @return Parsed node */ private STNode parseImportKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.IMPORT_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.IMPORT_KEYWORD); return parseImportKeyword(); } } /** * Parse identifier. 
* * @return Parsed node */ private STNode parseIdentifier(ParserRuleContext currentCtx) { STToken token = peek(); if (token.kind == SyntaxKind.IDENTIFIER_TOKEN) { return consume(); } else if (token.kind == SyntaxKind.MAP_KEYWORD) { STToken mapKeyword = consume(); return STNodeFactory.createIdentifierToken(mapKeyword.text(), mapKeyword.leadingMinutiae(), mapKeyword.trailingMinutiae(), mapKeyword.diagnostics()); } else { recover(token, currentCtx); return parseIdentifier(currentCtx); } } /** * Parse RHS of the import declaration. This includes the components after the * starting identifier (org-name/module-name) of the import decl. * * @param importKeyword Import keyword * @param identifier Org-name or the module name * @return Parsed node */ private STNode parseImportDecl(STNode importKeyword, STNode identifier) { STToken nextToken = peek(); STNode orgName; STNode moduleName; STNode version; STNode alias; switch (nextToken.kind) { case SLASH_TOKEN: STNode slash = parseSlashToken(); orgName = STNodeFactory.createImportOrgNameNode(identifier, slash); moduleName = parseModuleName(); version = parseVersion(); alias = parseImportPrefixDecl(); break; case DOT_TOKEN: case VERSION_KEYWORD: orgName = STNodeFactory.createEmptyNode(); moduleName = parseModuleName(identifier); version = parseVersion(); alias = parseImportPrefixDecl(); break; case AS_KEYWORD: orgName = STNodeFactory.createEmptyNode(); moduleName = parseModuleName(identifier); version = STNodeFactory.createEmptyNode(); alias = parseImportPrefixDecl(); break; case SEMICOLON_TOKEN: orgName = STNodeFactory.createEmptyNode(); moduleName = parseModuleName(identifier); version = STNodeFactory.createEmptyNode(); alias = STNodeFactory.createEmptyNode(); break; default: recover(peek(), ParserRuleContext.IMPORT_DECL_RHS, importKeyword, identifier); return parseImportDecl(importKeyword, identifier); } STNode semicolon = parseSemicolon(); return STNodeFactory.createImportDeclarationNode(importKeyword, orgName, moduleName, 
version, alias, semicolon); } /** * parse slash token. * * @return Parsed node */ private STNode parseSlashToken() { STToken token = peek(); if (token.kind == SyntaxKind.SLASH_TOKEN) { return consume(); } else { recover(token, ParserRuleContext.SLASH); return parseSlashToken(); } } /** * Parse dot token. * * @return Parsed node */ private STNode parseDotToken() { STToken token = peek(); if (token.kind == SyntaxKind.DOT_TOKEN) { return consume(); } else { recover(peek(), ParserRuleContext.DOT); return parseDotToken(); } } /** * Parse module name of a import declaration. * * @return Parsed node */ private STNode parseModuleName() { STNode moduleNameStart = parseIdentifier(ParserRuleContext.IMPORT_MODULE_NAME); return parseModuleName(moduleNameStart); } /** * Parse import module name of a import declaration, given the module name start identifier. * * @return Parsed node */ private STNode parseModuleName(STNode moduleNameStart) { List<STNode> moduleNameParts = new ArrayList<>(); moduleNameParts.add(moduleNameStart); STToken nextToken = peek(); while (!isEndOfImportModuleName(nextToken)) { moduleNameParts.add(parseDotToken()); moduleNameParts.add(parseIdentifier(ParserRuleContext.IMPORT_MODULE_NAME)); nextToken = peek(); } return STNodeFactory.createNodeList(moduleNameParts); } private boolean isEndOfImportModuleName(STToken nextToken) { return nextToken.kind != SyntaxKind.DOT_TOKEN && nextToken.kind != SyntaxKind.IDENTIFIER_TOKEN; } private boolean isEndOfImportDecl(STToken nextToken) { switch (nextToken.kind) { case SEMICOLON_TOKEN: case PUBLIC_KEYWORD: case FUNCTION_KEYWORD: case TYPE_KEYWORD: case ABSTRACT_KEYWORD: case CONST_KEYWORD: case EOF_TOKEN: case SERVICE_KEYWORD: case IMPORT_KEYWORD: case FINAL_KEYWORD: case TRANSACTIONAL_KEYWORD: return true; default: return false; } } /** * Parse version component of a import declaration. 
* <p> * <code>version-decl := version sem-ver</code> * * @return Parsed node */ private STNode parseVersion() { STToken nextToken = peek(); switch (nextToken.kind) { case VERSION_KEYWORD: STNode versionKeyword = parseVersionKeyword(); STNode versionNumber = parseVersionNumber(); return STNodeFactory.createImportVersionNode(versionKeyword, versionNumber); case AS_KEYWORD: case SEMICOLON_TOKEN: return STNodeFactory.createEmptyNode(); default: if (isEndOfImportDecl(nextToken)) { return STNodeFactory.createEmptyNode(); } recover(peek(), ParserRuleContext.IMPORT_VERSION_DECL); return parseVersion(); } } /** * Parse version keyword. * * @return Parsed node */ private STNode parseVersionKeyword() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.VERSION_KEYWORD) { return consume(); } else { recover(peek(), ParserRuleContext.VERSION_KEYWORD); return parseVersionKeyword(); } } /** * Parse version number. * <p> * <code>sem-ver := major-num [. minor-num [. patch-num]] * <br/> * major-num := DecimalNumber * <br/> * minor-num := DecimalNumber * <br/> * patch-num := DecimalNumber * </code> * * @return Parsed node */ private STNode parseVersionNumber() { STToken nextToken = peek(); STNode majorVersion; switch (nextToken.kind) { case DECIMAL_INTEGER_LITERAL_TOKEN: majorVersion = parseMajorVersion(); break; default: recover(peek(), ParserRuleContext.VERSION_NUMBER); return parseVersionNumber(); } List<STNode> versionParts = new ArrayList<>(); versionParts.add(majorVersion); STNode minorVersionEnd = parseSubVersionEnd(); if (minorVersionEnd != null) { versionParts.add(minorVersionEnd); STNode minorVersion = parseMinorVersion(); versionParts.add(minorVersion); STNode patchVersionEnd = parseSubVersionEnd(); if (patchVersionEnd != null) { versionParts.add(patchVersionEnd); STNode patchVersion = parsePatchVersion(); versionParts.add(patchVersion); } } return STNodeFactory.createNodeList(versionParts); } private STNode parseMajorVersion() { return 
parseDecimalIntLiteral(ParserRuleContext.MAJOR_VERSION); } private STNode parseMinorVersion() { return parseDecimalIntLiteral(ParserRuleContext.MINOR_VERSION); } private STNode parsePatchVersion() { return parseDecimalIntLiteral(ParserRuleContext.PATCH_VERSION); } /** * Parse decimal literal. * * @param context Context in which the decimal literal is used. * @return Parsed node */ private STNode parseDecimalIntLiteral(ParserRuleContext context) { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.DECIMAL_INTEGER_LITERAL_TOKEN) { return consume(); } else { recover(peek(), context); return parseDecimalIntLiteral(context); } } private STNode parseSubVersionEnd() { STToken nextToken = peek(); switch (nextToken.kind) { case AS_KEYWORD: case SEMICOLON_TOKEN: case EOF_TOKEN: return null; case DOT_TOKEN: return parseDotToken(); default: recover(nextToken, ParserRuleContext.IMPORT_SUB_VERSION); return parseSubVersionEnd(); } } /** * Parse import prefix declaration. * <p> * <code>import-prefix-decl := as import-prefix * <br/> * import-prefix := a identifier | _ * </code> * * @return Parsed node */ private STNode parseImportPrefixDecl() { STToken nextToken = peek(); switch (nextToken.kind) { case AS_KEYWORD: STNode asKeyword = parseAsKeyword(); STNode prefix = parseImportPrefix(); return STNodeFactory.createImportPrefixNode(asKeyword, prefix); case SEMICOLON_TOKEN: return STNodeFactory.createEmptyNode(); default: if (isEndOfImportDecl(nextToken)) { return STNodeFactory.createEmptyNode(); } recover(peek(), ParserRuleContext.IMPORT_PREFIX_DECL); return parseImportPrefixDecl(); } } /** * Parse <code>as</code> keyword. * * @return Parsed node */ private STNode parseAsKeyword() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.AS_KEYWORD) { return consume(); } else { recover(peek(), ParserRuleContext.AS_KEYWORD); return parseAsKeyword(); } } /** * Parse import prefix. 
* * @return Parsed node */ private STNode parseImportPrefix() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) { return consume(); } else { recover(peek(), ParserRuleContext.IMPORT_PREFIX); return parseImportPrefix(); } } /** * Parse top level node, given the modifier that precedes it. * * @param qualifier Qualifier that precedes the top level node * @return Parsed node */ private STNode parseTopLevelNode(STNode metadata, STNode qualifier) { STToken nextToken = peek(); switch (nextToken.kind) { case EOF_TOKEN: reportInvalidQualifier(qualifier); return null; case FUNCTION_KEYWORD: case TRANSACTIONAL_KEYWORD: List<STNode> qualifiers = new ArrayList<>(); if (qualifier != null) { qualifiers.add(qualifier); } return parseFuncDefOrFuncTypeDesc(ParserRuleContext.TOP_LEVEL_FUNC_DEF_OR_FUNC_TYPE_DESC, metadata, false, qualifiers); case TYPE_KEYWORD: return parseModuleTypeDefinition(metadata, getQualifier(qualifier)); case LISTENER_KEYWORD: return parseListenerDeclaration(metadata, getQualifier(qualifier)); case CONST_KEYWORD: return parseConstantDeclaration(metadata, getQualifier(qualifier)); case ANNOTATION_KEYWORD: STNode constKeyword = STNodeFactory.createEmptyNode(); return parseAnnotationDeclaration(metadata, getQualifier(qualifier), constKeyword); case IMPORT_KEYWORD: reportInvalidQualifier(qualifier); return parseImportDecl(); case XMLNS_KEYWORD: reportInvalidQualifier(qualifier); return parseXMLNamespaceDeclaration(true); case FINAL_KEYWORD: reportInvalidQualifier(qualifier); STNode finalKeyword = parseFinalKeyword(); return parseVariableDecl(metadata, finalKeyword, true); case SERVICE_KEYWORD: if (isServiceDeclStart(ParserRuleContext.TOP_LEVEL_NODE, 1)) { reportInvalidQualifier(qualifier); return parseServiceDecl(metadata); } return parseModuleVarDecl(metadata, qualifier); case ENUM_KEYWORD: return parseEnumDeclaration(metadata, getQualifier(qualifier)); case IDENTIFIER_TOKEN: if (isModuleVarDeclStart(1)) { return 
parseModuleVarDecl(metadata, qualifier); } default: if (isTypeStartingToken(nextToken.kind) && nextToken.kind != SyntaxKind.IDENTIFIER_TOKEN) { return parseModuleVarDecl(metadata, qualifier); } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.TOP_LEVEL_NODE_WITHOUT_MODIFIER, metadata, qualifier); if (solution.action == Action.KEEP) { return parseModuleVarDecl(metadata, qualifier); } return parseTopLevelNode(metadata, qualifier); } } private STNode parseModuleVarDecl(STNode metadata, STNode qualifier) { reportInvalidQualifier(qualifier); STNode finalKeyword = STNodeFactory.createEmptyNode(); return parseVariableDecl(metadata, finalKeyword, true); } private STNode getQualifier(STNode qualifier) { return qualifier == null ? STNodeFactory.createEmptyNode() : qualifier; } private void reportInvalidQualifier(STNode qualifier) { if (qualifier != null && qualifier.kind != SyntaxKind.NONE) { addInvalidNodeToNextToken(qualifier, DiagnosticErrorCode.ERROR_INVALID_QUALIFIER, qualifier.toString().trim()); } } /** * Parse access modifiers. * * @return Parsed node */ private STNode parseQualifier() { STToken token = peek(); if (token.kind == SyntaxKind.PUBLIC_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.PUBLIC_KEYWORD); return parseQualifier(); } } private STNode parseFuncDefinition(STNode metadata, boolean isObjectMethod, STNode qualifiers) { startContext(ParserRuleContext.FUNC_DEF); STNode functionKeyword = parseFunctionKeyword(); STNode funcDef = parseFunctionKeywordRhs(metadata, functionKeyword, true, isObjectMethod, qualifiers); return funcDef; } /** * Parse function definition for the function type descriptor. 
* <p> * <code> * function-defn := FUNCTION identifier function-signature function-body * <br/> * function-type-descriptor := function function-signature * </code> * * @param metadata Metadata * @param qualifiers qualifier list * @return Parsed node */ private STNode parseFuncDefOrFuncTypeDesc(ParserRuleContext context, STNode metadata, boolean isObjectMember, List<STNode> qualifiers) { STNode qualifierList = parseFunctionQualifiers(context, qualifiers); return parseFuncDefOrFuncTypeDesc(metadata, isObjectMember, qualifierList); } private STNode parseFuncDefOrFuncTypeDesc(STNode metadata, boolean isObjectMember, STNode qualifiers) { startContext(ParserRuleContext.FUNC_DEF_OR_FUNC_TYPE); STNode functionKeyword = parseFunctionKeyword(); STNode funcDefOrType = parseFunctionKeywordRhs(metadata, functionKeyword, false, isObjectMember, qualifiers); return funcDefOrType; } private STNode parseFunctionKeywordRhs(STNode metadata, STNode functionKeyword, boolean isFuncDef, boolean isObjectMember, STNode qualifiers) { if (isFuncDef) { STNode name = parseFunctionName(); switchContext(ParserRuleContext.FUNC_DEF); STNode funcSignature = parseFuncSignature(false); STNode funcDef = createFuncDefOrMethodDecl(metadata, functionKeyword, isObjectMember, name, funcSignature, qualifiers); endContext(); return funcDef; } return parseFunctionKeywordRhs(metadata, functionKeyword, isObjectMember, qualifiers); } private STNode parseFunctionKeywordRhs(STNode metadata, STNode functionKeyword, boolean isObjectMember, STNode qualifiers) { switch (peek().kind) { case IDENTIFIER_TOKEN: STNode name = parseFunctionName(); switchContext(ParserRuleContext.FUNC_DEF); STNode funcSignature = parseFuncSignature(false); STNode funcDef = createFuncDefOrMethodDecl(metadata, functionKeyword, isObjectMember, name, funcSignature, qualifiers); endContext(); return funcDef; case OPEN_PAREN_TOKEN: funcSignature = parseFuncSignature(true); return parseReturnTypeDescRhs(metadata, functionKeyword, funcSignature, 
isObjectMember, qualifiers); default: STToken token = peek(); recover(token, ParserRuleContext.FUNCTION_KEYWORD_RHS, metadata, functionKeyword, isObjectMember, qualifiers); return parseFunctionKeywordRhs(metadata, functionKeyword, isObjectMember, qualifiers); } } private STNode createFuncDefOrMethodDecl(STNode metadata, STNode functionKeyword, boolean isObjectMethod, STNode name, STNode funcSignature, STNode qualifierList) { STNode body = parseFunctionBody(isObjectMethod); if (body.kind == SyntaxKind.SEMICOLON_TOKEN) { return STNodeFactory.createMethodDeclarationNode(metadata, qualifierList, functionKeyword, name, funcSignature, body); } if (isObjectMethod) { return STNodeFactory.createFunctionDefinitionNode(SyntaxKind.OBJECT_METHOD_DEFINITION, metadata, qualifierList, functionKeyword, name, funcSignature, body); } return STNodeFactory.createFunctionDefinitionNode(SyntaxKind.FUNCTION_DEFINITION, metadata, qualifierList, functionKeyword, name, funcSignature, body); } /** * Parse function signature. 
* <p> * <code> * function-signature := ( param-list ) return-type-descriptor * <br/> * return-type-descriptor := [ returns [annots] type-descriptor ] * </code> * * @param isParamNameOptional Whether the parameter names are optional * @return Function signature node */ private STNode parseFuncSignature(boolean isParamNameOptional) { STNode openParenthesis = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS); STNode parameters = parseParamList(isParamNameOptional); STNode closeParenthesis = parseCloseParenthesis(); endContext(); STNode returnTypeDesc = parseFuncReturnTypeDescriptor(); return STNodeFactory.createFunctionSignatureNode(openParenthesis, parameters, closeParenthesis, returnTypeDesc); } private STNode parseReturnTypeDescRhs(STNode metadata, STNode functionKeyword, STNode funcSignature, boolean isObjectMember, STNode qualifiers) { STToken nextToken = peek(); switch (nextToken.kind) { case SEMICOLON_TOKEN: case IDENTIFIER_TOKEN: case OPEN_BRACKET_TOKEN: endContext(); STNode typeDesc = STNodeFactory.createFunctionTypeDescriptorNode(functionKeyword, funcSignature); return parseVarDeclWithFunctionType(typeDesc, isObjectMember, qualifiers, metadata); case OPEN_BRACE_TOKEN: case EQUAL_TOKEN: break; default: if (isValidTypeContinuationToken(nextToken)) { endContext(); typeDesc = STNodeFactory.createFunctionTypeDescriptorNode(functionKeyword, funcSignature); typeDesc = parseComplexTypeDescriptor(typeDesc, ParserRuleContext.TOP_LEVEL_FUNC_DEF_OR_FUNC_TYPE_DESC, false); return parseVarDeclWithFunctionType(typeDesc, isObjectMember, qualifiers, metadata); } break; } STNode name = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN, DiagnosticErrorCode.ERROR_MISSING_FUNCTION_NAME); funcSignature = validateAndGetFuncParams((STFunctionSignatureNode) funcSignature); STNode funcDef = createFuncDefOrMethodDecl(metadata, functionKeyword, isObjectMember, name, funcSignature, qualifiers); endContext(); return funcDef; } private STNode 
parseVarDeclWithFunctionType(STNode typeDesc, boolean isObjectMember, STNode qualifiers, STNode metadata) { STNodeList qualifierList = (STNodeList) qualifiers; STNode visibilityQualifier = STNodeFactory.createEmptyNode(); for (int position = 0; position < qualifierList.size(); position++) { STNode qualifier = qualifierList.get(position); if (isObjectMember && isVisibilityQualifier(qualifier)) { visibilityQualifier = qualifier; } else { typeDesc = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(typeDesc, qualifier, DiagnosticErrorCode.ERROR_QUALIFIER_NOT_ALLOWED); } } if (isObjectMember) { STNode readonlyQualifier = STNodeFactory.createEmptyNode(); STNode fieldName = parseVariableName(); return parseObjectFieldRhs(metadata, visibilityQualifier, readonlyQualifier, typeDesc, fieldName); } startContext(ParserRuleContext.VAR_DECL_STMT); STNode typedBindingPattern = parseTypedBindingPatternTypeRhs(typeDesc, ParserRuleContext.VAR_DECL_STMT); return parseVarDeclRhs(metadata, STNodeFactory.createEmptyNode(), typedBindingPattern, true); } private boolean isVisibilityQualifier(STNode qualifier) { switch (qualifier.kind) { case PUBLIC_KEYWORD: case PRIVATE_KEYWORD: return true; default: return false; } } /** * Validate the param list and return. If there are params without param-name, * then this method will create a new set of params with missing param-name * and return. 
* * @param signature Function signature * @return */ private STNode validateAndGetFuncParams(STFunctionSignatureNode signature) { STNode parameters = signature.parameters; int paramCount = parameters.bucketCount(); int index = 0; for (; index < paramCount; index++) { STNode param = parameters.childInBucket(index); switch (param.kind) { case REQUIRED_PARAM: STRequiredParameterNode requiredParam = (STRequiredParameterNode) param; if (isEmpty(requiredParam.paramName)) { break; } continue; case DEFAULTABLE_PARAM: STDefaultableParameterNode defaultableParam = (STDefaultableParameterNode) param; if (isEmpty(defaultableParam.paramName)) { break; } continue; case REST_PARAM: STRestParameterNode restParam = (STRestParameterNode) param; if (isEmpty(restParam.paramName)) { break; } continue; default: continue; } break; } if (index == paramCount) { return signature; } STNode updatedParams = getUpdatedParamList(parameters, index); return STNodeFactory.createFunctionSignatureNode(signature.openParenToken, updatedParams, signature.closeParenToken, signature.returnTypeDesc); } private STNode getUpdatedParamList(STNode parameters, int index) { int paramCount = parameters.bucketCount(); int newIndex = 0; ArrayList<STNode> newParams = new ArrayList<>(); for (; newIndex < index; newIndex++) { newParams.add(parameters.childInBucket(index)); } for (; newIndex < paramCount; newIndex++) { STNode param = parameters.childInBucket(newIndex); STNode paramName = STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN); switch (param.kind) { case REQUIRED_PARAM: STRequiredParameterNode requiredParam = (STRequiredParameterNode) param; if (isEmpty(requiredParam.paramName)) { param = STNodeFactory .createRequiredParameterNode(requiredParam.annotations, requiredParam.typeName, paramName); } break; case DEFAULTABLE_PARAM: STDefaultableParameterNode defaultableParam = (STDefaultableParameterNode) param; if (isEmpty(defaultableParam.paramName)) { param = STNodeFactory 
.createDefaultableParameterNode(defaultableParam.annotations, defaultableParam.typeName, paramName, defaultableParam.equalsToken, defaultableParam.expression); } break; case REST_PARAM: STRestParameterNode restParam = (STRestParameterNode) param; if (isEmpty(restParam.paramName)) { param = STNodeFactory.createRestParameterNode(restParam.annotations, restParam.typeName, restParam.ellipsisToken, paramName); } break; default: break; } newParams.add(param); } return STNodeFactory.createNodeList(newParams); } private boolean isEmpty(STNode node) { return !SyntaxUtils.isSTNodePresent(node); } /** * Parse function keyword. Need to validate the token before consuming, * since we can reach here while recovering. * * @return Parsed node */ private STNode parseFunctionKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.FUNCTION_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.FUNCTION_KEYWORD); return parseFunctionKeyword(); } } /** * Parse function name. * * @return Parsed node */ private STNode parseFunctionName() { STToken token = peek(); if (token.kind == SyntaxKind.IDENTIFIER_TOKEN) { return consume(); } else { recover(token, ParserRuleContext.FUNC_NAME); return parseFunctionName(); } } /** * Parse open parenthesis. * * @param ctx Context of the parenthesis * @return Parsed node */ private STNode parseOpenParenthesis(ParserRuleContext ctx) { STToken token = peek(); if (token.kind == SyntaxKind.OPEN_PAREN_TOKEN) { return consume(); } else { recover(token, ctx); return parseOpenParenthesis(ctx); } } /** * Parse close parenthesis. * * @return Parsed node */ private STNode parseCloseParenthesis() { STToken token = peek(); if (token.kind == SyntaxKind.CLOSE_PAREN_TOKEN) { return consume(); } else { recover(token, ParserRuleContext.CLOSE_PARENTHESIS); return parseCloseParenthesis(); } } /** * <p> * Parse parameter list. 
* </p> * <code> * param-list := required-params [, defaultable-params] [, rest-param] * <br/>&nbsp;| defaultable-params [, rest-param] * <br/>&nbsp;| [rest-param] * <br/><br/> * required-params := required-param (, required-param)* * <br/><br/> * required-param := [annots] [public] type-descriptor [param-name] * <br/><br/> * defaultable-params := defaultable-param (, defaultable-param)* * <br/><br/> * defaultable-param := [annots] [public] type-descriptor [param-name] default-value * <br/><br/> * rest-param := [annots] type-descriptor ... [param-name] * <br/><br/> * param-name := identifier * </code> * * @param isParamNameOptional Whether the param names in the signature is optional or not. * @return Parsed node */ private STNode parseParamList(boolean isParamNameOptional) { startContext(ParserRuleContext.PARAM_LIST); STToken token = peek(); if (isEndOfParametersList(token.kind)) { return STNodeFactory.createEmptyNodeList(); } ArrayList<STNode> paramsList = new ArrayList<>(); startContext(ParserRuleContext.REQUIRED_PARAM); STNode firstParam = parseParameter(SyntaxKind.REQUIRED_PARAM, isParamNameOptional); SyntaxKind prevParamKind = firstParam.kind; paramsList.add(firstParam); boolean paramOrderErrorPresent = false; token = peek(); while (!isEndOfParametersList(token.kind)) { STNode paramEnd = parseParameterRhs(); if (paramEnd == null) { break; } endContext(); if (prevParamKind == SyntaxKind.DEFAULTABLE_PARAM) { startContext(ParserRuleContext.DEFAULTABLE_PARAM); } else { startContext(ParserRuleContext.REQUIRED_PARAM); } STNode param = parseParameter(prevParamKind, isParamNameOptional); if (paramOrderErrorPresent) { updateLastNodeInListWithInvalidNode(paramsList, paramEnd, null); updateLastNodeInListWithInvalidNode(paramsList, param, null); } else { DiagnosticCode paramOrderError = validateParamOrder(param, prevParamKind); if (paramOrderError == null) { paramsList.add(paramEnd); paramsList.add(param); } else { paramOrderErrorPresent = true; 
updateLastNodeInListWithInvalidNode(paramsList, paramEnd, paramOrderError); updateLastNodeInListWithInvalidNode(paramsList, param, null); } } prevParamKind = param.kind; token = peek(); } endContext(); return STNodeFactory.createNodeList(paramsList); } /** * Return the appropriate {@code DiagnosticCode} if there are parameter order issues. * * @param param the new parameter * @param prevParamKind the SyntaxKind of the previously added parameter */ private DiagnosticCode validateParamOrder(STNode param, SyntaxKind prevParamKind) { if (prevParamKind == SyntaxKind.REST_PARAM) { return DiagnosticErrorCode.ERROR_PARAMETER_AFTER_THE_REST_PARAMETER; } else if (prevParamKind == SyntaxKind.DEFAULTABLE_PARAM && param.kind == SyntaxKind.REQUIRED_PARAM) { return DiagnosticErrorCode.ERROR_REQUIRED_PARAMETER_AFTER_THE_DEFAULTABLE_PARAMETER; } else { return null; } } private boolean isNodeWithSyntaxKindInList(List<STNode> nodeList, SyntaxKind kind) { for (STNode node : nodeList) { if (node.kind == kind) { return true; } } return false; } private STNode parseParameterRhs() { return parseParameterRhs(peek().kind); } private STNode parseParameterRhs(SyntaxKind tokenKind) { switch (tokenKind) { case COMMA_TOKEN: return parseComma(); case CLOSE_PAREN_TOKEN: return null; default: recover(peek(), ParserRuleContext.PARAM_END); return parseParameterRhs(); } } /** * Parse a single parameter. Parameter can be a required parameter, a defaultable * parameter, or a rest parameter. * * @param prevParamKind Kind of the parameter that precedes current parameter * @param isParamNameOptional Whether the param names in the signature is optional or not. 
* @return Parsed node */ private STNode parseParameter(SyntaxKind prevParamKind, boolean isParamNameOptional) { STNode annots; STToken nextToken = peek(); switch (nextToken.kind) { case AT_TOKEN: annots = parseOptionalAnnotations(); break; case IDENTIFIER_TOKEN: annots = STNodeFactory.createEmptyNodeList(); break; default: if (isTypeStartingToken(nextToken.kind)) { annots = STNodeFactory.createEmptyNodeList(); break; } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.PARAMETER_START, prevParamKind, isParamNameOptional); if (solution.action == Action.KEEP) { annots = STNodeFactory.createEmptyNodeList(); break; } return parseParameter(prevParamKind, isParamNameOptional); } STNode type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER); STNode param = parseAfterParamType(prevParamKind, annots, type, isParamNameOptional); return param; } private STNode parseAfterParamType(SyntaxKind prevParamKind, STNode annots, STNode type, boolean isParamNameOptional) { STNode paramName; STToken token = peek(); switch (token.kind) { case ELLIPSIS_TOKEN: switchContext(ParserRuleContext.REST_PARAM); STNode ellipsis = parseEllipsis(); if (isParamNameOptional && peek().kind != SyntaxKind.IDENTIFIER_TOKEN) { paramName = STNodeFactory.createEmptyNode(); } else { paramName = parseVariableName(); } return STNodeFactory.createRestParameterNode(annots, type, ellipsis, paramName); case IDENTIFIER_TOKEN: paramName = parseVariableName(); return parseParameterRhs(prevParamKind, annots, type, paramName); case EQUAL_TOKEN: if (!isParamNameOptional) { break; } paramName = STNodeFactory.createEmptyNode(); return parseParameterRhs(prevParamKind, annots, type, paramName); default: if (!isParamNameOptional) { break; } paramName = STNodeFactory.createEmptyNode(); return parseParameterRhs(prevParamKind, annots, type, paramName); } recover(token, ParserRuleContext.AFTER_PARAMETER_TYPE, prevParamKind, annots, type, isParamNameOptional); return 
parseAfterParamType(prevParamKind, annots, type, isParamNameOptional); } /** * Parse ellipsis. * * @return Parsed node */ private STNode parseEllipsis() { STToken token = peek(); if (token.kind == SyntaxKind.ELLIPSIS_TOKEN) { return consume(); } else { recover(token, ParserRuleContext.ELLIPSIS); return parseEllipsis(); } } /** * <p> * Parse the right hand side of a required/defaultable parameter. * </p> * <code>parameter-rhs := [= expression]</code> * * @param prevParamKind Kind of the parameter that precedes current parameter * @param annots Annotations attached to the parameter * @param type Type descriptor * @param paramName Name of the parameter * @return Parsed parameter node */ private STNode parseParameterRhs(SyntaxKind prevParamKind, STNode annots, STNode type, STNode paramName) { STToken nextToken = peek(); if (isEndOfParameter(nextToken.kind)) { return STNodeFactory.createRequiredParameterNode(annots, type, paramName); } else if (nextToken.kind == SyntaxKind.EQUAL_TOKEN) { if (prevParamKind == SyntaxKind.REQUIRED_PARAM) { switchContext(ParserRuleContext.DEFAULTABLE_PARAM); } STNode equal = parseAssignOp(); STNode expr = parseExpression(); return STNodeFactory.createDefaultableParameterNode(annots, type, paramName, equal, expr); } else { recover(nextToken, ParserRuleContext.PARAMETER_NAME_RHS, prevParamKind, annots, type, paramName); return parseParameterRhs(prevParamKind, annots, type, paramName); } } /** * Parse comma. * * @return Parsed node */ private STNode parseComma() { STToken token = peek(); if (token.kind == SyntaxKind.COMMA_TOKEN) { return consume(); } else { recover(token, ParserRuleContext.COMMA); return parseComma(); } } /** * Parse return type descriptor of a function. A return type descriptor has the following structure. 
* * <code>return-type-descriptor := [ returns annots type-descriptor ]</code> * * @return Parsed node */ private STNode parseFuncReturnTypeDescriptor() { STToken nextToken = peek(); switch (nextToken.kind) { case OPEN_BRACE_TOKEN: case EQUAL_TOKEN: return STNodeFactory.createEmptyNode(); case RETURNS_KEYWORD: break; default: STToken nextNextToken = getNextNextToken(nextToken.kind); if (nextNextToken.kind == SyntaxKind.RETURNS_KEYWORD) { break; } return STNodeFactory.createEmptyNode(); } STNode returnsKeyword = parseReturnsKeyword(); STNode annot = parseOptionalAnnotations(); STNode type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_RETURN_TYPE_DESC); return STNodeFactory.createReturnTypeDescriptorNode(returnsKeyword, annot, type); } /** * Parse 'returns' keyword. * * @return Return-keyword node */ private STNode parseReturnsKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.RETURNS_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.RETURNS_KEYWORD); return parseReturnsKeyword(); } } /** * <p> * Parse a type descriptor. A type descriptor has the following structure. 
* </p> * <code>type-descriptor := * &nbsp;simple-type-descriptor<br/> * &nbsp;| structured-type-descriptor<br/> * &nbsp;| behavioral-type-descriptor<br/> * &nbsp;| singleton-type-descriptor<br/> * &nbsp;| union-type-descriptor<br/> * &nbsp;| optional-type-descriptor<br/> * &nbsp;| any-type-descriptor<br/> * &nbsp;| anydata-type-descriptor<br/> * &nbsp;| byte-type-descriptor<br/> * &nbsp;| json-type-descriptor<br/> * &nbsp;| type-descriptor-reference<br/> * &nbsp;| ( type-descriptor ) * <br/> * type-descriptor-reference := qualified-identifier</code> * * @return Parsed node */ private STNode parseTypeDescriptor(ParserRuleContext context) { return parseTypeDescriptor(context, false, false); } private STNode parseTypeDescriptorInExpression(ParserRuleContext context, boolean isInConditionalExpr) { return parseTypeDescriptor(context, false, isInConditionalExpr); } private STNode parseTypeDescriptor(ParserRuleContext context, boolean isTypedBindingPattern, boolean isInConditionalExpr) { startContext(context); STNode typeDesc = parseTypeDescriptorInternal(context, isTypedBindingPattern, isInConditionalExpr); endContext(); return typeDesc; } private STNode parseTypeDescriptorWithoutContext(ParserRuleContext context, boolean isInConditionalExpr) { return parseTypeDescriptorInternal(context, false, isInConditionalExpr); } private STNode parseTypeDescriptorInternal(ParserRuleContext context, boolean isTypedBindingPattern, boolean isInConditionalExpr) { STNode typeDesc = parseTypeDescriptorInternal(context, isInConditionalExpr); if (typeDesc.kind == SyntaxKind.VAR_TYPE_DESC && context != ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN) { STToken missingToken = STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN); missingToken = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(missingToken, typeDesc, DiagnosticErrorCode.ERROR_INVALID_USAGE_OF_VAR); typeDesc = STNodeFactory.createSimpleNameReferenceNode(missingToken); } return parseComplexTypeDescriptor(typeDesc, 
context, isTypedBindingPattern); } /** * This will handle the parsing of optional,array,union type desc to infinite length. * * @param typeDesc * @return Parsed type descriptor node */ private STNode parseComplexTypeDescriptor(STNode typeDesc, ParserRuleContext context, boolean isTypedBindingPattern) { STToken nextToken = peek(); switch (nextToken.kind) { case QUESTION_MARK_TOKEN: if (context == ParserRuleContext.TYPE_DESC_IN_EXPRESSION && !isValidTypeContinuationToken(getNextNextToken(nextToken.kind)) && isValidExprStart(getNextNextToken(nextToken.kind).kind)) { return typeDesc; } return parseComplexTypeDescriptor(parseOptionalTypeDescriptor(typeDesc), context, isTypedBindingPattern); case OPEN_BRACKET_TOKEN: if (isTypedBindingPattern) { return typeDesc; } return parseComplexTypeDescriptor(parseArrayTypeDescriptor(typeDesc), context, isTypedBindingPattern); case PIPE_TOKEN: return parseUnionTypeDescriptor(typeDesc, context, isTypedBindingPattern); case BITWISE_AND_TOKEN: return parseIntersectionTypeDescriptor(typeDesc, context, isTypedBindingPattern); default: return typeDesc; } } private boolean isValidTypeContinuationToken(STToken nextToken) { switch (nextToken.kind) { case QUESTION_MARK_TOKEN: case OPEN_BRACKET_TOKEN: case PIPE_TOKEN: case BITWISE_AND_TOKEN: return true; default: return false; } } private STNode validateForUsageOfVar(STNode typeDesc) { if (typeDesc.kind != SyntaxKind.VAR_TYPE_DESC) { return typeDesc; } STToken missingToken = STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN); missingToken = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(missingToken, typeDesc, DiagnosticErrorCode.ERROR_INVALID_USAGE_OF_VAR); return STNodeFactory.createSimpleNameReferenceNode(missingToken); } /** * <p> * Parse a type descriptor, given the next token kind. 
     * </p>
     * If the preceding token is <code>?</code> then it is an optional type descriptor
     *
     * @param context Current context
     * @param isInConditionalExpr Flag threaded down to reference parsing; presumably set when this
     *                            type desc appears inside a conditional expression — TODO confirm
     * @return Parsed node
     */
    private STNode parseTypeDescriptorInternal(ParserRuleContext context, boolean isInConditionalExpr) {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case IDENTIFIER_TOKEN:
                // User-defined type reference (possibly qualified).
                return parseTypeReference(isInConditionalExpr);
            case RECORD_KEYWORD:
                return parseRecordTypeDescriptor();
            case READONLY_KEYWORD:
                // `readonly` followed by object/abstract/client qualifies an object type; any
                // other follower means `readonly` itself is the (simple) type descriptor.
                STToken nextNextToken = getNextNextToken(nextToken.kind);
                SyntaxKind nextNextTokenKind = nextNextToken.kind;
                if (nextNextTokenKind != SyntaxKind.OBJECT_KEYWORD && nextNextTokenKind != SyntaxKind.ABSTRACT_KEYWORD
                        && nextNextTokenKind != SyntaxKind.CLIENT_KEYWORD) {
                    return parseSimpleTypeDescriptor();
                }
                // Intentional fall-through: parse the qualified object type descriptor.
            case OBJECT_KEYWORD:
            case ABSTRACT_KEYWORD:
            case CLIENT_KEYWORD:
                return parseObjectTypeDescriptor();
            case OPEN_PAREN_TOKEN:
                // Either the nil type `()` or a parenthesised type descriptor.
                return parseNilOrParenthesisedTypeDesc();
            case MAP_KEYWORD:
            case FUTURE_KEYWORD:
                return parseParameterizedTypeDescriptor();
            case TYPEDESC_KEYWORD:
                return parseTypedescTypeDescriptor();
            case ERROR_KEYWORD:
                return parseErrorTypeDescriptor();
            case XML_KEYWORD:
                return parseXmlTypeDescriptor();
            case STREAM_KEYWORD:
                return parseStreamTypeDescriptor();
            case TABLE_KEYWORD:
                return parseTableTypeDescriptor();
            case FUNCTION_KEYWORD:
                return parseFunctionTypeDesc();
            case OPEN_BRACKET_TOKEN:
                return parseTupleTypeDesc();
            case DISTINCT_KEYWORD:
                return parseDistinctTypeDesc(context);
            default:
                // Try singleton and built-in simple types before falling back to recovery.
                if (isSingletonTypeDescStart(nextToken.kind, true)) {
                    return parseSingletonTypeDesc();
                }
                if (isSimpleType(nextToken.kind)) {
                    return parseSimpleTypeDescriptor();
                }
                Solution solution = recover(nextToken, ParserRuleContext.TYPE_DESCRIPTOR, context, isInConditionalExpr);
                // KEEP means the recovery decided the current token itself starts a singleton type.
                if (solution.action == Action.KEEP) {
                    return parseSingletonTypeDesc();
                }
                return parseTypeDescriptorInternal(context, isInConditionalExpr);
        }
    }

    /**
     * Parse distinct type descriptor.
* <p> * <code> * distinct-type-descriptor := distinct type-descriptor * </code> * * @param context Context in which the type desc is used. * @return Distinct type descriptor */ private STNode parseDistinctTypeDesc(ParserRuleContext context) { STNode distinctKeyword = parseDistinctKeyword(); STNode typeDesc = parseTypeDescriptor(context); return STNodeFactory.createDistinctTypeDescriptorNode(distinctKeyword, typeDesc); } private STNode parseDistinctKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.DISTINCT_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.DISTINCT_KEYWORD); return parseDistinctKeyword(); } } private STNode parseNilOrParenthesisedTypeDesc() { STNode openParen = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS); return parseNilOrParenthesisedTypeDescRhs(openParen); } private STNode parseNilOrParenthesisedTypeDescRhs(STNode openParen) { STNode closeParen; STToken nextToken = peek(); switch (nextToken.kind) { case CLOSE_PAREN_TOKEN: closeParen = parseCloseParenthesis(); return STNodeFactory.createNilTypeDescriptorNode(openParen, closeParen); default: if (isTypeStartingToken(nextToken.kind)) { STNode typedesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_PARENTHESIS); closeParen = parseCloseParenthesis(); return STNodeFactory.createParenthesisedTypeDescriptorNode(openParen, typedesc, closeParen); } recover(peek(), ParserRuleContext.NIL_OR_PARENTHESISED_TYPE_DESC_RHS, openParen); return parseNilOrParenthesisedTypeDescRhs(openParen); } } /** * Parse simple type descriptor. 
* * @return Parsed node */ private STNode parseSimpleTypeDescriptor() { STToken nextToken = peek(); if (isSimpleType(nextToken.kind)) { STToken token = consume(); return createBuiltinSimpleNameReference(token); } else { recover(nextToken, ParserRuleContext.SIMPLE_TYPE_DESCRIPTOR); return parseSimpleTypeDescriptor(); } } private STNode createBuiltinSimpleNameReference(STNode token) { SyntaxKind typeKind = getTypeSyntaxKind(token.kind); return STNodeFactory.createBuiltinSimpleNameReferenceNode(typeKind, token); } /** * <p> * Parse function body. A function body has the following structure. * </p> * <code> * function-body := function-body-block | external-function-body * external-function-body := = annots external ; * function-body-block := { [default-worker-init named-worker-decl+] default-worker } * </code> * * @param isObjectMethod Flag indicating whether this is an object-method * @return Parsed node */ protected STNode parseFunctionBody(boolean isObjectMethod) { switch (peek().kind) { case EQUAL_TOKEN: return parseExternalFunctionBody(); case OPEN_BRACE_TOKEN: return parseFunctionBodyBlock(false); case RIGHT_DOUBLE_ARROW_TOKEN: return parseExpressionFuncBody(false, false); case SEMICOLON_TOKEN: if (isObjectMethod) { return parseSemicolon(); } default: STToken token = peek(); recover(token, ParserRuleContext.FUNC_BODY, isObjectMethod); return parseFunctionBody(isObjectMethod); } } /** * <p> * Parse function body block. A function body block has the following structure. 
* </p> * * <code> * function-body-block := { [default-worker-init, named-worker-decl+] default-worker }<br/> * default-worker-init := sequence-stmt<br/> * default-worker := sequence-stmt<br/> * named-worker-decl := worker worker-name return-type-descriptor { sequence-stmt }<br/> * worker-name := identifier<br/> * </code> * * @param isAnonFunc Flag indicating whether the func body belongs to an anonymous function * @return Parsed node */ private STNode parseFunctionBodyBlock(boolean isAnonFunc) { startContext(ParserRuleContext.FUNC_BODY_BLOCK); STNode openBrace = parseOpenBrace(); STToken token = peek(); ArrayList<STNode> firstStmtList = new ArrayList<>(); ArrayList<STNode> workers = new ArrayList<>(); ArrayList<STNode> secondStmtList = new ArrayList<>(); ParserRuleContext currentCtx = ParserRuleContext.DEFAULT_WORKER_INIT; boolean hasNamedWorkers = false; while (!isEndOfFuncBodyBlock(token.kind, isAnonFunc)) { STNode stmt = parseStatement(); if (stmt == null) { break; } switch (currentCtx) { case DEFAULT_WORKER_INIT: if (stmt.kind != SyntaxKind.NAMED_WORKER_DECLARATION) { firstStmtList.add(stmt); break; } currentCtx = ParserRuleContext.NAMED_WORKERS; hasNamedWorkers = true; case NAMED_WORKERS: if (stmt.kind == SyntaxKind.NAMED_WORKER_DECLARATION) { workers.add(stmt); break; } currentCtx = ParserRuleContext.DEFAULT_WORKER; case DEFAULT_WORKER: default: if (stmt.kind == SyntaxKind.NAMED_WORKER_DECLARATION) { updateLastNodeInListWithInvalidNode(secondStmtList, stmt, DiagnosticErrorCode.ERROR_NAMED_WORKER_NOT_ALLOWED_HERE); break; } secondStmtList.add(stmt); break; } token = peek(); } STNode namedWorkersList; STNode statements; if (hasNamedWorkers) { STNode workerInitStatements = STNodeFactory.createNodeList(firstStmtList); STNode namedWorkers = STNodeFactory.createNodeList(workers); namedWorkersList = STNodeFactory.createNamedWorkerDeclarator(workerInitStatements, namedWorkers); statements = STNodeFactory.createNodeList(secondStmtList); } else { namedWorkersList = 
STNodeFactory.createEmptyNode(); statements = STNodeFactory.createNodeList(firstStmtList); } STNode closeBrace = parseCloseBrace(); endContext(); return STNodeFactory.createFunctionBodyBlockNode(openBrace, namedWorkersList, statements, closeBrace); } private boolean isEndOfFuncBodyBlock(SyntaxKind nextTokenKind, boolean isAnonFunc) { if (isAnonFunc) { switch (nextTokenKind) { case CLOSE_BRACE_TOKEN: case CLOSE_PAREN_TOKEN: case CLOSE_BRACKET_TOKEN: case OPEN_BRACE_TOKEN: case SEMICOLON_TOKEN: case COMMA_TOKEN: case PUBLIC_KEYWORD: case EOF_TOKEN: case EQUAL_TOKEN: case BACKTICK_TOKEN: return true; default: break; } } return isEndOfStatements(); } private boolean isEndOfRecordTypeNode(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case TYPE_KEYWORD: case PUBLIC_KEYWORD: default: return endOfModuleLevelNode(1); } } private boolean isEndOfObjectTypeNode() { return endOfModuleLevelNode(1, true); } private boolean isEndOfStatements() { switch (peek().kind) { case RESOURCE_KEYWORD: return true; default: return endOfModuleLevelNode(1); } } private boolean endOfModuleLevelNode(int peekIndex) { return endOfModuleLevelNode(peekIndex, false); } private boolean endOfModuleLevelNode(int peekIndex, boolean isObject) { switch (peek(peekIndex).kind) { case EOF_TOKEN: case CLOSE_BRACE_TOKEN: case CLOSE_BRACE_PIPE_TOKEN: case IMPORT_KEYWORD: case CONST_KEYWORD: case ANNOTATION_KEYWORD: case LISTENER_KEYWORD: return true; case SERVICE_KEYWORD: return isServiceDeclStart(ParserRuleContext.OBJECT_MEMBER, 1); case PUBLIC_KEYWORD: return endOfModuleLevelNode(peekIndex + 1, isObject); case FUNCTION_KEYWORD: if (isObject) { return false; } return peek(peekIndex + 1).kind == SyntaxKind.IDENTIFIER_TOKEN; default: return false; } } /** * Check whether the given token is an end of a parameter. * * @param tokenKind Next token kind * @return <code>true</code> if the token represents an end of a parameter. 
<code>false</code> otherwise */ private boolean isEndOfParameter(SyntaxKind tokenKind) { switch (tokenKind) { case CLOSE_PAREN_TOKEN: case CLOSE_BRACKET_TOKEN: case SEMICOLON_TOKEN: case COMMA_TOKEN: case RETURNS_KEYWORD: case TYPE_KEYWORD: case IF_KEYWORD: case WHILE_KEYWORD: case DO_KEYWORD: case AT_TOKEN: return true; default: return endOfModuleLevelNode(1); } } /** * Check whether the given token is an end of a parameter-list. * * @param tokenKind Next token kind * @return <code>true</code> if the token represents an end of a parameter-list. <code>false</code> otherwise */ private boolean isEndOfParametersList(SyntaxKind tokenKind) { switch (tokenKind) { case CLOSE_PAREN_TOKEN: case SEMICOLON_TOKEN: case RETURNS_KEYWORD: case TYPE_KEYWORD: case IF_KEYWORD: case WHILE_KEYWORD: case DO_KEYWORD: return true; default: return endOfModuleLevelNode(1); } } /** * Parse type reference or variable reference. * * @return Parsed node */ private STNode parseStatementStartIdentifier() { return parseQualifiedIdentifier(ParserRuleContext.TYPE_NAME_OR_VAR_NAME); } /** * Parse variable name. * * @return Parsed node */ private STNode parseVariableName() { STToken token = peek(); if (token.kind == SyntaxKind.IDENTIFIER_TOKEN) { return consume(); } else { recover(peek(), ParserRuleContext.VARIABLE_NAME); return parseVariableName(); } } /** * Parse open brace. * * @return Parsed node */ private STNode parseOpenBrace() { STToken token = peek(); if (token.kind == SyntaxKind.OPEN_BRACE_TOKEN) { return consume(); } else { recover(token, ParserRuleContext.OPEN_BRACE); return parseOpenBrace(); } } /** * Parse close brace. * * @return Parsed node */ private STNode parseCloseBrace() { STToken token = peek(); if (token.kind == SyntaxKind.CLOSE_BRACE_TOKEN) { return consume(); } else { recover(token, ParserRuleContext.CLOSE_BRACE); return parseCloseBrace(); } } /** * <p> * Parse external function body. An external function body has the following structure. 
* </p> * <code> * external-function-body := = annots external ; * </code> * * @return Parsed node */ private STNode parseExternalFunctionBody() { startContext(ParserRuleContext.EXTERNAL_FUNC_BODY); STNode assign = parseAssignOp(); return parseExternalFuncBodyRhs(assign); } private STNode parseExternalFuncBodyRhs(STNode assign) { STNode annotation; STToken nextToken = peek(); switch (nextToken.kind) { case AT_TOKEN: annotation = parseAnnotations(); break; case EXTERNAL_KEYWORD: annotation = STNodeFactory.createEmptyNodeList(); break; default: recover(nextToken, ParserRuleContext.EXTERNAL_FUNC_BODY_OPTIONAL_ANNOTS, assign); return parseExternalFuncBodyRhs(assign); } STNode externalKeyword = parseExternalKeyword(); STNode semicolon = parseSemicolon(); endContext(); return STNodeFactory.createExternalFunctionBodyNode(assign, annotation, externalKeyword, semicolon); } /** * Parse semicolon. * * @return Parsed node */ private STNode parseSemicolon() { STToken token = peek(); if (token.kind == SyntaxKind.SEMICOLON_TOKEN) { return consume(); } else { recover(token, ParserRuleContext.SEMICOLON); return parseSemicolon(); } } /** * Parse <code>external</code> keyword. * * @return Parsed node */ private STNode parseExternalKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.EXTERNAL_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.EXTERNAL_KEYWORD); return parseExternalKeyword(); } } /* * Operators */ /** * Parse assign operator. * * @return Parsed node */ private STNode parseAssignOp() { STToken token = peek(); if (token.kind == SyntaxKind.EQUAL_TOKEN) { return consume(); } else { recover(token, ParserRuleContext.ASSIGN_OP); return parseAssignOp(); } } /** * Parse binary operator. 
* * @return Parsed node */ private STNode parseBinaryOperator() { STToken token = peek(); if (isBinaryOperator(token.kind)) { return consume(); } else { recover(token, ParserRuleContext.BINARY_OPERATOR); return parseBinaryOperator(); } } /** * Check whether the given token kind is a binary operator. * * @param kind STToken kind * @return <code>true</code> if the token kind refers to a binary operator. <code>false</code> otherwise */ private boolean isBinaryOperator(SyntaxKind kind) { switch (kind) { case PLUS_TOKEN: case MINUS_TOKEN: case SLASH_TOKEN: case ASTERISK_TOKEN: case GT_TOKEN: case LT_TOKEN: case DOUBLE_EQUAL_TOKEN: case TRIPPLE_EQUAL_TOKEN: case LT_EQUAL_TOKEN: case GT_EQUAL_TOKEN: case NOT_EQUAL_TOKEN: case NOT_DOUBLE_EQUAL_TOKEN: case BITWISE_AND_TOKEN: case BITWISE_XOR_TOKEN: case PIPE_TOKEN: case LOGICAL_AND_TOKEN: case LOGICAL_OR_TOKEN: case PERCENT_TOKEN: case DOUBLE_LT_TOKEN: case DOUBLE_GT_TOKEN: case TRIPPLE_GT_TOKEN: case ELLIPSIS_TOKEN: case DOUBLE_DOT_LT_TOKEN: case ELVIS_TOKEN: return true; default: return false; } } /** * Get the precedence of a given operator. 
     *
     * @param binaryOpKind Operator kind
     * @return Precedence of the given operator
     * @throws UnsupportedOperationException if the token kind is not a known binary operator
     */
    private OperatorPrecedence getOpPrecedence(SyntaxKind binaryOpKind) {
        switch (binaryOpKind) {
            // multiplicative: * / %
            case ASTERISK_TOKEN:
            case SLASH_TOKEN:
            case PERCENT_TOKEN:
                return OperatorPrecedence.MULTIPLICATIVE;
            // additive: + -
            case PLUS_TOKEN:
            case MINUS_TOKEN:
                return OperatorPrecedence.ADDITIVE;
            // relational comparison, including `is`
            case GT_TOKEN:
            case LT_TOKEN:
            case GT_EQUAL_TOKEN:
            case LT_EQUAL_TOKEN:
            case IS_KEYWORD:
                return OperatorPrecedence.BINARY_COMPARE;
            // member/field access, calls, and XML navigation tokens
            case DOT_TOKEN:
            case OPEN_BRACKET_TOKEN:
            case OPEN_PAREN_TOKEN:
            case ANNOT_CHAINING_TOKEN:
            case OPTIONAL_CHAINING_TOKEN:
            case DOT_LT_TOKEN:
            case SLASH_LT_TOKEN:
            case DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN:
            case SLASH_ASTERISK_TOKEN:
                return OperatorPrecedence.MEMBER_ACCESS;
            // (deep) equality and inequality
            case DOUBLE_EQUAL_TOKEN:
            case TRIPPLE_EQUAL_TOKEN:
            case NOT_EQUAL_TOKEN:
            case NOT_DOUBLE_EQUAL_TOKEN:
                return OperatorPrecedence.EQUALITY;
            case BITWISE_AND_TOKEN:
                return OperatorPrecedence.BITWISE_AND;
            case BITWISE_XOR_TOKEN:
                return OperatorPrecedence.BITWISE_XOR;
            case PIPE_TOKEN:
                return OperatorPrecedence.BITWISE_OR;
            case LOGICAL_AND_TOKEN:
                return OperatorPrecedence.LOGICAL_AND;
            case LOGICAL_OR_TOKEN:
                return OperatorPrecedence.LOGICAL_OR;
            // action-related operators
            case RIGHT_ARROW_TOKEN:
                return OperatorPrecedence.REMOTE_CALL_ACTION;
            case RIGHT_DOUBLE_ARROW_TOKEN:
                return OperatorPrecedence.ANON_FUNC_OR_LET;
            case SYNC_SEND_TOKEN:
                return OperatorPrecedence.ACTION;
            // shift: << >> >>>
            case DOUBLE_LT_TOKEN:
            case DOUBLE_GT_TOKEN:
            case TRIPPLE_GT_TOKEN:
                return OperatorPrecedence.SHIFT;
            // range: ... and ..<
            case ELLIPSIS_TOKEN:
            case DOUBLE_DOT_LT_TOKEN:
                return OperatorPrecedence.RANGE;
            case ELVIS_TOKEN:
                return OperatorPrecedence.ELVIS_CONDITIONAL;
            // ternary conditional
            case QUESTION_MARK_TOKEN:
            case COLON_TOKEN:
                return OperatorPrecedence.CONDITIONAL;
            default:
                throw new UnsupportedOperationException("Unsupported binary operator '" + binaryOpKind + "'");
        }
    }

    /**
     * <p>
     * Get the operator kind to insert during recovery, given the precedence level.
* </p> * * @param opPrecedenceLevel Precedence of the given operator * @return Kind of the operator to insert */ private SyntaxKind getBinaryOperatorKindToInsert(OperatorPrecedence opPrecedenceLevel) { switch (opPrecedenceLevel) { case MULTIPLICATIVE: return SyntaxKind.ASTERISK_TOKEN; case DEFAULT: case UNARY: case ACTION: case EXPRESSION_ACTION: case REMOTE_CALL_ACTION: case ANON_FUNC_OR_LET: case QUERY: case ADDITIVE: return SyntaxKind.PLUS_TOKEN; case SHIFT: return SyntaxKind.DOUBLE_LT_TOKEN; case RANGE: return SyntaxKind.ELLIPSIS_TOKEN; case BINARY_COMPARE: return SyntaxKind.LT_TOKEN; case EQUALITY: return SyntaxKind.DOUBLE_EQUAL_TOKEN; case BITWISE_AND: return SyntaxKind.BITWISE_AND_TOKEN; case BITWISE_XOR: return SyntaxKind.BITWISE_XOR_TOKEN; case BITWISE_OR: return SyntaxKind.PIPE_TOKEN; case LOGICAL_AND: return SyntaxKind.LOGICAL_AND_TOKEN; case LOGICAL_OR: return SyntaxKind.LOGICAL_OR_TOKEN; case ELVIS_CONDITIONAL: return SyntaxKind.ELVIS_TOKEN; default: throw new UnsupportedOperationException( "Unsupported operator precedence level'" + opPrecedenceLevel + "'"); } } /** * <p> * Parse a module type definition. * </p> * <code>module-type-defn := metadata [public] type identifier type-descriptor ;</code> * * @param metadata Metadata * @param qualifier Visibility qualifier * @return Parsed node */ private STNode parseModuleTypeDefinition(STNode metadata, STNode qualifier) { startContext(ParserRuleContext.MODULE_TYPE_DEFINITION); STNode typeKeyword = parseTypeKeyword(); STNode typeName = parseTypeName(); STNode typeDescriptor = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_DEF); STNode semicolon = parseSemicolon(); endContext(); return STNodeFactory.createTypeDefinitionNode(metadata, qualifier, typeKeyword, typeName, typeDescriptor, semicolon); } /** * Parse type keyword. 
* * @return Parsed node */ private STNode parseTypeKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.TYPE_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.TYPE_KEYWORD); return parseTypeKeyword(); } } /** * Parse type name. * * @return Parsed node */ private STNode parseTypeName() { STToken token = peek(); if (token.kind == SyntaxKind.IDENTIFIER_TOKEN) { return consume(); } else { recover(token, ParserRuleContext.TYPE_NAME); return parseTypeName(); } } /** * <p> * Parse record type descriptor. A record type descriptor body has the following structure. * </p> * * <code>record-type-descriptor := inclusive-record-type-descriptor | exclusive-record-type-descriptor * <br/><br/>inclusive-record-type-descriptor := record { field-descriptor* } * <br/><br/>exclusive-record-type-descriptor := record {| field-descriptor* [record-rest-descriptor] |} * </code> * * @return Parsed node */ private STNode parseRecordTypeDescriptor() { startContext(ParserRuleContext.RECORD_TYPE_DESCRIPTOR); STNode recordKeyword = parseRecordKeyword(); STNode bodyStartDelimiter = parseRecordBodyStartDelimiter(); boolean isInclusive = bodyStartDelimiter.kind == SyntaxKind.OPEN_BRACE_TOKEN; ArrayList<STNode> recordFields = new ArrayList<>(); STToken token = peek(); STNode recordRestDescriptor = null; while (!isEndOfRecordTypeNode(token.kind)) { STNode field = parseFieldOrRestDescriptor(isInclusive); if (field == null) { break; } token = peek(); if (field.kind == SyntaxKind.RECORD_REST_TYPE) { recordRestDescriptor = field; break; } recordFields.add(field); } while (recordRestDescriptor != null && !isEndOfRecordTypeNode(token.kind)) { STNode invalidField = parseFieldOrRestDescriptor(isInclusive); recordRestDescriptor = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(recordRestDescriptor, invalidField, DiagnosticErrorCode.ERROR_MORE_RECORD_FIELDS_AFTER_REST_FIELD); token = peek(); } STNode fields = STNodeFactory.createNodeList(recordFields); STNode bodyEndDelimiter = 
parseRecordBodyCloseDelimiter(bodyStartDelimiter.kind); endContext(); return STNodeFactory.createRecordTypeDescriptorNode(recordKeyword, bodyStartDelimiter, fields, recordRestDescriptor, bodyEndDelimiter); } /** * Parse record body start delimiter. * * @return Parsed node */ private STNode parseRecordBodyStartDelimiter() { STToken nextToken = peek(); switch (nextToken.kind) { case OPEN_BRACE_PIPE_TOKEN: return parseClosedRecordBodyStart(); case OPEN_BRACE_TOKEN: return parseOpenBrace(); default: recover(nextToken, ParserRuleContext.RECORD_BODY_START); return parseRecordBodyStartDelimiter(); } } /** * Parse closed-record body start delimiter. * * @return Parsed node */ private STNode parseClosedRecordBodyStart() { STToken token = peek(); if (token.kind == SyntaxKind.OPEN_BRACE_PIPE_TOKEN) { return consume(); } else { recover(token, ParserRuleContext.CLOSED_RECORD_BODY_START); return parseClosedRecordBodyStart(); } } /** * Parse record body close delimiter. * * @return Parsed node */ private STNode parseRecordBodyCloseDelimiter(SyntaxKind startingDelimeter) { if (startingDelimeter == SyntaxKind.OPEN_BRACE_PIPE_TOKEN) { return parseClosedRecordBodyEnd(); } return parseCloseBrace(); } /** * Parse closed-record body end delimiter. * * @return Parsed node */ private STNode parseClosedRecordBodyEnd() { STToken token = peek(); if (token.kind == SyntaxKind.CLOSE_BRACE_PIPE_TOKEN) { return consume(); } else { recover(token, ParserRuleContext.CLOSED_RECORD_BODY_END); return parseClosedRecordBodyEnd(); } } /** * Parse record keyword. * * @return Parsed node */ private STNode parseRecordKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.RECORD_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.RECORD_KEYWORD); return parseRecordKeyword(); } } /** * <p> * Parse field descriptor or rest descriptor. 
* </p> * * <code> * <br/><br/>field-descriptor := individual-field-descriptor | record-type-reference * <br/><br/><br/>individual-field-descriptor := metadata type-descriptor field-name [? | default-value] ; * <br/><br/>field-name := identifier * <br/><br/>default-value := = expression * <br/><br/>record-type-reference := * type-reference ; * <br/><br/>record-rest-descriptor := type-descriptor ... ; * </code> * * @return Parsed node */ private STNode parseFieldOrRestDescriptor(boolean isInclusive) { STToken nextToken = peek(); switch (nextToken.kind) { case CLOSE_BRACE_TOKEN: case CLOSE_BRACE_PIPE_TOKEN: return null; case ASTERISK_TOKEN: startContext(ParserRuleContext.RECORD_FIELD); STNode asterisk = consume(); STNode type = parseTypeReference(); STNode semicolonToken = parseSemicolon(); endContext(); return STNodeFactory.createTypeReferenceNode(asterisk, type, semicolonToken); case DOCUMENTATION_STRING: case AT_TOKEN: startContext(ParserRuleContext.RECORD_FIELD); STNode metadata = parseMetaData(); nextToken = peek(); return parseRecordField(nextToken, isInclusive, metadata); default: if (isTypeStartingToken(nextToken.kind)) { startContext(ParserRuleContext.RECORD_FIELD); metadata = STNodeFactory.createEmptyNode(); return parseRecordField(nextToken, isInclusive, metadata); } recover(peek(), ParserRuleContext.RECORD_FIELD_OR_RECORD_END, isInclusive); return parseFieldOrRestDescriptor(isInclusive); } } private STNode parseRecordField(STToken nextToken, boolean isInclusive, STNode metadata) { if (nextToken.kind != SyntaxKind.READONLY_KEYWORD) { STNode type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_RECORD_FIELD); STNode fieldOrRestDesc = parseFieldDescriptor(isInclusive, metadata, type); endContext(); return fieldOrRestDesc; } STNode type; STNode fieldOrRestDesc; STNode readOnlyQualifier; readOnlyQualifier = parseReadonlyKeyword(); nextToken = peek(); if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) { STNode fieldNameOrTypeDesc = 
parseQualifiedIdentifier(ParserRuleContext.RECORD_FIELD_NAME_OR_TYPE_NAME); if (fieldNameOrTypeDesc.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) { type = fieldNameOrTypeDesc; } else { nextToken = peek(); switch (nextToken.kind) { case SEMICOLON_TOKEN: case EQUAL_TOKEN: type = createBuiltinSimpleNameReference(readOnlyQualifier); readOnlyQualifier = STNodeFactory.createEmptyNode(); STNode fieldName = ((STSimpleNameReferenceNode) fieldNameOrTypeDesc).name; return parseFieldDescriptorRhs(metadata, readOnlyQualifier, type, fieldName); default: type = parseComplexTypeDescriptor(fieldNameOrTypeDesc, ParserRuleContext.TYPE_DESC_IN_RECORD_FIELD, false); break; } } } else if (nextToken.kind == SyntaxKind.ELLIPSIS_TOKEN) { type = createBuiltinSimpleNameReference(readOnlyQualifier); fieldOrRestDesc = parseFieldDescriptor(isInclusive, metadata, type); endContext(); return fieldOrRestDesc; } else if (isTypeStartingToken(nextToken.kind)) { type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_RECORD_FIELD); } else { readOnlyQualifier = createBuiltinSimpleNameReference(readOnlyQualifier); type = parseComplexTypeDescriptor(readOnlyQualifier, ParserRuleContext.TYPE_DESC_IN_RECORD_FIELD, false); readOnlyQualifier = STNodeFactory.createEmptyNode(); } fieldOrRestDesc = parseIndividualRecordField(metadata, readOnlyQualifier, type); endContext(); return fieldOrRestDesc; } private STNode parseFieldDescriptor(boolean isInclusive, STNode metadata, STNode type) { if (isInclusive) { STNode readOnlyQualifier = STNodeFactory.createEmptyNode(); return parseIndividualRecordField(metadata, readOnlyQualifier, type); } else { return parseFieldOrRestDescriptorRhs(metadata, type); } } private STNode parseIndividualRecordField(STNode metadata, STNode readOnlyQualifier, STNode type) { STNode fieldName = parseVariableName(); return parseFieldDescriptorRhs(metadata, readOnlyQualifier, type, fieldName); } /** * Parse type reference. 
* <code>type-reference := identifier | qualified-identifier</code> * * @return Type reference node */ private STNode parseTypeReference() { STNode typeReference = parseTypeDescriptor(ParserRuleContext.TYPE_REFERENCE); if (typeReference.kind == SyntaxKind.SIMPLE_NAME_REFERENCE) { if (typeReference.hasDiagnostics()) { STNode emptyNameReference = STNodeFactory.createSimpleNameReferenceNode (SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN, DiagnosticErrorCode.ERROR_MISSING_IDENTIFIER)); return emptyNameReference; } return typeReference; } if (typeReference.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) { return typeReference; } STNode emptyNameReference = STNodeFactory .createSimpleNameReferenceNode(SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN)); emptyNameReference = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(emptyNameReference, typeReference, DiagnosticErrorCode.ONLY_TYPE_REFERENCE_ALLOWED_HERE_AS_TYPE_INCLUSIONS); return emptyNameReference; } private STNode parseTypeReference(boolean isInConditionalExpr) { return parseQualifiedIdentifier(ParserRuleContext.TYPE_REFERENCE, isInConditionalExpr); } /** * Parse identifier or qualified identifier. 
 *
 * @return Identifier node
 */
private STNode parseQualifiedIdentifier(ParserRuleContext currentCtx) {
    return parseQualifiedIdentifier(currentCtx, false);
}

/**
 * Parse identifier or qualified identifier.
 *
 * @param currentCtx          current parser rule context, used for error recovery
 * @param isInConditionalExpr whether parsing happens inside a conditional expression
 * @return Identifier node
 */
private STNode parseQualifiedIdentifier(ParserRuleContext currentCtx, boolean isInConditionalExpr) {
    STToken token = peek();
    STNode typeRefOrPkgRef;
    if (token.kind == SyntaxKind.IDENTIFIER_TOKEN) {
        typeRefOrPkgRef = consume();
    } else {
        recover(token, currentCtx, isInConditionalExpr);
        // If recovery did not surface an identifier, drop the invalid token and retry.
        if (peek().kind != SyntaxKind.IDENTIFIER_TOKEN) {
            addInvalidTokenToNextToken(errorHandler.consumeInvalidToken());
            return parseQualifiedIdentifier(currentCtx, isInConditionalExpr);
        }

        typeRefOrPkgRef = consume();
    }

    return parseQualifiedIdentifier(typeRefOrPkgRef, isInConditionalExpr);
}

/**
 * Parse identifier or qualified identifier, given the starting identifier.
 *
 * @param identifier Starting identifier
 * @return Parse node
 */
private STNode parseQualifiedIdentifier(STNode identifier, boolean isInConditionalExpr) {
    STToken nextToken = peek(1);
    // No colon following: plain (unqualified) name reference.
    if (nextToken.kind != SyntaxKind.COLON_TOKEN) {
        return STNodeFactory.createSimpleNameReferenceNode(identifier);
    }

    STToken nextNextToken = peek(2);
    switch (nextNextToken.kind) {
        case IDENTIFIER_TOKEN:
            STToken colon = consume();
            STNode varOrFuncName = consume();
            return STNodeFactory.createQualifiedNameReferenceNode(identifier, colon, varOrFuncName);
        case MAP_KEYWORD:
            // `map` is a keyword but may appear as a reference name after a module
            // prefix; re-wrap the keyword token as an identifier token, preserving
            // its minutiae and diagnostics.
            colon = consume();
            STToken mapKeyword = consume();
            STNode refName = STNodeFactory.createIdentifierToken(mapKeyword.text(), mapKeyword.leadingMinutiae(),
                    mapKeyword.trailingMinutiae(), mapKeyword.diagnostics());
            return STNodeFactory.createQualifiedNameReferenceNode(identifier, colon, refName);
        case COLON_TOKEN:
            // Two consecutive colons: treat the extra colon as invalid and retry.
            addInvalidTokenToNextToken(errorHandler.consumeInvalidToken());
            return parseQualifiedIdentifier(identifier, isInConditionalExpr);
        default:
            if (isInConditionalExpr) {
                // Inside `a ? b : c` the colon belongs to the conditional
                // expression, so stop here with a simple name reference.
                return STNodeFactory.createSimpleNameReferenceNode(identifier);
            }

            colon = consume();
            varOrFuncName = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,
                    DiagnosticErrorCode.ERROR_MISSING_IDENTIFIER);
            return STNodeFactory.createQualifiedNameReferenceNode(identifier, colon, varOrFuncName);
    }
}

/**
 * Parse RHS of a field or rest type descriptor.
 *
 * @param metadata Metadata
 * @param type     Type descriptor
 * @return Parsed node
 */
private STNode parseFieldOrRestDescriptorRhs(STNode metadata, STNode type) {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case ELLIPSIS_TOKEN:
            // Rest descriptor: `T ...;`
            STNode ellipsis = parseEllipsis();
            STNode semicolonToken = parseSemicolon();
            return STNodeFactory.createRecordRestDescriptorNode(type, ellipsis, semicolonToken);
        case IDENTIFIER_TOKEN:
            // Individual field: `T fieldName ...` with no readonly qualifier.
            STNode readonlyQualifier = STNodeFactory.createEmptyNode();
            return parseIndividualRecordField(metadata, readonlyQualifier, type);
        default:
            recover(nextToken, ParserRuleContext.FIELD_OR_REST_DESCIPTOR_RHS, metadata, type);
            return parseFieldOrRestDescriptorRhs(metadata, type);
    }
}

/**
 * <p>
 * Parse field descriptor rhs.
 * </p>
 *
 * <code>
 * field-descriptor := [?
| default-value] ;
 * <br/>default-value := = expression
 * </code>
 *
 * @param metadata  Metadata
 * @param type      Type descriptor
 * @param fieldName Field name
 * @return Parsed node
 */
private STNode parseFieldDescriptorRhs(STNode metadata, STNode readonlyQualifier, STNode type, STNode fieldName) {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case SEMICOLON_TOKEN:
            // Required field with no default value: `T name;`
            STNode questionMarkToken = STNodeFactory.createEmptyNode();
            STNode semicolonToken = parseSemicolon();
            return STNodeFactory.createRecordFieldNode(metadata, readonlyQualifier, type, fieldName,
                    questionMarkToken, semicolonToken);
        case QUESTION_MARK_TOKEN:
            // Optional field: `T name?;`
            questionMarkToken = parseQuestionMark();
            semicolonToken = parseSemicolon();
            return STNodeFactory.createRecordFieldNode(metadata, readonlyQualifier, type, fieldName,
                    questionMarkToken, semicolonToken);
        case EQUAL_TOKEN:
            // Field with a default value: `T name = expr;`
            STNode equalsToken = parseAssignOp();
            STNode expression = parseExpression();
            semicolonToken = parseSemicolon();
            return STNodeFactory.createRecordFieldWithDefaultValueNode(metadata, readonlyQualifier, type,
                    fieldName, equalsToken, expression, semicolonToken);
        default:
            recover(nextToken, ParserRuleContext.FIELD_DESCRIPTOR_RHS, metadata, readonlyQualifier, type,
                    fieldName);
            return parseFieldDescriptorRhs(metadata, readonlyQualifier, type, fieldName);
    }
}

/**
 * Parse question mark.
 *
 * @return Parsed node
 */
private STNode parseQuestionMark() {
    STToken token = peek();
    if (token.kind == SyntaxKind.QUESTION_MARK_TOKEN) {
        return consume();
    } else {
        recover(token, ParserRuleContext.QUESTION_MARK);
        return parseQuestionMark();
    }
}

/*
 * Statements
 */

/**
 * Parse statements, until an end of a block is reached.
 *
 * @return Parsed node
 */
private STNode parseStatements() {
    ArrayList<STNode> stmts = new ArrayList<>();
    return parseStatements(stmts);
}

/**
 * Parse statements into the given list, until an end of a block is reached.
 *
 * @param stmts list to accumulate parsed statements into
 * @return Node-list of the parsed statements
 */
private STNode parseStatements(ArrayList<STNode> stmts) {
    while (!isEndOfStatements()) {
        STNode stmt = parseStatement();
        // A null statement marks the end of the statement list (e.g. close brace).
        if (stmt == null) {
            break;
        }

        // A named worker declaration is not a valid statement here; attach it
        // to the next token as invalid and stop.
        if (stmt.kind == SyntaxKind.NAMED_WORKER_DECLARATION) {
            addInvalidNodeToNextToken(stmt, DiagnosticErrorCode.ERROR_NAMED_WORKER_NOT_ALLOWED_HERE);
            break;
        }
        stmts.add(stmt);
    }

    return STNodeFactory.createNodeList(stmts);
}

/**
 * Parse a single statement.
 *
 * @return Parsed node
 */
protected STNode parseStatement() {
    STToken nextToken = peek();
    STNode annots = null;
    switch (nextToken.kind) {
        case CLOSE_BRACE_TOKEN:
            // Returning null marks the end of the statement list.
            return null;
        case SEMICOLON_TOKEN:
            // Stray semicolon: consume it as invalid and retry.
            addInvalidTokenToNextToken(errorHandler.consumeInvalidToken());
            return parseStatement();
        case AT_TOKEN:
            annots = parseOptionalAnnotations();
            break;
        // Tokens that unambiguously start a statement: fall through to the
        // annotation-aware parse below.
        case FINAL_KEYWORD:
        case IF_KEYWORD:
        case WHILE_KEYWORD:
        case DO_KEYWORD:
        case PANIC_KEYWORD:
        case CONTINUE_KEYWORD:
        case BREAK_KEYWORD:
        case RETURN_KEYWORD:
        case TYPE_KEYWORD:
        case LOCK_KEYWORD:
        case OPEN_BRACE_TOKEN:
        case FORK_KEYWORD:
        case FOREACH_KEYWORD:
        case XMLNS_KEYWORD:
        case TRANSACTION_KEYWORD:
        case RETRY_KEYWORD:
        case ROLLBACK_KEYWORD:
        case MATCH_KEYWORD:
        case FAIL_KEYWORD:
        case CHECK_KEYWORD:
        case CHECKPANIC_KEYWORD:
        case TRAP_KEYWORD:
        case START_KEYWORD:
        case FLUSH_KEYWORD:
        case LEFT_ARROW_TOKEN:
        case WAIT_KEYWORD:
        case COMMIT_KEYWORD:
        case WORKER_KEYWORD:
            break;
        default:
            if (isTypeStartingToken(nextToken.kind)) {
                break;
            }

            if (isValidExpressionStart(nextToken.kind, 1)) {
                break;
            }

            STToken token = peek();
            Solution solution = recover(token, ParserRuleContext.STATEMENT);
            if (solution.action == Action.KEEP) {
                break;
            }
            return parseStatement();
    }

    return parseStatement(annots);
}

// Returns the given annotations, or an empty node-list when null.
private STNode getAnnotations(STNode nullbaleAnnot) {
    if (nullbaleAnnot != null) {
        return nullbaleAnnot;
    }

    return STNodeFactory.createEmptyNodeList();
}

/**
 * Parse a single statement, given the next token kind.
* * @param annots Annotations * @return Parsed node */ private STNode parseStatement(STNode annots) { STToken nextToken = peek(); switch (nextToken.kind) { case CLOSE_BRACE_TOKEN: addInvalidNodeToNextToken(annots, DiagnosticErrorCode.ERROR_INVALID_ANNOTATIONS); return null; case SEMICOLON_TOKEN: addInvalidTokenToNextToken(errorHandler.consumeInvalidToken()); return parseStatement(annots); case FINAL_KEYWORD: STNode finalKeyword = parseFinalKeyword(); return parseVariableDecl(getAnnotations(annots), finalKeyword, false); case IF_KEYWORD: return parseIfElseBlock(); case WHILE_KEYWORD: return parseWhileStatement(); case DO_KEYWORD: return parseDoStatement(); case PANIC_KEYWORD: return parsePanicStatement(); case CONTINUE_KEYWORD: return parseContinueStatement(); case BREAK_KEYWORD: return parseBreakStatement(); case RETURN_KEYWORD: return parseReturnStatement(); case FAIL_KEYWORD: return parseFailStatement(); case TYPE_KEYWORD: return parseLocalTypeDefinitionStatement(getAnnotations(annots)); case LOCK_KEYWORD: return parseLockStatement(); case OPEN_BRACE_TOKEN: return parseStatementStartsWithOpenBrace(); case WORKER_KEYWORD: return parseNamedWorkerDeclaration(getAnnotations(annots)); case FORK_KEYWORD: return parseForkStatement(); case FOREACH_KEYWORD: return parseForEachStatement(); case START_KEYWORD: case CHECK_KEYWORD: case CHECKPANIC_KEYWORD: case TRAP_KEYWORD: case FLUSH_KEYWORD: case LEFT_ARROW_TOKEN: case WAIT_KEYWORD: case FROM_KEYWORD: case COMMIT_KEYWORD: return parseExpressionStatement(getAnnotations(annots)); case XMLNS_KEYWORD: return parseXMLNamespaceDeclaration(false); case TRANSACTION_KEYWORD: return parseTransactionStatement(); case RETRY_KEYWORD: return parseRetryStatement(); case ROLLBACK_KEYWORD: return parseRollbackStatement(); case OPEN_BRACKET_TOKEN: return parseStatementStartsWithOpenBracket(getAnnotations(annots), false); case FUNCTION_KEYWORD: case OPEN_PAREN_TOKEN: case IDENTIFIER_TOKEN: case DECIMAL_INTEGER_LITERAL_TOKEN: case 
HEX_INTEGER_LITERAL_TOKEN: case STRING_LITERAL_TOKEN: case NULL_KEYWORD: case TRUE_KEYWORD: case FALSE_KEYWORD: case DECIMAL_FLOATING_POINT_LITERAL_TOKEN: case HEX_FLOATING_POINT_LITERAL_TOKEN: case STRING_KEYWORD: case XML_KEYWORD: return parseStmtStartsWithTypeOrExpr(getAnnotations(annots)); case MATCH_KEYWORD: return parseMatchStatement(); case ERROR_KEYWORD: return parseErrorTypeDescOrErrorBP(getAnnotations(annots)); default: if (isValidExpressionStart(nextToken.kind, 1)) { return parseStatementStartWithExpr(getAnnotations(annots)); } if (isTypeStartingToken(nextToken.kind)) { finalKeyword = STNodeFactory.createEmptyNode(); return parseVariableDecl(getAnnotations(annots), finalKeyword, false); } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.STATEMENT_WITHOUT_ANNOTS, annots); if (solution.action == Action.KEEP) { finalKeyword = STNodeFactory.createEmptyNode(); return parseVariableDecl(getAnnotations(annots), finalKeyword, false); } return parseStatement(annots); } } /** * <p> * Parse variable declaration. Variable declaration can be a local or module level. * </p> * * <code> * local-var-decl-stmt := local-init-var-decl-stmt | local-no-init-var-decl-stmt * <br/><br/> * local-init-var-decl-stmt := [annots] [final] typed-binding-pattern = action-or-expr ; * <br/><br/> * local-no-init-var-decl-stmt := [annots] [final] type-descriptor variable-name ; * </code> * * @param annots Annotations or metadata * @param finalKeyword Final keyword * @return Parsed node */ private STNode parseVariableDecl(STNode annots, STNode finalKeyword, boolean isModuleVar) { startContext(ParserRuleContext.VAR_DECL_STMT); STNode typeBindingPattern = parseTypedBindingPattern(ParserRuleContext.VAR_DECL_STMT); return parseVarDeclRhs(annots, finalKeyword, typeBindingPattern, isModuleVar); } /** * Parse final keyword. 
 *
 * @return Parsed node
 */
private STNode parseFinalKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.FINAL_KEYWORD) {
        return consume();
    } else {
        recover(token, ParserRuleContext.FINAL_KEYWORD);
        return parseFinalKeyword();
    }
}

/**
 * <p>
 * Parse the right hand side of a variable declaration statement.
 * </p>
 * <code>
 * var-decl-rhs := ; | = action-or-expr ;
 * </code>
 *
 * @param metadata            metadata
 * @param finalKeyword        Final keyword
 * @param typedBindingPattern Typed binding pattern
 * @return Parsed node
 */
private STNode parseVarDeclRhs(STNode metadata, STNode finalKeyword, STNode typedBindingPattern,
                               boolean isModuleVar) {
    STNode assign;
    STNode expr;
    STNode semicolon;
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case EQUAL_TOKEN:
            assign = parseAssignOp();
            // Module-level initializers must be expressions; local ones may also be actions.
            if (isModuleVar) {
                expr = parseExpression();
            } else {
                expr = parseActionOrExpression();
            }
            semicolon = parseSemicolon();
            break;
        case SEMICOLON_TOKEN:
            // Declaration with no initializer: `T x;`
            assign = STNodeFactory.createEmptyNode();
            expr = STNodeFactory.createEmptyNode();
            semicolon = parseSemicolon();
            break;
        default:
            recover(nextToken, ParserRuleContext.VAR_DECL_STMT_RHS, metadata, finalKeyword,
                    typedBindingPattern, isModuleVar);
            return parseVarDeclRhs(metadata, finalKeyword, typedBindingPattern, isModuleVar);
    }

    endContext();
    if (isModuleVar) {
        return STNodeFactory.createModuleVariableDeclarationNode(metadata, finalKeyword, typedBindingPattern,
                assign, expr, semicolon);
    }

    // For local declarations the metadata is the (possibly empty) annotation node-list.
    assert metadata.kind == SyntaxKind.LIST;
    return STNodeFactory.createVariableDeclarationNode(metadata, finalKeyword, typedBindingPattern, assign, expr,
            semicolon);
}

/**
 * <p>
 * Parse the RHS portion of the assignment.
 * </p>
 * <code>assignment-stmt-rhs := = action-or-expr ;</code>
 *
 * @param lvExpr LHS expression
 * @return Parsed node
 */
private STNode parseAssignmentStmtRhs(STNode lvExpr) {
    STNode assign = parseAssignOp();
    STNode expr = parseActionOrExpression();
    STNode semicolon = parseSemicolon();
    endContext();

    // A function-call-shaped LHS may actually be an error binding pattern;
    // re-interpret it before validating the LHS.
    if (lvExpr.kind == SyntaxKind.FUNCTION_CALL &&
            isPossibleErrorBindingPattern((STFunctionCallExpressionNode) lvExpr)) {
        lvExpr = getBindingPattern(lvExpr);
    }

    boolean lvExprValid = isValidLVExpr(lvExpr);
    if (!lvExprValid) {
        // Invalid LHS: replace with a missing identifier and keep the original
        // expression as leading invalid minutiae for diagnostics.
        STNode identifier = SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
        STNode simpleNameRef = STNodeFactory.createSimpleNameReferenceNode(identifier);
        lvExpr = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(simpleNameRef, lvExpr,
                DiagnosticErrorCode.ERROR_INVALID_EXPR_IN_ASSIGNMENT_LHS);
    }

    return STNodeFactory.createAssignmentStatementNode(lvExpr, assign, expr, semicolon);
}

/*
 * Expressions
 */

/**
 * Parse expression. This will start parsing expressions from the lowest level of precedence.
 *
 * @return Parsed node
 */
protected STNode parseExpression() {
    return parseExpression(DEFAULT_OP_PRECEDENCE, true, false);
}

/**
 * Parse action or expression. This will start parsing actions or expressions from the lowest level of precedence.
 *
 * @return Parsed node
 */
private STNode parseActionOrExpression() {
    return parseExpression(DEFAULT_OP_PRECEDENCE, true, true);
}

// Parse an action or expression appearing on the LHS of a statement,
// carrying any already-parsed annotations.
private STNode parseActionOrExpressionInLhs(STNode annots) {
    return parseExpression(DEFAULT_OP_PRECEDENCE, annots, false, true, false);
}

/**
 * Parse expression.
 *
 * @param isRhsExpr Flag indicating whether this is a rhs expression
 * @return Parsed node
 */
private STNode parseExpression(boolean isRhsExpr) {
    return parseExpression(DEFAULT_OP_PRECEDENCE, isRhsExpr, false);
}

// Returns true if the given node is valid as the LHS of an assignment:
// a name reference, a binding pattern, or a field/member access chain
// rooted at a valid container expression.
private boolean isValidLVExpr(STNode expression) {
    switch (expression.kind) {
        case SIMPLE_NAME_REFERENCE:
        case QUALIFIED_NAME_REFERENCE:
        case LIST_BINDING_PATTERN:
        case MAPPING_BINDING_PATTERN:
        case ERROR_BINDING_PATTERN:
            return true;
        case FIELD_ACCESS:
            return isValidLVMemberExpr(((STFieldAccessExpressionNode) expression).expression);
        case INDEXED_EXPRESSION:
            return isValidLVMemberExpr(((STIndexedExpressionNode) expression).containerExpression);
        default:
            // Tolerate missing tokens produced by recovery to avoid cascading errors.
            return (expression instanceof STMissingToken);
    }
}

// Returns true if the given node can act as the container of a field or
// member access on the LHS of an assignment. Unlike isValidLVExpr, braced
// expressions are also accepted here.
private boolean isValidLVMemberExpr(STNode expression) {
    switch (expression.kind) {
        case SIMPLE_NAME_REFERENCE:
        case QUALIFIED_NAME_REFERENCE:
            return true;
        case FIELD_ACCESS:
            return isValidLVMemberExpr(((STFieldAccessExpressionNode) expression).expression);
        case INDEXED_EXPRESSION:
            return isValidLVMemberExpr(((STIndexedExpressionNode) expression).containerExpression);
        case BRACED_EXPRESSION:
            return isValidLVMemberExpr(((STBracedExpressionNode) expression).expression);
        default:
            return (expression instanceof STMissingToken);
    }
}

/**
 * Parse an expression that has an equal or higher precedence than a given level.
* * @param precedenceLevel Precedence level of expression to be parsed * @param isRhsExpr Flag indicating whether this is a rhs expression * @param allowActions Flag indicating whether the current context support actions * @return Parsed node */ private STNode parseExpression(OperatorPrecedence precedenceLevel, boolean isRhsExpr, boolean allowActions) { return parseExpression(precedenceLevel, isRhsExpr, allowActions, false); } private STNode parseExpression(OperatorPrecedence precedenceLevel, boolean isRhsExpr, boolean allowActions, boolean isInConditionalExpr) { return parseExpression(precedenceLevel, isRhsExpr, allowActions, false, isInConditionalExpr); } private STNode parseExpression(OperatorPrecedence precedenceLevel, boolean isRhsExpr, boolean allowActions, boolean isInMatchGuard, boolean isInConditionalExpr) { STNode expr = parseTerminalExpression(isRhsExpr, allowActions, isInConditionalExpr); return parseExpressionRhs(precedenceLevel, expr, isRhsExpr, allowActions, isInMatchGuard, isInConditionalExpr); } private STNode attachErrorExpectedActionFoundDiagnostic(STNode node) { return SyntaxErrors.addDiagnostic(node, DiagnosticErrorCode.ERROR_EXPRESSION_EXPECTED_ACTION_FOUND); } private STNode parseExpression(OperatorPrecedence precedenceLevel, STNode annots, boolean isRhsExpr, boolean allowActions, boolean isInConditionalExpr) { STNode expr = parseTerminalExpression(annots, isRhsExpr, allowActions, isInConditionalExpr); return parseExpressionRhs(precedenceLevel, expr, isRhsExpr, allowActions, false, isInConditionalExpr); } private STNode parseTerminalExpression(boolean isRhsExpr, boolean allowActions, boolean isInConditionalExpr) { STNode annots; if (peek().kind == SyntaxKind.AT_TOKEN) { annots = parseOptionalAnnotations(); } else { annots = STNodeFactory.createEmptyNodeList(); } STNode expr = parseTerminalExpression(annots, isRhsExpr, allowActions, isInConditionalExpr); if (!isNodeListEmpty(annots) && expr.kind != SyntaxKind.START_ACTION) { expr = 
SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(expr, annots, DiagnosticErrorCode.ERROR_ANNOTATIONS_ATTACHED_TO_EXPRESSION); } return expr; } /** * Parse terminal expressions. A terminal expression has the highest precedence level * out of all expressions, and will be at the leaves of an expression tree. * * @param annots Annotations * @param isRhsExpr Is a rhs expression * @param allowActions Allow actions * @return Parsed node */ private STNode parseTerminalExpression(STNode annots, boolean isRhsExpr, boolean allowActions, boolean isInConditionalExpr) { STToken nextToken = peek(); switch (nextToken.kind) { case DECIMAL_INTEGER_LITERAL_TOKEN: case HEX_INTEGER_LITERAL_TOKEN: case STRING_LITERAL_TOKEN: case NULL_KEYWORD: case TRUE_KEYWORD: case FALSE_KEYWORD: case DECIMAL_FLOATING_POINT_LITERAL_TOKEN: case HEX_FLOATING_POINT_LITERAL_TOKEN: return parseBasicLiteral(); case IDENTIFIER_TOKEN: return parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF, isInConditionalExpr); case OPEN_PAREN_TOKEN: return parseBracedExpression(isRhsExpr, allowActions); case CHECK_KEYWORD: case CHECKPANIC_KEYWORD: return parseCheckExpression(isRhsExpr, allowActions, isInConditionalExpr); case OPEN_BRACE_TOKEN: return parseMappingConstructorExpr(); case TYPEOF_KEYWORD: return parseTypeofExpression(isRhsExpr, isInConditionalExpr); case PLUS_TOKEN: case MINUS_TOKEN: case NEGATION_TOKEN: case EXCLAMATION_MARK_TOKEN: return parseUnaryExpression(isRhsExpr, isInConditionalExpr); case TRAP_KEYWORD: return parseTrapExpression(isRhsExpr, allowActions, isInConditionalExpr); case OPEN_BRACKET_TOKEN: return parseListConstructorExpr(); case LT_TOKEN: return parseTypeCastExpr(isRhsExpr, allowActions, isInConditionalExpr); case TABLE_KEYWORD: case STREAM_KEYWORD: case FROM_KEYWORD: return parseTableConstructorOrQuery(isRhsExpr); case ERROR_KEYWORD: if (peek(2).kind == SyntaxKind.IDENTIFIER_TOKEN) { return parseErrorBindingPattern(); } return parseErrorConstructorExpr(); case LET_KEYWORD: return 
parseLetExpression(isRhsExpr); case BACKTICK_TOKEN: return parseTemplateExpression(); case XML_KEYWORD: STToken nextNextToken = getNextNextToken(nextToken.kind); if (nextNextToken.kind == SyntaxKind.BACKTICK_TOKEN) { return parseXMLTemplateExpression(); } return parseSimpleTypeDescriptor(); case STRING_KEYWORD: nextNextToken = getNextNextToken(nextToken.kind); if (nextNextToken.kind == SyntaxKind.BACKTICK_TOKEN) { return parseStringTemplateExpression(); } return parseSimpleTypeDescriptor(); case FUNCTION_KEYWORD: return parseExplicitFunctionExpression(annots, isRhsExpr); case AT_TOKEN: break; case NEW_KEYWORD: return parseNewExpression(); case START_KEYWORD: return parseStartAction(annots); case FLUSH_KEYWORD: return parseFlushAction(); case LEFT_ARROW_TOKEN: return parseReceiveAction(); case WAIT_KEYWORD: return parseWaitAction(); case COMMIT_KEYWORD: return parseCommitAction(); case TRANSACTIONAL_KEYWORD: return parseTransactionalExpression(); case SERVICE_KEYWORD: return parseServiceConstructorExpression(annots); case BASE16_KEYWORD: case BASE64_KEYWORD: return parseByteArrayLiteral(); default: if (isSimpleType(nextToken.kind)) { return parseSimpleTypeDescriptor(); } break; } Solution solution = recover(nextToken, ParserRuleContext.TERMINAL_EXPRESSION, annots, isRhsExpr, allowActions, isInConditionalExpr); if (solution.action == Action.KEEP) { if (nextToken.kind == SyntaxKind.XML_KEYWORD) { return parseXMLTemplateExpression(); } return parseStringTemplateExpression(); } return parseTerminalExpression(annots, isRhsExpr, allowActions, isInConditionalExpr); } private boolean isValidExprStart(SyntaxKind tokenKind) { switch (tokenKind) { case DECIMAL_INTEGER_LITERAL_TOKEN: case HEX_INTEGER_LITERAL_TOKEN: case STRING_LITERAL_TOKEN: case NULL_KEYWORD: case TRUE_KEYWORD: case FALSE_KEYWORD: case DECIMAL_FLOATING_POINT_LITERAL_TOKEN: case HEX_FLOATING_POINT_LITERAL_TOKEN: case IDENTIFIER_TOKEN: case OPEN_PAREN_TOKEN: case CHECK_KEYWORD: case CHECKPANIC_KEYWORD: case 
OPEN_BRACE_TOKEN: case TYPEOF_KEYWORD: case PLUS_TOKEN: case MINUS_TOKEN: case NEGATION_TOKEN: case EXCLAMATION_MARK_TOKEN: case TRAP_KEYWORD: case OPEN_BRACKET_TOKEN: case LT_TOKEN: case TABLE_KEYWORD: case STREAM_KEYWORD: case FROM_KEYWORD: case ERROR_KEYWORD: case LET_KEYWORD: case BACKTICK_TOKEN: case XML_KEYWORD: case STRING_KEYWORD: case FUNCTION_KEYWORD: case AT_TOKEN: case NEW_KEYWORD: case START_KEYWORD: case FLUSH_KEYWORD: case LEFT_ARROW_TOKEN: case WAIT_KEYWORD: case SERVICE_KEYWORD: return true; default: return isSimpleType(tokenKind); } } /** * <p> * Parse a new expression. * </p> * <code> * new-expr := explicit-new-expr | implicit-new-expr * <br/> * explicit-new-expr := new type-descriptor ( arg-list ) * <br/> * implicit-new-expr := new [( arg-list )] * </code> * * @return Parsed NewExpression node. */ private STNode parseNewExpression() { STNode newKeyword = parseNewKeyword(); return parseNewKeywordRhs(newKeyword); } /** * <p> * Parse `new` keyword. * </p> * * @return Parsed NEW_KEYWORD Token. */ private STNode parseNewKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.NEW_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.NEW_KEYWORD); return parseNewKeyword(); } } private STNode parseNewKeywordRhs(STNode newKeyword) { STNode token = peek(); return parseNewKeywordRhs(token.kind, newKeyword); } /** * <p> * Parse an implicit or explicit new expression. * </p> * * @param kind next token kind. * @param newKeyword parsed node for `new` keyword. * @return Parsed new-expression node. 
 */
private STNode parseNewKeywordRhs(SyntaxKind kind, STNode newKeyword) {
    switch (kind) {
        case OPEN_PAREN_TOKEN:
            // `new (...)` — implicit-new with an argument list.
            return parseImplicitNewRhs(newKeyword);
        case SEMICOLON_TOKEN:
            // Bare `new;` — implicit-new with no argument list.
            break;
        case IDENTIFIER_TOKEN:
        case OBJECT_KEYWORD:
        case STREAM_KEYWORD:
            // `new T(...)` — explicit-new with a type descriptor.
            return parseTypeDescriptorInNewExpr(newKeyword);
        default:
            break;
    }

    return STNodeFactory.createImplicitNewExpressionNode(newKeyword, STNodeFactory.createEmptyNode());
}

/**
 * <p>
 * Parse an Explicit New expression.
 * </p>
 * <code>
 * explicit-new-expr := new type-descriptor ( arg-list )
 * </code>
 *
 * @param newKeyword Parsed `new` keyword.
 * @return the Parsed Explicit New Expression.
 */
private STNode parseTypeDescriptorInNewExpr(STNode newKeyword) {
    STNode typeDescriptor = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_NEW_EXPR);
    STNode parenthesizedArgsList = parseParenthesizedArgList();
    return STNodeFactory.createExplicitNewExpressionNode(newKeyword, typeDescriptor, parenthesizedArgsList);
}

/**
 * <p>
 * Parse an <code>implicit-new-expr</code> with arguments.
 * </p>
 *
 * @param newKeyword Parsed `new` keyword.
 * @return Parsed implicit-new-expr.
 */
private STNode parseImplicitNewRhs(STNode newKeyword) {
    STNode implicitNewArgList = parseParenthesizedArgList();
    return STNodeFactory.createImplicitNewExpressionNode(newKeyword, implicitNewArgList);
}

/**
 * <p>
 * Parse the parenthesized argument list for a <code>new-expr</code>.
 * </p>
 *
 * @return Parsed parenthesized rhs of <code>new-expr</code>.
 */
private STNode parseParenthesizedArgList() {
    STNode openParan = parseOpenParenthesis(ParserRuleContext.ARG_LIST_START);
    STNode arguments = parseArgsList();
    STNode closeParan = parseCloseParenthesis();
    return STNodeFactory.createParenthesizedArgList(openParan, arguments, closeParan);
}

/**
 * <p>
 * Parse the right-hand-side of an expression.
* </p> * <code>expr-rhs := (binary-op expression * | dot identifier * | open-bracket expression close-bracket * )*</code> * * @param precedenceLevel Precedence level of the expression that is being parsed currently * @param lhsExpr LHS expression of the expression * @param isRhsExpr Flag indicating whether this is on a rhsExpr of a statement * @param allowActions Flag indicating whether the current context support actions * @return Parsed node */ private STNode parseExpressionRhs(OperatorPrecedence precedenceLevel, STNode lhsExpr, boolean isRhsExpr, boolean allowActions) { return parseExpressionRhs(precedenceLevel, lhsExpr, isRhsExpr, allowActions, false, false); } /** * Parse the right hand side of an expression given the next token kind. * * @param currentPrecedenceLevel Precedence level of the expression that is being parsed currently * @param lhsExpr LHS expression * @param isRhsExpr Flag indicating whether this is a rhs expr or not * @param allowActions Flag indicating whether to allow actions or not * @param isInMatchGuard Flag indicating whether this expression is in a match-guard * @return Parsed node */ private STNode parseExpressionRhs(OperatorPrecedence currentPrecedenceLevel, STNode lhsExpr, boolean isRhsExpr, boolean allowActions, boolean isInMatchGuard, boolean isInConditionalExpr) { STNode actionOrExpression = parseExpressionRhsInternal(currentPrecedenceLevel, lhsExpr, isRhsExpr, allowActions, isInMatchGuard, isInConditionalExpr); if (!allowActions && isAction(actionOrExpression) && actionOrExpression.kind != SyntaxKind.BRACED_ACTION) { actionOrExpression = attachErrorExpectedActionFoundDiagnostic(actionOrExpression); } return actionOrExpression; } private STNode parseExpressionRhsInternal(OperatorPrecedence currentPrecedenceLevel, STNode lhsExpr, boolean isRhsExpr, boolean allowActions, boolean isInMatchGuard, boolean isInConditionalExpr) { SyntaxKind nextTokenKind = peek().kind; if (isEndOfExpression(nextTokenKind, isRhsExpr, isInMatchGuard, 
lhsExpr.kind)) { return lhsExpr; } if (lhsExpr.kind == SyntaxKind.ASYNC_SEND_ACTION) { return lhsExpr; } if (!isValidExprRhsStart(nextTokenKind, lhsExpr.kind)) { return recoverExpressionRhs(currentPrecedenceLevel, lhsExpr, isRhsExpr, allowActions, isInMatchGuard, isInConditionalExpr); } if (nextTokenKind == SyntaxKind.GT_TOKEN && peek(2).kind == SyntaxKind.GT_TOKEN) { if (peek(3).kind == SyntaxKind.GT_TOKEN) { nextTokenKind = SyntaxKind.TRIPPLE_GT_TOKEN; } else { nextTokenKind = SyntaxKind.DOUBLE_GT_TOKEN; } } OperatorPrecedence nextOperatorPrecedence = getOpPrecedence(nextTokenKind); if (currentPrecedenceLevel.isHigherThanOrEqual(nextOperatorPrecedence, allowActions)) { return lhsExpr; } STNode newLhsExpr; STNode operator; switch (nextTokenKind) { case OPEN_PAREN_TOKEN: newLhsExpr = parseFuncCall(lhsExpr); break; case OPEN_BRACKET_TOKEN: newLhsExpr = parseMemberAccessExpr(lhsExpr, isRhsExpr); break; case DOT_TOKEN: newLhsExpr = parseFieldAccessOrMethodCall(lhsExpr, isInConditionalExpr); break; case IS_KEYWORD: newLhsExpr = parseTypeTestExpression(lhsExpr, isInConditionalExpr); break; case RIGHT_ARROW_TOKEN: newLhsExpr = parseRemoteMethodCallOrAsyncSendAction(lhsExpr, isRhsExpr); break; case SYNC_SEND_TOKEN: newLhsExpr = parseSyncSendAction(lhsExpr); break; case RIGHT_DOUBLE_ARROW_TOKEN: newLhsExpr = parseImplicitAnonFunc(lhsExpr, isRhsExpr); break; case ANNOT_CHAINING_TOKEN: newLhsExpr = parseAnnotAccessExpression(lhsExpr, isInConditionalExpr); break; case OPTIONAL_CHAINING_TOKEN: newLhsExpr = parseOptionalFieldAccessExpression(lhsExpr, isInConditionalExpr); break; case QUESTION_MARK_TOKEN: newLhsExpr = parseConditionalExpression(lhsExpr); break; case DOT_LT_TOKEN: newLhsExpr = parseXMLFilterExpression(lhsExpr); break; case SLASH_LT_TOKEN: case DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN: case SLASH_ASTERISK_TOKEN: newLhsExpr = parseXMLStepExpression(lhsExpr); break; default: if (nextTokenKind == SyntaxKind.SLASH_TOKEN && peek(2).kind == SyntaxKind.LT_TOKEN) { 
// NOTE(review): continuation of parseExpressionRhsInternal — the method's start lies
// before this chunk. Code tokens below are unchanged; only comments were added.
SyntaxKind expectedNodeType = getExpectedNodeKind(3, isRhsExpr, isInMatchGuard, lhsExpr.kind);
                if (expectedNodeType == SyntaxKind.XML_STEP_EXPRESSION) {
                    newLhsExpr = createXMLStepExpression(lhsExpr);
                    break;
                }
            }
            // ">>" and ">>>" have dedicated token-combining parse routines; everything
            // else goes through the generic binary-operator path.
            if (nextTokenKind == SyntaxKind.DOUBLE_GT_TOKEN) {
                operator = parseSignedRightShiftToken();
            } else if (nextTokenKind == SyntaxKind.TRIPPLE_GT_TOKEN) {
                operator = parseUnsignedRightShiftToken();
            } else {
                operator = parseBinaryOperator();
            }
            // An action on the lhs of a binary expression must be parenthesized (braced).
            if (isAction(lhsExpr) && lhsExpr.kind != SyntaxKind.BRACED_ACTION) {
                lhsExpr = attachErrorExpectedActionFoundDiagnostic(lhsExpr);
            }
            STNode rhsExpr = parseExpression(nextOperatorPrecedence, isRhsExpr, false, isInConditionalExpr);
            newLhsExpr = STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, lhsExpr, operator,
                    rhsExpr);
            break;
    }

    // Recurse with the newly built node as the lhs, to consume any further rhs.
    return parseExpressionRhsInternal(currentPrecedenceLevel, newLhsExpr, isRhsExpr, allowActions, isInMatchGuard,
            isInConditionalExpr);
}

/**
 * Recover from an invalid token found while parsing the rhs of an expression, then
 * resume rhs parsing. If recovery removed a token, parsing restarts from the public
 * entry point; if a binary operator was inserted, parsing continues internally.
 *
 * @return Parsed node
 */
private STNode recoverExpressionRhs(OperatorPrecedence currentPrecedenceLevel, STNode lhsExpr, boolean isRhsExpr,
                                    boolean allowActions, boolean isInMatchGuard, boolean isInConditionalExpr) {
    STToken token = peek();
    Solution solution = recover(token, ParserRuleContext.EXPRESSION_RHS, currentPrecedenceLevel, lhsExpr, isRhsExpr,
            allowActions, isInMatchGuard, isInConditionalExpr);

    if (solution.action == Action.REMOVE) {
        return parseExpressionRhs(currentPrecedenceLevel, lhsExpr, isRhsExpr, allowActions, isInMatchGuard,
                isInConditionalExpr);
    }

    if (solution.ctx == ParserRuleContext.BINARY_OPERATOR) {
        // A binary operator fitting the current precedence level is inserted before retrying.
        SyntaxKind binaryOpKind = getBinaryOperatorKindToInsert(currentPrecedenceLevel);
        insertToken(binaryOpKind);
        return parseExpressionRhsInternal(currentPrecedenceLevel, lhsExpr, isRhsExpr, allowActions, isInMatchGuard,
                isInConditionalExpr);
    } else {
        return parseExpressionRhsInternal(currentPrecedenceLevel, lhsExpr, isRhsExpr, allowActions, isInMatchGuard,
                isInConditionalExpr);
    }
}

/**
 * Create an XML step expression (<code>lhs/&lt;name-pattern&gt;</code>) from the lhs
 * expression. The '/' and '&lt;' tokens must be adjacent (no whitespace between or
 * around them); otherwise a missing SLASH_LT token with a diagnostic is produced.
 *
 * @param lhsExpr Expression preceding the step
 * @return XML step expression node
 */
private STNode createXMLStepExpression(STNode lhsExpr) {
    STNode newLhsExpr;
    STNode slashToken = parseSlashToken();
    STNode ltToken = parseLTToken();

    STNode slashLT;
    if (hasTrailingMinutiae(slashToken) || hasLeadingMinutiae(ltToken)) {
        // Whitespace between '/' and '<' is invalid: emit a missing token carrying the
        // original tokens as invalid-node minutiae, so no source text is lost.
        List<STNodeDiagnostic> diagnostics = new ArrayList<>();
        diagnostics
                .add(SyntaxErrors.createDiagnostic(DiagnosticErrorCode.ERROR_INVALID_WHITESPACE_IN_SLASH_LT_TOKEN));
        slashLT = STNodeFactory.createMissingToken(SyntaxKind.SLASH_LT_TOKEN, diagnostics);
        slashLT = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(slashLT, slashToken);
        slashLT = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(slashLT, ltToken);
    } else {
        slashLT = STNodeFactory.createToken(SyntaxKind.SLASH_LT_TOKEN, slashToken.leadingMinutiae(),
                ltToken.trailingMinutiae());
    }

    STNode namePattern = parseXMLNamePatternChain(slashLT);
    newLhsExpr = STNodeFactory.createXMLStepExpressionNode(lhsExpr, namePattern);
    return newLhsExpr;
}

/**
 * Decide, by lookahead only, whether an upcoming <code>&lt;</code> starts an XML step
 * expression name-pattern or a type-cast expression. Does not consume any tokens.
 *
 * @param lookahead Position to start peeking from
 * @return {@link SyntaxKind#XML_STEP_EXPRESSION} or {@link SyntaxKind#TYPE_CAST_EXPRESSION}
 */
private SyntaxKind getExpectedNodeKind(int lookahead, boolean isRhsExpr, boolean isInMatchGuard,
                                       SyntaxKind precedingNodeKind) {
    STToken nextToken = peek(lookahead);
    switch (nextToken.kind) {
        case ASTERISK_TOKEN:
            return SyntaxKind.XML_STEP_EXPRESSION;
        case GT_TOKEN:
            break;
        case PIPE_TOKEN:
            // '|' may separate name-pattern alternatives: keep scanning.
            return getExpectedNodeKind(++lookahead, isRhsExpr, isInMatchGuard, precedingNodeKind);
        case IDENTIFIER_TOKEN:
            nextToken = peek(++lookahead);
            switch (nextToken.kind) {
                case GT_TOKEN:
                    break;
                case PIPE_TOKEN:
                    return getExpectedNodeKind(++lookahead, isRhsExpr, isInMatchGuard, precedingNodeKind);
                case COLON_TOKEN:
                    // Qualified name: could still be either a name-pattern or a type-ref.
                    nextToken = peek(++lookahead);
                    switch (nextToken.kind) {
                        case ASTERISK_TOKEN:
                        case GT_TOKEN:
                            return SyntaxKind.XML_STEP_EXPRESSION;
                        case IDENTIFIER_TOKEN:
                            nextToken = peek(++lookahead);
                            if (nextToken.kind == SyntaxKind.PIPE_TOKEN) {
                                return getExpectedNodeKind(++lookahead, isRhsExpr, isInMatchGuard,
                                        precedingNodeKind);
                            }
                            break;
                        default:
                            return SyntaxKind.TYPE_CAST_EXPRESSION;
                    }
                    break;
                default:
                    return SyntaxKind.TYPE_CAST_EXPRESSION;
            }
            break;
        default:
            return SyntaxKind.TYPE_CAST_EXPRESSION;
    }

    // We saw '>' — check what follows it to disambiguate.
    nextToken = peek(++lookahead);
    switch (nextToken.kind) {
        case OPEN_BRACKET_TOKEN:
        case OPEN_BRACE_TOKEN:
        case PLUS_TOKEN:
        case MINUS_TOKEN:
        case FROM_KEYWORD:
        case LET_KEYWORD:
            return SyntaxKind.XML_STEP_EXPRESSION;
        default:
            if (isValidExpressionStart(nextToken.kind, lookahead)) {
                break;
            }
            return SyntaxKind.XML_STEP_EXPRESSION;
    }
    return SyntaxKind.TYPE_CAST_EXPRESSION;
}

/** Returns true if the node carries trailing minutiae (trivia) — width grows with it. */
private boolean hasTrailingMinutiae(STNode node) {
    return node.widthWithTrailingMinutiae() > node.width();
}

/** Returns true if the node carries leading minutiae (trivia) — width grows with it. */
private boolean hasLeadingMinutiae(STNode node) {
    return node.widthWithLeadingMinutiae() > node.width();
}

/**
 * Check whether the given token can validly start the rhs of an expression, given the
 * kind of the node that precedes it. An open paren only continues an expression when
 * the preceding node is a (qualified or simple) name reference — i.e. a call.
 */
private boolean isValidExprRhsStart(SyntaxKind tokenKind, SyntaxKind precedingNodeKind) {
    switch (tokenKind) {
        case OPEN_PAREN_TOKEN:
            // Only a function/method call: preceding node must be a name reference.
            return precedingNodeKind == SyntaxKind.QUALIFIED_NAME_REFERENCE ||
                    precedingNodeKind == SyntaxKind.SIMPLE_NAME_REFERENCE;
        case DOT_TOKEN:
        case OPEN_BRACKET_TOKEN:
        case IS_KEYWORD:
        case RIGHT_ARROW_TOKEN:
        case RIGHT_DOUBLE_ARROW_TOKEN:
        case SYNC_SEND_TOKEN:
        case ANNOT_CHAINING_TOKEN:
        case OPTIONAL_CHAINING_TOKEN:
        case QUESTION_MARK_TOKEN:
        case COLON_TOKEN:
        case DOT_LT_TOKEN:
        case SLASH_LT_TOKEN:
        case DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN:
        case SLASH_ASTERISK_TOKEN:
            return true;
        default:
            return isBinaryOperator(tokenKind);
    }
}

/**
 * Parse member access expression.
* @param lhsExpr Container expression
* @param isRhsExpr Is this a rhs expression
* @return Member access expression
*/
private STNode parseMemberAccessExpr(STNode lhsExpr, boolean isRhsExpr) {
    startContext(ParserRuleContext.MEMBER_ACCESS_KEY_EXPR);
    STNode openBracket = parseOpenBracket();
    STNode keyExpr = parseMemberAccessKeyExprs(isRhsExpr);
    STNode closeBracket = parseCloseBracket();
    endContext();

    // Member access is allowed to have only one expression as the key-expr.
    // An empty key on a rhs member access is an error: synthesize a missing
    // name reference and attach the diagnostic to the close bracket.
    if (isRhsExpr && ((STNodeList) keyExpr).isEmpty()) {
        STNode missingVarRef = STNodeFactory
                .createSimpleNameReferenceNode(SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN));
        keyExpr = STNodeFactory.createNodeList(missingVarRef);
        closeBracket = SyntaxErrors.addDiagnostic(closeBracket,
                DiagnosticErrorCode.ERROR_MISSING_KEY_EXPR_IN_MEMBER_ACCESS_EXPR);
    }
    return STNodeFactory.createIndexedExpressionNode(lhsExpr, openBracket, keyExpr, closeBracket);
}

/**
 * Parse key expression of a member access expression. A type descriptor
 * that starts with a type-ref (e.g: T[a][b]) also goes through this
 * method.
 * <p>
 * <code>key-expression := single-key-expression | multi-key-expression</code>
 *
 * @param isRhsExpr Is this a rhs expression
 * @return Key expression
 */
private STNode parseMemberAccessKeyExprs(boolean isRhsExpr) {
    List<STNode> exprList = new ArrayList<>();

    // Parse the remaining exprs, separated by commas, until ']' or another list end.
    STNode keyExpr;
    STNode keyExprEnd;
    while (!isEndOfTypeList(peek().kind)) {
        keyExpr = parseKeyExpr(isRhsExpr);
        exprList.add(keyExpr);
        keyExprEnd = parseMemberAccessKeyExprEnd();
        if (keyExprEnd == null) {
            break;
        }
        exprList.add(keyExprEnd);
    }

    return STNodeFactory.createNodeList(exprList);
}

/**
 * Parse a single key expression. On the lhs (type context), a bare '*' is
 * allowed as an inferred-length indicator and becomes an ASTERISK_LITERAL.
 */
private STNode parseKeyExpr(boolean isRhsExpr) {
    if (!isRhsExpr && peek().kind == SyntaxKind.ASTERISK_TOKEN) {
        return STNodeFactory.createBasicLiteralNode(SyntaxKind.ASTERISK_LITERAL, consume());
    }

    return parseExpression(isRhsExpr);
}

/** Parse the separator after a key expression: ',' continues, ']' ends (returns null). */
private STNode parseMemberAccessKeyExprEnd() {
    switch (peek().kind) {
        case COMMA_TOKEN:
            return parseComma();
        case CLOSE_BRACKET_TOKEN:
            return null;
        default:
            recover(peek(), ParserRuleContext.MEMBER_ACCESS_KEY_EXPR_END);
            return parseMemberAccessKeyExprEnd();
    }
}

/**
 * Parse close bracket.
 *
 * @return Parsed node
 */
private STNode parseCloseBracket() {
    STToken token = peek();
    if (token.kind == SyntaxKind.CLOSE_BRACKET_TOKEN) {
        return consume();
    } else {
        recover(token, ParserRuleContext.CLOSE_BRACKET);
        return parseCloseBracket();
    }
}

/**
 * Parse field access, xml required attribute access expressions or method call expression.
 * <p>
 * <code>
 * field-access-expr := expression . field-name
 * <br/>
 * xml-required-attribute-access-expr := expression . xml-attribute-name
 * <br/>
 * xml-attribute-name := xml-qualified-name | qualified-identifier | identifier
 * <br/>
 * method-call-expr := expression . method-name ( arg-list )
 * </code>
 *
 * @param lhsExpr Preceding expression of the field access or method call
 * @return One of <code>field-access-expression</code> or <code>method-call-expression</code>.
 */
private STNode parseFieldAccessOrMethodCall(STNode lhsExpr, boolean isInConditionalExpr) {
    STNode dotToken = parseDotToken();
    STToken token = peek();
    // 'map' and 'start' are keywords but valid method names after a dot (e.g. x.map()).
    if (token.kind == SyntaxKind.MAP_KEYWORD || token.kind == SyntaxKind.START_KEYWORD) {
        STNode methodName = getKeywordAsSimpleNameRef();
        STNode openParen = parseOpenParenthesis(ParserRuleContext.ARG_LIST_START);
        STNode args = parseArgsList();
        STNode closeParen = parseCloseParenthesis();
        return STNodeFactory.createMethodCallExpressionNode(lhsExpr, dotToken, methodName, openParen, args,
                closeParen);
    }

    STNode fieldOrMethodName = parseFieldAccessIdentifier(isInConditionalExpr);
    if (fieldOrMethodName.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {
        return STNodeFactory.createFieldAccessExpressionNode(lhsExpr, dotToken, fieldOrMethodName);
    }

    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.OPEN_PAREN_TOKEN) {
        // A '(' after the name makes this a method call.
        STNode openParen = parseOpenParenthesis(ParserRuleContext.ARG_LIST_START);
        STNode args = parseArgsList();
        STNode closeParen = parseCloseParenthesis();
        return STNodeFactory.createMethodCallExpressionNode(lhsExpr, dotToken, fieldOrMethodName, openParen, args,
                closeParen);
    }

    // Otherwise this is a field access.
    return STNodeFactory.createFieldAccessExpressionNode(lhsExpr, dotToken, fieldOrMethodName);
}

/** Consume the current keyword token and wrap it as a simple name reference. */
private STNode getKeywordAsSimpleNameRef() {
    STToken mapKeyword = consume();
    STNode methodName = STNodeFactory.createIdentifierToken(mapKeyword.text(), mapKeyword.leadingMinutiae(),
            mapKeyword.trailingMinutiae(), mapKeyword.diagnostics());
    methodName = STNodeFactory.createSimpleNameReferenceNode(methodName);
    return methodName;
}

/**
 * <p>
 * Parse braced expression.
 * </p>
 * <code>braced-expr := ( expression )</code>
 *
 * @param isRhsExpr Flag indicating whether this is on a rhsExpr of a statement
 * @param allowActions Allow actions
 * @return Parsed node
 */
private STNode parseBracedExpression(boolean isRhsExpr, boolean allowActions) {
    STNode openParen = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS);

    if (peek().kind == SyntaxKind.CLOSE_PAREN_TOKEN) {
        // "()" is either a nil literal or an empty anon-func parameter list.
        return parseNilLiteralOrEmptyAnonFuncParamRhs(openParen);
    }

    // Context is ended inside parseBracedExprOrAnonFuncParamRhs.
    startContext(ParserRuleContext.BRACED_EXPR_OR_ANON_FUNC_PARAMS);
    STNode expr;
    if (allowActions) {
        expr = parseExpression(DEFAULT_OP_PRECEDENCE, isRhsExpr, true);
    } else {
        expr = parseExpression(isRhsExpr);
    }
    return parseBracedExprOrAnonFuncParamRhs(openParen, expr, isRhsExpr);
}

/**
 * Distinguish "()" as a nil literal from the empty parameter list of an implicit
 * anonymous function: the latter is followed by "=>".
 */
private STNode parseNilLiteralOrEmptyAnonFuncParamRhs(STNode openParen) {
    STNode closeParen = parseCloseParenthesis();
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.RIGHT_DOUBLE_ARROW_TOKEN) {
        return STNodeFactory.createNilLiteralNode(openParen, closeParen);
    } else {
        STNode params = STNodeFactory.createEmptyNodeList();
        STNode anonFuncParam =
                STNodeFactory.createImplicitAnonymousFunctionParameters(openParen, params, closeParen);
        return anonFuncParam;
    }
}

/**
 * Given "( expr", decide whether this is a braced expression/action or the parameter
 * list of an implicit anonymous function ("(a, b) => ..."). Ends the context started
 * in {@code parseBracedExpression}.
 */
private STNode parseBracedExprOrAnonFuncParamRhs(STNode openParen, STNode expr, boolean isRhsExpr) {
    STToken nextToken = peek();
    if (expr.kind == SyntaxKind.SIMPLE_NAME_REFERENCE) {
        switch (nextToken.kind) {
            case CLOSE_PAREN_TOKEN:
                break;
            case COMMA_TOKEN:
                // "(name," can only be an anon-func parameter list.
                return parseImplicitAnonFunc(openParen, expr, isRhsExpr);
            default:
                recover(nextToken, ParserRuleContext.BRACED_EXPR_OR_ANON_FUNC_PARAM_RHS, openParen, expr,
                        isRhsExpr);
                return parseBracedExprOrAnonFuncParamRhs(openParen, expr, isRhsExpr);
        }
    }

    STNode closeParen = parseCloseParenthesis();
    endContext();
    if (isAction(expr)) {
        return STNodeFactory.createBracedExpressionNode(SyntaxKind.BRACED_ACTION, openParen, expr, closeParen);
    }
    return STNodeFactory.createBracedExpressionNode(SyntaxKind.BRACED_EXPRESSION, openParen, expr, closeParen);
}

/**
 * Check whether a given node is an action node.
 *
 * @param node Node to check
 * @return <code>true</code> if the node is an action node. <code>false</code> otherwise
 */
private boolean isAction(STNode node) {
    switch (node.kind) {
        case REMOTE_METHOD_CALL_ACTION:
        case BRACED_ACTION:
        case CHECK_ACTION:
        case START_ACTION:
        case TRAP_ACTION:
        case FLUSH_ACTION:
        case ASYNC_SEND_ACTION:
        case SYNC_SEND_ACTION:
        case RECEIVE_ACTION:
        case WAIT_ACTION:
        case QUERY_ACTION:
        case COMMIT_ACTION:
            return true;
        default:
            return false;
    }
}

/**
 * Check whether the given token is an end of a expression.
 *
 * @param tokenKind Token to check
 * @param isRhsExpr Flag indicating whether this is on a rhsExpr of a statement
 * @return <code>true</code> if the token represents an end of a block. <code>false</code> otherwise
 */
private boolean isEndOfExpression(SyntaxKind tokenKind, boolean isRhsExpr, boolean isInMatchGuard,
                                  SyntaxKind precedingNodeKind) {
    if (!isRhsExpr) {
        // In a non-rhs (e.g. type/lhs) position compound assignment operators end the expr.
        if (isCompoundBinaryOperator(tokenKind)) {
            return true;
        }

        if (isInMatchGuard && tokenKind == SyntaxKind.RIGHT_DOUBLE_ARROW_TOKEN) {
            return true;
        }

        return !isValidExprRhsStart(tokenKind, precedingNodeKind);
    }

    switch (tokenKind) {
        case EOF_TOKEN:
        case CLOSE_BRACE_TOKEN:
        case OPEN_BRACE_TOKEN:
        case CLOSE_PAREN_TOKEN:
        case CLOSE_BRACKET_TOKEN:
        case SEMICOLON_TOKEN:
        case COMMA_TOKEN:
        case PUBLIC_KEYWORD:
        case CONST_KEYWORD:
        case LISTENER_KEYWORD:
        case RESOURCE_KEYWORD:
        case EQUAL_TOKEN:
        case DOCUMENTATION_STRING:
        case AT_TOKEN:
        case AS_KEYWORD:
        case IN_KEYWORD:
        case FROM_KEYWORD:
        case WHERE_KEYWORD:
        case LET_KEYWORD:
        case SELECT_KEYWORD:
        case DO_KEYWORD:
        case COLON_TOKEN:
        case ON_KEYWORD:
        case CONFLICT_KEYWORD:
        case LIMIT_KEYWORD:
        case JOIN_KEYWORD:
        case OUTER_KEYWORD:
        case ORDER_KEYWORD:
        case BY_KEYWORD:
        case ASCENDING_KEYWORD:
        case DESCENDING_KEYWORD:
        case EQUALS_KEYWORD:
            return true;
        case RIGHT_DOUBLE_ARROW_TOKEN:
            // "=>" only terminates the expression inside a match guard.
            return isInMatchGuard;
        default:
            return isSimpleType(tokenKind);
    }
}

/**
 * Parse basic literals.
It is assumed that we come here after validation.
 *
 * @return Parsed node
 */
private STNode parseBasicLiteral() {
    STNode literalToken = consume();
    return parseBasicLiteral(literalToken);
}

/** Map the consumed literal token to the corresponding basic-literal node kind. */
private STNode parseBasicLiteral(STNode literalToken) {
    SyntaxKind nodeKind;
    switch (literalToken.kind) {
        case NULL_KEYWORD:
            nodeKind = SyntaxKind.NULL_LITERAL;
            break;
        case TRUE_KEYWORD:
        case FALSE_KEYWORD:
            nodeKind = SyntaxKind.BOOLEAN_LITERAL;
            break;
        case DECIMAL_INTEGER_LITERAL_TOKEN:
        case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
        case HEX_INTEGER_LITERAL_TOKEN:
        case HEX_FLOATING_POINT_LITERAL_TOKEN:
            nodeKind = SyntaxKind.NUMERIC_LITERAL;
            break;
        case STRING_LITERAL_TOKEN:
            nodeKind = SyntaxKind.STRING_LITERAL;
            break;
        case ASTERISK_TOKEN:
            nodeKind = SyntaxKind.ASTERISK_LITERAL;
            break;
        default:
            nodeKind = literalToken.kind;
    }
    return STNodeFactory.createBasicLiteralNode(nodeKind, literalToken);
}

/**
 * Parse function call expression.
 * <code>function-call-expr := function-reference ( arg-list )
 * function-reference := variable-reference</code>
 *
 * @param identifier Function name
 * @return Function call expression
 */
private STNode parseFuncCall(STNode identifier) {
    STNode openParen = parseOpenParenthesis(ParserRuleContext.ARG_LIST_START);
    STNode args = parseArgsList();
    STNode closeParen = parseCloseParenthesis();
    return STNodeFactory.createFunctionCallExpressionNode(identifier, openParen, args, closeParen);
}

/**
 * <p>
 * Parse error constructor expression.
 * </p>
 * <code>
 * error-constructor-expr := error ( arg-list )
 * </code>
 *
 * @return Error constructor expression
 */
private STNode parseErrorConstructorExpr() {
    STNode errorKeyword = parseErrorKeyword();
    // The 'error' keyword acts as the function reference of the call.
    errorKeyword = createBuiltinSimpleNameReference(errorKeyword);
    return parseFuncCall(errorKeyword);
}

/**
 * Parse function call argument list.
 *
 * @return Parsed args list
 */
private STNode parseArgsList() {
    startContext(ParserRuleContext.ARG_LIST);
    STToken token = peek();
    if (isEndOfParametersList(token.kind)) {
        STNode args = STNodeFactory.createEmptyNodeList();
        endContext();
        return args;
    }

    STNode firstArg = parseArgument();
    STNode argsList = parseArgList(firstArg);
    endContext();
    return argsList;
}

/**
 * Parse follow up arguments.
 *
 * @param firstArg first argument in the list
 * @return the argument list
 */
private STNode parseArgList(STNode firstArg) {
    ArrayList<STNode> argsList = new ArrayList<>();
    argsList.add(firstArg);
    SyntaxKind lastValidArgKind = firstArg.kind;

    STToken nextToken = peek();
    while (!isEndOfParametersList(nextToken.kind)) {
        STNode argEnd = parseArgEnd();
        if (argEnd == null) {
            // null marks the end of args
            break;
        }

        STNode curArg = parseArgument();
        DiagnosticErrorCode errorCode = validateArgumentOrder(lastValidArgKind, curArg.kind);
        if (errorCode == null) {
            argsList.add(argEnd);
            argsList.add(curArg);
            lastValidArgKind = curArg.kind;
        } else if (errorCode == DiagnosticErrorCode.ERROR_NAMED_ARG_FOLLOWED_BY_POSITIONAL_ARG &&
                isMissingPositionalArg(curArg)) {
            // A missing positional arg after a named arg is kept as-is (recovery artifact).
            argsList.add(argEnd);
            argsList.add(curArg);
        } else {
            // Out-of-order args are attached to the previous node as invalid-node minutiae.
            updateLastNodeInListWithInvalidNode(argsList, argEnd, null);
            updateLastNodeInListWithInvalidNode(argsList, curArg, errorCode);
        }

        nextToken = peek();
    }
    return STNodeFactory.createNodeList(argsList);
}

/**
 * Validate positional/named/rest argument ordering: positional args may not follow
 * named args, and nothing may follow a rest arg.
 *
 * @return the diagnostic to report, or null if the order is valid
 */
private DiagnosticErrorCode validateArgumentOrder(SyntaxKind prevArgKind, SyntaxKind curArgKind) {
    DiagnosticErrorCode errorCode = null;
    switch (prevArgKind) {
        case POSITIONAL_ARG:
            break;
        case NAMED_ARG:
            if (curArgKind == SyntaxKind.POSITIONAL_ARG) {
                errorCode = DiagnosticErrorCode.ERROR_NAMED_ARG_FOLLOWED_BY_POSITIONAL_ARG;
            }
            break;
        case REST_ARG:
            errorCode = DiagnosticErrorCode.ERROR_ARG_FOLLOWED_BY_REST_ARG;
            break;
        default:
            throw new IllegalStateException("Invalid SyntaxKind in an argument");
    }
    return errorCode;
}

/** Returns true if the positional arg's expression is a missing (recovered) name reference. */
private boolean isMissingPositionalArg(STNode arg) {
    STNode expr = ((STPositionalArgumentNode) arg).expression;
    return expr.kind == SyntaxKind.SIMPLE_NAME_REFERENCE && ((STSimpleNameReferenceNode) expr).name.isMissing();
}

/** Parse the separator after an argument: ',' continues, ')' ends (returns null). */
private STNode parseArgEnd() {
    switch (peek().kind) {
        case COMMA_TOKEN:
            return parseComma();
        case CLOSE_PAREN_TOKEN:
            // null marks the end of args
            return null;
        default:
            recover(peek(), ParserRuleContext.ARG_END);
            return parseArgEnd();
    }
}

/**
 * Parse function call argument.
 *
 * @return Parsed argument node
 */
private STNode parseArgument() {
    STNode arg;
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case ELLIPSIS_TOKEN:
            STToken ellipsis = consume();
            STNode expr = parseExpression();
            arg = STNodeFactory.createRestArgumentNode(ellipsis, expr);
            break;
        case IDENTIFIER_TOKEN:
            // An identifier may start either a named arg or a positional arg.
            arg = parseNamedOrPositionalArg();
            break;
        default:
            if (isValidExprStart(nextToken.kind)) {
                expr = parseExpression();
                arg = STNodeFactory.createPositionalArgumentNode(expr);
                break;
            }
            recover(peek(), ParserRuleContext.ARG_START);
            return parseArgument();
    }
    return arg;
}

/**
 * Parse positional or named arg. This method assumed peek()/peek(1)
 * is always an identifier.
 *
 * @return Parsed argument node
 */
private STNode parseNamedOrPositionalArg() {
    STNode argNameOrExpr = parseTerminalExpression(true, false, false);
    STToken secondToken = peek();
    switch (secondToken.kind) {
        case EQUAL_TOKEN:
            // "name = expr" is a named argument.
            STNode equal = parseAssignOp();
            STNode valExpr = parseExpression();
            return STNodeFactory.createNamedArgumentNode(argNameOrExpr, equal, valExpr);
        case COMMA_TOKEN:
        case CLOSE_PAREN_TOKEN:
            return STNodeFactory.createPositionalArgumentNode(argNameOrExpr);
        default:
            // The identifier is the start of a larger expression: continue parsing its rhs.
            argNameOrExpr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, argNameOrExpr, true, false);
            return STNodeFactory.createPositionalArgumentNode(argNameOrExpr);
    }
}

/**
 * Parse object type descriptor.
 *
 * @return Parsed node
 */
private STNode parseObjectTypeDescriptor() {
    startContext(ParserRuleContext.OBJECT_TYPE_DESCRIPTOR);
    STNode objectTypeQualifiers = parseObjectTypeQualifiers();
    STNode objectKeyword = parseObjectKeyword();
    STNode openBrace = parseOpenBrace();
    STNode objectMembers = parseObjectMembers();
    STNode closeBrace = parseCloseBrace();
    endContext();
    return STNodeFactory.createObjectTypeDescriptorNode(objectTypeQualifiers, objectKeyword, openBrace,
            objectMembers, closeBrace);
}

/**
 * Parse object type qualifiers.
 *
 * @return Parsed node
 */
private STNode parseObjectTypeQualifiers() {
    STNode firstQualifier;
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case CLIENT_KEYWORD:
            firstQualifier = parseClientKeyword();
            break;
        case ABSTRACT_KEYWORD:
            firstQualifier = parseAbstractKeyword();
            break;
        case READONLY_KEYWORD:
            firstQualifier = parseReadonlyKeyword();
            break;
        case OBJECT_KEYWORD:
            // No qualifiers present.
            return STNodeFactory.createEmptyNodeList();
        default:
            recover(nextToken, ParserRuleContext.OBJECT_TYPE_QUALIFIER);
            return parseObjectTypeQualifiers();
    }

    return parseObjectTypeNextQualifiers(firstQualifier);
}

/**
 * Parse up to two further qualifiers after the first one. A repeated qualifier is
 * consumed and attached to the previous node as an invalid-node with a diagnostic.
 */
private STNode parseObjectTypeNextQualifiers(STNode firstQualifier) {
    List<STNode> qualifiers = new ArrayList<>();
    qualifiers.add(firstQualifier);

    // Parse the second and third qualifiers
    for (int i = 0; i < 2; i++) {
        STNode nextToken = peek();
        if (isNodeWithSyntaxKindInList(qualifiers, nextToken.kind)) {
            // Consumed a duplicated qualifier: report and continue.
            nextToken = consume();
            updateLastNodeInListWithInvalidNode(qualifiers, nextToken,
                    DiagnosticErrorCode.ERROR_SAME_OBJECT_TYPE_QUALIFIER);
            continue;
        }

        STNode nextQualifier;
        switch (nextToken.kind) {
            case CLIENT_KEYWORD:
                nextQualifier = parseClientKeyword();
                break;
            case ABSTRACT_KEYWORD:
                nextQualifier = parseAbstractKeyword();
                break;
            case READONLY_KEYWORD:
                nextQualifier = parseReadonlyKeyword();
                break;
            case OBJECT_KEYWORD:
            default:
                return STNodeFactory.createNodeList(qualifiers);
        }
        qualifiers.add(nextQualifier);
    }

    return STNodeFactory.createNodeList(qualifiers);
}

/**
 * Parse client keyword.
 *
 * @return Parsed node
 */
private STNode parseClientKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.CLIENT_KEYWORD) {
        return consume();
    } else {
        recover(token, ParserRuleContext.CLIENT_KEYWORD);
        return parseClientKeyword();
    }
}

/**
 * Parse abstract keyword.
 *
 * @return Parsed node
 */
private STNode parseAbstractKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.ABSTRACT_KEYWORD) {
        return consume();
    } else {
        recover(token, ParserRuleContext.ABSTRACT_KEYWORD);
        return parseAbstractKeyword();
    }
}

/**
 * Parse object keyword.
 *
 * @return Parsed node
 */
private STNode parseObjectKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.OBJECT_KEYWORD) {
        return consume();
    } else {
        recover(token, ParserRuleContext.OBJECT_KEYWORD);
        return parseObjectKeyword();
    }
}

/**
 * Parse object members.
 *
 * @return Parsed node
 */
private STNode parseObjectMembers() {
    ArrayList<STNode> objectMembers = new ArrayList<>();
    while (!isEndOfObjectTypeNode()) {
        startContext(ParserRuleContext.OBJECT_MEMBER);
        STNode member = parseObjectMember();
        endContext();

        // Null member indicates the end of object members
        if (member == null) {
            break;
        }
        objectMembers.add(member);
    }

    return STNodeFactory.createNodeList(objectMembers);
}

/** Parse a single object member, handling optional leading metadata (annotations/doc). */
private STNode parseObjectMember() {
    STNode metadata;
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case EOF_TOKEN:
        case CLOSE_BRACE_TOKEN:
            // Null return marks the end of members.
            return null;
        case ASTERISK_TOKEN:
        case PUBLIC_KEYWORD:
        case PRIVATE_KEYWORD:
        case REMOTE_KEYWORD:
        case FUNCTION_KEYWORD:
        case TRANSACTIONAL_KEYWORD:
        case RESOURCE_KEYWORD:
            metadata = STNodeFactory.createEmptyNode();
            break;
        case DOCUMENTATION_STRING:
        case AT_TOKEN:
            metadata = parseMetaData();
            break;
        default:
            if (isTypeStartingToken(nextToken.kind)) {
                metadata = STNodeFactory.createEmptyNode();
                break;
            }
            recover(peek(), ParserRuleContext.OBJECT_MEMBER_START);
            return parseObjectMember();
    }

    return parseObjectMemberWithoutMeta(metadata);
}

/** Parse the object member that follows its (possibly empty) metadata. */
private STNode parseObjectMemberWithoutMeta(STNode metadata) {
    STNode member;
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case EOF_TOKEN:
        case CLOSE_BRACE_TOKEN:
            // Null return marks the end of members.
            return null;
        case ASTERISK_TOKEN:
            // "*T;" is a type-reference (inclusion) member.
            STNode asterisk = consume();
            STNode type = parseTypeReference();
            STNode semicolonToken = parseSemicolon();
            member = STNodeFactory.createTypeReferenceNode(asterisk, type, semicolonToken);
            break;
        case PUBLIC_KEYWORD:
        case PRIVATE_KEYWORD:
            STNode visibilityQualifier = parseObjectMemberVisibility();
            member = parseObjectMethodOrField(metadata, visibilityQualifier);
            break;
        case REMOTE_KEYWORD:
        case FUNCTION_KEYWORD:
        case TRANSACTIONAL_KEYWORD:
        case RESOURCE_KEYWORD:
            member = parseObjectMethod(metadata, new ArrayList<>());
            break;
        default:
            if (isTypeStartingToken(nextToken.kind)) {
                member = parseObjectField(metadata, STNodeFactory.createEmptyNode());
                break;
            }
            recover(peek(), ParserRuleContext.OBJECT_MEMBER_WITHOUT_METADATA, metadata);
            return parseObjectMemberWithoutMeta(metadata);
    }

    return member;
}

/**
 * Parse object visibility. Visibility can be <code>public</code> or <code>private</code>.
 *
 * @return Parsed node
 */
private STNode parseObjectMemberVisibility() {
    STToken token = peek();
    if (token.kind == SyntaxKind.PUBLIC_KEYWORD || token.kind == SyntaxKind.PRIVATE_KEYWORD) {
        return consume();
    } else {
        recover(token, ParserRuleContext.OBJECT_MEMBER_QUALIFIER);
        return parseObjectMemberVisibility();
    }
}

/**
 * Parse an object member, given the visibility modifier. Object member can have
 * only one visibility qualifier. This mean the methodQualifiers list can have
 * one qualifier at-most.
 *
 * @param metadata Metadata
 * @param visibilityQualifier Visibility qualifier
 * @return Parse object member node
 */
private STNode parseObjectMethodOrField(STNode metadata, STNode visibilityQualifier) {
    STToken nextToken = peek(1);
    List<STNode> qualifiers = new ArrayList<>();
    switch (nextToken.kind) {
        case REMOTE_KEYWORD:
        case FUNCTION_KEYWORD:
        case TRANSACTIONAL_KEYWORD:
        case RESOURCE_KEYWORD:
            if (visibilityQualifier != null) {
                qualifiers.add(visibilityQualifier);
            }
            return parseObjectMethod(metadata, qualifiers);
        case IDENTIFIER_TOKEN:
            // An identifier not followed by '(' must be a field (type-name field-name).
            STToken nextNextToken = peek(2);
            if (nextNextToken.kind != SyntaxKind.OPEN_PAREN_TOKEN) {
                return parseObjectField(metadata, visibilityQualifier);
            }
            break;
        default:
            if (isTypeStartingToken(nextToken.kind)) {
                return parseObjectField(metadata, visibilityQualifier);
            }
            break;
    }

    recover(peek(), ParserRuleContext.OBJECT_FUNC_OR_FIELD_WITHOUT_VISIBILITY, metadata, visibilityQualifier);
    return parseObjectMethodOrField(metadata, visibilityQualifier);
}

/**
 * Parse function qualifiers.
 *
 * @return Parsed node
 */
private STNode parseFunctionQualifiers(ParserRuleContext context, List<STNode> qualifierList) {
    STToken nextToken = peek();
    while (!isEndOfFunctionQualifiers(nextToken.kind)) {
        STNode qualifier;
        switch (nextToken.kind) {
            case REMOTE_KEYWORD:
                qualifier = parseRemoteKeyword();
                break;
            case TRANSACTIONAL_KEYWORD:
                qualifier = parseTransactionalKeyword();
                break;
            case RESOURCE_KEYWORD:
                qualifier = parseResourceKeyword();
                break;
            default:
                recover(peek(), context, context, qualifierList);
                return parseFunctionQualifiers(context, qualifierList);
        }

        DiagnosticCode diagnosticCode = validateFunctionQualifier(qualifier, context, qualifierList);
        if (diagnosticCode != null) {
            // An invalid qualifier is attached to the next token (if it is the first) or to
            // the previous valid qualifier, as invalid-node minutiae with the diagnostic.
            if (qualifierList.size() == 0) {
                addInvalidNodeToNextToken(qualifier, diagnosticCode, qualifier.toString().trim());
            } else {
                updateLastNodeInListWithInvalidNode(qualifierList, qualifier, diagnosticCode);
            }
        } else {
            qualifierList.add(qualifier);
        }
        nextToken = peek();
    }

    return STNodeFactory.createNodeList(qualifierList);
}

/** Qualifier scanning stops at the 'function' keyword or EOF. */
private boolean isEndOfFunctionQualifiers(SyntaxKind tokenKind) {
    switch (tokenKind) {
        case FUNCTION_KEYWORD:
        case EOF_TOKEN:
            return true;
        default:
            return false;
    }
}

/**
 * Validate a qualifier against the parsing context ('remote' only on object methods,
 * 'resource' only on resource definitions) and against duplicates.
 *
 * @return the diagnostic to report, or null if the qualifier is valid here
 */
private DiagnosticCode validateFunctionQualifier(STNode currentQualifier, ParserRuleContext context,
                                                 List<STNode> qualifierList) {
    switch (currentQualifier.kind) {
        case REMOTE_KEYWORD:
            if (context != ParserRuleContext.OBJECT_METHOD_START) {
                return DiagnosticErrorCode.ERROR_QUALIFIER_NOT_ALLOWED;
            }
            break;
        case TRANSACTIONAL_KEYWORD:
            break;
        default:
            if (context != ParserRuleContext.RESOURCE_DEF_QUALIFIERS) {
                return DiagnosticErrorCode.ERROR_QUALIFIER_NOT_ALLOWED;
            }
    }

    return validateFunctionQualifier(currentQualifier, qualifierList);
}

/** Returns a duplicate-qualifier diagnostic if the qualifier is already in the list. */
private DiagnosticCode validateFunctionQualifier(STNode currentQualifier, List<STNode> qualifierList) {
    for (STNode node : qualifierList) {
        if (node.kind == currentQualifier.kind) {
            return DiagnosticErrorCode.ERROR_DUPLICATE_QUALIFIER;
        }
    }
    return null;
}

/** Parse the 'remote' keyword, recovering if it is not the next token. */
private STNode parseRemoteKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.REMOTE_KEYWORD) {
        return consume();
    } else {
        recover(token, ParserRuleContext.REMOTE_KEYWORD);
        return parseRemoteKeyword();
    }
}

/**
 * Parse an object field: <code>[readonly] type-descriptor field-name ...</code>.
 * Handles the ambiguity where 'readonly' may itself be the field's type.
 */
private STNode parseObjectField(STNode metadata, STNode methodQualifiers) {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.READONLY_KEYWORD) {
        STNode readonlyQualifier = STNodeFactory.createEmptyNode();
        STNode type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER);
        STNode fieldName = parseVariableName();
        return parseObjectFieldRhs(metadata, methodQualifiers, readonlyQualifier, type, fieldName);
    }

    // If the readonly-keyword is present, check whether its qualifier
    // or the readonly-type-desc.
    STNode type;
    STNode readonlyQualifier = parseReadonlyKeyword();
    nextToken = peek();
    if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {
        STNode fieldNameOrTypeDesc = parseQualifiedIdentifier(ParserRuleContext.RECORD_FIELD_NAME_OR_TYPE_NAME);
        if (fieldNameOrTypeDesc.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {
            // readonly a:b
            // Then treat "a:b" as the type-desc
            type = fieldNameOrTypeDesc;
        } else {
            // readonly a
            nextToken = peek();
            switch (nextToken.kind) {
                case SEMICOLON_TOKEN: // readonly a;
                case EQUAL_TOKEN: // readonly a =
                    // Then treat "readonly" as type-desc, and "a" as the field-name
                    type = createBuiltinSimpleNameReference(readonlyQualifier);
                    readonlyQualifier = STNodeFactory.createEmptyNode();
                    STNode fieldName = ((STSimpleNameReferenceNode) fieldNameOrTypeDesc).name;
                    return parseObjectFieldRhs(metadata, methodQualifiers, readonlyQualifier, type, fieldName);
                default:
                    // else, treat a as the type-name
                    type = parseComplexTypeDescriptor(fieldNameOrTypeDesc,
                            ParserRuleContext.TYPE_DESC_IN_RECORD_FIELD, false);
                    break;
            }
        }
    } else if (isTypeStartingToken(nextToken.kind)) {
        type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_RECORD_FIELD);
    } else {
        // 'readonly' itself is the (start of the) type descriptor.
        readonlyQualifier = createBuiltinSimpleNameReference(readonlyQualifier);
        type = parseComplexTypeDescriptor(readonlyQualifier, ParserRuleContext.TYPE_DESC_IN_RECORD_FIELD, false);
        readonlyQualifier = STNodeFactory.createEmptyNode();
    }

    STNode fieldName = parseVariableName();
    return parseObjectFieldRhs(metadata, methodQualifiers, readonlyQualifier, type, fieldName);
}

/**
 * Parse object
field rhs, and complete the object field parsing. Returns the parsed object field.
 *
 * @param metadata Metadata
 * @param visibilityQualifier Visibility qualifier
 * @param readonlyQualifier Readonly qualifier
 * @param type Type descriptor
 * @param fieldName Field name
 * @return Parsed object field
 */
private STNode parseObjectFieldRhs(STNode metadata, STNode visibilityQualifier, STNode readonlyQualifier,
                                   STNode type, STNode fieldName) {
    STToken nextToken = peek();
    STNode equalsToken;
    STNode expression;
    STNode semicolonToken;
    switch (nextToken.kind) {
        case SEMICOLON_TOKEN:
            // Field without a default value.
            equalsToken = STNodeFactory.createEmptyNode();
            expression = STNodeFactory.createEmptyNode();
            semicolonToken = parseSemicolon();
            break;
        case EQUAL_TOKEN:
            // Field with a default value.
            equalsToken = parseAssignOp();
            expression = parseExpression();
            semicolonToken = parseSemicolon();
            break;
        default:
            recover(peek(), ParserRuleContext.OBJECT_FIELD_RHS, metadata, visibilityQualifier, readonlyQualifier,
                    type, fieldName);
            return parseObjectFieldRhs(metadata, visibilityQualifier, readonlyQualifier, type, fieldName);
    }

    return STNodeFactory.createObjectFieldNode(metadata, visibilityQualifier, readonlyQualifier, type, fieldName,
            equalsToken, expression, semicolonToken);
}

/** Parse an object method as a function definition in the object-method context. */
private STNode parseObjectMethod(STNode metadata, List<STNode> qualifiers) {
    return parseFuncDefOrFuncTypeDesc(ParserRuleContext.OBJECT_METHOD_START, metadata, true, qualifiers);
}

/**
 * Parse if-else statement.
 * <code>
 * if-else-stmt := if expression block-stmt [else-block]
 * </code>
 *
 * @return If-else block
 */
private STNode parseIfElseBlock() {
    startContext(ParserRuleContext.IF_BLOCK);
    STNode ifKeyword = parseIfKeyword();
    STNode condition = parseExpression();
    STNode ifBody = parseBlockNode();
    endContext();

    STNode elseBody = parseElseBlock();
    return STNodeFactory.createIfElseStatementNode(ifKeyword, condition, ifBody, elseBody);
}

/**
 * Parse if-keyword.
 *
 * @return Parsed if-keyword node
 */
private STNode parseIfKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.IF_KEYWORD) {
        return consume();
    } else {
        recover(token, ParserRuleContext.IF_KEYWORD);
        return parseIfKeyword();
    }
}

/**
 * Parse else-keyword.
 *
 * @return Parsed else keyword node
 */
private STNode parseElseKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.ELSE_KEYWORD) {
        return consume();
    } else {
        recover(token, ParserRuleContext.ELSE_KEYWORD);
        return parseElseKeyword();
    }
}

/**
 * Parse block node.
 * <code>
 * block-stmt := { sequence-stmt }
 * sequence-stmt := statement*
 * </code>
 *
 * @return Parse block node
 */
private STNode parseBlockNode() {
    startContext(ParserRuleContext.BLOCK_STMT);
    STNode openBrace = parseOpenBrace();
    STNode stmts = parseStatements();
    STNode closeBrace = parseCloseBrace();
    endContext();
    return STNodeFactory.createBlockStatementNode(openBrace, stmts, closeBrace);
}

/**
 * Parse else block.
 * <code>else-block := else (if-else-stmt | block-stmt)</code>
 *
 * @return Else block
 */
private STNode parseElseBlock() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.ELSE_KEYWORD) {
        return STNodeFactory.createEmptyNode();
    }

    STNode elseKeyword = parseElseKeyword();
    STNode elseBody = parseElseBody();
    return STNodeFactory.createElseBlockNode(elseKeyword, elseBody);
}

/**
 * Parse else node body.
 * <code>else-body := if-else-stmt | block-stmt</code>
 *
 * @return Else node body
 */
private STNode parseElseBody() {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case IF_KEYWORD:
            return parseIfElseBlock();
        case OPEN_BRACE_TOKEN:
            return parseBlockNode();
        default:
            recover(peek(), ParserRuleContext.ELSE_BODY);
            return parseElseBody();
    }
}

/**
 * Parse do statement.
 * <code>do-stmt := do block-stmt [on-fail-clause]</code>
 *
 * @return Do statement
 */
private STNode parseDoStatement() {
    startContext(ParserRuleContext.DO_BLOCK);
    STNode doKeyword = parseDoKeyword();
    STNode doBody = parseBlockNode();
    endContext();

    STNode onFailClause;
    if (peek().kind == SyntaxKind.ON_KEYWORD) {
        onFailClause = parseOnFailClause();
    } else {
        onFailClause = STNodeFactory.createEmptyNode();
    }
    return STNodeFactory.createDoStatementNode(doKeyword, doBody, onFailClause);
}

/**
 * Parse while statement.
 * <code>while-stmt := while expression block-stmt [on-fail-clause]</code>
 *
 * @return While statement
 */
private STNode parseWhileStatement() {
    startContext(ParserRuleContext.WHILE_BLOCK);
    STNode whileKeyword = parseWhileKeyword();
    STNode condition = parseExpression();
    STNode whileBody = parseBlockNode();
    endContext();

    STNode onFailClause;
    if (peek().kind == SyntaxKind.ON_KEYWORD) {
        onFailClause = parseOnFailClause();
    } else {
        onFailClause = STNodeFactory.createEmptyNode();
    }
    return STNodeFactory.createWhileStatementNode(whileKeyword, condition, whileBody, onFailClause);
}

/**
 * Parse while-keyword.
 *
 * @return While-keyword node
 */
private STNode parseWhileKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.WHILE_KEYWORD) {
        return consume();
    } else {
        recover(token, ParserRuleContext.WHILE_KEYWORD);
        return parseWhileKeyword();
    }
}

/**
 * Parse panic statement.
 * <code>panic-stmt := panic expression ;</code>
 *
 * @return Panic statement
 */
private STNode parsePanicStatement() {
    startContext(ParserRuleContext.PANIC_STMT);
    STNode panicKeyword = parsePanicKeyword();
    STNode expression = parseExpression();
    STNode semicolon = parseSemicolon();
    endContext();
    return STNodeFactory.createPanicStatementNode(panicKeyword, expression, semicolon);
}

/**
 * Parse panic-keyword.
* * @return Panic-keyword node */ private STNode parsePanicKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.PANIC_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.PANIC_KEYWORD); return parsePanicKeyword(); } } /** * Parse check expression. This method is used to parse both check expression * as well as check action. * * <p> * <code> * checking-expr := checking-keyword expression * checking-action := checking-keyword action * </code> * * @param allowActions Allow actions * @param isRhsExpr Is rhs expression * @return Check expression node */ private STNode parseCheckExpression(boolean isRhsExpr, boolean allowActions, boolean isInConditionalExpr) { STNode checkingKeyword = parseCheckingKeyword(); STNode expr = parseExpression(OperatorPrecedence.EXPRESSION_ACTION, isRhsExpr, allowActions, isInConditionalExpr); if (isAction(expr)) { return STNodeFactory.createCheckExpressionNode(SyntaxKind.CHECK_ACTION, checkingKeyword, expr); } else { return STNodeFactory.createCheckExpressionNode(SyntaxKind.CHECK_EXPRESSION, checkingKeyword, expr); } } /** * Parse checking keyword. * <p> * <code> * checking-keyword := check | checkpanic * </code> * * @return Parsed node */ private STNode parseCheckingKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.CHECK_KEYWORD || token.kind == SyntaxKind.CHECKPANIC_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.CHECKING_KEYWORD); return parseCheckingKeyword(); } } /** * Parse continue statement. * <code>continue-stmt := continue ; </code> * * @return continue statement */ private STNode parseContinueStatement() { startContext(ParserRuleContext.CONTINUE_STATEMENT); STNode continueKeyword = parseContinueKeyword(); STNode semicolon = parseSemicolon(); endContext(); return STNodeFactory.createContinueStatementNode(continueKeyword, semicolon); } /** * Parse continue-keyword. 
* * @return continue-keyword node */ private STNode parseContinueKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.CONTINUE_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.CONTINUE_KEYWORD); return parseContinueKeyword(); } } /** * Parse fail statement. * <code>fail-stmt := fail expr ;</code> * * @return Fail statement */ private STNode parseFailStatement() { startContext(ParserRuleContext.FAIL_STATEMENT); STNode failKeyword = parseFailKeyword(); STNode expr = parseExpression(); STNode semicolon = parseSemicolon(); endContext(); return STNodeFactory.createFailStatementNode(failKeyword, expr, semicolon); } /** * Parse fail keyword. * <p> * <code> * fail-keyword := fail * </code> * * @return Parsed node */ private STNode parseFailKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.FAIL_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.FAIL_KEYWORD); return sol.recoveredNode; } } /** * Parse return statement. * <code>return-stmt := return [ action-or-expr ] ;</code> * * @return Return statement */ private STNode parseReturnStatement() { startContext(ParserRuleContext.RETURN_STMT); STNode returnKeyword = parseReturnKeyword(); STNode returnRhs = parseReturnStatementRhs(returnKeyword); endContext(); return returnRhs; } /** * Parse return-keyword. * * @return Return-keyword node */ private STNode parseReturnKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.RETURN_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.RETURN_KEYWORD); return parseReturnKeyword(); } } /** * Parse break statement. * <code>break-stmt := break ; </code> * * @return break statement */ private STNode parseBreakStatement() { startContext(ParserRuleContext.BREAK_STATEMENT); STNode breakKeyword = parseBreakKeyword(); STNode semicolon = parseSemicolon(); endContext(); return STNodeFactory.createBreakStatementNode(breakKeyword, semicolon); } /** * Parse break-keyword. 
*
 * @return break-keyword node
 */
private STNode parseBreakKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.BREAK_KEYWORD) {
        return consume();
    } else {
        recover(token, ParserRuleContext.BREAK_KEYWORD);
        return parseBreakKeyword();
    }
}

/**
 * <p>
 * Parse the right hand side of a return statement.
 * </p>
 * <code>
 * return-stmt-rhs := ; | action-or-expr ;
 * </code>
 *
 * @param returnKeyword Already-parsed `return` keyword
 * @return Parsed node
 */
private STNode parseReturnStatementRhs(STNode returnKeyword) {
    STNode expr;
    STToken token = peek();

    switch (token.kind) {
        case SEMICOLON_TOKEN:
            // Bare `return;` — no return expression.
            expr = STNodeFactory.createEmptyNode();
            break;
        default:
            expr = parseActionOrExpression();
            break;
    }

    STNode semicolon = parseSemicolon();
    return STNodeFactory.createReturnStatementNode(returnKeyword, expr, semicolon);
}

/**
 * Parse mapping constructor expression.
 * <p>
 * <code>mapping-constructor-expr := { [field (, field)*] }</code>
 *
 * @return Parsed node
 */
private STNode parseMappingConstructorExpr() {
    startContext(ParserRuleContext.MAPPING_CONSTRUCTOR);
    STNode openBrace = parseOpenBrace();
    STNode fields = parseMappingConstructorFields();
    STNode closeBrace = parseCloseBrace();
    endContext();
    return STNodeFactory.createMappingConstructorExpressionNode(openBrace, fields, closeBrace);
}

/**
 * Parse mapping constructor fields.
 *
 * @return Parsed node
 */
private STNode parseMappingConstructorFields() {
    STToken nextToken = peek();
    if (isEndOfMappingConstructor(nextToken.kind)) {
        return STNodeFactory.createEmptyNodeList();
    }

    // Parse the first field (no leading comma), then delegate to the list
    // parser below for the comma-separated remainder.
    List<STNode> fields = new ArrayList<>();
    STNode field = parseMappingField(ParserRuleContext.FIRST_MAPPING_FIELD);
    if (field != null) {
        fields.add(field);
    }
    return parseMappingConstructorFields(fields);
}

private STNode parseMappingConstructorFields(List<STNode> fields) {
    STToken nextToken;
    // Parse the remaining fields, each preceded by a comma separator.
    STNode mappingFieldEnd;
    nextToken = peek();
    while (!isEndOfMappingConstructor(nextToken.kind)) {
        mappingFieldEnd = parseMappingFieldEnd();
        if (mappingFieldEnd == null) {
            break;
        }
        fields.add(mappingFieldEnd);

        STNode field = parseMappingField(ParserRuleContext.MAPPING_FIELD);
        fields.add(field);
        nextToken = peek();
    }

    return STNodeFactory.createNodeList(fields);
}

private STNode parseMappingFieldEnd() {
    switch (peek().kind) {
        case COMMA_TOKEN:
            return parseComma();
        case CLOSE_BRACE_TOKEN:
            // null signals end-of-field-list to the caller.
            return null;
        default:
            recover(peek(), ParserRuleContext.MAPPING_FIELD_END);
            return parseMappingFieldEnd();
    }
}

private boolean isEndOfMappingConstructor(SyntaxKind tokenKind) {
    switch (tokenKind) {
        case IDENTIFIER_TOKEN:
        case READONLY_KEYWORD:
            return false;
        case EOF_TOKEN:
        case DOCUMENTATION_STRING:
        case AT_TOKEN:
        case CLOSE_BRACE_TOKEN:
        case SEMICOLON_TOKEN:
        case PUBLIC_KEYWORD:
        case PRIVATE_KEYWORD:
        case FUNCTION_KEYWORD:
        case RETURNS_KEYWORD:
        case SERVICE_KEYWORD:
        case TYPE_KEYWORD:
        case LISTENER_KEYWORD:
        case CONST_KEYWORD:
        case FINAL_KEYWORD:
        case RESOURCE_KEYWORD:
            return true;
        default:
            // Simple type keywords also terminate a mapping constructor.
            return isSimpleType(tokenKind);
    }
}

/**
 * Parse mapping constructor field.
 * <p>
 * <code>field := specific-field | computed-name-field | spread-field</code>
 *
 * @param fieldContext Context of the mapping field
 * @return Parsed node
 */
private STNode parseMappingField(ParserRuleContext fieldContext) {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case IDENTIFIER_TOKEN:
            STNode readonlyKeyword = STNodeFactory.createEmptyNode();
            return parseSpecificFieldWithOptionalValue(readonlyKeyword);
        case STRING_LITERAL_TOKEN:
            readonlyKeyword = STNodeFactory.createEmptyNode();
            return parseQualifiedSpecificField(readonlyKeyword);
        case READONLY_KEYWORD:
            readonlyKeyword = parseReadonlyKeyword();
            return parseSpecificField(readonlyKeyword);
        case OPEN_BRACKET_TOKEN:
            return parseComputedField();
        case ELLIPSIS_TOKEN:
            STNode ellipsis = parseEllipsis();
            STNode expr = parseExpression();
            return STNodeFactory.createSpreadFieldNode(ellipsis, expr);
        case CLOSE_BRACE_TOKEN:
            if (fieldContext == ParserRuleContext.FIRST_MAPPING_FIELD) {
                // null marks an empty mapping constructor `{}`.
                return null;
            }
            // else fall through to recovery
        default:
            recover(nextToken, fieldContext, fieldContext);
            return parseMappingField(fieldContext);
    }
}

private STNode parseSpecificField(STNode readonlyKeyword) {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case STRING_LITERAL_TOKEN:
            return parseQualifiedSpecificField(readonlyKeyword);
        case IDENTIFIER_TOKEN:
            return parseSpecificFieldWithOptionalValue(readonlyKeyword);
        default:
            recover(peek(), ParserRuleContext.SPECIFIC_FIELD, readonlyKeyword);
            return parseSpecificField(readonlyKeyword);
    }
}

private STNode parseQualifiedSpecificField(STNode readonlyKeyword) {
    STNode key = parseStringLiteral();
    STNode colon = parseColon();
    STNode valueExpr = parseExpression();
    return STNodeFactory.createSpecificFieldNode(readonlyKeyword, key, colon, valueExpr);
}

/**
 * Parse mapping constructor specific-field with an optional value.
 *
 * @param readonlyKeyword Optional `readonly` keyword (may be an empty node)
 * @return Parsed node
 */
private STNode parseSpecificFieldWithOptionalValue(STNode readonlyKeyword) {
    STNode key = parseIdentifier(ParserRuleContext.MAPPING_FIELD_NAME);
    return parseSpecificFieldRhs(readonlyKeyword, key);
}

private STNode parseSpecificFieldRhs(STNode readonlyKeyword, STNode key) {
    STNode colon;
    STNode valueExpr;

    STToken nextToken = peek();
    switch (nextToken.kind) {
        case COLON_TOKEN:
            colon = parseColon();
            valueExpr = parseExpression();
            break;
        case COMMA_TOKEN:
            // Key-only field (`{x}` shorthand): no colon, no value expression.
            colon = STNodeFactory.createEmptyNode();
            valueExpr = STNodeFactory.createEmptyNode();
            break;
        default:
            if (isEndOfMappingConstructor(nextToken.kind)) {
                colon = STNodeFactory.createEmptyNode();
                valueExpr = STNodeFactory.createEmptyNode();
                break;
            }

            recover(nextToken, ParserRuleContext.SPECIFIC_FIELD_RHS, readonlyKeyword, key);
            return parseSpecificFieldRhs(readonlyKeyword, key);
    }

    return STNodeFactory.createSpecificFieldNode(readonlyKeyword, key, colon, valueExpr);
}

/**
 * Parse string literal.
 *
 * @return Parsed node
 */
private STNode parseStringLiteral() {
    STToken token = peek();
    STNode stringLiteral;
    if (token.kind == SyntaxKind.STRING_LITERAL_TOKEN) {
        stringLiteral = consume();
    } else {
        recover(token, ParserRuleContext.STRING_LITERAL_TOKEN);
        return parseStringLiteral();
    }
    return parseBasicLiteral(stringLiteral);
}

/**
 * Parse colon token.
 *
 * @return Parsed node
 */
private STNode parseColon() {
    STToken token = peek();
    if (token.kind == SyntaxKind.COLON_TOKEN) {
        return consume();
    } else {
        recover(token, ParserRuleContext.COLON);
        return parseColon();
    }
}

/**
 * Parse readonly keyword.
 *
 * @return Parsed node
 */
private STNode parseReadonlyKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.READONLY_KEYWORD) {
        return consume();
    } else {
        recover(token, ParserRuleContext.READONLY_KEYWORD);
        return parseReadonlyKeyword();
    }
}

/**
 * Parse computed-name-field of a mapping constructor expression.
* <p>
 * <code>computed-name-field := [ field-name-expr ] : value-expr</code>
 *
 * @return Parsed node
 */
private STNode parseComputedField() {
    // Parse the bracketed field-name expression.
    startContext(ParserRuleContext.COMPUTED_FIELD_NAME);
    STNode openBracket = parseOpenBracket();
    STNode fieldNameExpr = parseExpression();
    STNode closeBracket = parseCloseBracket();
    endContext();

    // Parse `: value-expr`.
    STNode colon = parseColon();
    STNode valueExpr = parseExpression();
    return STNodeFactory.createComputedNameFieldNode(openBracket, fieldNameExpr, closeBracket, colon, valueExpr);
}

/**
 * Parse open bracket.
 *
 * @return Parsed node
 */
private STNode parseOpenBracket() {
    STToken token = peek();
    if (token.kind == SyntaxKind.OPEN_BRACKET_TOKEN) {
        return consume();
    } else {
        recover(token, ParserRuleContext.OPEN_BRACKET);
        return parseOpenBracket();
    }
}

/**
 * <p>
 * Parse the RHS portion of the compound assignment.
 * </p>
 * <code>compound-assignment-stmt-rhs := CompoundAssignmentOperator action-or-expr ;</code>
 *
 * @param lvExpr LHS expression
 * @return Parsed node
 */
private STNode parseCompoundAssignmentStmtRhs(STNode lvExpr) {
    STNode binaryOperator = parseCompoundBinaryOperator();
    STNode equalsToken = parseAssignOp();
    STNode expr = parseActionOrExpression();
    STNode semicolon = parseSemicolon();
    endContext();

    boolean lvExprValid = isValidLVExpr(lvExpr);
    if (!lvExprValid) {
        // Replace an invalid LHS with a missing identifier and keep the invalid
        // node as leading minutiae, so the tree stays well-formed while the
        // error is still reported.
        STNode identifier = SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
        STNode simpleNameRef = STNodeFactory.createSimpleNameReferenceNode(identifier);
        lvExpr = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(simpleNameRef, lvExpr,
                DiagnosticErrorCode.ERROR_INVALID_EXPR_IN_COMPOUND_ASSIGNMENT_LHS);
    }

    return STNodeFactory.createCompoundAssignmentStatementNode(lvExpr, binaryOperator, equalsToken, expr, semicolon);
}

/**
 * Parse compound binary operator.
 * <code>BinaryOperator := + | - | * | / | & | | | ^ | << | >> | >>></code>
 *
 * @return Parsed node
 */
private STNode parseCompoundBinaryOperator() {
    STToken token = peek();
    if (isCompoundBinaryOperator(token.kind)) {
        return consume();
    } else {
        recover(token, ParserRuleContext.COMPOUND_BINARY_OPERATOR);
        return parseCompoundBinaryOperator();
    }
}

/**
 * Parse service declaration.
 * <p>
 * <code>
 * service-decl := metadata service [variable-name] on expression-list service-body-block
 * <br/>
 * expression-list := expression (, expression)*
 * </code>
 *
 * @param metadata Metadata
 * @return Parsed node
 */
private STNode parseServiceDecl(STNode metadata) {
    startContext(ParserRuleContext.SERVICE_DECL);
    STNode serviceKeyword = parseServiceKeyword();
    STNode serviceDecl = parseServiceRhs(metadata, serviceKeyword);
    endContext();
    return serviceDecl;
}

/**
 * Parse rhs of the service declaration.
 * <p>
 * <code>
 * service-rhs := [variable-name] on expression-list service-body-block
 * </code>
 *
 * @param metadata Metadata
 * @param serviceKeyword Service keyword
 * @return Parsed node
 */
private STNode parseServiceRhs(STNode metadata, STNode serviceKeyword) {
    STNode serviceName = parseServiceName();
    STNode onKeyword = parseOnKeyword();
    STNode expressionList = parseListeners();
    STNode serviceBody = parseServiceBody();

    // An empty listener list is an error; the diagnostic is attached to the
    // `on` keyword rather than the empty list node.
    onKeyword =
            cloneWithDiagnosticIfListEmpty(expressionList, onKeyword, DiagnosticErrorCode.ERROR_MISSING_EXPRESSION);
    return STNodeFactory.createServiceDeclarationNode(metadata, serviceKeyword, serviceName, onKeyword,
            expressionList, serviceBody);
}

private STNode parseServiceName() {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case IDENTIFIER_TOKEN:
            return parseIdentifier(ParserRuleContext.SERVICE_NAME);
        case ON_KEYWORD:
            // The service name is optional.
            return STNodeFactory.createEmptyNode();
        default:
            recover(nextToken, ParserRuleContext.OPTIONAL_SERVICE_NAME);
            return parseServiceName();
    }
}

/**
 * Parse service keyword.
 *
 * @return Parsed node
 */
private STNode parseServiceKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.SERVICE_KEYWORD) {
        return consume();
    } else {
        recover(token, ParserRuleContext.SERVICE_KEYWORD);
        return parseServiceKeyword();
    }
}

/**
 * Check whether the given token kind is a compound binary operator.
 * <p>
 * <code>compound-binary-operator := + | - | * | / | & | | | ^ | << | >> | >>></code>
 *
 * @param tokenKind STToken kind
 * @return <code>true</code> if the token kind refers to a binary operator. <code>false</code> otherwise
 */
private boolean isCompoundBinaryOperator(SyntaxKind tokenKind) {
    switch (tokenKind) {
        case PLUS_TOKEN:
        case MINUS_TOKEN:
        case SLASH_TOKEN:
        case ASTERISK_TOKEN:
        case BITWISE_AND_TOKEN:
        case BITWISE_XOR_TOKEN:
        case PIPE_TOKEN:
        case DOUBLE_LT_TOKEN:
        case DOUBLE_GT_TOKEN:
        case TRIPPLE_GT_TOKEN:
            // Compound only when immediately followed by `=` (e.g. `+=`).
            return getNextNextToken(tokenKind).kind == SyntaxKind.EQUAL_TOKEN;
        default:
            return false;
    }
}

/**
 * Parse on keyword.
 *
 * @return Parsed node
 */
private STNode parseOnKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.ON_KEYWORD) {
        return consume();
    } else {
        recover(token, ParserRuleContext.ON_KEYWORD);
        return parseOnKeyword();
    }
}

/**
 * Parse listener references.
 * <p>
 * <code>expression-list := expression (, expression)*</code>
 *
 * @return Parsed node
 */
private STNode parseListeners() {
    startContext(ParserRuleContext.LISTENERS_LIST);
    List<STNode> listeners = new ArrayList<>();

    STToken nextToken = peek();
    if (isEndOfListeners(nextToken.kind)) {
        endContext();
        return STNodeFactory.createEmptyNodeList();
    }

    // First expression has no leading comma.
    STNode expr = parseExpression();
    listeners.add(expr);

    // Remaining expressions are comma-separated.
    STNode listenersMemberEnd;
    while (!isEndOfListeners(peek().kind)) {
        listenersMemberEnd = parseListenersMemberEnd();
        if (listenersMemberEnd == null) {
            break;
        }
        listeners.add(listenersMemberEnd);
        expr = parseExpression();
        listeners.add(expr);
    }

    endContext();
    return STNodeFactory.createNodeList(listeners);
}

private boolean isEndOfListeners(SyntaxKind tokenKind) {
    switch (tokenKind) {
        case OPEN_BRACE_TOKEN:
        case EOF_TOKEN:
            return true;
        default:
            return false;
    }
}

private STNode parseListenersMemberEnd() {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case COMMA_TOKEN:
            return parseComma();
        case OPEN_BRACE_TOKEN:
            // null marks the end of the listener list.
            return null;
        default:
            recover(nextToken, ParserRuleContext.LISTENERS_LIST_END);
            return parseListenersMemberEnd();
    }
}

/**
 * Parse service body.
 * <p>
 * <code>
 * service-body-block := { service-method-defn* }
 * </code>
 *
 * @return Parsed node
 */
private STNode parseServiceBody() {
    STNode openBrace = parseOpenBrace();
    STNode resources = parseResources();
    STNode closeBrace = parseCloseBrace();
    return STNodeFactory.createServiceBodyNode(openBrace, resources, closeBrace);
}

/**
 * Parse service resource definitions.
 *
 * @return Parsed node
 */
private STNode parseResources() {
    List<STNode> resources = new ArrayList<>();
    STToken nextToken = peek();
    while (!isEndOfServiceDecl(nextToken.kind)) {
        STNode serviceMethod = parseResource();
        if (serviceMethod == null) {
            break;
        }
        resources.add(serviceMethod);
        nextToken = peek();
    }

    return STNodeFactory.createNodeList(resources);
}

private boolean isEndOfServiceDecl(SyntaxKind tokenKind) {
    switch (tokenKind) {
        case CLOSE_BRACE_TOKEN:
        case EOF_TOKEN:
        case CLOSE_BRACE_PIPE_TOKEN:
        case TYPE_KEYWORD:
        case SERVICE_KEYWORD:
            return true;
        default:
            return false;
    }
}

/**
 * Parse resource definition (i.e. service-method-defn).
 * <p>
 * <code>
 * service-body-block := { service-method-defn* }
 * <br/>
 * service-method-defn := metadata [resource] function identifier function-signature method-defn-body
 * </code>
 *
 * @return Parsed node
 */
private STNode parseResource() {
    STNode metadata;
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case RESOURCE_KEYWORD:
        case TRANSACTIONAL_KEYWORD:
        case FUNCTION_KEYWORD:
        case REMOTE_KEYWORD:
            metadata = STNodeFactory.createEmptyNode();
            break;
        case DOCUMENTATION_STRING:
        case AT_TOKEN:
            metadata = parseMetaData();
            break;
        default:
            if (isEndOfServiceDecl(nextToken.kind)) {
                // null signals the end of the resource list to parseResources().
                return null;
            }

            recover(peek(), ParserRuleContext.RESOURCE_DEF);
            return parseResource();
    }

    return parseResource(metadata);
}

private STNode parseResource(STNode metadata) {
    STNode qualifierList = parseFunctionQualifiers(ParserRuleContext.RESOURCE_DEF_QUALIFIERS, new ArrayList<>());
    return parseFuncDefinition(metadata, false, qualifierList);
}

/**
 * Parse resource keyword.
 *
 * @return Parsed node
 */
private STNode parseResourceKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.RESOURCE_KEYWORD) {
        return consume();
    } else {
        recover(token, ParserRuleContext.RESOURCE_KEYWORD);
        return parseResourceKeyword();
    }
}

/**
 * Check whether next construct is a service declaration or not.
This method is
 * used to determine whether an end-of-block is reached, if the next token is
 * a service-keyword. Because service-keyword can be used in statements as well
 * as in top-level node (service-decl). We have reached a service-decl, then
 * it could be due to missing close-brace at the end of the current block.
 *
 * @return <code>true</code> if the next construct is a service declaration.
 *         <code>false</code> otherwise
 */
// NOTE(review): the javadoc above appears to describe a service-decl lookahead
// helper that is not visible in this chunk — verify against the full file.

/**
 * Parse the rest of a statement whose start was parsed as either a tuple-type
 * descriptor or a list-constructor expression (still ambiguous).
 *
 * @param annots Annotations attached to the statement
 * @param tupleTypeOrListConst Ambiguous node parsed so far
 * @param isRoot Whether this is being parsed as a top-level statement
 * @return Parsed node
 */
private STNode parseStmtStartsWithTupleTypeOrExprRhs(STNode annots, STNode tupleTypeOrListConst, boolean isRoot) {
    // A kind in the [RECORD_TYPE_DESC, TYPEDESC_TYPE_DESC] range means the
    // node is definitely a type descriptor: continue as a var-decl.
    if (tupleTypeOrListConst.kind.compareTo(SyntaxKind.RECORD_TYPE_DESC) >= 0 &&
            tupleTypeOrListConst.kind.compareTo(SyntaxKind.TYPEDESC_TYPE_DESC) <= 0) {
        STNode finalKeyword = STNodeFactory.createEmptyNode();
        STNode typedBindingPattern =
                parseTypedBindingPatternTypeRhs(tupleTypeOrListConst, ParserRuleContext.VAR_DECL_STMT, isRoot);
        if (!isRoot) {
            return typedBindingPattern;
        }
        switchContext(ParserRuleContext.VAR_DECL_STMT);
        return parseVarDeclRhs(annots, finalKeyword, typedBindingPattern, false);
    }

    // Otherwise treat it as an expression and continue as an expression statement.
    STNode expr = getExpression(tupleTypeOrListConst);
    expr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, expr, false, true);
    return parseStatementStartWithExprRhs(expr);
}

private STNode parseAsTupleTypeDesc(STNode annots, STNode openBracket, List<STNode> memberList, STNode member,
                                    boolean isRoot) {
    memberList = getTypeDescList(memberList);
    startContext(ParserRuleContext.TYPE_DESC_IN_TUPLE);
    STNode tupleTypeMembers = parseTupleTypeMembers(member, memberList);
    STNode closeBracket = parseCloseBracket();
    endContext();

    STNode tupleType = STNodeFactory.createTupleTypeDescriptorNode(openBracket, tupleTypeMembers, closeBracket);
    STNode typeDesc =
            parseComplexTypeDescriptor(tupleType, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);
    endContext();
    STNode typedBindingPattern = parseTypedBindingPatternTypeRhs(typeDesc, ParserRuleContext.VAR_DECL_STMT, isRoot);
    if (!isRoot) {
        return typedBindingPattern;
    }

    switchContext(ParserRuleContext.VAR_DECL_STMT);
    return parseVarDeclRhs(annots, STNodeFactory.createEmptyNode(), typedBindingPattern, false);
}

private STNode parseAsListBindingPattern(STNode openBracket, List<STNode> memberList, STNode member,
                                         boolean isRoot) {
    memberList = getBindingPatternsList(memberList);
    memberList.add(member);
    switchContext(ParserRuleContext.LIST_BINDING_PATTERN);
    STNode listBindingPattern = parseListBindingPattern(openBracket, member, memberList);
    endContext();
    if (!isRoot) {
        return listBindingPattern;
    }

    return parseAssignmentStmtRhs(listBindingPattern);
}

private STNode parseAsListBindingPattern(STNode openBracket, List<STNode> memberList) {
    memberList = getBindingPatternsList(memberList);
    switchContext(ParserRuleContext.LIST_BINDING_PATTERN);
    STNode listBindingPattern = parseListBindingPattern(openBracket, memberList);
    endContext();
    return listBindingPattern;
}

private STNode parseAsListBindingPatternOrListConstructor(STNode openBracket, List<STNode> memberList,
                                                          STNode member, boolean isRoot) {
    memberList.add(member);
    STNode memberEnd = parseBracketedListMemberEnd();

    STNode listBindingPatternOrListCons;
    if (memberEnd == null) {
        // End of the bracketed list: consume the close bracket and decide.
        STNode closeBracket = parseCloseBracket();
        listBindingPatternOrListCons =
                parseListBindingPatternOrListConstructor(openBracket, memberList, closeBracket, isRoot);
    } else {
        memberList.add(memberEnd);
        listBindingPatternOrListCons = parseListBindingPatternOrListConstructor(openBracket, memberList, isRoot);
    }

    return listBindingPatternOrListCons;
}

/**
 * Classify the first member of a statement-starting bracketed list, to decide
 * whether the list is a tuple type, an array type, a list binding pattern, a
 * list constructor, or still ambiguous (NONE).
 */
private SyntaxKind getStmtStartBracketedListType(STNode memberNode) {
    if (memberNode.kind.compareTo(SyntaxKind.RECORD_TYPE_DESC) >= 0 &&
            memberNode.kind.compareTo(SyntaxKind.TYPEDESC_TYPE_DESC) <= 0) {
        return SyntaxKind.TUPLE_TYPE_DESC;
    }

    switch (memberNode.kind) {
        case NUMERIC_LITERAL:
        case ASTERISK_LITERAL:
            return SyntaxKind.ARRAY_TYPE_DESC;
        case CAPTURE_BINDING_PATTERN:
        case LIST_BINDING_PATTERN:
        case REST_BINDING_PATTERN:
        case WILDCARD_BINDING_PATTERN:
        case ERROR_BINDING_PATTERN:
            return SyntaxKind.LIST_BINDING_PATTERN;
        case QUALIFIED_NAME_REFERENCE:
        case REST_TYPE:
            return SyntaxKind.TUPLE_TYPE_DESC;
        case LIST_CONSTRUCTOR:
        case MAPPING_CONSTRUCTOR:
            return SyntaxKind.LIST_CONSTRUCTOR;
        case MAPPING_BP_OR_MAPPING_CONSTRUCTOR:
            return SyntaxKind.LIST_BP_OR_LIST_CONSTRUCTOR;
        case SIMPLE_NAME_REFERENCE:
        case BRACKETED_LIST:
            // Still ambiguous.
            return SyntaxKind.NONE;
        case FUNCTION_CALL:
            if (isPossibleErrorBindingPattern((STFunctionCallExpressionNode) memberNode)) {
                return SyntaxKind.NONE;
            }
            return SyntaxKind.LIST_CONSTRUCTOR;
        case INDEXED_EXPRESSION:
            return SyntaxKind.TUPLE_TYPE_DESC_OR_LIST_CONST;
        default:
            if (isExpression(memberNode.kind) && !isAllBasicLiterals(memberNode) && !isAmbiguous(memberNode)) {
                return SyntaxKind.LIST_CONSTRUCTOR;
            }
            return SyntaxKind.NONE;
    }
}

private boolean isPossibleErrorBindingPattern(STFunctionCallExpressionNode funcCall) {
    STNode args = funcCall.arguments;
    int size = args.bucketCount();

    for (int i = 0; i < size; i++) {
        STNode arg = args.childInBucket(i);
        // Skip separator tokens; only inspect the argument nodes themselves.
        if (arg.kind != SyntaxKind.NAMED_ARG && arg.kind != SyntaxKind.POSITIONAL_ARG &&
                arg.kind != SyntaxKind.REST_ARG) {
            continue;
        }

        if (!isPosibleArgBindingPattern((STFunctionArgumentNode) arg)) {
            return false;
        }
    }

    return true;
}

// NOTE(review): "Posible" is a typo for "Possible"; name kept as-is since other
// callers in the full file may reference it.
private boolean isPosibleArgBindingPattern(STFunctionArgumentNode arg) {
    switch (arg.kind) {
        case POSITIONAL_ARG:
            STNode expr = ((STPositionalArgumentNode) arg).expression;
            return isPosibleBindingPattern(expr);
        case NAMED_ARG:
            expr = ((STNamedArgumentNode) arg).expression;
            return isPosibleBindingPattern(expr);
        case REST_ARG:
            expr = ((STRestArgumentNode) arg).expression;
            return expr.kind == SyntaxKind.SIMPLE_NAME_REFERENCE;
        default:
            return false;
    }
}

private boolean isPosibleBindingPattern(STNode node) {
    switch (node.kind) {
        case SIMPLE_NAME_REFERENCE:
            return true;
        case LIST_CONSTRUCTOR:
            STListConstructorExpressionNode listConstructor = (STListConstructorExpressionNode) node;
            for (int i = 0; i < listConstructor.bucketCount(); i++) {
                STNode expr = listConstructor.childInBucket(i);
                if (!isPosibleBindingPattern(expr)) {
                    return false;
                }
            }
            return true;
        case MAPPING_CONSTRUCTOR:
            STMappingConstructorExpressionNode mappingConstructor = (STMappingConstructorExpressionNode) node;
            for (int i = 0; i < mappingConstructor.bucketCount(); i++) {
                STNode expr = mappingConstructor.childInBucket(i);
                if (!isPosibleBindingPattern(expr)) {
                    return false;
                }
            }
            return true;
        case SPECIFIC_FIELD:
            STSpecificFieldNode specificField = (STSpecificFieldNode) node;
            // `readonly` is not allowed in a binding pattern.
            if (specificField.readonlyKeyword != null) {
                return false;
            }

            if (specificField.valueExpr == null) {
                return true;
            }
            return isPosibleBindingPattern(specificField.valueExpr);
        case FUNCTION_CALL:
            return isPossibleErrorBindingPattern((STFunctionCallExpressionNode) node);
        default:
            return false;
    }
}

/**
 * Resolve a statement-starting bracketed list after the close bracket, using
 * the next token to decide between list binding pattern, tuple type, and
 * list constructor interpretations.
 */
private STNode parseStatementStartBracketedList(STNode annots, STNode openBracket, List<STNode> members,
                                                STNode closeBracket, boolean isRoot, boolean possibleMappingField) {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case EQUAL_TOKEN:
            // `[...] =` — a list binding pattern in an assignment.
            if (!isRoot) {
                endContext();
                return new STAmbiguousCollectionNode(SyntaxKind.BRACKETED_LIST, openBracket, members, closeBracket);
            }

            STNode memberBindingPatterns = STNodeFactory.createNodeList(getBindingPatternsList(members));
            STNode restBindingPattern = STNodeFactory.createEmptyNode();
            STNode listBindingPattern = STNodeFactory.createListBindingPatternNode(openBracket,
                    memberBindingPatterns, restBindingPattern, closeBracket);
            endContext();
            switchContext(ParserRuleContext.ASSIGNMENT_STMT);
            return parseAssignmentStmtRhs(listBindingPattern);
        case IDENTIFIER_TOKEN:
        case OPEN_BRACE_TOKEN:
            // `[...] identifier` / `[...] {` — a tuple type followed by a binding pattern.
            if (!isRoot) {
                endContext();
                return new STAmbiguousCollectionNode(SyntaxKind.BRACKETED_LIST, openBracket, members, closeBracket);
            }

            if (members.isEmpty()) {
                openBracket =
                        SyntaxErrors.addDiagnostic(openBracket, DiagnosticErrorCode.ERROR_MISSING_TUPLE_MEMBER);
            }

            switchContext(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN);
            startContext(ParserRuleContext.TYPE_DESC_IN_TUPLE);
            STNode memberTypeDescs = STNodeFactory.createNodeList(getTypeDescList(members));
            STNode tupleTypeDesc =
                    STNodeFactory.createTupleTypeDescriptorNode(openBracket, memberTypeDescs, closeBracket);
            endContext();
            STNode typeDesc = parseComplexTypeDescriptor(tupleTypeDesc,
                    ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);
            STNode typedBindingPattern = parseTypedBindingPatternTypeRhs(typeDesc, ParserRuleContext.VAR_DECL_STMT);
            endContext();
            return parseStmtStartsWithTypedBPOrExprRhs(annots, typedBindingPattern);
        case OPEN_BRACKET_TOKEN:
            // `[...][` — could be an array/member-access dimension.
            if (!isRoot) {
                memberTypeDescs = STNodeFactory.createNodeList(getTypeDescList(members));
                tupleTypeDesc = STNodeFactory.createTupleTypeDescriptorNode(openBracket, memberTypeDescs,
                        closeBracket);
                endContext();
                typeDesc = parseComplexTypeDescriptor(tupleTypeDesc, ParserRuleContext.TYPE_DESC_IN_TUPLE, false);
                return typeDesc;
            }

            STAmbiguousCollectionNode list =
                    new STAmbiguousCollectionNode(SyntaxKind.BRACKETED_LIST, openBracket, members, closeBracket);
            endContext();
            STNode tpbOrExpr = parseTypedBindingPatternOrExprRhs(list, true);
            return parseStmtStartsWithTypedBPOrExprRhs(annots, tpbOrExpr);
        case COLON_TOKEN:
            // `[expr] :` — a computed-name field inside a mapping constructor.
            if (possibleMappingField && members.size() == 1) {
                startContext(ParserRuleContext.MAPPING_CONSTRUCTOR);
                STNode colon = parseColon();
                STNode fieldNameExpr = getExpression(members.get(0));
                STNode valueExpr = parseExpression();
                return STNodeFactory.createComputedNameFieldNode(openBracket, fieldNameExpr, closeBracket, colon,
                        valueExpr);
            }

            // else fall through
        default:
            endContext();
            if (!isRoot) {
                return new STAmbiguousCollectionNode(SyntaxKind.BRACKETED_LIST, openBracket, members, closeBracket);
            }

            list = new STAmbiguousCollectionNode(SyntaxKind.BRACKETED_LIST, openBracket, members, closeBracket);
            STNode exprOrTPB = parseTypedBindingPatternOrExprRhs(list, false);
            return parseStmtStartsWithTypedBPOrExprRhs(annots, exprOrTPB);
    }
}

private boolean isWildcardBP(STNode node) {
    switch (node.kind) {
        case SIMPLE_NAME_REFERENCE:
            STToken nameToken = (STToken) ((STSimpleNameReferenceNode) node).name;
            return isUnderscoreToken(nameToken);
        case IDENTIFIER_TOKEN:
            return isUnderscoreToken((STToken) node);
        default:
            return false;
    }
}

private boolean isUnderscoreToken(STToken token) {
    return "_".equals(token.text());
}

private STNode getWildcardBindingPattern(STNode identifier) {
    switch (identifier.kind) {
        case SIMPLE_NAME_REFERENCE:
            STNode varName = ((STSimpleNameReferenceNode) identifier).name;
            return STNodeFactory.createWildcardBindingPatternNode(varName);
        case IDENTIFIER_TOKEN:
            return STNodeFactory.createWildcardBindingPatternNode(identifier);
        default:
            // Callers only pass the two kinds above.
            throw new IllegalStateException();
    }
}

/*
 * This section tries to break the ambiguity in parsing a statement that starts with a open-brace.
 */

/**
 * Parse statements that starts with open-brace. It could be a:
 * 1) Block statement
 * 2) Var-decl with mapping binding pattern.
 * 3) Statement that starts with mapping constructor expression.
 *
 * @return Parsed node
 */
private STNode parseStatementStartsWithOpenBrace() {
    startContext(ParserRuleContext.AMBIGUOUS_STMT);
    STNode openBrace = parseOpenBrace();
    if (peek().kind == SyntaxKind.CLOSE_BRACE_TOKEN) {
        // `{}` — decide by the token after the close brace.
        STNode closeBrace = parseCloseBrace();
        switch (peek().kind) {
            case EQUAL_TOKEN:
                // `{} =` — empty mapping binding pattern.
                switchContext(ParserRuleContext.ASSIGNMENT_STMT);
                STNode fields = STNodeFactory.createEmptyNodeList();
                STNode restBindingPattern = STNodeFactory.createEmptyNode();
                STNode bindingPattern = STNodeFactory.createMappingBindingPatternNode(openBrace, fields,
                        restBindingPattern, closeBrace);
                return parseAssignmentStmtRhs(bindingPattern);
            case RIGHT_ARROW_TOKEN:
            case SYNC_SEND_TOKEN:
                // `{} ->` / `{} ->>` — empty mapping constructor used in an action.
                switchContext(ParserRuleContext.EXPRESSION_STATEMENT);
                fields = STNodeFactory.createEmptyNodeList();
                STNode expr = STNodeFactory.createMappingConstructorExpressionNode(openBrace, fields, closeBrace);
                expr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, expr, false, true);
                return parseStatementStartWithExprRhs(expr);
            default:
                // Empty block statement.
                STNode statements = STNodeFactory.createEmptyNodeList();
                endContext();
                return STNodeFactory.createBlockStatementNode(openBrace, statements, closeBrace);
        }
    }

    // Non-empty: parse the first member and classify the braced list.
    STNode member = parseStatementStartingBracedListFirstMember();
    SyntaxKind nodeType = getBracedListType(member);
    STNode stmt;
    switch (nodeType) {
        case MAPPING_BINDING_PATTERN:
            return parseStmtAsMappingBindingPatternStart(openBrace, member);
        case MAPPING_CONSTRUCTOR:
            return parseStmtAsMappingConstructorStart(openBrace, member);
        case MAPPING_BP_OR_MAPPING_CONSTRUCTOR:
            return parseStmtAsMappingBPOrMappingConsStart(openBrace, member);
        case BLOCK_STATEMENT:
            STNode closeBrace = parseCloseBrace();
            stmt = STNodeFactory.createBlockStatementNode(openBrace, member, closeBrace);
            endContext();
            return stmt;
        default:
            // Treat as a block statement whose first statement is `member`.
            ArrayList<STNode> stmts = new ArrayList<>();
            stmts.add(member);
            STNode statements = parseStatements(stmts);
            closeBrace = parseCloseBrace();
            endContext();
            return STNodeFactory.createBlockStatementNode(openBrace, statements, closeBrace);
    }
}

/**
 * Parse the rest of the statement, treating the start as a mapping binding pattern.
 *
 * @param openBrace Open brace
 * @param firstMappingField First member
 * @return Parsed node
 */
private STNode parseStmtAsMappingBindingPatternStart(STNode openBrace, STNode firstMappingField) {
    switchContext(ParserRuleContext.ASSIGNMENT_STMT);
    startContext(ParserRuleContext.MAPPING_BINDING_PATTERN);
    List<STNode> bindingPatterns = new ArrayList<>();
    // A rest binding pattern is handled inside parseMappingBindingPattern itself.
    if (firstMappingField.kind != SyntaxKind.REST_BINDING_PATTERN) {
        bindingPatterns.add(getBindingPattern(firstMappingField));
    }

    STNode mappingBP = parseMappingBindingPattern(openBrace, bindingPatterns, firstMappingField);
    return parseAssignmentStmtRhs(mappingBP);
}

/**
 * Parse the rest of the statement, treating the start as a mapping constructor expression.
*
 * @param openBrace Open brace
 * @param firstMember First member
 * @return Parsed node
 */
private STNode parseStmtAsMappingConstructorStart(STNode openBrace, STNode firstMember) {
    switchContext(ParserRuleContext.EXPRESSION_STATEMENT);
    startContext(ParserRuleContext.MAPPING_CONSTRUCTOR);
    List<STNode> members = new ArrayList<>();
    STNode mappingCons = parseAsMappingConstructor(openBrace, members, firstMember);

    STNode expr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, mappingCons, false, true);
    return parseStatementStartWithExprRhs(expr);
}

/**
 * Parse the braced-list as a mapping constructor expression.
 *
 * @param openBrace Open brace
 * @param members members list
 * @param member Most recently parsed member
 * @return Parsed node
 */
private STNode parseAsMappingConstructor(STNode openBrace, List<STNode> members, STNode member) {
    members.add(member);
    // Any already-collected ambiguous members are re-interpreted as expressions.
    members = getExpressionList(members);

    switchContext(ParserRuleContext.MAPPING_CONSTRUCTOR);
    STNode fields = parseMappingConstructorFields(members);
    STNode closeBrace = parseCloseBrace();
    endContext();
    return STNodeFactory.createMappingConstructorExpressionNode(openBrace, fields, closeBrace);
}

/**
 * Parse the rest of the statement, treating the start as a mapping binding pattern
 * or a mapping constructor expression.
 *
 * @param openBrace Open brace
 * @param member First member
 * @return Parsed node
 */
private STNode parseStmtAsMappingBPOrMappingConsStart(STNode openBrace, STNode member) {
    startContext(ParserRuleContext.MAPPING_BP_OR_MAPPING_CONSTRUCTOR);
    List<STNode> members = new ArrayList<>();
    members.add(member);

    STNode bpOrConstructor;
    STNode memberEnd = parseMappingFieldEnd();
    if (memberEnd == null) {
        // End of the braced list: consume the close brace and decide.
        STNode closeBrace = parseCloseBrace();
        bpOrConstructor = parseMappingBindingPatternOrMappingConstructor(openBrace, members, closeBrace);
    } else {
        members.add(memberEnd);
        bpOrConstructor = parseMappingBindingPatternOrMappingConstructor(openBrace, members);
    }

    switch (bpOrConstructor.kind) {
        case MAPPING_CONSTRUCTOR:
            switchContext(ParserRuleContext.EXPRESSION_STATEMENT);
            STNode expr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, bpOrConstructor, false, true);
            return parseStatementStartWithExprRhs(expr);
        case MAPPING_BINDING_PATTERN:
            switchContext(ParserRuleContext.ASSIGNMENT_STMT);
            STNode bindingPattern = getBindingPattern(bpOrConstructor);
            return parseAssignmentStmtRhs(bindingPattern);
        case MAPPING_BP_OR_MAPPING_CONSTRUCTOR:
        default:
            // Still ambiguous: decide based on the next token — `=` means a
            // binding pattern in an assignment, anything else an expression.
            if (peek().kind == SyntaxKind.EQUAL_TOKEN) {
                switchContext(ParserRuleContext.ASSIGNMENT_STMT);
                bindingPattern = getBindingPattern(bpOrConstructor);
                return parseAssignmentStmtRhs(bindingPattern);
            }

            switchContext(ParserRuleContext.EXPRESSION_STATEMENT);
            expr = getExpression(bpOrConstructor);
            expr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, expr, false, true);
            return parseStatementStartWithExprRhs(expr);
    }
}

/**
 * Parse a member of a braced-list that occurs at the start of a statement.
 *
 * @return Parsed node
 */
private STNode parseStatementStartingBracedListFirstMember() {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case READONLY_KEYWORD:
            STNode readonlyKeyword = parseReadonlyKeyword();
            return bracedListMemberStartsWithReadonly(readonlyKeyword);
        case IDENTIFIER_TOKEN:
            readonlyKeyword = STNodeFactory.createEmptyNode();
            return parseIdentifierRhsInStmtStartingBrace(readonlyKeyword);
        case STRING_LITERAL_TOKEN:
            STNode key = parseStringLiteral();
            if (peek().kind == SyntaxKind.COLON_TOKEN) {
                // `"key": value` — a specific field of a mapping.
                readonlyKeyword = STNodeFactory.createEmptyNode();
                STNode colon = parseColon();
                STNode valueExpr = parseExpression();
                return STNodeFactory.createSpecificFieldNode(readonlyKeyword, key, colon, valueExpr);
            }

            // Otherwise the brace starts a block and the literal starts an expression.
            switchContext(ParserRuleContext.BLOCK_STMT);
            startContext(ParserRuleContext.AMBIGUOUS_STMT);
            STNode expr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, key, false, true);
            return parseStatementStartWithExprRhs(expr);
        case OPEN_BRACKET_TOKEN:
            STNode annots = STNodeFactory.createEmptyNodeList();
            return parseStatementStartsWithOpenBracket(annots, true);
        case OPEN_BRACE_TOKEN:
            // Nested brace: the parent must be a block statement.
            switchContext(ParserRuleContext.BLOCK_STMT);
            return parseStatementStartsWithOpenBrace();
        case ELLIPSIS_TOKEN:
            return parseRestBindingPattern();
        default:
            // Anything else: the parent is a block statement.
            switchContext(ParserRuleContext.BLOCK_STMT);
            return parseStatements();
    }
}

private STNode bracedListMemberStartsWithReadonly(STNode readonlyKeyword) {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case IDENTIFIER_TOKEN:
            return parseIdentifierRhsInStmtStartingBrace(readonlyKeyword);
        case STRING_LITERAL_TOKEN:
            if (peek(2).kind == SyntaxKind.COLON_TOKEN) {
                // `readonly "key": value` — a readonly specific field.
                STNode key = parseStringLiteral();
                STNode colon = parseColon();
                STNode valueExpr = parseExpression();
                return STNodeFactory.createSpecificFieldNode(readonlyKeyword, key, colon, valueExpr);
            }
            // else fall through
        default:
            // `readonly` here starts a type descriptor of a var-decl statement.
            switchContext(ParserRuleContext.BLOCK_STMT);
            startContext(ParserRuleContext.VAR_DECL_STMT);
            startContext(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN);
            STNode typeDesc =
parseComplexTypeDescriptor(readonlyKeyword, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true); endContext(); STNode annots = STNodeFactory.createEmptyNodeList(); STNode finalKeyword = STNodeFactory.createEmptyNode(); STNode typedBP = parseTypedBindingPatternTypeRhs(typeDesc, ParserRuleContext.VAR_DECL_STMT); return parseVarDeclRhs(annots, finalKeyword, typedBP, false); } } /** * Parse the rhs components of an identifier that follows an open brace, * at the start of a statement. i.e: "{foo". * * @param readonlyKeyword Readonly keyword * @return Parsed node */ private STNode parseIdentifierRhsInStmtStartingBrace(STNode readonlyKeyword) { STNode identifier = parseIdentifier(ParserRuleContext.VARIABLE_REF); switch (peek().kind) { case COMMA_TOKEN: STNode colon = STNodeFactory.createEmptyNode(); STNode value = STNodeFactory.createEmptyNode(); return STNodeFactory.createSpecificFieldNode(readonlyKeyword, identifier, colon, value); case COLON_TOKEN: colon = parseColon(); if (!isEmpty(readonlyKeyword)) { value = parseExpression(); return STNodeFactory.createSpecificFieldNode(readonlyKeyword, identifier, colon, value); } switch (peek().kind) { case OPEN_BRACKET_TOKEN: STNode bindingPatternOrExpr = parseListBindingPatternOrListConstructor(); return getMappingField(identifier, colon, bindingPatternOrExpr); case OPEN_BRACE_TOKEN: bindingPatternOrExpr = parseMappingBindingPatterOrMappingConstructor(); return getMappingField(identifier, colon, bindingPatternOrExpr); case IDENTIFIER_TOKEN: return parseQualifiedIdentifierRhsInStmtStartBrace(identifier, colon); default: STNode expr = parseExpression(); return getMappingField(identifier, colon, expr); } default: switchContext(ParserRuleContext.BLOCK_STMT); if (!isEmpty(readonlyKeyword)) { startContext(ParserRuleContext.VAR_DECL_STMT); STNode bindingPattern = STNodeFactory.createCaptureBindingPatternNode(identifier); STNode typedBindingPattern = STNodeFactory.createTypedBindingPatternNode(readonlyKeyword, bindingPattern); 
STNode annots = STNodeFactory.createEmptyNodeList(); STNode finalKeyword = STNodeFactory.createEmptyNode(); return parseVarDeclRhs(annots, finalKeyword, typedBindingPattern, false); } startContext(ParserRuleContext.AMBIGUOUS_STMT); STNode qualifiedIdentifier = parseQualifiedIdentifier(identifier, false); STNode expr = parseTypedBindingPatternOrExprRhs(qualifiedIdentifier, true); STNode annots = STNodeFactory.createEmptyNodeList(); return parseStmtStartsWithTypedBPOrExprRhs(annots, expr); } } /** * Parse the rhs components of "<code>{ identifier : identifier</code>", * at the start of a statement. i.e: "{foo:bar". * * @return Parsed node */ private STNode parseQualifiedIdentifierRhsInStmtStartBrace(STNode identifier, STNode colon) { STNode secondIdentifier = parseIdentifier(ParserRuleContext.VARIABLE_REF); STNode secondNameRef = STNodeFactory.createSimpleNameReferenceNode(secondIdentifier); if (isWildcardBP(secondIdentifier)) { return getWildcardBindingPattern(secondIdentifier); } STNode qualifiedNameRef = STNodeFactory.createQualifiedNameReferenceNode(identifier, colon, secondNameRef); switch (peek().kind) { case COMMA_TOKEN: return qualifiedNameRef; case OPEN_BRACE_TOKEN: case IDENTIFIER_TOKEN: STNode finalKeyword = STNodeFactory.createEmptyNode(); STNode typeBindingPattern = parseTypedBindingPatternTypeRhs(qualifiedNameRef, ParserRuleContext.VAR_DECL_STMT); STNode annots = STNodeFactory.createEmptyNodeList(); return parseVarDeclRhs(annots, finalKeyword, typeBindingPattern, false); case OPEN_BRACKET_TOKEN: return parseMemberRhsInStmtStartWithBrace(identifier, colon, secondNameRef); case QUESTION_MARK_TOKEN: STNode typeDesc = parseComplexTypeDescriptor(qualifiedNameRef, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true); finalKeyword = STNodeFactory.createEmptyNode(); typeBindingPattern = parseTypedBindingPatternTypeRhs(typeDesc, ParserRuleContext.VAR_DECL_STMT); annots = STNodeFactory.createEmptyNodeList(); return parseVarDeclRhs(annots, finalKeyword, 
typeBindingPattern, false); case EQUAL_TOKEN: case SEMICOLON_TOKEN: return parseStatementStartWithExprRhs(qualifiedNameRef); case PIPE_TOKEN: case BITWISE_AND_TOKEN: default: return parseMemberWithExprInRhs(identifier, colon, secondNameRef, secondNameRef); } } private SyntaxKind getBracedListType(STNode member) { switch (member.kind) { case FIELD_BINDING_PATTERN: case CAPTURE_BINDING_PATTERN: case LIST_BINDING_PATTERN: case MAPPING_BINDING_PATTERN: case WILDCARD_BINDING_PATTERN: return SyntaxKind.MAPPING_BINDING_PATTERN; case SPECIFIC_FIELD: STNode expr = ((STSpecificFieldNode) member).valueExpr; if (expr == null) { return SyntaxKind.MAPPING_BP_OR_MAPPING_CONSTRUCTOR; } switch (expr.kind) { case SIMPLE_NAME_REFERENCE: case LIST_BP_OR_LIST_CONSTRUCTOR: case MAPPING_BP_OR_MAPPING_CONSTRUCTOR: return SyntaxKind.MAPPING_BP_OR_MAPPING_CONSTRUCTOR; case ERROR_BINDING_PATTERN: return SyntaxKind.MAPPING_BINDING_PATTERN; case FUNCTION_CALL: if (isPossibleErrorBindingPattern((STFunctionCallExpressionNode) expr)) { return SyntaxKind.MAPPING_BP_OR_MAPPING_CONSTRUCTOR; } return SyntaxKind.MAPPING_CONSTRUCTOR; default: return SyntaxKind.MAPPING_CONSTRUCTOR; } case SPREAD_FIELD: case COMPUTED_NAME_FIELD: return SyntaxKind.MAPPING_CONSTRUCTOR; case SIMPLE_NAME_REFERENCE: case QUALIFIED_NAME_REFERENCE: case LIST_BP_OR_LIST_CONSTRUCTOR: case MAPPING_BP_OR_MAPPING_CONSTRUCTOR: case REST_BINDING_PATTERN: return SyntaxKind.MAPPING_BP_OR_MAPPING_CONSTRUCTOR; case LIST: return SyntaxKind.BLOCK_STATEMENT; default: return SyntaxKind.NONE; } } /** * Parse mapping binding pattern or mapping constructor. 
* * @return Parsed node */ private STNode parseMappingBindingPatterOrMappingConstructor() { startContext(ParserRuleContext.MAPPING_BP_OR_MAPPING_CONSTRUCTOR); STNode openBrace = parseOpenBrace(); List<STNode> memberList = new ArrayList<>(); return parseMappingBindingPatternOrMappingConstructor(openBrace, memberList); } private boolean isBracedListEnd(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case EOF_TOKEN: case CLOSE_BRACE_TOKEN: return true; default: return false; } } private STNode parseMappingBindingPatternOrMappingConstructor(STNode openBrace, List<STNode> memberList) { STToken nextToken = peek(); while (!isBracedListEnd(nextToken.kind)) { STNode member = parseMappingBindingPatterOrMappingConstructorMember(); SyntaxKind currentNodeType = getTypeOfMappingBPOrMappingCons(member); switch (currentNodeType) { case MAPPING_CONSTRUCTOR: return parseAsMappingConstructor(openBrace, memberList, member); case MAPPING_BINDING_PATTERN: return parseAsMappingBindingPattern(openBrace, memberList, member); case MAPPING_BP_OR_MAPPING_CONSTRUCTOR: default: memberList.add(member); break; } STNode memberEnd = parseMappingFieldEnd(); if (memberEnd == null) { break; } memberList.add(memberEnd); nextToken = peek(); } STNode closeBrace = parseCloseBrace(); return parseMappingBindingPatternOrMappingConstructor(openBrace, memberList, closeBrace); } private STNode parseMappingBindingPatterOrMappingConstructorMember() { switch (peek().kind) { case IDENTIFIER_TOKEN: STNode key = parseIdentifier(ParserRuleContext.MAPPING_FIELD_NAME); return parseMappingFieldRhs(key); case STRING_LITERAL_TOKEN: STNode readonlyKeyword = STNodeFactory.createEmptyNode(); key = parseStringLiteral(); STNode colon = parseColon(); STNode valueExpr = parseExpression(); return STNodeFactory.createSpecificFieldNode(readonlyKeyword, key, colon, valueExpr); case OPEN_BRACKET_TOKEN: return parseComputedField(); case ELLIPSIS_TOKEN: STNode ellipsis = parseEllipsis(); STNode expr = parseExpression(); if 
(expr.kind == SyntaxKind.SIMPLE_NAME_REFERENCE) { return STNodeFactory.createRestBindingPatternNode(ellipsis, expr); } return STNodeFactory.createSpreadFieldNode(ellipsis, expr); default: recover(peek(), ParserRuleContext.MAPPING_BP_OR_MAPPING_CONSTRUCTOR_MEMBER); return parseMappingBindingPatterOrMappingConstructorMember(); } } private STNode parseMappingFieldRhs(STNode key) { STNode colon; STNode valueExpr; switch (peek().kind) { case COLON_TOKEN: colon = parseColon(); return parseMappingFieldValue(key, colon); case COMMA_TOKEN: case CLOSE_BRACE_TOKEN: STNode readonlyKeyword = STNodeFactory.createEmptyNode(); colon = STNodeFactory.createEmptyNode(); valueExpr = STNodeFactory.createEmptyNode(); return STNodeFactory.createSpecificFieldNode(readonlyKeyword, key, colon, valueExpr); default: STToken token = peek(); recover(token, ParserRuleContext.FIELD_BINDING_PATTERN_END, key); readonlyKeyword = STNodeFactory.createEmptyNode(); return parseSpecificFieldRhs(readonlyKeyword, key); } } private STNode parseMappingFieldValue(STNode key, STNode colon) { STNode expr; switch (peek().kind) { case IDENTIFIER_TOKEN: expr = parseExpression(); break; case OPEN_BRACKET_TOKEN: expr = parseListBindingPatternOrListConstructor(); break; case OPEN_BRACE_TOKEN: expr = parseMappingBindingPatterOrMappingConstructor(); break; default: expr = parseExpression(); break; } if (isBindingPattern(expr.kind)) { return STNodeFactory.createFieldBindingPatternFullNode(key, colon, expr); } STNode readonlyKeyword = STNodeFactory.createEmptyNode(); return STNodeFactory.createSpecificFieldNode(readonlyKeyword, key, colon, expr); } private boolean isBindingPattern(SyntaxKind kind) { switch (kind) { case FIELD_BINDING_PATTERN: case MAPPING_BINDING_PATTERN: case CAPTURE_BINDING_PATTERN: case LIST_BINDING_PATTERN: case WILDCARD_BINDING_PATTERN: return true; default: return false; } } private SyntaxKind getTypeOfMappingBPOrMappingCons(STNode memberNode) { switch (memberNode.kind) { case 
FIELD_BINDING_PATTERN: case MAPPING_BINDING_PATTERN: case CAPTURE_BINDING_PATTERN: case LIST_BINDING_PATTERN: case WILDCARD_BINDING_PATTERN: return SyntaxKind.MAPPING_BINDING_PATTERN; case SPECIFIC_FIELD: STNode expr = ((STSpecificFieldNode) memberNode).valueExpr; if (expr == null || expr.kind == SyntaxKind.SIMPLE_NAME_REFERENCE || expr.kind == SyntaxKind.LIST_BP_OR_LIST_CONSTRUCTOR || expr.kind == SyntaxKind.MAPPING_BP_OR_MAPPING_CONSTRUCTOR) { return SyntaxKind.MAPPING_BP_OR_MAPPING_CONSTRUCTOR; } return SyntaxKind.MAPPING_CONSTRUCTOR; case SPREAD_FIELD: case COMPUTED_NAME_FIELD: return SyntaxKind.MAPPING_CONSTRUCTOR; case MAPPING_BP_OR_MAPPING_CONSTRUCTOR: case SIMPLE_NAME_REFERENCE: case QUALIFIED_NAME_REFERENCE: case LIST_BP_OR_LIST_CONSTRUCTOR: case REST_BINDING_PATTERN: default: return SyntaxKind.MAPPING_BP_OR_MAPPING_CONSTRUCTOR; } } private STNode parseMappingBindingPatternOrMappingConstructor(STNode openBrace, List<STNode> members, STNode closeBrace) { endContext(); return new STAmbiguousCollectionNode(SyntaxKind.MAPPING_BP_OR_MAPPING_CONSTRUCTOR, openBrace, members, closeBrace); } private STNode parseAsMappingBindingPattern(STNode openBrace, List<STNode> members, STNode member) { members.add(member); members = getBindingPatternsList(members); switchContext(ParserRuleContext.MAPPING_BINDING_PATTERN); return parseMappingBindingPattern(openBrace, members, member); } /** * Parse list binding pattern or list constructor. 
* * @return Parsed node */ private STNode parseListBindingPatternOrListConstructor() { startContext(ParserRuleContext.BRACKETED_LIST); STNode openBracket = parseOpenBracket(); List<STNode> memberList = new ArrayList<>(); return parseListBindingPatternOrListConstructor(openBracket, memberList, false); } private STNode parseListBindingPatternOrListConstructor(STNode openBracket, List<STNode> memberList, boolean isRoot) { STToken nextToken = peek(); while (!isBracketedListEnd(nextToken.kind)) { STNode member = parseListBindingPatternOrListConstructorMember(); SyntaxKind currentNodeType = getParsingNodeTypeOfListBPOrListCons(member); switch (currentNodeType) { case LIST_CONSTRUCTOR: return parseAsListConstructor(openBracket, memberList, member, isRoot); case LIST_BINDING_PATTERN: return parseAsListBindingPattern(openBracket, memberList, member, isRoot); case LIST_BP_OR_LIST_CONSTRUCTOR: default: memberList.add(member); break; } STNode memberEnd = parseBracketedListMemberEnd(); if (memberEnd == null) { break; } memberList.add(memberEnd); nextToken = peek(); } STNode closeBracket = parseCloseBracket(); return parseListBindingPatternOrListConstructor(openBracket, memberList, closeBracket, isRoot); } private STNode parseListBindingPatternOrListConstructorMember() { STToken nextToken = peek(); switch (nextToken.kind) { case OPEN_BRACKET_TOKEN: return parseListBindingPatternOrListConstructor(); case IDENTIFIER_TOKEN: STNode identifier = parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF); if (isWildcardBP(identifier)) { return getWildcardBindingPattern(identifier); } return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, identifier, false, false); case OPEN_BRACE_TOKEN: return parseMappingBindingPatterOrMappingConstructor(); case ELLIPSIS_TOKEN: return parseListBindingPatternMember(); default: if (isValidExpressionStart(nextToken.kind, 1)) { return parseExpression(); } recover(peek(), ParserRuleContext.LIST_BP_OR_LIST_CONSTRUCTOR_MEMBER); return 
parseListBindingPatternOrListConstructorMember(); } } private SyntaxKind getParsingNodeTypeOfListBPOrListCons(STNode memberNode) { switch (memberNode.kind) { case CAPTURE_BINDING_PATTERN: case LIST_BINDING_PATTERN: case REST_BINDING_PATTERN: case MAPPING_BINDING_PATTERN: case WILDCARD_BINDING_PATTERN: return SyntaxKind.LIST_BINDING_PATTERN; case SIMPLE_NAME_REFERENCE: case LIST_BP_OR_LIST_CONSTRUCTOR: case MAPPING_BP_OR_MAPPING_CONSTRUCTOR: return SyntaxKind.LIST_BP_OR_LIST_CONSTRUCTOR; default: return SyntaxKind.LIST_CONSTRUCTOR; } } private STNode parseAsListConstructor(STNode openBracket, List<STNode> memberList, STNode member, boolean isRoot) { memberList.add(member); memberList = getExpressionList(memberList); switchContext(ParserRuleContext.LIST_CONSTRUCTOR); STNode expressions = parseOptionalExpressionsList(memberList); STNode closeBracket = parseCloseBracket(); STNode listConstructor = STNodeFactory.createListConstructorExpressionNode(openBracket, expressions, closeBracket); endContext(); STNode expr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, listConstructor, false, false); if (!isRoot) { return expr; } return parseStatementStartWithExprRhs(expr); } private STNode parseListBindingPatternOrListConstructor(STNode openBracket, List<STNode> members, STNode closeBracket, boolean isRoot) { STNode lbpOrListCons; switch (peek().kind) { case COMMA_TOKEN: case CLOSE_BRACE_TOKEN: case CLOSE_BRACKET_TOKEN: if (!isRoot) { endContext(); return new STAmbiguousCollectionNode(SyntaxKind.LIST_BP_OR_LIST_CONSTRUCTOR, openBracket, members, closeBracket); } default: if (isValidExprRhsStart(peek().kind, closeBracket.kind)) { members = getExpressionList(members); STNode memberExpressions = STNodeFactory.createNodeList(members); lbpOrListCons = STNodeFactory.createListConstructorExpressionNode(openBracket, memberExpressions, closeBracket); break; } members = getBindingPatternsList(members); STNode bindingPatternsNode = STNodeFactory.createNodeList(members); STNode 
restBindingPattern = STNodeFactory.createEmptyNode(); lbpOrListCons = STNodeFactory.createListBindingPatternNode(openBracket, bindingPatternsNode, restBindingPattern, closeBracket); break; } endContext(); if (!isRoot) { return lbpOrListCons; } return parseStmtStartsWithTypedBPOrExprRhs(null, lbpOrListCons); } private STNode parseMemberRhsInStmtStartWithBrace(STNode identifier, STNode colon, STNode secondIdentifier) { STNode typedBPOrExpr = parseTypedBindingPatternOrMemberAccess(secondIdentifier, false, true, ParserRuleContext.AMBIGUOUS_STMT); if (isExpression(typedBPOrExpr.kind)) { return parseMemberWithExprInRhs(identifier, colon, secondIdentifier, typedBPOrExpr); } switchContext(ParserRuleContext.BLOCK_STMT); startContext(ParserRuleContext.VAR_DECL_STMT); STNode finalKeyword = STNodeFactory.createEmptyNode(); STNode annots = STNodeFactory.createEmptyNodeList(); STNode qualifiedNameRef = STNodeFactory.createQualifiedNameReferenceNode(identifier, colon, secondIdentifier); STNode typeDesc = mergeQualifiedNameWithTypeDesc(qualifiedNameRef, ((STTypedBindingPatternNode) typedBPOrExpr).typeDescriptor); return parseVarDeclRhs(annots, finalKeyword, typeDesc, false); } /** * Parse a member that starts with "foo:bar[", in a statement starting with a brace. 
* * @param identifier First identifier of the statement * @param colon Colon that follows the first identifier * @param secondIdentifier Identifier that follows the colon * @param memberAccessExpr Member access expression * @return Parsed node */ private STNode parseMemberWithExprInRhs(STNode identifier, STNode colon, STNode secondIdentifier, STNode memberAccessExpr) { STNode expr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, memberAccessExpr, false, true); switch (peek().kind) { case COMMA_TOKEN: case CLOSE_BRACE_TOKEN: switchContext(ParserRuleContext.EXPRESSION_STATEMENT); startContext(ParserRuleContext.MAPPING_CONSTRUCTOR); STNode readonlyKeyword = STNodeFactory.createEmptyNode(); return STNodeFactory.createSpecificFieldNode(readonlyKeyword, identifier, colon, expr); case EQUAL_TOKEN: case SEMICOLON_TOKEN: default: switchContext(ParserRuleContext.BLOCK_STMT); startContext(ParserRuleContext.EXPRESSION_STATEMENT); STNode qualifiedName = STNodeFactory.createQualifiedNameReferenceNode(identifier, colon, secondIdentifier); STNode updatedExpr = mergeQualifiedNameWithExpr(qualifiedName, expr); return parseStatementStartWithExprRhs(updatedExpr); } } /** * Replace the first identifier of an expression, with a given qualified-identifier. * Only expressions that can start with "bar[..]" can reach here. 
* * @param qualifiedName Qualified identifier to replace simple identifier * @param exprOrAction Expression or action * @return Updated expression */ private STNode mergeQualifiedNameWithExpr(STNode qualifiedName, STNode exprOrAction) { switch (exprOrAction.kind) { case SIMPLE_NAME_REFERENCE: return qualifiedName; case BINARY_EXPRESSION: STBinaryExpressionNode binaryExpr = (STBinaryExpressionNode) exprOrAction; STNode newLhsExpr = mergeQualifiedNameWithExpr(qualifiedName, binaryExpr.lhsExpr); return STNodeFactory.createBinaryExpressionNode(binaryExpr.kind, newLhsExpr, binaryExpr.operator, binaryExpr.rhsExpr); case FIELD_ACCESS: STFieldAccessExpressionNode fieldAccess = (STFieldAccessExpressionNode) exprOrAction; newLhsExpr = mergeQualifiedNameWithExpr(qualifiedName, fieldAccess.expression); return STNodeFactory.createFieldAccessExpressionNode(newLhsExpr, fieldAccess.dotToken, fieldAccess.fieldName); case INDEXED_EXPRESSION: STIndexedExpressionNode memberAccess = (STIndexedExpressionNode) exprOrAction; newLhsExpr = mergeQualifiedNameWithExpr(qualifiedName, memberAccess.containerExpression); return STNodeFactory.createIndexedExpressionNode(newLhsExpr, memberAccess.openBracket, memberAccess.keyExpression, memberAccess.closeBracket); case TYPE_TEST_EXPRESSION: STTypeTestExpressionNode typeTest = (STTypeTestExpressionNode) exprOrAction; newLhsExpr = mergeQualifiedNameWithExpr(qualifiedName, typeTest.expression); return STNodeFactory.createTypeTestExpressionNode(newLhsExpr, typeTest.isKeyword, typeTest.typeDescriptor); case ANNOT_ACCESS: STAnnotAccessExpressionNode annotAccess = (STAnnotAccessExpressionNode) exprOrAction; newLhsExpr = mergeQualifiedNameWithExpr(qualifiedName, annotAccess.expression); return STNodeFactory.createFieldAccessExpressionNode(newLhsExpr, annotAccess.annotChainingToken, annotAccess.annotTagReference); case OPTIONAL_FIELD_ACCESS: STOptionalFieldAccessExpressionNode optionalFieldAccess = (STOptionalFieldAccessExpressionNode) exprOrAction; 
newLhsExpr = mergeQualifiedNameWithExpr(qualifiedName, optionalFieldAccess.expression); return STNodeFactory.createFieldAccessExpressionNode(newLhsExpr, optionalFieldAccess.optionalChainingToken, optionalFieldAccess.fieldName); case CONDITIONAL_EXPRESSION: STConditionalExpressionNode conditionalExpr = (STConditionalExpressionNode) exprOrAction; newLhsExpr = mergeQualifiedNameWithExpr(qualifiedName, conditionalExpr.lhsExpression); return STNodeFactory.createConditionalExpressionNode(newLhsExpr, conditionalExpr.questionMarkToken, conditionalExpr.middleExpression, conditionalExpr.colonToken, conditionalExpr.endExpression); case REMOTE_METHOD_CALL_ACTION: STRemoteMethodCallActionNode remoteCall = (STRemoteMethodCallActionNode) exprOrAction; newLhsExpr = mergeQualifiedNameWithExpr(qualifiedName, remoteCall.expression); return STNodeFactory.createRemoteMethodCallActionNode(newLhsExpr, remoteCall.rightArrowToken, remoteCall.methodName, remoteCall.openParenToken, remoteCall.arguments, remoteCall.closeParenToken); case ASYNC_SEND_ACTION: STAsyncSendActionNode asyncSend = (STAsyncSendActionNode) exprOrAction; newLhsExpr = mergeQualifiedNameWithExpr(qualifiedName, asyncSend.expression); return STNodeFactory.createAsyncSendActionNode(newLhsExpr, asyncSend.rightArrowToken, asyncSend.peerWorker); case SYNC_SEND_ACTION: STSyncSendActionNode syncSend = (STSyncSendActionNode) exprOrAction; newLhsExpr = mergeQualifiedNameWithExpr(qualifiedName, syncSend.expression); return STNodeFactory.createAsyncSendActionNode(newLhsExpr, syncSend.syncSendToken, syncSend.peerWorker); default: return exprOrAction; } } private STNode mergeQualifiedNameWithTypeDesc(STNode qualifiedName, STNode typeDesc) { switch (typeDesc.kind) { case SIMPLE_NAME_REFERENCE: return qualifiedName; case ARRAY_TYPE_DESC: STArrayTypeDescriptorNode arrayTypeDesc = (STArrayTypeDescriptorNode) typeDesc; STNode newMemberType = mergeQualifiedNameWithTypeDesc(qualifiedName, arrayTypeDesc.memberTypeDesc); return 
createArrayTypeDesc(newMemberType, arrayTypeDesc.openBracket, arrayTypeDesc.arrayLength, arrayTypeDesc.closeBracket); case UNION_TYPE_DESC: STUnionTypeDescriptorNode unionTypeDesc = (STUnionTypeDescriptorNode) typeDesc; STNode newlhsType = mergeQualifiedNameWithTypeDesc(qualifiedName, unionTypeDesc.leftTypeDesc); return createUnionTypeDesc(newlhsType, unionTypeDesc.pipeToken, unionTypeDesc.rightTypeDesc); case INTERSECTION_TYPE_DESC: STIntersectionTypeDescriptorNode intersectionTypeDesc = (STIntersectionTypeDescriptorNode) typeDesc; newlhsType = mergeQualifiedNameWithTypeDesc(qualifiedName, intersectionTypeDesc.leftTypeDesc); return createUnionTypeDesc(newlhsType, intersectionTypeDesc.bitwiseAndToken, intersectionTypeDesc.rightTypeDesc); case OPTIONAL_TYPE_DESC: STOptionalTypeDescriptorNode optionalType = (STOptionalTypeDescriptorNode) typeDesc; newMemberType = mergeQualifiedNameWithTypeDesc(qualifiedName, optionalType.typeDescriptor); return STNodeFactory.createOptionalTypeDescriptorNode(newMemberType, optionalType.questionMarkToken); default: return typeDesc; } } private List<STNode> getTypeDescList(List<STNode> ambiguousList) { List<STNode> typeDescList = new ArrayList<>(); for (STNode item : ambiguousList) { typeDescList.add(getTypeDescFromExpr(item)); } return typeDescList; } /** * Create a type-desc out of an expression. 
* * @param expression Expression * @return Type descriptor */ private STNode getTypeDescFromExpr(STNode expression) { switch (expression.kind) { case INDEXED_EXPRESSION: return parseArrayTypeDescriptorNode((STIndexedExpressionNode) expression); case NUMERIC_LITERAL: case BOOLEAN_LITERAL: case STRING_LITERAL: case NULL_LITERAL: return STNodeFactory.createSingletonTypeDescriptorNode(expression); case TYPE_REFERENCE_TYPE_DESC: return ((STTypeReferenceTypeDescNode) expression).typeRef; case BRACED_EXPRESSION: STBracedExpressionNode bracedExpr = (STBracedExpressionNode) expression; STNode typeDesc = getTypeDescFromExpr(bracedExpr.expression); return STNodeFactory.createParenthesisedTypeDescriptorNode(bracedExpr.openParen, typeDesc, bracedExpr.closeParen); case NIL_LITERAL: STNilLiteralNode nilLiteral = (STNilLiteralNode) expression; return STNodeFactory.createNilTypeDescriptorNode(nilLiteral.openParenToken, nilLiteral.closeParenToken); case BRACKETED_LIST: case LIST_BP_OR_LIST_CONSTRUCTOR: STAmbiguousCollectionNode innerList = (STAmbiguousCollectionNode) expression; STNode memberTypeDescs = STNodeFactory.createNodeList(getTypeDescList(innerList.members)); return STNodeFactory.createTupleTypeDescriptorNode(innerList.collectionStartToken, memberTypeDescs, innerList.collectionEndToken); case BINARY_EXPRESSION: STBinaryExpressionNode binaryExpr = (STBinaryExpressionNode) expression; switch (binaryExpr.operator.kind) { case PIPE_TOKEN: STNode lhsTypeDesc = getTypeDescFromExpr(binaryExpr.lhsExpr); STNode rhsTypeDesc = getTypeDescFromExpr(binaryExpr.rhsExpr); return createUnionTypeDesc(lhsTypeDesc, binaryExpr.operator, rhsTypeDesc); case BITWISE_AND_TOKEN: lhsTypeDesc = getTypeDescFromExpr(binaryExpr.lhsExpr); rhsTypeDesc = getTypeDescFromExpr(binaryExpr.rhsExpr); return createIntersectionTypeDesc(lhsTypeDesc, binaryExpr.operator, rhsTypeDesc); default: break; } return expression; case UNARY_EXPRESSION: return STNodeFactory.createSingletonTypeDescriptorNode(expression); case 
SIMPLE_NAME_REFERENCE: case QUALIFIED_NAME_REFERENCE: default: return expression; } } private List<STNode> getBindingPatternsList(List<STNode> ambibuousList) { List<STNode> bindingPatterns = new ArrayList<STNode>(); for (STNode item : ambibuousList) { bindingPatterns.add(getBindingPattern(item)); } return bindingPatterns; } private STNode getBindingPattern(STNode ambiguousNode) { if (isEmpty(ambiguousNode)) { return ambiguousNode; } switch (ambiguousNode.kind) { case SIMPLE_NAME_REFERENCE: STNode varName = ((STSimpleNameReferenceNode) ambiguousNode).name; return createCaptureOrWildcardBP(varName); case QUALIFIED_NAME_REFERENCE: STQualifiedNameReferenceNode qualifiedName = (STQualifiedNameReferenceNode) ambiguousNode; STNode fieldName = STNodeFactory.createSimpleNameReferenceNode(qualifiedName.modulePrefix); return STNodeFactory.createFieldBindingPatternFullNode(fieldName, qualifiedName.colon, getBindingPattern(qualifiedName.identifier)); case BRACKETED_LIST: case LIST_BP_OR_LIST_CONSTRUCTOR: STAmbiguousCollectionNode innerList = (STAmbiguousCollectionNode) ambiguousNode; STNode memberBindingPatterns = STNodeFactory.createNodeList(getBindingPatternsList(innerList.members)); STNode restBindingPattern = STNodeFactory.createEmptyNode(); return STNodeFactory.createListBindingPatternNode(innerList.collectionStartToken, memberBindingPatterns, restBindingPattern, innerList.collectionEndToken); case MAPPING_BP_OR_MAPPING_CONSTRUCTOR: innerList = (STAmbiguousCollectionNode) ambiguousNode; List<STNode> bindingPatterns = new ArrayList<>(); restBindingPattern = STNodeFactory.createEmptyNode(); for (int i = 0; i < innerList.members.size(); i++) { STNode bp = getBindingPattern(innerList.members.get(i)); if (bp.kind == SyntaxKind.REST_BINDING_PATTERN) { restBindingPattern = bp; break; } bindingPatterns.add(bp); } memberBindingPatterns = STNodeFactory.createNodeList(bindingPatterns); return STNodeFactory.createMappingBindingPatternNode(innerList.collectionStartToken, 
memberBindingPatterns, restBindingPattern, innerList.collectionEndToken); case SPECIFIC_FIELD: STSpecificFieldNode field = (STSpecificFieldNode) ambiguousNode; fieldName = STNodeFactory.createSimpleNameReferenceNode(field.fieldName); if (field.valueExpr == null) { return STNodeFactory.createFieldBindingPatternVarnameNode(fieldName); } return STNodeFactory.createFieldBindingPatternFullNode(fieldName, field.colon, getBindingPattern(field.valueExpr)); case FUNCTION_CALL: STFunctionCallExpressionNode funcCall = (STFunctionCallExpressionNode) ambiguousNode; STNode args = funcCall.arguments; int size = args.bucketCount(); bindingPatterns = new ArrayList<>(); for (int i = 0; i < size; i++) { STNode arg = args.childInBucket(i); bindingPatterns.add(getBindingPattern(arg)); } STNode argListBindingPatterns = STNodeFactory.createNodeList(bindingPatterns); STNode errorKeyword; STNode typeRef; if (funcCall.functionName.kind == SyntaxKind.ERROR_TYPE_DESC) { errorKeyword = funcCall.functionName; typeRef = STNodeFactory.createEmptyNode(); } else { errorKeyword = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.ERROR_KEYWORD); typeRef = funcCall.functionName; } return STNodeFactory.createErrorBindingPatternNode(errorKeyword, typeRef, funcCall.openParenToken, argListBindingPatterns, funcCall.closeParenToken); case POSITIONAL_ARG: STPositionalArgumentNode positionalArg = (STPositionalArgumentNode) ambiguousNode; return getBindingPattern(positionalArg.expression); case NAMED_ARG: STNamedArgumentNode namedArg = (STNamedArgumentNode) ambiguousNode; return STNodeFactory.createNamedArgBindingPatternNode(namedArg.argumentName, namedArg.equalsToken, getBindingPattern(namedArg.expression)); case REST_ARG: STRestArgumentNode restArg = (STRestArgumentNode) ambiguousNode; return STNodeFactory.createRestBindingPatternNode(restArg.ellipsis, restArg.expression); default: return ambiguousNode; } } private List<STNode> getExpressionList(List<STNode> ambibuousList) { List<STNode> exprList = 
new ArrayList<STNode>(); for (STNode item : ambibuousList) { exprList.add(getExpression(item)); } return exprList; } private STNode getExpression(STNode ambiguousNode) { if (isEmpty(ambiguousNode)) { return ambiguousNode; } switch (ambiguousNode.kind) { case BRACKETED_LIST: case LIST_BP_OR_LIST_CONSTRUCTOR: case TUPLE_TYPE_DESC_OR_LIST_CONST: STAmbiguousCollectionNode innerList = (STAmbiguousCollectionNode) ambiguousNode; STNode memberExprs = STNodeFactory.createNodeList(getExpressionList(innerList.members)); return STNodeFactory.createListConstructorExpressionNode(innerList.collectionStartToken, memberExprs, innerList.collectionEndToken); case MAPPING_BP_OR_MAPPING_CONSTRUCTOR: innerList = (STAmbiguousCollectionNode) ambiguousNode; List<STNode> fieldList = new ArrayList<>(); for (int i = 0; i < innerList.members.size(); i++) { STNode field = innerList.members.get(i); STNode fieldNode; if (field.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) { STQualifiedNameReferenceNode qualifiedNameRefNode = (STQualifiedNameReferenceNode) field; STNode readOnlyKeyword = STNodeFactory.createEmptyNode(); STNode fieldName = qualifiedNameRefNode.modulePrefix; STNode colon = qualifiedNameRefNode.colon; STNode valueExpr = getExpression(qualifiedNameRefNode.identifier); fieldNode = STNodeFactory.createSpecificFieldNode(readOnlyKeyword, fieldName, colon, valueExpr); } else { fieldNode = getExpression(field); } fieldList.add(fieldNode); } STNode fields = STNodeFactory.createNodeList(fieldList); return STNodeFactory.createMappingConstructorExpressionNode(innerList.collectionStartToken, fields, innerList.collectionEndToken); case REST_BINDING_PATTERN: STRestBindingPatternNode restBindingPattern = (STRestBindingPatternNode) ambiguousNode; return STNodeFactory.createSpreadFieldNode(restBindingPattern.ellipsisToken, restBindingPattern.variableName); case SPECIFIC_FIELD: STSpecificFieldNode field = (STSpecificFieldNode) ambiguousNode; return 
STNodeFactory.createSpecificFieldNode(field.readonlyKeyword, field.fieldName, field.colon, getExpression(field.valueExpr)); case SIMPLE_NAME_REFERENCE: case QUALIFIED_NAME_REFERENCE: default: return ambiguousNode; } } private STNode getMappingField(STNode identifier, STNode colon, STNode bindingPatternOrExpr) { STNode simpleNameRef = STNodeFactory.createSimpleNameReferenceNode(identifier); switch (bindingPatternOrExpr.kind) { case LIST_BINDING_PATTERN: case MAPPING_BINDING_PATTERN: return STNodeFactory.createFieldBindingPatternFullNode(simpleNameRef, colon, bindingPatternOrExpr); case LIST_CONSTRUCTOR: case MAPPING_CONSTRUCTOR: STNode readonlyKeyword = STNodeFactory.createEmptyNode(); return STNodeFactory.createSpecificFieldNode(readonlyKeyword, simpleNameRef, colon, identifier); case LIST_BP_OR_LIST_CONSTRUCTOR: case MAPPING_BP_OR_MAPPING_CONSTRUCTOR: default: readonlyKeyword = STNodeFactory.createEmptyNode(); return STNodeFactory.createSpecificFieldNode(readonlyKeyword, identifier, colon, bindingPatternOrExpr); } } }
/**
 * Parse a single class member, object member or object member descriptor.
 * <p>
 * <code>
 * class-member := object-field | method-defn | object-type-inclusion
 * <br/>
 * object-member := object-field | method-defn
 * <br/>
 * object-member-descriptor := object-field-descriptor | method-decl | object-type-inclusion
 * </code>
 *
 * @param context Parsing context of the object member (OBJECT_MEMBER or CLASS_MEMBER)
 * @return Parsed node, or {@code null} when the member list has ended
 */
private STNode parseObjectMember(ParserRuleContext context) {
    STNode metadata;
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case EOF_TOKEN:
        case CLOSE_BRACE_TOKEN:
            // End of the member list: signal the caller to stop iterating.
            return null;
        case ASTERISK_TOKEN:
        case PUBLIC_KEYWORD:
        case PRIVATE_KEYWORD:
        case REMOTE_KEYWORD:
        case FUNCTION_KEYWORD:
        case TRANSACTIONAL_KEYWORD:
        case RESOURCE_KEYWORD:
            // Member starts directly with a qualifier/keyword: no metadata present.
            metadata = STNodeFactory.createEmptyNode();
            break;
        case DOCUMENTATION_STRING:
        case AT_TOKEN:
            // Documentation string and/or annotations precede the member.
            metadata = parseMetaData();
            break;
        default:
            if (isTypeStartingToken(nextToken.kind)) {
                // An object-field starting with a type descriptor; no metadata.
                metadata = STNodeFactory.createEmptyNode();
                break;
            }
            // Unexpected token: recover using the context-appropriate rule,
            // then retry parsing the member from scratch.
            ParserRuleContext recoveryCtx;
            if (context == ParserRuleContext.OBJECT_MEMBER) {
                recoveryCtx = ParserRuleContext.OBJECT_MEMBER_START;
            } else {
                recoveryCtx = ParserRuleContext.CLASS_MEMBER_START;
            }
            recover(peek(), recoveryCtx);
            return parseObjectMember(context);
    }
    return parseObjectMemberWithoutMeta(metadata, context);
}
We should clarify in the comment that this is an optimization which only works for embedded execution or when the user code has been added to the Flink classpath (e.g. via the lib folder). Potentially we also need to invert the class-loading order via the Flink config to ensure that the parent classloader is searched first.
/**
 * Releases the cached context for the given job, either immediately or after the
 * configured environment-cache TTL has elapsed.
 *
 * <p>Deferred release only works when this class is loaded by the same (parent)
 * classloader as Flink's {@code ExecutionEnvironment} — i.e. embedded execution or
 * user code on the Flink classpath (e.g. the lib folder). Otherwise the scheduled
 * task could outlive the user-code classloader, so we fall back to a synchronous
 * release.
 *
 * @param jobInfo identifies the job whose context is being released; the job must
 *     currently be present in the cache
 * @throws IllegalStateException if no context is cached for the job
 */
private void scheduleRelease(JobInfo jobInfo) {
    WrappedContext wrapper = getCache().get(jobInfo.jobId());
    Preconditions.checkState(
        wrapper != null, "Releasing context for unknown job: " + jobInfo.jobId());
    PipelineOptions pipelineOptions =
        PipelineOptionsTranslation.fromProto(jobInfo.pipelineOptions());
    int environmentCacheTTLMillis =
        pipelineOptions.as(PortablePipelineOptions.class).getEnvironmentCacheMillis();
    if (environmentCacheTTLMillis > 0) {
        // Deferred release requires being on the parent Flink classloader; see javadoc.
        if (this.getClass().getClassLoader() != ExecutionEnvironment.class.getClassLoader()) {
            LOG.warn(
                "{} is not loaded on parent Flink classloader. "
                    + "Falling back to synchronous environment release for job {}.",
                this.getClass(),
                jobInfo.jobId());
            release(wrapper);
        } else {
            getExecutor()
                .schedule(() -> release(wrapper), environmentCacheTTLMillis, TimeUnit.MILLISECONDS);
        }
    } else {
        // TTL disabled: release synchronously.
        release(wrapper);
    }
}
if (this.getClass().getClassLoader() != ExecutionEnvironment.class.getClassLoader()) {
/**
 * Releases the cached context for the given job — immediately when the
 * environment-cache TTL is disabled or when deferred release is unsafe,
 * otherwise after the TTL has elapsed.
 *
 * @param jobInfo job whose context should be released; must be cached
 * @throws IllegalStateException if no context is cached for the job
 */
private void scheduleRelease(JobInfo jobInfo) {
    WrappedContext wrapper = getCache().get(jobInfo.jobId());
    Preconditions.checkState(
        wrapper != null, "Releasing context for unknown job: " + jobInfo.jobId());
    PipelineOptions pipelineOptions =
        PipelineOptionsTranslation.fromProto(jobInfo.pipelineOptions());
    int ttlMillis =
        pipelineOptions.as(PortablePipelineOptions.class).getEnvironmentCacheMillis();
    if (ttlMillis <= 0) {
        // Caching disabled: tear the environment down right away.
        release(wrapper);
        return;
    }
    // Deferred release is only safe when this class lives on the same (parent)
    // classloader as Flink's ExecutionEnvironment; otherwise release synchronously.
    boolean onParentClassLoader =
        this.getClass().getClassLoader() == ExecutionEnvironment.class.getClassLoader();
    if (onParentClassLoader) {
        getExecutor().schedule(() -> release(wrapper), ttlMillis, TimeUnit.MILLISECONDS);
    } else {
        LOG.warn(
            "{} is not loaded on parent Flink classloader. "
                + "Falling back to synchronous environment release for job {}.",
            this.getClass(),
            jobInfo.jobId());
        release(wrapper);
    }
}
class ReferenceCountingFlinkExecutableStageContextFactory implements FlinkExecutableStageContext.Factory { private static final Logger LOG = LoggerFactory.getLogger(ReferenceCountingFlinkExecutableStageContextFactory.class); private static final int MAX_RETRY = 3; private final Creator creator; private transient volatile ScheduledExecutorService executor; private transient volatile ConcurrentHashMap<String, WrappedContext> keyRegistry; public static ReferenceCountingFlinkExecutableStageContextFactory create(Creator creator) { return new ReferenceCountingFlinkExecutableStageContextFactory(creator); } private ReferenceCountingFlinkExecutableStageContextFactory(Creator creator) { this.creator = creator; } @Override public FlinkExecutableStageContext get(JobInfo jobInfo) { for (int retry = 0; retry < MAX_RETRY; retry++) { WrappedContext wrapper = getCache() .computeIfAbsent( jobInfo.jobId(), jobId -> { try { return new WrappedContext(jobInfo, creator.apply(jobInfo)); } catch (Exception e) { throw new RuntimeException( "Unable to create context for job " + jobInfo.jobId(), e); } }); synchronized (wrapper) { if (wrapper.referenceCount != null) { wrapper.referenceCount.incrementAndGet(); return wrapper; } } } throw new RuntimeException( String.format( "Max retry %s exhausted while creating Context for job %s", MAX_RETRY, jobInfo.jobId())); } @SuppressWarnings("FutureReturnValueIgnored") private ConcurrentHashMap<String, WrappedContext> getCache() { if (keyRegistry != null) { return keyRegistry; } synchronized (this) { if (keyRegistry == null) { keyRegistry = new ConcurrentHashMap<>(); } return keyRegistry; } } private ScheduledExecutorService getExecutor() { if (executor != null) { return executor; } synchronized (this) { if (executor == null) { executor = Executors.newScheduledThreadPool(1, new ThreadFactoryBuilder().setDaemon(true).build()); } return executor; } } @VisibleForTesting void release(FlinkExecutableStageContext context) { @SuppressWarnings({"unchecked", "Not 
exected to be called from outside."}) WrappedContext wrapper = (WrappedContext) context; synchronized (wrapper) { if (wrapper.referenceCount.decrementAndGet() == 0) { wrapper.referenceCount = null; if (getCache().remove(wrapper.jobInfo.jobId(), wrapper)) { try { wrapper.closeActual(); } catch (Throwable t) { LOG.error("Unable to close FlinkExecutableStageContext.", t); } } } } } /** * {@link WrappedContext} does not expose equals of actual {@link FlinkExecutableStageContext}. */ private class WrappedContext implements FlinkExecutableStageContext { private JobInfo jobInfo; private AtomicInteger referenceCount; private FlinkExecutableStageContext context; /** {@link WrappedContext WrappedContext(JobInfo jobInfo, FlinkExecutableStageContext context) { this.jobInfo = jobInfo; this.context = context; this.referenceCount = new AtomicInteger(0); } @Override public StageBundleFactory getStageBundleFactory(ExecutableStage executableStage) { return context.getStageBundleFactory(executableStage); } @Override public void close() { scheduleRelease(jobInfo); } private void closeActual() throws Exception { context.close(); } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } WrappedContext that = (WrappedContext) o; return Objects.equals(jobInfo.jobId(), that.jobInfo.jobId()); } @Override public int hashCode() { return Objects.hash(jobInfo); } @Override public String toString() { return "ContextWrapper{" + "jobId='" + jobInfo + '\'' + ", referenceCount=" + referenceCount + '}'; } } /** Interface for creator which extends Serializable. */ public interface Creator extends ThrowingFunction<JobInfo, FlinkExecutableStageContext>, Serializable {} }
class ReferenceCountingFlinkExecutableStageContextFactory implements FlinkExecutableStageContext.Factory { private static final Logger LOG = LoggerFactory.getLogger(ReferenceCountingFlinkExecutableStageContextFactory.class); private static final int MAX_RETRY = 3; private final Creator creator; private transient volatile ScheduledExecutorService executor; private transient volatile ConcurrentHashMap<String, WrappedContext> keyRegistry; public static ReferenceCountingFlinkExecutableStageContextFactory create(Creator creator) { return new ReferenceCountingFlinkExecutableStageContextFactory(creator); } private ReferenceCountingFlinkExecutableStageContextFactory(Creator creator) { this.creator = creator; } @Override public FlinkExecutableStageContext get(JobInfo jobInfo) { for (int retry = 0; retry < MAX_RETRY; retry++) { WrappedContext wrapper = getCache() .computeIfAbsent( jobInfo.jobId(), jobId -> { try { return new WrappedContext(jobInfo, creator.apply(jobInfo)); } catch (Exception e) { throw new RuntimeException( "Unable to create context for job " + jobInfo.jobId(), e); } }); synchronized (wrapper) { if (wrapper.referenceCount != null) { wrapper.referenceCount.incrementAndGet(); return wrapper; } } } throw new RuntimeException( String.format( "Max retry %s exhausted while creating Context for job %s", MAX_RETRY, jobInfo.jobId())); } @SuppressWarnings("FutureReturnValueIgnored") private ConcurrentHashMap<String, WrappedContext> getCache() { if (keyRegistry != null) { return keyRegistry; } synchronized (this) { if (keyRegistry == null) { keyRegistry = new ConcurrentHashMap<>(); } return keyRegistry; } } private ScheduledExecutorService getExecutor() { if (executor != null) { return executor; } synchronized (this) { if (executor == null) { executor = Executors.newScheduledThreadPool(1, new ThreadFactoryBuilder().setDaemon(true).build()); } return executor; } } @VisibleForTesting void release(FlinkExecutableStageContext context) { @SuppressWarnings({"unchecked", "Not 
exected to be called from outside."}) WrappedContext wrapper = (WrappedContext) context; synchronized (wrapper) { if (wrapper.referenceCount.decrementAndGet() == 0) { wrapper.referenceCount = null; if (getCache().remove(wrapper.jobInfo.jobId(), wrapper)) { try { wrapper.closeActual(); } catch (Throwable t) { LOG.error("Unable to close FlinkExecutableStageContext.", t); } } } } } /** * {@link WrappedContext} does not expose equals of actual {@link FlinkExecutableStageContext}. */ private class WrappedContext implements FlinkExecutableStageContext { private JobInfo jobInfo; private AtomicInteger referenceCount; private FlinkExecutableStageContext context; /** {@link WrappedContext WrappedContext(JobInfo jobInfo, FlinkExecutableStageContext context) { this.jobInfo = jobInfo; this.context = context; this.referenceCount = new AtomicInteger(0); } @Override public StageBundleFactory getStageBundleFactory(ExecutableStage executableStage) { return context.getStageBundleFactory(executableStage); } @Override public void close() { scheduleRelease(jobInfo); } private void closeActual() throws Exception { context.close(); } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } WrappedContext that = (WrappedContext) o; return Objects.equals(jobInfo.jobId(), that.jobInfo.jobId()); } @Override public int hashCode() { return Objects.hash(jobInfo); } @Override public String toString() { return "ContextWrapper{" + "jobId='" + jobInfo + '\'' + ", referenceCount=" + referenceCount + '}'; } } /** Interface for creator which extends Serializable. */ public interface Creator extends ThrowingFunction<JobInfo, FlinkExecutableStageContext>, Serializable {} }
You might use `Strings.isNullOrEmpty` instead of two checks.
/**
 * Returns a configuration that authenticates with the given API key.
 *
 * @param apiKey the API key to use; must be non-null and non-empty
 * @return a new {@code ConnectionConfiguration} carrying the API key
 * @throws IllegalArgumentException if {@code apiKey} is null or empty
 */
public ConnectionConfiguration withApiKey(String apiKey) {
    // Single guard replaces the former pair of checkArgument calls:
    // Strings.isNullOrEmpty covers both the null and the empty case.
    checkArgument(!Strings.isNullOrEmpty(apiKey), "apiKey can not be null or empty");
    return builder().setApiKey(apiKey).build();
}
checkArgument(!apiKey.isEmpty(), "apiKey can not be empty");
/**
 * Returns a configuration that authenticates with the given API key.
 *
 * @param apiKey the API key to use; must be non-null and non-empty
 * @return a new {@code ConnectionConfiguration} carrying the API key
 * @throws IllegalArgumentException if {@code apiKey} is null or empty
 */
public ConnectionConfiguration withApiKey(String apiKey) {
    // Guava's Strings.isNullOrEmpty collapses the null and empty checks into one guard.
    checkArgument(!Strings.isNullOrEmpty(apiKey), "apiKey can not be null or empty");
    return builder().setApiKey(apiKey).build();
}
// Builder for the Elasticsearch connection configuration. All setters are
// abstract and implemented elsewhere (AutoValue-style generated builder —
// NOTE(review): presumably; confirm against the enclosing class).
class Builder {
    abstract Builder setAddresses(List<String> addresses);
    abstract Builder setUsername(String username);
    abstract Builder setPassword(String password);
    abstract Builder setApiKey(String apiKey);
    abstract Builder setBearerToken(String bearerToken);
    abstract Builder setKeystorePath(String keystorePath);
    abstract Builder setKeystorePassword(String password);
    abstract Builder setIndex(String index);
    abstract Builder setType(String type);
    abstract Builder setSocketTimeout(Integer maxRetryTimeout);
    abstract Builder setConnectTimeout(Integer connectTimeout);
    abstract Builder setTrustSelfSignedCerts(boolean trustSelfSignedCerts);
    abstract ConnectionConfiguration build();
}
// Fluent builder for ConnectionConfiguration: each setter returns this builder,
// and build() produces the immutable configuration. Implementations of these
// abstract methods live outside this snippet.
class Builder {
    abstract Builder setAddresses(List<String> addresses);
    abstract Builder setUsername(String username);
    abstract Builder setPassword(String password);
    abstract Builder setApiKey(String apiKey);
    abstract Builder setBearerToken(String bearerToken);
    abstract Builder setKeystorePath(String keystorePath);
    abstract Builder setKeystorePassword(String password);
    abstract Builder setIndex(String index);
    abstract Builder setType(String type);
    abstract Builder setSocketTimeout(Integer maxRetryTimeout);
    abstract Builder setConnectTimeout(Integer connectTimeout);
    abstract Builder setTrustSelfSignedCerts(boolean trustSelfSignedCerts);
    abstract ConnectionConfiguration build();
}
`instanceof` also means that `readTimeout` is not `null` (and IntelliJ hints as much if one tries to add the null check)
/**
 * Builds the Vert.x {@link HttpClientRequest} for the given client invocation state.
 *
 * <p>Derives host/port/scheme from the request URI (defaulting the port to 443 for
 * https and 80 otherwise) and applies the optional read-timeout property.
 *
 * @param state the in-flight REST client invocation
 * @return a future that completes with the prepared request
 */
public Future<HttpClientRequest> createRequest(RestClientRequestContext state) {
    HttpClient httpClient = state.getHttpClient();
    URI uri = state.getUri();
    boolean isHttps = "https".equals(uri.getScheme());
    int port = uri.getPort() != -1 ? uri.getPort() : (isHttps ? 443 : 80);
    RequestOptions requestOptions = new RequestOptions();
    requestOptions.setHost(uri.getHost());
    requestOptions.setPort(port);
    requestOptions.setMethod(HttpMethod.valueOf(state.getHttpMethod()));
    requestOptions.setURI(uri.getPath() + (uri.getQuery() == null ? "" : "?" + uri.getQuery()));
    requestOptions.setFollowRedirects(followRedirects);
    requestOptions.setSsl(isHttps);
    Object readTimeout = state.getConfiguration().getProperty(QuarkusRestClientProperties.READ_TIMEOUT);
    // instanceof already implies non-null, so no separate null check is needed
    // (redundant extra parentheses removed).
    if (readTimeout instanceof Long) {
        requestOptions.setTimeout((Long) readTimeout);
    }
    return httpClient.request(requestOptions);
}
if ((readTimeout instanceof Long)) {
/**
 * Builds the Vert.x {@link HttpClientRequest} for the given client invocation state.
 *
 * <p>Derives host/port/scheme from the request URI (defaulting the port to 443 for
 * https and 80 otherwise) and applies the optional read-timeout property.
 *
 * @param state the in-flight REST client invocation
 * @return a future that completes with the prepared request
 */
public Future<HttpClientRequest> createRequest(RestClientRequestContext state) {
    HttpClient httpClient = state.getHttpClient();
    URI uri = state.getUri();
    boolean isHttps = "https".equals(uri.getScheme());
    int port = uri.getPort() != -1 ? uri.getPort() : (isHttps ? 443 : 80);
    RequestOptions requestOptions = new RequestOptions();
    requestOptions.setHost(uri.getHost());
    requestOptions.setPort(port);
    requestOptions.setMethod(HttpMethod.valueOf(state.getHttpMethod()));
    requestOptions.setURI(uri.getPath() + (uri.getQuery() == null ? "" : "?" + uri.getQuery()));
    requestOptions.setFollowRedirects(followRedirects);
    requestOptions.setSsl(isHttps);
    Object readTimeout = state.getConfiguration().getProperty(QuarkusRestClientProperties.READ_TIMEOUT);
    // instanceof implies readTimeout is non-null, so no separate null check is needed.
    if (readTimeout instanceof Long) {
        requestOptions.setTimeout((Long) readTimeout);
    }
    return httpClient.request(requestOptions);
}
class ClientSendRequestHandler implements ClientRestHandler { private final boolean followRedirects; public ClientSendRequestHandler(boolean followRedirects) { this.followRedirects = followRedirects; } @Override public void handle(RestClientRequestContext requestContext) { if (requestContext.isAborted()) { return; } requestContext.suspend(); Future<HttpClientRequest> future = createRequest(requestContext); future.onFailure(new Handler<Throwable>() { @Override public void handle(Throwable event) { if (event instanceof IOException) { requestContext.resume(new ProcessingException(event)); } else { requestContext.resume(event); } } }); future.onSuccess(new Handler<HttpClientRequest>() { @Override public void handle(HttpClientRequest httpClientRequest) { Future<HttpClientResponse> sent; if (requestContext.isMultipart()) { Promise<HttpClientRequest> requestPromise = Promise.promise(); MultipartFormUpload actualEntity; try { actualEntity = ClientSendRequestHandler.this.setMultipartHeadersAndPrepareBody(httpClientRequest, requestContext); Pipe<Buffer> pipe = actualEntity.pipe(); requestPromise.future().onComplete(ar -> { if (ar.succeeded()) { HttpClientRequest req = ar.result(); if (httpClientRequest.headers() == null || !httpClientRequest.headers().contains(HttpHeaders.CONTENT_LENGTH)) { req.setChunked(true); } pipe.endOnFailure(false); pipe.to(req, ar2 -> { if (ar2.failed()) { req.reset(0L, ar2.cause()); } }); actualEntity.run(); } else { pipe.close(); } }); sent = httpClientRequest.response(); requestPromise.complete(httpClientRequest); } catch (Throwable e) { requestContext.resume(e); return; } } else { Buffer actualEntity; try { actualEntity = ClientSendRequestHandler.this .setRequestHeadersAndPrepareBody(httpClientRequest, requestContext); } catch (Throwable e) { requestContext.resume(e); return; } if (actualEntity == AsyncInvokerImpl.EMPTY_BUFFER) { sent = httpClientRequest.send(); } else { sent = httpClientRequest.send(actualEntity); } } sent.onSuccess(new 
Handler<HttpClientResponse>() { @Override public void handle(HttpClientResponse clientResponse) { try { requestContext.initialiseResponse(clientResponse); if (!requestContext.isRegisterBodyHandler()) { clientResponse.pause(); requestContext.resume(); } else { clientResponse.bodyHandler(new Handler<Buffer>() { @Override public void handle(Buffer buffer) { try { if (buffer.length() > 0) { requestContext.setResponseEntityStream( new ByteArrayInputStream(buffer.getBytes())); } else { requestContext.setResponseEntityStream(null); } requestContext.resume(); } catch (Throwable t) { requestContext.resume(t); } } }); } } catch (Throwable t) { requestContext.resume(t); } } }) .onFailure(new Handler<Throwable>() { @Override public void handle(Throwable failure) { if (failure instanceof IOException) { requestContext.resume(new ProcessingException(failure)); } else { requestContext.resume(failure); } } }); } }); } private MultipartFormUpload setMultipartHeadersAndPrepareBody(HttpClientRequest httpClientRequest, RestClientRequestContext state) throws Exception { if (!(state.getEntity().getEntity() instanceof MultipartForm)) { throw new IllegalArgumentException( "Multipart form upload expects an entity of type MultipartForm, got: " + state.getEntity().getEntity()); } MultivaluedMap<String, String> headerMap = state.getRequestHeaders().asMap(); MultipartForm entity = (MultipartForm) state.getEntity().getEntity(); Object property = state.getConfiguration().getProperty(QuarkusRestClientProperties.MULTIPART_ENCODER_MODE); HttpPostRequestEncoder.EncoderMode mode = HttpPostRequestEncoder.EncoderMode.RFC1738; if (property != null) { mode = (HttpPostRequestEncoder.EncoderMode) property; } MultipartFormUpload multipartFormUpload = new MultipartFormUpload(Vertx.currentContext(), entity, true, mode); setEntityRelatedHeaders(headerMap, state.getEntity()); MultiMap multipartHeaders = multipartFormUpload.headers(); for (String multipartHeader : multipartHeaders.names()) { 
headerMap.put(multipartHeader, multipartHeaders.getAll(multipartHeader)); } setVertxHeaders(httpClientRequest, headerMap); return multipartFormUpload; } private Buffer setRequestHeadersAndPrepareBody(HttpClientRequest httpClientRequest, RestClientRequestContext state) throws IOException { MultivaluedMap<String, String> headerMap = state.getRequestHeaders().asMap(); Buffer actualEntity = AsyncInvokerImpl.EMPTY_BUFFER; Entity<?> entity = state.getEntity(); if (entity != null) { setEntityRelatedHeaders(headerMap, entity); actualEntity = state.writeEntity(entity, headerMap, state.getConfiguration().getWriterInterceptors().toArray(Serialisers.NO_WRITER_INTERCEPTOR)); } setVertxHeaders(httpClientRequest, headerMap); return actualEntity; } private void setVertxHeaders(HttpClientRequest httpClientRequest, MultivaluedMap<String, String> headerMap) { MultiMap vertxHttpHeaders = httpClientRequest.headers(); for (Map.Entry<String, List<String>> entry : headerMap.entrySet()) { vertxHttpHeaders.add(entry.getKey(), entry.getValue()); } } private void setEntityRelatedHeaders(MultivaluedMap<String, String> headerMap, Entity<?> entity) { if (entity.getVariant() != null) { Variant v = entity.getVariant(); headerMap.putSingle(HttpHeaders.CONTENT_TYPE, v.getMediaType().toString()); if (v.getLanguageString() != null) { headerMap.putSingle(HttpHeaders.CONTENT_LANGUAGE, v.getLanguageString()); } if (v.getEncoding() != null) { headerMap.putSingle(HttpHeaders.CONTENT_ENCODING, v.getEncoding()); } } } }
class ClientSendRequestHandler implements ClientRestHandler { private final boolean followRedirects; public ClientSendRequestHandler(boolean followRedirects) { this.followRedirects = followRedirects; } @Override public void handle(RestClientRequestContext requestContext) { if (requestContext.isAborted()) { return; } requestContext.suspend(); Future<HttpClientRequest> future = createRequest(requestContext); future.onFailure(new Handler<Throwable>() { @Override public void handle(Throwable event) { if (event instanceof IOException) { requestContext.resume(new ProcessingException(event)); } else { requestContext.resume(event); } } }); future.onSuccess(new Handler<HttpClientRequest>() { @Override public void handle(HttpClientRequest httpClientRequest) { Future<HttpClientResponse> sent; if (requestContext.isMultipart()) { Promise<HttpClientRequest> requestPromise = Promise.promise(); MultipartFormUpload actualEntity; try { actualEntity = ClientSendRequestHandler.this.setMultipartHeadersAndPrepareBody(httpClientRequest, requestContext); Pipe<Buffer> pipe = actualEntity.pipe(); requestPromise.future().onComplete(ar -> { if (ar.succeeded()) { HttpClientRequest req = ar.result(); if (httpClientRequest.headers() == null || !httpClientRequest.headers().contains(HttpHeaders.CONTENT_LENGTH)) { req.setChunked(true); } pipe.endOnFailure(false); pipe.to(req, ar2 -> { if (ar2.failed()) { req.reset(0L, ar2.cause()); } }); actualEntity.run(); } else { pipe.close(); } }); sent = httpClientRequest.response(); requestPromise.complete(httpClientRequest); } catch (Throwable e) { requestContext.resume(e); return; } } else { Buffer actualEntity; try { actualEntity = ClientSendRequestHandler.this .setRequestHeadersAndPrepareBody(httpClientRequest, requestContext); } catch (Throwable e) { requestContext.resume(e); return; } if (actualEntity == AsyncInvokerImpl.EMPTY_BUFFER) { sent = httpClientRequest.send(); } else { sent = httpClientRequest.send(actualEntity); } } sent.onSuccess(new 
Handler<HttpClientResponse>() { @Override public void handle(HttpClientResponse clientResponse) { try { requestContext.initialiseResponse(clientResponse); if (!requestContext.isRegisterBodyHandler()) { clientResponse.pause(); requestContext.resume(); } else { clientResponse.bodyHandler(new Handler<Buffer>() { @Override public void handle(Buffer buffer) { try { if (buffer.length() > 0) { requestContext.setResponseEntityStream( new ByteArrayInputStream(buffer.getBytes())); } else { requestContext.setResponseEntityStream(null); } requestContext.resume(); } catch (Throwable t) { requestContext.resume(t); } } }); } } catch (Throwable t) { requestContext.resume(t); } } }) .onFailure(new Handler<Throwable>() { @Override public void handle(Throwable failure) { if (failure instanceof IOException) { requestContext.resume(new ProcessingException(failure)); } else { requestContext.resume(failure); } } }); } }); } private MultipartFormUpload setMultipartHeadersAndPrepareBody(HttpClientRequest httpClientRequest, RestClientRequestContext state) throws Exception { if (!(state.getEntity().getEntity() instanceof MultipartForm)) { throw new IllegalArgumentException( "Multipart form upload expects an entity of type MultipartForm, got: " + state.getEntity().getEntity()); } MultivaluedMap<String, String> headerMap = state.getRequestHeaders().asMap(); MultipartForm entity = (MultipartForm) state.getEntity().getEntity(); Object property = state.getConfiguration().getProperty(QuarkusRestClientProperties.MULTIPART_ENCODER_MODE); HttpPostRequestEncoder.EncoderMode mode = HttpPostRequestEncoder.EncoderMode.RFC1738; if (property != null) { mode = (HttpPostRequestEncoder.EncoderMode) property; } MultipartFormUpload multipartFormUpload = new MultipartFormUpload(Vertx.currentContext(), entity, true, mode); setEntityRelatedHeaders(headerMap, state.getEntity()); MultiMap multipartHeaders = multipartFormUpload.headers(); for (String multipartHeader : multipartHeaders.names()) { 
headerMap.put(multipartHeader, multipartHeaders.getAll(multipartHeader)); } setVertxHeaders(httpClientRequest, headerMap); return multipartFormUpload; } private Buffer setRequestHeadersAndPrepareBody(HttpClientRequest httpClientRequest, RestClientRequestContext state) throws IOException { MultivaluedMap<String, String> headerMap = state.getRequestHeaders().asMap(); Buffer actualEntity = AsyncInvokerImpl.EMPTY_BUFFER; Entity<?> entity = state.getEntity(); if (entity != null) { setEntityRelatedHeaders(headerMap, entity); actualEntity = state.writeEntity(entity, headerMap, state.getConfiguration().getWriterInterceptors().toArray(Serialisers.NO_WRITER_INTERCEPTOR)); } setVertxHeaders(httpClientRequest, headerMap); return actualEntity; } private void setVertxHeaders(HttpClientRequest httpClientRequest, MultivaluedMap<String, String> headerMap) { MultiMap vertxHttpHeaders = httpClientRequest.headers(); for (Map.Entry<String, List<String>> entry : headerMap.entrySet()) { vertxHttpHeaders.add(entry.getKey(), entry.getValue()); } } private void setEntityRelatedHeaders(MultivaluedMap<String, String> headerMap, Entity<?> entity) { if (entity.getVariant() != null) { Variant v = entity.getVariant(); headerMap.putSingle(HttpHeaders.CONTENT_TYPE, v.getMediaType().toString()); if (v.getLanguageString() != null) { headerMap.putSingle(HttpHeaders.CONTENT_LANGUAGE, v.getLanguageString()); } if (v.getEncoding() != null) { headerMap.putSingle(HttpHeaders.CONTENT_ENCODING, v.getEncoding()); } } } }
I believe the reason behind `set` was this duplicate issue. With this PR, we cannot remove a value anymore or decide when we want a single or multiple values of a header (even if I agree that having multiple values of the same header is rarely used, this PR may introduce that case a lot more).
/**
 * Registers a route per configured filter that stamps the filter's headers onto
 * the response. When a filter restricts itself to specific HTTP methods, one
 * route is registered per method; otherwise a single method-agnostic route is used.
 *
 * <p>Fix: the previous implementation blindly {@code addAll}-ed the header map,
 * which appended duplicate values whenever a header was already present (e.g. a
 * filter matched by several routes). Headers are now merged with de-duplication.
 *
 * @param filtersInConfig filter name to filter configuration
 * @param httpRouteRouter router to register the filter routes on
 */
public static void applyFilters(Map<String, FilterConfig> filtersInConfig, Router httpRouteRouter) {
    if (!filtersInConfig.isEmpty()) {
        for (var entry : filtersInConfig.entrySet()) {
            var filterConfig = entry.getValue();
            var matches = filterConfig.matches;
            // Filters with no explicit order run first.
            var order = filterConfig.order.orElse(Integer.MIN_VALUE);
            var methods = filterConfig.methods;
            var headers = filterConfig.header;
            if (methods.isEmpty()) {
                httpRouteRouter.routeWithRegex(matches)
                        .order(order)
                        .handler(new Handler<RoutingContext>() {
                            @Override
                            public void handle(RoutingContext event) {
                                setFilterHeaders(event, headers);
                                event.next();
                            }
                        });
            } else {
                for (var method : methods.get()) {
                    httpRouteRouter.routeWithRegex(HttpMethod.valueOf(method.toUpperCase(Locale.ROOT)), matches)
                            .order(order)
                            .handler(new Handler<RoutingContext>() {
                                @Override
                                public void handle(RoutingContext event) {
                                    setFilterHeaders(event, headers);
                                    event.next();
                                }
                            });
                }
            }
        }
    }
}

/**
 * Merges the configured filter headers into the response without producing
 * duplicate values: absent headers are set, and an already-present header only
 * gains the new value when it is not already among its values.
 */
private static void setFilterHeaders(RoutingContext event, Map<String, String> headers) {
    for (var entry : headers.entrySet()) {
        var name = entry.getKey();
        var value = entry.getValue();
        var responseHeaders = event.response().headers();
        var existing = responseHeaders.getAll(name);
        if (existing.isEmpty()) {
            responseHeaders.set(name, value);
        } else {
            // Preserve existing order while de-duplicating.
            var merged = new java.util.LinkedHashSet<String>(existing);
            if (merged.add(value)) {
                responseHeaders.set(name, merged);
            }
        }
    }
}
event.response().headers().addAll(headers);
/**
 * Registers a route per configured filter that stamps the filter's headers onto
 * the response. When a filter restricts itself to specific HTTP methods, one
 * route is registered per method; otherwise a single method-agnostic route is used.
 *
 * @param filtersInConfig filter name to filter configuration
 * @param httpRouteRouter router to register the filter routes on
 */
public static void applyFilters(Map<String, FilterConfig> filtersInConfig, Router httpRouteRouter) {
    if (!filtersInConfig.isEmpty()) {
        for (var entry : filtersInConfig.entrySet()) {
            var filterConfig = entry.getValue();
            var matches = filterConfig.matches;
            // Filters with no explicit order run first.
            var order = filterConfig.order.orElse(Integer.MIN_VALUE);
            var methods = filterConfig.methods;
            var headers = filterConfig.header;
            if (methods.isEmpty()) {
                httpRouteRouter.routeWithRegex(matches)
                        .order(order)
                        .handler(new Handler<RoutingContext>() {
                            @Override
                            public void handle(RoutingContext event) {
                                // Merge (de-duplicated) rather than blindly append headers.
                                addFilterHeaders(event, headers);
                                event.next();
                            }
                        });
            } else {
                for (var method : methods.get()) {
                    httpRouteRouter.routeWithRegex(HttpMethod.valueOf(method.toUpperCase(Locale.ROOT)), matches)
                            .order(order)
                            .handler(new Handler<RoutingContext>() {
                                @Override
                                public void handle(RoutingContext event) {
                                    // Merge (de-duplicated) rather than blindly append headers.
                                    addFilterHeaders(event, headers);
                                    event.next();
                                }
                            });
                }
            }
        }
    }
}
class HttpServerCommonHandlers { public static void enforceMaxBodySize(ServerLimitsConfig limits, Router httpRouteRouter) { if (limits.maxBodySize.isPresent()) { long limit = limits.maxBodySize.get().asLongValue(); Long limitObj = limit; httpRouteRouter.route().order(RouteConstants.ROUTE_ORDER_UPLOAD_LIMIT).handler(new Handler<RoutingContext>() { @Override public void handle(RoutingContext event) { String lengthString = event.request().headers().get(HttpHeaderNames.CONTENT_LENGTH); if (lengthString != null) { long length = Long.parseLong(lengthString); if (length > limit) { event.response().headers().add(HttpHeaderNames.CONNECTION, "close"); event.response().setStatusCode(HttpResponseStatus.REQUEST_ENTITY_TOO_LARGE.code()); event.response().endHandler(new Handler<Void>() { @Override public void handle(Void e) { event.request().connection().close(); } }); event.response().end(); return; } } else { event.put(VertxHttpRecorder.MAX_REQUEST_SIZE_KEY, limitObj); } event.next(); } }); } } public static Handler<HttpServerRequest> enforceDuplicatedContext(Handler<HttpServerRequest> delegate) { return new Handler<HttpServerRequest>() { @Override public void handle(HttpServerRequest event) { if (!VertxContext.isOnDuplicatedContext()) { Context context = VertxContext.createNewDuplicatedContext(); context.runOnContext(new Handler<Void>() { @Override public void handle(Void x) { setCurrentContextSafe(true); delegate.handle(new ResumingRequestWrapper(event)); } }); } else { setCurrentContextSafe(true); delegate.handle(new ResumingRequestWrapper(event)); } } }; } public static Handler<HttpServerRequest> applyProxy(ProxyConfig proxyConfig, Handler<HttpServerRequest> root, Supplier<Vertx> vertx) { if (proxyConfig.proxyAddressForwarding) { final ForwardingProxyOptions forwardingProxyOptions = ForwardingProxyOptions.from(proxyConfig); final TrustedProxyCheck.TrustedProxyCheckBuilder proxyCheckBuilder = forwardingProxyOptions.trustedProxyCheckBuilder; if (proxyCheckBuilder == null) { 
final TrustedProxyCheck allowAllProxyCheck = allowAll(); return new Handler<HttpServerRequest>() { @Override public void handle(HttpServerRequest event) { root.handle(new ForwardedServerRequestWrapper(event, forwardingProxyOptions, allowAllProxyCheck)); } }; } else { return new ForwardedProxyHandler(proxyCheckBuilder, vertx, root, forwardingProxyOptions); } } return root; } public static void applyHeaders(Map<String, HeaderConfig> headers, Router httpRouteRouter) { if (!headers.isEmpty()) { for (Map.Entry<String, HeaderConfig> entry : headers.entrySet()) { var name = entry.getKey(); var config = entry.getValue(); if (config.methods.isEmpty()) { httpRouteRouter.route(config.path) .order(RouteConstants.ROUTE_ORDER_HEADERS) .handler(new Handler<RoutingContext>() { @Override public void handle(RoutingContext event) { event.response().headers().set(name, config.value); event.next(); } }); } else { for (String method : config.methods.get()) { httpRouteRouter.route(HttpMethod.valueOf(method.toUpperCase(Locale.ROOT)), config.path) .order(RouteConstants.ROUTE_ORDER_HEADERS) .handler(new Handler<RoutingContext>() { @Override public void handle(RoutingContext event) { event.response().headers().add(name, config.value); event.next(); } }); } } } } } }
/**
 * Static helpers that install common HTTP-server behaviour on a Vert.x router:
 * request-body size limits, duplicated-context enforcement, reverse-proxy header
 * handling, filter-provided header merging, and statically configured response headers.
 */
class HttpServerCommonHandlers {

    /**
     * Installs an early route (upload-limit order) that rejects requests whose declared
     * Content-Length exceeds the configured maximum body size (413 + connection close).
     * Requests without a declared length get the limit stored on the routing context so
     * later code can enforce it while the body streams in. No-op when no limit is set.
     */
    public static void enforceMaxBodySize(ServerLimitsConfig limits, Router httpRouteRouter) {
        if (limits.maxBodySize.isPresent()) {
            long limit = limits.maxBodySize.get().asLongValue();
            Long limitObj = limit; // boxed once; captured by the handler below
            httpRouteRouter.route().order(RouteConstants.ROUTE_ORDER_UPLOAD_LIMIT).handler(new Handler<RoutingContext>() {
                @Override
                public void handle(RoutingContext event) {
                    String lengthString = event.request().headers().get(HttpHeaderNames.CONTENT_LENGTH);
                    if (lengthString != null) {
                        long length = Long.parseLong(lengthString);
                        if (length > limit) {
                            event.response().headers().add(HttpHeaderNames.CONNECTION, "close");
                            event.response().setStatusCode(HttpResponseStatus.REQUEST_ENTITY_TOO_LARGE.code());
                            // Close the connection once the 413 response has been written.
                            event.response().endHandler(new Handler<Void>() {
                                @Override
                                public void handle(Void e) {
                                    event.request().connection().close();
                                }
                            });
                            event.response().end();
                            return;
                        }
                    } else {
                        // No Content-Length (e.g. chunked transfer): defer enforcement downstream.
                        event.put(VertxHttpRecorder.MAX_REQUEST_SIZE_KEY, limitObj);
                    }
                    event.next();
                }
            });
        }
    }

    /**
     * Wraps {@code delegate} so every request is handled on a duplicated Vert.x context.
     * Already-duplicated callers invoke the delegate directly; otherwise a fresh
     * duplicated context is created and the request dispatched onto it. The request is
     * wrapped in a {@link ResumingRequestWrapper} in both paths.
     */
    public static Handler<HttpServerRequest> enforceDuplicatedContext(Handler<HttpServerRequest> delegate) {
        return new Handler<HttpServerRequest>() {
            @Override
            public void handle(HttpServerRequest event) {
                if (!VertxContext.isOnDuplicatedContext()) {
                    Context context = VertxContext.createNewDuplicatedContext();
                    context.runOnContext(new Handler<Void>() {
                        @Override
                        public void handle(Void x) {
                            setCurrentContextSafe(true);
                            delegate.handle(new ResumingRequestWrapper(event));
                        }
                    });
                } else {
                    setCurrentContextSafe(true);
                    delegate.handle(new ResumingRequestWrapper(event));
                }
            }
        };
    }

    /**
     * Wraps {@code root} with proxy-address-forwarding support when enabled; returns
     * {@code root} unchanged otherwise. With a trusted-proxy check configured a
     * {@link ForwardedProxyHandler} is used; without one every peer is trusted.
     */
    public static Handler<HttpServerRequest> applyProxy(ProxyConfig proxyConfig, Handler<HttpServerRequest> root,
            Supplier<Vertx> vertx) {
        if (proxyConfig.proxyAddressForwarding) {
            final ForwardingProxyOptions forwardingProxyOptions = ForwardingProxyOptions.from(proxyConfig);
            final TrustedProxyCheck.TrustedProxyCheckBuilder proxyCheckBuilder =
                    forwardingProxyOptions.trustedProxyCheckBuilder;
            if (proxyCheckBuilder == null) {
                // No trusted-proxy restriction configured: accept forwarding headers from any peer.
                final TrustedProxyCheck allowAllProxyCheck = allowAll();
                return new Handler<HttpServerRequest>() {
                    @Override
                    public void handle(HttpServerRequest event) {
                        root.handle(new ForwardedServerRequestWrapper(event, forwardingProxyOptions, allowAllProxyCheck));
                    }
                };
            } else {
                return new ForwardedProxyHandler(proxyCheckBuilder, vertx, root, forwardingProxyOptions);
            }
        }
        return root;
    }

    /**
     * Merges filter-provided headers into the response: absent headers are set; for
     * existing headers the new value is appended unless already present, preserving the
     * original value order via a {@link LinkedHashSet}.
     */
    private static void addFilterHeaders(RoutingContext event, Map<String, String> headers) {
        for (var entry : headers.entrySet()) {
            String key = entry.getKey();
            String value = entry.getValue();
            MultiMap responseHeaders = event.response().headers();
            List<String> oldValues = responseHeaders.getAll(key);
            if (oldValues.isEmpty()) {
                responseHeaders.set(key, value);
            } else {
                // De-duplicate while keeping insertion order of existing values.
                var newValues = new LinkedHashSet<String>(oldValues);
                boolean added = newValues.add(value);
                if (added) {
                    responseHeaders.set(key, newValues);
                } else {
                    // Value already present: leave the response headers untouched.
                }
            }
        }
    }

    /**
     * Registers one route per configured response header. Headers without a method
     * restriction apply to every method for the path and overwrite ({@code set}) any
     * existing value; method-restricted headers are appended ({@code add}).
     */
    public static void applyHeaders(Map<String, HeaderConfig> headers, Router httpRouteRouter) {
        if (!headers.isEmpty()) {
            for (Map.Entry<String, HeaderConfig> entry : headers.entrySet()) {
                var name = entry.getKey();
                var config = entry.getValue();
                if (config.methods.isEmpty()) {
                    httpRouteRouter.route(config.path)
                            .order(RouteConstants.ROUTE_ORDER_HEADERS)
                            .handler(new Handler<RoutingContext>() {
                                @Override
                                public void handle(RoutingContext event) {
                                    event.response().headers().set(name, config.value);
                                    event.next();
                                }
                            });
                } else {
                    for (String method : config.methods.get()) {
                        httpRouteRouter.route(HttpMethod.valueOf(method.toUpperCase(Locale.ROOT)), config.path)
                                .order(RouteConstants.ROUTE_ORDER_HEADERS)
                                .handler(new Handler<RoutingContext>() {
                                    @Override
                                    public void handle(RoutingContext event) {
                                        event.response().headers().add(name, config.value);
                                        event.next();
                                    }
                                });
                    }
                }
            }
        }
    }
}
Verified that the code works without them, so they have been removed. Updated in https://github.com/ballerina-platform/ballerina-lang/pull/34427/commits/df432790bb0b85bdac8fd890f881cf4296966849.
private BLangSimpleVariable createSimpleVariable(BField field) { BLangSimpleVariable manualField = new BLangSimpleVariable(); BLangIdentifier name = new BLangIdentifier(); name.setValue(field.name.value); name.pos = field.pos; manualField.setName(name); manualField.flagSet = new HashSet<>(); manualField.flagSet.add(Flag.PUBLIC); manualField.flagSet.add(Flag.REQUIRED); manualField.flagSet.add(Flag.FIELD); manualField.setBType(field.type); manualField.pos = field.pos; manualField.setDeterminedType(field.type); manualField.symbol = field.symbol; if (field.type.tag == TypeTags.RECORD) { BLangUserDefinedType userDefinedTypeNode = (BLangUserDefinedType) TreeBuilder.createUserDefinedTypeNode(); userDefinedTypeNode.pos = field.pos; userDefinedTypeNode.setBType(field.type); userDefinedTypeNode.setDeterminedType(field.type); userDefinedTypeNode.pkgAlias = new BLangIdentifier(); if (createdTypeDefinitions.containsKey(field.type.tsymbol)) { BLangTypeDefinition createdType = createdTypeDefinitions.get(field.type.tsymbol); userDefinedTypeNode.symbol = createdType.symbol; userDefinedTypeNode.typeName = createdType.name; } manualField.typeNode = userDefinedTypeNode; return manualField; } BLangFiniteTypeNode finiteTypeNode = (BLangFiniteTypeNode) TreeBuilder.createFiniteTypeNode(); finiteTypeNode.pos = field.pos; finiteTypeNode.setBType(field.type); finiteTypeNode.setDeterminedType(field.type); finiteTypeNode.valueSpace = new ArrayList<>(); for (BLangExpression value : ((BFiniteType) field.type).getValueSpace()) { if (value.getKind() == NodeKind.NUMERIC_LITERAL) { ((BLangNumericLiteral) value).originalValue = ((BLangNumericLiteral) value).value.toString(); } finiteTypeNode.valueSpace.add(value); } manualField.typeNode = finiteTypeNode; return manualField; }
manualField.typeNode = userDefinedTypeNode;
private BLangSimpleVariable createSimpleVariable(BField field) { BLangSimpleVariable manualField = new BLangSimpleVariable(); BLangIdentifier name = new BLangIdentifier(); name.setValue(field.name.value); name.pos = field.pos; manualField.setName(name); manualField.flagSet = new HashSet<>(); manualField.flagSet.add(Flag.PUBLIC); manualField.flagSet.add(Flag.REQUIRED); manualField.flagSet.add(Flag.FIELD); manualField.setBType(field.type); manualField.pos = field.pos; manualField.setDeterminedType(field.type); manualField.symbol = field.symbol; if (field.type.tag == TypeTags.RECORD) { BLangUserDefinedType userDefinedTypeNode = (BLangUserDefinedType) TreeBuilder.createUserDefinedTypeNode(); userDefinedTypeNode.pos = field.pos; userDefinedTypeNode.setBType(field.type); userDefinedTypeNode.setDeterminedType(field.type); userDefinedTypeNode.pkgAlias = new BLangIdentifier(); if (createdTypeDefinitions.containsKey(field.type.tsymbol)) { BLangTypeDefinition createdType = createdTypeDefinitions.get(field.type.tsymbol); userDefinedTypeNode.symbol = createdType.symbol; userDefinedTypeNode.typeName = createdType.name; } manualField.typeNode = userDefinedTypeNode; return manualField; } BLangFiniteTypeNode finiteTypeNode = (BLangFiniteTypeNode) TreeBuilder.createFiniteTypeNode(); finiteTypeNode.pos = field.pos; finiteTypeNode.setBType(field.type); finiteTypeNode.setDeterminedType(field.type); finiteTypeNode.valueSpace = new ArrayList<>(); for (BLangExpression value : ((BFiniteType) field.type).getValueSpace()) { if (value.getKind() == NodeKind.NUMERIC_LITERAL) { ((BLangNumericLiteral) value).originalValue = ((BLangNumericLiteral) value).value.toString(); } finiteTypeNode.valueSpace.add(value); } manualField.typeNode = finiteTypeNode; return manualField; }
class ConstantValueResolver extends BLangNodeVisitor { private static final CompilerContext.Key<ConstantValueResolver> CONSTANT_VALUE_RESOLVER_KEY = new CompilerContext.Key<>(); private BConstantSymbol currentConstSymbol; private BLangConstantValue result; private BLangDiagnosticLog dlog; private Location currentPos; private BLangAnonymousModelHelper anonymousModelHelper; private SymbolEnv symEnv; private Names names; private SymbolTable symTable; private Types types; private PackageID pkgID; private Map<BConstantSymbol, BLangConstant> unresolvedConstants = new HashMap<>(); private Map<String, BLangConstantValue> constantMap = new HashMap<>(); private ArrayList<BConstantSymbol> resolvingConstants = new ArrayList<>(); private HashSet<BConstantSymbol> unresolvableConstants = new HashSet<>(); private HashMap<BSymbol, BLangTypeDefinition> createdTypeDefinitions = new HashMap<>(); private Map<BConstantSymbol, BType> updatedTypes = new HashMap<>(); private ConstantValueResolver(CompilerContext context) { context.put(CONSTANT_VALUE_RESOLVER_KEY, this); this.dlog = BLangDiagnosticLog.getInstance(context); this.symTable = SymbolTable.getInstance(context); this.names = Names.getInstance(context); this.anonymousModelHelper = BLangAnonymousModelHelper.getInstance(context); this.types = Types.getInstance(context); } public static ConstantValueResolver getInstance(CompilerContext context) { ConstantValueResolver constantValueResolver = context.get(CONSTANT_VALUE_RESOLVER_KEY); if (constantValueResolver == null) { constantValueResolver = new ConstantValueResolver(context); } return constantValueResolver; } public void resolve(List<BLangConstant> constants, PackageID packageID, SymbolEnv symEnv) { this.dlog.setCurrentPackageId(packageID); this.pkgID = packageID; this.symEnv = symEnv; constants.forEach(constant -> this.unresolvedConstants.put(constant.symbol, constant)); constants.forEach(constant -> constant.accept(this)); constantMap.clear(); } @Override public void 
visit(BLangConstant constant) { if (!unresolvedConstants.containsKey(constant.symbol)) { return; } BConstantSymbol tempCurrentConstSymbol = this.currentConstSymbol; this.currentConstSymbol = constant.symbol; this.resolvingConstants.add(this.currentConstSymbol); this.currentConstSymbol.value = visitExpr(constant.expr); this.resolvingConstants.remove(this.currentConstSymbol); updateSymbolType(constant); checkUniqueness(constant); unresolvedConstants.remove(this.currentConstSymbol); this.currentConstSymbol = tempCurrentConstSymbol; } @Override public void visit(BLangLiteral literal) { this.result = new BLangConstantValue(literal.value, literal.getBType()); } @Override public void visit(BLangNumericLiteral literal) { this.result = new BLangConstantValue(literal.value, literal.getBType()); } @Override public void visit(BLangConstRef constRef) { this.result = ((BConstantSymbol) constRef.symbol).value; } @Override public void visit(BLangSimpleVarRef varRef) { if (varRef.symbol == null || (varRef.symbol.tag & SymTag.CONSTANT) != SymTag.CONSTANT) { this.result = null; return; } BConstantSymbol constSymbol = (BConstantSymbol) varRef.symbol; BLangConstantValue constVal = constSymbol.value; if (constVal != null) { this.result = constVal; return; } if (this.currentConstSymbol == constSymbol) { dlog.error(varRef.pos, DiagnosticErrorCode.SELF_REFERENCE_CONSTANT, constSymbol.name); return; } if (!this.unresolvedConstants.containsKey(constSymbol)) { if (this.unresolvableConstants.contains(constSymbol)) { this.result = null; return; } this.unresolvableConstants.add(constSymbol); dlog.error(varRef.pos, DiagnosticErrorCode.CANNOT_RESOLVE_CONST, constSymbol.name.value); this.result = null; return; } if (this.resolvingConstants.contains(constSymbol)) { for (BConstantSymbol symbol : this.resolvingConstants) { this.unresolvableConstants.add(symbol); } dlog.error(varRef.pos, DiagnosticErrorCode.CONSTANT_CYCLIC_REFERENCE, this.resolvingConstants); this.result = null; return; } 
this.unresolvedConstants.get(constSymbol).accept(this); this.result = constSymbol.value; } @Override public void visit(BLangRecordLiteral recordLiteral) { Map<String, BLangConstantValue> mapConstVal = new HashMap<>(); for (RecordLiteralNode.RecordField field : recordLiteral.fields) { String key; BLangConstantValue value; if (field.isKeyValueField()) { BLangRecordLiteral.BLangRecordKeyValueField keyValuePair = (BLangRecordLiteral.BLangRecordKeyValueField) field; NodeKind nodeKind = keyValuePair.key.expr.getKind(); if (nodeKind == NodeKind.LITERAL || nodeKind == NodeKind.NUMERIC_LITERAL) { key = (String) ((BLangLiteral) keyValuePair.key.expr).value; } else if (nodeKind == NodeKind.SIMPLE_VARIABLE_REF) { key = ((BLangSimpleVarRef) keyValuePair.key.expr).variableName.value; } else { continue; } value = visitExpr(keyValuePair.valueExpr); } else if (field.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { BLangRecordLiteral.BLangRecordVarNameField varNameField = (BLangRecordLiteral.BLangRecordVarNameField) field; key = varNameField.variableName.value; value = visitExpr(varNameField); } else { BLangConstantValue spreadOpConstValue = visitExpr(((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr); if (spreadOpConstValue != null) { mapConstVal.putAll((Map<String, BLangConstantValue>) spreadOpConstValue.value); } continue; } mapConstVal.put(key, value); } this.result = new BLangConstantValue(mapConstVal, recordLiteral.getBType()); } @Override public void visit(BLangBinaryExpr binaryExpr) { BLangConstantValue lhs = visitExpr(binaryExpr.lhsExpr); BLangConstantValue rhs = visitExpr(binaryExpr.rhsExpr); this.result = calculateConstValue(lhs, rhs, binaryExpr.opKind); } public void visit(BLangGroupExpr groupExpr) { this.result = visitExpr(groupExpr.expression); } public void visit(BLangUnaryExpr unaryExpr) { BLangConstantValue value = visitExpr(unaryExpr.expr); this.result = evaluateUnaryOperator(value, unaryExpr.operator); } private BLangConstantValue 
calculateConstValue(BLangConstantValue lhs, BLangConstantValue rhs, OperatorKind kind) { if (lhs == null || rhs == null || lhs.value == null || rhs.value == null) { return new BLangConstantValue(null, this.currentConstSymbol.type); } try { switch (kind) { case ADD: return calculateAddition(lhs, rhs); case SUB: return calculateSubtract(lhs, rhs); case MUL: return calculateMultiplication(lhs, rhs); case DIV: return calculateDivision(lhs, rhs); case BITWISE_AND: return calculateBitWiseOp(lhs, rhs, (a, b) -> a & b); case BITWISE_OR: return calculateBitWiseOp(lhs, rhs, (a, b) -> a | b); case BITWISE_LEFT_SHIFT: return calculateBitWiseOp(lhs, rhs, (a, b) -> a << b); case BITWISE_RIGHT_SHIFT: return calculateBitWiseOp(lhs, rhs, (a, b) -> a >> b); case BITWISE_UNSIGNED_RIGHT_SHIFT: return calculateBitWiseOp(lhs, rhs, (a, b) -> a >>> b); case BITWISE_XOR: return calculateBitWiseOp(lhs, rhs, (a, b) -> a ^ b); default: dlog.error(currentPos, DiagnosticErrorCode.CONSTANT_EXPRESSION_NOT_SUPPORTED); } } catch (NumberFormatException nfe) { } catch (ArithmeticException ae) { dlog.error(currentPos, DiagnosticErrorCode.INVALID_CONST_EXPRESSION, ae.getMessage()); } return new BLangConstantValue(null, this.currentConstSymbol.type); } private BLangConstantValue evaluateUnaryOperator(BLangConstantValue value, OperatorKind kind) { if (value == null || value.value == null) { return new BLangConstantValue(null, this.currentConstSymbol.type); } try { switch (kind) { case ADD: return new BLangConstantValue(value.value, currentConstSymbol.type); case SUB: return calculateNegation(value); case BITWISE_COMPLEMENT: return calculateBitWiseComplement(value); case NOT: return calculateBooleanComplement(value); } } catch (ClassCastException ce) { } return new BLangConstantValue(null, this.currentConstSymbol.type); } private BLangConstantValue calculateBitWiseOp(BLangConstantValue lhs, BLangConstantValue rhs, BiFunction<Long, Long, Long> func) { switch (this.currentConstSymbol.type.tag) { case 
TypeTags.INT: Long val = func.apply((Long) lhs.value, (Long) rhs.value); return new BLangConstantValue(val, this.currentConstSymbol.type); default: dlog.error(currentPos, DiagnosticErrorCode.CONSTANT_EXPRESSION_NOT_SUPPORTED); } return new BLangConstantValue(null, this.currentConstSymbol.type); } private BLangConstantValue calculateAddition(BLangConstantValue lhs, BLangConstantValue rhs) { Object result = null; switch (this.currentConstSymbol.type.tag) { case TypeTags.INT: case TypeTags.BYTE: result = (Long) lhs.value + (Long) rhs.value; break; case TypeTags.FLOAT: result = String.valueOf(Double.parseDouble(String.valueOf(lhs.value)) + Double.parseDouble(String.valueOf(rhs.value))); break; case TypeTags.DECIMAL: BigDecimal lhsDecimal = new BigDecimal(String.valueOf(lhs.value), MathContext.DECIMAL128); BigDecimal rhsDecimal = new BigDecimal(String.valueOf(rhs.value), MathContext.DECIMAL128); BigDecimal resultDecimal = lhsDecimal.add(rhsDecimal, MathContext.DECIMAL128); result = resultDecimal.toPlainString(); break; case TypeTags.STRING: result = String.valueOf(lhs.value) + String.valueOf(rhs.value); break; } return new BLangConstantValue(result, currentConstSymbol.type); } private BLangConstantValue calculateSubtract(BLangConstantValue lhs, BLangConstantValue rhs) { Object result = null; switch (this.currentConstSymbol.type.tag) { case TypeTags.INT: case TypeTags.BYTE: result = (Long) lhs.value - (Long) rhs.value; break; case TypeTags.FLOAT: result = String.valueOf(Double.parseDouble(String.valueOf(lhs.value)) - Double.parseDouble(String.valueOf(rhs.value))); break; case TypeTags.DECIMAL: BigDecimal lhsDecimal = new BigDecimal(String.valueOf(lhs.value), MathContext.DECIMAL128); BigDecimal rhsDecimal = new BigDecimal(String.valueOf(rhs.value), MathContext.DECIMAL128); BigDecimal resultDecimal = lhsDecimal.subtract(rhsDecimal, MathContext.DECIMAL128); result = resultDecimal.toPlainString(); break; } return new BLangConstantValue(result, currentConstSymbol.type); } 
private BLangConstantValue calculateMultiplication(BLangConstantValue lhs, BLangConstantValue rhs) { Object result = null; switch (this.currentConstSymbol.type.tag) { case TypeTags.INT: case TypeTags.BYTE: result = (Long) lhs.value * (Long) rhs.value; break; case TypeTags.FLOAT: result = String.valueOf(Double.parseDouble(String.valueOf(lhs.value)) * Double.parseDouble(String.valueOf(rhs.value))); break; case TypeTags.DECIMAL: BigDecimal lhsDecimal = new BigDecimal(String.valueOf(lhs.value), MathContext.DECIMAL128); BigDecimal rhsDecimal = new BigDecimal(String.valueOf(rhs.value), MathContext.DECIMAL128); BigDecimal resultDecimal = lhsDecimal.multiply(rhsDecimal, MathContext.DECIMAL128); result = resultDecimal.toPlainString(); break; } return new BLangConstantValue(result, currentConstSymbol.type); } private BLangConstantValue calculateDivision(BLangConstantValue lhs, BLangConstantValue rhs) { Object result = null; switch (this.currentConstSymbol.type.tag) { case TypeTags.INT: case TypeTags.BYTE: result = (Long) ((Long) lhs.value / (Long) rhs.value); break; case TypeTags.FLOAT: result = String.valueOf(Double.parseDouble(String.valueOf(lhs.value)) / Double.parseDouble(String.valueOf(rhs.value))); break; case TypeTags.DECIMAL: BigDecimal lhsDecimal = new BigDecimal(String.valueOf(lhs.value), MathContext.DECIMAL128); BigDecimal rhsDecimal = new BigDecimal(String.valueOf(rhs.value), MathContext.DECIMAL128); BigDecimal resultDecimal = lhsDecimal.divide(rhsDecimal, MathContext.DECIMAL128); result = resultDecimal.toPlainString(); break; } return new BLangConstantValue(result, currentConstSymbol.type); } private BLangConstantValue calculateMod(BLangConstantValue lhs, BLangConstantValue rhs) { Object result = null; switch (this.currentConstSymbol.type.tag) { case TypeTags.INT: case TypeTags.BYTE: result = (Long) ((Long) lhs.value % (Long) rhs.value); break; case TypeTags.FLOAT: result = String.valueOf(Double.parseDouble(String.valueOf(lhs.value)) % 
Double.parseDouble(String.valueOf(rhs.value))); break; case TypeTags.DECIMAL: BigDecimal lhsDecimal = new BigDecimal(String.valueOf(lhs.value), MathContext.DECIMAL128); BigDecimal rhsDecimal = new BigDecimal(String.valueOf(rhs.value), MathContext.DECIMAL128); BigDecimal resultDecimal = lhsDecimal.remainder(rhsDecimal, MathContext.DECIMAL128); result = resultDecimal.toPlainString(); break; } return new BLangConstantValue(result, currentConstSymbol.type); } private BLangConstantValue calculateNegation(BLangConstantValue value) { Object result = null; switch (this.currentConstSymbol.type.tag) { case TypeTags.INT: result = -1 * ((Long) (value.value)); break; case TypeTags.FLOAT: result = String.valueOf(-1 * Double.parseDouble(String.valueOf(value.value))); break; case TypeTags.DECIMAL: BigDecimal valDecimal = new BigDecimal(String.valueOf(value.value), MathContext.DECIMAL128); BigDecimal negDecimal = new BigDecimal(String.valueOf(-1), MathContext.DECIMAL128); BigDecimal resultDecimal = valDecimal.multiply(negDecimal, MathContext.DECIMAL128); result = resultDecimal.toPlainString(); break; } return new BLangConstantValue(result, currentConstSymbol.type); } private BLangConstantValue calculateBitWiseComplement(BLangConstantValue value) { Object result = null; if (this.currentConstSymbol.type.tag == TypeTags.INT) { result = ~((Long) (value.value)); } return new BLangConstantValue(result, currentConstSymbol.type); } private BLangConstantValue calculateBooleanComplement(BLangConstantValue value) { Object result = null; if (this.currentConstSymbol.type.tag == TypeTags.BOOLEAN) { result = !((Boolean) (value.value)); } return new BLangConstantValue(result, currentConstSymbol.type); } private BLangConstantValue visitExpr(BLangExpression node) { if (!node.typeChecked) { return null; } switch (node.getKind()) { case LITERAL: case NUMERIC_LITERAL: case RECORD_LITERAL_EXPR: case SIMPLE_VARIABLE_REF: case BINARY_EXPR: case GROUP_EXPR: case UNARY_EXPR: BLangConstantValue prevResult = 
this.result; Location prevPos = this.currentPos; this.currentPos = node.pos; this.result = null; node.accept(this); BLangConstantValue newResult = this.result; this.result = prevResult; this.currentPos = prevPos; return newResult; default: return null; } } private void checkUniqueness(BLangConstant constant) { if (constant.symbol.kind == SymbolKind.CONSTANT) { String nameString = constant.name.value; BLangConstantValue value = constant.symbol.value; if (constantMap.containsKey(nameString)) { if (value == null) { dlog.error(constant.name.pos, DiagnosticErrorCode.ALREADY_INITIALIZED_SYMBOL, nameString); } else { BLangConstantValue lastValue = constantMap.get(nameString); if (!value.equals(lastValue)) { if (lastValue == null) { dlog.error(constant.name.pos, DiagnosticErrorCode.ALREADY_INITIALIZED_SYMBOL, nameString); } else { dlog.error(constant.name.pos, DiagnosticErrorCode.ALREADY_INITIALIZED_SYMBOL_WITH_ANOTHER, nameString, lastValue); } } } } else { constantMap.put(nameString, value); } } } private void updateSymbolType(BLangConstant constant) { BConstantSymbol symbol = constant.symbol; if (symbol.type.getKind() == TypeKind.FINITE) { updatedTypes.put(constant.symbol, constant.symbol.type); } else if (symbol.value != null) { BType singletonType = checkType(constant.expr, constant.name.value, constant, symbol.value.value, symbol.type, symbol.pos, constant.symbol.value.value); if (singletonType != null) { if (singletonType != null) { constant.symbol.literalType.flags |= Flags.READONLY; if (constant.symbol.type.getKind() == TypeKind.MAP) { BIntersectionType updatedSingletonType = cloneTypeDefinition(constant, constant.symbol, (BIntersectionType) singletonType); constant.symbol.type = updatedSingletonType; constant.expr.setBType(updatedSingletonType.effectiveType); constant.symbol.literalType = updatedSingletonType; symbol.value.type = updatedSingletonType; updatedTypes.put(constant.symbol, updatedSingletonType); return; } constant.symbol.type = singletonType; 
updatedTypes.put(constant.symbol, singletonType); } } } } private BFiniteType createFiniteType(BLangConstant constant, BLangExpression expr) { BTypeSymbol finiteTypeSymbol = Symbols.createTypeSymbol(SymTag.FINITE_TYPE, 0, Names.EMPTY, constant.symbol.pkgID, null, constant.symbol.owner, constant.symbol.pos, VIRTUAL); BFiniteType finiteType = new BFiniteType(finiteTypeSymbol); finiteType.addValue(expr); return finiteType; } private BType checkType(BLangExpression expr, String name, BLangConstant constant, Object value, BType type, Location pos, Object constValue) { if (expr != null && expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF && updatedTypes.containsKey(((BLangSimpleVarRef) expr).symbol)) { return updatedTypes.get(((BLangSimpleVarRef) expr).symbol); } switch (type.tag) { case TypeTags.INT: case TypeTags.FLOAT: case TypeTags.DECIMAL: BLangNumericLiteral numericLiteral = (BLangNumericLiteral) TreeBuilder.createNumericLiteralExpression(); return createFiniteType(constant, updateLiteral(numericLiteral, value, type, pos)); case TypeTags.BYTE: BLangNumericLiteral byteLiteral = (BLangNumericLiteral) TreeBuilder.createNumericLiteralExpression(); return createFiniteType(constant, updateLiteral(byteLiteral, value, symTable.intType, pos)); case TypeTags.STRING: case TypeTags.NIL: case TypeTags.BOOLEAN: BLangLiteral literal = (BLangLiteral) TreeBuilder.createLiteralExpression(); return createFiniteType(constant, updateLiteral(literal, value, type, pos)); case TypeTags.MAP: if (value != null) { return createRecordType(expr, name, constant, value, type, pos, constValue); } return null; default: return null; } } private BLangLiteral updateLiteral(BLangLiteral literal, Object value, BType type, Location pos) { literal.value = value; literal.isConstant = true; literal.setBType(type); literal.pos = pos; return literal; } private BField createField(BVarSymbol symbol, BType symbolType, String fieldName, Location pos) { symbol.type = symbolType; if (symbolType.getKind() == 
TypeKind.INTERSECTION) { for (BType memberType : ((BIntersectionType) symbolType).getConstituentTypes()) { if (memberType.getKind() == TypeKind.RECORD) { symbol.type = memberType; } } } BField field = new BField(Names.fromString(fieldName), pos, symbol); field.symbol.flags |= Flags.REQUIRED; return field; } private void createTypeDefinition(BRecordType originalType, Location pos) { BRecordTypeSymbol recordSymbol = (BRecordTypeSymbol) originalType.tsymbol; BTypeDefinitionSymbol typeDefinitionSymbol = Symbols.createTypeDefinitionSymbol(originalType.tsymbol.flags, originalType.tsymbol.name, pkgID, null, symEnv.scope.owner, pos, VIRTUAL); typeDefinitionSymbol.scope = new Scope(typeDefinitionSymbol); typeDefinitionSymbol.scope.define(names.fromString(typeDefinitionSymbol.name.value), typeDefinitionSymbol); originalType.tsymbol.scope = new Scope(originalType.tsymbol); for (BField field : ((HashMap<String, BField>) originalType.fields).values()) { originalType.tsymbol.scope.define(field.name, field.symbol); field.symbol.owner = recordSymbol; } typeDefinitionSymbol.type = originalType; recordSymbol.type = originalType; recordSymbol.typeDefinitionSymbol = typeDefinitionSymbol; recordSymbol.markdownDocumentation = new MarkdownDocAttachment(0); BLangRecordTypeNode recordTypeNode = TypeDefBuilderHelper.createRecordTypeNode(new ArrayList<>(), originalType, pos); populateMutableStructureFields(symTable, recordTypeNode, originalType, originalType, pos, symEnv, pkgID); BLangTypeDefinition typeDefinition = TypeDefBuilderHelper.createTypeDefinitionForTSymbol(null, typeDefinitionSymbol, recordTypeNode, symEnv); typeDefinition.pos = pos; typeDefinition.symbol.scope = new Scope(typeDefinition.symbol); typeDefinition.symbol.type = originalType; typeDefinition.flagSet = new HashSet<>(); typeDefinition.flagSet.add(Flag.PUBLIC); typeDefinition.flagSet.add(Flag.ANONYMOUS); createdTypeDefinitions.put(originalType.tsymbol, typeDefinition); } private void 
populateMutableStructureFields(SymbolTable symTable, BLangStructureTypeNode recordTypeNode, BStructureType recordType, BStructureType origStructureType, Location pos, SymbolEnv env, PackageID pkgID) { BTypeSymbol mutableStructureSymbol = recordType.tsymbol; LinkedHashMap<String, BField> fields = new LinkedHashMap<>(); for (BField origField : origStructureType.fields.values()) { BType mutableFieldType = origField.type; Name origFieldName = origField.name; BVarSymbol mutableFieldSymbol; if (mutableFieldType.tag == TypeTags.INVOKABLE && mutableFieldType.tsymbol != null) { mutableFieldSymbol = new BInvokableSymbol(origField.symbol.tag, origField.symbol.flags, origFieldName, pkgID, mutableFieldType, mutableStructureSymbol, origField.symbol.pos, SOURCE); BInvokableTypeSymbol tsymbol = (BInvokableTypeSymbol) mutableFieldType.tsymbol; BInvokableSymbol invokableSymbol = (BInvokableSymbol) mutableFieldSymbol; invokableSymbol.params = tsymbol.params == null ? null : new ArrayList<>(tsymbol.params); invokableSymbol.restParam = tsymbol.restParam; invokableSymbol.retType = tsymbol.returnType; invokableSymbol.flags = tsymbol.flags; } else if (mutableFieldType == symTable.semanticError) { mutableFieldSymbol = new BVarSymbol(origField.symbol.flags | Flags.OPTIONAL, origFieldName, pkgID, symTable.neverType, mutableStructureSymbol, origField.symbol.pos, VIRTUAL); } else { mutableFieldSymbol = new BVarSymbol(origField.symbol.flags, origFieldName, pkgID, mutableFieldType, mutableStructureSymbol, origField.symbol.pos, VIRTUAL); } String nameString = origFieldName.value; fields.put(nameString, new BField(origFieldName, null, mutableFieldSymbol)); mutableStructureSymbol.scope.define(origFieldName, mutableFieldSymbol); ((BLangRecordTypeNode) recordTypeNode).fields.add(createSimpleVariable(origField)); } recordType.fields = fields; ((BRecordType) recordType).restFieldType = new BNoType(TypeTags.NONE); if (origStructureType.tag == TypeTags.OBJECT) { return; } BLangUserDefinedType origTypeRef 
= new BLangUserDefinedType( ASTBuilderUtil.createIdentifier(pos, TypeDefBuilderHelper.getPackageAlias(env, pos.lineRange().filePath(), origStructureType.tsymbol.pkgID)), ASTBuilderUtil.createIdentifier(pos, origStructureType.tsymbol.name.value)); origTypeRef.pos = pos; origTypeRef.setBType(origStructureType); ((BLangRecordTypeNode) recordTypeNode).sealed = true; } private BLangTypeDefinition findTypeDefinition(List<BLangTypeDefinition> typeDefinitionArrayList, String name) { for (int i = typeDefinitionArrayList.size() - 1; i >= 0; i--) { BLangTypeDefinition typeDefinition = typeDefinitionArrayList.get(i); if (typeDefinition.name.value.equals(name)) { return typeDefinition; } } return null; } private BIntersectionType cloneTypeDefinition(BLangConstant constant, BConstantSymbol symbol, BIntersectionType immutableType) { BLangTypeDefinition typeDefinition = findTypeDefinition(symEnv.enclPkg.typeDefinitions, immutableType.effectiveType.tsymbol.name.value); BLangRecordTypeNode recordTypeNode = (BLangRecordTypeNode) typeDefinition.typeNode; recordTypeNode.symbol.name = symbol.name; BRecordType typeDefinitionType = (BRecordType) typeDefinition.getBType(); BTypeSymbol typeSymbol = typeDefinitionType.getIntersectionType().get().tsymbol; typeSymbol.name = symbol.name; typeSymbol.originalName = symbol.name; BLangIntersectionTypeNode typeNode = (BLangIntersectionTypeNode) TreeBuilder.createIntersectionTypeNode(); BIntersectionType intersectionType = ((BRecordType) typeDefinition.typeNode.getDeterminedType()).getIntersectionType().get(); typeNode.setBType(intersectionType); typeNode.setDeterminedType(intersectionType); typeNode.pos = symbol.pos; typeNode.constituentTypeNodes = new ArrayList<>(); Set<BType> constituentTypes = ((BRecordType) typeDefinition.typeNode.getDeterminedType()).getIntersectionType().get().getConstituentTypes(); for (BType type : constituentTypes) { if (type.getKind() == TypeKind.READONLY) { BLangValueType readonly = new BLangValueType(TypeKind.READONLY); 
readonly.setBType(type.tsymbol.type); readonly.setDeterminedType(type.tsymbol.type); readonly.pos = type.tsymbol.pos; typeNode.constituentTypeNodes.add(readonly); } else if (type.getKind() == TypeKind.RECORD) { BLangIdentifier typeName = new BLangIdentifier(); typeName.value = typeName.originalValue = type.tsymbol.name.value; typeName.pos = type.tsymbol.pos; BLangUserDefinedType userDefinedType = new BLangUserDefinedType(new BLangIdentifier(), typeName); userDefinedType.setBType(type.tsymbol.type); userDefinedType.setDeterminedType(type.tsymbol.type); userDefinedType.pos = type.tsymbol.pos; typeNode.constituentTypeNodes.add(userDefinedType); } } BTypeDefinitionSymbol typeDefinitionSymbol = Symbols.createTypeDefinitionSymbol(0, symbol.name, pkgID, null, symEnv.scope.owner, symbol.pos, VIRTUAL); typeDefinitionSymbol.scope = new Scope(typeDefinitionSymbol); typeDefinitionSymbol.scope.define( names.fromString(typeDefinitionSymbol.name.value), typeDefinitionSymbol); typeDefinitionSymbol.type = intersectionType; BLangTypeDefinition newTypeDefinition = TypeDefBuilderHelper.createTypeDefinitionForTSymbol(null, typeDefinitionSymbol, typeNode, symEnv); newTypeDefinition.symbol.type = intersectionType; newTypeDefinition.symbol.scope = new Scope(newTypeDefinition.symbol); newTypeDefinition.pos = symbol.pos; BTypeReferenceType referenceType = new BTypeReferenceType(intersectionType, intersectionType.tsymbol, Flags.READONLY); ((BTypeDefinitionSymbol) newTypeDefinition.symbol).referenceType = referenceType; constant.associatedTypeDefinition = newTypeDefinition; return intersectionType; } private BType createRecordType(BLangExpression expr, String name, BLangConstant constant, Object value, BType type, Location pos, Object constValue) { HashMap<String, BLangConstantValue> constValueMap = (HashMap<String, BLangConstantValue>) constValue; if (constValueMap.size() == 0) { return null; } for (BLangConstantValue memberValue : constValueMap.values()) { if (memberValue == null) { return 
null; } } BRecordTypeSymbol recordTypeSymbol = new BRecordTypeSymbol(SymTag.RECORD, constant.symbol.flags, Names.fromString(name), constant.symbol.pkgID, null, constant.symbol.owner, pos, VIRTUAL); recordTypeSymbol.scope = constant.symbol.scope; BRecordType recordType = new BRecordType(recordTypeSymbol); recordType.tsymbol.name = Names.fromString(anonymousModelHelper.getNextAnonymousTypeKey(pkgID)); recordType.sealed = true; recordType.restFieldType = new BNoType(TypeTags.NONE); recordTypeSymbol.type = recordType; for (RecordLiteralNode.RecordField field : ((BLangRecordLiteral) expr).fields) { String key; BVarSymbol newSymbol; if (field.isKeyValueField()) { BLangRecordLiteral.BLangRecordKeyValueField keyValuePair = (BLangRecordLiteral.BLangRecordKeyValueField) field; BLangExpression exprValueField = keyValuePair.valueExpr; if (exprValueField.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { key = keyValuePair.key.toString(); newSymbol = new BVarSymbol(constant.symbol.flags, Names.fromString(key), constant.symbol.pkgID, null, constant.symbol.owner, pos, VIRTUAL); BLangSimpleVarRef simpleVarRefExpr = (BLangSimpleVarRef) exprValueField; if (updatedTypes.containsKey(simpleVarRefExpr.symbol)) { BType resolvedType = simpleVarRefExpr.symbol.type; recordType.fields.put(key, createField(newSymbol, resolvedType, key, pos)); keyValuePair.setBType(resolvedType); if (resolvedType.getKind() != TypeKind.FINITE) { constValueMap.get(key).type = resolvedType; if (resolvedType.getKind() == TypeKind.INTERSECTION) { simpleVarRefExpr.setBType(((BIntersectionType) resolvedType).effectiveType); } } continue; } } key = keyValuePair.key.toString(); newSymbol = new BVarSymbol(constant.symbol.flags, Names.fromString(key), constant.symbol.pkgID, null, constant.symbol.owner, pos, VIRTUAL); BType newType = checkType(exprValueField, key, constant, ((BLangConstantValue) ((HashMap) value).get(key)).value, ((BLangConstantValue) ((HashMap) value).get(key)).type, pos, constValueMap.get(key).value); if 
(newType == null) { return null; } keyValuePair.setBType(newType); if (newType.getKind() != TypeKind.FINITE) { constValueMap.get(key).type = newType; if (newType.getKind() == TypeKind.INTERSECTION) { exprValueField.setBType(((BIntersectionType) newType).effectiveType); } } recordType.fields.put(key, createField(newSymbol, newType, key, pos)); } else if (field.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { BLangRecordLiteral.BLangRecordVarNameField varNameField = (BLangRecordLiteral.BLangRecordVarNameField) field; key = varNameField.variableName.value; newSymbol = new BVarSymbol(constant.symbol.flags, Names.fromString(key), constant.symbol.pkgID, null, constant.symbol.owner, pos, VIRTUAL); BType resolvedType = varNameField.symbol.type; varNameField.setBType(resolvedType); recordType.fields.put(key, createField(newSymbol, resolvedType, key, pos)); continue; } else { BLangExpression exprSpreadField = ((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr; if (exprSpreadField.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { BLangSimpleVarRef simpleVarRef = (BLangSimpleVarRef) exprSpreadField; if (updatedTypes.containsKey(simpleVarRef.symbol)) { BRecordType resolvedType = (BRecordType) ((BIntersectionType) simpleVarRef.symbol.type).effectiveType; exprSpreadField.setBType(resolvedType); for (String spreadFieldKeys : ((HashMap<String, BField>) resolvedType.fields).keySet()) { newSymbol = new BVarSymbol(constant.symbol.flags, Names.fromString(spreadFieldKeys), constant.symbol.pkgID, null, constant.symbol.owner, pos, VIRTUAL); BType spreadFieldType = resolvedType.fields.get(spreadFieldKeys).type; recordType.fields.put(spreadFieldKeys, createField(newSymbol, spreadFieldType, spreadFieldKeys, pos)); } } continue; } } } createTypeDefinition(recordType, pos); BIntersectionType intersectionType = ImmutableTypeCloner.getImmutableIntersectionType(pos, types, recordType, symEnv, symTable, anonymousModelHelper, names, new HashSet<>()); return intersectionType; } }
class ConstantValueResolver extends BLangNodeVisitor { private static final CompilerContext.Key<ConstantValueResolver> CONSTANT_VALUE_RESOLVER_KEY = new CompilerContext.Key<>(); private BConstantSymbol currentConstSymbol; private BLangConstantValue result; private BLangDiagnosticLog dlog; private Location currentPos; private BLangAnonymousModelHelper anonymousModelHelper; private SymbolEnv symEnv; private Names names; private SymbolTable symTable; private Types types; private PackageID pkgID; private Map<BConstantSymbol, BLangConstant> unresolvedConstants = new HashMap<>(); private Map<String, BLangConstantValue> constantMap = new HashMap<>(); private ArrayList<BConstantSymbol> resolvingConstants = new ArrayList<>(); private HashSet<BConstantSymbol> unresolvableConstants = new HashSet<>(); private HashMap<BSymbol, BLangTypeDefinition> createdTypeDefinitions = new HashMap<>(); private Map<BConstantSymbol, BType> updatedTypes = new HashMap<>(); private ConstantValueResolver(CompilerContext context) { context.put(CONSTANT_VALUE_RESOLVER_KEY, this); this.dlog = BLangDiagnosticLog.getInstance(context); this.symTable = SymbolTable.getInstance(context); this.names = Names.getInstance(context); this.anonymousModelHelper = BLangAnonymousModelHelper.getInstance(context); this.types = Types.getInstance(context); } public static ConstantValueResolver getInstance(CompilerContext context) { ConstantValueResolver constantValueResolver = context.get(CONSTANT_VALUE_RESOLVER_KEY); if (constantValueResolver == null) { constantValueResolver = new ConstantValueResolver(context); } return constantValueResolver; } public void resolve(List<BLangConstant> constants, PackageID packageID, SymbolEnv symEnv) { this.dlog.setCurrentPackageId(packageID); this.pkgID = packageID; this.symEnv = symEnv; constants.forEach(constant -> this.unresolvedConstants.put(constant.symbol, constant)); constants.forEach(constant -> constant.accept(this)); constantMap.clear(); } @Override public void 
visit(BLangConstant constant) { if (!unresolvedConstants.containsKey(constant.symbol)) { return; } BConstantSymbol tempCurrentConstSymbol = this.currentConstSymbol; this.currentConstSymbol = constant.symbol; this.resolvingConstants.add(this.currentConstSymbol); this.currentConstSymbol.value = visitExpr(constant.expr); this.resolvingConstants.remove(this.currentConstSymbol); updateSymbolType(constant); checkUniqueness(constant); unresolvedConstants.remove(this.currentConstSymbol); this.currentConstSymbol = tempCurrentConstSymbol; } @Override public void visit(BLangLiteral literal) { this.result = new BLangConstantValue(literal.value, literal.getBType()); } @Override public void visit(BLangNumericLiteral literal) { this.result = new BLangConstantValue(literal.value, literal.getBType()); } @Override public void visit(BLangConstRef constRef) { this.result = ((BConstantSymbol) constRef.symbol).value; } @Override public void visit(BLangSimpleVarRef varRef) { if (varRef.symbol == null || (varRef.symbol.tag & SymTag.CONSTANT) != SymTag.CONSTANT) { this.result = null; return; } BConstantSymbol constSymbol = (BConstantSymbol) varRef.symbol; BLangConstantValue constVal = constSymbol.value; if (constVal != null) { this.result = constVal; return; } if (this.currentConstSymbol == constSymbol) { dlog.error(varRef.pos, DiagnosticErrorCode.SELF_REFERENCE_CONSTANT, constSymbol.name); return; } if (!this.unresolvedConstants.containsKey(constSymbol)) { if (this.unresolvableConstants.contains(constSymbol)) { this.result = null; return; } this.unresolvableConstants.add(constSymbol); dlog.error(varRef.pos, DiagnosticErrorCode.CANNOT_RESOLVE_CONST, constSymbol.name.value); this.result = null; return; } if (this.resolvingConstants.contains(constSymbol)) { for (BConstantSymbol symbol : this.resolvingConstants) { this.unresolvableConstants.add(symbol); } dlog.error(varRef.pos, DiagnosticErrorCode.CONSTANT_CYCLIC_REFERENCE, this.resolvingConstants); this.result = null; return; } 
this.unresolvedConstants.get(constSymbol).accept(this); this.result = constSymbol.value; } @Override public void visit(BLangRecordLiteral recordLiteral) { Map<String, BLangConstantValue> mapConstVal = new HashMap<>(); for (RecordLiteralNode.RecordField field : recordLiteral.fields) { String key; BLangConstantValue value; if (field.isKeyValueField()) { BLangRecordLiteral.BLangRecordKeyValueField keyValuePair = (BLangRecordLiteral.BLangRecordKeyValueField) field; NodeKind nodeKind = keyValuePair.key.expr.getKind(); if (nodeKind == NodeKind.LITERAL || nodeKind == NodeKind.NUMERIC_LITERAL) { key = (String) ((BLangLiteral) keyValuePair.key.expr).value; } else if (nodeKind == NodeKind.SIMPLE_VARIABLE_REF) { key = ((BLangSimpleVarRef) keyValuePair.key.expr).variableName.value; } else { continue; } value = visitExpr(keyValuePair.valueExpr); } else if (field.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { BLangRecordLiteral.BLangRecordVarNameField varNameField = (BLangRecordLiteral.BLangRecordVarNameField) field; key = varNameField.variableName.value; value = visitExpr(varNameField); } else { BLangConstantValue spreadOpConstValue = visitExpr(((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr); if (spreadOpConstValue != null) { mapConstVal.putAll((Map<String, BLangConstantValue>) spreadOpConstValue.value); } continue; } mapConstVal.put(key, value); } this.result = new BLangConstantValue(mapConstVal, recordLiteral.getBType()); } @Override public void visit(BLangBinaryExpr binaryExpr) { BLangConstantValue lhs = visitExpr(binaryExpr.lhsExpr); BLangConstantValue rhs = visitExpr(binaryExpr.rhsExpr); this.result = calculateConstValue(lhs, rhs, binaryExpr.opKind); } public void visit(BLangGroupExpr groupExpr) { this.result = visitExpr(groupExpr.expression); } public void visit(BLangUnaryExpr unaryExpr) { BLangConstantValue value = visitExpr(unaryExpr.expr); this.result = evaluateUnaryOperator(value, unaryExpr.operator); } private BLangConstantValue 
calculateConstValue(BLangConstantValue lhs, BLangConstantValue rhs, OperatorKind kind) { if (lhs == null || rhs == null || lhs.value == null || rhs.value == null) { return new BLangConstantValue(null, this.currentConstSymbol.type); } try { switch (kind) { case ADD: return calculateAddition(lhs, rhs); case SUB: return calculateSubtract(lhs, rhs); case MUL: return calculateMultiplication(lhs, rhs); case DIV: return calculateDivision(lhs, rhs); case BITWISE_AND: return calculateBitWiseOp(lhs, rhs, (a, b) -> a & b); case BITWISE_OR: return calculateBitWiseOp(lhs, rhs, (a, b) -> a | b); case BITWISE_LEFT_SHIFT: return calculateBitWiseOp(lhs, rhs, (a, b) -> a << b); case BITWISE_RIGHT_SHIFT: return calculateBitWiseOp(lhs, rhs, (a, b) -> a >> b); case BITWISE_UNSIGNED_RIGHT_SHIFT: return calculateBitWiseOp(lhs, rhs, (a, b) -> a >>> b); case BITWISE_XOR: return calculateBitWiseOp(lhs, rhs, (a, b) -> a ^ b); default: dlog.error(currentPos, DiagnosticErrorCode.CONSTANT_EXPRESSION_NOT_SUPPORTED); } } catch (NumberFormatException nfe) { } catch (ArithmeticException ae) { dlog.error(currentPos, DiagnosticErrorCode.INVALID_CONST_EXPRESSION, ae.getMessage()); } return new BLangConstantValue(null, this.currentConstSymbol.type); } private BLangConstantValue evaluateUnaryOperator(BLangConstantValue value, OperatorKind kind) { if (value == null || value.value == null) { return new BLangConstantValue(null, this.currentConstSymbol.type); } try { switch (kind) { case ADD: return new BLangConstantValue(value.value, currentConstSymbol.type); case SUB: return calculateNegation(value); case BITWISE_COMPLEMENT: return calculateBitWiseComplement(value); case NOT: return calculateBooleanComplement(value); } } catch (ClassCastException ce) { } return new BLangConstantValue(null, this.currentConstSymbol.type); } private BLangConstantValue calculateBitWiseOp(BLangConstantValue lhs, BLangConstantValue rhs, BiFunction<Long, Long, Long> func) { switch (this.currentConstSymbol.type.tag) { case 
TypeTags.INT: Long val = func.apply((Long) lhs.value, (Long) rhs.value); return new BLangConstantValue(val, this.currentConstSymbol.type); default: dlog.error(currentPos, DiagnosticErrorCode.CONSTANT_EXPRESSION_NOT_SUPPORTED); } return new BLangConstantValue(null, this.currentConstSymbol.type); } private BLangConstantValue calculateAddition(BLangConstantValue lhs, BLangConstantValue rhs) { Object result = null; switch (this.currentConstSymbol.type.tag) { case TypeTags.INT: case TypeTags.BYTE: result = (Long) lhs.value + (Long) rhs.value; break; case TypeTags.FLOAT: result = String.valueOf(Double.parseDouble(String.valueOf(lhs.value)) + Double.parseDouble(String.valueOf(rhs.value))); break; case TypeTags.DECIMAL: BigDecimal lhsDecimal = new BigDecimal(String.valueOf(lhs.value), MathContext.DECIMAL128); BigDecimal rhsDecimal = new BigDecimal(String.valueOf(rhs.value), MathContext.DECIMAL128); BigDecimal resultDecimal = lhsDecimal.add(rhsDecimal, MathContext.DECIMAL128); result = resultDecimal.toPlainString(); break; case TypeTags.STRING: result = String.valueOf(lhs.value) + String.valueOf(rhs.value); break; } return new BLangConstantValue(result, currentConstSymbol.type); } private BLangConstantValue calculateSubtract(BLangConstantValue lhs, BLangConstantValue rhs) { Object result = null; switch (this.currentConstSymbol.type.tag) { case TypeTags.INT: case TypeTags.BYTE: result = (Long) lhs.value - (Long) rhs.value; break; case TypeTags.FLOAT: result = String.valueOf(Double.parseDouble(String.valueOf(lhs.value)) - Double.parseDouble(String.valueOf(rhs.value))); break; case TypeTags.DECIMAL: BigDecimal lhsDecimal = new BigDecimal(String.valueOf(lhs.value), MathContext.DECIMAL128); BigDecimal rhsDecimal = new BigDecimal(String.valueOf(rhs.value), MathContext.DECIMAL128); BigDecimal resultDecimal = lhsDecimal.subtract(rhsDecimal, MathContext.DECIMAL128); result = resultDecimal.toPlainString(); break; } return new BLangConstantValue(result, currentConstSymbol.type); } 
private BLangConstantValue calculateMultiplication(BLangConstantValue lhs, BLangConstantValue rhs) { Object result = null; switch (this.currentConstSymbol.type.tag) { case TypeTags.INT: case TypeTags.BYTE: result = (Long) lhs.value * (Long) rhs.value; break; case TypeTags.FLOAT: result = String.valueOf(Double.parseDouble(String.valueOf(lhs.value)) * Double.parseDouble(String.valueOf(rhs.value))); break; case TypeTags.DECIMAL: BigDecimal lhsDecimal = new BigDecimal(String.valueOf(lhs.value), MathContext.DECIMAL128); BigDecimal rhsDecimal = new BigDecimal(String.valueOf(rhs.value), MathContext.DECIMAL128); BigDecimal resultDecimal = lhsDecimal.multiply(rhsDecimal, MathContext.DECIMAL128); result = resultDecimal.toPlainString(); break; } return new BLangConstantValue(result, currentConstSymbol.type); } private BLangConstantValue calculateDivision(BLangConstantValue lhs, BLangConstantValue rhs) { Object result = null; switch (this.currentConstSymbol.type.tag) { case TypeTags.INT: case TypeTags.BYTE: result = (Long) ((Long) lhs.value / (Long) rhs.value); break; case TypeTags.FLOAT: result = String.valueOf(Double.parseDouble(String.valueOf(lhs.value)) / Double.parseDouble(String.valueOf(rhs.value))); break; case TypeTags.DECIMAL: BigDecimal lhsDecimal = new BigDecimal(String.valueOf(lhs.value), MathContext.DECIMAL128); BigDecimal rhsDecimal = new BigDecimal(String.valueOf(rhs.value), MathContext.DECIMAL128); BigDecimal resultDecimal = lhsDecimal.divide(rhsDecimal, MathContext.DECIMAL128); result = resultDecimal.toPlainString(); break; } return new BLangConstantValue(result, currentConstSymbol.type); } private BLangConstantValue calculateMod(BLangConstantValue lhs, BLangConstantValue rhs) { Object result = null; switch (this.currentConstSymbol.type.tag) { case TypeTags.INT: case TypeTags.BYTE: result = (Long) ((Long) lhs.value % (Long) rhs.value); break; case TypeTags.FLOAT: result = String.valueOf(Double.parseDouble(String.valueOf(lhs.value)) % 
Double.parseDouble(String.valueOf(rhs.value))); break; case TypeTags.DECIMAL: BigDecimal lhsDecimal = new BigDecimal(String.valueOf(lhs.value), MathContext.DECIMAL128); BigDecimal rhsDecimal = new BigDecimal(String.valueOf(rhs.value), MathContext.DECIMAL128); BigDecimal resultDecimal = lhsDecimal.remainder(rhsDecimal, MathContext.DECIMAL128); result = resultDecimal.toPlainString(); break; } return new BLangConstantValue(result, currentConstSymbol.type); } private BLangConstantValue calculateNegation(BLangConstantValue value) { Object result = null; switch (this.currentConstSymbol.type.tag) { case TypeTags.INT: result = -1 * ((Long) (value.value)); break; case TypeTags.FLOAT: result = String.valueOf(-1 * Double.parseDouble(String.valueOf(value.value))); break; case TypeTags.DECIMAL: BigDecimal valDecimal = new BigDecimal(String.valueOf(value.value), MathContext.DECIMAL128); BigDecimal negDecimal = new BigDecimal(String.valueOf(-1), MathContext.DECIMAL128); BigDecimal resultDecimal = valDecimal.multiply(negDecimal, MathContext.DECIMAL128); result = resultDecimal.toPlainString(); break; } return new BLangConstantValue(result, currentConstSymbol.type); } private BLangConstantValue calculateBitWiseComplement(BLangConstantValue value) { Object result = null; if (this.currentConstSymbol.type.tag == TypeTags.INT) { result = ~((Long) (value.value)); } return new BLangConstantValue(result, currentConstSymbol.type); } private BLangConstantValue calculateBooleanComplement(BLangConstantValue value) { Object result = null; if (this.currentConstSymbol.type.tag == TypeTags.BOOLEAN) { result = !((Boolean) (value.value)); } return new BLangConstantValue(result, currentConstSymbol.type); } private BLangConstantValue visitExpr(BLangExpression node) { if (!node.typeChecked) { return null; } switch (node.getKind()) { case LITERAL: case NUMERIC_LITERAL: case RECORD_LITERAL_EXPR: case SIMPLE_VARIABLE_REF: case BINARY_EXPR: case GROUP_EXPR: case UNARY_EXPR: BLangConstantValue prevResult = 
this.result; Location prevPos = this.currentPos; this.currentPos = node.pos; this.result = null; node.accept(this); BLangConstantValue newResult = this.result; this.result = prevResult; this.currentPos = prevPos; return newResult; default: return null; } } private void checkUniqueness(BLangConstant constant) { if (constant.symbol.kind == SymbolKind.CONSTANT) { String nameString = constant.name.value; BLangConstantValue value = constant.symbol.value; if (constantMap.containsKey(nameString)) { if (value == null) { dlog.error(constant.name.pos, DiagnosticErrorCode.ALREADY_INITIALIZED_SYMBOL, nameString); } else { BLangConstantValue lastValue = constantMap.get(nameString); if (!value.equals(lastValue)) { if (lastValue == null) { dlog.error(constant.name.pos, DiagnosticErrorCode.ALREADY_INITIALIZED_SYMBOL, nameString); } else { dlog.error(constant.name.pos, DiagnosticErrorCode.ALREADY_INITIALIZED_SYMBOL_WITH_ANOTHER, nameString, lastValue); } } } } else { constantMap.put(nameString, value); } } } private void updateSymbolType(BLangConstant constant) { BConstantSymbol symbol = constant.symbol; if (symbol.type.getKind() == TypeKind.FINITE) { updatedTypes.put(constant.symbol, constant.symbol.type); } else if (symbol.value != null) { BType resolvedType = checkType(constant.expr, constant, symbol.value.value, symbol.type, symbol.pos, constant.symbol.value.value); if (resolvedType != null) { if (resolvedType != null) { constant.symbol.literalType.flags |= Flags.READONLY; if (constant.symbol.type.getKind() == TypeKind.MAP && resolvedType.getKind() == TypeKind.INTERSECTION) { addassociatedTypeDefinition(constant, (BIntersectionType) resolvedType); constant.symbol.type = resolvedType; constant.expr.setBType(((BIntersectionType) resolvedType).effectiveType); constant.symbol.literalType = resolvedType; symbol.value.type = resolvedType; updatedTypes.put(constant.symbol, resolvedType); return; } constant.expr.setBType(resolvedType); constant.symbol.type = resolvedType; 
updatedTypes.put(constant.symbol, resolvedType); } } } } private BFiniteType createFiniteType(BLangConstant constant, BLangExpression expr) { BTypeSymbol finiteTypeSymbol = Symbols.createTypeSymbol(SymTag.FINITE_TYPE, constant.symbol.flags, Names.EMPTY, constant.symbol.pkgID, null, constant.symbol.owner, constant.symbol.pos, VIRTUAL); BFiniteType finiteType = new BFiniteType(finiteTypeSymbol); finiteType.addValue(expr); return finiteType; } private BType checkType(BLangExpression expr, BLangConstant constant, Object value, BType type, Location pos, Object constValue) { if (expr != null && expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF && updatedTypes.containsKey(((BLangSimpleVarRef) expr).symbol)) { return updatedTypes.get(((BLangSimpleVarRef) expr).symbol); } switch (type.tag) { case TypeTags.INT: case TypeTags.FLOAT: case TypeTags.DECIMAL: BLangNumericLiteral numericLiteral = (BLangNumericLiteral) TreeBuilder.createNumericLiteralExpression(); return createFiniteType(constant, updateLiteral(numericLiteral, value, type, pos)); case TypeTags.BYTE: BLangNumericLiteral byteLiteral = (BLangNumericLiteral) TreeBuilder.createNumericLiteralExpression(); return createFiniteType(constant, updateLiteral(byteLiteral, value, symTable.intType, pos)); case TypeTags.STRING: case TypeTags.NIL: case TypeTags.BOOLEAN: BLangLiteral literal = (BLangLiteral) TreeBuilder.createLiteralExpression(); return createFiniteType(constant, updateLiteral(literal, value, type, pos)); case TypeTags.MAP: if (value != null) { return createRecordType(expr, constant, value, pos, constValue); } return null; default: return null; } } private BLangLiteral updateLiteral(BLangLiteral literal, Object value, BType type, Location pos) { literal.value = value; literal.isConstant = true; literal.setBType(type); literal.pos = pos; return literal; } private BField createField(BVarSymbol symbol, BType symbolType, String fieldName, Location pos) { symbol.type = symbolType; if (symbolType.getKind() == 
TypeKind.INTERSECTION) { for (BType memberType : ((BIntersectionType) symbolType).getConstituentTypes()) { if (memberType.getKind() == TypeKind.RECORD) { symbol.type = memberType; } } } BField field = new BField(Names.fromString(fieldName), pos, symbol); field.symbol.flags |= Flags.REQUIRED; return field; } private void createTypeDefinition(BRecordType originalType, Location pos) { BRecordTypeSymbol recordSymbol = (BRecordTypeSymbol) originalType.tsymbol; BTypeDefinitionSymbol typeDefinitionSymbol = Symbols.createTypeDefinitionSymbol(originalType.tsymbol.flags, originalType.tsymbol.name, pkgID, null, symEnv.scope.owner, pos, VIRTUAL); typeDefinitionSymbol.scope = new Scope(typeDefinitionSymbol); typeDefinitionSymbol.scope.define(names.fromString(typeDefinitionSymbol.name.value), typeDefinitionSymbol); originalType.tsymbol.scope = new Scope(originalType.tsymbol); for (BField field : ((HashMap<String, BField>) originalType.fields).values()) { originalType.tsymbol.scope.define(field.name, field.symbol); field.symbol.owner = recordSymbol; } typeDefinitionSymbol.type = originalType; recordSymbol.type = originalType; recordSymbol.typeDefinitionSymbol = typeDefinitionSymbol; recordSymbol.markdownDocumentation = new MarkdownDocAttachment(0); BLangRecordTypeNode recordTypeNode = TypeDefBuilderHelper.createRecordTypeNode(new ArrayList<>(), originalType, pos); populateMutableStructureFields(symTable, recordTypeNode, originalType, originalType, pos, symEnv, pkgID); BLangTypeDefinition typeDefinition = TypeDefBuilderHelper.createTypeDefinitionForTSymbol(null, typeDefinitionSymbol, recordTypeNode, symEnv); typeDefinition.pos = pos; typeDefinition.symbol.scope = new Scope(typeDefinition.symbol); typeDefinition.symbol.type = originalType; typeDefinition.flagSet = new HashSet<>(); typeDefinition.flagSet.add(Flag.PUBLIC); typeDefinition.flagSet.add(Flag.ANONYMOUS); createdTypeDefinitions.put(originalType.tsymbol, typeDefinition); } private void 
populateMutableStructureFields(SymbolTable symTable, BLangStructureTypeNode recordTypeNode, BStructureType recordType, BStructureType origStructureType, Location pos, SymbolEnv env, PackageID pkgID) { BTypeSymbol mutableStructureSymbol = recordType.tsymbol; LinkedHashMap<String, BField> fields = new LinkedHashMap<>(); for (BField origField : origStructureType.fields.values()) { BType mutableFieldType = origField.type; Name origFieldName = origField.name; BVarSymbol mutableFieldSymbol; if (mutableFieldType.tag == TypeTags.INVOKABLE && mutableFieldType.tsymbol != null) { mutableFieldSymbol = new BInvokableSymbol(origField.symbol.tag, origField.symbol.flags, origFieldName, pkgID, mutableFieldType, mutableStructureSymbol, origField.symbol.pos, SOURCE); BInvokableTypeSymbol tsymbol = (BInvokableTypeSymbol) mutableFieldType.tsymbol; BInvokableSymbol invokableSymbol = (BInvokableSymbol) mutableFieldSymbol; invokableSymbol.params = tsymbol.params == null ? null : new ArrayList<>(tsymbol.params); invokableSymbol.restParam = tsymbol.restParam; invokableSymbol.retType = tsymbol.returnType; invokableSymbol.flags = tsymbol.flags; } else if (mutableFieldType == symTable.semanticError) { mutableFieldSymbol = new BVarSymbol(origField.symbol.flags | Flags.OPTIONAL, origFieldName, pkgID, symTable.neverType, mutableStructureSymbol, origField.symbol.pos, VIRTUAL); } else { mutableFieldSymbol = new BVarSymbol(origField.symbol.flags, origFieldName, pkgID, mutableFieldType, mutableStructureSymbol, origField.symbol.pos, VIRTUAL); } String nameString = origFieldName.value; fields.put(nameString, new BField(origFieldName, null, mutableFieldSymbol)); mutableStructureSymbol.scope.define(origFieldName, mutableFieldSymbol); ((BLangRecordTypeNode) recordTypeNode).fields.add(createSimpleVariable(origField)); } recordType.fields = fields; ((BRecordType) recordType).restFieldType = new BNoType(TypeTags.NONE); if (origStructureType.tag == TypeTags.OBJECT) { return; } BLangUserDefinedType origTypeRef 
= new BLangUserDefinedType( ASTBuilderUtil.createIdentifier(pos, TypeDefBuilderHelper.getPackageAlias(env, pos.lineRange().filePath(), origStructureType.tsymbol.pkgID)), ASTBuilderUtil.createIdentifier(pos, origStructureType.tsymbol.name.value)); origTypeRef.pos = pos; origTypeRef.setBType(origStructureType); ((BLangRecordTypeNode) recordTypeNode).sealed = true; } private BLangTypeDefinition findTypeDefinition(List<BLangTypeDefinition> typeDefinitionArrayList, String name) { for (int i = typeDefinitionArrayList.size() - 1; i >= 0; i--) { BLangTypeDefinition typeDefinition = typeDefinitionArrayList.get(i); if (typeDefinition.name.value.equals(name)) { return typeDefinition; } } return null; } private void addassociatedTypeDefinition(BLangConstant constant, BIntersectionType immutableType) { BLangTypeDefinition typeDefinition = findTypeDefinition(symEnv.enclPkg.typeDefinitions, immutableType.effectiveType.tsymbol.name.value); constant.associatedTypeDefinition = typeDefinition; } private BType createRecordType(BLangExpression expr, BLangConstant constant, Object value, Location pos, Object constValue) { if (expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { return expr.getBType(); } HashMap<String, BLangConstantValue> constValueMap = (HashMap<String, BLangConstantValue>) constValue; if (constValueMap.size() == 0) { return null; } for (BLangConstantValue memberValue : constValueMap.values()) { if (memberValue == null) { return null; } } Name genName = Names.fromString(anonymousModelHelper.getNextAnonymousTypeKey(pkgID)); BRecordTypeSymbol recordTypeSymbol = new BRecordTypeSymbol(SymTag.RECORD, constant.symbol.flags | Flags.ANONYMOUS, genName, constant.symbol.pkgID, null, constant.symbol.owner, pos, VIRTUAL); recordTypeSymbol.scope = constant.symbol.scope; BRecordType recordType = new BRecordType(recordTypeSymbol); recordType.tsymbol.name = genName; recordType.sealed = true; recordType.restFieldType = new BNoType(TypeTags.NONE); recordTypeSymbol.type = recordType; for 
(RecordLiteralNode.RecordField field : ((BLangRecordLiteral) expr).fields) { String key; BVarSymbol newSymbol; if (field.isKeyValueField()) { BLangRecordLiteral.BLangRecordKeyValueField keyValuePair = (BLangRecordLiteral.BLangRecordKeyValueField) field; BLangExpression exprValueField = keyValuePair.valueExpr; if (exprValueField.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { key = keyValuePair.key.toString(); newSymbol = new BVarSymbol(constant.symbol.flags, Names.fromString(key), constant.symbol.pkgID, null, constant.symbol.owner, pos, VIRTUAL); BLangSimpleVarRef simpleVarRefExpr = (BLangSimpleVarRef) exprValueField; if (updatedTypes.containsKey(simpleVarRefExpr.symbol)) { BType resolvedType = simpleVarRefExpr.symbol.type; recordType.fields.put(key, createField(newSymbol, resolvedType, key, pos)); keyValuePair.setBType(resolvedType); if (resolvedType.getKind() != TypeKind.FINITE) { constValueMap.get(key).type = resolvedType; if (resolvedType.getKind() == TypeKind.INTERSECTION) { simpleVarRefExpr.setBType(((BIntersectionType) resolvedType).effectiveType); } } continue; } } key = keyValuePair.key.toString(); newSymbol = new BVarSymbol(constant.symbol.flags, Names.fromString(key), constant.symbol.pkgID, null, constant.symbol.owner, pos, VIRTUAL); BType newType = checkType(exprValueField, constant, ((BLangConstantValue) ((HashMap) value).get(key)).value, ((BLangConstantValue) ((HashMap) value).get(key)).type, pos, constValueMap.get(key).value); if (newType == null) { return null; } keyValuePair.setBType(newType); if (newType.getKind() != TypeKind.FINITE) { constValueMap.get(key).type = newType; if (newType.getKind() == TypeKind.INTERSECTION) { exprValueField.setBType(((BIntersectionType) newType).effectiveType); } } recordType.fields.put(key, createField(newSymbol, newType, key, pos)); } else if (field.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { BLangRecordLiteral.BLangRecordVarNameField varNameField = (BLangRecordLiteral.BLangRecordVarNameField) field; key = 
varNameField.variableName.value; newSymbol = new BVarSymbol(constant.symbol.flags, Names.fromString(key), constant.symbol.pkgID, null, constant.symbol.owner, pos, VIRTUAL); BType resolvedType = varNameField.symbol.type; varNameField.setBType(resolvedType); recordType.fields.put(key, createField(newSymbol, resolvedType, key, pos)); continue; } else { BLangExpression exprSpreadField = ((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr; if (exprSpreadField.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { BLangSimpleVarRef simpleVarRef = (BLangSimpleVarRef) exprSpreadField; if (updatedTypes.containsKey(simpleVarRef.symbol)) { BRecordType resolvedType = (BRecordType) ((BIntersectionType) simpleVarRef.symbol.type).effectiveType; exprSpreadField.setBType(resolvedType); for (String spreadFieldKeys : ((HashMap<String, BField>) resolvedType.fields).keySet()) { newSymbol = new BVarSymbol(constant.symbol.flags, Names.fromString(spreadFieldKeys), constant.symbol.pkgID, null, constant.symbol.owner, pos, VIRTUAL); BType spreadFieldType = resolvedType.fields.get(spreadFieldKeys).type; recordType.fields.put(spreadFieldKeys, createField(newSymbol, spreadFieldType, spreadFieldKeys, pos)); } } continue; } } } createTypeDefinition(recordType, pos); BIntersectionType intersectionType = ImmutableTypeCloner.getImmutableIntersectionType(pos, types, recordType, symEnv, symTable, anonymousModelHelper, names, new HashSet<>()); return intersectionType; } }
We could also, extend `ObjectOps` class with `CommonOps` similar to other places and get rid of overrides to union, intersect, and diff() here.
private static boolean objectBddIsEmpty(Context cx, Bdd b) { return bddEveryPositive(cx, b, null, null, MappingOps::mappingFormulaIsEmpty); }
return bddEveryPositive(cx, b, null, null, MappingOps::mappingFormulaIsEmpty);
private static boolean objectBddIsEmpty(Context cx, Bdd b) { return bddEveryPositive(cx, b, null, null, MappingOps::mappingFormulaIsEmpty); }
class ObjectOps implements BasicTypeOps { @Override public SubtypeData union(SubtypeData t1, SubtypeData t2) { return bddSubtypeUnion(t1, t2); } @Override public SubtypeData intersect(SubtypeData t1, SubtypeData t2) { return bddSubtypeIntersect(t1, t2); } @Override public SubtypeData diff(SubtypeData t1, SubtypeData t2) { return bddSubtypeDiff(t1, t2); } @Override public SubtypeData complement(SubtypeData t) { return objectSubTypeComplement(t); } @Override public boolean isEmpty(Context cx, SubtypeData t) { return objectSubTypeIsEmpty(cx, t); } private static boolean objectSubTypeIsEmpty(Context cx, SubtypeData t) { return memoSubtypeIsEmpty(cx, cx.mappingMemo, ObjectOps::objectBddIsEmpty, (Bdd) t); } private SubtypeData objectSubTypeComplement(SubtypeData t) { return bddSubtypeDiff(MAPPING_SUBTYPE_OBJECT, t); } }
class ObjectOps extends CommonOps implements BasicTypeOps { @Override public SubtypeData complement(SubtypeData t) { return objectSubTypeComplement(t); } @Override public boolean isEmpty(Context cx, SubtypeData t) { return objectSubTypeIsEmpty(cx, t); } private static boolean objectSubTypeIsEmpty(Context cx, SubtypeData t) { return memoSubtypeIsEmpty(cx, cx.mappingMemo, ObjectOps::objectBddIsEmpty, (Bdd) t); } private SubtypeData objectSubTypeComplement(SubtypeData t) { return bddSubtypeDiff(MAPPING_SUBTYPE_OBJECT, t); } }
repeated code with line 360-390
private ScalarOperator createNewAggFunction(ScalarOperator arg0, ScalarOperator arg1, Type returnType) { if (arg0.isConstant()) { AggregateFunction countFunction = AggregateFunction.createBuiltin( FunctionSet.COUNT, Lists.newArrayList(arg1.getType()), Type.BIGINT, Type.BIGINT, false, true, true); List<ScalarOperator> countArguments = Lists.newArrayList(); if (arg1.isNullable()) { ColumnRefOperator newColumnRef; if (arg1.isColumnRef()) { newColumnRef = (ColumnRefOperator) arg1; if (!oldPreAggProjections.containsKey(arg1)) { newPreAggProjections.put((ColumnRefOperator) arg1, arg1); } } else if (commonArguments.containsKey(arg1)) { newColumnRef = commonArguments.get(arg1); } else { newColumnRef = columnRefFactory.create(arg1, arg1.getType(), arg1.isNullable()); newPreAggProjections.put(newColumnRef, arg1); commonArguments.put(arg1, newColumnRef); } countArguments.add(newColumnRef); } CallOperator newAggFunction = new CallOperator(FunctionSet.COUNT, Type.BIGINT, countArguments, countFunction); ColumnRefOperator newAggRef = columnRefFactory.create( newAggFunction, newAggFunction.getType(), true); newAggregations.put(newAggRef, newAggFunction); ScalarOperator countOperator = null; ScalarOperator constOperator = null; PrimitiveType primitiveType = returnType.getPrimitiveType(); switch (primitiveType) { case DECIMAL128: countOperator = new CastOperator( ScalarType.createDecimalV3Type(PrimitiveType.DECIMAL128, 18, 0), newAggRef, false); int precision = ((ScalarType) arg0.getType()).getScalarPrecision(); int scale = ((ScalarType) arg0.getType()).getScalarScale(); Type constType = ScalarType.createDecimalV3Type(PrimitiveType.DECIMAL128, precision, scale); constOperator = arg0.isConstantNull() ? ConstantOperator.createNull(constType) : ConstantOperator.createDecimal(new BigDecimal(arg0.toString()), constType); break; case DOUBLE: countOperator = new CastOperator(returnType, newAggRef, false); constOperator = arg0.isConstantNull() ? 
ConstantOperator.createNull(Type.DOUBLE) : ConstantOperator.createDouble(((ConstantOperator) arg0).getDouble()); break; case LARGEINT: countOperator = new CastOperator(returnType, newAggRef, false); constOperator = arg0.isConstantNull() ? ConstantOperator.createNull(Type.LARGEINT) : ConstantOperator.createLargeInt(new BigInteger(arg0.toString())); break; case BIGINT: countOperator = newAggRef; constOperator = arg0.isConstantNull() ? ConstantOperator.createNull(Type.BIGINT) : ConstantOperator.createBigint(Long.parseLong(arg0.toString())); break; default: Preconditions.checkState(false, "unexpected sum function result type"); } Function multiplyFn = GlobalStateMgr.getCurrentState().getFunction( new Function(new FunctionName(FunctionSet.MULTIPLY), Lists.newArrayList(countOperator.getType(), constOperator.getType()), returnType, false), Function.CompareMode.IS_IDENTICAL); Preconditions.checkState(multiplyFn != null, "cannot find function multiply"); CallOperator newMultiply = new CallOperator(FunctionSet.MULTIPLY, returnType, Lists.newArrayList( countOperator, constOperator), multiplyFn); return newMultiply; } else { ColumnRefOperator newColumnRef; if (arg0.isColumnRef()) { newColumnRef = (ColumnRefOperator) arg0; if (!oldPreAggProjections.containsKey(arg0)) { newPreAggProjections.put((ColumnRefOperator) arg0, arg0); } } else if (commonArguments.containsKey(arg0)) { newColumnRef = commonArguments.get(arg0); } else { newColumnRef = columnRefFactory.create(arg0, arg0.getType(), arg0.isNullable()); newPreAggProjections.put(newColumnRef, arg0); commonArguments.put(arg0, newColumnRef); } Type sumFunctionType = returnType; if (returnType.isDecimalV3()) { int argScale = ((ScalarType) arg0.getType()).getScalarScale(); sumFunctionType = ScalarType.createDecimalV3Type(PrimitiveType.DECIMAL128, 38, argScale); } AggregateFunction sumFunction = AggregateFunction.createBuiltin( FunctionSet.SUM, Lists.newArrayList(arg0.getType()), sumFunctionType, sumFunctionType, false, true, 
false); CallOperator newAggFunction = new CallOperator(FunctionSet.SUM, sumFunctionType, Lists.newArrayList(newColumnRef), sumFunction); ColumnRefOperator newAggRef = columnRefFactory.create( newAggFunction, newAggFunction.getType(), true); newAggregations.put(newAggRef, newAggFunction); return newAggRef; } }
if (arg1.isColumnRef()) {
private ScalarOperator createNewAggFunction(ScalarOperator arg0, ScalarOperator arg1, Type returnType) { if (arg0.isConstant()) { AggregateFunction countFunction = AggregateFunction.createBuiltin( FunctionSet.COUNT, Lists.newArrayList(arg1.getType()), Type.BIGINT, Type.BIGINT, false, true, true); List<ScalarOperator> countArguments = Lists.newArrayList(); if (arg1.isNullable()) { countArguments.add(createColumnRefForAggArgument(arg1)); } CallOperator newAggFunction = new CallOperator(FunctionSet.COUNT, Type.BIGINT, countArguments, countFunction); ColumnRefOperator newAggRef = columnRefFactory.create( newAggFunction, newAggFunction.getType(), true); newAggregations.put(newAggRef, newAggFunction); ScalarOperator countOperator = null; ScalarOperator constOperator = null; PrimitiveType primitiveType = returnType.getPrimitiveType(); switch (primitiveType) { case DECIMAL128: countOperator = new CastOperator( ScalarType.createDecimalV3Type(PrimitiveType.DECIMAL128, 18, 0), newAggRef, false); int precision = ((ScalarType) arg0.getType()).getScalarPrecision(); int scale = ((ScalarType) arg0.getType()).getScalarScale(); Type constType = ScalarType.createDecimalV3Type(PrimitiveType.DECIMAL128, precision, scale); constOperator = arg0.isConstantNull() ? ConstantOperator.createNull(constType) : ConstantOperator.createDecimal(new BigDecimal(arg0.toString()), constType); break; case DOUBLE: countOperator = new CastOperator(returnType, newAggRef, false); constOperator = arg0.isConstantNull() ? ConstantOperator.createNull(Type.DOUBLE) : ConstantOperator.createDouble(((ConstantOperator) arg0).getDouble()); break; case LARGEINT: countOperator = new CastOperator(returnType, newAggRef, false); constOperator = arg0.isConstantNull() ? ConstantOperator.createNull(Type.LARGEINT) : ConstantOperator.createLargeInt(new BigInteger(arg0.toString())); break; case BIGINT: countOperator = newAggRef; constOperator = arg0.isConstantNull() ? 
ConstantOperator.createNull(Type.BIGINT) : ConstantOperator.createBigint(Long.parseLong(arg0.toString())); break; default: Preconditions.checkState(false, "unexpected sum function result type"); } Function multiplyFn = GlobalStateMgr.getCurrentState().getFunction( new Function(new FunctionName(FunctionSet.MULTIPLY), Lists.newArrayList(countOperator.getType(), constOperator.getType()), returnType, false), Function.CompareMode.IS_IDENTICAL); Preconditions.checkState(multiplyFn != null, "cannot find function multiply"); CallOperator newMultiply = new CallOperator(FunctionSet.MULTIPLY, returnType, Lists.newArrayList( countOperator, constOperator), multiplyFn); return newMultiply; } else { ColumnRefOperator newColumnRef = createColumnRefForAggArgument(arg0); Type sumFunctionType = returnType; if (returnType.isDecimalV3()) { int argScale = ((ScalarType) arg0.getType()).getScalarScale(); sumFunctionType = ScalarType.createDecimalV3Type(PrimitiveType.DECIMAL128, 38, argScale); } AggregateFunction sumFunction = AggregateFunction.createBuiltin( FunctionSet.SUM, Lists.newArrayList(arg0.getType()), sumFunctionType, sumFunctionType, false, true, false); CallOperator newAggFunction = new CallOperator(FunctionSet.SUM, sumFunctionType, Lists.newArrayList(newColumnRef), sumFunction); ColumnRefOperator newAggRef = columnRefFactory.create( newAggFunction, newAggFunction.getType(), true); newAggregations.put(newAggRef, newAggFunction); return newAggRef; } }
class AggFunctionRewriter { public Map<ColumnRefOperator, CallOperator> oldAggregations; public Map<ColumnRefOperator, ScalarOperator> oldPreAggProjections; public ColumnRefFactory columnRefFactory; public Map<ColumnRefOperator, ScalarOperator> newPostAggProjections; public Map<ColumnRefOperator, CallOperator> newAggregations; public Map<ColumnRefOperator, ScalarOperator> newPreAggProjections; public Map<ColumnRefOperator, CallOperator> reservedAggregations; private Map<ScalarOperator, ColumnRefOperator> commonArguments; public AggFunctionRewriter(Map<ColumnRefOperator, CallOperator> oldAggregations, Map<ColumnRefOperator, ScalarOperator> oldPreAggProjections, ColumnRefFactory columnRefFactory, Map<ColumnRefOperator, ScalarOperator> newPreAggProjections, Map<ColumnRefOperator, ScalarOperator> newPostAggProjections) { this.oldAggregations = oldAggregations; this.oldPreAggProjections = oldPreAggProjections; this.columnRefFactory = columnRefFactory; this.newPostAggProjections = newPostAggProjections; this.newAggregations = Maps.newHashMap(); this.newPreAggProjections = newPreAggProjections; this.reservedAggregations = Maps.newHashMap(); this.commonArguments = Maps.newHashMap(); } public ScalarOperator rewrite(ColumnRefOperator op, CallOperator aggFunction) { if (aggFunction.isAggregate() && !aggFunction.isDistinct() && aggFunction.getFnName().equals(FunctionSet.SUM) && !aggFunction.getType().isDecimalV2()) { ScalarOperator aggExpr = aggFunction.getArguments().get(0); if (aggExpr.isColumnRef()) { ColumnRefOperator aggColumnRef = (ColumnRefOperator) aggExpr; ScalarOperator aggColumnExpr = oldPreAggProjections.get(aggColumnRef); if (aggColumnExpr.getOpType() == OperatorType.CALL) { CallOperator callOperator = (CallOperator) aggColumnExpr; String functionName = callOperator.getFnName(); if (functionName.equals(FunctionSet.ADD) || functionName.equals(FunctionSet.SUBTRACT)) { List<ScalarOperator> arguments = callOperator.getArguments(); 
Preconditions.checkState(arguments.size() == 2); if (arguments.get(0).isConstant() || arguments.get(1).isConstant()) { ScalarOperator newArg0 = rewriteCastForAggExpr(arguments.get(0)); ScalarOperator newArg1 = rewriteCastForAggExpr(arguments.get(1)); ScalarOperator agg0 = createNewAggFunction( newArg0, newArg1, aggFunction.getType()); ScalarOperator agg1 = createNewAggFunction( newArg1, newArg0, aggFunction.getType()); Function newFn = GlobalStateMgr.getCurrentState().getFunction( new Function(new FunctionName(functionName), Lists.newArrayList(agg0.getType(), agg1.getType()), aggFunction.getType(), false), Function.CompareMode.IS_IDENTICAL); Preconditions.checkState(newFn != null, "cannot find function " + functionName); CallOperator newOperator = new CallOperator(functionName, aggFunction.getType(), Lists.newArrayList(agg0, agg1), newFn); return newOperator; } } } } } reservedAggregations.put(op, oldAggregations.get(op)); return op; } private ScalarOperator rewriteCastForAggExpr(ScalarOperator expr) { if (expr instanceof CastOperator) { CastOperator castOperator = (CastOperator) expr; Type toType = castOperator.getType(); ScalarOperator fromOperator = castOperator.getChild(0); Type fromType = fromOperator.getType(); if (ScalarType.isFullyCompatible(fromType, toType)) { AggregateFunction possibleAggregateFunction = AggregateFunction.createBuiltin( FunctionSet.SUM, Lists.newArrayList(fromType), toType, toType, false, true, false); if (GlobalStateMgr.getCurrentState().getFunction( possibleAggregateFunction, Function.CompareMode.IS_IDENTICAL) != null) { return rewriteCastForAggExpr(fromOperator); } } } return expr; } }
class AggFunctionRewriter { public Map<ColumnRefOperator, CallOperator> oldAggregations; public Map<ColumnRefOperator, ScalarOperator> oldPreAggProjections; public ColumnRefFactory columnRefFactory; public Map<ColumnRefOperator, ScalarOperator> newPostAggProjections; public Map<ColumnRefOperator, CallOperator> newAggregations; public Map<ColumnRefOperator, ScalarOperator> newPreAggProjections; public Map<ColumnRefOperator, CallOperator> reservedAggregations; private Map<ScalarOperator, ColumnRefOperator> commonArguments; public AggFunctionRewriter(Map<ColumnRefOperator, CallOperator> oldAggregations, Map<ColumnRefOperator, ScalarOperator> oldPreAggProjections, ColumnRefFactory columnRefFactory, Map<ColumnRefOperator, ScalarOperator> newPreAggProjections, Map<ColumnRefOperator, ScalarOperator> newPostAggProjections) { this.oldAggregations = oldAggregations; this.oldPreAggProjections = oldPreAggProjections; this.columnRefFactory = columnRefFactory; this.newPostAggProjections = newPostAggProjections; this.newAggregations = Maps.newHashMap(); this.newPreAggProjections = newPreAggProjections; this.reservedAggregations = Maps.newHashMap(); this.commonArguments = Maps.newHashMap(); } public ScalarOperator rewrite(ColumnRefOperator op, CallOperator aggFunction) { if (aggFunction.isAggregate() && !aggFunction.isDistinct() && aggFunction.getFnName().equals(FunctionSet.SUM) && !aggFunction.getType().isDecimalV2()) { ScalarOperator aggExpr = aggFunction.getArguments().get(0); if (aggExpr.isColumnRef()) { ColumnRefOperator aggColumnRef = (ColumnRefOperator) aggExpr; ScalarOperator aggColumnExpr = oldPreAggProjections.get(aggColumnRef); if (aggColumnExpr.getOpType() == OperatorType.CALL) { CallOperator callOperator = (CallOperator) aggColumnExpr; String functionName = callOperator.getFnName(); if (functionName.equals(FunctionSet.ADD) || functionName.equals(FunctionSet.SUBTRACT)) { List<ScalarOperator> arguments = callOperator.getArguments(); 
Preconditions.checkState(arguments.size() == 2); if (arguments.get(0).isConstant() || arguments.get(1).isConstant()) { ScalarOperator newArg0 = rewriteCastForAggExpr(arguments.get(0)); ScalarOperator newArg1 = rewriteCastForAggExpr(arguments.get(1)); ScalarOperator agg0 = createNewAggFunction( newArg0, newArg1, aggFunction.getType()); ScalarOperator agg1 = createNewAggFunction( newArg1, newArg0, aggFunction.getType()); Function newFn = GlobalStateMgr.getCurrentState().getFunction( new Function(new FunctionName(functionName), Lists.newArrayList(agg0.getType(), agg1.getType()), aggFunction.getType(), false), Function.CompareMode.IS_IDENTICAL); Preconditions.checkState(newFn != null, "cannot find function " + functionName); CallOperator newOperator = new CallOperator(functionName, aggFunction.getType(), Lists.newArrayList(agg0, agg1), newFn); return newOperator; } } } } } reservedAggregations.put(op, oldAggregations.get(op)); return op; } private ScalarOperator rewriteCastForAggExpr(ScalarOperator expr) { if (expr instanceof CastOperator) { CastOperator castOperator = (CastOperator) expr; Type toType = castOperator.getType(); ScalarOperator fromOperator = castOperator.getChild(0); Type fromType = fromOperator.getType(); if (ScalarType.isFullyCompatible(fromType, toType)) { AggregateFunction possibleAggregateFunction = AggregateFunction.createBuiltin( FunctionSet.SUM, Lists.newArrayList(fromType), toType, toType, false, true, false); if (GlobalStateMgr.getCurrentState().getFunction( possibleAggregateFunction, Function.CompareMode.IS_IDENTICAL) != null) { return rewriteCastForAggExpr(fromOperator); } } } return expr; } private ColumnRefOperator createColumnRefForAggArgument(ScalarOperator arg) { ColumnRefOperator newColumnRef; if (arg.isColumnRef()) { newColumnRef = (ColumnRefOperator) arg; if (!oldPreAggProjections.containsKey(arg)) { newPreAggProjections.put((ColumnRefOperator) arg, arg); } } else if (commonArguments.containsKey(arg)) { newColumnRef = 
commonArguments.get(arg); } else { newColumnRef = columnRefFactory.create(arg, arg.getType(), arg.isNullable()); newPreAggProjections.put(newColumnRef, arg); commonArguments.put(arg, newColumnRef); } return newColumnRef; } }
Do we want to keep that?
public void testThatTheKogitoApplicationRuns() throws MavenInvocationException, IOException { testDir = initProject("projects/simple-kogito", "projects/project-classic-run-kogito"); run("-e"); await() .pollDelay(1, TimeUnit.SECONDS) .atMost(1, TimeUnit.MINUTES).until(() -> getHttpResponse("/persons").equals("[]")); }
run("-e");
public void testThatTheKogitoApplicationRuns() throws MavenInvocationException, IOException { testDir = initProject("projects/simple-kogito", "projects/project-classic-run-kogito"); run("-e"); await() .pollDelay(1, TimeUnit.SECONDS) .atMost(1, TimeUnit.MINUTES).until(() -> getHttpResponse("/persons").equals("[]")); }
class KogitoDevModeIT extends RunAndCheckMojoTestBase { @Test }
class KogitoDevModeIT extends RunAndCheckMojoTestBase { @Test }
<!--thread_id:cc_182896317_t; commit:1fd121da1417624b3b84f0300648251da64b9cb5; resolved:1--> <!--section:context-quote--> > **jkff** wrote: > Ditto, reference the JIRA here too if any <!--section:body--> Done.
public void testIncomingConnection() throws Exception { StreamObserver<BeamFnApi.InstructionRequest> requestObserver = mock(StreamObserver.class); StreamObserver<BeamFnApi.InstructionResponse> responseObserver = controlService.control(requestObserver); InstructionRequestHandler client = pool.getSource().get(null); String id = "fakeInstruction"; CompletionStage<BeamFnApi.InstructionResponse> responseFuture = client.handle(BeamFnApi.InstructionRequest.newBuilder().setInstructionId(id).build()); verify(requestObserver).onNext(any(BeamFnApi.InstructionRequest.class)); assertThat(MoreFutures.isDone(responseFuture), is(false)); responseObserver.onNext( BeamFnApi.InstructionResponse.newBuilder().setInstructionId(id).build()); MoreFutures.get(responseFuture); }
public void testIncomingConnection() throws Exception { StreamObserver<BeamFnApi.InstructionRequest> requestObserver = mock(StreamObserver.class); StreamObserver<BeamFnApi.InstructionResponse> responseObserver = controlService.control(requestObserver); InstructionRequestHandler client = pool.getSource().take("", Duration.ofSeconds(2)); String id = "fakeInstruction"; CompletionStage<BeamFnApi.InstructionResponse> responseFuture = client.handle(BeamFnApi.InstructionRequest.newBuilder().setInstructionId(id).build()); verify(requestObserver).onNext(any(BeamFnApi.InstructionRequest.class)); assertThat(MoreFutures.isDone(responseFuture), is(false)); responseObserver.onNext( BeamFnApi.InstructionResponse.newBuilder().setInstructionId(id).build()); MoreFutures.get(responseFuture); }
class FnApiControlClientPoolServiceTest { private final ControlClientPool pool = MapControlClientPool.withTimeout(Duration.ofSeconds(10)); private final FnApiControlClientPoolService controlService = FnApiControlClientPoolService.offeringClientsToPool( pool.getSink(), GrpcContextHeaderAccessorProvider.getHeaderAccessor()); private GrpcFnServer<FnApiControlClientPoolService> server; private BeamFnControlGrpc.BeamFnControlStub stub; @Before public void setup() throws IOException { server = GrpcFnServer.allocatePortAndCreateFor(controlService, InProcessServerFactory.create()); stub = BeamFnControlGrpc.newStub( InProcessChannelBuilder.forName(server.getApiServiceDescriptor().getUrl()).build()); } @After public void teardown() throws Exception { server.close(); } @Test @Test public void testCloseCompletesClients() throws Exception { CountDownLatch latch = new CountDownLatch(1); AtomicBoolean sawComplete = new AtomicBoolean(); stub.control( new StreamObserver<InstructionRequest>() { @Override public void onNext(InstructionRequest value) { Assert.fail("Should never see a request"); } @Override public void onError(Throwable t) { latch.countDown(); } @Override public void onCompleted() { sawComplete.set(true); latch.countDown(); } }); pool.getSource().get(null); server.close(); latch.await(); assertThat(sawComplete.get(), is(true)); } }
class FnApiControlClientPoolServiceTest { private final ControlClientPool pool = MapControlClientPool.create(); private final FnApiControlClientPoolService controlService = FnApiControlClientPoolService.offeringClientsToPool( pool.getSink(), GrpcContextHeaderAccessorProvider.getHeaderAccessor()); private GrpcFnServer<FnApiControlClientPoolService> server; private BeamFnControlGrpc.BeamFnControlStub stub; @Before public void setup() throws IOException { server = GrpcFnServer.allocatePortAndCreateFor(controlService, InProcessServerFactory.create()); stub = BeamFnControlGrpc.newStub( InProcessChannelBuilder.forName(server.getApiServiceDescriptor().getUrl()).build()); } @After public void teardown() throws Exception { server.close(); } @Test @Test public void testCloseCompletesClients() throws Exception { CountDownLatch latch = new CountDownLatch(1); AtomicBoolean sawComplete = new AtomicBoolean(); stub.control( new StreamObserver<InstructionRequest>() { @Override public void onNext(InstructionRequest value) { Assert.fail("Should never see a request"); } @Override public void onError(Throwable t) { latch.countDown(); } @Override public void onCompleted() { sawComplete.set(true); latch.countDown(); } }); pool.getSource().take("", Duration.ofSeconds(2)); server.close(); latch.await(); assertThat(sawComplete.get(), is(true)); } }
Right, in the case of more than one session error, if we see multiple entries of the first log and at least one of the second log entries, then it's good. It looks like one of the session errors resulting in the processor restart may cancel the merge operator, which could, in turn, cancel some enqueued errors to bounded-pool that are yet-to-run; this is why I said at least one-second log entry (statement is based on code scanning, I didn't try to simulate it. Something to double check in future when we investigate the effect of handling amp-error in _RetryWhen_ level vs Processor level, this [thread](https://github.com/Azure/azure-sdk-for-java/pull/33386#discussion_r1100499816))
Mono<ServiceBusReceiveLink> getActiveLink() { if (this.receiveLink != null) { return Mono.just(this.receiveLink); } return Mono.defer(() -> createSessionReceiveLink() .flatMap(link -> link.getEndpointStates() .filter(e -> e == AmqpEndpointState.ACTIVE) .next() .switchIfEmpty(Mono.error(() -> new AmqpException(true, "Session receive link completed without being active", null))) .timeout(operationTimeout) .then(Mono.just(link)))) .retryWhen(Retry.from(retrySignals -> retrySignals.flatMap(signal -> { final Throwable failure = signal.failure(); LOGGER.atInfo() .addKeyValue(ENTITY_PATH_KEY, entityPath) .addKeyValue("attempt", signal.totalRetriesInARow()) .log("Error occurred while getting unnamed session.", failure); if (isDisposed.get()) { return Mono.<Long>error(new AmqpException(false, "SessionManager is already disposed.", failure, getErrorContext())); } else if (failure instanceof TimeoutException) { return Mono.delay(SLEEP_DURATION_ON_ACCEPT_SESSION_EXCEPTION); } else if (failure instanceof AmqpException && ((AmqpException) failure).getErrorCondition() == AmqpErrorCondition.TIMEOUT_ERROR) { return Mono.delay(SLEEP_DURATION_ON_ACCEPT_SESSION_EXCEPTION); } else { return Mono.<Long>error(failure).publishOn(Schedulers.boundedElastic()); } }))); }
return Mono.<Long>error(failure).publishOn(Schedulers.boundedElastic());
Mono<ServiceBusReceiveLink> getActiveLink() { if (this.receiveLink != null) { return Mono.just(this.receiveLink); } return Mono.defer(() -> createSessionReceiveLink() .flatMap(link -> link.getEndpointStates() .filter(e -> e == AmqpEndpointState.ACTIVE) .next() .switchIfEmpty(Mono.error(() -> new AmqpException(true, "Session receive link completed without being active", null))) .timeout(operationTimeout) .then(Mono.just(link)))) .retryWhen(Retry.from(retrySignals -> retrySignals.flatMap(signal -> { final Throwable failure = signal.failure(); LOGGER.atInfo() .addKeyValue(ENTITY_PATH_KEY, entityPath) .addKeyValue("attempt", signal.totalRetriesInARow()) .log("Error occurred while getting unnamed session.", failure); if (isDisposed.get()) { return Mono.<Long>error(new AmqpException(false, "SessionManager is already disposed.", failure, getErrorContext())); } else if (failure instanceof TimeoutException) { return Mono.delay(SLEEP_DURATION_ON_ACCEPT_SESSION_EXCEPTION); } else if (failure instanceof AmqpException && ((AmqpException) failure).getErrorCondition() == AmqpErrorCondition.TIMEOUT_ERROR) { return Mono.delay(SLEEP_DURATION_ON_ACCEPT_SESSION_EXCEPTION); } else { final long id = System.nanoTime(); LOGGER.atInfo() .addKeyValue(TRACKING_ID_KEY, id) .log("Unable to acquire new session.", failure); return Mono.<Long>error(failure) .publishOn(Schedulers.boundedElastic()) .doOnError(e -> LOGGER.atInfo() .addKeyValue(TRACKING_ID_KEY, id) .log("Emitting the error signal received for session acquire attempt.", e) ); } }))); }
class ServiceBusSessionManager implements AutoCloseable { private static final Duration SLEEP_DURATION_ON_ACCEPT_SESSION_EXCEPTION = Duration.ofMinutes(1); private static final ClientLogger LOGGER = new ClientLogger(ServiceBusSessionManager.class); private final String entityPath; private final MessagingEntityType entityType; private final ReceiverOptions receiverOptions; private final ServiceBusReceiveLink receiveLink; private final ServiceBusConnectionProcessor connectionProcessor; private final Duration operationTimeout; private final MessageSerializer messageSerializer; private final String identifier; private final AtomicBoolean isDisposed = new AtomicBoolean(); private final AtomicBoolean isStarted = new AtomicBoolean(); private final List<Scheduler> schedulers; private final Deque<Scheduler> availableSchedulers = new ConcurrentLinkedDeque<>(); private final Duration maxSessionLockRenewDuration; /** * SessionId to receiver mapping. */ private final ConcurrentHashMap<String, ServiceBusSessionReceiver> sessionReceivers = new ConcurrentHashMap<>(); private final EmitterProcessor<Flux<ServiceBusMessageContext>> processor; private final FluxSink<Flux<ServiceBusMessageContext>> sessionReceiveSink; private volatile Flux<ServiceBusMessageContext> receiveFlux; ServiceBusSessionManager(String entityPath, MessagingEntityType entityType, ServiceBusConnectionProcessor connectionProcessor, MessageSerializer messageSerializer, ReceiverOptions receiverOptions, ServiceBusReceiveLink receiveLink, String identifier) { this.entityPath = entityPath; this.entityType = entityType; this.receiverOptions = receiverOptions; this.connectionProcessor = connectionProcessor; this.operationTimeout = connectionProcessor.getRetryOptions().getTryTimeout(); this.messageSerializer = messageSerializer; this.maxSessionLockRenewDuration = receiverOptions.getMaxLockRenewDuration(); this.identifier = identifier; final int numberOfSchedulers = receiverOptions.isRollingSessionReceiver() ? 
receiverOptions.getMaxConcurrentSessions() : 1; final List<Scheduler> schedulerList = IntStream.range(0, numberOfSchedulers) .mapToObj(index -> Schedulers.newBoundedElastic(DEFAULT_BOUNDED_ELASTIC_SIZE, DEFAULT_BOUNDED_ELASTIC_QUEUESIZE, "receiver-" + index)) .collect(Collectors.toList()); this.schedulers = Collections.unmodifiableList(schedulerList); this.availableSchedulers.addAll(this.schedulers); this.processor = EmitterProcessor.create(numberOfSchedulers, false); this.sessionReceiveSink = processor.sink(); this.receiveLink = receiveLink; } ServiceBusSessionManager(String entityPath, MessagingEntityType entityType, ServiceBusConnectionProcessor connectionProcessor, MessageSerializer messageSerializer, ReceiverOptions receiverOptions, String identifier) { this(entityPath, entityType, connectionProcessor, messageSerializer, receiverOptions, null, identifier); } /** * Gets the link name with the matching {@code sessionId}. * * @param sessionId Session id to get link name for. * * @return The name of the link, or {@code null} if there is no open link with that {@code sessionId}. */ String getLinkName(String sessionId) { final ServiceBusSessionReceiver receiver = sessionReceivers.get(sessionId); return receiver != null ? receiver.getLinkName() : null; } /** * Gets the identifier of the instance of {@link ServiceBusSessionManager}. * * @return The identifier that can identify the instance of {@link ServiceBusSessionManager}. */ public String getIdentifier() { return this.identifier; } /** * Gets the state of a session given its identifier. * * @param sessionId Identifier of session to get. * * @return The session state or an empty Mono if there is no state set for the session. * @throws IllegalStateException if the receiver is a non-session receiver. 
*/ Mono<byte[]> getSessionState(String sessionId) { return validateParameter(sessionId, "sessionId", "getSessionState").then( getManagementNode().flatMap(channel -> { final ServiceBusSessionReceiver receiver = sessionReceivers.get(sessionId); final String associatedLinkName = receiver != null ? receiver.getLinkName() : null; return channel.getSessionState(sessionId, associatedLinkName); })); } /** * Gets a stream of messages from different sessions. * * @return A Flux of messages merged from different sessions. */ Flux<ServiceBusMessageContext> receive() { if (!isStarted.getAndSet(true)) { this.sessionReceiveSink.onRequest(this::onSessionRequest); if (!receiverOptions.isRollingSessionReceiver()) { receiveFlux = getSession(schedulers.get(0), false); } else { receiveFlux = Flux.merge(processor, receiverOptions.getMaxConcurrentSessions()); } } return receiveFlux; } /** * Renews the session lock. * * @param sessionId Identifier of session to get. * * @return The next expiration time for the session lock. * @throws IllegalStateException if the receiver is a non-session receiver. */ Mono<OffsetDateTime> renewSessionLock(String sessionId) { return validateParameter(sessionId, "sessionId", "renewSessionLock").then( getManagementNode().flatMap(channel -> { final ServiceBusSessionReceiver receiver = sessionReceivers.get(sessionId); final String associatedLinkName = receiver != null ? receiver.getLinkName() : null; return channel.renewSessionLock(sessionId, associatedLinkName).handle((offsetDateTime, sink) -> { if (receiver != null) { receiver.setSessionLockedUntil(offsetDateTime); } sink.next(offsetDateTime); }); })); } /** * Tries to update the message disposition on a session aware receive link. * * @return {@code true} if the {@code lockToken} was updated on receive link. {@code false} otherwise. This means * there isn't an open link with that {@code sessionId}. 
*/ Mono<Boolean> updateDisposition(String lockToken, String sessionId, DispositionStatus dispositionStatus, Map<String, Object> propertiesToModify, String deadLetterReason, String deadLetterDescription, ServiceBusTransactionContext transactionContext) { final String operation = "updateDisposition"; return Mono.when( validateParameter(lockToken, "lockToken", operation), validateParameter(lockToken, "lockToken", operation), validateParameter(sessionId, "'sessionId'", operation)).then( Mono.defer(() -> { final ServiceBusSessionReceiver receiver = sessionReceivers.get(sessionId); if (receiver == null || !receiver.containsLockToken(lockToken)) { return Mono.just(false); } final DeliveryState deliveryState = MessageUtils.getDeliveryState(dispositionStatus, deadLetterReason, deadLetterDescription, propertiesToModify, transactionContext); return receiver.updateDisposition(lockToken, deliveryState).thenReturn(true); })); } @Override public void close() { if (isDisposed.getAndSet(true)) { return; } final List<Mono<Void>> closeables = sessionReceivers.values().stream() .map(receiver -> receiver.closeAsync()) .collect(Collectors.toList()); Mono.when(closeables).block(operationTimeout); sessionReceiveSink.complete(); for (Scheduler scheduler : schedulers) { scheduler.dispose(); } } private AmqpErrorContext getErrorContext() { return new SessionErrorContext(connectionProcessor.getFullyQualifiedNamespace(), entityPath); } /** * Creates an session receive link. * * @return A Mono that completes with an session receive link. */ private Mono<ServiceBusReceiveLink> createSessionReceiveLink() { final String sessionId = receiverOptions.getSessionId(); final String linkName = (sessionId != null) ? sessionId : StringUtil.getRandomString("session-"); return connectionProcessor .flatMap(connection -> { return connection.createReceiveLink(linkName, entityPath, receiverOptions.getReceiveMode(), null, entityType, identifier, sessionId); }); } /** * Gets an active unnamed session link. 
* * @return A Mono that completes when an unnamed session becomes available. * @throws AmqpException if the session manager is already disposed. */ /** * Gets the next available unnamed session with the given receive options and publishes its contents on the given * {@code scheduler}. * * @param scheduler Scheduler to coordinate received methods on. * @param disposeOnIdle true to dispose receiver when it idles; false otherwise. * @return A Mono that completes with an unnamed session receiver. */ private Flux<ServiceBusMessageContext> getSession(Scheduler scheduler, boolean disposeOnIdle) { return getActiveLink().flatMap(link -> link.getSessionId() .map(sessionId -> sessionReceivers.compute(sessionId, (key, existing) -> { if (existing != null) { return existing; } return new ServiceBusSessionReceiver(link, messageSerializer, connectionProcessor.getRetryOptions(), receiverOptions.getPrefetchCount(), disposeOnIdle, scheduler, this::renewSessionLock, maxSessionLockRenewDuration); }))) .flatMapMany(sessionReceiver -> sessionReceiver.receive().doFinally(signalType -> { LOGGER.atVerbose() .addKeyValue(SESSION_ID_KEY, sessionReceiver.getSessionId()) .log("Closing session receiver."); availableSchedulers.push(scheduler); sessionReceivers.remove(sessionReceiver.getSessionId()); sessionReceiver.closeAsync().subscribe(); if (receiverOptions.isRollingSessionReceiver()) { onSessionRequest(1L); } })); } private Mono<ServiceBusManagementNode> getManagementNode() { return connectionProcessor.flatMap(connection -> connection.getManagementNode(entityPath, entityType)); } /** * Emits a new unnamed active session when it becomes available. * * @param request Number of unnamed active sessions to emit. */ private void onSessionRequest(long request) { if (isDisposed.get()) { LOGGER.info("Session manager is disposed. 
Not emitting more unnamed sessions."); return; } LOGGER.atVerbose() .addKeyValue(NUMBER_OF_REQUESTED_MESSAGES_KEY, request) .log("Requested unnamed sessions."); for (int i = 0; i < request; i++) { final Scheduler scheduler = availableSchedulers.poll(); if (scheduler == null) { if (request != Long.MAX_VALUE) { LOGGER.atVerbose() .addKeyValue(NUMBER_OF_REQUESTED_MESSAGES_KEY, request) .log("There are no available schedulers to fetch."); } return; } Flux<ServiceBusMessageContext> session = getSession(scheduler, true); sessionReceiveSink.next(session); } } private <T> Mono<Void> validateParameter(T parameter, String parameterName, String operation) { if (isDisposed.get()) { return monoError(LOGGER, new IllegalStateException( String.format(INVALID_OPERATION_DISPOSED_RECEIVER, operation))); } else if (parameter == null) { return monoError(LOGGER, new NullPointerException(String.format("'%s' cannot be null.", parameterName))); } else if ((parameter instanceof String) && (((String) parameter).isEmpty())) { return monoError(LOGGER, new IllegalArgumentException(String.format("'%s' cannot be an empty string.", parameterName))); } else { return Mono.empty(); } } }
class ServiceBusSessionManager implements AutoCloseable { private static final Duration SLEEP_DURATION_ON_ACCEPT_SESSION_EXCEPTION = Duration.ofMinutes(1); private static final String TRACKING_ID_KEY = "trackingId"; private static final ClientLogger LOGGER = new ClientLogger(ServiceBusSessionManager.class); private final String entityPath; private final MessagingEntityType entityType; private final ReceiverOptions receiverOptions; private final ServiceBusReceiveLink receiveLink; private final ServiceBusConnectionProcessor connectionProcessor; private final Duration operationTimeout; private final MessageSerializer messageSerializer; private final String identifier; private final AtomicBoolean isDisposed = new AtomicBoolean(); private final AtomicBoolean isStarted = new AtomicBoolean(); private final List<Scheduler> schedulers; private final Deque<Scheduler> availableSchedulers = new ConcurrentLinkedDeque<>(); private final Duration maxSessionLockRenewDuration; /** * SessionId to receiver mapping. 
*/ private final ConcurrentHashMap<String, ServiceBusSessionReceiver> sessionReceivers = new ConcurrentHashMap<>(); private final EmitterProcessor<Flux<ServiceBusMessageContext>> processor; private final FluxSink<Flux<ServiceBusMessageContext>> sessionReceiveSink; private volatile Flux<ServiceBusMessageContext> receiveFlux; ServiceBusSessionManager(String entityPath, MessagingEntityType entityType, ServiceBusConnectionProcessor connectionProcessor, MessageSerializer messageSerializer, ReceiverOptions receiverOptions, ServiceBusReceiveLink receiveLink, String identifier) { this.entityPath = entityPath; this.entityType = entityType; this.receiverOptions = receiverOptions; this.connectionProcessor = connectionProcessor; this.operationTimeout = connectionProcessor.getRetryOptions().getTryTimeout(); this.messageSerializer = messageSerializer; this.maxSessionLockRenewDuration = receiverOptions.getMaxLockRenewDuration(); this.identifier = identifier; final int numberOfSchedulers = receiverOptions.isRollingSessionReceiver() ? receiverOptions.getMaxConcurrentSessions() : 1; final List<Scheduler> schedulerList = IntStream.range(0, numberOfSchedulers) .mapToObj(index -> Schedulers.newBoundedElastic(DEFAULT_BOUNDED_ELASTIC_SIZE, DEFAULT_BOUNDED_ELASTIC_QUEUESIZE, "receiver-" + index)) .collect(Collectors.toList()); this.schedulers = Collections.unmodifiableList(schedulerList); this.availableSchedulers.addAll(this.schedulers); this.processor = EmitterProcessor.create(numberOfSchedulers, false); this.sessionReceiveSink = processor.sink(); this.receiveLink = receiveLink; } ServiceBusSessionManager(String entityPath, MessagingEntityType entityType, ServiceBusConnectionProcessor connectionProcessor, MessageSerializer messageSerializer, ReceiverOptions receiverOptions, String identifier) { this(entityPath, entityType, connectionProcessor, messageSerializer, receiverOptions, null, identifier); } /** * Gets the link name with the matching {@code sessionId}. 
* * @param sessionId Session id to get link name for. * * @return The name of the link, or {@code null} if there is no open link with that {@code sessionId}. */ String getLinkName(String sessionId) { final ServiceBusSessionReceiver receiver = sessionReceivers.get(sessionId); return receiver != null ? receiver.getLinkName() : null; } /** * Gets the identifier of the instance of {@link ServiceBusSessionManager}. * * @return The identifier that can identify the instance of {@link ServiceBusSessionManager}. */ public String getIdentifier() { return this.identifier; } /** * Gets the state of a session given its identifier. * * @param sessionId Identifier of session to get. * * @return The session state or an empty Mono if there is no state set for the session. * @throws IllegalStateException if the receiver is a non-session receiver. */ Mono<byte[]> getSessionState(String sessionId) { return validateParameter(sessionId, "sessionId", "getSessionState").then( getManagementNode().flatMap(channel -> { final ServiceBusSessionReceiver receiver = sessionReceivers.get(sessionId); final String associatedLinkName = receiver != null ? receiver.getLinkName() : null; return channel.getSessionState(sessionId, associatedLinkName); })); } /** * Gets a stream of messages from different sessions. * * @return A Flux of messages merged from different sessions. */ Flux<ServiceBusMessageContext> receive() { if (!isStarted.getAndSet(true)) { this.sessionReceiveSink.onRequest(this::onSessionRequest); if (!receiverOptions.isRollingSessionReceiver()) { receiveFlux = getSession(schedulers.get(0), false); } else { receiveFlux = Flux.merge(processor, receiverOptions.getMaxConcurrentSessions()); } } return receiveFlux; } /** * Renews the session lock. * * @param sessionId Identifier of session to get. * * @return The next expiration time for the session lock. * @throws IllegalStateException if the receiver is a non-session receiver. 
*/ Mono<OffsetDateTime> renewSessionLock(String sessionId) { return validateParameter(sessionId, "sessionId", "renewSessionLock").then( getManagementNode().flatMap(channel -> { final ServiceBusSessionReceiver receiver = sessionReceivers.get(sessionId); final String associatedLinkName = receiver != null ? receiver.getLinkName() : null; return channel.renewSessionLock(sessionId, associatedLinkName).handle((offsetDateTime, sink) -> { if (receiver != null) { receiver.setSessionLockedUntil(offsetDateTime); } sink.next(offsetDateTime); }); })); } /** * Tries to update the message disposition on a session aware receive link. * * @return {@code true} if the {@code lockToken} was updated on receive link. {@code false} otherwise. This means * there isn't an open link with that {@code sessionId}. */ Mono<Boolean> updateDisposition(String lockToken, String sessionId, DispositionStatus dispositionStatus, Map<String, Object> propertiesToModify, String deadLetterReason, String deadLetterDescription, ServiceBusTransactionContext transactionContext) { final String operation = "updateDisposition"; return Mono.when( validateParameter(lockToken, "lockToken", operation), validateParameter(lockToken, "lockToken", operation), validateParameter(sessionId, "'sessionId'", operation)).then( Mono.defer(() -> { final ServiceBusSessionReceiver receiver = sessionReceivers.get(sessionId); if (receiver == null || !receiver.containsLockToken(lockToken)) { return Mono.just(false); } final DeliveryState deliveryState = MessageUtils.getDeliveryState(dispositionStatus, deadLetterReason, deadLetterDescription, propertiesToModify, transactionContext); return receiver.updateDisposition(lockToken, deliveryState).thenReturn(true); })); } @Override public void close() { if (isDisposed.getAndSet(true)) { return; } final List<Mono<Void>> closeables = sessionReceivers.values().stream() .map(receiver -> receiver.closeAsync()) .collect(Collectors.toList()); Mono.when(closeables).block(operationTimeout); 
sessionReceiveSink.complete(); for (Scheduler scheduler : schedulers) { scheduler.dispose(); } } private AmqpErrorContext getErrorContext() { return new SessionErrorContext(connectionProcessor.getFullyQualifiedNamespace(), entityPath); } /** * Creates an session receive link. * * @return A Mono that completes with an session receive link. */ private Mono<ServiceBusReceiveLink> createSessionReceiveLink() { final String sessionId = receiverOptions.getSessionId(); final String linkName = (sessionId != null) ? sessionId : StringUtil.getRandomString("session-"); return connectionProcessor .flatMap(connection -> { return connection.createReceiveLink(linkName, entityPath, receiverOptions.getReceiveMode(), null, entityType, identifier, sessionId); }); } /** * Gets an active unnamed session link. * * @return A Mono that completes when an unnamed session becomes available. * @throws AmqpException if the session manager is already disposed. */ /** * Gets the next available unnamed session with the given receive options and publishes its contents on the given * {@code scheduler}. * * @param scheduler Scheduler to coordinate received methods on. * @param disposeOnIdle true to dispose receiver when it idles; false otherwise. * @return A Mono that completes with an unnamed session receiver. 
*/ private Flux<ServiceBusMessageContext> getSession(Scheduler scheduler, boolean disposeOnIdle) { return getActiveLink().flatMap(link -> link.getSessionId() .map(sessionId -> sessionReceivers.compute(sessionId, (key, existing) -> { if (existing != null) { return existing; } return new ServiceBusSessionReceiver(link, messageSerializer, connectionProcessor.getRetryOptions(), receiverOptions.getPrefetchCount(), disposeOnIdle, scheduler, this::renewSessionLock, maxSessionLockRenewDuration); }))) .flatMapMany(sessionReceiver -> sessionReceiver.receive().doFinally(signalType -> { LOGGER.atVerbose() .addKeyValue(SESSION_ID_KEY, sessionReceiver.getSessionId()) .log("Closing session receiver."); availableSchedulers.push(scheduler); sessionReceivers.remove(sessionReceiver.getSessionId()); sessionReceiver.closeAsync().subscribe(); if (receiverOptions.isRollingSessionReceiver()) { onSessionRequest(1L); } })); } private Mono<ServiceBusManagementNode> getManagementNode() { return connectionProcessor.flatMap(connection -> connection.getManagementNode(entityPath, entityType)); } /** * Emits a new unnamed active session when it becomes available. * * @param request Number of unnamed active sessions to emit. */ private void onSessionRequest(long request) { if (isDisposed.get()) { LOGGER.info("Session manager is disposed. 
Not emitting more unnamed sessions."); return; } LOGGER.atVerbose() .addKeyValue(NUMBER_OF_REQUESTED_MESSAGES_KEY, request) .log("Requested unnamed sessions."); for (int i = 0; i < request; i++) { final Scheduler scheduler = availableSchedulers.poll(); if (scheduler == null) { if (request != Long.MAX_VALUE) { LOGGER.atVerbose() .addKeyValue(NUMBER_OF_REQUESTED_MESSAGES_KEY, request) .log("There are no available schedulers to fetch."); } return; } Flux<ServiceBusMessageContext> session = getSession(scheduler, true); sessionReceiveSink.next(session); } } private <T> Mono<Void> validateParameter(T parameter, String parameterName, String operation) { if (isDisposed.get()) { return monoError(LOGGER, new IllegalStateException( String.format(INVALID_OPERATION_DISPOSED_RECEIVER, operation))); } else if (parameter == null) { return monoError(LOGGER, new NullPointerException(String.format("'%s' cannot be null.", parameterName))); } else if ((parameter instanceof String) && (((String) parameter).isEmpty())) { return monoError(LOGGER, new IllegalArgumentException(String.format("'%s' cannot be an empty string.", parameterName))); } else { return Mono.empty(); } } }
Can we include more information about the construct here? Like a qualified name instead of just the name? e.g., `ballerina/lang.value:isReadOnly` For object methods it may have to be like `Obj.method`. For anonymous object methods we can probably give only the method name though.
public void testDeprecatedWarningForIsReadOnly() { CompileResult result = BCompileUtil.compile( "test-src/expressions/builtinoperations/is_readonly_deprecated_warning.bal"); int index = 0; validateWarning(result, index++, "usage of construct 'isReadOnly' is deprecated", 22, 9); validateWarning(result, index++, "usage of construct 'isReadOnly' is deprecated", 24, 17); Assert.assertEquals(result.getDiagnostics().length, index); }
validateWarning(result, index++, "usage of construct 'isReadOnly' is deprecated", 22, 9);
public void testDeprecatedWarningForIsReadOnly() { CompileResult result = BCompileUtil.compile( "test-src/expressions/builtinoperations/is_readonly_deprecated_warning.bal"); int index = 0; validateWarning(result, index++, "usage of construct 'ballerina/lang.value:0.0.0:isReadOnly' is deprecated", 22, 9); validateWarning(result, index++, "usage of construct 'ballerina/lang.value:0.0.0:isReadOnly' is deprecated", 24, 17); Assert.assertEquals(result.getDiagnostics().length, index); }
class FreezeAndIsFrozenTest { private static final String FREEZE_SUCCESSFUL = "freeze successful"; private CompileResult result; private CompileResult semanticsNegativeResult; private CompileResult negativeResult; @BeforeClass public void setup() { result = BCompileUtil.compile("test-src/expressions/builtinoperations/freeze-and-isfrozen.bal"); semanticsNegativeResult = BCompileUtil.compile( "test-src/expressions/builtinoperations/freeze-and-isfrozen-semantics-negative.bal"); negativeResult = BCompileUtil.compile( "test-src/expressions/builtinoperations/freeze-and-isfrozen-negative.bal"); } @Test() public void testFreezeOnNilTypedValue() { BValue[] returns = BRunUtil.invoke(result, "testFreezeOnNilTypedValue"); Assert.assertEquals(returns.length, 1); Assert.assertNull(returns[0]); } @Test(dataProvider = "booleanValues") public void testBooleanFreeze(boolean i) { BValue[] returns = BRunUtil.invoke(result, "testBooleanFreeze", new BValue[]{new BBoolean(i)}); Assert.assertEquals(returns.length, 2); Assert.assertSame(returns[0].getClass(), BBoolean.class); Assert.assertTrue(((BBoolean) returns[0]).booleanValue(), "Expected booleans to be the same"); Assert.assertSame(returns[1].getClass(), BBoolean.class); Assert.assertTrue(((BBoolean) returns[1]).booleanValue(), "Expected booleans to be readonly"); } @Test(dataProvider = "intValues") public void testIntFreeze(int i) { BValue[] returns = BRunUtil.invoke(result, "testIntFreeze", new BValue[]{new BInteger(i)}); Assert.assertEquals(returns.length, 2); Assert.assertSame(returns[0].getClass(), BBoolean.class); Assert.assertTrue(((BBoolean) returns[0]).booleanValue(), "Expected ints to be the same"); Assert.assertSame(returns[1].getClass(), BBoolean.class); Assert.assertTrue(((BBoolean) returns[1]).booleanValue(), "Expected ints to be readonly"); } @Test(dataProvider = "byteValues") public void testByteFreeze(int i) { BValue[] returns = BRunUtil.invoke(result, "testByteFreeze", new BValue[]{new BByte(i)}); 
Assert.assertEquals(returns.length, 2); Assert.assertSame(returns[0].getClass(), BBoolean.class); Assert.assertTrue(((BBoolean) returns[0]).booleanValue(), "Expected bytes to be the same"); Assert.assertSame(returns[1].getClass(), BBoolean.class); Assert.assertTrue(((BBoolean) returns[1]).booleanValue(), "Expected bytes to be readonly"); } @Test(dataProvider = "floatValues") public void testFloatFreeze(double i) { BValue[] returns = BRunUtil.invoke(result, "testFloatFreeze", new BValue[]{new BFloat(i)}); Assert.assertEquals(returns.length, 2); Assert.assertSame(returns[0].getClass(), BBoolean.class); Assert.assertTrue(((BBoolean) returns[0]).booleanValue(), "Expected floats to be the same"); Assert.assertSame(returns[1].getClass(), BBoolean.class); Assert.assertTrue(((BBoolean) returns[1]).booleanValue(), "Expected floats to be readonly"); } @Test(dataProvider = "decimalValues") public void testDecimalFreeze(BigDecimal i) { BValue[] returns = BRunUtil.invoke(result, "testDecimalFreeze", new BValue[]{new BDecimal(i)}); Assert.assertEquals(returns.length, 2); Assert.assertSame(returns[0].getClass(), BBoolean.class); Assert.assertTrue(((BBoolean) returns[0]).booleanValue(), "Expected decimals to be the same"); Assert.assertSame(returns[1].getClass(), BBoolean.class); Assert.assertTrue(((BBoolean) returns[1]).booleanValue(), "Expected decimals to be readonly"); } @Test(dataProvider = "stringValues") public void testStringFreeze(String i) { BValue[] returns = BRunUtil.invoke(result, "testStringFreeze", new BValue[]{new BString(i)}); Assert.assertEquals(returns.length, 2); Assert.assertSame(returns[0].getClass(), BBoolean.class); Assert.assertTrue(((BBoolean) returns[0]).booleanValue(), "Expected strings to be the same"); Assert.assertSame(returns[1].getClass(), BBoolean.class); Assert.assertTrue(((BBoolean) returns[1]).booleanValue(), "Expected strings to be readonly"); } @Test public void testRecordWithEnumFreeze() { BRunUtil.invoke(result, "testRecordWithEnumFreeze"); 
} @Test public void testBasicTypeNullableUnionFreeze() { BValue[] returns = BRunUtil.invoke(result, "testBasicTypeNullableUnionFreeze", new BValue[]{}); Assert.assertEquals(returns.length, 2); Assert.assertSame(returns[0].getClass(), BBoolean.class); Assert.assertTrue(((BBoolean) returns[0]).booleanValue(), "Expected values to be the same"); Assert.assertSame(returns[1].getClass(), BBoolean.class); Assert.assertTrue(((BBoolean) returns[1]).booleanValue(), "Expected values to be readonly"); } @Test public void testBasicTypeUnionFreeze() { BValue[] returns = BRunUtil.invoke(result, "testBasicTypeUnionFreeze", new BValue[]{}); Assert.assertEquals(returns.length, 2); Assert.assertSame(returns[0].getClass(), BBoolean.class); Assert.assertTrue(((BBoolean) returns[0]).booleanValue(), "Expected values to be the same"); Assert.assertSame(returns[1].getClass(), BBoolean.class); Assert.assertTrue(((BBoolean) returns[1]).booleanValue(), "Expected values to be readonly"); } @Test public void testBasicTypesAsJsonFreeze() { BValue[] returns = BRunUtil.invoke(result, "testBasicTypesAsJsonFreeze", new BValue[0]); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BBoolean.class); Assert.assertTrue(((BBoolean) returns[0]).booleanValue(), "Expected json values to be the same"); } @Test public void testIsFrozenOnStructuralTypes() { BValue[] returns = BRunUtil.invoke(result, "testIsFrozenOnStructuralTypes", new BValue[0]); Assert.assertEquals(returns.length, 2); Assert.assertSame(returns[0].getClass(), BBoolean.class); Assert.assertFalse(((BBoolean) returns[0]).booleanValue(), "Expected values to be identified as not frozen"); Assert.assertSame(returns[1].getClass(), BBoolean.class); Assert.assertTrue(((BBoolean) returns[1]).booleanValue(), "Expected values to be identified as frozen"); } @Test(expectedExceptions = BLangRuntimeException.class, expectedExceptionsMessageRegExp = "error: \\{ballerina/lang.array}InvalidUpdate \\{\"message\"" + ":\"modification 
not allowed on readonly value\".*", dataProvider = "frozenBasicTypeArrayModificationFunctions") public void testFrozenBasicTypeArrayModification(String frozenBasicTypeArrayModificationFunction) { BRunUtil.invoke(result, frozenBasicTypeArrayModificationFunction, new BValue[0]); } @Test(expectedExceptions = BLangRuntimeException.class, expectedExceptionsMessageRegExp = "error: \\{ballerina/lang.array}InvalidUpdate \\{\"message\":" + "\"modification not allowed on readonly value.*") public void testFrozenDecimalArrayModification() { BRunUtil.invoke(result, "testFrozenDecimalArrayModification", new BValue[0]); } @Test(expectedExceptions = BLangRuntimeException.class, expectedExceptionsMessageRegExp = "error: \\{ballerina/lang.array}InvalidUpdate \\{\"message\":\"" + "modification not allowed on readonly value\"}.*") public void testFrozenJsonArrayModification() { BRunUtil.invoke(result, "testFrozenJsonArrayModification", new BValue[0]); } @Test(expectedExceptions = BLangRuntimeException.class, expectedExceptionsMessageRegExp = "error: \\{ballerina/lang.map}InvalidUpdate \\{\"message\":" + "\"Invalid map insertion: modification not allowed on readonly value\".*") public void testFrozenJsonModification() { BRunUtil.invoke(result, "testFrozenJsonModification", new BValue[0]); } @Test(expectedExceptions = BLangRuntimeException.class, expectedExceptionsMessageRegExp = "error: \\{ballerina/lang.map}InvalidUpdate \\{\"message\":" + "\"Invalid map insertion: modification not allowed on readonly value\".*") public void testAdditionToFrozenJson() { BRunUtil.invoke(result, "testAdditionToFrozenJson", new BValue[0]); } @Test(expectedExceptions = BLangRuntimeException.class, expectedExceptionsMessageRegExp = "error: \\{ballerina/lang.map}InvalidUpdate \\{\"message\":\"failed " + "to remove element from map: modification not allowed on readonly value.*") public void testRemovalFromFrozenJson() { BRunUtil.invoke(result, "testRemovalFromFrozenJson", new BValue[0]); } 
@Test(expectedExceptions = BLangRuntimeException.class, expectedExceptionsMessageRegExp = "error: \\{ballerina/lang.map}InvalidUpdate \\{\"message\":" + "\"Invalid map insertion: modification not allowed on readonly value\".*") public void testFrozenInnerJsonModification() { BRunUtil.invoke(result, "testFrozenInnerJsonModification", new BValue[0]); } @Test(expectedExceptions = BLangRuntimeException.class, expectedExceptionsMessageRegExp = "error: \\{ballerina/lang.map}InvalidUpdate \\{\"message\":" + "\"Invalid map insertion: modification not allowed on readonly value\".*") public void testAdditionToFrozenInnerJson() { BRunUtil.invoke(result, "testAdditionToFrozenInnerJson", new BValue[0]); } @Test(expectedExceptions = BLangRuntimeException.class, expectedExceptionsMessageRegExp = "error: \\{ballerina/lang.map}InvalidUpdate \\{\"message\":" + "\"failed to remove element from map: modification not allowed on readonly value\".*") public void testRemovalFromFrozenInnerJson() { BRunUtil.invoke(result, "testRemovalFromFrozenInnerJson", new BValue[0]); } @Test(expectedExceptions = BLangRuntimeException.class, expectedExceptionsMessageRegExp = "error: \\{ballerina/lang.xml}XMLOperationError \\{\"message\":" + "\"Failed to set children to xml element: modification not allowed on readonly value\".*") public void testFrozenXmlSetChildren() { BRunUtil.invoke(result, "testFrozenXmlSetChildren", new BValue[0]); } @Test(expectedExceptions = BLangRuntimeException.class, expectedExceptionsMessageRegExp = "error: \\{ballerina/lang.xml}XMLOperationError \\{\"message\":" + "\"Failed to set children to xml element: modification not allowed on readonly value\".*") public void testFrozenXmlSetChildrenDeep() { BRunUtil.invoke(result, "testFrozenXmlSetChildrenDeep", new BValue[0]); } @Test(expectedExceptions = BLangRuntimeException.class, expectedExceptionsMessageRegExp = "error: \\{ballerina/lang.map}InvalidUpdate \\{\"message\":\"" + "Invalid map insertion: modification not allowed on 
readonly value\".*") public void testFrozenMapUpdate() { BRunUtil.invoke(result, "testFrozenMapUpdate", new BValue[0]); } @Test(expectedExceptions = BLangRuntimeException.class, expectedExceptionsMessageRegExp = "error: \\{ballerina/lang.map}InvalidUpdate \\{\"message\":\"" + "failed to remove element from map: modification not allowed on readonly value\".*") public void testFrozenMapRemoval() { BRunUtil.invoke(result, "testFrozenMapRemoval", new BValue[0]); } @Test(expectedExceptions = BLangRuntimeException.class, expectedExceptionsMessageRegExp = "error: \\{ballerina/lang.map}InvalidUpdate \\{\"message\":" + "\"Failed to clear map: modification not allowed on readonly value\".*") public void testFrozenMapClear() { BRunUtil.invoke(result, "testFrozenMapClear", new BValue[0]); } @Test(expectedExceptions = BLangRuntimeException.class, expectedExceptionsMessageRegExp = "error: \\{ballerina/lang.map}InvalidUpdate \\{\"message\"" + ":\"Invalid map insertion: modification not allowed on readonly value\".*") public void testFrozenInnerMapUpdate() { BRunUtil.invoke(result, "testFrozenInnerMapUpdate", new BValue[0]); } @Test(expectedExceptions = BLangRuntimeException.class, expectedExceptionsMessageRegExp = "error: \\{ballerina/lang.map}InvalidUpdate \\{\"message\":" + "\"failed to remove element from map: modification not allowed on readonly value\".*") public void testFrozenInnerMapRemoval() { BRunUtil.invoke(result, "testFrozenInnerMapRemoval", new BValue[0]); } @Test(expectedExceptions = BLangRuntimeException.class, expectedExceptionsMessageRegExp = "error: \\{ballerina/lang.map}InvalidUpdate \\{\"message\":" + "\"Failed to clear map: modification not allowed on readonly value\".*") public void testFrozenInnerMapClear() { BRunUtil.invoke(result, "testFrozenInnerMapClear", new BValue[0]); } @Test(expectedExceptions = BLangRuntimeException.class, expectedExceptionsMessageRegExp = "error: \\{ballerina/lang.array}InvalidUpdate \\{\"message\":" + "\"modification not allowed 
on readonly value\".*") public void testFrozenAnyArrayAddition() { BRunUtil.invoke(result, "testFrozenAnyArrayAddition", new BValue[0]); } @Test(expectedExceptions = BLangRuntimeException.class, expectedExceptionsMessageRegExp = "error: \\{ballerina/lang.array}InvalidUpdate \\{\"message\":" + "\"modification not allowed on readonly value\".*") public void testFrozenAnyArrayUpdate() { BRunUtil.invoke(result, "testFrozenAnyArrayUpdate", new BValue[0]); } @Test(expectedExceptions = BLangRuntimeException.class, expectedExceptionsMessageRegExp = "error: \\{ballerina/lang.map}InherentTypeViolation \\{\"message\":" + "\"cannot update 'readonly' field 'name' in record of type '\\(Employee & readonly\\)'\".*") public void testFrozenAnyArrayElementUpdate() { BRunUtil.invoke(result, "testFrozenAnyArrayElementUpdate", new BValue[0]); } @Test(expectedExceptions = BLangRuntimeException.class, expectedExceptionsMessageRegExp = "error: \\{ballerina/lang.array}InvalidUpdate \\{\"message\":" + "\"modification not allowed on readonly value\".*") public void testFrozenTupleUpdate() { BRunUtil.invoke(result, "testFrozenTupleUpdate", new BValue[0]); } @Test public void testFrozenRecursiveTupleUpdate() { BRunUtil.invoke(result, "testFrozenRecursiveTupleUpdate"); } @Test(expectedExceptions = BLangRuntimeException.class, expectedExceptionsMessageRegExp = "error: \\{ballerina/lang.map}InherentTypeViolation \\{\"message\":" + "\"cannot update 'readonly' field 'name' in record of type '\\(DeptEmployee & readonly\\)'\".*") public void testFrozenRecordUpdate() { BRunUtil.invoke(result, "testFrozenRecordUpdate", new BValue[0]); } @Test(expectedExceptions = BLangRuntimeException.class, expectedExceptionsMessageRegExp = "error: \\{ballerina/lang.map}InherentTypeViolation \\{\"message\":" + "\"cannot update 'readonly' field 'code' in record of type '\\(Dept & readonly\\)'\".*") public void testFrozenInnerRecordUpdate() { BRunUtil.invoke(result, "testFrozenInnerRecordUpdate", new BValue[0]); } 
@Test(expectedExceptions = BLangRuntimeException.class, expectedExceptionsMessageRegExp = "error: \\{ballerina/lang.table}InvalidUpdate " + "\\{\"message\":\"modification not allowed on readonly value\"}.*") public void testFrozenTableAddition() { BRunUtil.invoke(result, "testFrozenTableAddition", new BValue[0]); } @Test(expectedExceptions = BLangRuntimeException.class, expectedExceptionsMessageRegExp = "error: \\{ballerina/lang.table}InvalidUpdate " + "\\{\"message\":\"modification not allowed on readonly value\"}.*") public void testFrozenTableRemoval() { BRunUtil.invoke(result, "testFrozenTableRemoval", new BValue[0]); } @Test public void testSimpleUnionFreeze() { BValue[] returns = BRunUtil.invoke(result, "testSimpleUnionFreeze", new BValue[0]); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BBoolean.class); Assert.assertTrue(((BBoolean) returns[0]).booleanValue(), "Expected values to be identified as frozen"); } @Test(description = "test a map of type not purely anydata, a combination of anydata and non-anydata") public void testValidComplexMapFreeze() { BValue[] returns = BRunUtil.invoke(result, "testValidComplexMapFreeze", new BValue[0]); Assert.assertEquals(returns.length, 2); Assert.assertSame(returns[0].getClass(), BString.class); Assert.assertEquals(returns[0].stringValue(), FREEZE_SUCCESSFUL); Assert.assertSame(returns[1].getClass(), BBoolean.class); Assert.assertTrue(((BBoolean) returns[1]).booleanValue(), "Expected value to be readonly since no error " + "was encountered"); } @Test public void testRecursiveTupleFreeze() { BRunUtil.invoke(result, "testRecursiveTupleFreeze"); } @Test(description = "test an array of type not purely anydata, a combination of anydata and non-anydata") public void testValidComplexArrayFreeze() { BValue[] returns = BRunUtil.invoke(result, "testValidComplexArrayFreeze", new BValue[0]); Assert.assertEquals(returns.length, 2); Assert.assertSame(returns[0].getClass(), BString.class); 
Assert.assertEquals(returns[0].stringValue(), FREEZE_SUCCESSFUL); Assert.assertSame(returns[1].getClass(), BBoolean.class); Assert.assertTrue(((BBoolean) returns[1]).booleanValue(), "Expected value to be readonly since no error " + "was encountered"); } @Test(description = "test a record of type not purely anydata, a combination of anydata and non-anydata") public void testValidComplexRecordFreeze() { BValue[] returns = BRunUtil.invoke(result, "testValidComplexRecordFreeze", new BValue[0]); Assert.assertEquals(returns.length, 2); Assert.assertSame(returns[0].getClass(), BString.class); Assert.assertEquals(returns[0].stringValue(), FREEZE_SUCCESSFUL); Assert.assertSame(returns[1].getClass(), BBoolean.class); Assert.assertTrue(((BBoolean) returns[1]).booleanValue(), "Expected value to be readonly since no error " + "was encountered"); } @Test(description = "test a tuple of type not purely anydata, a combination of anydata and non-anydata") public void testValidComplexTupleFreeze() { BValue[] returns = BRunUtil.invoke(result, "testValidComplexTupleFreeze", new BValue[0]); Assert.assertEquals(returns.length, 2); Assert.assertSame(returns[0].getClass(), BString.class); Assert.assertEquals(returns[0].stringValue(), FREEZE_SUCCESSFUL); Assert.assertSame(returns[1].getClass(), BBoolean.class); Assert.assertTrue(((BBoolean) returns[1]).booleanValue(), "Expected value to be readonly since no error " + "was encountered"); } @Test(description = "test a union of member type not purely anydata, a combination of anydata and non-anydata") public void testValidComplexUnionFreeze() { BValue[] returns = BRunUtil.invoke(result, "testValidComplexUnionFreeze", new BValue[0]); Assert.assertEquals(returns.length, 2); Assert.assertSame(returns[0].getClass(), BString.class); Assert.assertEquals(returns[0].stringValue(), FREEZE_SUCCESSFUL); Assert.assertSame(returns[1].getClass(), BBoolean.class); Assert.assertTrue(((BBoolean) returns[1]).booleanValue(), "Expected value to be readonly since 
no error " + "was encountered"); } @Test public void testValidSelfReferencingValueFreeze() { BValue[] returns = BRunUtil.invoke(result, "testValidSelfReferencingValueFreeze", new BValue[0]); Assert.assertEquals(returns.length, 2); Assert.assertSame(returns[0].getClass(), BString.class); Assert.assertEquals(returns[0].stringValue(), FREEZE_SUCCESSFUL); Assert.assertSame(returns[1].getClass(), BBoolean.class); Assert.assertTrue(((BBoolean) returns[1]).booleanValue(), "Expected value to be readonly since no error " + "was encountered"); } @Test public void testStructureWithErrorValueFreeze() { BValue[] returns = BRunUtil.invoke(result, "testStructureWithErrorValueFreeze", new BValue[0]); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BBoolean.class); Assert.assertTrue(((BBoolean) returns[0]).booleanValue()); } @Test public void testFrozenValueUpdatePanicWithCheckTrap() { BValue[] returns = BRunUtil.invoke(result, "testFrozenValueUpdatePanicWithCheckTrap", new BValue[0]); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BError.class); Assert.assertEquals(((BMap<String, BString>) ((BError) returns[0]).getDetails()).get("message").stringValue(), "modification not allowed on readonly value"); } @Test public void testXMLItemsCloneReadOnly() { BRunUtil.invoke(result, "testXMLItemsCloneReadOnly"); } @Test public void testFreezeAndIsFrozenSemanticsNegativeCases() { Assert.assertEquals(semanticsNegativeResult.getErrorCount(), 20); int index = 0; validateError(semanticsNegativeResult, index++, "incompatible types: expected 'ballerina/lang.value:0.0.0:Cloneable', found 'PersonObj'", 19, 39); validateError(semanticsNegativeResult, index++, "incompatible types: expected 'ballerina/lang.value:0.0.0:Cloneable', found 'map<PersonObj>'", 24, 9); validateError(semanticsNegativeResult, index++, "incompatible types: expected 'ballerina/lang.value:0.0.0:Cloneable', found 'PersonObj[]'", 29, 9); 
validateError(semanticsNegativeResult, index++, "incompatible types: expected 'ballerina/lang.value:0.0.0:Cloneable', found '" + "(PersonObjTwo|PersonObj)?[]'", 32, 10); validateError(semanticsNegativeResult, index++, "incompatible types: expected 'ballerina/lang.value:0.0.0:Cloneable', found '[" + "(PersonObj|PersonObjTwo),PersonObjTwo]'", 39, 9); validateError(semanticsNegativeResult, index++, "incompatible types: expected 'ballerina/lang.value:0.0.0:Cloneable', found 'Department'", 44, 9); validateError(semanticsNegativeResult, index++, "incompatible types: expected 'ballerina/lang.value:0.0.0:Cloneable', found 'map<" + "(string|PersonObj)>'", 49, 32); validateError(semanticsNegativeResult, index++, "incompatible types: expected 'ballerina/lang.value:0.0.0:Cloneable', found 'map<[(string|PersonObj)," + "(FreezeAllowedDepartment|float)]>'", 52, 26); validateError(semanticsNegativeResult, index++, "incompatible types: expected 'ballerina/lang.value:0.0.0:Cloneable', found '" + "(boolean|PersonObj|float)?[]'", 55, 39); validateError(semanticsNegativeResult, index++, "incompatible types: expected 'ballerina/lang.value:0.0.0:Cloneable', found '" + "(boolean|PersonObj|float)?[]'", 57, 16); validateError(semanticsNegativeResult, index++, "incompatible types: expected 'ballerina/lang.value:0.0.0:Cloneable', found '[(string|PersonObj)," + "(FreezeAllowedDepartment|float)]'", 60, 60); validateError(semanticsNegativeResult, index++, "incompatible types: expected 'ballerina/lang.value:0.0.0:Cloneable', found " + "'FreezeAllowedDepartment'", 63, 35); validateError(semanticsNegativeResult, index++, "incompatible types: expected 'ballerina/lang.value:0.0.0:Cloneable', found '(string|PersonObj)'", 66 , 27); validateError(semanticsNegativeResult, index++, "incompatible types: expected 'any', found 'error'", 71, 9); validateError(semanticsNegativeResult, index++, "incompatible types: expected 'ballerina/lang.value:0.0.0:Cloneable', found 'map<(string|PersonObj)" + ">'", 81, 39); 
validateError(semanticsNegativeResult, index++, "incompatible types: expected 'ballerina/lang.value:0.0.0:Cloneable', found '" + "(string|stream<int>|float)?[]'", 92, 47); validateError(semanticsNegativeResult, index++, "incompatible types: expected 'ballerina/lang.value:0.0.0:Cloneable', found " + "'FreezeAllowedDepartment2'", 100, 42); validateError(semanticsNegativeResult, index++, "incompatible types: expected 'ballerina/lang.value:0.0.0:Cloneable', found '[int," + "(string|PersonObj|float),boolean]'", 106, 21); validateError(semanticsNegativeResult, index++, "incompatible types: expected 'ballerina/lang.value:0.0.0:Cloneable', found '" + "(int|Department|PersonObj)'", 113, 42); validateError(semanticsNegativeResult, index, "incompatible types: expected 'anydata', found '((anydata & readonly)|error)'", 120, 19); } @Test public void testFreezeAndIsFrozenNegativeCases() { Assert.assertEquals(negativeResult.getErrorCount(), 2); validateError(negativeResult, 0, "variable 'ageRec' is not initialized", 24, 10); validateError(negativeResult, 1, "variable 'ageRec' is not initialized", 24, 10); } @DataProvider(name = "booleanValues") public Object[][] booleanValues() { return new Object[][]{ {true}, {false} }; } @DataProvider(name = "intValues") public Object[][] intValues() { return new Object[][]{ {-123457}, {0}, {1}, {53456032} }; } @DataProvider(name = "byteValues") public Object[][] byteValues() { return new Object[][]{ {0}, {1}, {255} }; } @DataProvider(name = "floatValues") public Object[][] floatValues() { return new Object[][]{ {-1234.57}, {0.0}, {1.1}, {53456.032} }; } @DataProvider(name = "decimalValues") public Object[][] decimalValues() { return new Object[][]{ {new BigDecimal("-1234.57", MathContext.DECIMAL128)}, {new BigDecimal("53456.032", MathContext.DECIMAL128)}, {new BigDecimal("0.0", MathContext.DECIMAL128)}, {new BigDecimal("1.1", MathContext.DECIMAL128)} }; } @DataProvider(name = "stringValues") public Object[][] stringValues() { return new 
Object[][]{ {"a"}, {"Hello, from Ballerina!"} }; } @DataProvider(name = "frozenBasicTypeArrayModificationFunctions") public Object[][] frozenBasicTypeArrayModificationFunctions() { return new Object[][]{ {"testFrozenIntArrayModification"}, {"testFrozenByteArrayModification"}, {"testFrozenBooleanArrayModification"}, {"testFrozenFloatArrayModification"}, {"testFrozenStringArrayModification"} }; } @Test }
class FreezeAndIsFrozenTest { private static final String FREEZE_SUCCESSFUL = "freeze successful"; private CompileResult result; private CompileResult semanticsNegativeResult; private CompileResult negativeResult; @BeforeClass public void setup() { result = BCompileUtil.compile("test-src/expressions/builtinoperations/freeze-and-isfrozen.bal"); semanticsNegativeResult = BCompileUtil.compile( "test-src/expressions/builtinoperations/freeze-and-isfrozen-semantics-negative.bal"); negativeResult = BCompileUtil.compile( "test-src/expressions/builtinoperations/freeze-and-isfrozen-negative.bal"); } @Test() public void testFreezeOnNilTypedValue() { BValue[] returns = BRunUtil.invoke(result, "testFreezeOnNilTypedValue"); Assert.assertEquals(returns.length, 1); Assert.assertNull(returns[0]); } @Test(dataProvider = "booleanValues") public void testBooleanFreeze(boolean i) { BValue[] returns = BRunUtil.invoke(result, "testBooleanFreeze", new BValue[]{new BBoolean(i)}); Assert.assertEquals(returns.length, 2); Assert.assertSame(returns[0].getClass(), BBoolean.class); Assert.assertTrue(((BBoolean) returns[0]).booleanValue(), "Expected booleans to be the same"); Assert.assertSame(returns[1].getClass(), BBoolean.class); Assert.assertTrue(((BBoolean) returns[1]).booleanValue(), "Expected booleans to be readonly"); } @Test(dataProvider = "intValues") public void testIntFreeze(int i) { BValue[] returns = BRunUtil.invoke(result, "testIntFreeze", new BValue[]{new BInteger(i)}); Assert.assertEquals(returns.length, 2); Assert.assertSame(returns[0].getClass(), BBoolean.class); Assert.assertTrue(((BBoolean) returns[0]).booleanValue(), "Expected ints to be the same"); Assert.assertSame(returns[1].getClass(), BBoolean.class); Assert.assertTrue(((BBoolean) returns[1]).booleanValue(), "Expected ints to be readonly"); } @Test(dataProvider = "byteValues") public void testByteFreeze(int i) { BValue[] returns = BRunUtil.invoke(result, "testByteFreeze", new BValue[]{new BByte(i)}); 
Assert.assertEquals(returns.length, 2); Assert.assertSame(returns[0].getClass(), BBoolean.class); Assert.assertTrue(((BBoolean) returns[0]).booleanValue(), "Expected bytes to be the same"); Assert.assertSame(returns[1].getClass(), BBoolean.class); Assert.assertTrue(((BBoolean) returns[1]).booleanValue(), "Expected bytes to be readonly"); } @Test(dataProvider = "floatValues") public void testFloatFreeze(double i) { BValue[] returns = BRunUtil.invoke(result, "testFloatFreeze", new BValue[]{new BFloat(i)}); Assert.assertEquals(returns.length, 2); Assert.assertSame(returns[0].getClass(), BBoolean.class); Assert.assertTrue(((BBoolean) returns[0]).booleanValue(), "Expected floats to be the same"); Assert.assertSame(returns[1].getClass(), BBoolean.class); Assert.assertTrue(((BBoolean) returns[1]).booleanValue(), "Expected floats to be readonly"); } @Test(dataProvider = "decimalValues") public void testDecimalFreeze(BigDecimal i) { BValue[] returns = BRunUtil.invoke(result, "testDecimalFreeze", new BValue[]{new BDecimal(i)}); Assert.assertEquals(returns.length, 2); Assert.assertSame(returns[0].getClass(), BBoolean.class); Assert.assertTrue(((BBoolean) returns[0]).booleanValue(), "Expected decimals to be the same"); Assert.assertSame(returns[1].getClass(), BBoolean.class); Assert.assertTrue(((BBoolean) returns[1]).booleanValue(), "Expected decimals to be readonly"); } @Test(dataProvider = "stringValues") public void testStringFreeze(String i) { BValue[] returns = BRunUtil.invoke(result, "testStringFreeze", new BValue[]{new BString(i)}); Assert.assertEquals(returns.length, 2); Assert.assertSame(returns[0].getClass(), BBoolean.class); Assert.assertTrue(((BBoolean) returns[0]).booleanValue(), "Expected strings to be the same"); Assert.assertSame(returns[1].getClass(), BBoolean.class); Assert.assertTrue(((BBoolean) returns[1]).booleanValue(), "Expected strings to be readonly"); } @Test public void testRecordWithEnumFreeze() { BRunUtil.invoke(result, "testRecordWithEnumFreeze"); 
} @Test public void testBasicTypeNullableUnionFreeze() { BValue[] returns = BRunUtil.invoke(result, "testBasicTypeNullableUnionFreeze", new BValue[]{}); Assert.assertEquals(returns.length, 2); Assert.assertSame(returns[0].getClass(), BBoolean.class); Assert.assertTrue(((BBoolean) returns[0]).booleanValue(), "Expected values to be the same"); Assert.assertSame(returns[1].getClass(), BBoolean.class); Assert.assertTrue(((BBoolean) returns[1]).booleanValue(), "Expected values to be readonly"); } @Test public void testBasicTypeUnionFreeze() { BValue[] returns = BRunUtil.invoke(result, "testBasicTypeUnionFreeze", new BValue[]{}); Assert.assertEquals(returns.length, 2); Assert.assertSame(returns[0].getClass(), BBoolean.class); Assert.assertTrue(((BBoolean) returns[0]).booleanValue(), "Expected values to be the same"); Assert.assertSame(returns[1].getClass(), BBoolean.class); Assert.assertTrue(((BBoolean) returns[1]).booleanValue(), "Expected values to be readonly"); } @Test public void testBasicTypesAsJsonFreeze() { BValue[] returns = BRunUtil.invoke(result, "testBasicTypesAsJsonFreeze", new BValue[0]); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BBoolean.class); Assert.assertTrue(((BBoolean) returns[0]).booleanValue(), "Expected json values to be the same"); } @Test public void testIsFrozenOnStructuralTypes() { BValue[] returns = BRunUtil.invoke(result, "testIsFrozenOnStructuralTypes", new BValue[0]); Assert.assertEquals(returns.length, 2); Assert.assertSame(returns[0].getClass(), BBoolean.class); Assert.assertFalse(((BBoolean) returns[0]).booleanValue(), "Expected values to be identified as not frozen"); Assert.assertSame(returns[1].getClass(), BBoolean.class); Assert.assertTrue(((BBoolean) returns[1]).booleanValue(), "Expected values to be identified as frozen"); } @Test(expectedExceptions = BLangRuntimeException.class, expectedExceptionsMessageRegExp = "error: \\{ballerina/lang.array}InvalidUpdate \\{\"message\"" + ":\"modification 
not allowed on readonly value\".*", dataProvider = "frozenBasicTypeArrayModificationFunctions") public void testFrozenBasicTypeArrayModification(String frozenBasicTypeArrayModificationFunction) { BRunUtil.invoke(result, frozenBasicTypeArrayModificationFunction, new BValue[0]); } @Test(expectedExceptions = BLangRuntimeException.class, expectedExceptionsMessageRegExp = "error: \\{ballerina/lang.array}InvalidUpdate \\{\"message\":" + "\"modification not allowed on readonly value.*") public void testFrozenDecimalArrayModification() { BRunUtil.invoke(result, "testFrozenDecimalArrayModification", new BValue[0]); } @Test(expectedExceptions = BLangRuntimeException.class, expectedExceptionsMessageRegExp = "error: \\{ballerina/lang.array}InvalidUpdate \\{\"message\":\"" + "modification not allowed on readonly value\"}.*") public void testFrozenJsonArrayModification() { BRunUtil.invoke(result, "testFrozenJsonArrayModification", new BValue[0]); } @Test(expectedExceptions = BLangRuntimeException.class, expectedExceptionsMessageRegExp = "error: \\{ballerina/lang.map}InvalidUpdate \\{\"message\":" + "\"Invalid map insertion: modification not allowed on readonly value\".*") public void testFrozenJsonModification() { BRunUtil.invoke(result, "testFrozenJsonModification", new BValue[0]); } @Test(expectedExceptions = BLangRuntimeException.class, expectedExceptionsMessageRegExp = "error: \\{ballerina/lang.map}InvalidUpdate \\{\"message\":" + "\"Invalid map insertion: modification not allowed on readonly value\".*") public void testAdditionToFrozenJson() { BRunUtil.invoke(result, "testAdditionToFrozenJson", new BValue[0]); } @Test(expectedExceptions = BLangRuntimeException.class, expectedExceptionsMessageRegExp = "error: \\{ballerina/lang.map}InvalidUpdate \\{\"message\":\"failed " + "to remove element from map: modification not allowed on readonly value.*") public void testRemovalFromFrozenJson() { BRunUtil.invoke(result, "testRemovalFromFrozenJson", new BValue[0]); } 
@Test(expectedExceptions = BLangRuntimeException.class, expectedExceptionsMessageRegExp = "error: \\{ballerina/lang.map}InvalidUpdate \\{\"message\":" + "\"Invalid map insertion: modification not allowed on readonly value\".*") public void testFrozenInnerJsonModification() { BRunUtil.invoke(result, "testFrozenInnerJsonModification", new BValue[0]); } @Test(expectedExceptions = BLangRuntimeException.class, expectedExceptionsMessageRegExp = "error: \\{ballerina/lang.map}InvalidUpdate \\{\"message\":" + "\"Invalid map insertion: modification not allowed on readonly value\".*") public void testAdditionToFrozenInnerJson() { BRunUtil.invoke(result, "testAdditionToFrozenInnerJson", new BValue[0]); } @Test(expectedExceptions = BLangRuntimeException.class, expectedExceptionsMessageRegExp = "error: \\{ballerina/lang.map}InvalidUpdate \\{\"message\":" + "\"failed to remove element from map: modification not allowed on readonly value\".*") public void testRemovalFromFrozenInnerJson() { BRunUtil.invoke(result, "testRemovalFromFrozenInnerJson", new BValue[0]); } @Test(expectedExceptions = BLangRuntimeException.class, expectedExceptionsMessageRegExp = "error: \\{ballerina/lang.xml}XMLOperationError \\{\"message\":" + "\"Failed to set children to xml element: modification not allowed on readonly value\".*") public void testFrozenXmlSetChildren() { BRunUtil.invoke(result, "testFrozenXmlSetChildren", new BValue[0]); } @Test(expectedExceptions = BLangRuntimeException.class, expectedExceptionsMessageRegExp = "error: \\{ballerina/lang.xml}XMLOperationError \\{\"message\":" + "\"Failed to set children to xml element: modification not allowed on readonly value\".*") public void testFrozenXmlSetChildrenDeep() { BRunUtil.invoke(result, "testFrozenXmlSetChildrenDeep", new BValue[0]); } @Test(expectedExceptions = BLangRuntimeException.class, expectedExceptionsMessageRegExp = "error: \\{ballerina/lang.map}InvalidUpdate \\{\"message\":\"" + "Invalid map insertion: modification not allowed on 
readonly value\".*") public void testFrozenMapUpdate() { BRunUtil.invoke(result, "testFrozenMapUpdate", new BValue[0]); } @Test(expectedExceptions = BLangRuntimeException.class, expectedExceptionsMessageRegExp = "error: \\{ballerina/lang.map}InvalidUpdate \\{\"message\":\"" + "failed to remove element from map: modification not allowed on readonly value\".*") public void testFrozenMapRemoval() { BRunUtil.invoke(result, "testFrozenMapRemoval", new BValue[0]); } @Test(expectedExceptions = BLangRuntimeException.class, expectedExceptionsMessageRegExp = "error: \\{ballerina/lang.map}InvalidUpdate \\{\"message\":" + "\"Failed to clear map: modification not allowed on readonly value\".*") public void testFrozenMapClear() { BRunUtil.invoke(result, "testFrozenMapClear", new BValue[0]); } @Test(expectedExceptions = BLangRuntimeException.class, expectedExceptionsMessageRegExp = "error: \\{ballerina/lang.map}InvalidUpdate \\{\"message\"" + ":\"Invalid map insertion: modification not allowed on readonly value\".*") public void testFrozenInnerMapUpdate() { BRunUtil.invoke(result, "testFrozenInnerMapUpdate", new BValue[0]); } @Test(expectedExceptions = BLangRuntimeException.class, expectedExceptionsMessageRegExp = "error: \\{ballerina/lang.map}InvalidUpdate \\{\"message\":" + "\"failed to remove element from map: modification not allowed on readonly value\".*") public void testFrozenInnerMapRemoval() { BRunUtil.invoke(result, "testFrozenInnerMapRemoval", new BValue[0]); } @Test(expectedExceptions = BLangRuntimeException.class, expectedExceptionsMessageRegExp = "error: \\{ballerina/lang.map}InvalidUpdate \\{\"message\":" + "\"Failed to clear map: modification not allowed on readonly value\".*") public void testFrozenInnerMapClear() { BRunUtil.invoke(result, "testFrozenInnerMapClear", new BValue[0]); } @Test(expectedExceptions = BLangRuntimeException.class, expectedExceptionsMessageRegExp = "error: \\{ballerina/lang.array}InvalidUpdate \\{\"message\":" + "\"modification not allowed 
on readonly value\".*") public void testFrozenAnyArrayAddition() { BRunUtil.invoke(result, "testFrozenAnyArrayAddition", new BValue[0]); } @Test(expectedExceptions = BLangRuntimeException.class, expectedExceptionsMessageRegExp = "error: \\{ballerina/lang.array}InvalidUpdate \\{\"message\":" + "\"modification not allowed on readonly value\".*") public void testFrozenAnyArrayUpdate() { BRunUtil.invoke(result, "testFrozenAnyArrayUpdate", new BValue[0]); } @Test(expectedExceptions = BLangRuntimeException.class, expectedExceptionsMessageRegExp = "error: \\{ballerina/lang.map}InherentTypeViolation \\{\"message\":" + "\"cannot update 'readonly' field 'name' in record of type '\\(Employee & readonly\\)'\".*") public void testFrozenAnyArrayElementUpdate() { BRunUtil.invoke(result, "testFrozenAnyArrayElementUpdate", new BValue[0]); } @Test(expectedExceptions = BLangRuntimeException.class, expectedExceptionsMessageRegExp = "error: \\{ballerina/lang.array}InvalidUpdate \\{\"message\":" + "\"modification not allowed on readonly value\".*") public void testFrozenTupleUpdate() { BRunUtil.invoke(result, "testFrozenTupleUpdate", new BValue[0]); } @Test public void testFrozenRecursiveTupleUpdate() { BRunUtil.invoke(result, "testFrozenRecursiveTupleUpdate"); } @Test(expectedExceptions = BLangRuntimeException.class, expectedExceptionsMessageRegExp = "error: \\{ballerina/lang.map}InherentTypeViolation \\{\"message\":" + "\"cannot update 'readonly' field 'name' in record of type '\\(DeptEmployee & readonly\\)'\".*") public void testFrozenRecordUpdate() { BRunUtil.invoke(result, "testFrozenRecordUpdate", new BValue[0]); } @Test(expectedExceptions = BLangRuntimeException.class, expectedExceptionsMessageRegExp = "error: \\{ballerina/lang.map}InherentTypeViolation \\{\"message\":" + "\"cannot update 'readonly' field 'code' in record of type '\\(Dept & readonly\\)'\".*") public void testFrozenInnerRecordUpdate() { BRunUtil.invoke(result, "testFrozenInnerRecordUpdate", new BValue[0]); } 
@Test(expectedExceptions = BLangRuntimeException.class, expectedExceptionsMessageRegExp = "error: \\{ballerina/lang.table}InvalidUpdate " + "\\{\"message\":\"modification not allowed on readonly value\"}.*") public void testFrozenTableAddition() { BRunUtil.invoke(result, "testFrozenTableAddition", new BValue[0]); } @Test(expectedExceptions = BLangRuntimeException.class, expectedExceptionsMessageRegExp = "error: \\{ballerina/lang.table}InvalidUpdate " + "\\{\"message\":\"modification not allowed on readonly value\"}.*") public void testFrozenTableRemoval() { BRunUtil.invoke(result, "testFrozenTableRemoval", new BValue[0]); } @Test public void testSimpleUnionFreeze() { BValue[] returns = BRunUtil.invoke(result, "testSimpleUnionFreeze", new BValue[0]); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BBoolean.class); Assert.assertTrue(((BBoolean) returns[0]).booleanValue(), "Expected values to be identified as frozen"); } @Test(description = "test a map of type not purely anydata, a combination of anydata and non-anydata") public void testValidComplexMapFreeze() { BValue[] returns = BRunUtil.invoke(result, "testValidComplexMapFreeze", new BValue[0]); Assert.assertEquals(returns.length, 2); Assert.assertSame(returns[0].getClass(), BString.class); Assert.assertEquals(returns[0].stringValue(), FREEZE_SUCCESSFUL); Assert.assertSame(returns[1].getClass(), BBoolean.class); Assert.assertTrue(((BBoolean) returns[1]).booleanValue(), "Expected value to be readonly since no error " + "was encountered"); } @Test public void testRecursiveTupleFreeze() { BRunUtil.invoke(result, "testRecursiveTupleFreeze"); } @Test(description = "test an array of type not purely anydata, a combination of anydata and non-anydata") public void testValidComplexArrayFreeze() { BValue[] returns = BRunUtil.invoke(result, "testValidComplexArrayFreeze", new BValue[0]); Assert.assertEquals(returns.length, 2); Assert.assertSame(returns[0].getClass(), BString.class); 
Assert.assertEquals(returns[0].stringValue(), FREEZE_SUCCESSFUL); Assert.assertSame(returns[1].getClass(), BBoolean.class); Assert.assertTrue(((BBoolean) returns[1]).booleanValue(), "Expected value to be readonly since no error " + "was encountered"); } @Test(description = "test a record of type not purely anydata, a combination of anydata and non-anydata") public void testValidComplexRecordFreeze() { BValue[] returns = BRunUtil.invoke(result, "testValidComplexRecordFreeze", new BValue[0]); Assert.assertEquals(returns.length, 2); Assert.assertSame(returns[0].getClass(), BString.class); Assert.assertEquals(returns[0].stringValue(), FREEZE_SUCCESSFUL); Assert.assertSame(returns[1].getClass(), BBoolean.class); Assert.assertTrue(((BBoolean) returns[1]).booleanValue(), "Expected value to be readonly since no error " + "was encountered"); } @Test(description = "test a tuple of type not purely anydata, a combination of anydata and non-anydata") public void testValidComplexTupleFreeze() { BValue[] returns = BRunUtil.invoke(result, "testValidComplexTupleFreeze", new BValue[0]); Assert.assertEquals(returns.length, 2); Assert.assertSame(returns[0].getClass(), BString.class); Assert.assertEquals(returns[0].stringValue(), FREEZE_SUCCESSFUL); Assert.assertSame(returns[1].getClass(), BBoolean.class); Assert.assertTrue(((BBoolean) returns[1]).booleanValue(), "Expected value to be readonly since no error " + "was encountered"); } @Test(description = "test a union of member type not purely anydata, a combination of anydata and non-anydata") public void testValidComplexUnionFreeze() { BValue[] returns = BRunUtil.invoke(result, "testValidComplexUnionFreeze", new BValue[0]); Assert.assertEquals(returns.length, 2); Assert.assertSame(returns[0].getClass(), BString.class); Assert.assertEquals(returns[0].stringValue(), FREEZE_SUCCESSFUL); Assert.assertSame(returns[1].getClass(), BBoolean.class); Assert.assertTrue(((BBoolean) returns[1]).booleanValue(), "Expected value to be readonly since 
no error " + "was encountered"); } @Test public void testValidSelfReferencingValueFreeze() { BValue[] returns = BRunUtil.invoke(result, "testValidSelfReferencingValueFreeze", new BValue[0]); Assert.assertEquals(returns.length, 2); Assert.assertSame(returns[0].getClass(), BString.class); Assert.assertEquals(returns[0].stringValue(), FREEZE_SUCCESSFUL); Assert.assertSame(returns[1].getClass(), BBoolean.class); Assert.assertTrue(((BBoolean) returns[1]).booleanValue(), "Expected value to be readonly since no error " + "was encountered"); } @Test public void testStructureWithErrorValueFreeze() { BValue[] returns = BRunUtil.invoke(result, "testStructureWithErrorValueFreeze", new BValue[0]); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BBoolean.class); Assert.assertTrue(((BBoolean) returns[0]).booleanValue()); } @Test public void testFrozenValueUpdatePanicWithCheckTrap() { BValue[] returns = BRunUtil.invoke(result, "testFrozenValueUpdatePanicWithCheckTrap", new BValue[0]); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BError.class); Assert.assertEquals(((BMap<String, BString>) ((BError) returns[0]).getDetails()).get("message").stringValue(), "modification not allowed on readonly value"); } @Test public void testXMLItemsCloneReadOnly() { BRunUtil.invoke(result, "testXMLItemsCloneReadOnly"); } @Test public void testFreezeAndIsFrozenSemanticsNegativeCases() { Assert.assertEquals(semanticsNegativeResult.getErrorCount(), 20); int index = 0; validateError(semanticsNegativeResult, index++, "incompatible types: expected 'ballerina/lang.value:0.0.0:Cloneable', found 'PersonObj'", 19, 39); validateError(semanticsNegativeResult, index++, "incompatible types: expected 'ballerina/lang.value:0.0.0:Cloneable', found 'map<PersonObj>'", 24, 9); validateError(semanticsNegativeResult, index++, "incompatible types: expected 'ballerina/lang.value:0.0.0:Cloneable', found 'PersonObj[]'", 29, 9); 
validateError(semanticsNegativeResult, index++, "incompatible types: expected 'ballerina/lang.value:0.0.0:Cloneable', found '" + "(PersonObjTwo|PersonObj)?[]'", 32, 10); validateError(semanticsNegativeResult, index++, "incompatible types: expected 'ballerina/lang.value:0.0.0:Cloneable', found '[" + "(PersonObj|PersonObjTwo),PersonObjTwo]'", 39, 9); validateError(semanticsNegativeResult, index++, "incompatible types: expected 'ballerina/lang.value:0.0.0:Cloneable', found 'Department'", 44, 9); validateError(semanticsNegativeResult, index++, "incompatible types: expected 'ballerina/lang.value:0.0.0:Cloneable', found 'map<" + "(string|PersonObj)>'", 49, 32); validateError(semanticsNegativeResult, index++, "incompatible types: expected 'ballerina/lang.value:0.0.0:Cloneable', found 'map<[(string|PersonObj)," + "(FreezeAllowedDepartment|float)]>'", 52, 26); validateError(semanticsNegativeResult, index++, "incompatible types: expected 'ballerina/lang.value:0.0.0:Cloneable', found '" + "(boolean|PersonObj|float)?[]'", 55, 39); validateError(semanticsNegativeResult, index++, "incompatible types: expected 'ballerina/lang.value:0.0.0:Cloneable', found '" + "(boolean|PersonObj|float)?[]'", 57, 16); validateError(semanticsNegativeResult, index++, "incompatible types: expected 'ballerina/lang.value:0.0.0:Cloneable', found '[(string|PersonObj)," + "(FreezeAllowedDepartment|float)]'", 60, 60); validateError(semanticsNegativeResult, index++, "incompatible types: expected 'ballerina/lang.value:0.0.0:Cloneable', found " + "'FreezeAllowedDepartment'", 63, 35); validateError(semanticsNegativeResult, index++, "incompatible types: expected 'ballerina/lang.value:0.0.0:Cloneable', found '(string|PersonObj)'", 66 , 27); validateError(semanticsNegativeResult, index++, "incompatible types: expected 'any', found 'error'", 71, 9); validateError(semanticsNegativeResult, index++, "incompatible types: expected 'ballerina/lang.value:0.0.0:Cloneable', found 'map<(string|PersonObj)" + ">'", 81, 39); 
validateError(semanticsNegativeResult, index++, "incompatible types: expected 'ballerina/lang.value:0.0.0:Cloneable', found '" + "(string|stream<int>|float)?[]'", 92, 47); validateError(semanticsNegativeResult, index++, "incompatible types: expected 'ballerina/lang.value:0.0.0:Cloneable', found " + "'FreezeAllowedDepartment2'", 100, 42); validateError(semanticsNegativeResult, index++, "incompatible types: expected 'ballerina/lang.value:0.0.0:Cloneable', found '[int," + "(string|PersonObj|float),boolean]'", 106, 21); validateError(semanticsNegativeResult, index++, "incompatible types: expected 'ballerina/lang.value:0.0.0:Cloneable', found '" + "(int|Department|PersonObj)'", 113, 42); validateError(semanticsNegativeResult, index, "incompatible types: expected 'anydata', found '((anydata & readonly)|error)'", 120, 19); } @Test public void testFreezeAndIsFrozenNegativeCases() { Assert.assertEquals(negativeResult.getErrorCount(), 2); validateError(negativeResult, 0, "variable 'ageRec' is not initialized", 24, 10); validateError(negativeResult, 1, "variable 'ageRec' is not initialized", 24, 10); } @DataProvider(name = "booleanValues") public Object[][] booleanValues() { return new Object[][]{ {true}, {false} }; } @DataProvider(name = "intValues") public Object[][] intValues() { return new Object[][]{ {-123457}, {0}, {1}, {53456032} }; } @DataProvider(name = "byteValues") public Object[][] byteValues() { return new Object[][]{ {0}, {1}, {255} }; } @DataProvider(name = "floatValues") public Object[][] floatValues() { return new Object[][]{ {-1234.57}, {0.0}, {1.1}, {53456.032} }; } @DataProvider(name = "decimalValues") public Object[][] decimalValues() { return new Object[][]{ {new BigDecimal("-1234.57", MathContext.DECIMAL128)}, {new BigDecimal("53456.032", MathContext.DECIMAL128)}, {new BigDecimal("0.0", MathContext.DECIMAL128)}, {new BigDecimal("1.1", MathContext.DECIMAL128)} }; } @DataProvider(name = "stringValues") public Object[][] stringValues() { return new 
Object[][]{ {"a"}, {"Hello, from Ballerina!"} }; } @DataProvider(name = "frozenBasicTypeArrayModificationFunctions") public Object[][] frozenBasicTypeArrayModificationFunctions() { return new Object[][]{ {"testFrozenIntArrayModification"}, {"testFrozenByteArrayModification"}, {"testFrozenBooleanArrayModification"}, {"testFrozenFloatArrayModification"}, {"testFrozenStringArrayModification"} }; } @Test }
@StefanRRichter , thanks a lot for your careful review. It is a real bug. I have fixed it. In addition, I have added test cases by making BitSetTest parameterized. So the clear method is tested against different amounts of bits.
public void clear() { int index = 0; while (index < byteLength) { memorySegment.putLong(offset + index, 0L); index += 8; } while (index < byteLength) { memorySegment.put(offset + index, (byte) 0); } }
while (index < byteLength) {
public void clear() { int index = 0; while (index + 8 <= byteLength) { memorySegment.putLong(offset + index, 0L); index += 8; } while (index < byteLength) { memorySegment.put(offset + index, (byte) 0); index += 1; } }
class BitSet { private MemorySegment memorySegment; private int offset; private int byteLength; private int bitLength; private final int BYTE_INDEX_MASK = 0x00000007; public BitSet(int byteSize) { Preconditions.checkArgument(byteSize > 0, "bits size should be greater than 0."); this.byteLength = byteSize; this.bitLength = byteSize << 3; } public void setMemorySegment(MemorySegment memorySegment, int offset) { Preconditions.checkArgument(memorySegment != null, "MemorySegment can not be null."); Preconditions.checkArgument(offset >= 0, "Offset should be positive integer."); Preconditions.checkArgument(offset + byteLength <= memorySegment.size(), "Could not set MemorySegment, the remain buffers is not enough."); this.memorySegment = memorySegment; this.offset = offset; } /** * Sets the bit at specified index. * * @param index - position */ public void set(int index) { Preconditions.checkArgument(index < bitLength && index >= 0); int byteIndex = index >>> 3; byte current = memorySegment.get(offset + byteIndex); current |= (1 << (index & BYTE_INDEX_MASK)); memorySegment.put(offset + byteIndex, current); } /** * Returns true if the bit is set in the specified index. * * @param index - position * @return - value at the bit position */ public boolean get(int index) { Preconditions.checkArgument(index < bitLength && index >= 0); int byteIndex = index >>> 3; byte current = memorySegment.get(offset + byteIndex); return (current & (1 << (index & BYTE_INDEX_MASK))) != 0; } /** * Number of bits */ public int bitSize() { return bitLength; } /** * Clear the bit set. */ @Override public String toString() { StringBuilder output = new StringBuilder(); output.append("BitSet:\n"); output.append("\tMemorySegment:").append(memorySegment.size()).append("\n"); output.append("\tOffset:").append(offset).append("\n"); output.append("\tLength:").append(byteLength).append("\n"); return output.toString(); } }
class BitSet { private MemorySegment memorySegment; private int offset; private int byteLength; private int bitLength; private final int BYTE_INDEX_MASK = 0x00000007; public BitSet(int byteSize) { Preconditions.checkArgument(byteSize > 0, "bits size should be greater than 0."); this.byteLength = byteSize; this.bitLength = byteSize << 3; } public void setMemorySegment(MemorySegment memorySegment, int offset) { Preconditions.checkArgument(memorySegment != null, "MemorySegment can not be null."); Preconditions.checkArgument(offset >= 0, "Offset should be positive integer."); Preconditions.checkArgument(offset + byteLength <= memorySegment.size(), "Could not set MemorySegment, the remain buffers is not enough."); this.memorySegment = memorySegment; this.offset = offset; } /** * Sets the bit at specified index. * * @param index - position */ public void set(int index) { Preconditions.checkArgument(index < bitLength && index >= 0); int byteIndex = index >>> 3; byte current = memorySegment.get(offset + byteIndex); current |= (1 << (index & BYTE_INDEX_MASK)); memorySegment.put(offset + byteIndex, current); } /** * Returns true if the bit is set in the specified index. * * @param index - position * @return - value at the bit position */ public boolean get(int index) { Preconditions.checkArgument(index < bitLength && index >= 0); int byteIndex = index >>> 3; byte current = memorySegment.get(offset + byteIndex); return (current & (1 << (index & BYTE_INDEX_MASK))) != 0; } /** * Number of bits */ public int bitSize() { return bitLength; } /** * Clear the bit set. */ @Override public String toString() { StringBuilder output = new StringBuilder(); output.append("BitSet:\n"); output.append("\tMemorySegment:").append(memorySegment.size()).append("\n"); output.append("\tOffset:").append(offset).append("\n"); output.append("\tLength:").append(byteLength).append("\n"); return output.toString(); } }
@dpcollins-google On this constructor argument change, I replaced the Beam code with the default values by reading the diff in https://github.com/googleapis/java-pubsublite/pull/475/files. But the PartitionCountWatchingPublisher constructor changed its accessor from public to package-default. Do you have a suggestion on how to adopt the change here? ``` > Task :sdks:java:io:google-cloud-platform:compileJava /Users/suztomo/beam/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/pubsublite/Publishers.java:51: error: PartitionCountWatchingPublisher(PartitionPublisherFactory, com.google.cloud.pubsublite.internal.RoutingPolicy.Factory, com.google.cloud.pubsublite.internal.wire.PartitionCountWatcher.Factory) is not public in PartitionCountWatchingPublisher; cannot be accessed from outside package return new PartitionCountWatchingPublisher( ^ ```
/**
 * Creates the Pub/Sub Lite {@link Publisher} used by the Beam sink.
 *
 * <p>If the options carry a user-supplied publisher supplier it is used (after a runtime
 * type check); otherwise a partition-count-watching publisher is wired up for the topic.
 *
 * @param options publisher configuration, including the topic path and an optional supplier
 * @return a publisher returning {@code MessageMetadata} per published message
 * @throws ApiException if the underlying client cannot be created
 */
static Publisher<MessageMetadata> newPublisher(PublisherOptions options) throws ApiException {
    // Honor a user-supplied publisher if one was configured.
    SerializableSupplier<Object> supplier = options.publisherSupplier();
    if (supplier != null) {
        Object supplied = supplier.get();
        TypeToken<Publisher<MessageMetadata>> token = new TypeToken<Publisher<MessageMetadata>>() {};
        checkArgument(token.isSupertypeOf(supplied.getClass()));
        return (Publisher<MessageMetadata>) supplied;
    }
    TopicPath topic = options.topicPath();
    // The PartitionCountWatchingPublisher constructor is package-private since
    // googleapis/java-pubsublite#475; use the public PartitionCountWatchingPublisherSettings
    // builder instead, which supplies the default routing policy and partition-count
    // watcher internally.
    PartitionCountWatchingPublisherSettings.Builder publisherSettings =
        PartitionCountWatchingPublisherSettings.newBuilder()
            .setTopic(topic)
            .setPublisherFactory(
                partition ->
                    SinglePartitionPublisherBuilder.newBuilder()
                        .setTopic(topic)
                        .setPartition(partition)
                        .build())
            .setAdminClient(
                AdminClient.create(
                    AdminClientSettings.newBuilder().setRegion(topic.location().region()).build()));
    return publisherSettings.build().instantiate();
}
return new PartitionCountWatchingPublisher(
/**
 * Builds the Pub/Sub Lite {@link Publisher} for the sink: either the user-supplied one from
 * the options (type-checked at runtime) or a default partition-count-watching publisher.
 *
 * @param options publisher configuration, including the topic path and an optional supplier
 * @return a publisher yielding {@code MessageMetadata} per published message
 * @throws ApiException if client construction fails
 */
static Publisher<MessageMetadata> newPublisher(PublisherOptions options) throws ApiException {
    // A user-provided supplier takes precedence over the default wiring.
    SerializableSupplier<Object> customSupplier = options.publisherSupplier();
    if (customSupplier != null) {
        Object candidate = customSupplier.get();
        TypeToken<Publisher<MessageMetadata>> expected =
            new TypeToken<Publisher<MessageMetadata>>() {};
        checkArgument(expected.isSupertypeOf(candidate.getClass()));
        return (Publisher<MessageMetadata>) candidate;
    }
    final TopicPath topic = options.topicPath();
    return PartitionCountWatchingPublisherSettings.newBuilder()
        .setTopic(topic)
        .setPublisherFactory(
            partition ->
                SinglePartitionPublisherBuilder.newBuilder()
                    .setTopic(topic)
                    .setPartition(partition)
                    .build())
        .setAdminClient(
            AdminClient.create(
                AdminClientSettings.newBuilder().setRegion(topic.location().region()).build()))
        .build()
        .instantiate();
}
/** Static factory helpers for constructing Pub/Sub Lite publishers used by the Beam sink. */
class Publishers {

    // Framework tag identifying Beam to the Pub/Sub Lite client.
    // NOTE(review): FRAMEWORK appears unused in this snippet — confirm it is consumed elsewhere.
    private static final Framework FRAMEWORK = Framework.of("BEAM");

    // Non-instantiable utility class.
    private Publishers() {}

    // Suppresses the unchecked cast of the user-supplied publisher object.
    // NOTE(review): annotation dangles here — the annotated method body appears to be
    // missing from this snippet; confirm against the full file.
    @SuppressWarnings("unchecked")
}
/** Static factory helpers for constructing Pub/Sub Lite publishers used by the Beam sink. */
class Publishers {

    // Framework tag identifying Beam to the Pub/Sub Lite client.
    // NOTE(review): FRAMEWORK appears unused in this snippet — confirm it is consumed elsewhere.
    private static final Framework FRAMEWORK = Framework.of("BEAM");

    // Non-instantiable utility class.
    private Publishers() {}

    // Suppresses the unchecked cast of the user-supplied publisher object.
    // NOTE(review): annotation dangles here — the annotated method body appears to be
    // missing from this snippet; confirm against the full file.
    @SuppressWarnings("unchecked")
}
Again, this just makes the code less readable. `ByteArrayOutputStream.close()` is documented as a no-op, so calling it (and catching its IOException) has no effect.
/**
 * Consumes one Netty message and folds it into the AWS proxy response.
 *
 * <p>HttpResponse sets status and headers; HttpContent and FileRegion accumulate the body;
 * LastHttpContent finalizes the body (base64 for binary content types) and completes the
 * future. The message is always released in the finally block.
 *
 * @param msg a Netty HttpResponse, HttpContent, FileRegion, or LastHttpContent
 */
public void handleMessage(Object msg) {
    try {
        if (msg instanceof HttpResponse) {
            HttpResponse res = (HttpResponse) msg;
            responseBuilder.setStatusCode(res.status().code());
            // ALB responses carry a status description alongside the code.
            if (request.getRequestSource() == AwsProxyRequest.RequestSource.ALB) {
                responseBuilder.setStatusDescription(res.status().reasonPhrase());
            }
            responseBuilder.setMultiValueHeaders(new Headers());
            for (String name : res.headers().names()) {
                for (String v : res.headers().getAll(name)) {
                    responseBuilder.getMultiValueHeaders().add(name, v);
                }
            }
        }
        if (msg instanceof HttpContent) {
            HttpContent content = (HttpContent) msg;
            int readable = content.content().readableBytes();
            // Create the body buffer lazily on the first non-empty chunk.
            if (baos == null && readable > 0) {
                baos = createByteStream();
            }
            for (int i = 0; i < readable; i++) {
                baos.write(content.content().readByte());
            }
        }
        if (msg instanceof FileRegion) {
            FileRegion file = (FileRegion) msg;
            if (file.count() > 0 && file.transferred() < file.count()) {
                if (baos == null)
                    baos = createByteStream();
                if (byteChannel == null)
                    byteChannel = Channels.newChannel(baos);
                file.transferTo(byteChannel, file.transferred());
            }
        }
        if (msg instanceof LastHttpContent) {
            if (baos != null) {
                // Binary content must be base64-encoded for the proxy payload.
                if (isBinary(responseBuilder.getMultiValueHeaders().getFirst("Content-Type"))) {
                    responseBuilder.setBase64Encoded(true);
                    responseBuilder.setBody(Base64.getMimeEncoder().encodeToString(baos.toByteArray()));
                } else {
                    responseBuilder.setBody(new String(baos.toByteArray(), StandardCharsets.UTF_8));
                }
                // No close(): ByteArrayOutputStream.close() is a documented no-op, so the
                // former try/catch around it was dead code.
            }
            future.complete(responseBuilder);
        }
    } catch (Throwable ex) {
        future.completeExceptionally(ex);
    } finally {
        // Release Netty reference-counted messages regardless of outcome.
        if (msg != null) {
            ReferenceCountUtil.release(msg);
        }
    }
}
baos.close();
/**
 * Consumes one Netty message and folds it into the AWS proxy response.
 *
 * <p>HttpResponse sets status and headers; HttpContent and FileRegion accumulate the body;
 * LastHttpContent finalizes the body (base64 for binary content types) and completes the
 * future. The message is always released in the finally block.
 *
 * @param msg a Netty HttpResponse, HttpContent, FileRegion, or LastHttpContent
 */
public void handleMessage(Object msg) {
    try {
        if (msg instanceof HttpResponse) {
            HttpResponse res = (HttpResponse) msg;
            responseBuilder.setStatusCode(res.status().code());
            // ALB responses carry a status description alongside the code.
            if (request.getRequestSource() == AwsProxyRequest.RequestSource.ALB) {
                responseBuilder.setStatusDescription(res.status().reasonPhrase());
            }
            responseBuilder.setMultiValueHeaders(new Headers());
            for (String name : res.headers().names()) {
                for (String v : res.headers().getAll(name)) {
                    responseBuilder.getMultiValueHeaders().add(name, v);
                }
            }
        }
        if (msg instanceof HttpContent) {
            HttpContent content = (HttpContent) msg;
            int readable = content.content().readableBytes();
            // Create the body buffer lazily on the first non-empty chunk.
            if (baos == null && readable > 0) {
                baos = createByteStream();
            }
            for (int i = 0; i < readable; i++) {
                baos.write(content.content().readByte());
            }
        }
        if (msg instanceof FileRegion) {
            FileRegion file = (FileRegion) msg;
            if (file.count() > 0 && file.transferred() < file.count()) {
                if (baos == null)
                    baos = createByteStream();
                if (byteChannel == null)
                    byteChannel = Channels.newChannel(baos);
                file.transferTo(byteChannel, file.transferred());
            }
        }
        if (msg instanceof LastHttpContent) {
            if (baos != null) {
                // Binary content must be base64-encoded for the proxy payload.
                if (isBinary(responseBuilder.getMultiValueHeaders().getFirst("Content-Type"))) {
                    responseBuilder.setBase64Encoded(true);
                    responseBuilder.setBody(Base64.getMimeEncoder().encodeToString(baos.toByteArray()));
                } else {
                    responseBuilder.setBody(new String(baos.toByteArray(), StandardCharsets.UTF_8));
                }
            }
            future.complete(responseBuilder);
        }
    } catch (Throwable ex) {
        future.completeExceptionally(ex);
    } finally {
        // Release Netty reference-counted messages regardless of outcome.
        if (msg != null) {
            ReferenceCountUtil.release(msg);
        }
    }
}
/**
 * Accumulates Netty HTTP response parts and completes a future with the assembled
 * {@link AwsProxyResponse}.
 */
class NettyResponseHandler implements VirtualResponseHandler {
    // Accumulates status, headers, and body as response parts arrive.
    AwsProxyResponse responseBuilder = new AwsProxyResponse();
    // Lazily created body buffer; null until the first non-empty content chunk.
    ByteArrayOutputStream baos;
    // Channel view over baos, used for FileRegion transfers.
    WritableByteChannel byteChannel;
    // The originating proxy request (used to detect ALB-sourced requests).
    final AwsProxyRequest request;
    // Completed when the last content arrives, or exceptionally on error/close.
    CompletableFuture<AwsProxyResponse> future = new CompletableFuture<>();

    public NettyResponseHandler(AwsProxyRequest request) {
        this.request = request;
    }

    public CompletableFuture<AwsProxyResponse> getFuture() {
        return future;
    }

    // NOTE(review): duplicated @Override — a method implementation appears to be missing
    // between these annotations in this snippet; confirm against the full file.
    @Override
    @Override
    public void close() {
        // If the connection closes before the response completed, fail the future.
        if (!future.isDone())
            future.completeExceptionally(new RuntimeException("Connection closed"));
    }
}
/**
 * Accumulates Netty HTTP response parts and completes a future with the assembled
 * {@link AwsProxyResponse}.
 */
class NettyResponseHandler implements VirtualResponseHandler {
    // Accumulates status, headers, and body as response parts arrive.
    AwsProxyResponse responseBuilder = new AwsProxyResponse();
    // Lazily created body buffer; null until the first non-empty content chunk.
    ByteArrayOutputStream baos;
    // Channel view over baos, used for FileRegion transfers.
    WritableByteChannel byteChannel;
    // The originating proxy request (used to detect ALB-sourced requests).
    final AwsProxyRequest request;
    // Completed when the last content arrives, or exceptionally on error/close.
    CompletableFuture<AwsProxyResponse> future = new CompletableFuture<>();

    public NettyResponseHandler(AwsProxyRequest request) {
        this.request = request;
    }

    public CompletableFuture<AwsProxyResponse> getFuture() {
        return future;
    }

    // NOTE(review): duplicated @Override — a method implementation appears to be missing
    // between these annotations in this snippet; confirm against the full file.
    @Override
    @Override
    public void close() {
        // If the connection closes before the response completed, fail the future.
        if (!future.isDone())
            future.completeExceptionally(new RuntimeException("Connection closed"));
    }
}
This leaves the reserved node with the application for 20-27 minutes (until it is expired by `ReservationExpirer`), which is especially unfortunate in CD, where we want to fail integration tests when some nodes are stuck in a non-active state. We should prepare the application again with the node unmarked as wantToRetire, or move the node directly to dirty if that is safe. FYI: @hmusum
/**
 * Try a redeployment to effect the chosen move.
 *
 * @return true if the move was done, false if it couldn't be
 */
private boolean deployTo(Move move) {
    ApplicationId application = move.node.allocation().get().owner();
    try (MaintenanceDeployment deployment = new MaintenanceDeployment(application, deployer, nodeRepository())) {
        if ( ! deployment.isValid()) return false;

        // Mark the node to move as wantToRetire so prepare allocates a replacement for it.
        boolean couldMarkRetiredNow = markWantToRetire(move.node, true);
        if ( ! couldMarkRetiredNow) return false;

        try {
            if ( ! deployment.prepare()) return false;
            // Only activate if prepare actually reserved a node on the intended target host.
            if (nodeRepository().getNodes(application, Node.State.reserved).stream()
                    .noneMatch(node -> node.hasParent(move.toHost.hostname())))
                return false;
            if ( ! deployment.activate()) return false;

            log.info("Rebalancer redeployed " + application + " to " + move);
            return true;
        } finally {
            // NOTE(review): when we bail out after prepare() without activating, the reserved
            // node stays with the application until ReservationExpirer expires it (20-27 min).
            // Consider re-preparing with wantToRetire cleared, or moving the node directly to
            // dirty if that is safe, so it does not linger in a non-active state.
            markWantToRetire(move.node, false);
        }
    }
}
return false;
/**
 * Try a redeployment to effect the chosen move.
 *
 * @return true if the move was done, false if it couldn't be
 */
private boolean deployTo(Move move) {
    ApplicationId application = move.node.allocation().get().owner();
    try (MaintenanceDeployment deployment = new MaintenanceDeployment(application, deployer, nodeRepository())) {
        if ( ! deployment.isValid()) return false;

        // Mark the node to move as wantToRetire so prepare allocates a replacement for it.
        boolean couldMarkRetiredNow = markWantToRetire(move.node, true);
        if ( ! couldMarkRetiredNow) return false;

        try {
            if ( ! deployment.prepare()) return false;
            // Only activate if prepare actually reserved a node on the intended target host.
            if (nodeRepository().getNodes(application, Node.State.reserved).stream()
                    .noneMatch(node -> node.hasParent(move.toHost.hostname())))
                return false;
            if ( ! deployment.activate()) return false;

            log.info("Rebalancer redeployed " + application + " to " + move);
            return true;
        } finally {
            // NOTE(review): when we bail out after prepare() without activating, the reserved
            // node stays with the application until ReservationExpirer expires it (20-27 min).
            // Consider re-preparing with wantToRetire cleared, or moving the node directly to
            // dirty if that is safe, so it does not linger in a non-active state.
            markWantToRetire(move.node, false);
        }
    }
}
/**
 * Maintainer that periodically moves one tenant node between hosts to reduce allocation
 * skew across Docker hosts, redeploying the owning application to effect the move.
 */
class Rebalancer extends Maintainer {

    private final Deployer deployer;
    private final HostResourcesCalculator hostResourcesCalculator;
    private final Optional<HostProvisioner> hostProvisioner;
    private final Metric metric;
    private final Clock clock;

    public Rebalancer(Deployer deployer,
                      NodeRepository nodeRepository,
                      HostResourcesCalculator hostResourcesCalculator,
                      Optional<HostProvisioner> hostProvisioner,
                      Metric metric,
                      Clock clock,
                      Duration interval) {
        super(nodeRepository, interval);
        this.deployer = deployer;
        this.hostResourcesCalculator = hostResourcesCalculator;
        this.hostProvisioner = hostProvisioner;
        this.metric = metric;
        this.clock = clock;
    }

    @Override
    protected void maintain() {
        // Skip rebalancing when a host provisioner is present — presumably dynamically
        // provisioned zones are handled differently; TODO confirm.
        if (hostProvisioner.isPresent()) return;
        NodeList allNodes = nodeRepository().list();
        updateSkewMetric(allNodes);
        if ( ! zoneIsStable(allNodes)) return;
        Move bestMove = findBestMove(allNodes);
        if (bestMove == Move.none) return;
        // NOTE(review): deployTo is not present in this snippet — it is defined elsewhere
        // in the full file.
        deployTo(bestMove);
    }

    /** We do this here rather than in MetricsReporter because it is expensive and frequent updates are unnecessary */
    private void updateSkewMetric(NodeList allNodes) {
        DockerHostCapacity capacity = new DockerHostCapacity(allNodes, hostResourcesCalculator);
        double totalSkew = 0;
        int hostCount = 0;
        for (Node host : allNodes.nodeType((NodeType.host)).state(Node.State.active)) {
            hostCount++;
            totalSkew += Node.skew(host.flavor().resources(), capacity.freeCapacityOf(host));
        }
        metric.set("hostedVespa.docker.skew", totalSkew/hostCount, null);
    }

    /** A zone is stable when no active node is retired or marked wantToRetire. */
    private boolean zoneIsStable(NodeList allNodes) {
        NodeList active = allNodes.state(Node.State.active);
        if (active.stream().anyMatch(node -> node.allocation().get().membership().retired())) return false;
        if (active.stream().anyMatch(node -> node.status().wantToRetire())) return false;
        return true;
    }

    /**
     * Find the best move to reduce allocation skew and returns it.
     * Returns Move.none if no moves can be made to reduce skew.
     */
    private Move findBestMove(NodeList allNodes) {
        DockerHostCapacity capacity = new DockerHostCapacity(allNodes, hostResourcesCalculator);
        Move bestMove = Move.none;
        for (Node node : allNodes.nodeType(NodeType.tenant).state(Node.State.active)) {
            if (node.parentHostname().isEmpty()) continue;
            for (Node toHost : allNodes.nodeType(NodeType.host).state(NodePrioritizer.ALLOCATABLE_HOST_STATES)) {
                if (toHost.hostname().equals(node.parentHostname().get())) continue; // already there
                if ( ! capacity.freeCapacityOf(toHost).satisfies(node.flavor().resources())) continue; // doesn't fit

                double skewReductionAtFromHost = skewReductionByRemoving(node, allNodes.parentOf(node).get(), capacity);
                double skewReductionAtToHost = skewReductionByAdding(node, toHost, capacity);
                double netSkewReduction = skewReductionAtFromHost + skewReductionAtToHost;
                if (netSkewReduction > bestMove.netSkewReduction)
                    bestMove = new Move(node, toHost, netSkewReduction);
            }
        }
        return bestMove;
    }

    /** Returns true only if this operation changes the state of the wantToRetire flag */
    private boolean markWantToRetire(Node node, boolean wantToRetire) {
        try (Mutex lock = nodeRepository().lock(node)) {
            Optional<Node> nodeToMove = nodeRepository().getNode(node.hostname());
            if (nodeToMove.isEmpty()) return false;
            if (nodeToMove.get().state() != Node.State.active) return false;
            if (node.status().wantToRetire() == wantToRetire) return false;
            nodeRepository().write(nodeToMove.get().withWantToRetire(wantToRetire, Agent.system, clock.instant()), lock);
            return true;
        }
    }

    // NOTE(review): a "Try a redeployment to effect the chosen move" javadoc dangled here
    // before skewReductionByRemoving — the deployTo method it documented appears to be
    // missing from this snippet; confirm against the full file.

    /** Skew change at the source host if this node were removed from it (positive = reduction). */
    private double skewReductionByRemoving(Node node, Node fromHost, DockerHostCapacity capacity) {
        NodeResources freeHostCapacity = capacity.freeCapacityOf(fromHost);
        double skewBefore = Node.skew(fromHost.flavor().resources(), freeHostCapacity);
        double skewAfter = Node.skew(fromHost.flavor().resources(), freeHostCapacity.add(node.flavor().resources().justNumbers()));
        return skewBefore - skewAfter;
    }

    /** Skew change at the target host if this node were added to it (positive = reduction). */
    private double skewReductionByAdding(Node node, Node toHost, DockerHostCapacity capacity) {
        NodeResources freeHostCapacity = capacity.freeCapacityOf(toHost);
        double skewBefore = Node.skew(toHost.flavor().resources(), freeHostCapacity);
        double skewAfter = Node.skew(toHost.flavor().resources(), freeHostCapacity.subtract(node.flavor().resources().justNumbers()));
        return skewBefore - skewAfter;
    }

    /** A candidate move of one node to another host, valued by its net skew reduction. */
    private static class Move {

        static final Move none = new Move(null, null, 0);

        final Node node;
        final Node toHost;
        final double netSkewReduction;

        Move(Node node, Node toHost, double netSkewReduction) {
            this.node = node;
            this.toHost = toHost;
            this.netSkewReduction = netSkewReduction;
        }

        @Override
        public String toString() {
            return "move " +
                   ( node == null ? "none" : (node.hostname() + " to " + toHost + " [skew reduction " + netSkewReduction + "]"));
        }
    }

    /** Holds an application lock plus a prepared deployment for the duration of a move. */
    private static class MaintenanceDeployment implements Closeable {

        private static final Logger log = Logger.getLogger(MaintenanceDeployment.class.getName());

        private final ApplicationId application;
        private final Optional<Mutex> lock;
        private final Optional<Deployment> deployment;

        public MaintenanceDeployment(ApplicationId application, Deployer deployer, NodeRepository nodeRepository) {
            this.application = application;
            lock = tryLock(application, nodeRepository);
            deployment = tryDeployment(lock, application, deployer, nodeRepository);
        }

        /** Return whether this is - as yet - functional and can be used to carry out the deployment */
        public boolean isValid() { return deployment.isPresent(); }

        private Optional<Mutex> tryLock(ApplicationId application, NodeRepository nodeRepository) {
            try {
                // Short timeout: if the application is busy, skip it this round.
                return Optional.of(nodeRepository.lock(application, Duration.ofSeconds(1)));
            }
            catch (ApplicationLockException e) {
                return Optional.empty();
            }
        }

        private Optional<Deployment> tryDeployment(Optional<Mutex> lock,
                                                   ApplicationId application,
                                                   Deployer deployer,
                                                   NodeRepository nodeRepository) {
            if (lock.isEmpty()) return Optional.empty();
            if (nodeRepository.getNodes(application, Node.State.active).isEmpty()) return Optional.empty();
            return deployer.deployFromLocalActive(application);
        }

        public boolean prepare() { return doStep(() -> deployment.get().prepare()); }

        public boolean activate() { return doStep(() -> deployment.get().activate()); }

        private boolean doStep(Runnable action) {
            if ( ! isValid()) return false;
            try {
                action.run();
                return true;
            }
            catch (TransientException e) {
                log.log(LogLevel.INFO, "Failed to deploy " + application + " with a transient error: " +
                                       Exceptions.toMessageString(e));
                return false;
            }
            catch (RuntimeException e) {
                log.log(LogLevel.WARNING, "Exception on maintenance deploy of " + application, e);
                return false;
            }
        }

        @Override
        public void close() {
            lock.ifPresent(l -> l.close());
        }
    }
}
/**
 * Maintainer that periodically moves one tenant node between hosts to reduce allocation
 * skew across Docker hosts, redeploying the owning application to effect the move.
 */
class Rebalancer extends Maintainer {

    private final Deployer deployer;
    private final HostResourcesCalculator hostResourcesCalculator;
    private final Optional<HostProvisioner> hostProvisioner;
    private final Metric metric;
    private final Clock clock;

    public Rebalancer(Deployer deployer,
                      NodeRepository nodeRepository,
                      HostResourcesCalculator hostResourcesCalculator,
                      Optional<HostProvisioner> hostProvisioner,
                      Metric metric,
                      Clock clock,
                      Duration interval) {
        super(nodeRepository, interval);
        this.deployer = deployer;
        this.hostResourcesCalculator = hostResourcesCalculator;
        this.hostProvisioner = hostProvisioner;
        this.metric = metric;
        this.clock = clock;
    }

    @Override
    protected void maintain() {
        // Skip rebalancing when a host provisioner is present — presumably dynamically
        // provisioned zones are handled differently; TODO confirm.
        if (hostProvisioner.isPresent()) return;
        NodeList allNodes = nodeRepository().list();
        updateSkewMetric(allNodes);
        if ( ! zoneIsStable(allNodes)) return;
        Move bestMove = findBestMove(allNodes);
        if (bestMove == Move.none) return;
        // NOTE(review): deployTo is not present in this snippet — it is defined elsewhere
        // in the full file.
        deployTo(bestMove);
    }

    /** We do this here rather than in MetricsReporter because it is expensive and frequent updates are unnecessary */
    private void updateSkewMetric(NodeList allNodes) {
        DockerHostCapacity capacity = new DockerHostCapacity(allNodes, hostResourcesCalculator);
        double totalSkew = 0;
        int hostCount = 0;
        for (Node host : allNodes.nodeType((NodeType.host)).state(Node.State.active)) {
            hostCount++;
            totalSkew += Node.skew(host.flavor().resources(), capacity.freeCapacityOf(host));
        }
        metric.set("hostedVespa.docker.skew", totalSkew/hostCount, null);
    }

    /** A zone is stable when no active node is retired or marked wantToRetire. */
    private boolean zoneIsStable(NodeList allNodes) {
        NodeList active = allNodes.state(Node.State.active);
        if (active.stream().anyMatch(node -> node.allocation().get().membership().retired())) return false;
        if (active.stream().anyMatch(node -> node.status().wantToRetire())) return false;
        return true;
    }

    /**
     * Find the best move to reduce allocation skew and returns it.
     * Returns Move.none if no moves can be made to reduce skew.
     */
    private Move findBestMove(NodeList allNodes) {
        DockerHostCapacity capacity = new DockerHostCapacity(allNodes, hostResourcesCalculator);
        Move bestMove = Move.none;
        for (Node node : allNodes.nodeType(NodeType.tenant).state(Node.State.active)) {
            if (node.parentHostname().isEmpty()) continue;
            for (Node toHost : allNodes.nodeType(NodeType.host).state(NodePrioritizer.ALLOCATABLE_HOST_STATES)) {
                if (toHost.hostname().equals(node.parentHostname().get())) continue; // already there
                if ( ! capacity.freeCapacityOf(toHost).satisfies(node.flavor().resources())) continue; // doesn't fit

                double skewReductionAtFromHost = skewReductionByRemoving(node, allNodes.parentOf(node).get(), capacity);
                double skewReductionAtToHost = skewReductionByAdding(node, toHost, capacity);
                double netSkewReduction = skewReductionAtFromHost + skewReductionAtToHost;
                if (netSkewReduction > bestMove.netSkewReduction)
                    bestMove = new Move(node, toHost, netSkewReduction);
            }
        }
        return bestMove;
    }

    /** Returns true only if this operation changes the state of the wantToRetire flag */
    private boolean markWantToRetire(Node node, boolean wantToRetire) {
        try (Mutex lock = nodeRepository().lock(node)) {
            Optional<Node> nodeToMove = nodeRepository().getNode(node.hostname());
            if (nodeToMove.isEmpty()) return false;
            if (nodeToMove.get().state() != Node.State.active) return false;
            if (node.status().wantToRetire() == wantToRetire) return false;
            nodeRepository().write(nodeToMove.get().withWantToRetire(wantToRetire, Agent.system, clock.instant()), lock);
            return true;
        }
    }

    // NOTE(review): a "Try a redeployment to effect the chosen move" javadoc dangled here
    // before skewReductionByRemoving — the deployTo method it documented appears to be
    // missing from this snippet; confirm against the full file.

    /** Skew change at the source host if this node were removed from it (positive = reduction). */
    private double skewReductionByRemoving(Node node, Node fromHost, DockerHostCapacity capacity) {
        NodeResources freeHostCapacity = capacity.freeCapacityOf(fromHost);
        double skewBefore = Node.skew(fromHost.flavor().resources(), freeHostCapacity);
        double skewAfter = Node.skew(fromHost.flavor().resources(), freeHostCapacity.add(node.flavor().resources().justNumbers()));
        return skewBefore - skewAfter;
    }

    /** Skew change at the target host if this node were added to it (positive = reduction). */
    private double skewReductionByAdding(Node node, Node toHost, DockerHostCapacity capacity) {
        NodeResources freeHostCapacity = capacity.freeCapacityOf(toHost);
        double skewBefore = Node.skew(toHost.flavor().resources(), freeHostCapacity);
        double skewAfter = Node.skew(toHost.flavor().resources(), freeHostCapacity.subtract(node.flavor().resources().justNumbers()));
        return skewBefore - skewAfter;
    }

    /** A candidate move of one node to another host, valued by its net skew reduction. */
    private static class Move {

        static final Move none = new Move(null, null, 0);

        final Node node;
        final Node toHost;
        final double netSkewReduction;

        Move(Node node, Node toHost, double netSkewReduction) {
            this.node = node;
            this.toHost = toHost;
            this.netSkewReduction = netSkewReduction;
        }

        @Override
        public String toString() {
            return "move " +
                   ( node == null ? "none" : (node.hostname() + " to " + toHost + " [skew reduction " + netSkewReduction + "]"));
        }
    }

    /** Holds an application lock plus a prepared deployment for the duration of a move. */
    private static class MaintenanceDeployment implements Closeable {

        private static final Logger log = Logger.getLogger(MaintenanceDeployment.class.getName());

        private final ApplicationId application;
        private final Optional<Mutex> lock;
        private final Optional<Deployment> deployment;

        public MaintenanceDeployment(ApplicationId application, Deployer deployer, NodeRepository nodeRepository) {
            this.application = application;
            lock = tryLock(application, nodeRepository);
            deployment = tryDeployment(lock, application, deployer, nodeRepository);
        }

        /** Return whether this is - as yet - functional and can be used to carry out the deployment */
        public boolean isValid() { return deployment.isPresent(); }

        private Optional<Mutex> tryLock(ApplicationId application, NodeRepository nodeRepository) {
            try {
                // Short timeout: if the application is busy, skip it this round.
                return Optional.of(nodeRepository.lock(application, Duration.ofSeconds(1)));
            }
            catch (ApplicationLockException e) {
                return Optional.empty();
            }
        }

        private Optional<Deployment> tryDeployment(Optional<Mutex> lock,
                                                   ApplicationId application,
                                                   Deployer deployer,
                                                   NodeRepository nodeRepository) {
            if (lock.isEmpty()) return Optional.empty();
            if (nodeRepository.getNodes(application, Node.State.active).isEmpty()) return Optional.empty();
            return deployer.deployFromLocalActive(application);
        }

        public boolean prepare() { return doStep(() -> deployment.get().prepare()); }

        public boolean activate() { return doStep(() -> deployment.get().activate()); }

        private boolean doStep(Runnable action) {
            if ( ! isValid()) return false;
            try {
                action.run();
                return true;
            }
            catch (TransientException e) {
                log.log(LogLevel.INFO, "Failed to deploy " + application + " with a transient error: " +
                                       Exceptions.toMessageString(e));
                return false;
            }
            catch (RuntimeException e) {
                log.log(LogLevel.WARNING, "Exception on maintenance deploy of " + application, e);
                return false;
            }
        }

        @Override
        public void close() {
            lock.ifPresent(l -> l.close());
        }
    }
}
I'm not sure we need to be consistent across languages on this condition for setting headers, but I just want to point it out. LGTM otherwise.
/**
 * Builds the default {@link HttpPipeline} for the Metrics Advisor clients: authentication
 * (AAD bearer token or subscription/API key headers), user agent, request id, retry, date,
 * and any user-added policies.
 *
 * @param buildConfiguration the configuration store used by the user-agent policy
 * @return the assembled pipeline
 * @throws IllegalArgumentException if no credential was configured on the builder
 */
private HttpPipeline getDefaultHttpPipeline(Configuration buildConfiguration) {
    final List<HttpPipelinePolicy> policies = new ArrayList<>();
    if (tokenCredential != null) {
        policies.add(new BearerTokenAuthenticationPolicy(tokenCredential, DEFAULT_SCOPE));
    } else if (metricsAdvisorKeyCredential != null
        && (!CoreUtils.isNullOrEmpty(metricsAdvisorKeyCredential.getSubscriptionKey())
        || !CoreUtils.isNullOrEmpty(metricsAdvisorKeyCredential.getApiKey()))) {
        // Fix: without the null check above, building with no credential at all dereferenced
        // a null metricsAdvisorKeyCredential (NPE) instead of reaching the intended
        // IllegalArgumentException below.
        // NOTE(review): with '||', both headers are set when either key is present, so one of
        // them may carry a null/empty value — confirm this matches the other language SDKs.
        headers.put(OCP_APIM_SUBSCRIPTION_KEY, metricsAdvisorKeyCredential.getSubscriptionKey());
        headers.put(API_KEY, metricsAdvisorKeyCredential.getApiKey());
    } else {
        throw logger.logExceptionAsError(
            new IllegalArgumentException("Missing credential information while building a client."));
    }
    policies.add(new UserAgentPolicy(httpLogOptions.getApplicationId(), clientName, clientVersion,
        buildConfiguration));
    policies.add(new RequestIdPolicy());
    policies.add(new AddHeadersPolicy(headers));
    HttpPolicyProviders.addBeforeRetryPolicies(policies);
    policies.add(retryPolicy == null ? DEFAULT_RETRY_POLICY : retryPolicy);
    policies.add(new AddDatePolicy());
    policies.addAll(this.policies);
    HttpPolicyProviders.addAfterRetryPolicies(policies);
    return new HttpPipelineBuilder()
        .policies(policies.toArray(new HttpPipelinePolicy[0]))
        .httpClient(httpClient)
        .build();
}
|| !CoreUtils.isNullOrEmpty(metricsAdvisorKeyCredential.getApiKey())) {
/**
 * Builds the default {@link HttpPipeline} for the Metrics Advisor clients: authentication
 * (AAD bearer token or subscription/API key headers), user agent, request id, retry, date,
 * and any user-added policies.
 *
 * @param buildConfiguration the configuration store used by the user-agent policy
 * @return the assembled pipeline
 * @throws IllegalArgumentException if no credential was configured on the builder
 */
private HttpPipeline getDefaultHttpPipeline(Configuration buildConfiguration) {
    final List<HttpPipelinePolicy> policies = new ArrayList<>();
    if (tokenCredential != null) {
        policies.add(new BearerTokenAuthenticationPolicy(tokenCredential, DEFAULT_SCOPE));
    } else if (!CoreUtils.isNullOrEmpty(metricsAdvisorKeyCredential.getSubscriptionKey())
        // NOTE(review): with '||', both headers are set when either key is present, so one of
        // them may carry a null/empty value — confirm this matches the other language SDKs.
        // NOTE(review): if neither credential was ever set, metricsAdvisorKeyCredential is
        // null and this condition throws NPE before the IllegalArgumentException below can
        // fire — consider a null guard.
        || !CoreUtils.isNullOrEmpty(metricsAdvisorKeyCredential.getApiKey())) {
        headers.put(OCP_APIM_SUBSCRIPTION_KEY, metricsAdvisorKeyCredential.getSubscriptionKey());
        headers.put(API_KEY, metricsAdvisorKeyCredential.getApiKey());
    } else {
        throw logger.logExceptionAsError(
            new IllegalArgumentException("Missing credential information while building a client."));
    }
    policies.add(new UserAgentPolicy(httpLogOptions.getApplicationId(), clientName, clientVersion,
        buildConfiguration));
    policies.add(new RequestIdPolicy());
    policies.add(new AddHeadersPolicy(headers));
    HttpPolicyProviders.addBeforeRetryPolicies(policies);
    policies.add(retryPolicy == null ? DEFAULT_RETRY_POLICY : retryPolicy);
    policies.add(new AddDatePolicy());
    policies.addAll(this.policies);
    HttpPolicyProviders.addAfterRetryPolicies(policies);
    return new HttpPipelineBuilder()
        .policies(policies.toArray(new HttpPipelinePolicy[0]))
        .httpClient(httpClient)
        .build();
}
class MetricsAdvisorClientBuilder { private static final String ECHO_REQUEST_ID_HEADER = "x-ms-return-client-request-id"; private static final String CONTENT_TYPE_HEADER_VALUE = ContentType.APPLICATION_JSON; private static final String ACCEPT_HEADER = "Accept"; private static final String METRICSADVISOR_PROPERTIES = "azure-ai-metricsadvisor.properties"; private static final String NAME = "name"; private static final String VERSION = "version"; private static final RetryPolicy DEFAULT_RETRY_POLICY = new RetryPolicy("retry-after-ms", ChronoUnit.MILLIS); private static final String DEFAULT_SCOPE = "https: private final ClientLogger logger = new ClientLogger(MetricsAdvisorClientBuilder.class); private final List<HttpPipelinePolicy> policies; private final HttpHeaders headers; private final String clientName; private final String clientVersion; private String endpoint; private MetricsAdvisorKeyCredential metricsAdvisorKeyCredential; private TokenCredential tokenCredential; private HttpClient httpClient; private HttpLogOptions httpLogOptions; private HttpPipeline httpPipeline; private Configuration configuration; private RetryPolicy retryPolicy; private MetricsAdvisorServiceVersion version; static final String OCP_APIM_SUBSCRIPTION_KEY = "Ocp-Apim-Subscription-Key"; static final String API_KEY = "x-api-key"; /** * The constructor with defaults. */ public MetricsAdvisorClientBuilder() { policies = new ArrayList<>(); httpLogOptions = new HttpLogOptions(); Map<String, String> properties = CoreUtils.getProperties(METRICSADVISOR_PROPERTIES); clientName = properties.getOrDefault(NAME, "UnknownName"); clientVersion = properties.getOrDefault(VERSION, "UnknownVersion"); headers = new HttpHeaders() .put(ECHO_REQUEST_ID_HEADER, "true") .put(ACCEPT_HEADER, CONTENT_TYPE_HEADER_VALUE); } /** * Creates a {@link MetricsAdvisorClient} based on options set in the builder. Every time * {@code buildClient()} is called a new instance of {@link MetricsAdvisorClient} is created. 
* * <p> * If {@link * {@link * settings are ignored. * </p> * * @return A MetricsAdvisorClient with the options set from the builder. * @throws NullPointerException if {@link * {@link * @throws IllegalArgumentException if {@link */ public MetricsAdvisorClient buildClient() { return new MetricsAdvisorClient(buildAsyncClient()); } /** * Creates a {@link MetricsAdvisorAsyncClient} based on options set in the builder. Every time * {@code buildAsyncClient()} is called a new instance of {@link MetricsAdvisorAsyncClient} is created. * * <p> * If {@link * {@link * settings are ignored. * </p> * * @return A MetricsAdvisorAsyncClient with the options set from the builder. * @throws NullPointerException if {@link * {@link * has not been set. * @throws IllegalArgumentException if {@link */ public MetricsAdvisorAsyncClient buildAsyncClient() { Objects.requireNonNull(endpoint, "'Endpoint' is required and can not be null."); final Configuration buildConfiguration = (configuration == null) ? Configuration.getGlobalConfiguration().clone() : configuration; final MetricsAdvisorServiceVersion serviceVersion = version != null ? version : MetricsAdvisorServiceVersion.getLatest(); HttpPipeline pipeline = httpPipeline; if (pipeline == null) { pipeline = getDefaultHttpPipeline(buildConfiguration); } final AzureCognitiveServiceMetricsAdvisorRestAPIOpenAPIV2Impl advisorRestAPIOpenAPIV2 = new AzureCognitiveServiceMetricsAdvisorRestAPIOpenAPIV2ImplBuilder() .endpoint(endpoint) .pipeline(pipeline) .buildClient(); return new MetricsAdvisorAsyncClient(advisorRestAPIOpenAPIV2, serviceVersion); } /** * Sets the service endpoint for the Azure Metrics Advisor instance. * * @param endpoint The URL of the Azure Metrics Advisor instance service requests to and receive responses from. * * @return The updated MetricsAdvisorClientBuilder object. * @throws NullPointerException if {@code endpoint} is null * @throws IllegalArgumentException if {@code endpoint} cannot be parsed into a valid URL. 
*/ public MetricsAdvisorClientBuilder endpoint(String endpoint) { Objects.requireNonNull(endpoint, "'endpoint' cannot be null."); try { new URL(endpoint); } catch (MalformedURLException ex) { throw logger.logExceptionAsWarning(new IllegalArgumentException("'endpoint' must be a valid URL.", ex)); } if (endpoint.endsWith("/")) { this.endpoint = endpoint.substring(0, endpoint.length() - 1); } else { this.endpoint = endpoint; } return this; } /** * Sets the {@link TokenCredential} used to authenticate HTTP requests. * * @param tokenCredential {@link TokenCredential} used to authenticate HTTP requests. * @return The updated {@link MetricsAdvisorClientBuilder} object. * @throws NullPointerException If {@code tokenCredential} is null. */ public MetricsAdvisorClientBuilder credential(TokenCredential tokenCredential) { this.tokenCredential = Objects.requireNonNull(tokenCredential, "'tokenCredential' cannot be null."); return this; } /** * Sets the {@link MetricsAdvisorKeyCredential} to use when authenticating HTTP requests for this * MetricsAdvisorClientBuilder. * * @param metricsAdvisorKeyCredential {@link MetricsAdvisorKeyCredential} API key credential * * @return The updated MetricsAdvisorClientBuilder object. * @throws NullPointerException If {@code metricsAdvisorKeyCredential} is null. */ public MetricsAdvisorClientBuilder credential(MetricsAdvisorKeyCredential metricsAdvisorKeyCredential) { this.metricsAdvisorKeyCredential = Objects.requireNonNull(metricsAdvisorKeyCredential, "'metricsAdvisorKeyCredential' cannot be null."); return this; } /** * Sets the logging configuration for HTTP requests and responses. * * <p>If {@code logOptions} isn't provided, the default options will use {@link HttpLogDetailLevel * which will prevent logging.</p> * * @param logOptions The logging configuration to use when sending and receiving HTTP requests/responses. * * @return The updated MetricsAdvisorClientBuilder object. 
*/ public MetricsAdvisorClientBuilder httpLogOptions(HttpLogOptions logOptions) { this.httpLogOptions = logOptions; return this; } /** * Adds a policy to the set of existing policies that are executed after required policies. * * @param policy The retry policy for service requests. * * @return The updated MetricsAdvisorClientBuilder object. * @throws NullPointerException If {@code policy} is null. */ public MetricsAdvisorClientBuilder addPolicy(HttpPipelinePolicy policy) { policies.add(Objects.requireNonNull(policy, "'policy' cannot be null.")); return this; } /** * Sets the HTTP client to use for sending and receiving requests to and from the service. * * @param client The HTTP client to use for requests. * * @return The updated MetricsAdvisorClientBuilder object. */ public MetricsAdvisorClientBuilder httpClient(HttpClient client) { if (this.httpClient != null && client == null) { logger.info("HttpClient is being set to 'null' when it was previously configured."); } this.httpClient = client; return this; } /** * Sets the HTTP pipeline to use for the service client. * <p> * If {@code pipeline} is set, all other settings are ignored, aside from * {@link MetricsAdvisorClientBuilder * {@link MetricsAdvisorClient}. * * @param httpPipeline The HTTP pipeline to use for sending service requests and receiving responses. * * @return The updated MetricsAdvisorClientBuilder object. */ public MetricsAdvisorClientBuilder pipeline(HttpPipeline httpPipeline) { if (this.httpPipeline != null && httpPipeline == null) { logger.info("HttpPipeline is being set to 'null' when it was previously configured."); } this.httpPipeline = httpPipeline; return this; } /** * Sets the configuration store that is used during construction of the service client. * <p> * The default configuration store is a clone of the {@link Configuration * configuration store}, use {@link Configuration * * @param configuration The configuration store used to. 
* * @return The updated MetricsAdvisorClientBuilder object. */ public MetricsAdvisorClientBuilder configuration(Configuration configuration) { this.configuration = configuration; return this; } /** * Sets the {@link RetryPolicy * <p> * The default retry policy will be used if not provided {@link MetricsAdvisorClientBuilder * to build {@link MetricsAdvisorAsyncClient} or {@link MetricsAdvisorClient}. * * @param retryPolicy user's retry policy applied to each request. * * @return The updated MetricsAdvisorClientBuilder object. */ public MetricsAdvisorClientBuilder retryPolicy(RetryPolicy retryPolicy) { this.retryPolicy = retryPolicy; return this; } /** * Sets the {@link MetricsAdvisorServiceVersion} that is used when making API requests. * <p> * If a service version is not provided, the service version that will be used will be the latest known service * version based on the version of the client library being used. If no service version is specified, updating to a * newer version the client library will have the result of potentially moving to a newer service version. * * @param version {@link MetricsAdvisorServiceVersion} of the service to be used when making requests. * * @return The updated MetricsAdvisorClientBuilder object. */ public MetricsAdvisorClientBuilder serviceVersion(MetricsAdvisorServiceVersion version) { this.version = version; return this; } }
class MetricsAdvisorClientBuilder { private static final String ECHO_REQUEST_ID_HEADER = "x-ms-return-client-request-id"; private static final String CONTENT_TYPE_HEADER_VALUE = ContentType.APPLICATION_JSON; private static final String ACCEPT_HEADER = "Accept"; private static final String METRICSADVISOR_PROPERTIES = "azure-ai-metricsadvisor.properties"; private static final String NAME = "name"; private static final String VERSION = "version"; private static final RetryPolicy DEFAULT_RETRY_POLICY = new RetryPolicy("retry-after-ms", ChronoUnit.MILLIS); private static final String DEFAULT_SCOPE = "https: private final ClientLogger logger = new ClientLogger(MetricsAdvisorClientBuilder.class); private final List<HttpPipelinePolicy> policies; private final HttpHeaders headers; private final String clientName; private final String clientVersion; private String endpoint; private MetricsAdvisorKeyCredential metricsAdvisorKeyCredential; private TokenCredential tokenCredential; private HttpClient httpClient; private HttpLogOptions httpLogOptions; private HttpPipeline httpPipeline; private Configuration configuration; private RetryPolicy retryPolicy; private MetricsAdvisorServiceVersion version; static final String OCP_APIM_SUBSCRIPTION_KEY = "Ocp-Apim-Subscription-Key"; static final String API_KEY = "x-api-key"; /** * The constructor with defaults. */ public MetricsAdvisorClientBuilder() { policies = new ArrayList<>(); httpLogOptions = new HttpLogOptions(); Map<String, String> properties = CoreUtils.getProperties(METRICSADVISOR_PROPERTIES); clientName = properties.getOrDefault(NAME, "UnknownName"); clientVersion = properties.getOrDefault(VERSION, "UnknownVersion"); headers = new HttpHeaders() .put(ECHO_REQUEST_ID_HEADER, "true") .put(ACCEPT_HEADER, CONTENT_TYPE_HEADER_VALUE); } /** * Creates a {@link MetricsAdvisorClient} based on options set in the builder. Every time * {@code buildClient()} is called a new instance of {@link MetricsAdvisorClient} is created. 
* * <p> * If {@link * {@link * settings are ignored. * </p> * * @return A MetricsAdvisorClient with the options set from the builder. * @throws NullPointerException if {@link * {@link * @throws IllegalArgumentException if {@link */ public MetricsAdvisorClient buildClient() { return new MetricsAdvisorClient(buildAsyncClient()); } /** * Creates a {@link MetricsAdvisorAsyncClient} based on options set in the builder. Every time * {@code buildAsyncClient()} is called a new instance of {@link MetricsAdvisorAsyncClient} is created. * * <p> * If {@link * {@link * settings are ignored. * </p> * * @return A MetricsAdvisorAsyncClient with the options set from the builder. * @throws NullPointerException if {@link * {@link * has not been set. * @throws IllegalArgumentException if {@link */ public MetricsAdvisorAsyncClient buildAsyncClient() { Objects.requireNonNull(endpoint, "'Endpoint' is required and can not be null."); final Configuration buildConfiguration = (configuration == null) ? Configuration.getGlobalConfiguration().clone() : configuration; final MetricsAdvisorServiceVersion serviceVersion = version != null ? version : MetricsAdvisorServiceVersion.getLatest(); HttpPipeline pipeline = httpPipeline; if (pipeline == null) { pipeline = getDefaultHttpPipeline(buildConfiguration); } final AzureCognitiveServiceMetricsAdvisorRestAPIOpenAPIV2Impl advisorRestAPIOpenAPIV2 = new AzureCognitiveServiceMetricsAdvisorRestAPIOpenAPIV2ImplBuilder() .endpoint(endpoint) .pipeline(pipeline) .buildClient(); return new MetricsAdvisorAsyncClient(advisorRestAPIOpenAPIV2, serviceVersion); } /** * Sets the service endpoint for the Azure Metrics Advisor instance. * * @param endpoint The URL of the Azure Metrics Advisor instance service requests to and receive responses from. * * @return The updated MetricsAdvisorClientBuilder object. * @throws NullPointerException if {@code endpoint} is null * @throws IllegalArgumentException if {@code endpoint} cannot be parsed into a valid URL. 
*/ public MetricsAdvisorClientBuilder endpoint(String endpoint) { Objects.requireNonNull(endpoint, "'endpoint' cannot be null."); try { new URL(endpoint); } catch (MalformedURLException ex) { throw logger.logExceptionAsWarning(new IllegalArgumentException("'endpoint' must be a valid URL.", ex)); } if (endpoint.endsWith("/")) { this.endpoint = endpoint.substring(0, endpoint.length() - 1); } else { this.endpoint = endpoint; } return this; } /** * Sets the {@link TokenCredential} used to authenticate HTTP requests. * * @param tokenCredential {@link TokenCredential} used to authenticate HTTP requests. * @return The updated {@link MetricsAdvisorClientBuilder} object. * @throws NullPointerException If {@code tokenCredential} is null. */ public MetricsAdvisorClientBuilder credential(TokenCredential tokenCredential) { this.tokenCredential = Objects.requireNonNull(tokenCredential, "'tokenCredential' cannot be null."); return this; } /** * Sets the {@link MetricsAdvisorKeyCredential} to use when authenticating HTTP requests for this * MetricsAdvisorClientBuilder. * * @param metricsAdvisorKeyCredential {@link MetricsAdvisorKeyCredential} API key credential * * @return The updated MetricsAdvisorClientBuilder object. * @throws NullPointerException If {@code metricsAdvisorKeyCredential} is null. */ public MetricsAdvisorClientBuilder credential(MetricsAdvisorKeyCredential metricsAdvisorKeyCredential) { this.metricsAdvisorKeyCredential = Objects.requireNonNull(metricsAdvisorKeyCredential, "'metricsAdvisorKeyCredential' cannot be null."); return this; } /** * Sets the logging configuration for HTTP requests and responses. * * <p>If {@code logOptions} isn't provided, the default options will use {@link HttpLogDetailLevel * which will prevent logging.</p> * * @param logOptions The logging configuration to use when sending and receiving HTTP requests/responses. * * @return The updated MetricsAdvisorClientBuilder object. 
*/ public MetricsAdvisorClientBuilder httpLogOptions(HttpLogOptions logOptions) { this.httpLogOptions = logOptions; return this; } /** * Adds a policy to the set of existing policies that are executed after required policies. * * @param policy The retry policy for service requests. * * @return The updated MetricsAdvisorClientBuilder object. * @throws NullPointerException If {@code policy} is null. */ public MetricsAdvisorClientBuilder addPolicy(HttpPipelinePolicy policy) { policies.add(Objects.requireNonNull(policy, "'policy' cannot be null.")); return this; } /** * Sets the HTTP client to use for sending and receiving requests to and from the service. * * @param client The HTTP client to use for requests. * * @return The updated MetricsAdvisorClientBuilder object. */ public MetricsAdvisorClientBuilder httpClient(HttpClient client) { if (this.httpClient != null && client == null) { logger.info("HttpClient is being set to 'null' when it was previously configured."); } this.httpClient = client; return this; } /** * Sets the HTTP pipeline to use for the service client. * <p> * If {@code pipeline} is set, all other settings are ignored, aside from * {@link MetricsAdvisorClientBuilder * {@link MetricsAdvisorClient}. * * @param httpPipeline The HTTP pipeline to use for sending service requests and receiving responses. * * @return The updated MetricsAdvisorClientBuilder object. */ public MetricsAdvisorClientBuilder pipeline(HttpPipeline httpPipeline) { if (this.httpPipeline != null && httpPipeline == null) { logger.info("HttpPipeline is being set to 'null' when it was previously configured."); } this.httpPipeline = httpPipeline; return this; } /** * Sets the configuration store that is used during construction of the service client. * <p> * The default configuration store is a clone of the {@link Configuration * configuration store}, use {@link Configuration * * @param configuration The configuration store used to. 
* * @return The updated MetricsAdvisorClientBuilder object. */ public MetricsAdvisorClientBuilder configuration(Configuration configuration) { this.configuration = configuration; return this; } /** * Sets the {@link RetryPolicy * <p> * The default retry policy will be used if not provided {@link MetricsAdvisorClientBuilder * to build {@link MetricsAdvisorAsyncClient} or {@link MetricsAdvisorClient}. * * @param retryPolicy user's retry policy applied to each request. * * @return The updated MetricsAdvisorClientBuilder object. */ public MetricsAdvisorClientBuilder retryPolicy(RetryPolicy retryPolicy) { this.retryPolicy = retryPolicy; return this; } /** * Sets the {@link MetricsAdvisorServiceVersion} that is used when making API requests. * <p> * If a service version is not provided, the service version that will be used will be the latest known service * version based on the version of the client library being used. If no service version is specified, updating to a * newer version the client library will have the result of potentially moving to a newer service version. * * @param version {@link MetricsAdvisorServiceVersion} of the service to be used when making requests. * * @return The updated MetricsAdvisorClientBuilder object. */ public MetricsAdvisorClientBuilder serviceVersion(MetricsAdvisorServiceVersion version) { this.version = version; return this; } }
`DescribeStmt` is only for `describe table`. `files()` is a table function table, so i put it here.
public DescribeStmt(Map<String, String> tableFunctionProperties, NodePosition pos) { super(pos); this.dbTableName = null; this.totalRows = new LinkedList<>(); this.isTableFunctionTable = true; this.tableFunctionProperties = tableFunctionProperties; }
this.isTableFunctionTable = true;
public DescribeStmt(Map<String, String> tableFunctionProperties, NodePosition pos) { super(pos); this.dbTableName = null; this.totalRows = new LinkedList<>(); this.isTableFunctionTable = true; this.tableFunctionProperties = tableFunctionProperties; }
class DescribeStmt extends ShowStmt { private static final ShowResultSetMetaData DESC_OLAP_TABLE_META_DATA = ShowResultSetMetaData.builder() .addColumn(new Column("Field", ScalarType.createVarchar(20))) .addColumn(new Column("Type", ScalarType.createVarchar(20))) .addColumn(new Column("Null", ScalarType.createVarchar(10))) .addColumn(new Column("Key", ScalarType.createVarchar(10))) .addColumn(new Column("Default", ScalarType.createVarchar(30))) .addColumn(new Column("Extra", ScalarType.createVarchar(30))) .build(); private static final ShowResultSetMetaData DESC_OLAP_TABLE_ALL_META_DATA = ShowResultSetMetaData.builder() .addColumn(new Column("IndexName", ScalarType.createVarchar(20))) .addColumn(new Column("IndexKeysType", ScalarType.createVarchar(20))) .addColumn(new Column("Field", ScalarType.createVarchar(20))) .addColumn(new Column("Type", ScalarType.createVarchar(20))) .addColumn(new Column("Null", ScalarType.createVarchar(10))) .addColumn(new Column("Key", ScalarType.createVarchar(10))) .addColumn(new Column("Default", ScalarType.createVarchar(30))) .addColumn(new Column("Extra", ScalarType.createVarchar(30))) .build(); private static final ShowResultSetMetaData DESC_MYSQL_TABLE_ALL_META_DATA = ShowResultSetMetaData.builder() .addColumn(new Column("Host", ScalarType.createVarchar(30))) .addColumn(new Column("Port", ScalarType.createVarchar(10))) .addColumn(new Column("User", ScalarType.createVarchar(30))) .addColumn(new Column("Password", ScalarType.createVarchar(30))) .addColumn(new Column("Database", ScalarType.createVarchar(30))) .addColumn(new Column("Table", ScalarType.createVarchar(30))) .build(); private static final ShowResultSetMetaData DESC_TABLE_FUNCTION_TABLE_META_DATA = ShowResultSetMetaData.builder() .addColumn(new Column("Field", ScalarType.createVarchar(20))) .addColumn(new Column("Type", ScalarType.createVarchar(20))) .addColumn(new Column("Null", ScalarType.createVarchar(10))) .build(); public static final List<String> EMPTY_ROW = 
initEmptyRow(); private final TableName dbTableName; private ProcNodeInterface node; List<List<String>> totalRows; private boolean isAllTables; private boolean isOlapTable; private boolean isMaterializedView; private boolean isTableFunctionTable = false; private Map<String, String> tableFunctionProperties = null; public DescribeStmt(TableName dbTableName, boolean isAllTables) { this(dbTableName, isAllTables, NodePosition.ZERO); } public DescribeStmt(TableName dbTableName, boolean isAllTables, NodePosition pos) { super(pos); this.dbTableName = dbTableName; this.totalRows = new LinkedList<>(); this.isAllTables = isAllTables; } public boolean isAllTables() { return isAllTables; } public String getTableName() { return dbTableName.getTbl(); } public String getDb() { return dbTableName.getDb(); } public TableName getDbTableName() { return dbTableName; } public List<List<String>> getTotalRows() { return totalRows; } public void setMaterializedView(boolean materializedView) { isMaterializedView = materializedView; } public void setAllTables(boolean allTables) { isAllTables = allTables; } public void setNode(ProcNodeInterface node) { this.node = node; } public boolean isOlapTable() { return isOlapTable; } public void setOlapTable(boolean olapTable) { isOlapTable = olapTable; } public boolean isTableFunctionTable() { return isTableFunctionTable; } public Map<String, String> getTableFunctionProperties() { return tableFunctionProperties; } public List<List<String>> getResultRows() throws AnalysisException { if (isAllTables || isMaterializedView || isTableFunctionTable) { return totalRows; } else { Preconditions.checkNotNull(node); return node.fetchResult().getRows(); } } @Override public ShowResultSetMetaData getMetaData() { if (isTableFunctionTable) { return DESC_TABLE_FUNCTION_TABLE_META_DATA; } if (!isAllTables) { if (isMaterializedView) { return DESC_OLAP_TABLE_META_DATA; } else { ShowResultSetMetaData.Builder builder = ShowResultSetMetaData.builder(); ProcResult result = 
null; try { result = node.fetchResult(); } catch (AnalysisException e) { return builder.build(); } for (String col : result.getColumnNames()) { builder.addColumn(new Column(col, ScalarType.createVarchar(30))); } return builder.build(); } } else { if (isOlapTable) { return DESC_OLAP_TABLE_ALL_META_DATA; } else if (isMaterializedView) { return DESC_OLAP_TABLE_META_DATA; } else { return DESC_MYSQL_TABLE_ALL_META_DATA; } } } private static List<String> initEmptyRow() { List<String> emptyRow = new ArrayList<>(DESC_OLAP_TABLE_ALL_META_DATA.getColumns().size()); for (int i = 0; i < DESC_OLAP_TABLE_ALL_META_DATA.getColumns().size(); i++) { emptyRow.add(""); } return emptyRow; } @Override public <R, C> R accept(AstVisitor<R, C> visitor, C context) { return visitor.visitDescTableStmt(this, context); } }
class DescribeStmt extends ShowStmt { private static final ShowResultSetMetaData DESC_OLAP_TABLE_META_DATA = ShowResultSetMetaData.builder() .addColumn(new Column("Field", ScalarType.createVarchar(20))) .addColumn(new Column("Type", ScalarType.createVarchar(20))) .addColumn(new Column("Null", ScalarType.createVarchar(10))) .addColumn(new Column("Key", ScalarType.createVarchar(10))) .addColumn(new Column("Default", ScalarType.createVarchar(30))) .addColumn(new Column("Extra", ScalarType.createVarchar(30))) .build(); private static final ShowResultSetMetaData DESC_OLAP_TABLE_ALL_META_DATA = ShowResultSetMetaData.builder() .addColumn(new Column("IndexName", ScalarType.createVarchar(20))) .addColumn(new Column("IndexKeysType", ScalarType.createVarchar(20))) .addColumn(new Column("Field", ScalarType.createVarchar(20))) .addColumn(new Column("Type", ScalarType.createVarchar(20))) .addColumn(new Column("Null", ScalarType.createVarchar(10))) .addColumn(new Column("Key", ScalarType.createVarchar(10))) .addColumn(new Column("Default", ScalarType.createVarchar(30))) .addColumn(new Column("Extra", ScalarType.createVarchar(30))) .build(); private static final ShowResultSetMetaData DESC_MYSQL_TABLE_ALL_META_DATA = ShowResultSetMetaData.builder() .addColumn(new Column("Host", ScalarType.createVarchar(30))) .addColumn(new Column("Port", ScalarType.createVarchar(10))) .addColumn(new Column("User", ScalarType.createVarchar(30))) .addColumn(new Column("Password", ScalarType.createVarchar(30))) .addColumn(new Column("Database", ScalarType.createVarchar(30))) .addColumn(new Column("Table", ScalarType.createVarchar(30))) .build(); private static final ShowResultSetMetaData DESC_TABLE_FUNCTION_TABLE_META_DATA = ShowResultSetMetaData.builder() .addColumn(new Column("Field", ScalarType.createVarchar(20))) .addColumn(new Column("Type", ScalarType.createVarchar(20))) .addColumn(new Column("Null", ScalarType.createVarchar(10))) .build(); public static final List<String> EMPTY_ROW = 
initEmptyRow(); private final TableName dbTableName; private ProcNodeInterface node; List<List<String>> totalRows; private boolean isAllTables; private boolean isOlapTable; private boolean isMaterializedView; private boolean isTableFunctionTable = false; private Map<String, String> tableFunctionProperties = null; public DescribeStmt(TableName dbTableName, boolean isAllTables) { this(dbTableName, isAllTables, NodePosition.ZERO); } public DescribeStmt(TableName dbTableName, boolean isAllTables, NodePosition pos) { super(pos); this.dbTableName = dbTableName; this.totalRows = new LinkedList<>(); this.isAllTables = isAllTables; } public boolean isAllTables() { return isAllTables; } public String getTableName() { return dbTableName.getTbl(); } public String getDb() { return dbTableName.getDb(); } public TableName getDbTableName() { return dbTableName; } public List<List<String>> getTotalRows() { return totalRows; } public void setMaterializedView(boolean materializedView) { isMaterializedView = materializedView; } public void setAllTables(boolean allTables) { isAllTables = allTables; } public void setNode(ProcNodeInterface node) { this.node = node; } public boolean isOlapTable() { return isOlapTable; } public void setOlapTable(boolean olapTable) { isOlapTable = olapTable; } public boolean isTableFunctionTable() { return isTableFunctionTable; } public Map<String, String> getTableFunctionProperties() { return tableFunctionProperties; } public List<List<String>> getResultRows() throws AnalysisException { if (isAllTables || isMaterializedView || isTableFunctionTable) { return totalRows; } else { Preconditions.checkNotNull(node); return node.fetchResult().getRows(); } } @Override public ShowResultSetMetaData getMetaData() { if (isTableFunctionTable) { return DESC_TABLE_FUNCTION_TABLE_META_DATA; } if (!isAllTables) { if (isMaterializedView) { return DESC_OLAP_TABLE_META_DATA; } else { ShowResultSetMetaData.Builder builder = ShowResultSetMetaData.builder(); ProcResult result = 
null; try { result = node.fetchResult(); } catch (AnalysisException e) { return builder.build(); } for (String col : result.getColumnNames()) { builder.addColumn(new Column(col, ScalarType.createVarchar(30))); } return builder.build(); } } else { if (isOlapTable) { return DESC_OLAP_TABLE_ALL_META_DATA; } else if (isMaterializedView) { return DESC_OLAP_TABLE_META_DATA; } else { return DESC_MYSQL_TABLE_ALL_META_DATA; } } } private static List<String> initEmptyRow() { List<String> emptyRow = new ArrayList<>(DESC_OLAP_TABLE_ALL_META_DATA.getColumns().size()); for (int i = 0; i < DESC_OLAP_TABLE_ALL_META_DATA.getColumns().size(); i++) { emptyRow.add(""); } return emptyRow; } @Override public <R, C> R accept(AstVisitor<R, C> visitor, C context) { return visitor.visitDescTableStmt(this, context); } }
Didn't see the purpose of re-wrapping an IOException
public boolean start() throws IOException { try { ConnectionHandler connectionHandler = new ConnectionHandler(source.spec.uri()); connectionHandler.start(); Channel channel = connectionHandler.getChannel(); String queueName = source.spec.queue(); if (source.spec.queueDeclare()) { channel.queueDeclare(queueName, false, false, false, null); } if (source.spec.exchange() != null) { if (source.spec.exchangeDeclare()) { channel.exchangeDeclare(source.spec.exchange(), source.spec.exchangeType()); } if (queueName == null) { queueName = channel.queueDeclare().getQueue(); } channel.queueBind(queueName, source.spec.exchange(), source.spec.routingKey()); } checkpointMark.channel = channel; consumer = new QueueingConsumer(channel); channel.txSelect(); channel.setDefaultConsumer(consumer); channel.basicConsume(queueName, false, consumer); } catch (IOException e) { throw e; } catch (Exception e) { throw new IOException(e); } return advance(); }
channel.queueBind(queueName, source.spec.exchange(), source.spec.routingKey());
public boolean start() throws IOException { try { connectionHandler = new ConnectionHandler(source.spec.uri()); connectionHandler.start(); Channel channel = connectionHandler.getChannel(); queueName = source.spec.queue(); if (source.spec.queueDeclare()) { channel.queueDeclare(queueName, false, false, false, null); } if (source.spec.exchange() != null) { if (source.spec.exchangeDeclare()) { channel.exchangeDeclare(source.spec.exchange(), source.spec.exchangeType()); } if (queueName == null) { queueName = channel.queueDeclare().getQueue(); } channel.queueBind(queueName, source.spec.exchange(), source.spec.routingKey()); } checkpointMark.channel = channel; channel.txSelect(); } catch (IOException e) { throw e; } catch (Exception e) { throw new IOException(e); } return advance(); }
class UnboundedRabbitMqReader extends UnboundedSource.UnboundedReader<RabbitMqMessage> { private final RabbitMQSource source; private RabbitMqMessage current; private byte[] currentRecordId; private ConnectionHandler connectionHandler; private QueueingConsumer consumer; private Instant currentTimestamp; private final RabbitMQCheckpointMark checkpointMark; UnboundedRabbitMqReader(RabbitMQSource source, RabbitMQCheckpointMark checkpointMark) throws IOException { this.source = source; this.current = null; this.checkpointMark = checkpointMark != null ? checkpointMark : new RabbitMQCheckpointMark(); try { connectionHandler = new ConnectionHandler(source.spec.uri()); } catch (Exception e) { throw new IOException(e); } } @Override public Instant getWatermark() { return checkpointMark.oldestTimestamp; } @Override public UnboundedSource.CheckpointMark getCheckpointMark() { return checkpointMark; } @Override public RabbitMQSource getCurrentSource() { return source; } @Override public byte[] getCurrentRecordId() { if (current == null) { throw new NoSuchElementException(); } if (currentRecordId != null) { return currentRecordId; } else { return "".getBytes(StandardCharsets.UTF_8); } } @Override public Instant getCurrentTimestamp() { if (currentTimestamp == null) { throw new NoSuchElementException(); } return currentTimestamp; } @Override public RabbitMqMessage getCurrent() { if (current == null) { throw new NoSuchElementException(); } return current; } @Override @Override public boolean advance() throws IOException { try { QueueingConsumer.Delivery delivery = consumer.nextDelivery(1000); if (delivery == null) { return false; } if (source.spec.useCorrelationId()) { String correlationId = delivery.getProperties().getCorrelationId(); if (correlationId == null) { throw new IOException( "RabbitMqIO.Read uses message correlation ID, but received " + "message has a null correlation ID"); } currentRecordId = correlationId.getBytes(StandardCharsets.UTF_8); } long deliveryTag = 
delivery.getEnvelope().getDeliveryTag(); checkpointMark.sessionIds.add(deliveryTag); current = new RabbitMqMessage(source.spec.routingKey(), delivery); currentTimestamp = new Instant(delivery.getProperties().getTimestamp()); if (currentTimestamp.isBefore(checkpointMark.oldestTimestamp)) { checkpointMark.oldestTimestamp = currentTimestamp; } } catch (Exception e) { throw new IOException(e); } return true; } @Override public void close() throws IOException { if (connectionHandler != null) { connectionHandler.stop(); } } }
class UnboundedRabbitMqReader extends UnboundedSource.UnboundedReader<RabbitMqMessage> { private final RabbitMQSource source; private RabbitMqMessage current; private byte[] currentRecordId; private ConnectionHandler connectionHandler; private String queueName; private Instant currentTimestamp; private final RabbitMQCheckpointMark checkpointMark; UnboundedRabbitMqReader(RabbitMQSource source, RabbitMQCheckpointMark checkpointMark) throws IOException { this.source = source; this.current = null; this.checkpointMark = checkpointMark != null ? checkpointMark : new RabbitMQCheckpointMark(); } @Override public Instant getWatermark() { return checkpointMark.oldestTimestamp; } @Override public UnboundedSource.CheckpointMark getCheckpointMark() { return checkpointMark; } @Override public RabbitMQSource getCurrentSource() { return source; } @Override public byte[] getCurrentRecordId() { if (current == null) { throw new NoSuchElementException(); } if (currentRecordId != null) { return currentRecordId; } else { return "".getBytes(StandardCharsets.UTF_8); } } @Override public Instant getCurrentTimestamp() { if (currentTimestamp == null) { throw new NoSuchElementException(); } return currentTimestamp; } @Override public RabbitMqMessage getCurrent() { if (current == null) { throw new NoSuchElementException(); } return current; } @Override @Override public boolean advance() throws IOException { try { Channel channel = connectionHandler.getChannel(); GetResponse delivery = channel.basicGet(queueName, false); if (delivery == null) { return false; } if (source.spec.useCorrelationId()) { String correlationId = delivery.getProps().getCorrelationId(); if (correlationId == null) { throw new IOException( "RabbitMqIO.Read uses message correlation ID, but received " + "message has a null correlation ID"); } currentRecordId = correlationId.getBytes(StandardCharsets.UTF_8); } long deliveryTag = delivery.getEnvelope().getDeliveryTag(); checkpointMark.sessionIds.add(deliveryTag); current = new 
RabbitMqMessage(source.spec.routingKey(), delivery); currentTimestamp = new Instant(delivery.getProps().getTimestamp()); if (currentTimestamp.isBefore(checkpointMark.oldestTimestamp)) { checkpointMark.oldestTimestamp = currentTimestamp; } } catch (IOException e) { throw e; } catch (Exception e) { throw new IOException(e); } return true; } @Override public void close() throws IOException { if (connectionHandler != null) { connectionHandler.stop(); } } }
How about just removing the shortcut?
public History recordStateTransition(Node.State from, Node.State to, Agent agent, Instant at) { if (from == to && from != Node.State.reserved) return this; switch (to) { case provisioned: return this.with(new Event(Event.Type.provisioned, agent, at)); case ready: return this.withoutApplicationEvents().with(new Event(Event.Type.readied, agent, at)); case active: return this.with(new Event(Event.Type.activated, agent, at)); case inactive: return this.with(new Event(Event.Type.deactivated, agent, at)); case reserved: return this.with(new Event(Event.Type.reserved, agent, at)); case failed: return this.with(new Event(Event.Type.failed, agent, at)); case dirty: return this.with(new Event(Event.Type.deallocated, agent, at)); case parked: return this.with(new Event(Event.Type.parked, agent, at)); default: return this; } }
public History recordStateTransition(Node.State from, Node.State to, Agent agent, Instant at) { if (from == to && from != Node.State.reserved) return this; switch (to) { case provisioned: return this.with(new Event(Event.Type.provisioned, agent, at)); case ready: return this.withoutApplicationEvents().with(new Event(Event.Type.readied, agent, at)); case active: return this.with(new Event(Event.Type.activated, agent, at)); case inactive: return this.with(new Event(Event.Type.deactivated, agent, at)); case reserved: return this.with(new Event(Event.Type.reserved, agent, at)); case failed: return this.with(new Event(Event.Type.failed, agent, at)); case dirty: return this.with(new Event(Event.Type.deallocated, agent, at)); case parked: return this.with(new Event(Event.Type.parked, agent, at)); default: return this; } }
class History { private final ImmutableMap<Event.Type, Event> events; public History(Collection<Event> events) { this(toImmutableMap(events)); } private History(ImmutableMap<Event.Type, Event> events) { this.events = events; } private static ImmutableMap<Event.Type, Event> toImmutableMap(Collection<Event> events) { ImmutableMap.Builder<Event.Type, Event> builder = new ImmutableMap.Builder<>(); for (Event event : events) builder.put(event.type(), event); return builder.build(); } /** Returns this event if it is present in this history */ public Optional<Event> event(Event.Type type) { return Optional.ofNullable(events.get(type)); } public Collection<Event> events() { return events.values(); } /** Returns a copy of this history with the given event added */ public History with(Event event) { ImmutableMap.Builder<Event.Type, Event> builder = builderWithout(event.type()); builder.put(event.type(), event); return new History(builder.build()); } /** Returns a copy of this history with the given event type removed (or an identical history if it was not present) */ public History without(Event.Type type) { return new History(builderWithout(type).build()); } private ImmutableMap.Builder<Event.Type, Event> builderWithout(Event.Type type) { ImmutableMap.Builder<Event.Type, Event> builder = new ImmutableMap.Builder<>(); for (Event event : events.values()) if (event.type() != type) builder.put(event.type(), event); return builder; } /** Returns a copy of this history with a record of this state transition added, if applicable */ /** * Events can be application or node level. * This returns a copy of this history with all application level events removed. */ private History withoutApplicationEvents() { return new History(events().stream().filter(e -> ! 
e.type().isApplicationLevel()).collect(Collectors.toList())); } /** Returns the empty history */ public static History empty() { return new History(Collections.emptyList()); } @Override public String toString() { if (events.isEmpty()) return "history: (empty)"; StringBuilder b = new StringBuilder("history: "); for (Event e : events.values()) b.append(e).append(", "); b.setLength(b.length() -2); return b.toString(); } /** An event which may happen to a node */ public static class Event { private final Instant at; private final Agent agent; private final Type type; public Event(Event.Type type, Agent agent, Instant at) { this.type = type; this.agent = agent; this.at = at; } public enum Type { provisioned(false), readied, reserved, activated, deactivated, deallocated, parked, retired, down, requested, rebooted(false), failed(false); private final boolean applicationLevel; /** Creates an application level event */ Type() { this.applicationLevel = true; } Type(boolean applicationLevel) { this.applicationLevel = applicationLevel; } /** Returns true if this is an application level event and false it it is a node level event */ public boolean isApplicationLevel() { return applicationLevel; } } /** Returns the type of event */ public Event.Type type() { return type; } /** Returns the agent causing this event */ public Agent agent() { return agent; } /** Returns the instant this even took place */ public Instant at() { return at; } @Override public String toString() { return "'" + type + "' event at " + at + " by " + agent; } } }
class History { private final ImmutableMap<Event.Type, Event> events; public History(Collection<Event> events) { this(toImmutableMap(events)); } private History(ImmutableMap<Event.Type, Event> events) { this.events = events; } private static ImmutableMap<Event.Type, Event> toImmutableMap(Collection<Event> events) { ImmutableMap.Builder<Event.Type, Event> builder = new ImmutableMap.Builder<>(); for (Event event : events) builder.put(event.type(), event); return builder.build(); } /** Returns this event if it is present in this history */ public Optional<Event> event(Event.Type type) { return Optional.ofNullable(events.get(type)); } public Collection<Event> events() { return events.values(); } /** Returns a copy of this history with the given event added */ public History with(Event event) { ImmutableMap.Builder<Event.Type, Event> builder = builderWithout(event.type()); builder.put(event.type(), event); return new History(builder.build()); } /** Returns a copy of this history with the given event type removed (or an identical history if it was not present) */ public History without(Event.Type type) { return new History(builderWithout(type).build()); } private ImmutableMap.Builder<Event.Type, Event> builderWithout(Event.Type type) { ImmutableMap.Builder<Event.Type, Event> builder = new ImmutableMap.Builder<>(); for (Event event : events.values()) if (event.type() != type) builder.put(event.type(), event); return builder; } /** Returns a copy of this history with a record of this state transition added, if applicable */ /** * Events can be application or node level. * This returns a copy of this history with all application level events removed. */ private History withoutApplicationEvents() { return new History(events().stream().filter(e -> ! 
e.type().isApplicationLevel()).collect(Collectors.toList())); } /** Returns the empty history */ public static History empty() { return new History(Collections.emptyList()); } @Override public String toString() { if (events.isEmpty()) return "history: (empty)"; StringBuilder b = new StringBuilder("history: "); for (Event e : events.values()) b.append(e).append(", "); b.setLength(b.length() -2); return b.toString(); } /** An event which may happen to a node */ public static class Event { private final Instant at; private final Agent agent; private final Type type; public Event(Event.Type type, Agent agent, Instant at) { this.type = type; this.agent = agent; this.at = at; } public enum Type { provisioned(false), readied, reserved, activated, deactivated, deallocated, parked, retired, down, requested, rebooted(false), failed(false); private final boolean applicationLevel; /** Creates an application level event */ Type() { this.applicationLevel = true; } Type(boolean applicationLevel) { this.applicationLevel = applicationLevel; } /** Returns true if this is an application level event and false it it is a node level event */ public boolean isApplicationLevel() { return applicationLevel; } } /** Returns the type of event */ public Event.Type type() { return type; } /** Returns the agent causing this event */ public Agent agent() { return agent; } /** Returns the instant this even took place */ public Instant at() { return at; } @Override public String toString() { return "'" + type + "' event at " + at + " by " + agent; } } }
Yes, I forgot about that. Excellent catch!
private static void deferConfigChangesForClustersToBeRestarted(List<ConfigChangeAction> actions, VespaModel model) { Set<ClusterSpec.Id> clustersToBeRestarted = actions.stream() .filter(action -> action.getType() == ConfigChangeAction.Type.RESTART) .map(action -> action.clusterId()) .collect(Collectors.toSet()); for (var clusterToRestart : clustersToBeRestarted) { var containerCluster = model.getContainerClusters().get(clusterToRestart.value()); if (containerCluster != null) containerCluster.deferChangesUntilRestart(); var contentCluster = model.getContentClusters().get(clusterToRestart.value()); if (contentCluster != null) contentCluster.deferChangesUntilRestart(); } }
var containerCluster = model.getContainerClusters().get(clusterToRestart.value());
private static void deferConfigChangesForClustersToBeRestarted(List<ConfigChangeAction> actions, VespaModel model) { Set<ClusterSpec.Id> clustersToBeRestarted = actions.stream() .filter(action -> action.getType() == ConfigChangeAction.Type.RESTART) .filter(action -> action.clusterId() != null) .map(action -> action.clusterId()) .collect(Collectors.toSet()); for (var clusterToRestart : clustersToBeRestarted) { var containerCluster = model.getContainerClusters().get(clusterToRestart.value()); if (containerCluster != null) containerCluster.deferChangesUntilRestart(); var contentCluster = model.getContentClusters().get(clusterToRestart.value()); if (contentCluster != null) contentCluster.deferChangesUntilRestart(); } }
class Validation { /** * Validates the model supplied, and if there already exists a model for the application validates changes * between the previous and current model * * @return a list of required changes needed to make this configuration live */ public static List<ConfigChangeAction> validate(VespaModel model, ValidationParameters validationParameters, DeployState deployState) { if (validationParameters.checkRouting()) { new RoutingValidator().validate(model, deployState); new RoutingSelectorValidator().validate(model, deployState); } new ComponentValidator().validate(model, deployState); new SearchDataTypeValidator().validate(model, deployState); new ComplexAttributeFieldsValidator().validate(model, deployState); new StreamingValidator().validate(model, deployState); new RankSetupValidator(validationParameters.ignoreValidationErrors()).validate(model, deployState); new NoPrefixForIndexes().validate(model, deployState); new DeploymentSpecValidator().validate(model, deployState); new RankingConstantsValidator().validate(model, deployState); new SecretStoreValidator().validate(model, deployState); new EndpointCertificateSecretsValidator().validate(model, deployState); new AccessControlFilterValidator().validate(model, deployState); new CloudWatchValidator().validate(model, deployState); new AwsAccessControlValidator().validate(model, deployState); new QuotaValidator().validate(model, deployState); new UriBindingsValidator().validate(model, deployState); List<ConfigChangeAction> result = Collections.emptyList(); if (deployState.getProperties().isFirstTimeDeployment()) { validateFirstTimeDeployment(model, deployState); } else { Optional<Model> currentActiveModel = deployState.getPreviousModel(); if (currentActiveModel.isPresent() && (currentActiveModel.get() instanceof VespaModel)) { result = validateChanges((VespaModel) currentActiveModel.get(), model, deployState.validationOverrides(), deployState.getDeployLogger(), deployState.now()); 
deferConfigChangesForClustersToBeRestarted(result, model); } } return result; } private static List<ConfigChangeAction> validateChanges(VespaModel currentModel, VespaModel nextModel, ValidationOverrides overrides, DeployLogger logger, Instant now) { ChangeValidator[] validators = new ChangeValidator[] { new IndexingModeChangeValidator(), new GlobalDocumentChangeValidator(), new IndexedSearchClusterChangeValidator(), new StreamingSearchClusterChangeValidator(), new ConfigValueChangeValidator(logger), new StartupCommandChangeValidator(), new ContentTypeRemovalValidator(), new ContentClusterRemovalValidator(), new ClusterSizeReductionValidator(), new ResourcesReductionValidator(), new ContainerRestartValidator(), new NodeResourceChangeValidator() }; return Arrays.stream(validators) .flatMap(v -> v.validate(currentModel, nextModel, overrides, now).stream()) .collect(toList()); } private static void validateFirstTimeDeployment(VespaModel model, DeployState deployState) { new AccessControlOnFirstDeploymentValidator().validate(model, deployState); } }
class Validation { /** * Validates the model supplied, and if there already exists a model for the application validates changes * between the previous and current model * * @return a list of required changes needed to make this configuration live */ public static List<ConfigChangeAction> validate(VespaModel model, ValidationParameters validationParameters, DeployState deployState) { if (validationParameters.checkRouting()) { new RoutingValidator().validate(model, deployState); new RoutingSelectorValidator().validate(model, deployState); } new ComponentValidator().validate(model, deployState); new SearchDataTypeValidator().validate(model, deployState); new ComplexAttributeFieldsValidator().validate(model, deployState); new StreamingValidator().validate(model, deployState); new RankSetupValidator(validationParameters.ignoreValidationErrors()).validate(model, deployState); new NoPrefixForIndexes().validate(model, deployState); new DeploymentSpecValidator().validate(model, deployState); new RankingConstantsValidator().validate(model, deployState); new SecretStoreValidator().validate(model, deployState); new EndpointCertificateSecretsValidator().validate(model, deployState); new AccessControlFilterValidator().validate(model, deployState); new CloudWatchValidator().validate(model, deployState); new AwsAccessControlValidator().validate(model, deployState); new QuotaValidator().validate(model, deployState); new UriBindingsValidator().validate(model, deployState); List<ConfigChangeAction> result = Collections.emptyList(); if (deployState.getProperties().isFirstTimeDeployment()) { validateFirstTimeDeployment(model, deployState); } else { Optional<Model> currentActiveModel = deployState.getPreviousModel(); if (currentActiveModel.isPresent() && (currentActiveModel.get() instanceof VespaModel)) { result = validateChanges((VespaModel) currentActiveModel.get(), model, deployState.validationOverrides(), deployState.getDeployLogger(), deployState.now()); 
deferConfigChangesForClustersToBeRestarted(result, model); } } return result; } private static List<ConfigChangeAction> validateChanges(VespaModel currentModel, VespaModel nextModel, ValidationOverrides overrides, DeployLogger logger, Instant now) { ChangeValidator[] validators = new ChangeValidator[] { new IndexingModeChangeValidator(), new GlobalDocumentChangeValidator(), new IndexedSearchClusterChangeValidator(), new StreamingSearchClusterChangeValidator(), new ConfigValueChangeValidator(logger), new StartupCommandChangeValidator(), new ContentTypeRemovalValidator(), new ContentClusterRemovalValidator(), new ClusterSizeReductionValidator(), new ResourcesReductionValidator(), new ContainerRestartValidator(), new NodeResourceChangeValidator() }; return Arrays.stream(validators) .flatMap(v -> v.validate(currentModel, nextModel, overrides, now).stream()) .collect(toList()); } private static void validateFirstTimeDeployment(VespaModel model, DeployState deployState) { new AccessControlOnFirstDeploymentValidator().validate(model, deployState); } }
Does this properly handle multi-catch blocks, ex `catch (IOException | UncheckIOException ex)`?
public void visitToken(DetailAST catchBlockToken) { final DetailAST catchStatement = catchBlockToken.findFirstToken(TokenTypes.PARAMETER_DEF); final String caughtExceptionVariableName = catchStatement.findFirstToken(TokenTypes.IDENT).getText(); final List<DetailAST> throwStatements = getThrowStatements(catchBlockToken); final List<String> wrappedExceptions = getWrappedExceptions(catchBlockToken, catchBlockToken, caughtExceptionVariableName); throwStatements.forEach(throwToken -> { final List<String> throwParamNames = new LinkedList<>(); getThrowParamNames(throwToken, throwParamNames); wrappedExceptions.add(caughtExceptionVariableName); List<String> intersect = wrappedExceptions.stream().filter(throwParamNames::contains).collect(Collectors.toList()); if (intersect.size() == 0) { log(throwToken, String.format(UNUSED_CAUGHT_EXCEPTION_ERROR, caughtExceptionVariableName)); } }); }
final List<String> wrappedExceptions =
public void visitToken(DetailAST catchBlockToken) { final DetailAST catchStatement = catchBlockToken.findFirstToken(TokenTypes.PARAMETER_DEF); final String caughtExceptionVariableName = catchStatement.findFirstToken(TokenTypes.IDENT).getText(); final List<DetailAST> throwStatements = getThrowStatements(catchBlockToken); final List<String> wrappedExceptions = getWrappedExceptions(catchBlockToken, catchBlockToken, caughtExceptionVariableName); throwStatements.forEach(throwToken -> { final List<String> throwParamNames = new LinkedList<>(); getThrowParamNames(throwToken, throwParamNames); wrappedExceptions.add(caughtExceptionVariableName); List<String> intersect = wrappedExceptions.stream().filter(throwParamNames::contains).collect(Collectors.toList()); if (intersect.size() == 0) { log(throwToken, String.format(UNUSED_CAUGHT_EXCEPTION_ERROR, caughtExceptionVariableName)); } }); }
class UseCaughtExceptionCauseCheck extends AbstractCheck { static final String UNUSED_CAUGHT_EXCEPTION_ERROR = "Should use the current exception cause \"%s\"."; @Override public int[] getDefaultTokens() { return getRequiredTokens(); } @Override public int[] getAcceptableTokens() { return getRequiredTokens(); } @Override public int[] getRequiredTokens() { return new int[] {TokenTypes.LITERAL_CATCH}; } @Override /** * Returns the list of exceptions that wrapped the current exception tokens * * @param detailAST catch block throw parent token * @param caughtExceptionVariableName list containing the exception tokens * @return list of wrapped exception tokens */ private List<String> getWrappedExceptions(DetailAST currentCatchAST, DetailAST detailAST, String caughtExceptionVariableName) { final List<String> wrappedExceptionNames = new LinkedList<>(); for (DetailAST currentNode : getChildrenNodes(detailAST)) { if (currentNode.getType() == TokenTypes.IDENT && currentNode.getText().equals(caughtExceptionVariableName)) { getWrappedExceptionVariable(currentCatchAST, wrappedExceptionNames, currentNode); } if (currentNode.getFirstChild() != null) { wrappedExceptionNames.addAll( getWrappedExceptions(currentCatchAST, currentNode, caughtExceptionVariableName)); } } return wrappedExceptionNames; } /** * Returns the wrapped exception variable name */ private void getWrappedExceptionVariable(DetailAST currentCatchBlock, List<String> wrappedExceptionNames, DetailAST currentToken) { DetailAST temp = currentToken; while (!temp.equals(currentCatchBlock) && temp.getType() != TokenTypes.ASSIGN) { temp = temp.getParent(); } if (temp.getType() == TokenTypes.ASSIGN) { final DetailAST wrappedException; if (temp.getParent().getType() == TokenTypes.VARIABLE_DEF) { wrappedException = temp.getParent().findFirstToken(TokenTypes.IDENT); } else { wrappedException = temp.findFirstToken(TokenTypes.IDENT); } if (wrappedException != null) { wrappedExceptionNames.add(wrappedException.getText()); } } } /** 
* Returns the parameter names for current throw keyword. * * @param throwParent The parent throw token * @param paramNames The list containing the parameter names * @return list of throw param names */ private List<String> getThrowParamNames(DetailAST throwParent, List<String> paramNames) { getChildrenNodes(throwParent).forEach(currentNode -> { if (currentNode.getType() == TokenTypes.IDENT) { paramNames.add(currentNode.getText()); } if (currentNode.getFirstChild() != null) { getThrowParamNames(currentNode, paramNames); } }); return paramNames; } /** * Recursive method that searches for all the LITERAL_THROW on the current catch token. * * @param catchBlockToken A start token. * @return list of throw tokens */ private List<DetailAST> getThrowStatements(DetailAST catchBlockToken) { final List<DetailAST> throwStatements = new LinkedList<>(); getChildrenNodes(catchBlockToken).forEach(currentNode -> { if (TokenTypes.LITERAL_THROW == currentNode.getType()) { throwStatements.add(currentNode); } if (currentNode.getFirstChild() != null) { throwStatements.addAll(getThrowStatements(currentNode)); } }); return throwStatements; } /** * Gets all the children of the current parent node. * * @param token parent node. * @return List of children of the current node. */ private static List<DetailAST> getChildrenNodes(DetailAST token) { final List<DetailAST> result = new LinkedList<>(); DetailAST currNode = token.getFirstChild(); while (currNode != null) { result.add(currNode); currNode = currNode.getNextSibling(); } return result; } }
class UseCaughtExceptionCauseCheck extends AbstractCheck { static final String UNUSED_CAUGHT_EXCEPTION_ERROR = "Caught and rethrown exceptions should include the caught" + " exception as the cause in the rethrown exception. Dropping the causal exception makes it more difficult" + " to troubleshoot issues when they arise. Include the caught exception variable %s as the cause."; @Override public int[] getDefaultTokens() { return getRequiredTokens(); } @Override public int[] getAcceptableTokens() { return getRequiredTokens(); } @Override public int[] getRequiredTokens() { return new int[] {TokenTypes.LITERAL_CATCH}; } @Override /** * Returns the list of exceptions that wrapped the current exception tokens * * @param currentCatchAST current catch block token * @param detailAST catch block throw parent token * @param caughtExceptionVariableName list containing the exception tokens * @return list of wrapped exception tokens */ private List<String> getWrappedExceptions(DetailAST currentCatchAST, DetailAST detailAST, String caughtExceptionVariableName) { final List<String> wrappedExceptionNames = new LinkedList<>(); for (DetailAST currentNode : getChildrenNodes(detailAST)) { if (currentNode.getType() == TokenTypes.IDENT && currentNode.getText().equals(caughtExceptionVariableName)) { getWrappedExceptionVariable(currentCatchAST, wrappedExceptionNames, currentNode); } if (currentNode.getFirstChild() != null) { wrappedExceptionNames.addAll( getWrappedExceptions(currentCatchAST, currentNode, caughtExceptionVariableName)); } } return wrappedExceptionNames; } /** * Returns the wrapped exception variable name */ private void getWrappedExceptionVariable(DetailAST currentCatchBlock, List<String> wrappedExceptionNames, DetailAST currentToken) { DetailAST temp = currentToken; while (!temp.equals(currentCatchBlock) && temp.getType() != TokenTypes.ASSIGN) { temp = temp.getParent(); } if (temp.getType() == TokenTypes.ASSIGN) { final DetailAST wrappedException; if 
(temp.getParent().getType() == TokenTypes.VARIABLE_DEF) { wrappedException = temp.getParent().findFirstToken(TokenTypes.IDENT); } else if (temp.findFirstToken(TokenTypes.DOT) != null) { wrappedException = temp.findFirstToken(TokenTypes.DOT).findFirstToken(TokenTypes.IDENT); } else { wrappedException = temp.findFirstToken(TokenTypes.IDENT); } if (wrappedException != null) { wrappedExceptionNames.add(wrappedException.getText()); } } } /** * Returns the parameter names for current throw keyword. * * @param throwParent The parent throw token * @param paramNames The list containing the parameter names * @return list of throw param names */ private List<String> getThrowParamNames(DetailAST throwParent, List<String> paramNames) { getChildrenNodes(throwParent).forEach(currentNode -> { if (currentNode.getType() == TokenTypes.IDENT) { paramNames.add(currentNode.getText()); } if (currentNode.getFirstChild() != null) { getThrowParamNames(currentNode, paramNames); } }); return paramNames; } /** * Recursive method that searches for all the LITERAL_THROW on the current catch token. * * @param catchBlockToken A start token. * @return list of throw tokens */ private List<DetailAST> getThrowStatements(DetailAST catchBlockToken) { final List<DetailAST> throwStatements = new LinkedList<>(); getChildrenNodes(catchBlockToken).forEach(currentNode -> { if (TokenTypes.LITERAL_THROW == currentNode.getType()) { throwStatements.add(currentNode); } if (currentNode.getFirstChild() != null) { throwStatements.addAll(getThrowStatements(currentNode)); } }); return throwStatements; } /** * Gets all the children by traversing the tree generated from the current parent node. * * @param token parent node. * @return List of children of the current node. 
*/ private static List<DetailAST> getChildrenNodes(DetailAST token) { final List<DetailAST> result = new LinkedList<>(); DetailAST currNode = token.getFirstChild(); while (currNode != null) { result.add(currNode); currNode = currNode.getNextSibling(); } return result; } }
I run ValidatesRunner tests and the result is green.
private void checkNoMoreMatches(final List<PTransformOverride> overrides) { traverseTopologically( new PipelineVisitor.Defaults() { SetMultimap<Node, PTransformOverride> matched = HashMultimap.create(); @Override public CompositeBehavior enterCompositeTransform(Node node) { if (!node.isRootNode()) { checkForMatches(node); } if (matched.containsKey(node)) { return CompositeBehavior.DO_NOT_ENTER_TRANSFORM; } else { return CompositeBehavior.ENTER_TRANSFORM; } } @Override public void leaveCompositeTransform(Node node) { if (node.isRootNode()) { checkState( matched.isEmpty(), "Found nodes that matched overrides. Matches: %s", matched); } } @Override public void visitPrimitiveTransform(Node node) { checkForMatches(node); } private void checkForMatches(Node node) { for (PTransformOverride override : overrides) { if (override.getMatcher() .matchesDuringValidation(node.toAppliedPTransform(getPipeline()))) { matched.put(node, override); } } } }); }
SetMultimap<Node, PTransformOverride> matched = HashMultimap.create();
private void checkNoMoreMatches(final List<PTransformOverride> overrides) { traverseTopologically( new PipelineVisitor.Defaults() { SetMultimap<Node, PTransformOverride> matched = HashMultimap.create(); @Override public CompositeBehavior enterCompositeTransform(Node node) { if (!node.isRootNode()) { checkForMatches(node); } if (matched.containsKey(node)) { return CompositeBehavior.DO_NOT_ENTER_TRANSFORM; } else { return CompositeBehavior.ENTER_TRANSFORM; } } @Override public void leaveCompositeTransform(Node node) { if (node.isRootNode()) { checkState( matched.isEmpty(), "Found nodes that matched overrides. Matches: %s", matched); } } @Override public void visitPrimitiveTransform(Node node) { checkForMatches(node); } private void checkForMatches(Node node) { for (PTransformOverride override : overrides) { if (override .getMatcher() .matchesDuringValidation(node.toAppliedPTransform(getPipeline()))) { matched.put(node, override); } } } }); }
class PipelineExecutionException extends RuntimeException { /** * Wraps {@code cause} into a {@link PipelineExecutionException}. */ public PipelineExecutionException(Throwable cause) { super(cause); } }
class PipelineExecutionException extends RuntimeException { /** Wraps {@code cause} into a {@link PipelineExecutionException}. */ public PipelineExecutionException(Throwable cause) { super(cause); } }
Also, when you start a context in the parser. You have to start a context in error handler in parrelly. Need to add this context to the `startContextIfRequired()` method in errorhandler
private boolean isServiceDeclStart(ParserRuleContext currentContext, int lookahead) { switch (peek(lookahead + 1).kind) { case IDENTIFIER_TOKEN: SyntaxKind tokenAfterIdentifier = peek(lookahead + 2).kind; switch (tokenAfterIdentifier) { case ON_KEYWORD: case OPEN_BRACE_TOKEN: return true; case EQUAL_TOKEN: case SEMICOLON_TOKEN: case QUESTION_MARK_TOKEN: return false; default: return false; } case ON_KEYWORD: return true; default: return false; } } /** * Parse listener declaration, given the qualifier. * <p> * <code> * listener-decl := metadata [public] listener [type-descriptor] variable-name = expression ; * </code> * * @param metadata Metadata * @param qualifier Qualifier that precedes the listener declaration * @return Parsed node */ private STNode parseListenerDeclaration(STNode metadata, STNode qualifier) { startContext(ParserRuleContext.LISTENER_DECL); STNode listenerKeyword = parseListenerKeyword(); if (peek().kind == SyntaxKind.IDENTIFIER_TOKEN) { STNode listenerDecl = parseConstantOrListenerDeclWithOptionalType(metadata, qualifier, listenerKeyword, true); endContext(); return listenerDecl; } STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER); STNode variableName = parseVariableName(); STNode equalsToken = parseAssignOp(); STNode initializer = parseExpression(); STNode semicolonToken = parseSemicolon(); endContext(); return STNodeFactory.createListenerDeclarationNode(metadata, qualifier, listenerKeyword, typeDesc, variableName, equalsToken, initializer, semicolonToken); } /** * Parse listener keyword. * * @return Parsed node */ private STNode parseListenerKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.LISTENER_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.LISTENER_KEYWORD); return parseListenerKeyword(); } } /** * Parse constant declaration, given the qualifier. 
* <p> * <code>module-const-decl := metadata [public] const [type-descriptor] identifier = const-expr ;</code> * * @param metadata Metadata * @param qualifier Qualifier that precedes the listener declaration * @return Parsed node */ private STNode parseConstantDeclaration(STNode metadata, STNode qualifier) { startContext(ParserRuleContext.CONSTANT_DECL); STNode constKeyword = parseConstantKeyword(); return parseConstDecl(metadata, qualifier, constKeyword); } /** * Parse the components that follows after the const keyword of a constant declaration. * * @param metadata Metadata * @param qualifier Qualifier that precedes the constant decl * @param constKeyword Const keyword * @return Parsed node */ private STNode parseConstDecl(STNode metadata, STNode qualifier, STNode constKeyword) { STToken nextToken = peek(); switch (nextToken.kind) { case ANNOTATION_KEYWORD: endContext(); return parseAnnotationDeclaration(metadata, qualifier, constKeyword); case IDENTIFIER_TOKEN: STNode constantDecl = parseConstantOrListenerDeclWithOptionalType(metadata, qualifier, constKeyword, false); endContext(); return constantDecl; default: if (isTypeStartingToken(nextToken.kind)) { break; } recover(peek(), ParserRuleContext.CONST_DECL_TYPE); return parseConstDecl(metadata, qualifier, constKeyword); } STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER); STNode variableName = parseVariableName(); STNode equalsToken = parseAssignOp(); STNode initializer = parseExpression(); STNode semicolonToken = parseSemicolon(); endContext(); return STNodeFactory.createConstantDeclarationNode(metadata, qualifier, constKeyword, typeDesc, variableName, equalsToken, initializer, semicolonToken); } private STNode parseConstantOrListenerDeclWithOptionalType(STNode metadata, STNode qualifier, STNode constKeyword, boolean isListener) { STNode varNameOrTypeName = parseStatementStartIdentifier(); return parseConstantOrListenerDeclRhs(metadata, qualifier, constKeyword, 
varNameOrTypeName, isListener); } /** * Parse the component that follows the first identifier in a const decl. The identifier * can be either the type-name (a user defined type) or the var-name there the type-name * is not present. * * @param qualifier Qualifier that precedes the constant decl * @param keyword Keyword * @param typeOrVarName Identifier that follows the const-keywoord * @return Parsed node */ private STNode parseConstantOrListenerDeclRhs(STNode metadata, STNode qualifier, STNode keyword, STNode typeOrVarName, boolean isListener) { if (typeOrVarName.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) { STNode type = typeOrVarName; STNode variableName = parseVariableName(); return parseListenerOrConstRhs(metadata, qualifier, keyword, isListener, type, variableName); } STNode type; STNode variableName; switch (peek().kind) { case IDENTIFIER_TOKEN: type = typeOrVarName; variableName = parseVariableName(); break; case EQUAL_TOKEN: variableName = ((STSimpleNameReferenceNode) typeOrVarName).name; type = STNodeFactory.createEmptyNode(); break; default: recover(peek(), ParserRuleContext.CONST_DECL_RHS); return parseConstantOrListenerDeclRhs(metadata, qualifier, keyword, typeOrVarName, isListener); } return parseListenerOrConstRhs(metadata, qualifier, keyword, isListener, type, variableName); } private STNode parseListenerOrConstRhs(STNode metadata, STNode qualifier, STNode keyword, boolean isListener, STNode type, STNode variableName) { STNode equalsToken = parseAssignOp(); STNode initializer = parseExpression(); STNode semicolonToken = parseSemicolon(); if (isListener) { return STNodeFactory.createListenerDeclarationNode(metadata, qualifier, keyword, type, variableName, equalsToken, initializer, semicolonToken); } return STNodeFactory.createConstantDeclarationNode(metadata, qualifier, keyword, type, variableName, equalsToken, initializer, semicolonToken); } /** * Parse const keyword. 
* * @return Parsed node */ private STNode parseConstantKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.CONST_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.CONST_KEYWORD); return parseConstantKeyword(); } } /** * Parse typeof expression. * <p> * <code> * typeof-expr := typeof expression * </code> * * @param isRhsExpr * @return Typeof expression node */ private STNode parseTypeofExpression(boolean isRhsExpr, boolean isInConditionalExpr) { STNode typeofKeyword = parseTypeofKeyword(); STNode expr = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false, isInConditionalExpr); return STNodeFactory.createTypeofExpressionNode(typeofKeyword, expr); } /** * Parse typeof-keyword. * * @return Typeof-keyword node */ private STNode parseTypeofKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.TYPEOF_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.TYPEOF_KEYWORD); return parseTypeofKeyword(); } } /** * Parse optional type descriptor given the type. 
* <p> * <code>optional-type-descriptor := type-descriptor `?`</code> * </p> * * @param typeDescriptorNode Preceding type descriptor * @return Parsed node */ private STNode parseOptionalTypeDescriptor(STNode typeDescriptorNode) { startContext(ParserRuleContext.OPTIONAL_TYPE_DESCRIPTOR); STNode questionMarkToken = parseQuestionMark(); endContext(); return createOptionalTypeDesc(typeDescriptorNode, questionMarkToken); } private STNode createOptionalTypeDesc(STNode typeDescNode, STNode questionMarkToken) { if (typeDescNode.kind == SyntaxKind.UNION_TYPE_DESC) { STUnionTypeDescriptorNode unionTypeDesc = (STUnionTypeDescriptorNode) typeDescNode; STNode middleTypeDesc = createOptionalTypeDesc(unionTypeDesc.rightTypeDesc, questionMarkToken); typeDescNode = mergeTypesWithUnion(unionTypeDesc.leftTypeDesc, unionTypeDesc.pipeToken, middleTypeDesc); } else if (typeDescNode.kind == SyntaxKind.INTERSECTION_TYPE_DESC) { STIntersectionTypeDescriptorNode intersectionTypeDesc = (STIntersectionTypeDescriptorNode) typeDescNode; STNode middleTypeDesc = createOptionalTypeDesc(intersectionTypeDesc.rightTypeDesc, questionMarkToken); typeDescNode = mergeTypesWithIntersection(intersectionTypeDesc.leftTypeDesc, intersectionTypeDesc.bitwiseAndToken, middleTypeDesc); } else { typeDescNode = validateForUsageOfVar(typeDescNode); typeDescNode = STNodeFactory.createOptionalTypeDescriptorNode(typeDescNode, questionMarkToken); } return typeDescNode; } /** * Parse unary expression. * <p> * <code> * unary-expr := + expression | - expression | ~ expression | ! expression * </code> * * @param isRhsExpr * @return Unary expression node */ private STNode parseUnaryExpression(boolean isRhsExpr, boolean isInConditionalExpr) { STNode unaryOperator = parseUnaryOperator(); STNode expr = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false, isInConditionalExpr); return STNodeFactory.createUnaryExpressionNode(unaryOperator, expr); } /** * Parse unary operator. 
* <code>UnaryOperator := + | - | ~ | !</code> * * @return Parsed node */ private STNode parseUnaryOperator() { STToken token = peek(); if (isUnaryOperator(token.kind)) { return consume(); } else { recover(token, ParserRuleContext.UNARY_OPERATOR); return parseUnaryOperator(); } } /** * Check whether the given token kind is a unary operator. * * @param kind STToken kind * @return <code>true</code> if the token kind refers to a unary operator. <code>false</code> otherwise */ private boolean isUnaryOperator(SyntaxKind kind) { switch (kind) { case PLUS_TOKEN: case MINUS_TOKEN: case NEGATION_TOKEN: case EXCLAMATION_MARK_TOKEN: return true; default: return false; } } /** * Parse array type descriptor. * <p> * <code> * array-type-descriptor := array-member-type-descriptor [ [ array-length ] ] * array-member-type-descriptor := type-descriptor * array-length := * int-literal * | constant-reference-expr * | inferred-array-length * inferred-array-length := * * </code> * </p> * * @param memberTypeDesc * @return Parsed Node */ private STNode parseArrayTypeDescriptor(STNode memberTypeDesc) { startContext(ParserRuleContext.ARRAY_TYPE_DESCRIPTOR); STNode openBracketToken = parseOpenBracket(); STNode arrayLengthNode = parseArrayLength(); STNode closeBracketToken = parseCloseBracket(); endContext(); return createArrayTypeDesc(memberTypeDesc, openBracketToken, arrayLengthNode, closeBracketToken); } private STNode createArrayTypeDesc(STNode memberTypeDesc, STNode openBracketToken, STNode arrayLengthNode, STNode closeBracketToken) { memberTypeDesc = validateForUsageOfVar(memberTypeDesc); if (arrayLengthNode != null) { switch (arrayLengthNode.kind) { case ASTERISK_LITERAL: case SIMPLE_NAME_REFERENCE: case QUALIFIED_NAME_REFERENCE: break; case NUMERIC_LITERAL: SyntaxKind numericLiteralKind = arrayLengthNode.childInBucket(0).kind; if (numericLiteralKind == SyntaxKind.DECIMAL_INTEGER_LITERAL_TOKEN || numericLiteralKind == SyntaxKind.HEX_INTEGER_LITERAL_TOKEN) { break; } default: 
openBracketToken = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(openBracketToken, arrayLengthNode, DiagnosticErrorCode.ERROR_INVALID_ARRAY_LENGTH); arrayLengthNode = STNodeFactory.createEmptyNode(); } } List<STNode> arrayDimensions = new ArrayList(); if (memberTypeDesc.kind == SyntaxKind.ARRAY_TYPE_DESC) { STArrayTypeDescriptorNode innerArrayType = (STArrayTypeDescriptorNode) memberTypeDesc; STNode innerArrayDimensions = innerArrayType.dimensions; int dimensionCount = innerArrayDimensions.bucketCount(); for (int i = 0; i < dimensionCount; i++) { arrayDimensions.add(innerArrayDimensions.childInBucket(i)); } memberTypeDesc = innerArrayType.memberTypeDesc; } STNode arrayDimension = STNodeFactory.createArrayDimensionNode(openBracketToken, arrayLengthNode, closeBracketToken); arrayDimensions.add(arrayDimension); STNode arrayDimensionNodeList = STNodeFactory.createNodeList(arrayDimensions); return STNodeFactory.createArrayTypeDescriptorNode(memberTypeDesc, arrayDimensionNodeList); } /** * Parse array length. * <p> * <code> * array-length := * int-literal * | constant-reference-expr * | inferred-array-length * constant-reference-expr := variable-reference-expr * </code> * </p> * * @return Parsed array length */ private STNode parseArrayLength() { STToken token = peek(); switch (token.kind) { case DECIMAL_INTEGER_LITERAL_TOKEN: case HEX_INTEGER_LITERAL_TOKEN: case ASTERISK_TOKEN: return parseBasicLiteral(); case CLOSE_BRACKET_TOKEN: return STNodeFactory.createEmptyNode(); case IDENTIFIER_TOKEN: return parseQualifiedIdentifier(ParserRuleContext.ARRAY_LENGTH); default: recover(token, ParserRuleContext.ARRAY_LENGTH); return parseArrayLength(); } } /** * Parse annotations. * <p> * <i>Note: In the <a href="https: * annotations-list is specified as one-or-more annotations. And the usage is marked as * optional annotations-list. 
     * However, for the consistency of the tree, here we make the
     * annotation-list as zero-or-more annotations, and the usage is not-optional.</i>
     * <p>
     * <code>annots := annotation*</code>
     *
     * @return Parsed node
     */
    private STNode parseOptionalAnnotations() {
        startContext(ParserRuleContext.ANNOTATIONS);
        List<STNode> annotList = new ArrayList<>();
        STToken nextToken = peek();
        while (nextToken.kind == SyntaxKind.AT_TOKEN) {
            annotList.add(parseAnnotation());
            nextToken = peek();
        }

        endContext();
        return STNodeFactory.createNodeList(annotList);
    }

    /**
     * Parse annotation list with at least one annotation.
     *
     * @return Annotation list
     */
    private STNode parseAnnotations() {
        startContext(ParserRuleContext.ANNOTATIONS);
        List<STNode> annotList = new ArrayList<>();
        annotList.add(parseAnnotation());
        while (peek().kind == SyntaxKind.AT_TOKEN) {
            annotList.add(parseAnnotation());
        }

        endContext();
        return STNodeFactory.createNodeList(annotList);
    }

    /**
     * Parse annotation attachment.
     * <p>
     * <code>annotation := @ annot-tag-reference annot-value</code>
     *
     * @return Parsed node
     */
    private STNode parseAnnotation() {
        STNode atToken = parseAtToken();
        STNode annotReference;
        if (isPredeclaredIdentifier(peek().kind)) {
            annotReference = parseQualifiedIdentifier(ParserRuleContext.ANNOT_REFERENCE);
        } else {
            // Recovery: fabricate a missing identifier so an annotation node is still produced.
            annotReference = STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
            annotReference = STNodeFactory.createSimpleNameReferenceNode(annotReference);
        }

        STNode annotValue;
        if (peek().kind == SyntaxKind.OPEN_BRACE_TOKEN) {
            annotValue = parseMappingConstructorExpr();
        } else {
            // The annot-value (mapping constructor) is optional.
            annotValue = STNodeFactory.createEmptyNode();
        }
        return STNodeFactory.createAnnotationNode(atToken, annotReference, annotValue);
    }

    /**
     * Parse '@' token.
     *
     * @return Parsed node
     */
    private STNode parseAtToken() {
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.AT_TOKEN) {
            return consume();
        } else {
            recover(nextToken, ParserRuleContext.AT);
            return parseAtToken();
        }
    }

    /**
     * Parse metadata.
     * Metadata consists of an optional doc string and
     * an annotations list.
     * <p>
     * <code>metadata := [DocumentationString] annots</code>
     *
     * @return Parsed node
     */
    private STNode parseMetaData() {
        STNode docString;
        STNode annotations;
        switch (peek().kind) {
            case DOCUMENTATION_STRING:
                docString = parseMarkdownDocumentation();
                annotations = parseOptionalAnnotations();
                break;
            case AT_TOKEN:
                docString = STNodeFactory.createEmptyNode();
                annotations = parseOptionalAnnotations();
                break;
            default:
                // Neither a doc string nor annotations: no metadata node at all.
                return STNodeFactory.createEmptyNode();
        }

        return createMetadata(docString, annotations);
    }

    /**
     * Create metadata node.
     *
     * @return A metadata node
     */
    private STNode createMetadata(STNode docString, STNode annotations) {
        if (annotations == null && docString == null) {
            return STNodeFactory.createEmptyNode();
        } else {
            return STNodeFactory.createMetadataNode(docString, annotations);
        }
    }

    /**
     * Parse type test expression.
     * <code>
     * type-test-expr := expression (is | !is) type-descriptor
     * </code>
     *
     * @param lhsExpr Preceding expression of the is expression
     * @return Is expression node
     */
    private STNode parseTypeTestExpression(STNode lhsExpr, boolean isInConditionalExpr) {
        STNode isOrNotIsKeyword = parseIsOrNotIsKeyword();
        STNode typeDescriptor = parseTypeDescriptorInExpression(isInConditionalExpr);
        return STNodeFactory.createTypeTestExpressionNode(lhsExpr, isOrNotIsKeyword, typeDescriptor);
    }

    /**
     * Parse `is` keyword or `!is` keyword.
     *
     * @return is-keyword or not-is-keyword node
     */
    private STNode parseIsOrNotIsKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.IS_KEYWORD || token.kind == SyntaxKind.NOT_IS_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.IS_KEYWORD);
            return parseIsOrNotIsKeyword();
        }
    }

    /**
     * Parse local type definition statement.
     * <code>local-type-defn-stmt := [annots] type identifier type-descriptor ;</code>
     *
     * @param annots Annotations attached to the statement
     * @return Local type definition statement
     */
    private STNode parseLocalTypeDefinitionStatement(STNode annots) {
        startContext(ParserRuleContext.LOCAL_TYPE_DEFINITION_STMT);
        STNode typeKeyword = parseTypeKeyword();
        STNode typeName = parseTypeName();
        STNode typeDescriptor = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_DEF);
        STNode semicolon = parseSemicolon();
        endContext();
        return STNodeFactory.createLocalTypeDefinitionStatementNode(annots, typeKeyword, typeName, typeDescriptor,
                semicolon);
    }

    /**
     * Parse a statement which consists only of an action or expression.
     *
     * @param annots Annotations
     * @return Statement node
     */
    private STNode parseExpressionStatement(STNode annots) {
        startContext(ParserRuleContext.EXPRESSION_STATEMENT);
        STNode expression = parseActionOrExpressionInLhs(annots);
        return getExpressionAsStatement(expression);
    }

    /**
     * Parse statements that start with an expression.
     *
     * @param annots Annotations
     * @return Statement node
     */
    private STNode parseStatementStartWithExpr(STNode annots) {
        startContext(ParserRuleContext.AMBIGUOUS_STMT);
        STNode expr = parseActionOrExpressionInLhs(annots);
        return parseStatementStartWithExprRhs(expr);
    }

    /**
     * Parse the component followed by the expression, at the beginning of a statement.
     *
     * @param expression Action or expression in LHS
     * @return Statement node
     */
    private STNode parseStatementStartWithExprRhs(STNode expression) {
        SyntaxKind nextTokenKind = peek().kind;
        if (isAction(expression) || nextTokenKind == SyntaxKind.SEMICOLON_TOKEN) {
            return getExpressionAsStatement(expression);
        }

        switch (nextTokenKind) {
            case EQUAL_TOKEN:
                switchContext(ParserRuleContext.ASSIGNMENT_STMT);
                return parseAssignmentStmtRhs(expression);
            case IDENTIFIER_TOKEN:
            default:
                if (isCompoundAssignment(nextTokenKind)) {
                    return parseCompoundAssignmentStmtRhs(expression);
                }

                // Pick the recovery context depending on whether the expression could
                // legally stand alone as an expression statement.
                ParserRuleContext context;
                if (isPossibleExpressionStatement(expression)) {
                    context = ParserRuleContext.EXPR_STMT_RHS;
                } else {
                    context = ParserRuleContext.STMT_START_WITH_EXPR_RHS;
                }
                recover(peek(), context);
                return parseStatementStartWithExprRhs(expression);
        }
    }

    /**
     * Check whether the given expression kind is allowed to stand alone as an expression statement.
     *
     * @param expression Expression to check
     * @return <code>true</code> if the expression can form an expression statement
     */
    private boolean isPossibleExpressionStatement(STNode expression) {
        switch (expression.kind) {
            case METHOD_CALL:
            case FUNCTION_CALL:
            case CHECK_EXPRESSION:
            case REMOTE_METHOD_CALL_ACTION:
            case CHECK_ACTION:
            case BRACED_ACTION:
            case START_ACTION:
            case TRAP_ACTION:
            case FLUSH_ACTION:
            case ASYNC_SEND_ACTION:
            case SYNC_SEND_ACTION:
            case RECEIVE_ACTION:
            case WAIT_ACTION:
            case QUERY_ACTION:
            case COMMIT_ACTION:
                return true;
            default:
                return false;
        }
    }

    /**
     * Wrap an already-parsed expression or action in the matching statement node.
     * Anything that is not a valid expression/action statement is wrapped in an
     * invalid-expression-statement node carrying a diagnostic.
     *
     * @param expression Parsed expression or action
     * @return Statement node
     */
    private STNode getExpressionAsStatement(STNode expression) {
        switch (expression.kind) {
            case METHOD_CALL:
            case FUNCTION_CALL:
                return parseCallStatement(expression);
            case CHECK_EXPRESSION:
                return parseCheckStatement(expression);
            case REMOTE_METHOD_CALL_ACTION:
            case CHECK_ACTION:
            case BRACED_ACTION:
            case START_ACTION:
            case TRAP_ACTION:
            case FLUSH_ACTION:
            case ASYNC_SEND_ACTION:
            case SYNC_SEND_ACTION:
            case RECEIVE_ACTION:
            case WAIT_ACTION:
            case QUERY_ACTION:
            case COMMIT_ACTION:
            case CLIENT_RESOURCE_ACCESS_ACTION:
                return parseActionStatement(expression);
            default:
                STNode semicolon = parseSemicolon();
                endContext();
                expression = getExpression(expression);
                STNode exprStmt =
                        STNodeFactory.createExpressionStatementNode(SyntaxKind.INVALID_EXPRESSION_STATEMENT,
                                expression, semicolon);
                exprStmt = SyntaxErrors.addDiagnostic(exprStmt, DiagnosticErrorCode.ERROR_INVALID_EXPRESSION_STATEMENT);
                return exprStmt;
        }
    }

    /**
     * Re-interpret an indexed expression (e.g. <code>T[x]</code>) as an array type descriptor,
     * validating its key expression as an array length.
     *
     * @param indexedExpr Indexed expression to convert
     * @return Array type descriptor node
     */
    private STNode parseArrayTypeDescriptorNode(STIndexedExpressionNode indexedExpr) {
        STNode memberTypeDesc = getTypeDescFromExpr(indexedExpr.containerExpression);
        STNodeList lengthExprs = (STNodeList) indexedExpr.keyExpression;
        if (lengthExprs.isEmpty()) {
            return createArrayTypeDesc(memberTypeDesc, indexedExpr.openBracket, STNodeFactory.createEmptyNode(),
                    indexedExpr.closeBracket);
        }

        // Validate the array length expression.
        STNode lengthExpr = lengthExprs.get(0);
        switch (lengthExpr.kind) {
            case SIMPLE_NAME_REFERENCE:
                STSimpleNameReferenceNode nameRef = (STSimpleNameReferenceNode) lengthExpr;
                if (nameRef.name.isMissing()) {
                    return createArrayTypeDesc(memberTypeDesc, indexedExpr.openBracket,
                            STNodeFactory.createEmptyNode(), indexedExpr.closeBracket);
                }
                break;
            case ASTERISK_LITERAL:
            case QUALIFIED_NAME_REFERENCE:
                break;
            case NUMERIC_LITERAL:
                SyntaxKind innerChildKind = lengthExpr.childInBucket(0).kind;
                if (innerChildKind == SyntaxKind.DECIMAL_INTEGER_LITERAL_TOKEN ||
                        innerChildKind == SyntaxKind.HEX_INTEGER_LITERAL_TOKEN) {
                    break;
                }
                // fall through: only integer literals are valid lengths
            default:
                // Invalid length: attach it to the open bracket as invalid-node minutiae and drop it.
                STNode newOpenBracketWithDiagnostics = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(
                        indexedExpr.openBracket, lengthExpr, DiagnosticErrorCode.ERROR_INVALID_ARRAY_LENGTH);
                indexedExpr = indexedExpr.replace(indexedExpr.openBracket, newOpenBracketWithDiagnostics);
                lengthExpr = STNodeFactory.createEmptyNode();
        }

        return createArrayTypeDesc(memberTypeDesc, indexedExpr.openBracket, lengthExpr, indexedExpr.closeBracket);
    }

    /**
     * <p>
     * Parse call statement, given the call expression.
     * </p>
     * <code>
     * call-stmt := call-expr ;
     * <br/>
     * call-expr := function-call-expr | method-call-expr | checking-keyword call-expr
     * </code>
     *
     * @param expression Call expression associated with the call statement
     * @return Call statement node
     */
    private STNode parseCallStatement(STNode expression) {
        return parseCallStatementOrCheckStatement(expression);
    }

    /**
     * <p>
     * Parse checking statement.
     * </p>
     * <code>
     * checking-stmt := checking-expr ;
     * <br/>
     * checking-expr := checking-keyword expr ;
     * </code>
     *
     * @param expression Checking expression associated with the checking statement
     * @return Checking statement node
     */
    private STNode parseCheckStatement(STNode expression) {
        return parseCallStatementOrCheckStatement(expression);
    }

    /**
     * Common tail of call/check statements: consume the terminating semicolon and wrap the
     * expression in a call-statement node.
     *
     * @param expression Expression of the statement
     * @return Statement node
     */
    private STNode parseCallStatementOrCheckStatement(STNode expression) {
        STNode semicolon = parseSemicolon();
        endContext();
        return STNodeFactory.createExpressionStatementNode(SyntaxKind.CALL_STATEMENT, expression, semicolon);
    }

    /**
     * Wrap an already-parsed action in an action-statement node, consuming the terminating semicolon.
     *
     * @param action Action of the statement
     * @return Statement node
     */
    private STNode parseActionStatement(STNode action) {
        STNode semicolon = parseSemicolon();
        endContext();
        return STNodeFactory.createExpressionStatementNode(SyntaxKind.ACTION_STATEMENT, action, semicolon);
    }

    /**
     * Parse client resource access action, given the starting expression.
     * <br/><br/>
     * <code>
     * client-resource-access-action := expression "->" "/" [resource-access-path] ["."
method-name] ["(" arg-list ")"] * </code> * * @param expression Expression * @param rightArrow Right arrow token * @param slashToken Slash token * @return Parsed node */ private STNode parseClientResourceAccessAction(STNode expression, STNode rightArrow, STNode slashToken, boolean isRhsExpr, boolean isInMatchGuard) { startContext(ParserRuleContext.CLIENT_RESOURCE_ACCESS_ACTION); STNode resourceAccessPath = parseOptionalResourceAccessPath(isRhsExpr, isInMatchGuard); STNode resourceAccessMethodDot = parseOptionalResourceAccessMethodDot(isRhsExpr, isInMatchGuard); STNode resourceAccessMethodName = STNodeFactory.createEmptyNode(); if (resourceAccessMethodDot != null) { resourceAccessMethodName = STNodeFactory.createSimpleNameReferenceNode(parseFunctionName()); } STNode resourceMethodCallArgList = parseOptionalResourceAccessActionArgList(isRhsExpr, isInMatchGuard); endContext(); return STNodeFactory.createClientResourceAccessActionNode(expression, rightArrow, slashToken, resourceAccessPath, resourceAccessMethodDot, resourceAccessMethodName, resourceMethodCallArgList); } private STNode parseOptionalResourceAccessPath(boolean isRhsExpr, boolean isInMatchGuard) { STNode resourceAccessPath = STNodeFactory.createEmptyNodeList(); STToken nextToken = peek(); switch (nextToken.kind) { case IDENTIFIER_TOKEN: case OPEN_BRACKET_TOKEN: resourceAccessPath = parseResourceAccessPath(isRhsExpr, isInMatchGuard); break; case DOT_TOKEN: case OPEN_PAREN_TOKEN: break; default: if (isEndOfActionOrExpression(nextToken, isRhsExpr, isInMatchGuard)) { break; } recover(nextToken, ParserRuleContext.OPTIONAL_RESOURCE_ACCESS_PATH); return parseOptionalResourceAccessPath(isRhsExpr, isInMatchGuard); } return resourceAccessPath; } private STNode parseOptionalResourceAccessMethodDot(boolean isRhsExpr, boolean isInMatchGuard) { STNode dotToken = STNodeFactory.createEmptyNode(); STToken nextToken = peek(); switch (nextToken.kind) { case DOT_TOKEN: dotToken = consume(); break; case OPEN_PAREN_TOKEN: break; 
default: if (isEndOfActionOrExpression(nextToken, isRhsExpr, isInMatchGuard)) { break; } recover(nextToken, ParserRuleContext.OPTIONAL_RESOURCE_ACCESS_METHOD); return parseOptionalResourceAccessMethodDot(isRhsExpr, isInMatchGuard); } return dotToken; } private STNode parseOptionalResourceAccessActionArgList(boolean isRhsExpr, boolean isInMatchGuard) { STNode argList = STNodeFactory.createEmptyNode(); STToken nextToken = peek(); switch (nextToken.kind) { case OPEN_PAREN_TOKEN: argList = parseParenthesizedArgList(); break; default: if (isEndOfActionOrExpression(nextToken, isRhsExpr, isInMatchGuard)) { break; } recover(nextToken, ParserRuleContext.OPTIONAL_RESOURCE_ACCESS_ACTION_ARG_LIST); return parseOptionalResourceAccessActionArgList(isRhsExpr, isInMatchGuard); } return argList; } /** * Parse resource access path. * <br/><br/> * <code> * resource-access-path := * resource-access-segments ["/" resource-access-rest-segment] * | resource-access-rest-segment * <br/><br/> * resource-access-segments := resource-access-segment ("/" resource-access-segment ")* * <br/><br/> * resource-access-segment := resource-path-segment-name | computed-resource-access-segment * <br/><br/> * resource-path-segment-name := identifier * </code> * @return */ private STNode parseResourceAccessPath(boolean isRhsExpr, boolean isInMatchGuard) { List<STNode> pathSegmentList = new ArrayList<>(); STNode pathSegment = parseResourceAccessSegment(); pathSegmentList.add(pathSegment); STNode leadingSlash; STNode previousPathSegmentNode = pathSegment; while (!isEndOfResourceAccessPathSegments(peek(), isRhsExpr, isInMatchGuard)) { leadingSlash = parseResourceAccessSegmentRhs(isRhsExpr, isInMatchGuard); if (leadingSlash == null) { break; } pathSegment = parseResourceAccessSegment(); if (previousPathSegmentNode.kind == SyntaxKind.RESOURCE_ACCESS_REST_SEGMENT) { updateLastNodeInListWithInvalidNode(pathSegmentList, leadingSlash, null); updateLastNodeInListWithInvalidNode(pathSegmentList, pathSegment, 
DiagnosticErrorCode.RESOURCE_ACCESS_SEGMENT_IS_NOT_ALLOWED_AFTER_REST_SEGMENT); } else { pathSegmentList.add(leadingSlash); pathSegmentList.add(pathSegment); previousPathSegmentNode = pathSegment; } } return STNodeFactory.createNodeList(pathSegmentList); } private STNode parseResourceAccessSegment() { STToken nextToken = peek(); switch (nextToken.kind) { case IDENTIFIER_TOKEN: return consume(); case OPEN_BRACKET_TOKEN: return parseComputedOrResourceAccessRestSegment(consume()); default: recover(nextToken, ParserRuleContext.RESOURCE_ACCESS_PATH_SEGMENT); return parseResourceAccessSegment(); } } /** * Parse computed resource segment or resource access rest segment. * <code> * <br/> * computed-resource-access-segment := "[" expression "]" * <br/> * resource-access-rest-segment := "[" "..." expression "]" * </code> * @param openBracket Open bracket token * @return Parsed node */ private STNode parseComputedOrResourceAccessRestSegment(STNode openBracket) { STToken nextToken = peek(); switch (nextToken.kind) { case ELLIPSIS_TOKEN: STNode ellipsisToken = consume(); STNode expression = parseExpression(); STNode closeBracketToken = parseCloseBracket(); return STNodeFactory.createResourceAccessRestSegmentNode(openBracket, ellipsisToken, expression, closeBracketToken); default: if (isValidExprStart(nextToken.kind)) { expression = parseExpression(); closeBracketToken = parseCloseBracket(); return STNodeFactory.createComputedResourceAccessSegmentNode(openBracket, expression, closeBracketToken); } recover(nextToken, ParserRuleContext.COMPUTED_SEGMENT_OR_REST_SEGMENT); return parseComputedOrResourceAccessRestSegment(openBracket); } } /** * Parse resource access segment end. 
* * @return Parsed node */ private STNode parseResourceAccessSegmentRhs(boolean isRhsExpr, boolean isInMatchGuard) { STToken nextToken = peek(); switch (nextToken.kind) { case SLASH_TOKEN: return consume(); default: if (isEndOfResourceAccessPathSegments(nextToken, isRhsExpr, isInMatchGuard)) { return null; } recover(nextToken, ParserRuleContext.RESOURCE_ACCESS_SEGMENT_RHS); return parseResourceAccessSegmentRhs(isRhsExpr, isInMatchGuard); } } private boolean isEndOfResourceAccessPathSegments(STToken nextToken, boolean isRhsExpr, boolean isInMatchGuard) { switch (nextToken.kind) { case DOT_TOKEN: case OPEN_PAREN_TOKEN: return true; default: return isEndOfActionOrExpression(nextToken, isRhsExpr, isInMatchGuard); } } private STNode parseRemoteMethodCallOrClientResourceAccessOrAsyncSendAction(STNode expression, boolean isRhsExpr, boolean isInMatchGuard) { STNode rightArrow = parseRightArrow(); return parseClientResourceAccessOrAsyncSendActionRhs(expression, rightArrow, isRhsExpr, isInMatchGuard); } private STNode parseClientResourceAccessOrAsyncSendActionRhs(STNode expression, STNode rightArrow, boolean isRhsExpr, boolean isInMatchGuard) { STNode name; STToken nextToken = peek(); switch (nextToken.kind) { case FUNCTION_KEYWORD: STNode functionKeyword = consume(); name = STNodeFactory.createSimpleNameReferenceNode(functionKeyword); return parseAsyncSendAction(expression, rightArrow, name); case CONTINUE_KEYWORD: case COMMIT_KEYWORD: name = getKeywordAsSimpleNameRef(); break; case SLASH_TOKEN: STNode slashToken = consume(); return parseClientResourceAccessAction(expression, rightArrow, slashToken, isRhsExpr, isInMatchGuard); default: if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) { STToken nextNextToken = getNextNextToken(); if (nextNextToken.kind == SyntaxKind.OPEN_PAREN_TOKEN || isEndOfActionOrExpression(nextNextToken, isRhsExpr, isInMatchGuard) || nextToken.isMissing()) { name = STNodeFactory.createSimpleNameReferenceNode(parseFunctionName()); break; } } STToken 
token = peek(); Solution solution = recover(token, ParserRuleContext.REMOTE_OR_RESOURCE_CALL_OR_ASYNC_SEND_RHS); if (solution.action == Action.KEEP) { name = STNodeFactory.createSimpleNameReferenceNode(parseFunctionName()); break; } return parseClientResourceAccessOrAsyncSendActionRhs(expression, rightArrow, isRhsExpr, isInMatchGuard); } return parseRemoteCallOrAsyncSendEnd(expression, rightArrow, name); } private STNode parseRemoteCallOrAsyncSendEnd(STNode expression, STNode rightArrow, STNode name) { STToken nextToken = peek(); switch (nextToken.kind) { case OPEN_PAREN_TOKEN: return parseRemoteMethodCallAction(expression, rightArrow, name); case SEMICOLON_TOKEN: case CLOSE_PAREN_TOKEN: case OPEN_BRACE_TOKEN: case COMMA_TOKEN: case FROM_KEYWORD: case JOIN_KEYWORD: case ON_KEYWORD: case LET_KEYWORD: case WHERE_KEYWORD: case ORDER_KEYWORD: case LIMIT_KEYWORD: case SELECT_KEYWORD: return parseAsyncSendAction(expression, rightArrow, name); default: if (isGroupOrCollectKeyword(nextToken)) { return parseAsyncSendAction(expression, rightArrow, name); } recover(peek(), ParserRuleContext.REMOTE_CALL_OR_ASYNC_SEND_END); return parseRemoteCallOrAsyncSendEnd(expression, rightArrow, name); } } private STNode parseAsyncSendAction(STNode expression, STNode rightArrow, STNode peerWorker) { return STNodeFactory.createAsyncSendActionNode(expression, rightArrow, peerWorker); } /** * Parse remote method call action. 
     * <p>
     * <code>
     * remote-method-call-action := expression -> method-name ( arg-list )
     * <br/>
     * async-send-action := expression -> peer-worker ;
     * </code>
     *
     * @param expression LHS expression
     * @param rightArrow right arrow token
     * @param name remote method name
     * @return Remote method call action node
     */
    private STNode parseRemoteMethodCallAction(STNode expression, STNode rightArrow, STNode name) {
        STNode openParenToken = parseArgListOpenParenthesis();
        STNode arguments = parseArgsList();
        STNode closeParenToken = parseArgListCloseParenthesis();
        return STNodeFactory.createRemoteMethodCallActionNode(expression, rightArrow, name, openParenToken, arguments,
                closeParenToken);
    }

    /**
     * Parse right arrow (<code>-></code>) token.
     *
     * @return Parsed node
     */
    private STNode parseRightArrow() {
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.RIGHT_ARROW_TOKEN) {
            return consume();
        } else {
            recover(nextToken, ParserRuleContext.RIGHT_ARROW);
            return parseRightArrow();
        }
    }

    /**
     * Parse map type descriptor.
     * <p>
     * <code>map-type-descriptor := `map` type-parameter</code>
     *
     * @param mapKeyword Already-consumed `map` keyword
     * @return Parsed node
     */
    private STNode parseMapTypeDescriptor(STNode mapKeyword) {
        STNode typeParameter = parseTypeParameter();
        return STNodeFactory.createMapTypeDescriptorNode(mapKeyword, typeParameter);
    }

    /**
     * Parse parameterized type descriptor.
     * parameterized-type-descriptor := `typedesc` [type-parameter]
     * <br/>&nbsp;| `future` [type-parameter]
     * <br/>&nbsp;| `xml` [type-parameter]
     * <br/>&nbsp;| `error` [type-parameter]
     *
     * @param keywordToken Parameterized-type keyword token (typedesc | future | xml | error)
     * @return Parsed node
     */
    private STNode parseParameterizedTypeDescriptor(STNode keywordToken) {
        STNode typeParamNode;
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.LT_TOKEN) {
            typeParamNode = parseTypeParameter();
        } else {
            // The type parameter is optional.
            typeParamNode = STNodeFactory.createEmptyNode();
        }

        SyntaxKind parameterizedTypeDescKind = getParameterizedTypeDescKind(keywordToken);
        return STNodeFactory.createParameterizedTypeDescriptorNode(parameterizedTypeDescKind, keywordToken,
                typeParamNode);
    }

    /**
     * Map a parameterized-type keyword token to the corresponding type descriptor kind.
     * Defaults to the error type desc kind for any unexpected token.
     *
     * @param keywordToken typedesc/future/xml/error keyword token
     * @return Corresponding type descriptor kind
     */
    private SyntaxKind getParameterizedTypeDescKind(STNode keywordToken) {
        switch (keywordToken.kind) {
            case TYPEDESC_KEYWORD:
                return SyntaxKind.TYPEDESC_TYPE_DESC;
            case FUTURE_KEYWORD:
                return SyntaxKind.FUTURE_TYPE_DESC;
            case XML_KEYWORD:
                return SyntaxKind.XML_TYPE_DESC;
            case ERROR_KEYWORD:
            default:
                return SyntaxKind.ERROR_TYPE_DESC;
        }
    }

    /**
     * Parse <code>&gt;</code> token.
     *
     * @return Parsed node
     */
    private STNode parseGTToken() {
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.GT_TOKEN) {
            return consume();
        } else {
            recover(nextToken, ParserRuleContext.GT);
            return parseGTToken();
        }
    }

    /**
     * Parse <code>&lt;</code> token.
     *
     * @return Parsed node
     */
    private STNode parseLTToken() {
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.LT_TOKEN) {
            return consume();
        } else {
            recover(nextToken, ParserRuleContext.LT);
            return parseLTToken();
        }
    }

    /**
     * Parse nil literal. Here nil literal is only referred to ( ).
     *
     * @return Parsed node
     */
    private STNode parseNilLiteral() {
        startContext(ParserRuleContext.NIL_LITERAL);
        STNode openParenthesisToken = parseOpenParenthesis();
        STNode closeParenthesisToken = parseCloseParenthesis();
        endContext();
        return STNodeFactory.createNilLiteralNode(openParenthesisToken, closeParenthesisToken);
    }

    /**
     * Parse annotation declaration, given the qualifier.
     *
     * @param metadata Metadata
     * @param qualifier Qualifier that precedes the annotation declaration
     * @param constKeyword Const keyword
     * @return Parsed node
     */
    private STNode parseAnnotationDeclaration(STNode metadata, STNode qualifier, STNode constKeyword) {
        startContext(ParserRuleContext.ANNOTATION_DECL);
        STNode annotationKeyword = parseAnnotationKeyword();
        STNode annotDecl = parseAnnotationDeclFromType(metadata, qualifier, constKeyword, annotationKeyword);
        endContext();
        return annotDecl;
    }

    /**
     * Parse annotation keyword.
     *
     * @return Parsed node
     */
    private STNode parseAnnotationKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.ANNOTATION_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.ANNOTATION_KEYWORD);
            return parseAnnotationKeyword();
        }
    }

    /**
     * Parse the components that follow the annotation keyword of an annotation declaration.
     *
     * @param metadata Metadata
     * @param qualifier Qualifier that precedes the annotation declaration
     * @param constKeyword Const keyword
     * @param annotationKeyword Annotation keyword
     * @return Parsed node
     */
    private STNode parseAnnotationDeclFromType(STNode metadata, STNode qualifier, STNode constKeyword,
                                               STNode annotationKeyword) {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case IDENTIFIER_TOKEN:
                // Ambiguous: the identifier may be a user-defined type name or the annot-tag.
                return parseAnnotationDeclWithOptionalType(metadata, qualifier, constKeyword, annotationKeyword);
            default:
                if (isTypeStartingToken(nextToken.kind)) {
                    break;
                }
                recover(peek(), ParserRuleContext.ANNOT_DECL_OPTIONAL_TYPE);
                return parseAnnotationDeclFromType(metadata, qualifier, constKeyword, annotationKeyword);
        }

        STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANNOTATION_DECL);
        STNode annotTag = parseAnnotationTag();
        return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc,
                annotTag);
    }

    /**
     * Parse annotation tag.
     * <p>
     * <code>annot-tag := identifier</code>
     *
     * @return Parsed node
     */
    private STNode parseAnnotationTag() {
        STToken token = peek();
        if (token.kind == SyntaxKind.IDENTIFIER_TOKEN) {
            return consume();
        } else {
            recover(peek(), ParserRuleContext.ANNOTATION_TAG);
            return parseAnnotationTag();
        }
    }

    /**
     * Parse an annotation declaration whose type descriptor may be absent, starting from the first
     * identifier. That identifier may turn out to be the (user-defined) type name, or the annot-tag
     * when no type is given.
     *
     * @param metadata Metadata
     * @param qualifier Qualifier that precedes the annotation declaration
     * @param constKeyword Const keyword
     * @param annotationKeyword Annotation keyword
     * @return Parsed node
     */
    private STNode parseAnnotationDeclWithOptionalType(STNode metadata, STNode qualifier, STNode constKeyword,
                                                       STNode annotationKeyword) {
        STNode typeDescOrAnnotTag = parseQualifiedIdentifier(ParserRuleContext.ANNOT_DECL_OPTIONAL_TYPE);
        if (typeDescOrAnnotTag.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {
            // A qualified name can only be a type reference, so the annot-tag must follow.
            STNode annotTag = parseAnnotationTag();
            return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword,
                    typeDescOrAnnotTag, annotTag);
        }

        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN || isValidTypeContinuationToken(nextToken)) {
            // The identifier starts a type descriptor; finish the type, then read the annot-tag.
            STNode typeDesc = parseComplexTypeDescriptor(typeDescOrAnnotTag,
                    ParserRuleContext.TYPE_DESC_IN_ANNOTATION_DECL, false);
            STNode annotTag = parseAnnotationTag();
            return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc,
                    annotTag);
        }

        // A lone identifier: treat it as the annot-tag and decide the rest from what follows.
        STNode annotTag = ((STSimpleNameReferenceNode) typeDescOrAnnotTag).name;
        return parseAnnotationDeclRhs(metadata, qualifier, constKeyword, annotationKeyword, annotTag);
    }

    /**
     * Parse the component that follows the first identifier in an annotation decl. The identifier
     * can be either the type-name (a user defined type) or the annot-tag, where the type-name
     * is not present.
* * @param metadata Metadata * @param qualifier Qualifier that precedes the annotation decl * @param constKeyword Const keyword * @param annotationKeyword Annotation keyword * @param typeDescOrAnnotTag Identifier that follows the annotation-keyword * @return Parsed node */ private STNode parseAnnotationDeclRhs(STNode metadata, STNode qualifier, STNode constKeyword, STNode annotationKeyword, STNode typeDescOrAnnotTag) { STToken nextToken = peek(); STNode typeDesc; STNode annotTag; switch (nextToken.kind) { case IDENTIFIER_TOKEN: typeDesc = typeDescOrAnnotTag; annotTag = parseAnnotationTag(); break; case SEMICOLON_TOKEN: case ON_KEYWORD: typeDesc = STNodeFactory.createEmptyNode(); annotTag = typeDescOrAnnotTag; break; default: recover(peek(), ParserRuleContext.ANNOT_DECL_RHS); return parseAnnotationDeclRhs(metadata, qualifier, constKeyword, annotationKeyword, typeDescOrAnnotTag); } return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc, annotTag); } private STNode parseAnnotationDeclAttachPoints(STNode metadata, STNode qualifier, STNode constKeyword, STNode annotationKeyword, STNode typeDesc, STNode annotTag) { STNode onKeyword; STNode attachPoints; STToken nextToken = peek(); switch (nextToken.kind) { case SEMICOLON_TOKEN: onKeyword = STNodeFactory.createEmptyNode(); attachPoints = STNodeFactory.createEmptyNodeList(); break; case ON_KEYWORD: onKeyword = parseOnKeyword(); attachPoints = parseAnnotationAttachPoints(); onKeyword = cloneWithDiagnosticIfListEmpty(attachPoints, onKeyword, DiagnosticErrorCode.ERROR_MISSING_ANNOTATION_ATTACH_POINT); break; default: recover(peek(), ParserRuleContext.ANNOT_OPTIONAL_ATTACH_POINTS); return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc, annotTag); } STNode semicolonToken = parseSemicolon(); return STNodeFactory.createAnnotationDeclarationNode(metadata, qualifier, constKeyword, annotationKeyword, typeDesc, annotTag, onKeyword, 
attachPoints, semicolonToken); } /** * Parse annotation attach points. * <p> * <code> * annot-attach-points := annot-attach-point (, annot-attach-point)* * <br/><br/> * annot-attach-point := dual-attach-point | source-only-attach-point * <br/><br/> * dual-attach-point := [source] dual-attach-point-ident * <br/><br/> * dual-attach-point-ident := * type * | class * | [object|service remote] function * | parameter * | return * | service * | [object|record] field * <br/><br/> * source-only-attach-point := source source-only-attach-point-ident * <br/><br/> * source-only-attach-point-ident := * annotation * | external * | var * | const * | listener * | worker * </code> * * @return Parsed node */ private STNode parseAnnotationAttachPoints() { startContext(ParserRuleContext.ANNOT_ATTACH_POINTS_LIST); List<STNode> attachPoints = new ArrayList<>(); STToken nextToken = peek(); if (isEndAnnotAttachPointList(nextToken.kind)) { endContext(); return STNodeFactory.createEmptyNodeList(); } STNode attachPoint = parseAnnotationAttachPoint(); attachPoints.add(attachPoint); nextToken = peek(); STNode leadingComma; while (!isEndAnnotAttachPointList(nextToken.kind)) { leadingComma = parseAttachPointEnd(); if (leadingComma == null) { break; } attachPoints.add(leadingComma); attachPoint = parseAnnotationAttachPoint(); if (attachPoint == null) { STToken missingAttachPointIdent = SyntaxErrors.createMissingToken(SyntaxKind.TYPE_KEYWORD); STNode identList = STNodeFactory.createNodeList(missingAttachPointIdent); attachPoint = STNodeFactory.createAnnotationAttachPointNode(STNodeFactory.createEmptyNode(), identList); attachPoint = SyntaxErrors.addDiagnostic(attachPoint, DiagnosticErrorCode.ERROR_MISSING_ANNOTATION_ATTACH_POINT); attachPoints.add(attachPoint); break; } attachPoints.add(attachPoint); nextToken = peek(); } if (attachPoint.lastToken().isMissing() && this.tokenReader.peek().kind == SyntaxKind.IDENTIFIER_TOKEN && !this.tokenReader.head().hasTrailingNewline()) { STToken 
nextNonVirtualToken = this.tokenReader.read(); updateLastNodeInListWithInvalidNode(attachPoints, nextNonVirtualToken, DiagnosticErrorCode.ERROR_INVALID_TOKEN, nextNonVirtualToken.text()); } endContext(); return STNodeFactory.createNodeList(attachPoints); } /** * Parse annotation attach point end. * * @return Parsed node */ private STNode parseAttachPointEnd() { switch (peek().kind) { case SEMICOLON_TOKEN: return null; case COMMA_TOKEN: return consume(); default: recover(peek(), ParserRuleContext.ATTACH_POINT_END); return parseAttachPointEnd(); } } private boolean isEndAnnotAttachPointList(SyntaxKind tokenKind) { switch (tokenKind) { case EOF_TOKEN: case SEMICOLON_TOKEN: return true; default: return false; } } /** * Parse annotation attach point. * * @return Parsed node */ private STNode parseAnnotationAttachPoint() { switch (peek().kind) { case EOF_TOKEN: return null; case ANNOTATION_KEYWORD: case EXTERNAL_KEYWORD: case VAR_KEYWORD: case CONST_KEYWORD: case LISTENER_KEYWORD: case WORKER_KEYWORD: case SOURCE_KEYWORD: STNode sourceKeyword = parseSourceKeyword(); return parseAttachPointIdent(sourceKeyword); case OBJECT_KEYWORD: case TYPE_KEYWORD: case FUNCTION_KEYWORD: case PARAMETER_KEYWORD: case RETURN_KEYWORD: case SERVICE_KEYWORD: case FIELD_KEYWORD: case RECORD_KEYWORD: case CLASS_KEYWORD: sourceKeyword = STNodeFactory.createEmptyNode(); STNode firstIdent = consume(); return parseDualAttachPointIdent(sourceKeyword, firstIdent); default: recover(peek(), ParserRuleContext.ATTACH_POINT); return parseAnnotationAttachPoint(); } } /** * Parse source keyword. * * @return Parsed node */ private STNode parseSourceKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.SOURCE_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.SOURCE_KEYWORD); return parseSourceKeyword(); } } /** * Parse attach point ident gievn. 
* <p>
     * <code>
     * source-only-attach-point-ident := annotation | external | var | const | listener | worker
     * <br/><br/>
     * dual-attach-point-ident := type | class | [object|service remote] function | parameter
     * | return | service | [object|record] field
     * </code>
     *
     * @param sourceKeyword Source keyword
     * @return Parsed node
     */
    private STNode parseAttachPointIdent(STNode sourceKeyword) {
        switch (peek().kind) {
            // Source-only idents: a single-keyword attach point.
            case ANNOTATION_KEYWORD:
            case EXTERNAL_KEYWORD:
            case VAR_KEYWORD:
            case CONST_KEYWORD:
            case LISTENER_KEYWORD:
            case WORKER_KEYWORD:
                STNode firstIdent = consume();
                STNode identList = STNodeFactory.createNodeList(firstIdent);
                return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, identList);
            // Dual idents: may be followed by a second ident (e.g. `object function`).
            case OBJECT_KEYWORD:
            case RESOURCE_KEYWORD:
            case RECORD_KEYWORD:
            case TYPE_KEYWORD:
            case FUNCTION_KEYWORD:
            case PARAMETER_KEYWORD:
            case RETURN_KEYWORD:
            case SERVICE_KEYWORD:
            case FIELD_KEYWORD:
            case CLASS_KEYWORD:
                firstIdent = consume();
                return parseDualAttachPointIdent(sourceKeyword, firstIdent);
            default:
                recover(peek(), ParserRuleContext.ATTACH_POINT_IDENT);
                return parseAttachPointIdent(sourceKeyword);
        }
    }

    /**
     * Parse dual-attach-point ident.
     *
     * @param sourceKeyword Source keyword
     * @param firstIdent first part of the dual attach-point
     * @return Parsed node
     */
    private STNode parseDualAttachPointIdent(STNode sourceKeyword, STNode firstIdent) {
        STNode secondIdent;
        switch (firstIdent.kind) {
            case OBJECT_KEYWORD:
                // `object function` or `object field`
                secondIdent = parseIdentAfterObjectIdent();
                break;
            case RESOURCE_KEYWORD:
                // `resource function`
                secondIdent = parseFunctionIdent();
                break;
            case RECORD_KEYWORD:
                // `record field`
                secondIdent = parseFieldIdent();
                break;
            case SERVICE_KEYWORD:
                // `service` may stand alone or start `service remote function`.
                return parseServiceAttachPoint(sourceKeyword, firstIdent);
            case TYPE_KEYWORD:
            case FUNCTION_KEYWORD:
            case PARAMETER_KEYWORD:
            case RETURN_KEYWORD:
            case FIELD_KEYWORD:
            case CLASS_KEYWORD:
            default:
                // Single-ident attach point.
                STNode identList = STNodeFactory.createNodeList(firstIdent);
                return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, identList);
        }

        STNode identList = STNodeFactory.createNodeList(firstIdent, secondIdent);
        return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, identList);
    }

    /**
     * Parse remote ident.
     *
     * @return Parsed node
     */
    private STNode parseRemoteIdent() {
        STToken token = peek();
        if (token.kind == SyntaxKind.REMOTE_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.REMOTE_IDENT);
            return parseRemoteIdent();
        }
    }

    /**
     * Parse service attach point.
* <code>service-attach-point := service | service remote function</code>
     *
     * @param sourceKeyword Source keyword, or an empty node
     * @param firstIdent the already-consumed <code>service</code> ident
     * @return Parsed node
     */
    private STNode parseServiceAttachPoint(STNode sourceKeyword, STNode firstIdent) {
        STNode identList;
        STToken token = peek();
        switch (token.kind) {
            case REMOTE_KEYWORD:
                // `service remote function`
                STNode secondIdent = parseRemoteIdent();
                STNode thirdIdent = parseFunctionIdent();
                identList = STNodeFactory.createNodeList(firstIdent, secondIdent, thirdIdent);
                return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, identList);
            case COMMA_TOKEN:
            case SEMICOLON_TOKEN:
                // Plain `service` attach point.
                identList = STNodeFactory.createNodeList(firstIdent);
                return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, identList);
            default:
                recover(token, ParserRuleContext.SERVICE_IDENT_RHS);
                return parseServiceAttachPoint(sourceKeyword, firstIdent);
        }
    }

    /**
     * Parse the idents that are supported after object-ident.
     *
     * @return Parsed node
     */
    private STNode parseIdentAfterObjectIdent() {
        STToken token = peek();
        switch (token.kind) {
            case FUNCTION_KEYWORD:
            case FIELD_KEYWORD:
                return consume();
            default:
                recover(token, ParserRuleContext.IDENT_AFTER_OBJECT_IDENT);
                return parseIdentAfterObjectIdent();
        }
    }

    /**
     * Parse function ident.
     *
     * @return Parsed node
     */
    private STNode parseFunctionIdent() {
        STToken token = peek();
        if (token.kind == SyntaxKind.FUNCTION_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.FUNCTION_IDENT);
            return parseFunctionIdent();
        }
    }

    /**
     * Parse field ident.
     *
     * @return Parsed node
     */
    private STNode parseFieldIdent() {
        STToken token = peek();
        if (token.kind == SyntaxKind.FIELD_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.FIELD_IDENT);
            return parseFieldIdent();
        }
    }

    /**
     * Parse XML namespace declaration.
     * <p>
     * <code>xmlns-decl := xmlns xml-namespace-uri [ as xml-namespace-prefix ] ;
     * <br/>
     * xml-namespace-uri := simple-const-expr
     * <br/>
     * xml-namespace-prefix := identifier
     * </code>
     *
     * @param isModuleVar whether this declaration appears at module level
     * @return Parsed node
     */
    private STNode parseXMLNamespaceDeclaration(boolean isModuleVar) {
        startContext(ParserRuleContext.XML_NAMESPACE_DECLARATION);
        STNode xmlnsKeyword = parseXMLNSKeyword();

        STNode namespaceUri = parseSimpleConstExpr();
        while (!isValidXMLNameSpaceURI(namespaceUri)) {
            // Keep consuming invalid URI expressions as invalid-node minutiae until a
            // valid namespace URI is found.
            xmlnsKeyword = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(xmlnsKeyword, namespaceUri,
                    DiagnosticErrorCode.ERROR_INVALID_XML_NAMESPACE_URI);
            namespaceUri = parseSimpleConstExpr();
        }

        STNode xmlnsDecl = parseXMLDeclRhs(xmlnsKeyword, namespaceUri, isModuleVar);
        endContext();
        return xmlnsDecl;
    }

    /**
     * Parse xmlns keyword.
     *
     * @return Parsed node
     */
    private STNode parseXMLNSKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.XMLNS_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.XMLNS_KEYWORD);
            return parseXMLNSKeyword();
        }
    }

    /**
     * Check whether the given expression is a valid XML namespace URI.
     *
     * @param expr expression to check
     * @return <code>true</code> for string literals and name references, <code>false</code> otherwise
     */
    private boolean isValidXMLNameSpaceURI(STNode expr) {
        switch (expr.kind) {
            case STRING_LITERAL:
            case QUALIFIED_NAME_REFERENCE:
            case SIMPLE_NAME_REFERENCE:
                return true;
            case IDENTIFIER_TOKEN:
            default:
                return false;
        }
    }

    /**
     * Parse a simple constant expression, within the constant-expression context.
     *
     * @return Parsed node
     */
    private STNode parseSimpleConstExpr() {
        startContext(ParserRuleContext.CONSTANT_EXPRESSION);
        STNode expr = parseSimpleConstExprInternal();
        endContext();
        return expr;
    }

    /**
     * Parse simple constants expr.
*
     * @return Parsed node
     */
    private STNode parseSimpleConstExprInternal() {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case STRING_LITERAL_TOKEN:
            case DECIMAL_INTEGER_LITERAL_TOKEN:
            case HEX_INTEGER_LITERAL_TOKEN:
            case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
            case HEX_FLOATING_POINT_LITERAL_TOKEN:
            case TRUE_KEYWORD:
            case FALSE_KEYWORD:
            case NULL_KEYWORD:
                return parseBasicLiteral();
            case PLUS_TOKEN:
            case MINUS_TOKEN:
                return parseSignedIntOrFloat();
            case OPEN_PAREN_TOKEN:
                return parseNilLiteral();
            default:
                if (isPredeclaredIdentifier(nextToken.kind)) {
                    return parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);
                }
                recover(nextToken, ParserRuleContext.CONSTANT_EXPRESSION_START);
                return parseSimpleConstExprInternal();
        }
    }

    /**
     * Parse the portion after the namespace-uri of an XML declaration.
     *
     * @param xmlnsKeyword XMLNS keyword
     * @param namespaceUri Namespace URI
     * @param isModuleVar whether this declaration appears at module level
     * @return Parsed node
     */
    private STNode parseXMLDeclRhs(STNode xmlnsKeyword, STNode namespaceUri, boolean isModuleVar) {
        STNode asKeyword = STNodeFactory.createEmptyNode();
        STNode namespacePrefix = STNodeFactory.createEmptyNode();

        switch (peek().kind) {
            case AS_KEYWORD:
                // Optional `as <prefix>` clause.
                asKeyword = parseAsKeyword();
                namespacePrefix = parseNamespacePrefix();
                break;
            case SEMICOLON_TOKEN:
                break;
            default:
                recover(peek(), ParserRuleContext.XML_NAMESPACE_PREFIX_DECL);
                return parseXMLDeclRhs(xmlnsKeyword, namespaceUri, isModuleVar);
        }

        STNode semicolon = parseSemicolon();
        if (isModuleVar) {
            return STNodeFactory.createModuleXMLNamespaceDeclarationNode(xmlnsKeyword, namespaceUri, asKeyword,
                    namespacePrefix, semicolon);
        }
        return STNodeFactory.createXMLNamespaceDeclarationNode(xmlnsKeyword, namespaceUri, asKeyword,
                namespacePrefix, semicolon);
    }

    /**
     * Parse XML namespace prefix.
     *
     * @return Parsed node
     */
    private STNode parseNamespacePrefix() {
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {
            return consume();
        } else {
            recover(peek(), ParserRuleContext.NAMESPACE_PREFIX);
            return parseNamespacePrefix();
        }
    }

    /**
     * Parse named worker declaration.
     * <p>
     * <code>named-worker-decl := [annots] [transactional] worker worker-name return-type-descriptor { sequence-stmt }
     * [on-fail-clause]</code>
     *
     * @param annots Annotations attached to the worker decl
     * @param qualifiers Preceding transactional keyword in a list
     * @return Parsed node
     */
    private STNode parseNamedWorkerDeclaration(STNode annots, List<STNode> qualifiers) {
        startContext(ParserRuleContext.NAMED_WORKER_DECL);
        STNode transactionalKeyword = getTransactionalKeyword(qualifiers);
        STNode workerKeyword = parseWorkerKeyword();
        STNode workerName = parseWorkerName();
        STNode returnTypeDesc = parseReturnTypeDescriptor();
        STNode workerBody = parseBlockNode();
        endContext();
        STNode onFailClause = parseOptionalOnFailClause();
        return STNodeFactory.createNamedWorkerDeclarationNode(annots, transactionalKeyword, workerKeyword,
                workerName, returnTypeDesc, workerBody, onFailClause);
    }

    /**
     * Validate the qualifier list of a named worker declaration: only a single
     * <code>transactional</code> qualifier is allowed; everything else is attached to
     * a neighboring node as an invalid-node diagnostic.
     *
     * @param qualifierList qualifiers collected before the worker keyword
     * @return the transactional keyword, or an empty node when absent
     */
    private STNode getTransactionalKeyword(List<STNode> qualifierList) {
        List<STNode> validatedList = new ArrayList<>();

        for (int i = 0; i < qualifierList.size(); i++) {
            STNode qualifier = qualifierList.get(i);
            int nextIndex = i + 1;

            if (isSyntaxKindInList(validatedList, qualifier.kind)) {
                // Duplicate `transactional` qualifier.
                updateLastNodeInListWithInvalidNode(validatedList, qualifier,
                        DiagnosticErrorCode.ERROR_DUPLICATE_QUALIFIER, ((STToken) qualifier).text());
            } else if (qualifier.kind == SyntaxKind.TRANSACTIONAL_KEYWORD) {
                validatedList.add(qualifier);
            } else if (qualifierList.size() == nextIndex) {
                // Invalid last qualifier: attach to the next real token.
                addInvalidNodeToNextToken(qualifier, DiagnosticErrorCode.ERROR_QUALIFIER_NOT_ALLOWED,
                        ((STToken) qualifier).text());
            } else {
                // Invalid qualifier in the middle: attach to the following qualifier.
                updateANodeInListWithLeadingInvalidNode(qualifierList, nextIndex, qualifier,
                        DiagnosticErrorCode.ERROR_QUALIFIER_NOT_ALLOWED, ((STToken) qualifier).text());
            }
        }

        STNode transactionalKeyword;
        if (validatedList.isEmpty()) {
            transactionalKeyword = STNodeFactory.createEmptyNode();
        } else {
            transactionalKeyword = validatedList.get(0);
        }
        return transactionalKeyword;
    }

    /**
     * Parse an optional return type descriptor: <code>returns [annots] type-descriptor</code>.
     *
     * @return Parsed node, or an empty node when there is no <code>returns</code> keyword
     */
    private STNode parseReturnTypeDescriptor() {
        // If the return type is not present, simply return an empty node.
        STToken token = peek();
        if (token.kind != SyntaxKind.RETURNS_KEYWORD) {
            return STNodeFactory.createEmptyNode();
        }

        STNode returnsKeyword = consume();
        STNode annot = parseOptionalAnnotations();
        STNode type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_RETURN_TYPE_DESC);
        return STNodeFactory.createReturnTypeDescriptorNode(returnsKeyword, annot, type);
    }

    /**
     * Parse worker keyword.
     *
     * @return Parsed node
     */
    private STNode parseWorkerKeyword() {
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.WORKER_KEYWORD) {
            return consume();
        } else {
            recover(peek(), ParserRuleContext.WORKER_KEYWORD);
            return parseWorkerKeyword();
        }
    }

    /**
     * Parse worker name.
     * <p>
     * <code>worker-name := identifier</code>
     *
     * @return Parsed node
     */
    private STNode parseWorkerName() {
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {
            return consume();
        } else {
            recover(peek(), ParserRuleContext.WORKER_NAME);
            return parseWorkerName();
        }
    }

    /**
     * Parse lock statement.
     * <code>lock-stmt := lock block-stmt [on-fail-clause]</code>
     *
     * @return Lock statement
     */
    private STNode parseLockStatement() {
        startContext(ParserRuleContext.LOCK_STMT);
        STNode lockKeyword = parseLockKeyword();
        STNode blockStatement = parseBlockNode();
        endContext();
        STNode onFailClause = parseOptionalOnFailClause();
        return STNodeFactory.createLockStatementNode(lockKeyword, blockStatement, onFailClause);
    }

    /**
     * Parse lock-keyword.
*
     * @return lock-keyword node
     */
    private STNode parseLockKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.LOCK_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.LOCK_KEYWORD);
            return parseLockKeyword();
        }
    }

    /**
     * Parse union type descriptor.
     * union-type-descriptor := type-descriptor | type-descriptor
     *
     * @param leftTypeDesc Type desc in the LHS of the union type desc.
     * @param context Current context.
     * @param isTypedBindingPattern whether this union appears in a typed-binding-pattern
     * @return parsed union type desc node
     */
    private STNode parseUnionTypeDescriptor(STNode leftTypeDesc, ParserRuleContext context,
                                            boolean isTypedBindingPattern) {
        // Consume the `|` token and parse the RHS at union precedence.
        STNode pipeToken = consume();
        STNode rightTypeDesc = parseTypeDescriptorInternal(new ArrayList<>(), context, isTypedBindingPattern, false,
                TypePrecedence.UNION);
        return mergeTypesWithUnion(leftTypeDesc, pipeToken, rightTypeDesc);
    }

    /**
     * Creates a union type descriptor after validating lhs and rhs types.
     * <p>
     * <i>Note: Since type precedence and associativity are not taken into account here,
     * this method should not be called directly when types are unknown.
     * <br/>
     * Call {@link #mergeTypesWithUnion(STNode, STNode, STNode)} instead.</i>
     *
     * @param leftTypeDesc lhs type
     * @param pipeToken pipe token
     * @param rightTypeDesc rhs type
     * @return a UnionTypeDescriptorNode
     */
    private STNode createUnionTypeDesc(STNode leftTypeDesc, STNode pipeToken, STNode rightTypeDesc) {
        leftTypeDesc = validateForUsageOfVar(leftTypeDesc);
        rightTypeDesc = validateForUsageOfVar(rightTypeDesc);
        return STNodeFactory.createUnionTypeDescriptorNode(leftTypeDesc, pipeToken, rightTypeDesc);
    }

    /**
     * Parse pipe token.
     *
     * @return parsed pipe token node
     */
    private STNode parsePipeToken() {
        STToken token = peek();
        if (token.kind == SyntaxKind.PIPE_TOKEN) {
            return consume();
        } else {
            recover(token, ParserRuleContext.PIPE);
            return parsePipeToken();
        }
    }

    private boolean isTypeStartingToken(SyntaxKind nodeKind) {
        return isTypeStartingToken(nodeKind, getNextNextToken());
    }

    /**
     * Check whether the given token can start a type descriptor. For ambiguous tokens
     * the token after next is used to disambiguate (e.g. singleton type descriptors).
     */
    private static boolean isTypeStartingToken(SyntaxKind nextTokenKind, STToken nextNextToken) {
        switch (nextTokenKind) {
            case IDENTIFIER_TOKEN:
            case SERVICE_KEYWORD:
            case RECORD_KEYWORD:
            case OBJECT_KEYWORD:
            case ABSTRACT_KEYWORD:
            case CLIENT_KEYWORD:
            case OPEN_PAREN_TOKEN:
            case MAP_KEYWORD:
            case STREAM_KEYWORD:
            case TABLE_KEYWORD:
            case FUNCTION_KEYWORD:
            case OPEN_BRACKET_TOKEN:
            case DISTINCT_KEYWORD:
            case ISOLATED_KEYWORD:
            case TRANSACTIONAL_KEYWORD:
            case TRANSACTION_KEYWORD:
                return true;
            default:
                if (isParameterizedTypeToken(nextTokenKind)) {
                    return true;
                }
                if (isSingletonTypeDescStart(nextTokenKind, nextNextToken)) {
                    return true;
                }
                return isSimpleType(nextTokenKind);
        }
    }

    /**
     * Check if the token kind is a type descriptor in terminal expression.
     * <p>
     * simple-type-in-expr :=
     * boolean | int | byte | float | decimal | string | handle | json | anydata | any | never
     *
     * @param nodeKind token kind to check
     * @return <code>true</code> for simple type token in expression. <code>false</code> otherwise.
     */
    private boolean isSimpleTypeInExpression(SyntaxKind nodeKind) {
        switch (nodeKind) {
            // `var` and `readonly` are simple types but are not valid in expressions.
            case VAR_KEYWORD:
            case READONLY_KEYWORD:
                return false;
            default:
                return isSimpleType(nodeKind);
        }
    }

    static boolean isSimpleType(SyntaxKind nodeKind) {
        switch (nodeKind) {
            case INT_KEYWORD:
            case FLOAT_KEYWORD:
            case DECIMAL_KEYWORD:
            case BOOLEAN_KEYWORD:
            case STRING_KEYWORD:
            case BYTE_KEYWORD:
            case JSON_KEYWORD:
            case HANDLE_KEYWORD:
            case ANY_KEYWORD:
            case ANYDATA_KEYWORD:
            case NEVER_KEYWORD:
            case VAR_KEYWORD:
            case READONLY_KEYWORD:
                return true;
            default:
                return false;
        }
    }

    static boolean isPredeclaredPrefix(SyntaxKind nodeKind) {
        switch (nodeKind) {
            case BOOLEAN_KEYWORD:
            case DECIMAL_KEYWORD:
            case ERROR_KEYWORD:
            case FLOAT_KEYWORD:
            case FUNCTION_KEYWORD:
            case FUTURE_KEYWORD:
            case INT_KEYWORD:
            case MAP_KEYWORD:
            case OBJECT_KEYWORD:
            case STREAM_KEYWORD:
            case STRING_KEYWORD:
            case TABLE_KEYWORD:
            case TRANSACTION_KEYWORD:
            case TYPEDESC_KEYWORD:
            case XML_KEYWORD:
                return true;
            default:
                return false;
        }
    }

    // A predeclared prefix used as a qualified identifier must be followed by a colon.
    private boolean isQualifiedIdentifierPredeclaredPrefix(SyntaxKind nodeKind) {
        return isPredeclaredPrefix(nodeKind) && getNextNextToken().kind == SyntaxKind.COLON_TOKEN;
    }

    /**
     * Map a built-in type keyword to its corresponding type-descriptor syntax kind.
     */
    private static SyntaxKind getBuiltinTypeSyntaxKind(SyntaxKind typeKeyword) {
        switch (typeKeyword) {
            case INT_KEYWORD:
                return SyntaxKind.INT_TYPE_DESC;
            case FLOAT_KEYWORD:
                return SyntaxKind.FLOAT_TYPE_DESC;
            case DECIMAL_KEYWORD:
                return SyntaxKind.DECIMAL_TYPE_DESC;
            case BOOLEAN_KEYWORD:
                return SyntaxKind.BOOLEAN_TYPE_DESC;
            case STRING_KEYWORD:
                return SyntaxKind.STRING_TYPE_DESC;
            case BYTE_KEYWORD:
                return SyntaxKind.BYTE_TYPE_DESC;
            case JSON_KEYWORD:
                return SyntaxKind.JSON_TYPE_DESC;
            case HANDLE_KEYWORD:
                return SyntaxKind.HANDLE_TYPE_DESC;
            case ANY_KEYWORD:
                return SyntaxKind.ANY_TYPE_DESC;
            case ANYDATA_KEYWORD:
                return SyntaxKind.ANYDATA_TYPE_DESC;
            case NEVER_KEYWORD:
                return SyntaxKind.NEVER_TYPE_DESC;
            case VAR_KEYWORD:
                return SyntaxKind.VAR_TYPE_DESC;
            case READONLY_KEYWORD:
                return SyntaxKind.READONLY_TYPE_DESC;
            default:
                assert false : typeKeyword + " is not a built-in type";
                return SyntaxKind.TYPE_REFERENCE;
        }
    }

    /**
     * Parse fork-keyword.
     *
     * @return Fork-keyword node
     */
    private STNode parseForkKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.FORK_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.FORK_KEYWORD);
            return parseForkKeyword();
        }
    }

    /**
     * Parse fork statement.
     * <code>fork-stmt := fork { named-worker-decl+ }</code>
     *
     * @return Fork statement
     */
    private STNode parseForkStatement() {
        startContext(ParserRuleContext.FORK_STMT);
        STNode forkKeyword = parseForkKeyword();
        STNode openBrace = parseOpenBrace();

        // Only named-worker declarations are allowed inside a fork block; anything
        // else is attached to a neighboring node as invalid-node minutiae.
        ArrayList<STNode> workers = new ArrayList<>();
        while (!isEndOfStatements()) {
            STNode stmt = parseStatement();
            if (stmt == null) {
                break;
            }
            if (validateStatement(stmt)) {
                continue;
            }
            switch (stmt.kind) {
                case NAMED_WORKER_DECLARATION:
                    workers.add(stmt);
                    break;
                default:
                    if (workers.isEmpty()) {
                        openBrace = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(openBrace, stmt,
                                DiagnosticErrorCode.ERROR_ONLY_NAMED_WORKERS_ALLOWED_HERE);
                    } else {
                        updateLastNodeInListWithInvalidNode(workers, stmt,
                                DiagnosticErrorCode.ERROR_ONLY_NAMED_WORKERS_ALLOWED_HERE);
                    }
            }
        }

        STNode namedWorkerDeclarations = STNodeFactory.createNodeList(workers);
        STNode closeBrace = parseCloseBrace();
        endContext();

        STNode forkStmt =
                STNodeFactory.createForkStatementNode(forkKeyword, openBrace, namedWorkerDeclarations, closeBrace);
        if (isNodeListEmpty(namedWorkerDeclarations)) {
            // A fork statement requires at least one named worker.
            return SyntaxErrors.addDiagnostic(forkStmt,
                    DiagnosticErrorCode.ERROR_MISSING_NAMED_WORKER_DECLARATION_IN_FORK_STMT);
        }
        return forkStmt;
    }

    /**
     * Parse trap expression.
* <p>
     * <code>
     * trap-expr := trap expression
     * </code>
     *
     * @param allowActions Allow actions
     * @param isRhsExpr Whether this is a RHS expression or not
     * @param isInConditionalExpr Whether this is within a conditional expression
     * @return Trap expression node
     */
    private STNode parseTrapExpression(boolean isRhsExpr, boolean allowActions, boolean isInConditionalExpr) {
        STNode trapKeyword = parseTrapKeyword();
        STNode expr = parseExpression(OperatorPrecedence.TRAP, isRhsExpr, allowActions, isInConditionalExpr);
        if (isAction(expr)) {
            // Trapping an action yields a trap-action, not a trap-expression.
            return STNodeFactory.createTrapExpressionNode(SyntaxKind.TRAP_ACTION, trapKeyword, expr);
        }

        return STNodeFactory.createTrapExpressionNode(SyntaxKind.TRAP_EXPRESSION, trapKeyword, expr);
    }

    /**
     * Parse trap-keyword.
     *
     * @return Trap-keyword node
     */
    private STNode parseTrapKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.TRAP_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.TRAP_KEYWORD);
            return parseTrapKeyword();
        }
    }

    /**
     * Parse list constructor expression.
     * <p>
     * <code>
     * list-constructor-expr := [ [ list-members ] ]
     * <br/>
     * list-members := list-member (, list-member)*
     * <br/>
     * list-member := expression | spread-member
     * <br/>
     * spread-member := ... expression
     * </code>
     *
     * @return Parsed node
     */
    private STNode parseListConstructorExpr() {
        startContext(ParserRuleContext.LIST_CONSTRUCTOR);
        STNode openBracket = parseOpenBracket();
        STNode listMembers = parseListMembers();
        STNode closeBracket = parseCloseBracket();
        endContext();
        return STNodeFactory.createListConstructorExpressionNode(openBracket, listMembers, closeBracket);
    }

    /**
     * Parse optional list member list.
     *
     * @return Parsed node
     */
    private STNode parseListMembers() {
        List<STNode> listMembers = new ArrayList<>();
        if (isEndOfListConstructor(peek().kind)) {
            return STNodeFactory.createEmptyNodeList();
        }

        STNode listMember = parseListMember();
        listMembers.add(listMember);
        return parseListMembers(listMembers);
    }

    /**
     * Parse the remaining (separator, list-member) pairs into the given list.
     *
     * @param listMembers already-parsed members (and separators)
     * @return Parsed node list
     */
    private STNode parseListMembers(List<STNode> listMembers) {
        STNode listConstructorMemberEnd;
        while (!isEndOfListConstructor(peek().kind)) {
            listConstructorMemberEnd = parseListConstructorMemberEnd();
            if (listConstructorMemberEnd == null) {
                break;
            }

            listMembers.add(listConstructorMemberEnd);
            STNode listMember = parseListMember();
            listMembers.add(listMember);
        }

        return STNodeFactory.createNodeList(listMembers);
    }

    /**
     * Parse list member.
     * <p>
     * <code>
     * list-member := expression | spread-member
     * </code>
     *
     * @return Parsed node
     */
    private STNode parseListMember() {
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.ELLIPSIS_TOKEN) {
            return parseSpreadMember();
        } else {
            return parseExpression();
        }
    }

    /**
     * Parse spread member.
     * <p>
     * <code>
     * spread-member := ... expression
     * </code>
     *
     * @return Parsed node
     */
    private STNode parseSpreadMember() {
        STNode ellipsis = parseEllipsis();
        STNode expr = parseExpression();
        return STNodeFactory.createSpreadMemberNode(ellipsis, expr);
    }

    private boolean isEndOfListConstructor(SyntaxKind tokenKind) {
        switch (tokenKind) {
            case EOF_TOKEN:
            case CLOSE_BRACKET_TOKEN:
                return true;
            default:
                return false;
        }
    }

    private STNode parseListConstructorMemberEnd() {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case COMMA_TOKEN:
                return consume();
            case CLOSE_BRACKET_TOKEN:
                // Null marks the end of the member list.
                return null;
            default:
                recover(nextToken, ParserRuleContext.LIST_CONSTRUCTOR_MEMBER_END);
                return parseListConstructorMemberEnd();
        }
    }

    /**
     * Parse foreach statement.
* <code>foreach-stmt := foreach typed-binding-pattern in action-or-expr block-stmt [on-fail-clause]</code> * * @return foreach statement */ private STNode parseForEachStatement() { startContext(ParserRuleContext.FOREACH_STMT); STNode forEachKeyword = parseForEachKeyword(); STNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.FOREACH_STMT); STNode inKeyword = parseInKeyword(); STNode actionOrExpr = parseActionOrExpression(); STNode blockStatement = parseBlockNode(); endContext(); STNode onFailClause = parseOptionalOnFailClause(); return STNodeFactory.createForEachStatementNode(forEachKeyword, typedBindingPattern, inKeyword, actionOrExpr, blockStatement, onFailClause); } /** * Parse foreach-keyword. * * @return ForEach-keyword node */ private STNode parseForEachKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.FOREACH_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.FOREACH_KEYWORD); return parseForEachKeyword(); } } /** * Parse in-keyword. * * @return In-keyword node */ private STNode parseInKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.IN_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.IN_KEYWORD); return parseInKeyword(); } } /** * Parse type cast expression. 
* <p>
     * <code>
     * type-cast-expr := < type-cast-param > expression
     * <br/>
     * type-cast-param := [annots] type-descriptor | annots
     * </code>
     *
     * @param isRhsExpr Whether this is a RHS expression or not
     * @param allowActions Allow actions
     * @param isInConditionalExpr Whether this is within a conditional expression
     * @return Parsed node
     */
    private STNode parseTypeCastExpr(boolean isRhsExpr, boolean allowActions, boolean isInConditionalExpr) {
        startContext(ParserRuleContext.TYPE_CAST);
        STNode ltToken = parseLTToken();
        return parseTypeCastExpr(ltToken, isRhsExpr, allowActions, isInConditionalExpr);
    }

    // Continuation of the above, once the `<` token has been consumed.
    private STNode parseTypeCastExpr(STNode ltToken, boolean isRhsExpr, boolean allowActions,
                                     boolean isInConditionalExpr) {
        STNode typeCastParam = parseTypeCastParam();
        STNode gtToken = parseGTToken();
        endContext();
        STNode expression =
                parseExpression(OperatorPrecedence.EXPRESSION_ACTION, isRhsExpr, allowActions, isInConditionalExpr);
        return STNodeFactory.createTypeCastExpressionNode(ltToken, typeCastParam, gtToken, expression);
    }

    /**
     * Parse the type-cast param: <code>[annots] type-descriptor | annots</code>.
     *
     * @return Parsed node
     */
    private STNode parseTypeCastParam() {
        STNode annot;
        STNode type;
        STToken token = peek();

        switch (token.kind) {
            case AT_TOKEN:
                // Annotations present; the type descriptor itself is optional.
                annot = parseOptionalAnnotations();
                token = peek();
                if (isTypeStartingToken(token.kind)) {
                    type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);
                } else {
                    type = STNodeFactory.createEmptyNode();
                }
                break;
            default:
                annot = STNodeFactory.createEmptyNode();
                type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);
                break;
        }

        return STNodeFactory.createTypeCastParamNode(getAnnotations(annot), type);
    }

    /**
     * Parse table constructor expression.
* <p>
     * <code>
     * table-constructor-expr-rhs := [ [row-list] ]
     * </code>
     *
     * @param tableKeyword tableKeyword that precedes this rhs
     * @param keySpecifier keySpecifier that precedes this rhs
     * @return Parsed node
     */
    private STNode parseTableConstructorExprRhs(STNode tableKeyword, STNode keySpecifier) {
        switchContext(ParserRuleContext.TABLE_CONSTRUCTOR);
        STNode openBracket = parseOpenBracket();
        STNode rowList = parseRowList();
        STNode closeBracket = parseCloseBracket();
        return STNodeFactory.createTableConstructorExpressionNode(tableKeyword, keySpecifier, openBracket, rowList,
                closeBracket);
    }

    /**
     * Parse table-keyword.
     *
     * @return Table-keyword node
     */
    private STNode parseTableKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.TABLE_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.TABLE_KEYWORD);
            return parseTableKeyword();
        }
    }

    /**
     * Parse table rows.
     * <p>
     * <code>row-list := [ mapping-constructor-expr (, mapping-constructor-expr)* ]</code>
     *
     * @return Parsed node
     */
    private STNode parseRowList() {
        STToken nextToken = peek();
        // Return an empty list for an empty row list.
        if (isEndOfTableRowList(nextToken.kind)) {
            return STNodeFactory.createEmptyNodeList();
        }

        // Parse the first row, then (separator, row) pairs.
        List<STNode> mappings = new ArrayList<>();
        STNode mapExpr = parseMappingConstructorExpr();
        mappings.add(mapExpr);

        nextToken = peek();
        STNode rowEnd;
        while (!isEndOfTableRowList(nextToken.kind)) {
            rowEnd = parseTableRowEnd();
            if (rowEnd == null) {
                break;
            }

            mappings.add(rowEnd);
            mapExpr = parseMappingConstructorExpr();
            mappings.add(mapExpr);
            nextToken = peek();
        }

        return STNodeFactory.createNodeList(mappings);
    }

    private boolean isEndOfTableRowList(SyntaxKind tokenKind) {
        switch (tokenKind) {
            case EOF_TOKEN:
            case CLOSE_BRACKET_TOKEN:
                return true;
            case COMMA_TOKEN:
            case OPEN_BRACE_TOKEN:
                return false;
            default:
                return isEndOfMappingConstructor(tokenKind);
        }
    }

    private STNode parseTableRowEnd() {
        switch (peek().kind) {
            case COMMA_TOKEN:
                return parseComma();
            case CLOSE_BRACKET_TOKEN:
            case EOF_TOKEN:
                // Null marks the end of the row list.
                return null;
            default:
                recover(peek(), ParserRuleContext.TABLE_ROW_END);
                return parseTableRowEnd();
        }
    }

    /**
     * Parse key specifier.
     * <p>
     * <code>key-specifier := key ( [ field-name (, field-name)* ] )</code>
     *
     * @return Parsed node
     */
    private STNode parseKeySpecifier() {
        startContext(ParserRuleContext.KEY_SPECIFIER);
        STNode keyKeyword = parseKeyKeyword();
        STNode openParen = parseOpenParenthesis();
        STNode fieldNames = parseFieldNames();
        STNode closeParen = parseCloseParenthesis();
        endContext();
        return STNodeFactory.createKeySpecifierNode(keyKeyword, openParen, fieldNames, closeParen);
    }

    /**
     * Parse key-keyword.
     *
     * @return Key-keyword node
     */
    private STNode parseKeyKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.KEY_KEYWORD) {
            return consume();
        }

        if (isKeyKeyword(token)) {
            // `key` lexes as an identifier in some positions; convert it to a key-keyword token.
            return getKeyKeyword(consume());
        }

        recover(token, ParserRuleContext.KEY_KEYWORD);
        return parseKeyKeyword();
    }

    static boolean isKeyKeyword(STToken token) {
        return token.kind == SyntaxKind.IDENTIFIER_TOKEN && LexerTerminals.KEY.equals(token.text());
    }

    // Re-create the identifier as a KEY_KEYWORD token, preserving minutiae and diagnostics.
    private STNode getKeyKeyword(STToken token) {
        return STNodeFactory.createToken(SyntaxKind.KEY_KEYWORD, token.leadingMinutiae(), token.trailingMinutiae(),
                token.diagnostics());
    }

    // Re-create the given token as an UNDERSCORE_KEYWORD token, preserving minutiae and diagnostics.
    private STToken getUnderscoreKeyword(STToken token) {
        return STNodeFactory.createToken(SyntaxKind.UNDERSCORE_KEYWORD, token.leadingMinutiae(),
                token.trailingMinutiae(), token.diagnostics());
    }

    /**
     * Parse field names.
* <p>
     * <code>field-name-list := [ field-name (, field-name)* ]</code>
     *
     * @return Parsed node
     */
    private STNode parseFieldNames() {
        STToken nextToken = peek();
        // Return an empty list for an empty field-name list.
        if (isEndOfFieldNamesList(nextToken.kind)) {
            return STNodeFactory.createEmptyNodeList();
        }

        // Parse the first field name, then (comma, field-name) pairs.
        List<STNode> fieldNames = new ArrayList<>();
        STNode fieldName = parseVariableName();
        fieldNames.add(fieldName);

        nextToken = peek();
        STNode leadingComma;
        while (!isEndOfFieldNamesList(nextToken.kind)) {
            leadingComma = parseComma();
            fieldNames.add(leadingComma);
            fieldName = parseVariableName();
            fieldNames.add(fieldName);
            nextToken = peek();
        }

        return STNodeFactory.createNodeList(fieldNames);
    }

    private boolean isEndOfFieldNamesList(SyntaxKind tokenKind) {
        switch (tokenKind) {
            case COMMA_TOKEN:
            case IDENTIFIER_TOKEN:
                return false;
            default:
                return true;
        }
    }

    /**
     * Parse error-keyword.
     *
     * @return Parsed error-keyword node
     */
    private STNode parseErrorKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.ERROR_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.ERROR_KEYWORD);
            return parseErrorKeyword();
        }
    }

    /**
     * Parse stream type descriptor.
     * <p>
     * stream-type-descriptor := stream [stream-type-parameters]
     * <br/>
     * stream-type-parameters := &lt; type-descriptor [, type-descriptor]&gt;
     * </p>
     *
     * @param streamKeywordToken the already-consumed <code>stream</code> keyword
     * @return Parsed stream type descriptor node
     */
    private STNode parseStreamTypeDescriptor(STNode streamKeywordToken) {
        STNode streamTypeParamsNode;
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.LT_TOKEN) {
            streamTypeParamsNode = parseStreamTypeParamsNode();
        } else {
            // Type parameters are optional.
            streamTypeParamsNode = STNodeFactory.createEmptyNode();
        }
        return STNodeFactory.createStreamTypeDescriptorNode(streamKeywordToken, streamTypeParamsNode);
    }

    /**
     * Parse stream type params node.
     * <p>
     * stream-type-parameters := &lt; type-descriptor [, type-descriptor]&gt;
     * </p>
     *
     * @return Parsed stream type params node
     */
    private STNode parseStreamTypeParamsNode() {
        STNode ltToken = parseLTToken();
        startContext(ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC);
        STNode leftTypeDescNode = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC);
        STNode streamTypedesc = parseStreamTypeParamsNode(ltToken, leftTypeDescNode);
        endContext();
        return streamTypedesc;
    }

    // Continuation of the above, once `<` and the first type descriptor are consumed.
    private STNode parseStreamTypeParamsNode(STNode ltToken, STNode leftTypeDescNode) {
        STNode commaToken, rightTypeDescNode, gtToken;
        switch (peek().kind) {
            case COMMA_TOKEN:
                // Optional second type parameter present.
                commaToken = parseComma();
                rightTypeDescNode = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC);
                break;
            case GT_TOKEN:
                commaToken = STNodeFactory.createEmptyNode();
                rightTypeDescNode = STNodeFactory.createEmptyNode();
                break;
            default:
                recover(peek(), ParserRuleContext.STREAM_TYPE_FIRST_PARAM_RHS);
                return parseStreamTypeParamsNode(ltToken, leftTypeDescNode);
        }

        gtToken = parseGTToken();
        return STNodeFactory.createStreamTypeParamsNode(ltToken, leftTypeDescNode, commaToken, rightTypeDescNode,
                gtToken);
    }

    /**
     * Parse let expression.
     * <p>
     * <code>
     * let-expr := let let-var-decl [, let-var-decl]* in expression
     * </code>
     *
     * @param isRhsExpr Whether this is a RHS expression or not
     * @param isInConditionalExpr Whether this is within a conditional expression
     * @return Parsed node
     */
    private STNode parseLetExpression(boolean isRhsExpr, boolean isInConditionalExpr) {
        STNode letKeyword = parseLetKeyword();
        STNode letVarDeclarations = parseLetVarDeclarations(ParserRuleContext.LET_EXPR_LET_VAR_DECL, isRhsExpr,
                false);
        STNode inKeyword = parseInKeyword();

        // At least one let-var-decl is required; otherwise attach a diagnostic to the keyword.
        letKeyword = cloneWithDiagnosticIfListEmpty(letVarDeclarations, letKeyword,
                DiagnosticErrorCode.ERROR_MISSING_LET_VARIABLE_DECLARATION);
        STNode expression =
                parseExpression(OperatorPrecedence.REMOTE_CALL_ACTION, isRhsExpr, false, isInConditionalExpr);
        return STNodeFactory.createLetExpressionNode(letKeyword, letVarDeclarations, inKeyword, expression);
    }

    /**
     * Parse let-keyword.
*
     * @return Let-keyword node
     */
    private STNode parseLetKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.LET_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.LET_KEYWORD);
            return parseLetKeyword();
        }
    }

    /**
     * Parse let variable declarations.
     * <p>
     * <code>let-var-decl-list := let-var-decl [, let-var-decl]*</code>
     *
     * @param context parsing context (let-expression or query let-clause)
     * @param isRhsExpr Whether this is a RHS expression or not
     * @param allowActions Allow actions
     * @return Parsed node
     */
    private STNode parseLetVarDeclarations(ParserRuleContext context, boolean isRhsExpr, boolean allowActions) {
        startContext(context);
        List<STNode> varDecls = new ArrayList<>();
        STToken nextToken = peek();

        // Make sure at least one let variable declaration is present.
        if (isEndOfLetVarDeclarations(nextToken, getNextNextToken())) {
            endContext();
            return STNodeFactory.createEmptyNodeList();
        }

        // Parse the first declaration, then (comma, declaration) pairs.
        STNode varDec = parseLetVarDecl(context, isRhsExpr, allowActions);
        varDecls.add(varDec);

        nextToken = peek();
        STNode leadingComma;
        while (!isEndOfLetVarDeclarations(nextToken, getNextNextToken())) {
            leadingComma = parseComma();
            varDecls.add(leadingComma);
            varDec = parseLetVarDecl(context, isRhsExpr, allowActions);
            varDecls.add(varDec);
            nextToken = peek();
        }

        endContext();
        return STNodeFactory.createNodeList(varDecls);
    }

    static boolean isEndOfLetVarDeclarations(STToken nextToken, STToken nextNextToken) {
        SyntaxKind tokenKind = nextToken.kind;
        switch (tokenKind) {
            case COMMA_TOKEN:
            case AT_TOKEN:
                return false;
            case IN_KEYWORD:
                return true;
            default:
                return isGroupOrCollectKeyword(nextToken) || !isTypeStartingToken(tokenKind, nextNextToken);
        }
    }

    /**
     * Parse let variable declaration.
     * <p>
     * <code>let-var-decl := [annots] typed-binding-pattern = expression</code>
     *
     * @param context parsing context (let-expression or query let-clause)
     * @param isRhsExpr Whether this is a RHS expression or not
     * @param allowActions Allow actions
     * @return Parsed node
     */
    private STNode parseLetVarDecl(ParserRuleContext context, boolean isRhsExpr, boolean allowActions) {
        STNode annot = parseOptionalAnnotations();
        STNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.LET_EXPR_LET_VAR_DECL);
        STNode assign = parseAssignOp();

        // In a query let-clause the RHS binds at query precedence; in a let-expression
        // it binds at anon-func/let precedence, and actions are never allowed there.
        STNode expression = context == ParserRuleContext.LET_CLAUSE_LET_VAR_DECL ?
                parseExpression(OperatorPrecedence.QUERY, isRhsExpr, allowActions) :
                parseExpression(OperatorPrecedence.ANON_FUNC_OR_LET, isRhsExpr, false);
        return STNodeFactory.createLetVariableDeclarationNode(annot, typedBindingPattern, assign, expression);
    }

    /**
     * Parse raw backtick string template expression.
     * <p>
     * <code>BacktickString := `expression`</code>
     *
     * @return Template expression node
     */
    private STNode parseTemplateExpression() {
        STNode type = STNodeFactory.createEmptyNode();
        STNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);
        STNode content = parseTemplateContent();
        // NOTE(review): the closing backtick is parsed with TEMPLATE_START, unlike
        // parseStringTemplateExpression which uses TEMPLATE_END — confirm intentional.
        STNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);
        return STNodeFactory.createTemplateExpressionNode(SyntaxKind.RAW_TEMPLATE_EXPRESSION, type,
                startingBackTick, content, endingBackTick);
    }

    // Collect template items (literal segments and interpolations) until the closing backtick.
    private STNode parseTemplateContent() {
        List<STNode> items = new ArrayList<>();
        STToken nextToken = peek();
        while (!isEndOfBacktickContent(nextToken.kind)) {
            STNode contentItem = parseTemplateItem();
            items.add(contentItem);
            nextToken = peek();
        }
        return STNodeFactory.createNodeList(items);
    }

    private boolean isEndOfBacktickContent(SyntaxKind kind) {
        switch (kind) {
            case EOF_TOKEN:
            case BACKTICK_TOKEN:
                return true;
            default:
                return false;
        }
    }

    // A template item is either an `${...}` interpolation or a literal content token.
    private STNode parseTemplateItem() {
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.INTERPOLATION_START_TOKEN) {
            return parseInterpolation();
        }

        // Template string component
        return consume();
    }

    /**
     * Parse string template expression.
     * <p>
     * <code>string-template-expr := string ` expression `</code>
     *
     * @return String template expression node
     */
    private STNode parseStringTemplateExpression() {
        STNode type = parseStringKeyword();
        STNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);
        STNode content = parseTemplateContent();
        STNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_END);
        return STNodeFactory.createTemplateExpressionNode(SyntaxKind.STRING_TEMPLATE_EXPRESSION, type,
                startingBackTick, content, endingBackTick);
    }

    /**
     * Parse <code>string</code> keyword.
     *
     * @return string keyword node
     */
    private STNode parseStringKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.STRING_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.STRING_KEYWORD);
            return parseStringKeyword();
        }
    }

    /**
     * Parse XML template expression.
     * <p>
     * <code>xml-template-expr := xml BacktickString</code>
     *
     * @return XML template expression
     */
    private STNode parseXMLTemplateExpression() {
        STNode xmlKeyword = parseXMLKeyword();
        STNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);
        if (startingBackTick.isMissing()) {
            // Without the opening backtick there is no content to parse as XML.
            return createMissingTemplateExpressionNode(xmlKeyword, SyntaxKind.XML_TEMPLATE_EXPRESSION);
        }

        STNode content = parseTemplateContentAsXML();
        STNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_END);
        return STNodeFactory.createTemplateExpressionNode(SyntaxKind.XML_TEMPLATE_EXPRESSION, xmlKeyword,
                startingBackTick, content, endingBackTick);
    }

    /**
     * Parse <code>xml</code> keyword.
     *
     * @return xml keyword node
     */
    private STNode parseXMLKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.XML_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.XML_KEYWORD);
            return parseXMLKeyword();
        }
    }

    /**
     * Parse the content of the template string as XML. This method first reads the
     * input in the same way as the raw-backtick-template (BacktickString). Then
     * it parses the content as XML.
* * @return XML node */ private STNode parseTemplateContentAsXML() { ArrayDeque<STNode> expressions = new ArrayDeque<>(); StringBuilder xmlStringBuilder = new StringBuilder(); STToken nextToken = peek(); while (!isEndOfBacktickContent(nextToken.kind)) { STNode contentItem = parseTemplateItem(); if (contentItem.kind == SyntaxKind.TEMPLATE_STRING) { xmlStringBuilder.append(((STToken) contentItem).text()); } else { xmlStringBuilder.append("${}"); expressions.add(contentItem); } nextToken = peek(); } CharReader charReader = CharReader.from(xmlStringBuilder.toString()); AbstractTokenReader tokenReader = new TokenReader(new XMLLexer(charReader)); XMLParser xmlParser = new XMLParser(tokenReader, expressions); return xmlParser.parse(); } /** * Parse regular expression constructor. * <p> * <code>regexp-constructor-expr := re BacktickString</code> * * @return Regular expression template expression */ private STNode parseRegExpTemplateExpression() { STNode reKeyword = consume(); STNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START); if (startingBackTick.isMissing()) { return createMissingTemplateExpressionNode(reKeyword, SyntaxKind.REGEX_TEMPLATE_EXPRESSION); } STNode content = parseTemplateContentAsRegExp(); STNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_END); return STNodeFactory.createTemplateExpressionNode(SyntaxKind.REGEX_TEMPLATE_EXPRESSION, reKeyword, startingBackTick, content, endingBackTick); } private STNode createMissingTemplateExpressionNode(STNode reKeyword, SyntaxKind kind) { STNode startingBackTick = SyntaxErrors.createMissingToken(SyntaxKind.BACKTICK_TOKEN); STNode endingBackTick = SyntaxErrors.createMissingToken(SyntaxKind.BACKTICK_TOKEN); STNode content = STAbstractNodeFactory.createEmptyNodeList(); STNode templateExpr = STNodeFactory.createTemplateExpressionNode(kind, reKeyword, startingBackTick, content, endingBackTick); templateExpr = SyntaxErrors.addDiagnostic(templateExpr, 
DiagnosticErrorCode.ERROR_MISSING_BACKTICK_STRING); return templateExpr; } /** * Parse the content of the template string as regular expression. This method first read the * input in the same way as the raw-backtick-template (BacktickString). Then * it parses the content as regular expression. * * @return Template expression node */ private STNode parseTemplateContentAsRegExp() { this.tokenReader.startMode(ParserMode.REGEXP); ArrayDeque<STNode> expressions = new ArrayDeque<>(); StringBuilder regExpStringBuilder = new StringBuilder(); STToken nextToken = peek(); while (!isEndOfBacktickContent(nextToken.kind)) { STNode contentItem = parseTemplateItem(); if (contentItem.kind == SyntaxKind.TEMPLATE_STRING) { regExpStringBuilder.append(((STToken) contentItem).text()); } else { regExpStringBuilder.append("${}"); expressions.add(contentItem); } nextToken = peek(); } this.tokenReader.endMode(); CharReader charReader = CharReader.from(regExpStringBuilder.toString()); AbstractTokenReader tokenReader = new TokenReader(new RegExpLexer(charReader)); RegExpParser regExpParser = new RegExpParser(tokenReader, expressions); return regExpParser.parse(); } /** * Parse interpolation of a back-tick string. 
 * <p>
 * <code>
 * interpolation := ${ expression }
 * </code>
 *
 * @return Interpolation node
 */
private STNode parseInterpolation() {
    startContext(ParserRuleContext.INTERPOLATION);
    STNode interpolStart = parseInterpolationStart();
    STNode expr = parseExpression();
    // Consume any stray tokens between the expression and the closing brace,
    // attaching each one to the expression as invalid-node minutiae with an
    // ERROR_INVALID_TOKEN diagnostic, so the tree still covers every character.
    while (!isEndOfInterpolation()) {
        STToken nextToken = consume();
        expr = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(expr, nextToken,
                DiagnosticErrorCode.ERROR_INVALID_TOKEN, nextToken.text());
    }
    STNode closeBrace = parseCloseBrace();
    endContext();
    return STNodeFactory.createInterpolationNode(interpolStart, expr, closeBrace);
}

// Returns true when the next token terminates the interpolation: EOF, the
// template's back-tick, or a `}` seen while the lexer is no longer in an
// interpolation-related mode (i.e. the brace closes the interpolation itself
// rather than a nested braced construct inside it).
private boolean isEndOfInterpolation() {
    SyntaxKind nextTokenKind = peek().kind;
    switch (nextTokenKind) {
        case EOF_TOKEN:
        case BACKTICK_TOKEN:
            return true;
        default:
            ParserMode currentLexerMode = this.tokenReader.getCurrentMode();
            return nextTokenKind == SyntaxKind.CLOSE_BRACE_TOKEN &&
                    currentLexerMode != ParserMode.INTERPOLATION &&
                    currentLexerMode != ParserMode.INTERPOLATION_BRACED_CONTENT;
    }
}

/**
 * Parse interpolation start token.
 * <p>
 * <code>interpolation-start := ${</code>
 *
 * @return Interpolation start token
 */
private STNode parseInterpolationStart() {
    STToken token = peek();
    if (token.kind == SyntaxKind.INTERPOLATION_START_TOKEN) {
        return consume();
    } else {
        // Recover (e.g. insert a missing `${`) and retry.
        recover(token, ParserRuleContext.INTERPOLATION_START_TOKEN);
        return parseInterpolationStart();
    }
}

/**
 * Parse back-tick token.
 *
 * @param ctx Parser rule context used for error recovery when the back-tick is missing
 * @return Back-tick token
 */
private STNode parseBacktickToken(ParserRuleContext ctx) {
    STToken token = peek();
    if (token.kind == SyntaxKind.BACKTICK_TOKEN) {
        return consume();
    } else {
        recover(token, ctx);
        return parseBacktickToken(ctx);
    }
}

/**
 * Parse table type descriptor.
 * <p>
 * table-type-descriptor := table row-type-parameter [key-constraint]
 * row-type-parameter := type-parameter
 * key-constraint := key-specifier | key-type-constraint
 * key-specifier := key ( [ field-name (, field-name)* ] )
 * key-type-constraint := key type-parameter
 * </p>
 *
 * @param tableKeywordToken Already-consumed <code>table</code> keyword
 * @return Parsed table type desc node.
 */
private STNode parseTableTypeDescriptor(STNode tableKeywordToken) {
    STNode rowTypeParameterNode = parseRowTypeParameter();
    STNode keyConstraintNode;
    STToken nextToken = peek();
    // `key` is a contextual keyword (it arrives as an identifier token),
    // hence the helper check and the conversion via getKeyKeyword.
    if (isKeyKeyword(nextToken)) {
        STNode keyKeywordToken = getKeyKeyword(consume());
        keyConstraintNode = parseKeyConstraint(keyKeywordToken);
    } else {
        // key-constraint is optional.
        keyConstraintNode = STNodeFactory.createEmptyNode();
    }
    return STNodeFactory.createTableTypeDescriptorNode(tableKeywordToken, rowTypeParameterNode, keyConstraintNode);
}

/**
 * Parse row type parameter node.
 * <p>
 * row-type-parameter := type-parameter
 * </p>
 *
 * @return Parsed node.
 */
private STNode parseRowTypeParameter() {
    startContext(ParserRuleContext.ROW_TYPE_PARAM);
    STNode rowTypeParameterNode = parseTypeParameter();
    endContext();
    return rowTypeParameterNode;
}

/**
 * Parse type parameter node.
 * <p>
 * type-parameter := < type-descriptor >
 * </p>
 *
 * @return Parsed node
 */
private STNode parseTypeParameter() {
    STNode ltToken = parseLTToken();
    STNode typeNode = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);
    STNode gtToken = parseGTToken();
    return STNodeFactory.createTypeParameterNode(ltToken, typeNode, gtToken);
}

/**
 * Parse key constraint.
 * <p>
 * key-constraint := key-specifier | key-type-constraint
 * </p>
 *
 * @return Parsed node.
*/ private STNode parseKeyConstraint(STNode keyKeywordToken) { switch (peek().kind) { case OPEN_PAREN_TOKEN: return parseKeySpecifier(keyKeywordToken); case LT_TOKEN: return parseKeyTypeConstraint(keyKeywordToken); default: recover(peek(), ParserRuleContext.KEY_CONSTRAINTS_RHS); return parseKeyConstraint(keyKeywordToken); } } /** * Parse key specifier given parsed key keyword token. * <p> * <code>key-specifier := key ( [ field-name (, field-name)* ] )</code> * * @return Parsed node */ private STNode parseKeySpecifier(STNode keyKeywordToken) { startContext(ParserRuleContext.KEY_SPECIFIER); STNode openParenToken = parseOpenParenthesis(); STNode fieldNamesNode = parseFieldNames(); STNode closeParenToken = parseCloseParenthesis(); endContext(); return STNodeFactory.createKeySpecifierNode(keyKeywordToken, openParenToken, fieldNamesNode, closeParenToken); } /** * Parse key type constraint. * <p> * key-type-constraint := key type-parameter * </p> * * @return Parsed node */ private STNode parseKeyTypeConstraint(STNode keyKeywordToken) { STNode typeParameterNode = parseTypeParameter(); return STNodeFactory.createKeyTypeConstraintNode(keyKeywordToken, typeParameterNode); } /** * Parse function type descriptor. 
* <p> * <code> * function-type-descriptor := function-quals function function-signature * <br/>&nbsp;| [isolated] function * <br/> * function-quals := (transactional | isolated)* * </code> * * @param qualifiers Preceding type descriptor qualifiers * @return Function type descriptor node */ private STNode parseFunctionTypeDesc(List<STNode> qualifiers) { startContext(ParserRuleContext.FUNC_TYPE_DESC); STNode functionKeyword = parseFunctionKeyword(); boolean hasFuncSignature = false; STNode signature = STNodeFactory.createEmptyNode(); if (peek().kind == SyntaxKind.OPEN_PAREN_TOKEN || isSyntaxKindInList(qualifiers, SyntaxKind.TRANSACTIONAL_KEYWORD)) { signature = parseFuncSignature(true); hasFuncSignature = true; } STNode[] nodes = createFuncTypeQualNodeList(qualifiers, functionKeyword, hasFuncSignature); STNode qualifierList = nodes[0]; functionKeyword = nodes[1]; endContext(); return STNodeFactory.createFunctionTypeDescriptorNode(qualifierList, functionKeyword, signature); } private STNode getLastNodeInList(List<STNode> nodeList) { return nodeList.get(nodeList.size() - 1); } private STNode[] createFuncTypeQualNodeList(List<STNode> qualifierList, STNode functionKeyword, boolean hasFuncSignature) { List<STNode> validatedList = new ArrayList<>(); for (int i = 0; i < qualifierList.size(); i++) { STNode qualifier = qualifierList.get(i); int nextIndex = i + 1; if (isSyntaxKindInList(validatedList, qualifier.kind)) { updateLastNodeInListWithInvalidNode(validatedList, qualifier, DiagnosticErrorCode.ERROR_DUPLICATE_QUALIFIER, ((STToken) qualifier).text()); } else if (hasFuncSignature && isRegularFuncQual(qualifier.kind)) { validatedList.add(qualifier); } else if (qualifier.kind == SyntaxKind.ISOLATED_KEYWORD) { validatedList.add(qualifier); } else if (qualifierList.size() == nextIndex) { functionKeyword = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(functionKeyword, qualifier, DiagnosticErrorCode.ERROR_QUALIFIER_NOT_ALLOWED, ((STToken) qualifier).text()); } else { 
updateANodeInListWithLeadingInvalidNode(qualifierList, nextIndex, qualifier, DiagnosticErrorCode.ERROR_QUALIFIER_NOT_ALLOWED, ((STToken) qualifier).text()); } } STNode nodeList = STNodeFactory.createNodeList(validatedList); return new STNode[]{ nodeList, functionKeyword }; } private boolean isRegularFuncQual(SyntaxKind tokenKind) { switch (tokenKind) { case ISOLATED_KEYWORD: case TRANSACTIONAL_KEYWORD: return true; default: return false; } } /** * Parse explicit anonymous function expression. * <p> * <code>explicit-anonymous-function-expr := * [annots] (isolated| transactional) function function-signature anon-func-body</code> * * @param annots Annotations. * @param qualifiers Function qualifiers * @param isRhsExpr Is expression in rhs context * @return Anonymous function expression node */ private STNode parseExplicitFunctionExpression(STNode annots, List<STNode> qualifiers, boolean isRhsExpr) { startContext(ParserRuleContext.ANON_FUNC_EXPRESSION); STNode funcKeyword = parseFunctionKeyword(); STNode[] nodes = createFuncTypeQualNodeList(qualifiers, funcKeyword, true); STNode qualifierList = nodes[0]; funcKeyword = nodes[1]; STNode funcSignature = parseFuncSignature(false); STNode funcBody = parseAnonFuncBody(isRhsExpr); return STNodeFactory.createExplicitAnonymousFunctionExpressionNode(annots, qualifierList, funcKeyword, funcSignature, funcBody); } /** * Parse anonymous function body. * <p> * <code>anon-func-body := block-function-body | expr-function-body</code> * * @param isRhsExpr Is expression in rhs context * @return Anon function body node */ private STNode parseAnonFuncBody(boolean isRhsExpr) { switch (peek().kind) { case OPEN_BRACE_TOKEN: case EOF_TOKEN: STNode body = parseFunctionBodyBlock(true); endContext(); return body; case RIGHT_DOUBLE_ARROW_TOKEN: endContext(); return parseExpressionFuncBody(true, isRhsExpr); default: recover(peek(), ParserRuleContext.ANON_FUNC_BODY); return parseAnonFuncBody(isRhsExpr); } } /** * Parse expression function body. 
* <p> * <code>expr-function-body := => expression</code> * * @param isAnon Is anonymous function. * @param isRhsExpr Is expression in rhs context * @return Expression function body node */ private STNode parseExpressionFuncBody(boolean isAnon, boolean isRhsExpr) { STNode rightDoubleArrow = parseDoubleRightArrow(); STNode expression = parseExpression(OperatorPrecedence.REMOTE_CALL_ACTION, isRhsExpr, false); STNode semiColon; if (isAnon) { semiColon = STNodeFactory.createEmptyNode(); } else { semiColon = parseSemicolon(); } return STNodeFactory.createExpressionFunctionBodyNode(rightDoubleArrow, expression, semiColon); } /** * Parse '=>' token. * * @return Double right arrow token */ private STNode parseDoubleRightArrow() { STToken token = peek(); if (token.kind == SyntaxKind.RIGHT_DOUBLE_ARROW_TOKEN) { return consume(); } else { recover(token, ParserRuleContext.EXPR_FUNC_BODY_START); return parseDoubleRightArrow(); } } private STNode parseImplicitAnonFunc(STNode params, boolean isRhsExpr) { switch (params.kind) { case SIMPLE_NAME_REFERENCE: case INFER_PARAM_LIST: break; case BRACED_EXPRESSION: params = getAnonFuncParam((STBracedExpressionNode) params); break; case NIL_LITERAL: STNilLiteralNode nilLiteralNode = (STNilLiteralNode) params; params = STNodeFactory.createImplicitAnonymousFunctionParameters(nilLiteralNode.openParenToken, STNodeFactory.createNodeList(new ArrayList<>()), nilLiteralNode.closeParenToken); break; default: STToken syntheticParam = STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN); syntheticParam = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(syntheticParam, params, DiagnosticErrorCode.ERROR_INVALID_PARAM_LIST_IN_INFER_ANONYMOUS_FUNCTION_EXPR); params = STNodeFactory.createSimpleNameReferenceNode(syntheticParam); } STNode rightDoubleArrow = parseDoubleRightArrow(); STNode expression = parseExpression(OperatorPrecedence.REMOTE_CALL_ACTION, isRhsExpr, false); return STNodeFactory.createImplicitAnonymousFunctionExpressionNode(params, 
rightDoubleArrow, expression); } /** * Create a new anon-func-param node from a braced expression. * * @param bracedExpression Braced expression * @return Anon-func param node */ private STNode getAnonFuncParam(STBracedExpressionNode bracedExpression) { List<STNode> paramList = new ArrayList<>(); STNode innerExpression = bracedExpression.expression; STNode openParen = bracedExpression.openParen; if (innerExpression.kind == SyntaxKind.SIMPLE_NAME_REFERENCE) { paramList.add(innerExpression); } else { openParen = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(openParen, innerExpression, DiagnosticErrorCode.ERROR_INVALID_PARAM_LIST_IN_INFER_ANONYMOUS_FUNCTION_EXPR); } return STNodeFactory.createImplicitAnonymousFunctionParameters(openParen, STNodeFactory.createNodeList(paramList), bracedExpression.closeParen); } /** * Parse implicit anon function expression. * * @param openParen Open parenthesis token * @param firstParam First parameter * @param isRhsExpr Is expression in rhs context * @return Implicit anon function expression node */ private STNode parseImplicitAnonFunc(STNode openParen, STNode firstParam, boolean isRhsExpr) { List<STNode> paramList = new ArrayList<>(); paramList.add(firstParam); STToken nextToken = peek(); STNode paramEnd; STNode param; while (!isEndOfAnonFuncParametersList(nextToken.kind)) { paramEnd = parseImplicitAnonFuncParamEnd(); if (paramEnd == null) { break; } paramList.add(paramEnd); param = parseIdentifier(ParserRuleContext.IMPLICIT_ANON_FUNC_PARAM); param = STNodeFactory.createSimpleNameReferenceNode(param); paramList.add(param); nextToken = peek(); } STNode params = STNodeFactory.createNodeList(paramList); STNode closeParen = parseCloseParenthesis(); endContext(); STNode inferedParams = STNodeFactory.createImplicitAnonymousFunctionParameters(openParen, params, closeParen); return parseImplicitAnonFunc(inferedParams, isRhsExpr); } private STNode parseImplicitAnonFuncParamEnd() { switch (peek().kind) { case COMMA_TOKEN: return 
parseComma(); case CLOSE_PAREN_TOKEN: return null; default: recover(peek(), ParserRuleContext.ANON_FUNC_PARAM_RHS); return parseImplicitAnonFuncParamEnd(); } } private boolean isEndOfAnonFuncParametersList(SyntaxKind tokenKind) { switch (tokenKind) { case EOF_TOKEN: case CLOSE_BRACE_TOKEN: case CLOSE_PAREN_TOKEN: case CLOSE_BRACKET_TOKEN: case SEMICOLON_TOKEN: case RETURNS_KEYWORD: case TYPE_KEYWORD: case LISTENER_KEYWORD: case IF_KEYWORD: case WHILE_KEYWORD: case DO_KEYWORD: case OPEN_BRACE_TOKEN: case RIGHT_DOUBLE_ARROW_TOKEN: return true; default: return false; } } /** * Parse tuple type descriptor. * <p> * <code>tuple-type-descriptor := [ tuple-member-type-descriptors ] * <br/><br/> * tuple-member-type-descriptors := member-type-descriptor (, member-type-descriptor)* [, tuple-rest-descriptor] * | [ tuple-rest-descriptor ] * <br/><br/> * member-type-descriptor := [annots] type-descriptor * tuple-rest-descriptor := type-descriptor ... * </code> * * @return */ private STNode parseTupleTypeDesc() { STNode openBracket = parseOpenBracket(); startContext(ParserRuleContext.TUPLE_MEMBERS); STNode memberTypeDesc = parseTupleMemberTypeDescList(); STNode closeBracket = parseCloseBracket(); endContext(); openBracket = cloneWithDiagnosticIfListEmpty(memberTypeDesc, openBracket, DiagnosticErrorCode.ERROR_MISSING_TYPE_DESC); return STNodeFactory.createTupleTypeDescriptorNode(openBracket, memberTypeDesc, closeBracket); } /** * Parse tuple member type descriptors. 
* * @return Parsed node */ private STNode parseTupleMemberTypeDescList() { List<STNode> typeDescList = new ArrayList<>(); STToken nextToken = peek(); if (isEndOfTypeList(nextToken.kind)) { return STNodeFactory.createEmptyNodeList(); } STNode typeDesc = parseTupleMember(); return parseTupleTypeMembers(typeDesc, typeDescList); } private STNode parseTupleTypeMembers(STNode firstMember, List<STNode> memberList) { STNode tupleMemberRhs; while (!isEndOfTypeList(peek().kind)) { if (firstMember.kind == SyntaxKind.REST_TYPE) { firstMember = invalidateTypeDescAfterRestDesc(firstMember); break; } tupleMemberRhs = parseTupleMemberRhs(); if (tupleMemberRhs == null) { break; } memberList.add(firstMember); memberList.add(tupleMemberRhs); firstMember = parseTupleMember(); } memberList.add(firstMember); return STNodeFactory.createNodeList(memberList); } private STNode parseTupleMember() { STNode annot = parseOptionalAnnotations(); STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE); return createMemberOrRestNode(annot, typeDesc); } private STNode createMemberOrRestNode(STNode annot, STNode typeDesc) { STNode tupleMemberRhs = parseTypeDescInTupleRhs(); if (tupleMemberRhs != null) { if (!((STNodeList) annot).isEmpty()) { typeDesc = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(typeDesc, annot, DiagnosticErrorCode.ERROR_ANNOTATIONS_NOT_ALLOWED_FOR_TUPLE_REST_DESCRIPTOR); } return STNodeFactory.createRestDescriptorNode(typeDesc, tupleMemberRhs); } return STNodeFactory.createMemberTypeDescriptorNode(annot, typeDesc); } private STNode invalidateTypeDescAfterRestDesc(STNode restDescriptor) { while (!isEndOfTypeList(peek().kind)) { STNode tupleMemberRhs = parseTupleMemberRhs(); if (tupleMemberRhs == null) { break; } restDescriptor = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(restDescriptor, tupleMemberRhs, null); restDescriptor = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(restDescriptor, parseTupleMember(), 
DiagnosticErrorCode.ERROR_TYPE_DESC_AFTER_REST_DESCRIPTOR); } return restDescriptor; } private STNode parseTupleMemberRhs() { STToken nextToken = peek(); switch (nextToken.kind) { case COMMA_TOKEN: return parseComma(); case CLOSE_BRACKET_TOKEN: return null; default: recover(nextToken, ParserRuleContext.TUPLE_TYPE_MEMBER_RHS); return parseTupleMemberRhs(); } } private STNode parseTypeDescInTupleRhs() { STToken nextToken = peek(); switch (nextToken.kind) { case COMMA_TOKEN: case CLOSE_BRACKET_TOKEN: return null; case ELLIPSIS_TOKEN: return parseEllipsis(); default: recover(nextToken, ParserRuleContext.TYPE_DESC_IN_TUPLE_RHS); return parseTypeDescInTupleRhs(); } } private boolean isEndOfTypeList(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case CLOSE_BRACKET_TOKEN: case CLOSE_BRACE_TOKEN: case CLOSE_PAREN_TOKEN: case EOF_TOKEN: case EQUAL_TOKEN: case SEMICOLON_TOKEN: return true; default: return false; } } /** * Parse table constructor or query expression. * <p> * <code> * table-constructor-or-query-expr := table-constructor-expr | query-expr * <br/> * table-constructor-expr := table [key-specifier] [ [row-list] ] * <br/> * query-expr := [query-construct-type] query-pipeline select-clause * [query-construct-type] query-pipeline select-clause on-conflict-clause? 
* <br/> * query-construct-type := table key-specifier | stream | map * </code> * * @return Parsed node */ private STNode parseTableConstructorOrQuery(boolean isRhsExpr, boolean allowActions) { startContext(ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_EXPRESSION); STNode tableOrQueryExpr = parseTableConstructorOrQueryInternal(isRhsExpr, allowActions); endContext(); return tableOrQueryExpr; } private STNode parseTableConstructorOrQueryInternal(boolean isRhsExpr, boolean allowActions) { STNode queryConstructType; switch (peek().kind) { case FROM_KEYWORD: queryConstructType = STNodeFactory.createEmptyNode(); return parseQueryExprRhs(queryConstructType, isRhsExpr, allowActions); case TABLE_KEYWORD: STNode tableKeyword = parseTableKeyword(); return parseTableConstructorOrQuery(tableKeyword, isRhsExpr, allowActions); case STREAM_KEYWORD: case MAP_KEYWORD: STNode streamOrMapKeyword = consume(); STNode keySpecifier = STNodeFactory.createEmptyNode(); queryConstructType = parseQueryConstructType(streamOrMapKeyword, keySpecifier); return parseQueryExprRhs(queryConstructType, isRhsExpr, allowActions); default: recover(peek(), ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_START); return parseTableConstructorOrQueryInternal(isRhsExpr, allowActions); } } private STNode parseTableConstructorOrQuery(STNode tableKeyword, boolean isRhsExpr, boolean allowActions) { STNode keySpecifier; STToken nextToken = peek(); switch (nextToken.kind) { case OPEN_BRACKET_TOKEN: keySpecifier = STNodeFactory.createEmptyNode(); return parseTableConstructorExprRhs(tableKeyword, keySpecifier); case KEY_KEYWORD: keySpecifier = parseKeySpecifier(); return parseTableConstructorOrQueryRhs(tableKeyword, keySpecifier, isRhsExpr, allowActions); case IDENTIFIER_TOKEN: if (isKeyKeyword(nextToken)) { keySpecifier = parseKeySpecifier(); return parseTableConstructorOrQueryRhs(tableKeyword, keySpecifier, isRhsExpr, allowActions); } break; default: break; } recover(peek(), ParserRuleContext.TABLE_KEYWORD_RHS); return 
parseTableConstructorOrQuery(tableKeyword, isRhsExpr, allowActions); } private STNode parseTableConstructorOrQueryRhs(STNode tableKeyword, STNode keySpecifier, boolean isRhsExpr, boolean allowActions) { switch (peek().kind) { case FROM_KEYWORD: return parseQueryExprRhs(parseQueryConstructType(tableKeyword, keySpecifier), isRhsExpr, allowActions); case OPEN_BRACKET_TOKEN: return parseTableConstructorExprRhs(tableKeyword, keySpecifier); default: recover(peek(), ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_RHS); return parseTableConstructorOrQueryRhs(tableKeyword, keySpecifier, isRhsExpr, allowActions); } } /** * Parse query construct type. * <p> * <code>query-construct-type := table key-specifier | stream | map</code> * * @return Parsed node */ private STNode parseQueryConstructType(STNode keyword, STNode keySpecifier) { return STNodeFactory.createQueryConstructTypeNode(keyword, keySpecifier); } /** * Parse query action or expression. * <p> * <code> * query-expr-rhs := query-pipeline select-clause * query-pipeline select-clause on-conflict-clause? 
* <br/> * query-pipeline := from-clause intermediate-clause* * </code> * * @param queryConstructType queryConstructType that precedes this rhs * @return Parsed node */ private STNode parseQueryExprRhs(STNode queryConstructType, boolean isRhsExpr, boolean allowActions) { switchContext(ParserRuleContext.QUERY_EXPRESSION); STNode fromClause = parseFromClause(isRhsExpr, allowActions); List<STNode> clauses = new ArrayList<>(); STNode intermediateClause; STNode selectClause = null; STNode collectClause = null; while (!isEndOfIntermediateClause(peek().kind)) { intermediateClause = parseIntermediateClause(isRhsExpr, allowActions); if (intermediateClause == null) { break; } if (selectClause != null) { selectClause = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(selectClause, intermediateClause, DiagnosticErrorCode.ERROR_MORE_CLAUSES_AFTER_SELECT_CLAUSE); continue; } else if (collectClause != null) { collectClause = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(collectClause, intermediateClause, DiagnosticErrorCode.ERROR_MORE_CLAUSES_AFTER_COLLECT_CLAUSE); continue; } if (intermediateClause.kind == SyntaxKind.SELECT_CLAUSE) { selectClause = intermediateClause; } else if (intermediateClause.kind == SyntaxKind.COLLECT_CLAUSE) { collectClause = intermediateClause; } else { clauses.add(intermediateClause); continue; } if (isNestedQueryExpr() || !isValidIntermediateQueryStart(peek())) { break; } } if (peek().kind == SyntaxKind.DO_KEYWORD && (!isNestedQueryExpr() || (selectClause == null && collectClause == null))) { STNode intermediateClauses = STNodeFactory.createNodeList(clauses); STNode queryPipeline = STNodeFactory.createQueryPipelineNode(fromClause, intermediateClauses); return parseQueryAction(queryConstructType, queryPipeline, selectClause, collectClause); } if (selectClause == null && collectClause == null) { STNode selectKeyword = SyntaxErrors.createMissingToken(SyntaxKind.SELECT_KEYWORD); STNode expr = STNodeFactory 
.createSimpleNameReferenceNode(SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN)); selectClause = STNodeFactory.createSelectClauseNode(selectKeyword, expr); if (clauses.isEmpty()) { fromClause = SyntaxErrors.addDiagnostic(fromClause, DiagnosticErrorCode.ERROR_MISSING_SELECT_CLAUSE); } else { int lastIndex = clauses.size() - 1; STNode intClauseWithDiagnostic = SyntaxErrors.addDiagnostic(clauses.get(lastIndex), DiagnosticErrorCode.ERROR_MISSING_SELECT_CLAUSE); clauses.set(lastIndex, intClauseWithDiagnostic); } } STNode intermediateClauses = STNodeFactory.createNodeList(clauses); STNode queryPipeline = STNodeFactory.createQueryPipelineNode(fromClause, intermediateClauses); STNode onConflictClause = parseOnConflictClause(isRhsExpr); return STNodeFactory.createQueryExpressionNode(queryConstructType, queryPipeline, selectClause == null ? collectClause : selectClause, onConflictClause); } /** * Check whether currently parsing query expr is a nested query expression. * * @return <code>true</code> if currently parsing query-expr is a nested query-expr. <code>false</code> otherwise. 
*/ private boolean isNestedQueryExpr() { return Collections.frequency(this.errorHandler.getContextStack(), ParserRuleContext.QUERY_EXPRESSION) > 1; } private boolean isValidIntermediateQueryStart(STToken token) { switch (token.kind) { case FROM_KEYWORD: case WHERE_KEYWORD: case LET_KEYWORD: case SELECT_KEYWORD: case JOIN_KEYWORD: case OUTER_KEYWORD: case ORDER_KEYWORD: case BY_KEYWORD: case ASCENDING_KEYWORD: case DESCENDING_KEYWORD: case LIMIT_KEYWORD: return true; case IDENTIFIER_TOKEN: return isGroupOrCollectKeyword(token); default: return false; } } private static boolean isGroupOrCollectKeyword(STToken nextToken) { return isKeywordMatch(SyntaxKind.COLLECT_KEYWORD, nextToken) || isKeywordMatch(SyntaxKind.GROUP_KEYWORD, nextToken); } static boolean isKeywordMatch(SyntaxKind syntaxKind, STToken token) { return token.kind == SyntaxKind.IDENTIFIER_TOKEN && syntaxKind.stringValue().equals(token.text()); } /** * Parse an intermediate clause. * <p> * <code> * intermediate-clause := from-clause | where-clause | let-clause | join-clause | limit-clause | order-by-clause * </code> * * @return Parsed node */ private STNode parseIntermediateClause(boolean isRhsExpr, boolean allowActions) { STToken nextToken = peek(); switch (nextToken.kind) { case FROM_KEYWORD: return parseFromClause(isRhsExpr, allowActions); case WHERE_KEYWORD: return parseWhereClause(isRhsExpr); case LET_KEYWORD: return parseLetClause(isRhsExpr, allowActions); case SELECT_KEYWORD: return parseSelectClause(isRhsExpr, allowActions); case JOIN_KEYWORD: case OUTER_KEYWORD: return parseJoinClause(isRhsExpr); case ORDER_KEYWORD: case ASCENDING_KEYWORD: case DESCENDING_KEYWORD: return parseOrderByClause(isRhsExpr); case LIMIT_KEYWORD: return parseLimitClause(isRhsExpr); case DO_KEYWORD: case SEMICOLON_TOKEN: case ON_KEYWORD: case CONFLICT_KEYWORD: return null; default: if (isKeywordMatch(SyntaxKind.COLLECT_KEYWORD, nextToken)) { return parseCollectClause(isRhsExpr); } if (isKeywordMatch(SyntaxKind.GROUP_KEYWORD, 
nextToken)) {
                    return parseGroupByClause(isRhsExpr);
                }
                recover(peek(), ParserRuleContext.QUERY_PIPELINE_RHS);
                return parseIntermediateClause(isRhsExpr, allowActions);
        }
    }

    // Parses a collect clause: the `collect` keyword followed by an expression
    // parsed at QUERY precedence.
    private STNode parseCollectClause(boolean isRhsExpr) {
        startContext(ParserRuleContext.COLLECT_CLAUSE);
        STNode collectKeyword = parseCollectKeyword();
        STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
        endContext();
        return STNodeFactory.createCollectClauseNode(collectKeyword, expression);
    }

    /**
     * Parse collect-keyword.
     *
     * @return collect-keyword node
     */
    private STNode parseCollectKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.COLLECT_KEYWORD) {
            return consume();
        }
        // `collect` is a contextual keyword: it may arrive as a plain
        // identifier whose text is "collect"; convert it to a keyword token.
        if (isKeywordMatch(SyntaxKind.COLLECT_KEYWORD, token)) {
            return getCollectKeyword(consume());
        }
        recover(token, ParserRuleContext.COLLECT_KEYWORD);
        return parseCollectKeyword();
    }

    // Re-creates the given identifier token as a COLLECT_KEYWORD token,
    // preserving its minutiae and diagnostics.
    private STNode getCollectKeyword(STToken token) {
        return STNodeFactory.createToken(SyntaxKind.COLLECT_KEYWORD, token.leadingMinutiae(), token.trailingMinutiae(),
                token.diagnostics());
    }

    /**
     * Parse join-keyword.
     *
     * @return Join-keyword node
     */
    private STNode parseJoinKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.JOIN_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.JOIN_KEYWORD);
            return parseJoinKeyword();
        }
    }

    /**
     * Parse equals keyword.
* * @return Parsed node */ private STNode parseEqualsKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.EQUALS_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.EQUALS_KEYWORD); return parseEqualsKeyword(); } } private boolean isEndOfIntermediateClause(SyntaxKind tokenKind) { switch (tokenKind) { case CLOSE_BRACE_TOKEN: case CLOSE_PAREN_TOKEN: case CLOSE_BRACKET_TOKEN: case OPEN_BRACE_TOKEN: case SEMICOLON_TOKEN: case PUBLIC_KEYWORD: case FUNCTION_KEYWORD: case EOF_TOKEN: case RESOURCE_KEYWORD: case LISTENER_KEYWORD: case DOCUMENTATION_STRING: case PRIVATE_KEYWORD: case RETURNS_KEYWORD: case SERVICE_KEYWORD: case TYPE_KEYWORD: case CONST_KEYWORD: case FINAL_KEYWORD: case DO_KEYWORD: case ON_KEYWORD: case CONFLICT_KEYWORD: return true; default: return isValidExprRhsStart(tokenKind, SyntaxKind.NONE); } } /** * Parse from clause. * <p> * <code>from-clause := from typed-binding-pattern in expression</code> * * @return Parsed node */ private STNode parseFromClause(boolean isRhsExpr, boolean allowActions) { STNode fromKeyword = parseFromKeyword(); STNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.FROM_CLAUSE); STNode inKeyword = parseInKeyword(); STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, allowActions); return STNodeFactory.createFromClauseNode(fromKeyword, typedBindingPattern, inKeyword, expression); } /** * Parse from-keyword. * * @return From-keyword node */ private STNode parseFromKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.FROM_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.FROM_KEYWORD); return parseFromKeyword(); } } /** * Parse where clause. 
* <p> * <code>where-clause := where expression</code> * * @return Parsed node */ private STNode parseWhereClause(boolean isRhsExpr) { STNode whereKeyword = parseWhereKeyword(); STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false); return STNodeFactory.createWhereClauseNode(whereKeyword, expression); } /** * Parse where-keyword. * * @return Where-keyword node */ private STNode parseWhereKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.WHERE_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.WHERE_KEYWORD); return parseWhereKeyword(); } } /** * Parse limit-keyword. * * @return limit-keyword node */ private STNode parseLimitKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.LIMIT_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.LIMIT_KEYWORD); return parseLimitKeyword(); } } /** * Parse let clause. * <p> * <code>let-clause := let let-var-decl [, let-var-decl]* </code> * * @return Parsed node */ private STNode parseLetClause(boolean isRhsExpr, boolean allowActions) { STNode letKeyword = parseLetKeyword(); STNode letVarDeclarations = parseLetVarDeclarations(ParserRuleContext.LET_CLAUSE_LET_VAR_DECL, isRhsExpr, allowActions); letKeyword = cloneWithDiagnosticIfListEmpty(letVarDeclarations, letKeyword, DiagnosticErrorCode.ERROR_MISSING_LET_VARIABLE_DECLARATION); return STNodeFactory.createLetClauseNode(letKeyword, letVarDeclarations); } /** * Parse group by clause. 
* <code>group-by-clause := group by grouping-key-list</code> * * @return Parsed node */ private STNode parseGroupByClause(boolean isRhsExpr) { startContext(ParserRuleContext.GROUP_BY_CLAUSE); STNode groupKeyword = parseGroupKeyword(); STNode byKeyword = parseByKeyword(); STNode groupingKeys = parseGroupingKeyList(isRhsExpr); byKeyword = cloneWithDiagnosticIfListEmpty(groupingKeys, byKeyword, DiagnosticErrorCode.ERROR_MISSING_GROUPING_KEY); endContext(); return STNodeFactory.createGroupByClauseNode(groupKeyword, byKeyword, groupingKeys); } /** * Parse group-keyword. * * @return group-keyword node */ private STNode parseGroupKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.GROUP_KEYWORD) { return consume(); } if (isKeywordMatch(SyntaxKind.GROUP_KEYWORD, token)) { return getGroupKeyword(consume()); } recover(token, ParserRuleContext.GROUP_KEYWORD); return parseGroupKeyword(); } private STNode getGroupKeyword(STToken token) { return STNodeFactory.createToken(SyntaxKind.GROUP_KEYWORD, token.leadingMinutiae(), token.trailingMinutiae(), token.diagnostics()); } /** * Parse order-keyword. * * @return Order-keyword node */ private STNode parseOrderKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.ORDER_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.ORDER_KEYWORD); return parseOrderKeyword(); } } /** * Parse by-keyword. * * @return By-keyword node */ private STNode parseByKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.BY_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.BY_KEYWORD); return parseByKeyword(); } } /** * Parse order by clause. 
* <p> * <code>order-by-clause := order by order-key-list * </code> * * @return Parsed node */ private STNode parseOrderByClause(boolean isRhsExpr) { STNode orderKeyword = parseOrderKeyword(); STNode byKeyword = parseByKeyword(); STNode orderKeys = parseOrderKeyList(isRhsExpr); byKeyword = cloneWithDiagnosticIfListEmpty(orderKeys, byKeyword, DiagnosticErrorCode.ERROR_MISSING_ORDER_KEY); return STNodeFactory.createOrderByClauseNode(orderKeyword, byKeyword, orderKeys); } /** * Parse grouping key. * <code>grouping-key-list := grouping-key ["," grouping-key]*</code> * * @return Parsed node */ private STNode parseGroupingKeyList(boolean isRhsExpr) { List<STNode> groupingKeys = new ArrayList<>(); STToken nextToken = peek(); if (isEndOfGroupByKeyListElement(nextToken)) { return STNodeFactory.createEmptyNodeList(); } STNode groupingKey = parseGroupingKey(isRhsExpr); groupingKeys.add(groupingKey); nextToken = peek(); STNode groupingKeyListMemberEnd; while (!isEndOfGroupByKeyListElement(nextToken)) { groupingKeyListMemberEnd = parseGroupingKeyListMemberEnd(); if (groupingKeyListMemberEnd == null) { break; } groupingKeys.add(groupingKeyListMemberEnd); groupingKey = parseGroupingKey(isRhsExpr); groupingKeys.add(groupingKey); nextToken = peek(); } return STNodeFactory.createNodeList(groupingKeys); } /** * Parse order key. 
* <p> * <code>order-key-list := order-key [, order-key]*</code> * * @return Parsed node */ private STNode parseOrderKeyList(boolean isRhsExpr) { startContext(ParserRuleContext.ORDER_KEY_LIST); List<STNode> orderKeys = new ArrayList<>(); STToken nextToken = peek(); if (isEndOfOrderKeys(nextToken)) { endContext(); return STNodeFactory.createEmptyNodeList(); } STNode orderKey = parseOrderKey(isRhsExpr); orderKeys.add(orderKey); nextToken = peek(); STNode orderKeyListMemberEnd; while (!isEndOfOrderKeys(nextToken)) { orderKeyListMemberEnd = parseOrderKeyListMemberEnd(); if (orderKeyListMemberEnd == null) { break; } orderKeys.add(orderKeyListMemberEnd); orderKey = parseOrderKey(isRhsExpr); orderKeys.add(orderKey); nextToken = peek(); } endContext(); return STNodeFactory.createNodeList(orderKeys); } private boolean isEndOfGroupByKeyListElement(STToken nextToken) { switch (nextToken.kind) { case COMMA_TOKEN: return false; case EOF_TOKEN: return true; default: return isQueryClauseStartToken(nextToken); } } private boolean isEndOfOrderKeys(STToken nextToken) { switch (nextToken.kind) { case COMMA_TOKEN: case ASCENDING_KEYWORD: case DESCENDING_KEYWORD: return false; case SEMICOLON_TOKEN: case EOF_TOKEN: return true; default: return isQueryClauseStartToken(nextToken); } } private boolean isQueryClauseStartToken(STToken nextToken) { switch (nextToken.kind) { case SELECT_KEYWORD: case LET_KEYWORD: case WHERE_KEYWORD: case OUTER_KEYWORD: case JOIN_KEYWORD: case ORDER_KEYWORD: case DO_KEYWORD: case FROM_KEYWORD: case LIMIT_KEYWORD: return true; case IDENTIFIER_TOKEN: return isGroupOrCollectKeyword(nextToken); default: return false; } } private STNode parseGroupingKeyListMemberEnd() { STToken nextToken = peek(); switch (nextToken.kind) { case COMMA_TOKEN: return consume(); case EOF_TOKEN: return null; default: if (isQueryClauseStartToken(nextToken)) { return null; } recover(peek(), ParserRuleContext.GROUPING_KEY_LIST_ELEMENT_END); return parseGroupingKeyListMemberEnd(); } } private 
STNode parseOrderKeyListMemberEnd() { STToken nextToken = peek(); switch (nextToken.kind) { case COMMA_TOKEN: return parseComma(); case EOF_TOKEN: return null; default: if (isQueryClauseStartToken(nextToken)) { return null; } recover(peek(), ParserRuleContext.ORDER_KEY_LIST_END); return parseOrderKeyListMemberEnd(); } } private STNode parseGroupingKeyVariableDeclaration(boolean isRhsExpr) { STNode groupingKeyElementTypeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER_IN_GROUPING_KEY); startContext(ParserRuleContext.BINDING_PATTERN_STARTING_IDENTIFIER); STNode groupingKeySimpleBP = createCaptureOrWildcardBP(parseVariableName()); endContext(); STNode equalsToken = parseAssignOp(); STNode groupingKeyExpression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false); return STNodeFactory.createGroupingKeyVarDeclarationNode(groupingKeyElementTypeDesc, groupingKeySimpleBP, equalsToken, groupingKeyExpression); } /** * Parse grouping key. * <code>grouping-key := variable-name | inferable-type-descriptor variable-name "=" expression</code> * * @return Parsed node */ private STNode parseGroupingKey(boolean isRhsExpr) { STToken nextToken = peek(); SyntaxKind nextTokenKind = nextToken.kind; if (nextTokenKind == SyntaxKind.IDENTIFIER_TOKEN && !isPossibleGroupingKeyVarDeclaration()) { return STNodeFactory.createSimpleNameReferenceNode(parseVariableName()); } else if (isTypeStartingToken(nextTokenKind, nextToken)) { return parseGroupingKeyVariableDeclaration(isRhsExpr); } recover(nextToken, ParserRuleContext.GROUPING_KEY_LIST_ELEMENT); return parseGroupingKey(isRhsExpr); } private boolean isPossibleGroupingKeyVarDeclaration() { SyntaxKind nextNextTokenKind = getNextNextToken().kind; return nextNextTokenKind == SyntaxKind.EQUAL_TOKEN || nextNextTokenKind == SyntaxKind.IDENTIFIER_TOKEN && peek(3).kind == SyntaxKind.EQUAL_TOKEN; } /** * Parse order key. 
* <p> * <code>order-key := expression (ascending | descending)?</code> * * @return Parsed node */ private STNode parseOrderKey(boolean isRhsExpr) { STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false); STNode orderDirection; STToken nextToken = peek(); switch (nextToken.kind) { case ASCENDING_KEYWORD: case DESCENDING_KEYWORD: orderDirection = consume(); break; default: orderDirection = STNodeFactory.createEmptyNode(); } return STNodeFactory.createOrderKeyNode(expression, orderDirection); } /** * Parse select clause. * <p> * <code>select-clause := select expression</code> * * @return Parsed node */ private STNode parseSelectClause(boolean isRhsExpr, boolean allowActions) { startContext(ParserRuleContext.SELECT_CLAUSE); STNode selectKeyword = parseSelectKeyword(); STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, allowActions); endContext(); return STNodeFactory.createSelectClauseNode(selectKeyword, expression); } /** * Parse select-keyword. * * @return Select-keyword node */ private STNode parseSelectKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.SELECT_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.SELECT_KEYWORD); return parseSelectKeyword(); } } /** * Parse on-conflict clause. * <p> * <code> * onConflictClause := on conflict expression * </code> * * @return On conflict clause node */ private STNode parseOnConflictClause(boolean isRhsExpr) { STToken nextToken = peek(); if (nextToken.kind != SyntaxKind.ON_KEYWORD && nextToken.kind != SyntaxKind.CONFLICT_KEYWORD) { return STNodeFactory.createEmptyNode(); } startContext(ParserRuleContext.ON_CONFLICT_CLAUSE); STNode onKeyword = parseOnKeyword(); STNode conflictKeyword = parseConflictKeyword(); endContext(); STNode expr = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false); return STNodeFactory.createOnConflictClauseNode(onKeyword, conflictKeyword, expr); } /** * Parse conflict keyword. 
* * @return Conflict keyword node */ private STNode parseConflictKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.CONFLICT_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.CONFLICT_KEYWORD); return parseConflictKeyword(); } } /** * Parse limit clause. * <p> * <code>limitClause := limit expression</code> * * @return Limit expression node */ private STNode parseLimitClause(boolean isRhsExpr) { STNode limitKeyword = parseLimitKeyword(); STNode expr = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false); return STNodeFactory.createLimitClauseNode(limitKeyword, expr); } /** * Parse join clause. * <p> * <code> * join-clause := (join-var-decl | outer-join-var-decl) in expression on-clause * <br/> * join-var-decl := join (typeName | var) bindingPattern * <br/> * outer-join-var-decl := outer join var binding-pattern * </code> * * @return Join clause */ private STNode parseJoinClause(boolean isRhsExpr) { startContext(ParserRuleContext.JOIN_CLAUSE); STNode outerKeyword; STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.OUTER_KEYWORD) { outerKeyword = consume(); } else { outerKeyword = STNodeFactory.createEmptyNode(); } STNode joinKeyword = parseJoinKeyword(); STNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.JOIN_CLAUSE); STNode inKeyword = parseInKeyword(); STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false); endContext(); STNode onCondition = parseOnClause(isRhsExpr); return STNodeFactory.createJoinClauseNode(outerKeyword, joinKeyword, typedBindingPattern, inKeyword, expression, onCondition); } /** * Parse on clause. 
* <p> * <code>on clause := `on` expression `equals` expression</code> * * @return On clause node */ private STNode parseOnClause(boolean isRhsExpr) { STToken nextToken = peek(); if (isQueryClauseStartToken(nextToken)) { return createMissingOnClauseNode(); } startContext(ParserRuleContext.ON_CLAUSE); STNode onKeyword = parseOnKeyword(); STNode lhsExpression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false); STNode equalsKeyword = parseEqualsKeyword(); endContext(); STNode rhsExpression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false); return STNodeFactory.createOnClauseNode(onKeyword, lhsExpression, equalsKeyword, rhsExpression); } private STNode createMissingOnClauseNode() { STNode onKeyword = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.ON_KEYWORD, DiagnosticErrorCode.ERROR_MISSING_ON_KEYWORD); STNode identifier = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN, DiagnosticErrorCode.ERROR_MISSING_IDENTIFIER); STNode equalsKeyword = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.EQUALS_KEYWORD, DiagnosticErrorCode.ERROR_MISSING_EQUALS_KEYWORD); STNode lhsExpression = STNodeFactory.createSimpleNameReferenceNode(identifier); STNode rhsExpression = STNodeFactory.createSimpleNameReferenceNode(identifier); return STNodeFactory.createOnClauseNode(onKeyword, lhsExpression, equalsKeyword, rhsExpression); } /** * Parse start action. 
* <p> * <code>start-action := [annots] start (function-call-expr|method-call-expr|remote-method-call-action)</code> * * @return Start action node */ private STNode parseStartAction(STNode annots) { STNode startKeyword = parseStartKeyword(); STNode expr = parseActionOrExpression(); switch (expr.kind) { case FUNCTION_CALL: case METHOD_CALL: case REMOTE_METHOD_CALL_ACTION: break; case SIMPLE_NAME_REFERENCE: case QUALIFIED_NAME_REFERENCE: case FIELD_ACCESS: case ASYNC_SEND_ACTION: expr = generateValidExprForStartAction(expr); break; default: startKeyword = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startKeyword, expr, DiagnosticErrorCode.ERROR_INVALID_EXPRESSION_IN_START_ACTION); STNode funcName = SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN); funcName = STNodeFactory.createSimpleNameReferenceNode(funcName); STNode openParenToken = SyntaxErrors.createMissingToken(SyntaxKind.OPEN_PAREN_TOKEN); STNode closeParenToken = SyntaxErrors.createMissingToken(SyntaxKind.CLOSE_PAREN_TOKEN); expr = STNodeFactory.createFunctionCallExpressionNode(funcName, openParenToken, STNodeFactory.createEmptyNodeList(), closeParenToken); break; } return STNodeFactory.createStartActionNode(getAnnotations(annots), startKeyword, expr); } private STNode generateValidExprForStartAction(STNode expr) { STNode openParenToken = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.OPEN_PAREN_TOKEN, DiagnosticErrorCode.ERROR_MISSING_OPEN_PAREN_TOKEN); STNode arguments = STNodeFactory.createEmptyNodeList(); STNode closeParenToken = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.CLOSE_PAREN_TOKEN, DiagnosticErrorCode.ERROR_MISSING_CLOSE_PAREN_TOKEN); switch (expr.kind) { case FIELD_ACCESS: STFieldAccessExpressionNode fieldAccessExpr = (STFieldAccessExpressionNode) expr; return STNodeFactory.createMethodCallExpressionNode(fieldAccessExpr.expression, fieldAccessExpr.dotToken, fieldAccessExpr.fieldName, openParenToken, arguments, closeParenToken); case ASYNC_SEND_ACTION: 
STAsyncSendActionNode asyncSendAction = (STAsyncSendActionNode) expr; return STNodeFactory.createRemoteMethodCallActionNode(asyncSendAction.expression, asyncSendAction.rightArrowToken, asyncSendAction.peerWorker, openParenToken, arguments, closeParenToken); default: return STNodeFactory.createFunctionCallExpressionNode(expr, openParenToken, arguments, closeParenToken); } } /** * Parse start keyword. * * @return Start keyword node */ private STNode parseStartKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.START_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.START_KEYWORD); return parseStartKeyword(); } } /** * Parse flush action. * <p> * <code>flush-action := flush [peer-worker]</code> * * @return flush action node */ private STNode parseFlushAction() { STNode flushKeyword = parseFlushKeyword(); STNode peerWorker = parseOptionalPeerWorkerName(); return STNodeFactory.createFlushActionNode(flushKeyword, peerWorker); } /** * Parse flush keyword. * * @return flush keyword node */ private STNode parseFlushKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.FLUSH_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.FLUSH_KEYWORD); return parseFlushKeyword(); } } /** * Parse peer worker. * <p> * <code>peer-worker := worker-name | function</code> * * @return peer worker name node */ private STNode parseOptionalPeerWorkerName() { STToken token = peek(); switch (token.kind) { case IDENTIFIER_TOKEN: case FUNCTION_KEYWORD: return STNodeFactory.createSimpleNameReferenceNode(consume()); default: return STNodeFactory.createEmptyNode(); } } /** * Parse intersection type descriptor. 
* <p> * intersection-type-descriptor := type-descriptor & type-descriptor * </p> * * @return Parsed node */ private STNode parseIntersectionTypeDescriptor(STNode leftTypeDesc, ParserRuleContext context, boolean isTypedBindingPattern) { STNode bitwiseAndToken = consume(); STNode rightTypeDesc = parseTypeDescriptorInternal(new ArrayList<>(), context, isTypedBindingPattern, false, TypePrecedence.INTERSECTION); return mergeTypesWithIntersection(leftTypeDesc, bitwiseAndToken, rightTypeDesc); } /** * Creates an intersection type descriptor after validating lhs and rhs types. * <p> * <i>Note: Since type precedence and associativity are not taken into account here, * this method should not be called directly when types are unknown. * <br/> * Call {@link * * @param leftTypeDesc lhs type * @param bitwiseAndToken bitwise-and token * @param rightTypeDesc rhs type * @return an IntersectionTypeDescriptorNode */ private STNode createIntersectionTypeDesc(STNode leftTypeDesc, STNode bitwiseAndToken, STNode rightTypeDesc) { leftTypeDesc = validateForUsageOfVar(leftTypeDesc); rightTypeDesc = validateForUsageOfVar(rightTypeDesc); return STNodeFactory.createIntersectionTypeDescriptorNode(leftTypeDesc, bitwiseAndToken, rightTypeDesc); } /** * Parse singleton type descriptor. 
* <p> * singleton-type-descriptor := simple-const-expr * simple-const-expr := * nil-literal * | boolean-literal * | [Sign] int-literal * | [Sign] floating-point-literal * | string-literal * | constant-reference-expr * </p> */ private STNode parseSingletonTypeDesc() { STNode simpleContExpr = parseSimpleConstExpr(); return STNodeFactory.createSingletonTypeDescriptorNode(simpleContExpr); } private STNode parseSignedIntOrFloat() { STNode operator = parseUnaryOperator(); STNode literal; STToken nextToken = peek(); switch (nextToken.kind) { case HEX_INTEGER_LITERAL_TOKEN: case DECIMAL_FLOATING_POINT_LITERAL_TOKEN: case HEX_FLOATING_POINT_LITERAL_TOKEN: literal = parseBasicLiteral(); break; default: literal = parseDecimalIntLiteral(ParserRuleContext.DECIMAL_INTEGER_LITERAL_TOKEN); literal = STNodeFactory.createBasicLiteralNode(SyntaxKind.NUMERIC_LITERAL, literal); } return STNodeFactory.createUnaryExpressionNode(operator, literal); } private static boolean isSingletonTypeDescStart(SyntaxKind tokenKind, STToken nextNextToken) { switch (tokenKind) { case STRING_LITERAL_TOKEN: case DECIMAL_INTEGER_LITERAL_TOKEN: case HEX_INTEGER_LITERAL_TOKEN: case DECIMAL_FLOATING_POINT_LITERAL_TOKEN: case HEX_FLOATING_POINT_LITERAL_TOKEN: case TRUE_KEYWORD: case FALSE_KEYWORD: case NULL_KEYWORD: return true; case PLUS_TOKEN: case MINUS_TOKEN: return isIntOrFloat(nextNextToken); default: return false; } } static boolean isIntOrFloat(STToken token) { switch (token.kind) { case DECIMAL_INTEGER_LITERAL_TOKEN: case HEX_INTEGER_LITERAL_TOKEN: case DECIMAL_FLOATING_POINT_LITERAL_TOKEN: case HEX_FLOATING_POINT_LITERAL_TOKEN: return true; default: return false; } } /** * Check whether the parser reached to a valid expression start. * * @param nextTokenKind Kind of the next immediate token. * @param nextTokenIndex Index to the next token. * @return <code>true</code> if this is a start of a valid expression. 
<code>false</code> otherwise */ private boolean isValidExpressionStart(SyntaxKind nextTokenKind, int nextTokenIndex) { nextTokenIndex++; switch (nextTokenKind) { case DECIMAL_INTEGER_LITERAL_TOKEN: case HEX_INTEGER_LITERAL_TOKEN: case STRING_LITERAL_TOKEN: case NULL_KEYWORD: case TRUE_KEYWORD: case FALSE_KEYWORD: case DECIMAL_FLOATING_POINT_LITERAL_TOKEN: case HEX_FLOATING_POINT_LITERAL_TOKEN: SyntaxKind nextNextTokenKind = peek(nextTokenIndex).kind; if (nextNextTokenKind == SyntaxKind.PIPE_TOKEN || nextNextTokenKind == SyntaxKind.BITWISE_AND_TOKEN) { nextTokenIndex++; return isValidExpressionStart(peek(nextTokenIndex).kind, nextTokenIndex); } return nextNextTokenKind == SyntaxKind.SEMICOLON_TOKEN || nextNextTokenKind == SyntaxKind.COMMA_TOKEN || nextNextTokenKind == SyntaxKind.CLOSE_BRACKET_TOKEN || isValidExprRhsStart(nextNextTokenKind, SyntaxKind.SIMPLE_NAME_REFERENCE); case IDENTIFIER_TOKEN: return isValidExprRhsStart(peek(nextTokenIndex).kind, SyntaxKind.SIMPLE_NAME_REFERENCE); case OPEN_PAREN_TOKEN: case CHECK_KEYWORD: case CHECKPANIC_KEYWORD: case OPEN_BRACE_TOKEN: case TYPEOF_KEYWORD: case NEGATION_TOKEN: case EXCLAMATION_MARK_TOKEN: case TRAP_KEYWORD: case OPEN_BRACKET_TOKEN: case LT_TOKEN: case FROM_KEYWORD: case LET_KEYWORD: case BACKTICK_TOKEN: case NEW_KEYWORD: case LEFT_ARROW_TOKEN: case FUNCTION_KEYWORD: case TRANSACTIONAL_KEYWORD: case ISOLATED_KEYWORD: case BASE16_KEYWORD: case BASE64_KEYWORD: return true; case PLUS_TOKEN: case MINUS_TOKEN: return isValidExpressionStart(peek(nextTokenIndex).kind, nextTokenIndex); case TABLE_KEYWORD: case MAP_KEYWORD: return peek(nextTokenIndex).kind == SyntaxKind.FROM_KEYWORD; case STREAM_KEYWORD: STToken nextNextToken = peek(nextTokenIndex); return nextNextToken.kind == SyntaxKind.KEY_KEYWORD || nextNextToken.kind == SyntaxKind.OPEN_BRACKET_TOKEN || nextNextToken.kind == SyntaxKind.FROM_KEYWORD; case ERROR_KEYWORD: return peek(nextTokenIndex).kind == SyntaxKind.OPEN_PAREN_TOKEN; case XML_KEYWORD: case 
STRING_KEYWORD: case RE_KEYWORD: return peek(nextTokenIndex).kind == SyntaxKind.BACKTICK_TOKEN; case START_KEYWORD: case FLUSH_KEYWORD: case WAIT_KEYWORD: default: return false; } } /** * Parse sync send action. * <p> * <code>sync-send-action := expression ->> peer-worker</code> * * @param expression LHS expression of the sync send action * @return Sync send action node */ private STNode parseSyncSendAction(STNode expression) { STNode syncSendToken = parseSyncSendToken(); STNode peerWorker = parsePeerWorkerName(); return STNodeFactory.createSyncSendActionNode(expression, syncSendToken, peerWorker); } /** * Parse peer worker. * <p> * <code>peer-worker := worker-name | function</code> * * @return peer worker name node */ private STNode parsePeerWorkerName() { STToken token = peek(); switch (token.kind) { case IDENTIFIER_TOKEN: case FUNCTION_KEYWORD: return STNodeFactory.createSimpleNameReferenceNode(consume()); default: recover(token, ParserRuleContext.PEER_WORKER_NAME); return parsePeerWorkerName(); } } /** * Parse sync send token. * <p> * <code>sync-send-token := ->> </code> * * @return sync send token */ private STNode parseSyncSendToken() { STToken token = peek(); if (token.kind == SyntaxKind.SYNC_SEND_TOKEN) { return consume(); } else { recover(token, ParserRuleContext.SYNC_SEND_TOKEN); return parseSyncSendToken(); } } /** * Parse receive action. 
* <p> * <code>receive-action := single-receive-action | multiple-receive-action | alternate-receive-action</code> * <p><code> * single-receive-action := <- peer-worker * <br></br> * multiple-receive-action := <- { receive-field (, receive-field)* } * <br></br> * alternate-receive-action := <- peer-worker (| peer-worker)* * </code> * * @return Receive action */ private STNode parseReceiveAction() { STNode leftArrow = parseLeftArrowToken(); STNode receiveWorkers = parseReceiveWorkers(); return STNodeFactory.createReceiveActionNode(leftArrow, receiveWorkers); } private STNode parseReceiveWorkers() { switch (peek().kind) { case FUNCTION_KEYWORD: case IDENTIFIER_TOKEN: return parseSingleOrAlternateReceiveWorkers(); case OPEN_BRACE_TOKEN: return parseMultipleReceiveWorkers(); default: recover(peek(), ParserRuleContext.RECEIVE_WORKERS); return parseReceiveWorkers(); } } private STNode parseSingleOrAlternateReceiveWorkers() { startContext(ParserRuleContext.SINGLE_OR_ALTERNATE_WORKER); List<STNode> workers = new ArrayList<>(); STNode peerWorker = parsePeerWorkerName(); workers.add(peerWorker); STToken nextToken = peek(); if (nextToken.kind != SyntaxKind.PIPE_TOKEN) { endContext(); return peerWorker; } while (nextToken.kind == SyntaxKind.PIPE_TOKEN) { STNode pipeToken = consume(); workers.add(pipeToken); peerWorker = parsePeerWorkerName(); workers.add(peerWorker); nextToken = peek(); } endContext(); return STNodeFactory.createAlternateReceiveNode(STNodeFactory.createNodeList(workers)); } /** * Parse multiple worker receivers. 
* <p> * <code>{ receive-field (, receive-field)* }</code> * * @return Multiple worker receiver node */ private STNode parseMultipleReceiveWorkers() { startContext(ParserRuleContext.MULTI_RECEIVE_WORKERS); STNode openBrace = parseOpenBrace(); STNode receiveFields = parseReceiveFields(); STNode closeBrace = parseCloseBrace(); endContext(); openBrace = cloneWithDiagnosticIfListEmpty(receiveFields, openBrace, DiagnosticErrorCode.ERROR_MISSING_RECEIVE_FIELD_IN_RECEIVE_ACTION); return STNodeFactory.createReceiveFieldsNode(openBrace, receiveFields, closeBrace); } private STNode parseReceiveFields() { List<STNode> receiveFields = new ArrayList<>(); STToken nextToken = peek(); if (isEndOfReceiveFields(nextToken.kind)) { return STNodeFactory.createEmptyNodeList(); } STNode receiveField = parseReceiveField(); receiveFields.add(receiveField); nextToken = peek(); STNode recieveFieldEnd; while (!isEndOfReceiveFields(nextToken.kind)) { recieveFieldEnd = parseReceiveFieldEnd(); if (recieveFieldEnd == null) { break; } receiveFields.add(recieveFieldEnd); receiveField = parseReceiveField(); receiveFields.add(receiveField); nextToken = peek(); } return STNodeFactory.createNodeList(receiveFields); } private boolean isEndOfReceiveFields(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case EOF_TOKEN: case CLOSE_BRACE_TOKEN: return true; default: return false; } } private STNode parseReceiveFieldEnd() { switch (peek().kind) { case COMMA_TOKEN: return parseComma(); case CLOSE_BRACE_TOKEN: return null; default: recover(peek(), ParserRuleContext.RECEIVE_FIELD_END); return parseReceiveFieldEnd(); } } /** * Parse receive field. 
* <p> * <code>receive-field := peer-worker | field-name : peer-worker</code> * * @return Receiver field node */ private STNode parseReceiveField() { switch (peek().kind) { case FUNCTION_KEYWORD: STNode functionKeyword = consume(); return STNodeFactory.createSimpleNameReferenceNode(functionKeyword); case IDENTIFIER_TOKEN: STNode identifier = parseIdentifier(ParserRuleContext.RECEIVE_FIELD_NAME); return createReceiveField(identifier); default: recover(peek(), ParserRuleContext.RECEIVE_FIELD); return parseReceiveField(); } } private STNode createReceiveField(STNode identifier) { if (peek().kind != SyntaxKind.COLON_TOKEN) { return STNodeFactory.createSimpleNameReferenceNode(identifier); } identifier = STNodeFactory.createSimpleNameReferenceNode(identifier); STNode colon = parseColon(); STNode peerWorker = parsePeerWorkerName(); return STNodeFactory.createReceiveFieldNode(identifier, colon, peerWorker); } /** * Parse left arrow (<-) token. * * @return left arrow token */ private STNode parseLeftArrowToken() { STToken token = peek(); if (token.kind == SyntaxKind.LEFT_ARROW_TOKEN) { return consume(); } else { recover(token, ParserRuleContext.LEFT_ARROW_TOKEN); return parseLeftArrowToken(); } } /** * Parse signed right shift token (>>). * This method should only be called by seeing a `DOUBLE_GT_TOKEN` or * by seeing a `GT_TOKEN` followed by a `GT_TOKEN` * * @return Parsed node */ private STNode parseSignedRightShiftToken() { STNode firstToken = consume(); if (firstToken.kind == SyntaxKind.DOUBLE_GT_TOKEN) { return firstToken; } STToken endLGToken = consume(); STNode doubleGTToken = STNodeFactory.createToken(SyntaxKind.DOUBLE_GT_TOKEN, firstToken.leadingMinutiae(), endLGToken.trailingMinutiae()); if (hasTrailingMinutiae(firstToken)) { doubleGTToken = SyntaxErrors.addDiagnostic(doubleGTToken, DiagnosticErrorCode.ERROR_NO_WHITESPACES_ALLOWED_IN_RIGHT_SHIFT_OP); } return doubleGTToken; } /** * Parse unsigned right shift token (>>>). 
* This method should only be called by seeing a `TRIPPLE_GT_TOKEN` or * by seeing a `GT_TOKEN` followed by two `GT_TOKEN`s * * @return Parsed node */ private STNode parseUnsignedRightShiftToken() { STNode firstToken = consume(); if (firstToken.kind == SyntaxKind.TRIPPLE_GT_TOKEN) { return firstToken; } STNode middleGTToken = consume(); STNode endLGToken = consume(); STNode unsignedRightShiftToken = STNodeFactory.createToken(SyntaxKind.TRIPPLE_GT_TOKEN, firstToken.leadingMinutiae(), endLGToken.trailingMinutiae()); boolean validOpenGTToken = !hasTrailingMinutiae(firstToken); boolean validMiddleGTToken = !hasTrailingMinutiae(middleGTToken); if (validOpenGTToken && validMiddleGTToken) { return unsignedRightShiftToken; } unsignedRightShiftToken = SyntaxErrors.addDiagnostic(unsignedRightShiftToken, DiagnosticErrorCode.ERROR_NO_WHITESPACES_ALLOWED_IN_UNSIGNED_RIGHT_SHIFT_OP); return unsignedRightShiftToken; } /** * Parse wait action. * <p> * <code>wait-action := single-wait-action | multiple-wait-action | alternate-wait-action </code> * * @return Wait action node */ private STNode parseWaitAction() { STNode waitKeyword = parseWaitKeyword(); if (peek().kind == SyntaxKind.OPEN_BRACE_TOKEN) { return parseMultiWaitAction(waitKeyword); } return parseSingleOrAlternateWaitAction(waitKeyword); } /** * Parse wait keyword. * * @return wait keyword */ private STNode parseWaitKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.WAIT_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.WAIT_KEYWORD); return parseWaitKeyword(); } } /** * Parse single or alternate wait actions. 
 * <p>
 * <code>
 * alternate-or-single-wait-action := wait wait-future-expr (| wait-future-expr)+
 * <br/>
 * wait-future-expr := expression but not mapping-constructor-expr
 * </code>
 *
 * @param waitKeyword wait keyword
 * @return Single or alternate wait action node
 */
private STNode parseSingleOrAlternateWaitAction(STNode waitKeyword) {
    startContext(ParserRuleContext.ALTERNATE_WAIT_EXPRS);
    STToken nextToken = peek();

    // No wait-future-expr at all: synthesize a missing identifier with a diagnostic.
    if (isEndOfWaitFutureExprList(nextToken.kind)) {
        endContext();
        STNode waitFutureExprs = STNodeFactory
                .createSimpleNameReferenceNode(STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN));
        waitFutureExprs = SyntaxErrors.addDiagnostic(waitFutureExprs,
                DiagnosticErrorCode.ERROR_MISSING_WAIT_FUTURE_EXPRESSION);
        return STNodeFactory.createWaitActionNode(waitKeyword, waitFutureExprs);
    }

    // Parse the first wait-future-expr, then any pipe-separated alternates.
    List<STNode> waitFutureExprList = new ArrayList<>();
    STNode waitField = parseWaitFutureExpr();
    waitFutureExprList.add(waitField);

    nextToken = peek();
    STNode waitFutureExprEnd;
    while (!isEndOfWaitFutureExprList(nextToken.kind)) {
        waitFutureExprEnd = parseWaitFutureExprEnd();
        if (waitFutureExprEnd == null) {
            break;
        }

        waitFutureExprList.add(waitFutureExprEnd);
        waitField = parseWaitFutureExpr();
        waitFutureExprList.add(waitField);
        nextToken = peek();
    }

    endContext();
    // NOTE(review): only the first wait-future-expr is attached to the wait-action node;
    // the pipe separators and subsequent alternates parsed above are discarded here.
    // Presumably intentional (the node carries a single expression), but confirm against
    // STWaitActionNode's contract / the tree-building stage.
    return STNodeFactory.createWaitActionNode(waitKeyword, waitFutureExprList.get(0));
}

/**
 * Check whether the given token kind terminates a wait-future-expr list.
 *
 * @param nextTokenKind Token kind to check
 * @return <code>true</code> if the list has ended. <code>false</code> otherwise
 */
private boolean isEndOfWaitFutureExprList(SyntaxKind nextTokenKind) {
    switch (nextTokenKind) {
        case EOF_TOKEN:
        case CLOSE_BRACE_TOKEN:
        case SEMICOLON_TOKEN:
        case OPEN_BRACE_TOKEN:
            return true;
        case PIPE_TOKEN:
        default:
            return false;
    }
}

/**
 * Parse a wait-future-expr. Mapping constructors and actions are not valid here;
 * such results are returned with a diagnostic attached rather than rejected.
 *
 * @return Wait future expression node
 */
private STNode parseWaitFutureExpr() {
    STNode waitFutureExpr = parseActionOrExpression();
    if (waitFutureExpr.kind == SyntaxKind.MAPPING_CONSTRUCTOR) {
        waitFutureExpr = SyntaxErrors.addDiagnostic(waitFutureExpr,
                DiagnosticErrorCode.ERROR_MAPPING_CONSTRUCTOR_EXPR_AS_A_WAIT_EXPR);
    } else if (isAction(waitFutureExpr)) {
        waitFutureExpr = SyntaxErrors.addDiagnostic(waitFutureExpr,
DiagnosticErrorCode.ERROR_ACTION_AS_A_WAIT_EXPR); } return waitFutureExpr; } private STNode parseWaitFutureExprEnd() { STToken nextToken = peek(); switch (nextToken.kind) { case PIPE_TOKEN: return parsePipeToken(); default: if (isEndOfWaitFutureExprList(nextToken.kind) || !isValidExpressionStart(nextToken.kind, 1)) { return null; } recover(peek(), ParserRuleContext.WAIT_FUTURE_EXPR_END); return parseWaitFutureExprEnd(); } } /** * Parse multiple wait action. * <p> * <code>multiple-wait-action := wait { wait-field (, wait-field)* }</code> * * @param waitKeyword Wait keyword * @return Multiple wait action node */ private STNode parseMultiWaitAction(STNode waitKeyword) { startContext(ParserRuleContext.MULTI_WAIT_FIELDS); STNode openBrace = parseOpenBrace(); STNode waitFields = parseWaitFields(); STNode closeBrace = parseCloseBrace(); endContext(); openBrace = cloneWithDiagnosticIfListEmpty(waitFields, openBrace, DiagnosticErrorCode.ERROR_MISSING_WAIT_FIELD_IN_WAIT_ACTION); STNode waitFieldsNode = STNodeFactory.createWaitFieldsListNode(openBrace, waitFields, closeBrace); return STNodeFactory.createWaitActionNode(waitKeyword, waitFieldsNode); } private STNode parseWaitFields() { List<STNode> waitFields = new ArrayList<>(); STToken nextToken = peek(); if (isEndOfWaitFields(nextToken.kind)) { return STNodeFactory.createEmptyNodeList(); } STNode waitField = parseWaitField(); waitFields.add(waitField); nextToken = peek(); STNode waitFieldEnd; while (!isEndOfWaitFields(nextToken.kind)) { waitFieldEnd = parseWaitFieldEnd(); if (waitFieldEnd == null) { break; } waitFields.add(waitFieldEnd); waitField = parseWaitField(); waitFields.add(waitField); nextToken = peek(); } return STNodeFactory.createNodeList(waitFields); } private boolean isEndOfWaitFields(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case EOF_TOKEN: case CLOSE_BRACE_TOKEN: return true; default: return false; } } private STNode parseWaitFieldEnd() { switch (peek().kind) { case COMMA_TOKEN: return 
parseComma(); case CLOSE_BRACE_TOKEN: return null; default: recover(peek(), ParserRuleContext.WAIT_FIELD_END); return parseWaitFieldEnd(); } } /** * Parse wait field. * <p> * <code>wait-field := variable-name | field-name : wait-future-expr</code> * * @return Receiver field node */ private STNode parseWaitField() { switch (peek().kind) { case IDENTIFIER_TOKEN: STNode identifier = parseIdentifier(ParserRuleContext.WAIT_FIELD_NAME); identifier = STNodeFactory.createSimpleNameReferenceNode(identifier); return createQualifiedWaitField(identifier); default: recover(peek(), ParserRuleContext.WAIT_FIELD_NAME); return parseWaitField(); } } private STNode createQualifiedWaitField(STNode identifier) { if (peek().kind != SyntaxKind.COLON_TOKEN) { return identifier; } STNode colon = parseColon(); STNode waitFutureExpr = parseWaitFutureExpr(); return STNodeFactory.createWaitFieldNode(identifier, colon, waitFutureExpr); } /** * Parse annot access expression. * <p> * <code> * annot-access-expr := expression .@ annot-tag-reference * <br/> * annot-tag-reference := qualified-identifier | identifier * </code> * * @param lhsExpr Preceding expression of the annot access access * @return Parsed node */ private STNode parseAnnotAccessExpression(STNode lhsExpr, boolean isInConditionalExpr) { STNode annotAccessToken = parseAnnotChainingToken(); STNode annotTagReference = parseFieldAccessIdentifier(isInConditionalExpr); return STNodeFactory.createAnnotAccessExpressionNode(lhsExpr, annotAccessToken, annotTagReference); } /** * Parse annot-chaining-token. * * @return Parsed node */ private STNode parseAnnotChainingToken() { STToken token = peek(); if (token.kind == SyntaxKind.ANNOT_CHAINING_TOKEN) { return consume(); } else { recover(token, ParserRuleContext.ANNOT_CHAINING_TOKEN); return parseAnnotChainingToken(); } } /** * Parse field access identifier. 
* <p> * <code>field-access-identifier := qualified-identifier | identifier</code> * * @return Parsed node */ private STNode parseFieldAccessIdentifier(boolean isInConditionalExpr) { STToken nextToken = peek(); if (!isPredeclaredIdentifier(nextToken.kind)) { STNode identifier = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN, DiagnosticErrorCode.ERROR_MISSING_IDENTIFIER); return parseQualifiedIdentifier(identifier, isInConditionalExpr); } return parseQualifiedIdentifier(ParserRuleContext.FIELD_ACCESS_IDENTIFIER, isInConditionalExpr); } /** * Parse query action. * <p> * <code>query-action := query-pipeline do-clause * <br/> * do-clause := do block-stmt * </code> * * @param queryConstructType Query construct type. This is only for validation * @param queryPipeline Query pipeline * @param selectClause Select clause if any This is only for validation. * @return Query action node */ private STNode parseQueryAction(STNode queryConstructType, STNode queryPipeline, STNode selectClause, STNode collectClause) { if (queryConstructType != null) { queryPipeline = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(queryPipeline, queryConstructType, DiagnosticErrorCode.ERROR_QUERY_CONSTRUCT_TYPE_IN_QUERY_ACTION); } if (selectClause != null) { queryPipeline = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(queryPipeline, selectClause, DiagnosticErrorCode.ERROR_SELECT_CLAUSE_IN_QUERY_ACTION); } if (collectClause != null) { queryPipeline = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(queryPipeline, collectClause, DiagnosticErrorCode.ERROR_COLLECT_CLAUSE_IN_QUERY_ACTION); } startContext(ParserRuleContext.DO_CLAUSE); STNode doKeyword = parseDoKeyword(); STNode blockStmt = parseBlockNode(); endContext(); return STNodeFactory.createQueryActionNode(queryPipeline, doKeyword, blockStmt); } /** * Parse 'do' keyword. 
* * @return do keyword node */ private STNode parseDoKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.DO_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.DO_KEYWORD); return parseDoKeyword(); } } /** * Parse optional field access or xml optional attribute access expression. * <p> * <code> * optional-field-access-expr := expression ?. field-name * <br/> * xml-optional-attribute-access-expr := expression ?. xml-attribute-name * <br/> * xml-attribute-name := xml-qualified-name | qualified-identifier | identifier * <br/> * xml-qualified-name := xml-namespace-prefix : identifier * <br/> * xml-namespace-prefix := identifier * </code> * * @param lhsExpr Preceding expression of the optional access * @return Parsed node */ private STNode parseOptionalFieldAccessExpression(STNode lhsExpr, boolean isInConditionalExpr) { STNode optionalFieldAccessToken = parseOptionalChainingToken(); STNode fieldName = parseFieldAccessIdentifier(isInConditionalExpr); return STNodeFactory.createOptionalFieldAccessExpressionNode(lhsExpr, optionalFieldAccessToken, fieldName); } /** * Parse optional chaining token. * * @return parsed node */ private STNode parseOptionalChainingToken() { STToken token = peek(); if (token.kind == SyntaxKind.OPTIONAL_CHAINING_TOKEN) { return consume(); } else { recover(token, ParserRuleContext.OPTIONAL_CHAINING_TOKEN); return parseOptionalChainingToken(); } } /** * Parse conditional expression. * <p> * <code>conditional-expr := expression ? 
expression : expression</code> * * @param lhsExpr Preceding expression of the question mark * @param isInConditionalExpr whether calling from a conditional-expr * @return Parsed node */ private STNode parseConditionalExpression(STNode lhsExpr, boolean isInConditionalExpr) { startContext(ParserRuleContext.CONDITIONAL_EXPRESSION); STNode questionMark = parseQuestionMark(); STNode middleExpr = parseExpression(OperatorPrecedence.ANON_FUNC_OR_LET, true, false, true); if (peek().kind != SyntaxKind.COLON_TOKEN) { if (middleExpr.kind == SyntaxKind.CONDITIONAL_EXPRESSION) { STConditionalExpressionNode innerConditionalExpr = (STConditionalExpressionNode) middleExpr; STNode innerMiddleExpr = innerConditionalExpr.middleExpression; STNode rightMostQNameRef = ConditionalExprResolver.getQualifiedNameRefNode(innerMiddleExpr, false); if (rightMostQNameRef != null) { middleExpr = generateConditionalExprForRightMost(innerConditionalExpr.lhsExpression, innerConditionalExpr.questionMarkToken, innerMiddleExpr, rightMostQNameRef); endContext(); return STNodeFactory.createConditionalExpressionNode(lhsExpr, questionMark, middleExpr, innerConditionalExpr.colonToken, innerConditionalExpr.endExpression); } STNode leftMostQNameRef = ConditionalExprResolver.getQualifiedNameRefNode(innerMiddleExpr, true); if (leftMostQNameRef != null) { middleExpr = generateConditionalExprForLeftMost(innerConditionalExpr.lhsExpression, innerConditionalExpr.questionMarkToken, innerMiddleExpr, leftMostQNameRef); endContext(); return STNodeFactory.createConditionalExpressionNode(lhsExpr, questionMark, middleExpr, innerConditionalExpr.colonToken, innerConditionalExpr.endExpression); } } STNode rightMostQNameRef = ConditionalExprResolver.getQualifiedNameRefNode(middleExpr, false); if (rightMostQNameRef != null) { endContext(); return generateConditionalExprForRightMost(lhsExpr, questionMark, middleExpr, rightMostQNameRef); } STNode leftMostQNameRef = ConditionalExprResolver.getQualifiedNameRefNode(middleExpr, true); 
if (leftMostQNameRef != null) { endContext(); return generateConditionalExprForLeftMost(lhsExpr, questionMark, middleExpr, leftMostQNameRef); } } return parseConditionalExprRhs(lhsExpr, questionMark, middleExpr, isInConditionalExpr); } private STNode generateConditionalExprForRightMost(STNode lhsExpr, STNode questionMark, STNode middleExpr, STNode rightMostQualifiedNameRef) { STQualifiedNameReferenceNode qualifiedNameRef = (STQualifiedNameReferenceNode) rightMostQualifiedNameRef; STNode endExpr = STNodeFactory.createSimpleNameReferenceNode(qualifiedNameRef.identifier); STNode simpleNameRef = ConditionalExprResolver.getSimpleNameRefNode(qualifiedNameRef.modulePrefix); middleExpr = middleExpr.replace(rightMostQualifiedNameRef, simpleNameRef); return STNodeFactory.createConditionalExpressionNode(lhsExpr, questionMark, middleExpr, qualifiedNameRef.colon, endExpr); } private STNode generateConditionalExprForLeftMost(STNode lhsExpr, STNode questionMark, STNode middleExpr, STNode leftMostQualifiedNameRef) { STQualifiedNameReferenceNode qualifiedNameRef = (STQualifiedNameReferenceNode) leftMostQualifiedNameRef; STNode simpleNameRef = STNodeFactory.createSimpleNameReferenceNode(qualifiedNameRef.identifier); STNode endExpr = middleExpr.replace(leftMostQualifiedNameRef, simpleNameRef); middleExpr = ConditionalExprResolver.getSimpleNameRefNode(qualifiedNameRef.modulePrefix); return STNodeFactory.createConditionalExpressionNode(lhsExpr, questionMark, middleExpr, qualifiedNameRef.colon, endExpr); } private STNode parseConditionalExprRhs(STNode lhsExpr, STNode questionMark, STNode middleExpr, boolean isInConditionalExpr) { STNode colon = parseColon(); endContext(); STNode endExpr = parseExpression(OperatorPrecedence.ANON_FUNC_OR_LET, true, false, isInConditionalExpr); return STNodeFactory.createConditionalExpressionNode(lhsExpr, questionMark, middleExpr, colon, endExpr); } /** * Parse enum declaration. 
* <p> * module-enum-decl := * metadata * [public] enum identifier { enum-member (, enum-member)* } [;] * enum-member := metadata identifier [= const-expr] * </p> * * @param metadata * @param qualifier * @return Parsed enum node. */ private STNode parseEnumDeclaration(STNode metadata, STNode qualifier) { startContext(ParserRuleContext.MODULE_ENUM_DECLARATION); STNode enumKeywordToken = parseEnumKeyword(); STNode identifier = parseIdentifier(ParserRuleContext.MODULE_ENUM_NAME); STNode openBraceToken = parseOpenBrace(); STNode enumMemberList = parseEnumMemberList(); STNode closeBraceToken = parseCloseBrace(); STNode semicolon = parseOptionalSemicolon(); endContext(); openBraceToken = cloneWithDiagnosticIfListEmpty(enumMemberList, openBraceToken, DiagnosticErrorCode.ERROR_MISSING_ENUM_MEMBER); return STNodeFactory.createEnumDeclarationNode(metadata, qualifier, enumKeywordToken, identifier, openBraceToken, enumMemberList, closeBraceToken, semicolon); } /** * Parse 'enum' keyword. * * @return enum keyword node */ private STNode parseEnumKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.ENUM_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.ENUM_KEYWORD); return parseEnumKeyword(); } } /** * Parse enum member list. * <p> * enum-member := metadata identifier [= const-expr] * </p> * * @return enum member list node. 
*/ private STNode parseEnumMemberList() { startContext(ParserRuleContext.ENUM_MEMBER_LIST); if (peek().kind == SyntaxKind.CLOSE_BRACE_TOKEN) { return STNodeFactory.createEmptyNodeList(); } List<STNode> enumMemberList = new ArrayList<>(); STNode enumMember = parseEnumMember(); STNode enumMemberRhs; while (peek().kind != SyntaxKind.CLOSE_BRACE_TOKEN) { enumMemberRhs = parseEnumMemberEnd(); if (enumMemberRhs == null) { break; } enumMemberList.add(enumMember); enumMemberList.add(enumMemberRhs); enumMember = parseEnumMember(); } enumMemberList.add(enumMember); endContext(); return STNodeFactory.createNodeList(enumMemberList); } /** * Parse enum member. * <p> * enum-member := metadata identifier [= const-expr] * </p> * * @return Parsed enum member node. */ private STNode parseEnumMember() { STNode metadata; switch (peek().kind) { case DOCUMENTATION_STRING: case AT_TOKEN: metadata = parseMetaData(); break; default: metadata = STNodeFactory.createEmptyNode(); } STNode identifierNode = parseIdentifier(ParserRuleContext.ENUM_MEMBER_NAME); return parseEnumMemberRhs(metadata, identifierNode); } private STNode parseEnumMemberRhs(STNode metadata, STNode identifierNode) { STNode equalToken, constExprNode; switch (peek().kind) { case EQUAL_TOKEN: equalToken = parseAssignOp(); constExprNode = parseExpression(); break; case COMMA_TOKEN: case CLOSE_BRACE_TOKEN: equalToken = STNodeFactory.createEmptyNode(); constExprNode = STNodeFactory.createEmptyNode(); break; default: recover(peek(), ParserRuleContext.ENUM_MEMBER_RHS); return parseEnumMemberRhs(metadata, identifierNode); } return STNodeFactory.createEnumMemberNode(metadata, identifierNode, equalToken, constExprNode); } private STNode parseEnumMemberEnd() { switch (peek().kind) { case COMMA_TOKEN: return parseComma(); case CLOSE_BRACE_TOKEN: return null; default: recover(peek(), ParserRuleContext.ENUM_MEMBER_END); return parseEnumMemberEnd(); } } private STNode parseTransactionStmtOrVarDecl(STNode annots, List<STNode> qualifiers, 
STToken transactionKeyword) { switch (peek().kind) { case OPEN_BRACE_TOKEN: reportInvalidStatementAnnots(annots, qualifiers); reportInvalidQualifierList(qualifiers); return parseTransactionStatement(transactionKeyword); case COLON_TOKEN: if (getNextNextToken().kind == SyntaxKind.IDENTIFIER_TOKEN) { STNode typeDesc = parseQualifiedIdentifierWithPredeclPrefix(transactionKeyword, false); return parseVarDeclTypeDescRhs(typeDesc, annots, qualifiers, true, false); } default: Solution solution = recover(peek(), ParserRuleContext.TRANSACTION_STMT_RHS_OR_TYPE_REF); if (solution.action == Action.KEEP || (solution.action == Action.INSERT && solution.tokenKind == SyntaxKind.COLON_TOKEN)) { STNode typeDesc = parseQualifiedIdentifierWithPredeclPrefix(transactionKeyword, false); return parseVarDeclTypeDescRhs(typeDesc, annots, qualifiers, true, false); } return parseTransactionStmtOrVarDecl(annots, qualifiers, transactionKeyword); } } /** * Parse transaction statement. * <p> * <code>transaction-stmt := `transaction` block-stmt [on-fail-clause]</code> * * @return Transaction statement node */ private STNode parseTransactionStatement(STNode transactionKeyword) { startContext(ParserRuleContext.TRANSACTION_STMT); STNode blockStmt = parseBlockNode(); endContext(); STNode onFailClause = parseOptionalOnFailClause(); return STNodeFactory.createTransactionStatementNode(transactionKeyword, blockStmt, onFailClause); } /** * Parse commit action. * <p> * <code>commit-action := "commit"</code> * * @return Commit action node */ private STNode parseCommitAction() { STNode commitKeyword = parseCommitKeyword(); return STNodeFactory.createCommitActionNode(commitKeyword); } /** * Parse commit keyword. * * @return parsed node */ private STNode parseCommitKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.COMMIT_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.COMMIT_KEYWORD); return parseCommitKeyword(); } } /** * Parse retry statement. 
 * <p>
 * <code>
 * retry-stmt := `retry` retry-spec block-stmt [on-fail-clause]
 * <br/>
 * retry-spec := [type-parameter] [ `(` arg-list `)` ]
 * </code>
 *
 * @return Retry statement node
 */
private STNode parseRetryStatement() {
    startContext(ParserRuleContext.RETRY_STMT);
    STNode retryKeyword = parseRetryKeyword();
    // The RETRY_STMT context opened above is ended inside parseRetryTypeParamRhs(),
    // once the retry body has been parsed.
    STNode retryStmt = parseRetryKeywordRhs(retryKeyword);
    return retryStmt;
}

/**
 * Parse what follows the <code>retry</code> keyword: an optional type parameter
 * and the rest of the retry-spec.
 *
 * @param retryKeyword Already-parsed <code>retry</code> keyword
 * @return Retry statement node
 */
private STNode parseRetryKeywordRhs(STNode retryKeyword) {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case LT_TOKEN:
            // retry<RetryManagerType> ...
            STNode typeParam = parseTypeParameter();
            return parseRetryTypeParamRhs(retryKeyword, typeParam);
        case OPEN_PAREN_TOKEN:
        case OPEN_BRACE_TOKEN:
        case TRANSACTION_KEYWORD:
            // No type parameter: next is the arg list, the block, or `transaction`.
            typeParam = STNodeFactory.createEmptyNode();
            return parseRetryTypeParamRhs(retryKeyword, typeParam);
        default:
            recover(peek(), ParserRuleContext.RETRY_KEYWORD_RHS);
            return parseRetryKeywordRhs(retryKeyword);
    }
}

/**
 * Parse the optional parenthesized arg list of a retry-spec, then the retry body
 * and the optional on-fail clause.
 *
 * @param retryKeyword <code>retry</code> keyword
 * @param typeParam    Optional type parameter (empty node when absent)
 * @return Retry statement node
 */
private STNode parseRetryTypeParamRhs(STNode retryKeyword, STNode typeParam) {
    STNode args;
    switch (peek().kind) {
        case OPEN_PAREN_TOKEN:
            args = parseParenthesizedArgList();
            break;
        case OPEN_BRACE_TOKEN:
        case TRANSACTION_KEYWORD:
            args = STNodeFactory.createEmptyNode();
            break;
        default:
            recover(peek(), ParserRuleContext.RETRY_TYPE_PARAM_RHS);
            return parseRetryTypeParamRhs(retryKeyword, typeParam);
    }

    STNode blockStmt = parseRetryBody();
    endContext(); // closes the RETRY_STMT context opened in parseRetryStatement()
    STNode onFailClause = parseOptionalOnFailClause();
    return STNodeFactory.createRetryStatementNode(retryKeyword, typeParam, args, blockStmt, onFailClause);
}

/**
 * Parse the body of a retry statement: either a block statement or a transaction
 * statement (for <code>retry transaction { ... }</code>).
 *
 * @return Retry body node
 */
private STNode parseRetryBody() {
    switch (peek().kind) {
        case OPEN_BRACE_TOKEN:
            return parseBlockNode();
        case TRANSACTION_KEYWORD:
            return parseTransactionStatement(consume());
        default:
            recover(peek(), ParserRuleContext.RETRY_BODY);
            return parseRetryBody();
    }
}

/**
 * Parse optional on fail clause.
 *
 * @return Parsed node
 */
private STNode parseOptionalOnFailClause() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.ON_KEYWORD) {
        return parseOnFailClause();
    }

    // No `on` keyword: the clause is absent only if the enclosing compound
    // statement has genuinely ended; otherwise recover and retry.
    if (isEndOfRegularCompoundStmt(nextToken.kind)) {
        return STNodeFactory.createEmptyNode();
    }

    recover(nextToken, ParserRuleContext.REGULAR_COMPOUND_STMT_RHS);
    return parseOptionalOnFailClause();
}

/**
 * Check whether the given token kind marks the end of a regular compound statement,
 * i.e. a position where an optional on-fail clause can safely be declared absent.
 *
 * @param nodeKind Token kind to check
 * @return <code>true</code> if the compound statement has ended. <code>false</code> otherwise
 */
private boolean isEndOfRegularCompoundStmt(SyntaxKind nodeKind) {
    switch (nodeKind) {
        case CLOSE_BRACE_TOKEN:
        case SEMICOLON_TOKEN:
        case AT_TOKEN:
        case EOF_TOKEN:
            return true;
        default:
            // A token that can start a new statement also ends the current one.
            return isStatementStartingToken(nodeKind);
    }
}

/**
 * Check whether the given token kind can start a statement.
 *
 * @param nodeKind Token kind to check
 * @return <code>true</code> if a statement can start with this token. <code>false</code> otherwise
 */
private boolean isStatementStartingToken(SyntaxKind nodeKind) {
    switch (nodeKind) {
        case FINAL_KEYWORD:
        case IF_KEYWORD:
        case WHILE_KEYWORD:
        case DO_KEYWORD:
        case PANIC_KEYWORD:
        case CONTINUE_KEYWORD:
        case BREAK_KEYWORD:
        case RETURN_KEYWORD:
        case LOCK_KEYWORD:
        case OPEN_BRACE_TOKEN:
        case FORK_KEYWORD:
        case FOREACH_KEYWORD:
        case XMLNS_KEYWORD:
        case TRANSACTION_KEYWORD:
        case RETRY_KEYWORD:
        case ROLLBACK_KEYWORD:
        case MATCH_KEYWORD:
        case FAIL_KEYWORD:
        case CHECK_KEYWORD:
        case CHECKPANIC_KEYWORD:
        case TRAP_KEYWORD:
        case START_KEYWORD:
        case FLUSH_KEYWORD:
        case LEFT_ARROW_TOKEN:
        case WAIT_KEYWORD:
        case COMMIT_KEYWORD:
        case WORKER_KEYWORD:
        case TYPE_KEYWORD:
        case CONST_KEYWORD:
            return true;
        default:
            // Statements may also start with a type descriptor (local var-decl)
            // or any expression (expression/action statement).
            if (isTypeStartingToken(nodeKind)) {
                return true;
            }

            if (isValidExpressionStart(nodeKind, 1)) {
                return true;
            }

            return false;
    }
}

/**
 * Parse on fail clause.
* <p> * <code> * on-fail-clause := on fail [typed-binding-pattern] statement-block * </code> * * @return On fail clause node */ private STNode parseOnFailClause() { startContext(ParserRuleContext.ON_FAIL_CLAUSE); STNode onKeyword = parseOnKeyword(); STNode failKeyword = parseFailKeyword(); STNode typedBindingPattern = parseOnfailOptionalBP(); STNode blockStatement = parseBlockNode(); endContext(); return STNodeFactory.createOnFailClauseNode(onKeyword, failKeyword, typedBindingPattern, blockStatement); } private STNode parseOnfailOptionalBP() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.OPEN_BRACE_TOKEN) { return STAbstractNodeFactory.createEmptyNode(); } else if (isTypeStartingToken(nextToken.kind)) { return parseTypedBindingPattern(); } else { recover(nextToken, ParserRuleContext.ON_FAIL_OPTIONAL_BINDING_PATTERN); return parseOnfailOptionalBP(); } } /** * Parse typed binding pattern. * <p> * <code> * typed-binding-pattern := inferable-type-descriptor binding-pattern * <br/> * inferable-type-descriptor := type-descriptor | var * </code> * * @return Typed binding pattern node */ private STNode parseTypedBindingPattern() { STNode typeDescriptor = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true, false, TypePrecedence.DEFAULT); STNode bindingPattern = parseBindingPattern(); return STNodeFactory.createTypedBindingPatternNode(typeDescriptor, bindingPattern); } /** * Parse retry keyword. * * @return parsed node */ private STNode parseRetryKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.RETRY_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.RETRY_KEYWORD); return parseRetryKeyword(); } } /** * Parse transaction statement. 
* <p> * <code>rollback-stmt := "rollback" [expression] ";"</code> * * @return Rollback statement node */ private STNode parseRollbackStatement() { startContext(ParserRuleContext.ROLLBACK_STMT); STNode rollbackKeyword = parseRollbackKeyword(); STNode expression; if (peek().kind == SyntaxKind.SEMICOLON_TOKEN) { expression = STNodeFactory.createEmptyNode(); } else { expression = parseExpression(); } STNode semicolon = parseSemicolon(); endContext(); return STNodeFactory.createRollbackStatementNode(rollbackKeyword, expression, semicolon); } /** * Parse rollback keyword. * * @return Rollback keyword node */ private STNode parseRollbackKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.ROLLBACK_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.ROLLBACK_KEYWORD); return parseRollbackKeyword(); } } /** * Parse transactional expression. * <p> * <code>transactional-expr := "transactional"</code> * * @return Transactional expression node */ private STNode parseTransactionalExpression() { STNode transactionalKeyword = parseTransactionalKeyword(); return STNodeFactory.createTransactionalExpressionNode(transactionalKeyword); } /** * Parse transactional keyword. * * @return Transactional keyword node */ private STNode parseTransactionalKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.TRANSACTIONAL_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.TRANSACTIONAL_KEYWORD); return parseTransactionalKeyword(); } } /** * Parse base16 literal. 
* <p> * <code> * byte-array-literal := Base16Literal | Base64Literal * <br/> * Base16Literal := base16 WS ` HexGroup* WS ` * <br/> * Base64Literal := base64 WS ` Base64Group* [PaddedBase64Group] WS ` * </code> * * @return parsed node */ private STNode parseByteArrayLiteral() { STNode type; if (peek().kind == SyntaxKind.BASE16_KEYWORD) { type = parseBase16Keyword(); } else { type = parseBase64Keyword(); } STNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START); if (startingBackTick.isMissing()) { startingBackTick = SyntaxErrors.createMissingToken(SyntaxKind.BACKTICK_TOKEN); STNode endingBackTick = SyntaxErrors.createMissingToken(SyntaxKind.BACKTICK_TOKEN); STNode content = STNodeFactory.createEmptyNode(); STNode byteArrayLiteral = STNodeFactory.createByteArrayLiteralNode(type, startingBackTick, content, endingBackTick); byteArrayLiteral = SyntaxErrors.addDiagnostic(byteArrayLiteral, DiagnosticErrorCode.ERROR_MISSING_BYTE_ARRAY_CONTENT); return byteArrayLiteral; } STNode content = parseByteArrayContent(); return parseByteArrayLiteral(type, startingBackTick, content); } /** * Parse byte array literal. 
* * @param typeKeyword keyword token, possible values are `base16` and `base64` * @param startingBackTick starting backtick token * @param byteArrayContent byte array literal content to be validated * @return parsed byte array literal node */ private STNode parseByteArrayLiteral(STNode typeKeyword, STNode startingBackTick, STNode byteArrayContent) { STNode content = STNodeFactory.createEmptyNode(); STNode newStartingBackTick = startingBackTick; STNodeList items = (STNodeList) byteArrayContent; if (items.size() == 1) { STNode item = items.get(0); if (typeKeyword.kind == SyntaxKind.BASE16_KEYWORD && !isValidBase16LiteralContent(item.toString())) { newStartingBackTick = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startingBackTick, item, DiagnosticErrorCode.ERROR_INVALID_BASE16_CONTENT_IN_BYTE_ARRAY_LITERAL); } else if (typeKeyword.kind == SyntaxKind.BASE64_KEYWORD && !isValidBase64LiteralContent(item.toString())) { newStartingBackTick = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startingBackTick, item, DiagnosticErrorCode.ERROR_INVALID_BASE64_CONTENT_IN_BYTE_ARRAY_LITERAL); } else if (item.kind != SyntaxKind.TEMPLATE_STRING) { newStartingBackTick = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startingBackTick, item, DiagnosticErrorCode.ERROR_INVALID_CONTENT_IN_BYTE_ARRAY_LITERAL); } else { content = item; } } else if (items.size() > 1) { STNode clonedStartingBackTick = startingBackTick; for (int index = 0; index < items.size(); index++) { STNode item = items.get(index); clonedStartingBackTick = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(clonedStartingBackTick, item); } newStartingBackTick = SyntaxErrors.addDiagnostic(clonedStartingBackTick, DiagnosticErrorCode.ERROR_INVALID_CONTENT_IN_BYTE_ARRAY_LITERAL); } STNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_END); return STNodeFactory.createByteArrayLiteralNode(typeKeyword, newStartingBackTick, content, endingBackTick); } /** * Parse <code>base16</code> keyword. 
 *
 * @return base16 keyword node
 */
private STNode parseBase16Keyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.BASE16_KEYWORD) {
        return consume();
    } else {
        recover(token, ParserRuleContext.BASE16_KEYWORD);
        return parseBase16Keyword();
    }
}

/**
 * Parse <code>base64</code> keyword.
 *
 * @return base64 keyword node
 */
private STNode parseBase64Keyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.BASE64_KEYWORD) {
        return consume();
    } else {
        recover(token, ParserRuleContext.BASE64_KEYWORD);
        return parseBase64Keyword();
    }
}

/**
 * Validate and parse byte array literal content.
 * An error is reported, if the content is invalid.
 *
 * @return parsed node
 */
private STNode parseByteArrayContent() {
    STToken nextToken = peek();
    List<STNode> items = new ArrayList<>();
    // Collect template items up to the closing backtick. Validation of the collected
    // content is done afterwards by parseByteArrayLiteral(type, startingBackTick, content).
    while (!isEndOfBacktickContent(nextToken.kind)) {
        STNode content = parseTemplateItem();
        items.add(content);
        nextToken = peek();
    }
    return STNodeFactory.createNodeList(items);
}

/**
 * Validate base16 literal content.
 * <p>
 * <code>
 * Base16Literal := base16 WS ` HexGroup* WS `
 * <br/>
 * HexGroup := WS HexDigit WS HexDigit
 * <br/>
 * WS := WhiteSpaceChar*
 * <br/>
 * WhiteSpaceChar := 0x9 | 0xA | 0xD | 0x20
 * </code>
 *
 * @param content the string surrounded by the backticks
 * @return <code>true</code>, if the string content is valid. <code>false</code> otherwise.
 */
static boolean isValidBase16LiteralContent(String content) {
    char[] charArray = content.toCharArray();
    int hexDigitCount = 0;

    for (char c : charArray) {
        switch (c) {
            case LexerTerminals.TAB:
            case LexerTerminals.NEWLINE:
            case LexerTerminals.CARRIAGE_RETURN:
            case LexerTerminals.SPACE:
                // Whitespace is permitted anywhere between hex digits.
                break;
            default:
                if (isHexDigit(c)) {
                    hexDigitCount++;
                } else {
                    return false;
                }
                break;
        }
    }
    // Hex digits must come in pairs: each HexGroup encodes one byte.
    return hexDigitCount % 2 == 0;
}

/**
 * Validate base64 literal content.
* <p> * <code> * Base64Literal := base64 WS ` Base64Group* [PaddedBase64Group] WS ` * <br/> * Base64Group := WS Base64Char WS Base64Char WS Base64Char WS Base64Char * <br/> * PaddedBase64Group := * WS Base64Char WS Base64Char WS Base64Char WS PaddingChar * | WS Base64Char WS Base64Char WS PaddingChar WS PaddingChar * <br/> * Base64Char := A .. Z | a .. z | 0 .. 9 | + | / * <br/> * PaddingChar := = * <br/> * WS := WhiteSpaceChar* * <br/> * WhiteSpaceChar := 0x9 | 0xA | 0xD | 0x20 * </code> * * @param content the string surrounded by the backticks * @return <code>true</code>, if the string content is valid. <code>false</code> otherwise. */ static boolean isValidBase64LiteralContent(String content) { char[] charArray = content.toCharArray(); int base64CharCount = 0; int paddingCharCount = 0; for (char c : charArray) { switch (c) { case LexerTerminals.TAB: case LexerTerminals.NEWLINE: case LexerTerminals.CARRIAGE_RETURN: case LexerTerminals.SPACE: break; case LexerTerminals.EQUAL: paddingCharCount++; break; default: if (isBase64Char(c)) { if (paddingCharCount == 0) { base64CharCount++; } else { return false; } } else { return false; } break; } } if (paddingCharCount > 2) { return false; } else if (paddingCharCount == 0) { return base64CharCount % 4 == 0; } else { return base64CharCount % 4 == 4 - paddingCharCount; } } /** * <p> * Check whether a given char is a base64 char. * </p> * <code>Base64Char := A .. Z | a .. z | 0 .. 9 | + | /</code> * * @param c character to check * @return <code>true</code>, if the character represents a base64 char. <code>false</code> otherwise. 
*/ static boolean isBase64Char(int c) { if ('a' <= c && c <= 'z') { return true; } if ('A' <= c && c <= 'Z') { return true; } if (c == '+' || c == '/') { return true; } return isDigit(c); } static boolean isHexDigit(int c) { if ('a' <= c && c <= 'f') { return true; } if ('A' <= c && c <= 'F') { return true; } return isDigit(c); } static boolean isDigit(int c) { return ('0' <= c && c <= '9'); } /** * Parse xml filter expression. * <p> * <code>xml-filter-expr := expression .< xml-name-pattern ></code> * * @param lhsExpr Preceding expression of .< token * @return Parsed node */ private STNode parseXMLFilterExpression(STNode lhsExpr) { STNode xmlNamePatternChain = parseXMLFilterExpressionRhs(); return STNodeFactory.createXMLFilterExpressionNode(lhsExpr, xmlNamePatternChain); } /** * Parse xml filter expression rhs. * <p> * <code>filer-expression-rhs := .< xml-name-pattern ></code> * * @return Parsed node */ private STNode parseXMLFilterExpressionRhs() { STNode dotLTToken = parseDotLTToken(); return parseXMLNamePatternChain(dotLTToken); } /** * Parse xml name pattern chain. * <p> * <code> * xml-name-pattern-chain := filer-expression-rhs | xml-element-children-step | xml-element-descendants-step * <br/> * filer-expression-rhs := .< xml-name-pattern > * <br/> * xml-element-children-step := /< xml-name-pattern > * <br/> * xml-element-descendants-step := /**\/<xml-name-pattern > * </code> * * @param startToken Preceding token of xml name pattern * @return Parsed node */ private STNode parseXMLNamePatternChain(STNode startToken) { startContext(ParserRuleContext.XML_NAME_PATTERN); STNode xmlNamePattern = parseXMLNamePattern(); STNode gtToken = parseGTToken(); endContext(); startToken = cloneWithDiagnosticIfListEmpty(xmlNamePattern, startToken, DiagnosticErrorCode.ERROR_MISSING_XML_ATOMIC_NAME_PATTERN); return STNodeFactory.createXMLNamePatternChainingNode(startToken, xmlNamePattern, gtToken); } /** * Parse xml step extends. 
     * <p>
     * <code>
     * xml-step-extends := xml-step-extend*
     * </code>
     *
     * @return Parsed node
     */
    private STNode parseXMLStepExtends() {
        STToken nextToken = peek();
        if (isEndOfXMLStepExtend(nextToken.kind)) {
            return STNodeFactory.createEmptyNodeList();
        }

        List<STNode> xmlStepExtendList = new ArrayList<>();
        startContext(ParserRuleContext.XML_STEP_EXTEND);
        STNode stepExtension;
        while (!isEndOfXMLStepExtend(nextToken.kind)) {
            // Dispatch on the leading token: `.` -> method call, `.<` -> filter, `[` -> indexed.
            if (nextToken.kind == SyntaxKind.DOT_TOKEN) {
                stepExtension = parseXMLStepMethodCallExtend();
            } else if (nextToken.kind == SyntaxKind.DOT_LT_TOKEN) {
                stepExtension = parseXMLFilterExpressionRhs();
            } else {
                stepExtension = parseXMLIndexedStepExtend();
            }
            xmlStepExtendList.add(stepExtension);
            nextToken = peek();
        }
        endContext();
        return STNodeFactory.createNodeList(xmlStepExtendList);
    }

    /**
     * <p>
     * Parse xml indexed step extension.
     * <br/>
     * <code>
     * xml-indexed-step-extend:= [ expression ]
     * </code>
     * </p>
     *
     * @return Parsed node
     */
    private STNode parseXMLIndexedStepExtend() {
        STNode openBracket = parseOpenBracket();
        STNode keyExpr = parseKeyExpr(true);
        STNode closeBracket = parseCloseBracket();
        return STNodeFactory.createXMLStepIndexedExtendNode(openBracket, keyExpr, closeBracket);
    }

    /**
     * <p>
     * Parse xml method call step extension.
     * <br/>
     * <code>
     * xml-method-call-step-extend:= . method-name ( arg-list )
     * </code>
     * </p>
     *
     * @return Parsed node
     */
    private STNode parseXMLStepMethodCallExtend() {
        STNode dotToken = parseDotToken();
        STNode methodName = parseMethodName();
        STNode parenthesizedArgsList = parseParenthesizedArgList();
        return STNodeFactory.createXMLStepMethodCallExtendNode(dotToken, methodName, parenthesizedArgsList);
    }

    // Parses a method name: either a keyword that is allowed as a method name
    // (see isSpecialMethodName) or a plain identifier.
    private STNode parseMethodName() {
        if (isSpecialMethodName(peek())) {
            return getKeywordAsSimpleNameRef();
        }
        return STNodeFactory.createSimpleNameReferenceNode(parseIdentifier(ParserRuleContext.IDENTIFIER));
    }

    /**
     * Parse <code> .< </code> token.
     *
     * @return Parsed node
     */
    private STNode parseDotLTToken() {
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.DOT_LT_TOKEN) {
            return consume();
        } else {
            // Recover and retry until a `.<` token can be consumed.
            recover(nextToken, ParserRuleContext.DOT_LT_TOKEN);
            return parseDotLTToken();
        }
    }

    /**
     * Parse xml name pattern.
     * <p>
     * <code>xml-name-pattern := xml-atomic-name-pattern [| xml-atomic-name-pattern]*</code>
     *
     * @return Parsed node
     */
    private STNode parseXMLNamePattern() {
        List<STNode> xmlAtomicNamePatternList = new ArrayList<>();
        STToken nextToken = peek();

        // Empty name pattern: return an empty list; the caller attaches the diagnostic.
        if (isEndOfXMLNamePattern(nextToken.kind)) {
            return STNodeFactory.createNodeList(xmlAtomicNamePatternList);
        }

        STNode xmlAtomicNamePattern = parseXMLAtomicNamePattern();
        xmlAtomicNamePatternList.add(xmlAtomicNamePattern);

        // Remaining atomic name patterns, each preceded by a `|` separator.
        STNode separator;
        while (!isEndOfXMLNamePattern(peek().kind)) {
            separator = parseXMLNamePatternSeparator();
            if (separator == null) {
                break;
            }
            xmlAtomicNamePatternList.add(separator);

            xmlAtomicNamePattern = parseXMLAtomicNamePattern();
            xmlAtomicNamePatternList.add(xmlAtomicNamePattern);
        }

        return STNodeFactory.createNodeList(xmlAtomicNamePatternList);
    }

    private boolean isEndOfXMLNamePattern(SyntaxKind tokenKind) {
        switch (tokenKind) {
            case GT_TOKEN:
            case EOF_TOKEN:
                return true;
            case IDENTIFIER_TOKEN:
            case ASTERISK_TOKEN:
            case COLON_TOKEN:
            default:
                return false;
        }
    }

    private boolean isEndOfXMLStepExtend(SyntaxKind tokenKind) {
        return switch (tokenKind) {
            case OPEN_BRACKET_TOKEN, DOT_LT_TOKEN -> false;
            // A dot continues the step only when it starts a method-call extension: `. name (`.
            case DOT_TOKEN -> peek(3).kind != SyntaxKind.OPEN_PAREN_TOKEN;
            default -> true;
        };
    }

    private STNode parseXMLNamePatternSeparator() {
        STToken token = peek();
        switch (token.kind) {
            case PIPE_TOKEN:
                return consume();
            case GT_TOKEN:
            case EOF_TOKEN:
                // End of the name pattern; no separator.
                return null;
            default:
                recover(token, ParserRuleContext.XML_NAME_PATTERN_RHS);
                return parseXMLNamePatternSeparator();
        }
    }

    /**
     * Parse xml atomic name pattern.
     * <p>
     * <code>
     * xml-atomic-name-pattern :=
     *   *
     *   | identifier
     *   | xml-namespace-prefix : identifier
     *   | xml-namespace-prefix : *
     * </code>
     *
     * @return Parsed node
     */
    private STNode parseXMLAtomicNamePattern() {
        startContext(ParserRuleContext.XML_ATOMIC_NAME_PATTERN);
        STNode atomicNamePattern = parseXMLAtomicNamePatternBody();
        endContext();
        return atomicNamePattern;
    }

    private STNode parseXMLAtomicNamePatternBody() {
        STToken token = peek();
        STNode identifier;
        switch (token.kind) {
            case ASTERISK_TOKEN:
                // `*` alone is a complete atomic name pattern.
                return consume();
            case IDENTIFIER_TOKEN:
                identifier = consume();
                break;
            default:
                recover(token, ParserRuleContext.XML_ATOMIC_NAME_PATTERN_START);
                return parseXMLAtomicNamePatternBody();
        }

        return parseXMLAtomicNameIdentifier(identifier);
    }

    // Parses the optional `: identifier` / `: *` qualifier after an identifier.
    private STNode parseXMLAtomicNameIdentifier(STNode identifier) {
        STToken token = peek();
        if (token.kind == SyntaxKind.COLON_TOKEN) {
            STNode colon = consume();
            STToken nextToken = peek();
            if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN || nextToken.kind == SyntaxKind.ASTERISK_TOKEN) {
                STToken endToken = consume();
                return STNodeFactory.createXMLAtomicNamePatternNode(identifier, colon, endToken);
            }
            // NOTE(review): if the consumed ':' is not followed by an identifier or '*',
            // the colon token is discarded here — confirm this state is unreachable.
        }
        return STNodeFactory.createSimpleNameReferenceNode(identifier);
    }

    /**
     * Parse xml step expression.
     * <p>
     * <code>xml-step-expr := expression xml-step-start xml-step-extend*</code>
     *
     * @param lhsExpr Preceding expression of /*, /<, or /**\/< token
     * @return Parsed node
     */
    private STNode parseXMLStepExpression(STNode lhsExpr) {
        STNode xmlStepStart = parseXMLStepStart();
        STNode xmlStepExtends = parseXMLStepExtends();
        return STNodeFactory.createXMLStepExpressionNode(lhsExpr, xmlStepStart, xmlStepExtends);
    }

    /**
     * Parse xml step start.
     * <p>
     * <code>
     * xml-step-start :=
     *   xml-all-children-step
     *   | xml-element-children-step
     *   | xml-element-descendants-step
     * <br/>
     * xml-all-children-step := /*
     * </code>
     *
     * @return Parsed node
     */
    private STNode parseXMLStepStart() {
        STToken token = peek();
        STNode startToken;

        switch (token.kind) {
            case SLASH_ASTERISK_TOKEN:
                // `/*` takes no name pattern.
                return consume();
            case DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN:
                startToken = parseDoubleSlashDoubleAsteriskLTToken();
                break;
            case SLASH_LT_TOKEN:
            default:
                // Any other token is recovered inside parseSlashLTToken().
                startToken = parseSlashLTToken();
                break;
        }

        return parseXMLNamePatternChain(startToken);
    }

    /**
     * Parse <code> /< </code> token.
     *
     * @return Parsed node
     */
    private STNode parseSlashLTToken() {
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.SLASH_LT_TOKEN) {
            return consume();
        } else {
            recover(nextToken, ParserRuleContext.SLASH_LT_TOKEN);
            return parseSlashLTToken();
        }
    }

    /**
     * Parse <code> /**\/< </code> token.
     *
     * @return Parsed node
     */
    private STNode parseDoubleSlashDoubleAsteriskLTToken() {
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN) {
            return consume();
        } else {
            recover(nextToken, ParserRuleContext.DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN);
            return parseDoubleSlashDoubleAsteriskLTToken();
        }
    }

    /**
     * Parse match statement.
     * <p>
     * <code>match-stmt := match action-or-expr { match-clause+ } [on-fail-clause]</code>
     *
     * @return Match statement
     */
    private STNode parseMatchStatement() {
        startContext(ParserRuleContext.MATCH_STMT);
        STNode matchKeyword = parseMatchKeyword();
        STNode actionOrExpr = parseActionOrExpression();

        startContext(ParserRuleContext.MATCH_BODY);
        STNode openBrace = parseOpenBrace();

        List<STNode> matchClausesList = new ArrayList<>();
        while (!isEndOfMatchClauses(peek().kind)) {
            STNode clause = parseMatchClause();
            matchClausesList.add(clause);
        }
        STNode matchClauses = STNodeFactory.createNodeList(matchClausesList);

        // The grammar requires at least one match clause.
        if (isNodeListEmpty(matchClauses)) {
            openBrace = SyntaxErrors.addDiagnostic(openBrace,
                    DiagnosticErrorCode.ERROR_MATCH_STATEMENT_SHOULD_HAVE_ONE_OR_MORE_MATCH_CLAUSES);
        }

        STNode closeBrace = parseCloseBrace();
        endContext(); // MATCH_BODY
        endContext(); // MATCH_STMT
        STNode onFailClause = parseOptionalOnFailClause();
        return STNodeFactory.createMatchStatementNode(matchKeyword, actionOrExpr, openBrace, matchClauses,
                closeBrace, onFailClause);
    }

    /**
     * Parse match keyword.
     *
     * @return Match keyword node
     */
    private STNode parseMatchKeyword() {
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.MATCH_KEYWORD) {
            return consume();
        } else {
            recover(nextToken, ParserRuleContext.MATCH_KEYWORD);
            return parseMatchKeyword();
        }
    }

    private boolean isEndOfMatchClauses(SyntaxKind nextTokenKind) {
        switch (nextTokenKind) {
            case EOF_TOKEN:
            case CLOSE_BRACE_TOKEN:
            case TYPE_KEYWORD:
                return true;
            default:
                return isEndOfStatements();
        }
    }

    /**
     * Parse a single match clause.
     * <p>
     * <code>
     * match-clause := match-pattern-list [match-guard] => block-stmt
     * <br/>
     * match-guard := if expression
     * </code>
     *
     * @return A match clause
     */
    private STNode parseMatchClause() {
        STNode matchPatterns = parseMatchPatternList();
        STNode matchGuard = parseMatchGuard();
        STNode rightDoubleArrow = parseDoubleRightArrow();
        STNode blockStmt = parseBlockNode();

        // Clause without any pattern: synthesize a missing-identifier pattern and attach
        // the diagnostic to the guard if present, otherwise to the `=>` token.
        if (isNodeListEmpty(matchPatterns)) {
            STToken identifier = SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
            STNode constantPattern = STNodeFactory.createSimpleNameReferenceNode(identifier);
            matchPatterns = STNodeFactory.createNodeList(constantPattern);

            DiagnosticErrorCode errorCode = DiagnosticErrorCode.ERROR_MISSING_MATCH_PATTERN;
            if (matchGuard != null) {
                matchGuard = SyntaxErrors.addDiagnostic(matchGuard, errorCode);
            } else {
                rightDoubleArrow = SyntaxErrors.addDiagnostic(rightDoubleArrow, errorCode);
            }
        }
        return STNodeFactory.createMatchClauseNode(matchPatterns, matchGuard, rightDoubleArrow, blockStmt);
    }

    /**
     * Parse match guard.
     * <p>
     * <code>match-guard := if expression</code>
     *
     * @return Match guard, or an empty node when no guard is present
     */
    private STNode parseMatchGuard() {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case IF_KEYWORD:
                STNode ifKeyword = parseIfKeyword();
                STNode expr = parseExpression(DEFAULT_OP_PRECEDENCE, true, false, true, false);
                return STNodeFactory.createMatchGuardNode(ifKeyword, expr);
            case RIGHT_DOUBLE_ARROW_TOKEN:
                // No guard before the `=>` token.
                return STNodeFactory.createEmptyNode();
            default:
                recover(nextToken, ParserRuleContext.OPTIONAL_MATCH_GUARD);
                return parseMatchGuard();
        }
    }

    /**
     * Parse match patterns list.
     * <p>
     * <code>match-pattern-list := match-pattern (| match-pattern)*</code>
     *
     * @return Match patterns list
     */
    private STNode parseMatchPatternList() {
        startContext(ParserRuleContext.MATCH_PATTERN);
        List<STNode> matchClauses = new ArrayList<>();
        while (!isEndOfMatchPattern(peek().kind)) {
            STNode clause = parseMatchPattern();
            if (clause == null) {
                break;
            }
            matchClauses.add(clause);

            STNode seperator = parseMatchPatternListMemberRhs();
            if (seperator == null) {
                break;
            }
            matchClauses.add(seperator);
        }

        endContext();
        return STNodeFactory.createNodeList(matchClauses);
    }

    private boolean isEndOfMatchPattern(SyntaxKind nextTokenKind) {
        switch (nextTokenKind) {
            case PIPE_TOKEN:
            case IF_KEYWORD:
            case RIGHT_DOUBLE_ARROW_TOKEN:
                return true;
            default:
                return false;
        }
    }

    /**
     * Parse match pattern.
     * <p>
     * <code>
     * match-pattern := var binding-pattern
     *                | wildcard-match-pattern
     *                | const-pattern
     *                | list-match-pattern
     *                | mapping-match-pattern
     *                | error-match-pattern
     * </code>
     *
     * @return Match pattern
     */
    private STNode parseMatchPattern() {
        STToken nextToken = peek();
        if (isPredeclaredIdentifier(nextToken.kind)) {
            // Could be a const-pattern or the type reference of an error-match-pattern.
            STNode typeRefOrConstExpr = parseQualifiedIdentifier(ParserRuleContext.MATCH_PATTERN);
            return parseErrorMatchPatternOrConsPattern(typeRefOrConstExpr);
        }

        switch (nextToken.kind) {
            case OPEN_PAREN_TOKEN:
            case NULL_KEYWORD:
            case TRUE_KEYWORD:
            case FALSE_KEYWORD:
            case PLUS_TOKEN:
            case MINUS_TOKEN:
            case DECIMAL_INTEGER_LITERAL_TOKEN:
            case HEX_INTEGER_LITERAL_TOKEN:
            case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
            case HEX_FLOATING_POINT_LITERAL_TOKEN:
            case STRING_LITERAL_TOKEN:
                return parseSimpleConstExpr();
            case VAR_KEYWORD:
                return parseVarTypedBindingPattern();
            case OPEN_BRACKET_TOKEN:
                return parseListMatchPattern();
            case OPEN_BRACE_TOKEN:
                return parseMappingMatchPattern();
            case ERROR_KEYWORD:
                return parseErrorMatchPattern();
            default:
                recover(nextToken, ParserRuleContext.MATCH_PATTERN_START);
                return parseMatchPattern();
        }
    }

    private STNode parseMatchPatternListMemberRhs() {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case PIPE_TOKEN:
                return parsePipeToken();
            case IF_KEYWORD:
            case RIGHT_DOUBLE_ARROW_TOKEN:
                // End of the match pattern list.
                return null;
            default:
                recover(nextToken, ParserRuleContext.MATCH_PATTERN_LIST_MEMBER_RHS);
                return parseMatchPatternListMemberRhs();
        }
    }

    /**
     * Parse var typed binding pattern.
     * <p>
     * <code>var binding-pattern</code>
     * </p>
     *
     * @return Parsed typed binding pattern node
     */
    private STNode parseVarTypedBindingPattern() {
        STNode varKeyword = parseVarKeyword();
        STNode varTypeDesc = createBuiltinSimpleNameReference(varKeyword);
        STNode bindingPattern = parseBindingPattern();
        return STNodeFactory.createTypedBindingPatternNode(varTypeDesc, bindingPattern);
    }

    /**
     * Parse var keyword.
     *
     * @return Var keyword node
     */
    private STNode parseVarKeyword() {
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.VAR_KEYWORD) {
            return consume();
        } else {
            recover(nextToken, ParserRuleContext.VAR_KEYWORD);
            return parseVarKeyword();
        }
    }

    /**
     * Parse list match pattern.
     * <p>
     * <code>
     * list-match-pattern := [ list-member-match-patterns ]
     * list-member-match-patterns :=
     *    match-pattern (, match-pattern)* [, rest-match-pattern]
     *    | [ rest-match-pattern ]
     * </code>
     * </p>
     *
     * @return Parsed list match pattern node
     */
    private STNode parseListMatchPattern() {
        startContext(ParserRuleContext.LIST_MATCH_PATTERN);
        STNode openBracketToken = parseOpenBracket();
        List<STNode> matchPatternList = new ArrayList<>();
        STNode listMatchPatternMemberRhs = null;
        boolean isEndOfFields = false;

        while (!isEndOfListMatchPattern()) {
            STNode listMatchPatternMember = parseListMatchPatternMember();
            matchPatternList.add(listMatchPatternMember);
            listMatchPatternMemberRhs = parseListMatchPatternMemberRhs();

            if (listMatchPatternMember.kind == SyntaxKind.REST_MATCH_PATTERN) {
                // A rest pattern must be the last member; anything after it is invalid.
                isEndOfFields = true;
                break;
            }

            if (listMatchPatternMemberRhs != null) {
                matchPatternList.add(listMatchPatternMemberRhs);
            } else {
                break;
            }
        }

        // Consume, and mark as invalid, any members that appear after the rest pattern.
        while (isEndOfFields && listMatchPatternMemberRhs != null) {
            updateLastNodeInListWithInvalidNode(matchPatternList, listMatchPatternMemberRhs, null);

            if (peek().kind == SyntaxKind.CLOSE_BRACKET_TOKEN) {
                break;
            }

            STNode invalidField = parseListMatchPatternMember();
            updateLastNodeInListWithInvalidNode(matchPatternList, invalidField,
                    DiagnosticErrorCode.ERROR_MATCH_PATTERN_AFTER_REST_MATCH_PATTERN);
            listMatchPatternMemberRhs = parseListMatchPatternMemberRhs();
        }

        STNode matchPatternListNode = STNodeFactory.createNodeList(matchPatternList);
        STNode closeBracketToken = parseCloseBracket();
        endContext();

        return STNodeFactory.createListMatchPatternNode(openBracketToken, matchPatternListNode, closeBracketToken);
    }

    public boolean isEndOfListMatchPattern() {
        switch (peek().kind) {
            case CLOSE_BRACKET_TOKEN:
            case EOF_TOKEN:
                return true;
            default:
                return false;
        }
    }

    private STNode parseListMatchPatternMember() {
        STNode nextToken = peek();
        switch (nextToken.kind) {
            case ELLIPSIS_TOKEN:
                return parseRestMatchPattern();
            default:
                // A list member is an ordinary match pattern.
                return parseMatchPattern();
        }
    }

    /**
     * Parse rest match pattern.
     * <p>
     * <code>
     * rest-match-pattern := ... var variable-name
     * </code>
     * </p>
     *
     * @return Parsed rest match pattern node
     */
    private STNode parseRestMatchPattern() {
        startContext(ParserRuleContext.REST_MATCH_PATTERN);
        STNode ellipsisToken = parseEllipsis();
        STNode varKeywordToken = parseVarKeyword();
        STNode variableName = parseVariableName();
        endContext();

        STSimpleNameReferenceNode simpleNameReferenceNode =
                (STSimpleNameReferenceNode) STNodeFactory.createSimpleNameReferenceNode(variableName);
        return STNodeFactory.createRestMatchPatternNode(ellipsisToken, varKeywordToken, simpleNameReferenceNode);
    }

    private STNode parseListMatchPatternMemberRhs() {
        switch (peek().kind) {
            case COMMA_TOKEN:
                return parseComma();
            case CLOSE_BRACKET_TOKEN:
            case EOF_TOKEN:
                // End of the list match pattern.
                return null;
            default:
                recover(peek(), ParserRuleContext.LIST_MATCH_PATTERN_MEMBER_RHS);
                return parseListMatchPatternMemberRhs();
        }
    }

    /**
     * Parse mapping match pattern.
     * <p>
     * mapping-match-pattern := { field-match-patterns }
     * <br/>
     * field-match-patterns := field-match-pattern (, field-match-pattern)* [, rest-match-pattern]
     *                       | [ rest-match-pattern ]
     * <br/>
     * field-match-pattern := field-name : match-pattern
     * <br/>
     * rest-match-pattern := ... var variable-name
     * </p>
     *
     * @return Parsed Node.
     */
    private STNode parseMappingMatchPattern() {
        startContext(ParserRuleContext.MAPPING_MATCH_PATTERN);
        STNode openBraceToken = parseOpenBrace();
        STNode fieldMatchPatterns = parseFieldMatchPatternList();
        STNode closeBraceToken = parseCloseBrace();
        endContext();
        return STNodeFactory.createMappingMatchPatternNode(openBraceToken, fieldMatchPatterns, closeBraceToken);
    }

    // Parses the first field match pattern and delegates the remainder of the list.
    private STNode parseFieldMatchPatternList() {
        List<STNode> fieldMatchPatterns = new ArrayList<>();

        STNode fieldMatchPatternMember = parseFieldMatchPatternMember();
        if (fieldMatchPatternMember == null) {
            // Empty mapping match pattern.
            return STNodeFactory.createEmptyNodeList();
        }
        fieldMatchPatterns.add(fieldMatchPatternMember);

        if (fieldMatchPatternMember.kind == SyntaxKind.REST_MATCH_PATTERN) {
            // A rest pattern ends the list; whatever follows is invalid.
            invalidateExtraFieldMatchPatterns(fieldMatchPatterns);
            return STNodeFactory.createNodeList(fieldMatchPatterns);
        }

        return parseFieldMatchPatternList(fieldMatchPatterns);
    }

    // Parses the remaining `, field-match-pattern` members into the given list.
    private STNode parseFieldMatchPatternList(List<STNode> fieldMatchPatterns) {
        while (!isEndOfMappingMatchPattern()) {
            STNode fieldMatchPatternRhs = parseFieldMatchPatternRhs();
            if (fieldMatchPatternRhs == null) {
                break;
            }
            fieldMatchPatterns.add(fieldMatchPatternRhs);

            STNode fieldMatchPatternMember = parseFieldMatchPatternMember();
            if (fieldMatchPatternMember == null) {
                // Separator with no member after it: synthesize a missing member.
                fieldMatchPatternMember = createMissingFieldMatchPattern();
            }
            fieldMatchPatterns.add(fieldMatchPatternMember);

            if (fieldMatchPatternMember.kind == SyntaxKind.REST_MATCH_PATTERN) {
                invalidateExtraFieldMatchPatterns(fieldMatchPatterns);
                break;
            }
        }
        return STNodeFactory.createNodeList(fieldMatchPatterns);
    }

    // Creates a field-match-pattern with every part missing, carrying a diagnostic.
    private STNode createMissingFieldMatchPattern() {
        STNode fieldName = SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
        STNode colon = SyntaxErrors.createMissingToken(SyntaxKind.COLON_TOKEN);
        STNode identifier = SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
        STNode matchPattern = STNodeFactory.createSimpleNameReferenceNode(identifier);
        STNode fieldMatchPatternMember = STNodeFactory.createFieldMatchPatternNode(fieldName, colon, matchPattern);
        fieldMatchPatternMember = SyntaxErrors.addDiagnostic(fieldMatchPatternMember,
                DiagnosticErrorCode.ERROR_MISSING_FIELD_MATCH_PATTERN_MEMBER);
        return fieldMatchPatternMember;
    }

    /**
     * Parse and invalidate all field match pattern members after a rest-match-pattern.
     *
     * @param fieldMatchPatterns field-match-patterns list
     */
    private void invalidateExtraFieldMatchPatterns(List<STNode> fieldMatchPatterns) {
        while (!isEndOfMappingMatchPattern()) {
            STNode fieldMatchPatternRhs = parseFieldMatchPatternRhs();
            if (fieldMatchPatternRhs == null) {
                break;
            }

            STNode fieldMatchPatternMember = parseFieldMatchPatternMember();
            if (fieldMatchPatternMember == null) {
                // Only a separator was found: attach it to the last node as an invalid token.
                updateLastNodeInListWithInvalidNode(fieldMatchPatterns, fieldMatchPatternRhs,
                        DiagnosticErrorCode.ERROR_INVALID_TOKEN, ((STToken) fieldMatchPatternRhs).text());
            } else {
                updateLastNodeInListWithInvalidNode(fieldMatchPatterns, fieldMatchPatternRhs, null);
                updateLastNodeInListWithInvalidNode(fieldMatchPatterns, fieldMatchPatternMember,
                        DiagnosticErrorCode.ERROR_MATCH_PATTERN_AFTER_REST_MATCH_PATTERN);
            }
        }
    }

    private STNode parseFieldMatchPatternMember() {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case IDENTIFIER_TOKEN:
                return parseFieldMatchPattern();
            case ELLIPSIS_TOKEN:
                return parseRestMatchPattern();
            case CLOSE_BRACE_TOKEN:
            case EOF_TOKEN:
                // End of the mapping match pattern.
                return null;
            default:
                recover(nextToken, ParserRuleContext.FIELD_MATCH_PATTERNS_START);
                return parseFieldMatchPatternMember();
        }
    }

    /**
     * Parse field match pattern.
     * <p>
     * field-match-pattern := field-name : match-pattern
     * </p>
     *
     * @return Parsed field match pattern node
     */
    public STNode parseFieldMatchPattern() {
        STNode fieldNameNode = parseVariableName();
        STNode colonToken = parseColon();
        STNode matchPattern = parseMatchPattern();
        return STNodeFactory.createFieldMatchPatternNode(fieldNameNode, colonToken, matchPattern);
    }

    public boolean isEndOfMappingMatchPattern() {
        switch (peek().kind) {
            case CLOSE_BRACE_TOKEN:
            case EOF_TOKEN:
                return true;
            default:
                return false;
        }
    }

    private STNode parseFieldMatchPatternRhs() {
        switch (peek().kind) {
            case COMMA_TOKEN:
                return parseComma();
            case CLOSE_BRACE_TOKEN:
            case EOF_TOKEN:
                // End of the mapping match pattern.
                return null;
            default:
                recover(peek(), ParserRuleContext.FIELD_MATCH_PATTERN_MEMBER_RHS);
                return parseFieldMatchPatternRhs();
        }
    }

    // Decides whether an already-parsed qualified identifier begins an error match pattern
    // (when followed by `(`) or stands alone as a const pattern.
    private STNode parseErrorMatchPatternOrConsPattern(STNode typeRefOrConstExpr) {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case OPEN_PAREN_TOKEN:
                // `T ( ... )` without the `error` keyword: insert a missing `error` keyword.
                STNode errorKeyword = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.ERROR_KEYWORD,
                        ParserRuleContext.ERROR_KEYWORD);
                startContext(ParserRuleContext.ERROR_MATCH_PATTERN);
                return parseErrorMatchPattern(errorKeyword, typeRefOrConstExpr);
            default:
                if (isMatchPatternEnd(peek().kind)) {
                    // The identifier alone is a complete const pattern.
                    return typeRefOrConstExpr;
                }

                recover(peek(), ParserRuleContext.ERROR_MATCH_PATTERN_OR_CONST_PATTERN);
                return parseErrorMatchPatternOrConsPattern(typeRefOrConstExpr);
        }
    }

    private boolean isMatchPatternEnd(SyntaxKind tokenKind) {
        switch (tokenKind) {
            case RIGHT_DOUBLE_ARROW_TOKEN:
            case COMMA_TOKEN:
            case CLOSE_BRACE_TOKEN:
            case CLOSE_BRACKET_TOKEN:
            case CLOSE_PAREN_TOKEN:
            case PIPE_TOKEN:
            case IF_KEYWORD:
            case EOF_TOKEN:
                return true;
            default:
                return false;
        }
    }

    /**
     * Parse error match pattern.
     * <p>
     * error-match-pattern := error [error-type-reference] ( error-arg-list-match-pattern )
     * error-arg-list-match-pattern :=
     *    error-message-match-pattern [, error-cause-match-pattern] [, error-field-match-patterns]
     *    | [error-field-match-patterns]
     * error-message-match-pattern := simple-match-pattern
     * error-cause-match-pattern := simple-match-pattern | error-match-pattern
     * simple-match-pattern :=
     *    wildcard-match-pattern
     *    | const-pattern
     *    | var variable-name
     * error-field-match-patterns :=
     *    named-arg-match-pattern (, named-arg-match-pattern)* [, rest-match-pattern]
     *    | rest-match-pattern
     * named-arg-match-pattern := arg-name = match-pattern
     * </p>
     *
     * @return Parsed error match pattern node.
     */
    private STNode parseErrorMatchPattern() {
        startContext(ParserRuleContext.ERROR_MATCH_PATTERN);
        STNode errorKeyword = consume();
        return parseErrorMatchPattern(errorKeyword);
    }

    // Parses the optional error-type-reference that may follow the `error` keyword.
    private STNode parseErrorMatchPattern(STNode errorKeyword) {
        STToken nextToken = peek();
        STNode typeRef;
        switch (nextToken.kind) {
            case OPEN_PAREN_TOKEN:
                // No error-type-reference present.
                typeRef = STNodeFactory.createEmptyNode();
                break;
            default:
                if (isPredeclaredIdentifier(nextToken.kind)) {
                    typeRef = parseTypeReference();
                    break;
                }
                recover(peek(), ParserRuleContext.ERROR_MATCH_PATTERN_ERROR_KEYWORD_RHS);
                return parseErrorMatchPattern(errorKeyword);
        }
        return parseErrorMatchPattern(errorKeyword, typeRef);
    }

    // Parses the parenthesized arg-list part of the error match pattern.
    private STNode parseErrorMatchPattern(STNode errorKeyword, STNode typeRef) {
        STNode openParenthesisToken = parseOpenParenthesis();
        STNode argListMatchPatternNode = parseErrorArgListMatchPatterns();
        STNode closeParenthesisToken = parseCloseParenthesis();
        endContext();

        return STNodeFactory.createErrorMatchPatternNode(errorKeyword, typeRef, openParenthesisToken,
                argListMatchPatternNode, closeParenthesisToken);
    }

    private STNode parseErrorArgListMatchPatterns() {
        List<STNode> argListMatchPatterns = new ArrayList<>();

        if (isEndOfErrorFieldMatchPatterns()) {
            // Empty arg list.
            return STNodeFactory.createNodeList(argListMatchPatterns);
        }
        startContext(ParserRuleContext.ERROR_ARG_LIST_MATCH_PATTERN_FIRST_ARG);
        STNode firstArg = parseErrorArgListMatchPattern(ParserRuleContext.ERROR_ARG_LIST_MATCH_PATTERN_START);
        endContext();

        if (isSimpleMatchPattern(firstArg.kind)) {
            // First arg is the error-message-match-pattern; a second positional arg
            // (the cause) may follow before the named-arg/rest patterns.
            argListMatchPatterns.add(firstArg);
            STNode argEnd = parseErrorArgListMatchPatternEnd(ParserRuleContext.ERROR_MESSAGE_MATCH_PATTERN_END);
            if (argEnd != null) {
                STNode secondArg = parseErrorArgListMatchPattern(ParserRuleContext.ERROR_MESSAGE_MATCH_PATTERN_RHS);
                if (isValidSecondArgMatchPattern(secondArg.kind)) {
                    argListMatchPatterns.add(argEnd);
                    argListMatchPatterns.add(secondArg);
                } else {
                    // Invalid second arg: attach both the separator and the arg to the
                    // previous node as invalid nodes.
                    updateLastNodeInListWithInvalidNode(argListMatchPatterns, argEnd, null);
                    updateLastNodeInListWithInvalidNode(argListMatchPatterns, secondArg,
                            DiagnosticErrorCode.ERROR_MATCH_PATTERN_NOT_ALLOWED);
                }
            }
        } else {
            if (firstArg.kind != SyntaxKind.NAMED_ARG_MATCH_PATTERN &&
                    firstArg.kind != SyntaxKind.REST_MATCH_PATTERN) {
                addInvalidNodeToNextToken(firstArg, DiagnosticErrorCode.ERROR_MATCH_PATTERN_NOT_ALLOWED);
            } else {
                argListMatchPatterns.add(firstArg);
            }
        }

        parseErrorFieldMatchPatterns(argListMatchPatterns);
        return STNodeFactory.createNodeList(argListMatchPatterns);
    }

    private boolean isSimpleMatchPattern(SyntaxKind matchPatternKind) {
        switch (matchPatternKind) {
            case IDENTIFIER_TOKEN:
            case SIMPLE_NAME_REFERENCE:
            case QUALIFIED_NAME_REFERENCE:
            case NUMERIC_LITERAL:
            case STRING_LITERAL:
            case NULL_LITERAL:
            case NIL_LITERAL:
            case BOOLEAN_LITERAL:
            case TYPED_BINDING_PATTERN:
            case UNARY_EXPRESSION:
                return true;
            default:
                return false;
        }
    }

    private boolean isValidSecondArgMatchPattern(SyntaxKind syntaxKind) {
        switch (syntaxKind) {
            case ERROR_MATCH_PATTERN:
            case NAMED_ARG_MATCH_PATTERN:
            case REST_MATCH_PATTERN:
                return true;
            default:
                if (isSimpleMatchPattern(syntaxKind)) {
                    return true;
                }
                return false;
        }
    }

    /**
     * Parse error field match patterns.
     * error-field-match-patterns :=
     *    named-arg-match-pattern (, named-arg-match-pattern)* [, rest-match-pattern]
     *    | rest-match-pattern
     * named-arg-match-pattern := arg-name = match-pattern
     *
     * @param argListMatchPatterns the list to which valid patterns and their separators are appended
     */
    private void parseErrorFieldMatchPatterns(List<STNode> argListMatchPatterns) {
        SyntaxKind lastValidArgKind = SyntaxKind.NAMED_ARG_MATCH_PATTERN;
        while (!isEndOfErrorFieldMatchPatterns()) {
            STNode argEnd = parseErrorArgListMatchPatternEnd(ParserRuleContext.ERROR_FIELD_MATCH_PATTERN_RHS);
            if (argEnd == null) {
                // End of the args list.
                break;
            }
            STNode currentArg = parseErrorArgListMatchPattern(ParserRuleContext.ERROR_FIELD_MATCH_PATTERN);
            DiagnosticErrorCode errorCode = validateErrorFieldMatchPatternOrder(lastValidArgKind, currentArg.kind);
            if (errorCode == null) {
                argListMatchPatterns.add(argEnd);
                argListMatchPatterns.add(currentArg);
                lastValidArgKind = currentArg.kind;
            } else if (argListMatchPatterns.size() == 0) {
                // Nothing valid collected yet: attach the invalid nodes to the next token.
                addInvalidNodeToNextToken(argEnd, null);
                addInvalidNodeToNextToken(currentArg, errorCode);
            } else {
                updateLastNodeInListWithInvalidNode(argListMatchPatterns, argEnd, null);
                updateLastNodeInListWithInvalidNode(argListMatchPatterns, currentArg, errorCode);
            }
        }
    }

    private boolean isEndOfErrorFieldMatchPatterns() {
        // Error field match patterns and error field binding patterns share the same terminators.
        return isEndOfErrorFieldBindingPatterns();
    }

    private STNode parseErrorArgListMatchPatternEnd(ParserRuleContext currentCtx) {
        switch (peek().kind) {
            case COMMA_TOKEN:
                return consume();
            case CLOSE_PAREN_TOKEN:
                // End of the arg list; no separator.
                return null;
            default:
                recover(peek(), currentCtx);
                return parseErrorArgListMatchPatternEnd(currentCtx);
        }
    }

    private STNode parseErrorArgListMatchPattern(ParserRuleContext context) {
        STToken nextToken = peek();
        if (isPredeclaredIdentifier(nextToken.kind)) {
            // Either a named-arg (`name = pattern`) or a simple const/reference pattern.
            return parseNamedArgOrSimpleMatchPattern();
        }

        switch (nextToken.kind) {
            case ELLIPSIS_TOKEN:
                return parseRestMatchPattern();
            case OPEN_PAREN_TOKEN:
            case NULL_KEYWORD:
            case TRUE_KEYWORD:
            case FALSE_KEYWORD:
            case PLUS_TOKEN:
            case MINUS_TOKEN:
            case DECIMAL_INTEGER_LITERAL_TOKEN:
            case HEX_INTEGER_LITERAL_TOKEN:
            case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
            case HEX_FLOATING_POINT_LITERAL_TOKEN:
            case STRING_LITERAL_TOKEN:
            case OPEN_BRACKET_TOKEN:
            case OPEN_BRACE_TOKEN:
            case ERROR_KEYWORD:
                return parseMatchPattern();
            case VAR_KEYWORD:
                STNode varType = createBuiltinSimpleNameReference(consume());
                STNode variableName = createCaptureOrWildcardBP(parseVariableName());
                return STNodeFactory.createTypedBindingPatternNode(varType, variableName);
            case CLOSE_PAREN_TOKEN:
                // A trailing separator was consumed: synthesize the missing pattern.
                return SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,
                        DiagnosticErrorCode.ERROR_MISSING_MATCH_PATTERN);
            default:
                recover(nextToken, context);
                return parseErrorArgListMatchPattern(context);
        }
    }

    private STNode parseNamedArgOrSimpleMatchPattern() {
        STNode constRefExpr = parseQualifiedIdentifier(ParserRuleContext.MATCH_PATTERN);
        // A qualified name, or a simple name not followed by `=`, is a const pattern.
        if (constRefExpr.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE || peek().kind != SyntaxKind.EQUAL_TOKEN) {
            return constRefExpr;
        }
        return parseNamedArgMatchPattern(((STSimpleNameReferenceNode) constRefExpr).name);
    }

    /**
     * Parses the next named arg match pattern.
     * <br/>
     * <code>named-arg-match-pattern := arg-name = match-pattern</code>
     * <br/>
     * <br/>
     *
     * @param identifier the already-parsed arg-name
     * @return the parsed named arg match pattern node
     */
    private STNode parseNamedArgMatchPattern(STNode identifier) {
        startContext(ParserRuleContext.NAMED_ARG_MATCH_PATTERN);
        STNode equalToken = parseAssignOp();
        STNode matchPattern = parseMatchPattern();
        endContext();
        return STNodeFactory.createNamedArgMatchPatternNode(identifier, equalToken, matchPattern);
    }

    // Returns null when the arg order is valid, otherwise the error code to attach.
    // Named-arg and rest patterns are only valid when no rest pattern has been seen;
    // any other pattern kind is never allowed among the error field match patterns.
    private DiagnosticErrorCode validateErrorFieldMatchPatternOrder(SyntaxKind prevArgKind,
                                                                    SyntaxKind currentArgKind) {
        switch (currentArgKind) {
            case NAMED_ARG_MATCH_PATTERN:
            case REST_MATCH_PATTERN:
                if (prevArgKind == SyntaxKind.REST_MATCH_PATTERN) {
                    return DiagnosticErrorCode.ERROR_REST_ARG_FOLLOWED_BY_ANOTHER_ARG;
                }
                return null;
            default:
                return DiagnosticErrorCode.ERROR_MATCH_PATTERN_NOT_ALLOWED;
        }
    }

    /**
     * Parse markdown documentation.
     *
     * @return markdown documentation node
     */
    private STNode parseMarkdownDocumentation() {
        List<STNode> markdownDocLineList = new ArrayList<>();

        // Consecutive documentation-string tokens form a single markdown documentation node.
        STToken nextToken = peek();
        while (nextToken.kind == SyntaxKind.DOCUMENTATION_STRING) {
            STToken documentationString = consume();
            STNode parsedDocLines = parseDocumentationString(documentationString);
            appendParsedDocumentationLines(markdownDocLineList, parsedDocLines);
            nextToken = peek();
        }

        STNode markdownDocLines = STNodeFactory.createNodeList(markdownDocLineList);
        return STNodeFactory.createMarkdownDocumentationNode(markdownDocLines);
    }

    /**
     * Parse documentation string.
* * @return markdown documentation line list node */ private STNode parseDocumentationString(STToken documentationStringToken) { List<STNode> leadingTriviaList = getLeadingTriviaList(documentationStringToken.leadingMinutiae()); Collection<STNodeDiagnostic> diagnostics = new ArrayList<>((documentationStringToken.diagnostics())); CharReader charReader = CharReader.from(documentationStringToken.text()); DocumentationLexer documentationLexer = new DocumentationLexer(charReader, leadingTriviaList, diagnostics); AbstractTokenReader tokenReader = new TokenReader(documentationLexer); DocumentationParser documentationParser = new DocumentationParser(tokenReader); return documentationParser.parse(); } private List<STNode> getLeadingTriviaList(STNode leadingMinutiaeNode) { List<STNode> leadingTriviaList = new ArrayList<>(); int bucketCount = leadingMinutiaeNode.bucketCount(); for (int i = 0; i < bucketCount; i++) { leadingTriviaList.add(leadingMinutiaeNode.childInBucket(i)); } return leadingTriviaList; } private void appendParsedDocumentationLines(List<STNode> markdownDocLineList, STNode parsedDocLines) { int bucketCount = parsedDocLines.bucketCount(); for (int i = 0; i < bucketCount; i++) { STNode markdownDocLine = parsedDocLines.childInBucket(i); markdownDocLineList.add(markdownDocLine); } } /** * Parse any statement that starts with a token that has ambiguity between being * a type-desc or an expression. 
     *
     * @param annots Annotations
     * @param qualifiers Preceding type descriptor qualifiers
     * @return Statement node
     */
    private STNode parseStmtStartsWithTypeOrExpr(STNode annots, List<STNode> qualifiers) {
        startContext(ParserRuleContext.AMBIGUOUS_STMT);
        STNode typeOrExpr = parseTypedBindingPatternOrExpr(qualifiers, true);
        return parseStmtStartsWithTypedBPOrExprRhs(annots, typeOrExpr);
    }

    private STNode parseStmtStartsWithTypedBPOrExprRhs(STNode annots, STNode typedBindingPatternOrExpr) {
        if (typedBindingPatternOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) {
            // Ambiguity resolved to a typed binding pattern: continue as a var-decl statement.
            List<STNode> varDeclQualifiers = new ArrayList<>();
            switchContext(ParserRuleContext.VAR_DECL_STMT);
            return parseVarDeclRhs(annots, varDeclQualifiers, typedBindingPatternOrExpr, false);
        }

        // Otherwise treat what was parsed as an expression and continue the statement.
        STNode expr = getExpression(typedBindingPatternOrExpr);
        expr = getExpression(parseExpressionRhs(DEFAULT_OP_PRECEDENCE, expr, false, true));
        return parseStatementStartWithExprRhs(expr);
    }

    private STNode parseTypedBindingPatternOrExpr(boolean allowAssignment) {
        List<STNode> typeDescQualifiers = new ArrayList<>();
        return parseTypedBindingPatternOrExpr(typeDescQualifiers, allowAssignment);
    }

    private STNode parseTypedBindingPatternOrExpr(List<STNode> qualifiers, boolean allowAssignment) {
        parseTypeDescQualifiers(qualifiers);
        STToken nextToken = peek();
        STNode typeOrExpr;
        if (isPredeclaredIdentifier(nextToken.kind)) {
            reportInvalidQualifierList(qualifiers);
            typeOrExpr = parseQualifiedIdentifier(ParserRuleContext.TYPE_NAME_OR_VAR_NAME);
            return parseTypedBindingPatternOrExprRhs(typeOrExpr, allowAssignment);
        }

        switch (nextToken.kind) {
            case OPEN_PAREN_TOKEN:
                reportInvalidQualifierList(qualifiers);
                return parseTypedBPOrExprStartsWithOpenParenthesis();
            case FUNCTION_KEYWORD:
                return parseAnonFuncExprOrTypedBPWithFuncType(qualifiers);
            case OPEN_BRACKET_TOKEN:
                reportInvalidQualifierList(qualifiers);
                typeOrExpr = parseTupleTypeDescOrListConstructor(STNodeFactory.createEmptyNodeList());
                return parseTypedBindingPatternOrExprRhs(typeOrExpr, allowAssignment);
            // Basic literals: still ambiguous between an expression and a singleton type.
            case DECIMAL_INTEGER_LITERAL_TOKEN:
            case HEX_INTEGER_LITERAL_TOKEN:
            case STRING_LITERAL_TOKEN:
            case NULL_KEYWORD:
            case TRUE_KEYWORD:
            case FALSE_KEYWORD:
            case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
            case HEX_FLOATING_POINT_LITERAL_TOKEN:
                reportInvalidQualifierList(qualifiers);
                STNode basicLiteral = parseBasicLiteral();
                return parseTypedBindingPatternOrExprRhs(basicLiteral, allowAssignment);
            default:
                if (isValidExpressionStart(nextToken.kind, 1)) {
                    reportInvalidQualifierList(qualifiers);
                    return parseActionOrExpressionInLhs(STNodeFactory.createEmptyNodeList());
                }
                return parseTypedBindingPattern(qualifiers, ParserRuleContext.VAR_DECL_STMT);
        }
    }

    /**
     * Parse the component after the ambiguous starting node. Ambiguous node could be either an expr
     * or a type-desc. The component followed by this ambiguous node could be the binding-pattern or
     * the expression-rhs.
     *
     * @param typeOrExpr Type desc or the expression
     * @param allowAssignment Flag indicating whether to allow assignment. i.e.: whether this is a
     *            valid lvalue expression
     * @return Typed-binding-pattern node or an expression node
     */
    private STNode parseTypedBindingPatternOrExprRhs(STNode typeOrExpr, boolean allowAssignment) {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case PIPE_TOKEN:
            case BITWISE_AND_TOKEN:
                STToken nextNextToken = peek(2);
                if (nextNextToken.kind == SyntaxKind.EQUAL_TOKEN) {
                    // An `=` right after the `|`/`&`: return the current node as-is and
                    // let the caller handle the rest.
                    return typeOrExpr;
                }

                STNode pipeOrAndToken = parseBinaryOperator();
                STNode rhsTypedBPOrExpr = parseTypedBindingPatternOrExpr(allowAssignment);
                if (rhsTypedBPOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) {
                    // The RHS resolved to a typed binding pattern: merge the LHS into its
                    // type descriptor to form a union/intersection typed binding pattern.
                    STTypedBindingPatternNode typedBP = (STTypedBindingPatternNode) rhsTypedBPOrExpr;
                    typeOrExpr = getTypeDescFromExpr(typeOrExpr);
                    STNode newTypeDesc = mergeTypes(typeOrExpr, pipeOrAndToken, typedBP.typeDescriptor);
                    return STNodeFactory.createTypedBindingPatternNode(newTypeDesc, typedBP.bindingPattern);
                }

                if (peek().kind == SyntaxKind.EQUAL_TOKEN) {
                    return createCaptureBPWithMissingVarName(typeOrExpr, pipeOrAndToken, rhsTypedBPOrExpr);
                }
                return STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, typeOrExpr,
                        pipeOrAndToken, rhsTypedBPOrExpr);
            case SEMICOLON_TOKEN:
                if (isExpression(typeOrExpr.kind)) {
                    return typeOrExpr;
                }

                if (isDefiniteTypeDesc(typeOrExpr.kind) || !isAllBasicLiterals(typeOrExpr)) {
                    // Definitely a type-desc: parse it as a typed binding pattern.
                    STNode typeDesc = getTypeDescFromExpr(typeOrExpr);
                    return parseTypeBindingPatternStartsWithAmbiguousNode(typeDesc);
                }

                return typeOrExpr;
            case IDENTIFIER_TOKEN:
            case QUESTION_MARK_TOKEN:
                if (isAmbiguous(typeOrExpr) || isDefiniteTypeDesc(typeOrExpr.kind)) {
                    // An identifier or `?` after the node implies it is a type-desc.
                    STNode typeDesc = getTypeDescFromExpr(typeOrExpr);
                    return parseTypeBindingPatternStartsWithAmbiguousNode(typeDesc);
                }
                return typeOrExpr;
            case EQUAL_TOKEN:
                return typeOrExpr;
            case OPEN_BRACKET_TOKEN:
                return parseTypedBindingPatternOrMemberAccess(typeOrExpr, false, allowAssignment,
                        ParserRuleContext.AMBIGUOUS_STMT);
            case OPEN_BRACE_TOKEN: // mapping binding pattern
            case ERROR_KEYWORD: // error binding pattern
                STNode typeDesc = getTypeDescFromExpr(typeOrExpr);
                return parseTypeBindingPatternStartsWithAmbiguousNode(typeDesc);
            default:
                if (isCompoundAssignment(nextToken.kind)) {
                    return typeOrExpr;
                }

                if (isValidExprRhsStart(nextToken.kind, typeOrExpr.kind)) {
                    return typeOrExpr;
                }

                STToken token = peek();
                SyntaxKind typeOrExprKind = typeOrExpr.kind;
                if (typeOrExprKind == SyntaxKind.QUALIFIED_NAME_REFERENCE ||
                        typeOrExprKind == SyntaxKind.SIMPLE_NAME_REFERENCE) {
                    recover(token, ParserRuleContext.BINDING_PATTERN_OR_VAR_REF_RHS);
                } else {
                    recover(token, ParserRuleContext.BINDING_PATTERN_OR_EXPR_RHS);
                }
                return parseTypedBindingPatternOrExprRhs(typeOrExpr, allowAssignment);
        }
    }

    private STNode createCaptureBPWithMissingVarName(STNode lhsType, STNode separatorToken, STNode rhsType) {
        lhsType = getTypeDescFromExpr(lhsType);
        rhsType = getTypeDescFromExpr(rhsType);

        STNode newTypeDesc = mergeTypes(lhsType, separatorToken, rhsType);
        STNode identifier = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,
                ParserRuleContext.VARIABLE_NAME);
        STNode captureBP =
STNodeFactory.createCaptureBindingPatternNode(identifier); return STNodeFactory.createTypedBindingPatternNode(newTypeDesc, captureBP); } private STNode parseTypeBindingPatternStartsWithAmbiguousNode(STNode typeDesc) { typeDesc = parseComplexTypeDescriptor(typeDesc, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true); return parseTypedBindingPatternTypeRhs(typeDesc, ParserRuleContext.VAR_DECL_STMT); } private STNode parseTypedBPOrExprStartsWithOpenParenthesis() { STNode exprOrTypeDesc = parseTypedDescOrExprStartsWithOpenParenthesis(); if (isDefiniteTypeDesc(exprOrTypeDesc.kind)) { return parseTypeBindingPatternStartsWithAmbiguousNode(exprOrTypeDesc); } return parseTypedBindingPatternOrExprRhs(exprOrTypeDesc, false); } private boolean isDefiniteTypeDesc(SyntaxKind kind) { return kind.compareTo(SyntaxKind.RECORD_TYPE_DESC) >= 0 && kind.compareTo(SyntaxKind.FUTURE_TYPE_DESC) <= 0; } private boolean isDefiniteExpr(SyntaxKind kind) { if (kind == SyntaxKind.QUALIFIED_NAME_REFERENCE || kind == SyntaxKind.SIMPLE_NAME_REFERENCE) { return false; } return kind.compareTo(SyntaxKind.BINARY_EXPRESSION) >= 0 && kind.compareTo(SyntaxKind.ERROR_CONSTRUCTOR) <= 0; } private boolean isDefiniteAction(SyntaxKind kind) { return kind.compareTo(SyntaxKind.REMOTE_METHOD_CALL_ACTION) >= 0 && kind.compareTo(SyntaxKind.CLIENT_RESOURCE_ACCESS_ACTION) <= 0; } /** * Parse type or expression that starts with open parenthesis. Possible options are: * 1) () - nil type-desc or nil-literal * 2) (T) - Parenthesized type-desc * 3) (expr) - Parenthesized expression * 4) (param, param, ..) 
- Anon function params * * @return Type-desc or expression node */ private STNode parseTypedDescOrExprStartsWithOpenParenthesis() { STNode openParen = parseOpenParenthesis(); STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.CLOSE_PAREN_TOKEN) { STNode closeParen = parseCloseParenthesis(); return parseTypeOrExprStartWithEmptyParenthesis(openParen, closeParen); } STNode typeOrExpr = parseTypeDescOrExpr(); if (isAction(typeOrExpr)) { STNode closeParen = parseCloseParenthesis(); return STNodeFactory.createBracedExpressionNode(SyntaxKind.BRACED_ACTION, openParen, typeOrExpr, closeParen); } if (isExpression(typeOrExpr.kind)) { startContext(ParserRuleContext.BRACED_EXPR_OR_ANON_FUNC_PARAMS); return parseBracedExprOrAnonFuncParamRhs(openParen, typeOrExpr, false); } STNode typeDescNode = getTypeDescFromExpr(typeOrExpr); typeDescNode = parseComplexTypeDescriptor(typeDescNode, ParserRuleContext.TYPE_DESC_IN_PARENTHESIS, false); STNode closeParen = parseCloseParenthesis(); return STNodeFactory.createParenthesisedTypeDescriptorNode(openParen, typeDescNode, closeParen); } /** * Parse type-desc or expression. This method does not handle binding patterns. 
* * @return Type-desc node or expression node */ private STNode parseTypeDescOrExpr() { List<STNode> typeDescQualifiers = new ArrayList<>(); return parseTypeDescOrExpr(typeDescQualifiers); } private STNode parseTypeDescOrExpr(List<STNode> qualifiers) { parseTypeDescQualifiers(qualifiers); STToken nextToken = peek(); STNode typeOrExpr; switch (nextToken.kind) { case OPEN_PAREN_TOKEN: reportInvalidQualifierList(qualifiers); typeOrExpr = parseTypedDescOrExprStartsWithOpenParenthesis(); break; case FUNCTION_KEYWORD: typeOrExpr = parseAnonFuncExprOrFuncTypeDesc(qualifiers); break; case IDENTIFIER_TOKEN: reportInvalidQualifierList(qualifiers); typeOrExpr = parseQualifiedIdentifier(ParserRuleContext.TYPE_NAME_OR_VAR_NAME); return parseTypeDescOrExprRhs(typeOrExpr); case OPEN_BRACKET_TOKEN: reportInvalidQualifierList(qualifiers); typeOrExpr = parseTupleTypeDescOrListConstructor(STNodeFactory.createEmptyNodeList()); break; case DECIMAL_INTEGER_LITERAL_TOKEN: case HEX_INTEGER_LITERAL_TOKEN: case STRING_LITERAL_TOKEN: case NULL_KEYWORD: case TRUE_KEYWORD: case FALSE_KEYWORD: case DECIMAL_FLOATING_POINT_LITERAL_TOKEN: case HEX_FLOATING_POINT_LITERAL_TOKEN: reportInvalidQualifierList(qualifiers); STNode basicLiteral = parseBasicLiteral(); return parseTypeDescOrExprRhs(basicLiteral); default: if (isValidExpressionStart(nextToken.kind, 1)) { reportInvalidQualifierList(qualifiers); return parseActionOrExpressionInLhs(STNodeFactory.createEmptyNodeList()); } return parseTypeDescriptor(qualifiers, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN); } if (isDefiniteTypeDesc(typeOrExpr.kind)) { return parseComplexTypeDescriptor(typeOrExpr, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true); } return parseTypeDescOrExprRhs(typeOrExpr); } private boolean isExpression(SyntaxKind kind) { switch (kind) { case NUMERIC_LITERAL: case STRING_LITERAL_TOKEN: case NIL_LITERAL: case NULL_LITERAL: case BOOLEAN_LITERAL: return true; default: return 
kind.compareTo(SyntaxKind.BINARY_EXPRESSION) >= 0 && kind.compareTo(SyntaxKind.ERROR_CONSTRUCTOR) <= 0; } } /** * Parse statement that starts with an empty parenthesis. Empty parenthesis can be * 1) Nil literal * 2) Nil type-desc * 3) Anon-function params * * @param openParen Open parenthesis * @param closeParen Close parenthesis * @return Parsed node */ private STNode parseTypeOrExprStartWithEmptyParenthesis(STNode openParen, STNode closeParen) { STToken nextToken = peek(); switch (nextToken.kind) { case RIGHT_DOUBLE_ARROW_TOKEN: STNode params = STNodeFactory.createEmptyNodeList(); STNode anonFuncParam = STNodeFactory.createImplicitAnonymousFunctionParameters(openParen, params, closeParen); return parseImplicitAnonFunc(anonFuncParam, false); default: return STNodeFactory.createNilLiteralNode(openParen, closeParen); } } private STNode parseAnonFuncExprOrTypedBPWithFuncType(List<STNode> qualifiers) { STNode exprOrTypeDesc = parseAnonFuncExprOrFuncTypeDesc(qualifiers); if (isAction(exprOrTypeDesc) || isExpression(exprOrTypeDesc.kind)) { return exprOrTypeDesc; } return parseTypedBindingPatternTypeRhs(exprOrTypeDesc, ParserRuleContext.VAR_DECL_STMT); } /** * Parse anon-func-expr or function-type-desc, by resolving the ambiguity. 
* * @param qualifiers Preceding qualifiers * @return Anon-func-expr or function-type-desc */ private STNode parseAnonFuncExprOrFuncTypeDesc(List<STNode> qualifiers) { startContext(ParserRuleContext.FUNC_TYPE_DESC_OR_ANON_FUNC); STNode qualifierList; STNode functionKeyword = parseFunctionKeyword(); STNode funcSignature; if (peek().kind == SyntaxKind.OPEN_PAREN_TOKEN) { funcSignature = parseFuncSignature(true); STNode[] nodes = createFuncTypeQualNodeList(qualifiers, functionKeyword, true); qualifierList = nodes[0]; functionKeyword = nodes[1]; endContext(); return parseAnonFuncExprOrFuncTypeDesc(qualifierList, functionKeyword, funcSignature); } funcSignature = STNodeFactory.createEmptyNode(); STNode[] nodes = createFuncTypeQualNodeList(qualifiers, functionKeyword, false); qualifierList = nodes[0]; functionKeyword = nodes[1]; STNode funcTypeDesc = STNodeFactory.createFunctionTypeDescriptorNode(qualifierList, functionKeyword, funcSignature); if (getCurrentContext() != ParserRuleContext.STMT_START_BRACKETED_LIST) { switchContext(ParserRuleContext.VAR_DECL_STMT); return parseComplexTypeDescriptor(funcTypeDesc, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true); } return parseComplexTypeDescriptor(funcTypeDesc, ParserRuleContext.TYPE_DESC_IN_TUPLE, false); } private STNode parseAnonFuncExprOrFuncTypeDesc(STNode qualifierList, STNode functionKeyword, STNode funcSignature) { ParserRuleContext currentCtx = getCurrentContext(); switch (peek().kind) { case OPEN_BRACE_TOKEN: case RIGHT_DOUBLE_ARROW_TOKEN: if (currentCtx != ParserRuleContext.STMT_START_BRACKETED_LIST) { switchContext(ParserRuleContext.EXPRESSION_STATEMENT); } startContext(ParserRuleContext.ANON_FUNC_EXPRESSION); funcSignature = validateAndGetFuncParams((STFunctionSignatureNode) funcSignature); STNode funcBody = parseAnonFuncBody(false); STNode annots = STNodeFactory.createEmptyNodeList(); STNode anonFunc = STNodeFactory.createExplicitAnonymousFunctionExpressionNode(annots, qualifierList, functionKeyword, 
funcSignature, funcBody); return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, anonFunc, false, true); case IDENTIFIER_TOKEN: default: STNode funcTypeDesc = STNodeFactory.createFunctionTypeDescriptorNode(qualifierList, functionKeyword, funcSignature); if (currentCtx != ParserRuleContext.STMT_START_BRACKETED_LIST) { switchContext(ParserRuleContext.VAR_DECL_STMT); return parseComplexTypeDescriptor(funcTypeDesc, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true); } return parseComplexTypeDescriptor(funcTypeDesc, ParserRuleContext.TYPE_DESC_IN_TUPLE, false); } } private STNode parseTypeDescOrExprRhs(STNode typeOrExpr) { STToken nextToken = peek(); STNode typeDesc; switch (nextToken.kind) { case PIPE_TOKEN: case BITWISE_AND_TOKEN: STToken nextNextToken = peek(2); if (nextNextToken.kind == SyntaxKind.EQUAL_TOKEN) { return typeOrExpr; } STNode pipeOrAndToken = parseBinaryOperator(); STNode rhsTypeDescOrExpr = parseTypeDescOrExpr(); if (isExpression(rhsTypeDescOrExpr.kind)) { return STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, typeOrExpr, pipeOrAndToken, rhsTypeDescOrExpr); } typeDesc = getTypeDescFromExpr(typeOrExpr); rhsTypeDescOrExpr = getTypeDescFromExpr(rhsTypeDescOrExpr); return mergeTypes(typeDesc, pipeOrAndToken, rhsTypeDescOrExpr); case IDENTIFIER_TOKEN: case QUESTION_MARK_TOKEN: typeDesc = parseComplexTypeDescriptor(getTypeDescFromExpr(typeOrExpr), ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, false); return typeDesc; case SEMICOLON_TOKEN: return getTypeDescFromExpr(typeOrExpr); case EQUAL_TOKEN: case CLOSE_PAREN_TOKEN: case CLOSE_BRACE_TOKEN: case CLOSE_BRACKET_TOKEN: case EOF_TOKEN: case COMMA_TOKEN: return typeOrExpr; case OPEN_BRACKET_TOKEN: return parseTypedBindingPatternOrMemberAccess(typeOrExpr, false, true, ParserRuleContext.AMBIGUOUS_STMT); case ELLIPSIS_TOKEN: STNode ellipsis = parseEllipsis(); typeOrExpr = getTypeDescFromExpr(typeOrExpr); return STNodeFactory.createRestDescriptorNode(typeOrExpr, ellipsis); default: 
if (isCompoundAssignment(nextToken.kind)) { return typeOrExpr; } if (isValidExprRhsStart(nextToken.kind, typeOrExpr.kind)) { return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, typeOrExpr, false, false, false, false); } recover(peek(), ParserRuleContext.TYPE_DESC_OR_EXPR_RHS); return parseTypeDescOrExprRhs(typeOrExpr); } } private boolean isAmbiguous(STNode node) { switch (node.kind) { case SIMPLE_NAME_REFERENCE: case QUALIFIED_NAME_REFERENCE: case NIL_LITERAL: case NULL_LITERAL: case NUMERIC_LITERAL: case STRING_LITERAL: case BOOLEAN_LITERAL: case BRACKETED_LIST: return true; case BINARY_EXPRESSION: STBinaryExpressionNode binaryExpr = (STBinaryExpressionNode) node; if (binaryExpr.operator.kind != SyntaxKind.PIPE_TOKEN || binaryExpr.operator.kind == SyntaxKind.BITWISE_AND_TOKEN) { return false; } return isAmbiguous(binaryExpr.lhsExpr) && isAmbiguous(binaryExpr.rhsExpr); case BRACED_EXPRESSION: return isAmbiguous(((STBracedExpressionNode) node).expression); case INDEXED_EXPRESSION: STIndexedExpressionNode indexExpr = (STIndexedExpressionNode) node; if (!isAmbiguous(indexExpr.containerExpression)) { return false; } STNode keys = indexExpr.keyExpression; for (int i = 0; i < keys.bucketCount(); i++) { STNode item = keys.childInBucket(i); if (item.kind == SyntaxKind.COMMA_TOKEN) { continue; } if (!isAmbiguous(item)) { return false; } } return true; default: return false; } } private boolean isAllBasicLiterals(STNode node) { switch (node.kind) { case NIL_LITERAL: case NULL_LITERAL: case NUMERIC_LITERAL: case STRING_LITERAL: case BOOLEAN_LITERAL: return true; case BINARY_EXPRESSION: STBinaryExpressionNode binaryExpr = (STBinaryExpressionNode) node; if (binaryExpr.operator.kind != SyntaxKind.PIPE_TOKEN || binaryExpr.operator.kind == SyntaxKind.BITWISE_AND_TOKEN) { return false; } return isAmbiguous(binaryExpr.lhsExpr) && isAmbiguous(binaryExpr.rhsExpr); case BRACED_EXPRESSION: return isAmbiguous(((STBracedExpressionNode) node).expression); case BRACKETED_LIST: 
STAmbiguousCollectionNode list = (STAmbiguousCollectionNode) node; for (STNode member : list.members) { if (member.kind == SyntaxKind.COMMA_TOKEN) { continue; } if (!isAllBasicLiterals(member)) { return false; } } return true; case UNARY_EXPRESSION: STUnaryExpressionNode unaryExpr = (STUnaryExpressionNode) node; if (unaryExpr.unaryOperator.kind != SyntaxKind.PLUS_TOKEN && unaryExpr.unaryOperator.kind != SyntaxKind.MINUS_TOKEN) { return false; } return isNumericLiteral(unaryExpr.expression); default: return false; } } private boolean isNumericLiteral(STNode node) { switch (node.kind) { case NUMERIC_LITERAL: return true; default: return false; } } /** * Parse binding-patterns. * <p> * <code> * binding-pattern := capture-binding-pattern * | wildcard-binding-pattern * | list-binding-pattern * | mapping-binding-pattern * | functional-binding-pattern * <br/><br/> * <p> * capture-binding-pattern := variable-name * variable-name := identifier * <br/><br/> * <p> * wildcard-binding-pattern := _ * list-binding-pattern := [ list-member-binding-patterns ] * <br/> * list-member-binding-patterns := binding-pattern (, binding-pattern)* [, rest-binding-pattern] * | [ rest-binding-pattern ] * <br/><br/> * <p> * mapping-binding-pattern := { field-binding-patterns } * field-binding-patterns := field-binding-pattern (, field-binding-pattern)* [, rest-binding-pattern] * | [ rest-binding-pattern ] * <br/> * field-binding-pattern := field-name : binding-pattern | variable-name * <br/> * rest-binding-pattern := ... 
variable-name * <p> * <br/><br/> * functional-binding-pattern := functionally-constructible-type-reference ( arg-list-binding-pattern ) * <br/> * arg-list-binding-pattern := positional-arg-binding-patterns [, other-arg-binding-patterns] * | other-arg-binding-patterns * <br/> * positional-arg-binding-patterns := positional-arg-binding-pattern (, positional-arg-binding-pattern)* * <br/> * positional-arg-binding-pattern := binding-pattern * <br/> * other-arg-binding-patterns := named-arg-binding-patterns [, rest-binding-pattern] * | [rest-binding-pattern] * <br/> * named-arg-binding-patterns := named-arg-binding-pattern (, named-arg-binding-pattern)* * <br/> * named-arg-binding-pattern := arg-name = binding-pattern * </code> * * @return binding-pattern node */ private STNode parseBindingPattern() { switch (peek().kind) { case OPEN_BRACKET_TOKEN: return parseListBindingPattern(); case IDENTIFIER_TOKEN: return parseBindingPatternStartsWithIdentifier(); case OPEN_BRACE_TOKEN: return parseMappingBindingPattern(); case ERROR_KEYWORD: return parseErrorBindingPattern(); default: recover(peek(), ParserRuleContext.BINDING_PATTERN); return parseBindingPattern(); } } private STNode parseBindingPatternStartsWithIdentifier() { STNode argNameOrBindingPattern = parseQualifiedIdentifier(ParserRuleContext.BINDING_PATTERN_STARTING_IDENTIFIER); STToken secondToken = peek(); if (secondToken.kind == SyntaxKind.OPEN_PAREN_TOKEN) { startContext(ParserRuleContext.ERROR_BINDING_PATTERN); STNode errorKeyword = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.ERROR_KEYWORD, ParserRuleContext.ERROR_KEYWORD); return parseErrorBindingPattern(errorKeyword, argNameOrBindingPattern); } if (argNameOrBindingPattern.kind != SyntaxKind.SIMPLE_NAME_REFERENCE) { STNode identifier = SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN); identifier = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(identifier, argNameOrBindingPattern, DiagnosticErrorCode.ERROR_FIELD_BP_INSIDE_LIST_BP); return 
STNodeFactory.createCaptureBindingPatternNode(identifier); } return createCaptureOrWildcardBP(((STSimpleNameReferenceNode) argNameOrBindingPattern).name); } private STNode createCaptureOrWildcardBP(STNode varName) { STNode bindingPattern; if (isWildcardBP(varName)) { bindingPattern = getWildcardBindingPattern(varName); } else { bindingPattern = STNodeFactory.createCaptureBindingPatternNode(varName); } return bindingPattern; } /** * Parse list-binding-patterns. * <p> * <code> * list-binding-pattern := [ list-member-binding-patterns ] * <br/> * list-member-binding-patterns := binding-pattern (, binding-pattern)* [, rest-binding-pattern] * | [ rest-binding-pattern ] * </code> * * @return list-binding-pattern node */ private STNode parseListBindingPattern() { startContext(ParserRuleContext.LIST_BINDING_PATTERN); STNode openBracket = parseOpenBracket(); List<STNode> bindingPatternsList = new ArrayList<>(); STNode listBindingPattern = parseListBindingPattern(openBracket, bindingPatternsList); endContext(); return listBindingPattern; } private STNode parseListBindingPattern(STNode openBracket, List<STNode> bindingPatternsList) { if (isEndOfListBindingPattern(peek().kind) && bindingPatternsList.size() == 0) { STNode closeBracket = parseCloseBracket(); STNode bindingPatternsNode = STNodeFactory.createNodeList(bindingPatternsList); return STNodeFactory.createListBindingPatternNode(openBracket, bindingPatternsNode, closeBracket); } STNode listBindingPatternMember = parseListBindingPatternMember(); bindingPatternsList.add(listBindingPatternMember); STNode listBindingPattern = parseListBindingPattern(openBracket, listBindingPatternMember, bindingPatternsList); return listBindingPattern; } private STNode parseListBindingPattern(STNode openBracket, STNode firstMember, List<STNode> bindingPatterns) { STNode member = firstMember; STToken token = peek(); STNode listBindingPatternRhs = null; while (!isEndOfListBindingPattern(token.kind) && member.kind != 
                SyntaxKind.REST_BINDING_PATTERN) {
            listBindingPatternRhs = parseListBindingPatternMemberRhs();
            if (listBindingPatternRhs == null) {
                // Reached the close bracket; no more members to parse.
                break;
            }
            bindingPatterns.add(listBindingPatternRhs);
            member = parseListBindingPatternMember();
            bindingPatterns.add(member);
            token = peek();
        }

        STNode closeBracket = parseCloseBracket();
        STNode bindingPatternsNode = STNodeFactory.createNodeList(bindingPatterns);
        return STNodeFactory.createListBindingPatternNode(openBracket, bindingPatternsNode, closeBracket);
    }

    // Parses the separator after a list-binding-pattern member: a comma when more
    // members follow, or null when the close bracket terminates the list.
    private STNode parseListBindingPatternMemberRhs() {
        switch (peek().kind) {
            case COMMA_TOKEN:
                return parseComma();
            case CLOSE_BRACKET_TOKEN:
                return null;
            default:
                recover(peek(), ParserRuleContext.LIST_BINDING_PATTERN_MEMBER_END);
                return parseListBindingPatternMemberRhs();
        }
    }

    // A list-binding-pattern ends at the close bracket or at end-of-file.
    private boolean isEndOfListBindingPattern(SyntaxKind nextTokenKind) {
        switch (nextTokenKind) {
            case CLOSE_BRACKET_TOKEN:
            case EOF_TOKEN:
                return true;
            default:
                return false;
        }
    }

    /**
     * Parse list-binding-pattern member.
     * <p>
     * <code>
     * list-binding-pattern := [ list-member-binding-patterns ]
     * <br/>
     * list-member-binding-patterns := binding-pattern (, binding-pattern)* [, rest-binding-pattern]
     *                                | [ rest-binding-pattern ]
     * </code>
     *
     * @return List binding pattern member
     */
    private STNode parseListBindingPatternMember() {
        switch (peek().kind) {
            case ELLIPSIS_TOKEN:
                // `...name` rest-binding-pattern.
                return parseRestBindingPattern();
            case OPEN_BRACKET_TOKEN:
            case IDENTIFIER_TOKEN:
            case OPEN_BRACE_TOKEN:
            case ERROR_KEYWORD:
                // List, capture/wildcard, mapping, or error binding pattern.
                return parseBindingPattern();
            default:
                recover(peek(), ParserRuleContext.LIST_BINDING_PATTERN_MEMBER);
                return parseListBindingPatternMember();
        }
    }

    /**
     * Parse rest binding pattern.
     * <p>
     * <code>
     * rest-binding-pattern := ...
variable-name
     * </code>
     *
     * @return Rest binding pattern node
     */
    // Parses `... varname`, wrapping the variable name in a simple-name-reference.
    private STNode parseRestBindingPattern() {
        startContext(ParserRuleContext.REST_BINDING_PATTERN);
        STNode ellipsis = parseEllipsis();
        STNode varName = parseVariableName();
        endContext();
        STSimpleNameReferenceNode simpleNameReferenceNode =
                (STSimpleNameReferenceNode) STNodeFactory.createSimpleNameReferenceNode(varName);
        return STNodeFactory.createRestBindingPatternNode(ellipsis, simpleNameReferenceNode);
    }

    /**
     * Parse Typed-binding-pattern.
     * <p>
     * <code>
     * typed-binding-pattern := inferable-type-descriptor binding-pattern
     * <br/><br/>
     * inferable-type-descriptor := type-descriptor | var
     * </code>
     *
     * @return Typed binding pattern node
     */
    private STNode parseTypedBindingPattern(ParserRuleContext context) {
        // No qualifiers preceding the type-descriptor in this entry point.
        List<STNode> typeDescQualifiers = new ArrayList<>();
        return parseTypedBindingPattern(typeDescQualifiers, context);
    }

    // Parses the type-descriptor (honouring any preceding qualifiers) and then the
    // binding pattern that follows it.
    private STNode parseTypedBindingPattern(List<STNode> qualifiers, ParserRuleContext context) {
        STNode typeDesc = parseTypeDescriptor(qualifiers,
                ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true, false, TypePrecedence.DEFAULT);
        STNode typeBindingPattern = parseTypedBindingPatternTypeRhs(typeDesc, context);
        return typeBindingPattern;
    }

    /**
     * Parse mapping-binding-patterns.
* <p> * <code> * mapping-binding-pattern := { field-binding-patterns } * <br/><br/> * field-binding-patterns := field-binding-pattern (, field-binding-pattern)* [, rest-binding-pattern] * | [ rest-binding-pattern ] * <br/><br/> * field-binding-pattern := field-name : binding-pattern | variable-name * </code> * * @return mapping-binding-pattern node */ private STNode parseMappingBindingPattern() { startContext(ParserRuleContext.MAPPING_BINDING_PATTERN); STNode openBrace = parseOpenBrace(); STToken token = peek(); if (isEndOfMappingBindingPattern(token.kind)) { STNode closeBrace = parseCloseBrace(); STNode bindingPatternsNode = STNodeFactory.createEmptyNodeList(); endContext(); return STNodeFactory.createMappingBindingPatternNode(openBrace, bindingPatternsNode, closeBrace); } List<STNode> bindingPatterns = new ArrayList<>(); STNode prevMember = parseMappingBindingPatternMember(); if (prevMember.kind != SyntaxKind.REST_BINDING_PATTERN) { bindingPatterns.add(prevMember); } return parseMappingBindingPattern(openBrace, bindingPatterns, prevMember); } private STNode parseMappingBindingPattern(STNode openBrace, List<STNode> bindingPatterns, STNode prevMember) { STToken token = peek(); STNode mappingBindingPatternRhs = null; while (!isEndOfMappingBindingPattern(token.kind) && prevMember.kind != SyntaxKind.REST_BINDING_PATTERN) { mappingBindingPatternRhs = parseMappingBindingPatternEnd(); if (mappingBindingPatternRhs == null) { break; } bindingPatterns.add(mappingBindingPatternRhs); prevMember = parseMappingBindingPatternMember(); if (prevMember.kind == SyntaxKind.REST_BINDING_PATTERN) { break; } bindingPatterns.add(prevMember); token = peek(); } if (prevMember.kind == SyntaxKind.REST_BINDING_PATTERN) { bindingPatterns.add(prevMember); } STNode closeBrace = parseCloseBrace(); STNode bindingPatternsNode = STNodeFactory.createNodeList(bindingPatterns); endContext(); return STNodeFactory.createMappingBindingPatternNode(openBrace, bindingPatternsNode, closeBrace); } /** * Parse 
mapping-binding-pattern entry.
     * <p>
     * <code>
     * mapping-binding-pattern := { field-binding-patterns }
     * <br/><br/>
     * field-binding-patterns := field-binding-pattern (, field-binding-pattern)* [, rest-binding-pattern]
     *                          | [ rest-binding-pattern ]
     * <br/><br/>
     * field-binding-pattern := field-name : binding-pattern
     *                          | variable-name
     * </code>
     *
     * @return mapping-binding-pattern node
     */
    private STNode parseMappingBindingPatternMember() {
        STToken token = peek();
        switch (token.kind) {
            case ELLIPSIS_TOKEN:
                // `...name` rest-binding-pattern member.
                return parseRestBindingPattern();
            default:
                return parseFieldBindingPattern();
        }
    }

    // Parses the separator after a field-binding-pattern: a comma when more fields
    // follow, or null when the close brace terminates the mapping pattern.
    private STNode parseMappingBindingPatternEnd() {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case COMMA_TOKEN:
                return parseComma();
            case CLOSE_BRACE_TOKEN:
                return null;
            default:
                recover(nextToken, ParserRuleContext.MAPPING_BINDING_PATTERN_END);
                return parseMappingBindingPatternEnd();
        }
    }

    /**
     * Parse field-binding-pattern.
     * <code>field-binding-pattern := field-name : binding-pattern | varname</code>
     *
     * @return field-binding-pattern node
     */
    private STNode parseFieldBindingPattern() {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case IDENTIFIER_TOKEN:
                STNode identifier = parseIdentifier(ParserRuleContext.FIELD_BINDING_PATTERN_NAME);
                STNode simpleNameReference = STNodeFactory.createSimpleNameReferenceNode(identifier);
                return parseFieldBindingPattern(simpleNameReference);
            default:
                recover(nextToken, ParserRuleContext.FIELD_BINDING_PATTERN_NAME);
                return parseFieldBindingPattern();
        }
    }

    // Parses the remainder of a field-binding-pattern after the field name:
    // either the varname form (next token is `,` or `}`) or the full
    // `field-name : binding-pattern` form.
    private STNode parseFieldBindingPattern(STNode simpleNameReference) {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case COMMA_TOKEN:
            case CLOSE_BRACE_TOKEN:
                return STNodeFactory.createFieldBindingPatternVarnameNode(simpleNameReference);
            case COLON_TOKEN:
                STNode colon = parseColon();
                STNode bindingPattern = parseBindingPattern();
                return STNodeFactory.createFieldBindingPatternFullNode(simpleNameReference, colon, bindingPattern);
            default:
                recover(nextToken,
ParserRuleContext.FIELD_BINDING_PATTERN_END); return parseFieldBindingPattern(simpleNameReference); } } private boolean isEndOfMappingBindingPattern(SyntaxKind nextTokenKind) { return nextTokenKind == SyntaxKind.CLOSE_BRACE_TOKEN || isEndOfModuleLevelNode(1); } private STNode parseErrorTypeDescOrErrorBP(STNode annots) { STToken nextNextToken = peek(2); switch (nextNextToken.kind) { case OPEN_PAREN_TOKEN: return parseAsErrorBindingPattern(); case LT_TOKEN: return parseAsErrorTypeDesc(annots); case IDENTIFIER_TOKEN: SyntaxKind nextNextNextTokenKind = peek(3).kind; if (nextNextNextTokenKind == SyntaxKind.COLON_TOKEN || nextNextNextTokenKind == SyntaxKind.OPEN_PAREN_TOKEN) { return parseAsErrorBindingPattern(); } default: return parseAsErrorTypeDesc(annots); } } private STNode parseAsErrorBindingPattern() { startContext(ParserRuleContext.ASSIGNMENT_STMT); return parseAssignmentStmtRhs(parseErrorBindingPattern()); } private STNode parseAsErrorTypeDesc(STNode annots) { STNode finalKeyword = STNodeFactory.createEmptyNode(); return parseVariableDecl(getAnnotations(annots), finalKeyword); } /** * Parse error binding pattern node. * <p> * <code>error-binding-pattern := error [error-type-reference] ( error-arg-list-binding-pattern )</code> * <br/><br/> * error-arg-list-binding-pattern := * error-message-binding-pattern [, error-cause-binding-pattern] [, error-field-binding-patterns] * | [error-field-binding-patterns] * <br/><br/> * error-message-binding-pattern := simple-binding-pattern * <br/><br/> * error-cause-binding-pattern := simple-binding-pattern | error-binding-pattern * <br/><br/> * simple-binding-pattern := capture-binding-pattern | wildcard-binding-pattern * <br/><br/> * error-field-binding-patterns := * named-arg-binding-pattern (, named-arg-binding-pattern)* [, rest-binding-pattern] * | rest-binding-pattern * <br/><br/> * named-arg-binding-pattern := arg-name = binding-pattern * * @return Error binding pattern node. 
*/ private STNode parseErrorBindingPattern() { startContext(ParserRuleContext.ERROR_BINDING_PATTERN); STNode errorKeyword = parseErrorKeyword(); return parseErrorBindingPattern(errorKeyword); } private STNode parseErrorBindingPattern(STNode errorKeyword) { STToken nextToken = peek(); STNode typeRef; switch (nextToken.kind) { case OPEN_PAREN_TOKEN: typeRef = STNodeFactory.createEmptyNode(); break; default: if (isPredeclaredIdentifier(nextToken.kind)) { typeRef = parseTypeReference(); break; } recover(peek(), ParserRuleContext.ERROR_BINDING_PATTERN_ERROR_KEYWORD_RHS); return parseErrorBindingPattern(errorKeyword); } return parseErrorBindingPattern(errorKeyword, typeRef); } private STNode parseErrorBindingPattern(STNode errorKeyword, STNode typeRef) { STNode openParenthesis = parseOpenParenthesis(); STNode argListBindingPatterns = parseErrorArgListBindingPatterns(); STNode closeParenthesis = parseCloseParenthesis(); endContext(); return STNodeFactory.createErrorBindingPatternNode(errorKeyword, typeRef, openParenthesis, argListBindingPatterns, closeParenthesis); } /** * Parse error arg list binding pattern. * <p> * <code> * error-arg-list-binding-pattern := * error-message-binding-pattern [, error-cause-binding-pattern] [, error-field-binding-patterns] * | [error-field-binding-patterns] * <br/><br/> * <p> * error-message-binding-pattern := simple-binding-pattern * <br/><br/> * <p> * error-cause-binding-pattern := simple-binding-pattern | error-binding-pattern * <br/><br/> * <p> * simple-binding-pattern := capture-binding-pattern | wildcard-binding-pattern * <br/><br/> * <p> * error-field-binding-patterns := * named-arg-binding-pattern (, named-arg-binding-pattern)* [, rest-binding-pattern] * | rest-binding-pattern * <br/><br/> * <p> * named-arg-binding-pattern := arg-name = binding-pattern * </code> * * @return Error arg list binding patterns. 
     */
    private STNode parseErrorArgListBindingPatterns() {
        List<STNode> argListBindingPatterns = new ArrayList<>();
        if (isEndOfErrorFieldBindingPatterns()) {
            // Empty arg list: `error(...)` with nothing inside the parens.
            return STNodeFactory.createNodeList(argListBindingPatterns);
        }
        return parseErrorArgListBindingPatterns(argListBindingPatterns);
    }

    /**
     * Parse the first arg of an error-arg-list and dispatch to the correct continuation
     * based on its kind, enforcing the (message, cause, fields, rest) ordering.
     *
     * @param argListBindingPatterns Accumulator for parsed args and separators
     * @return Node list of arg binding patterns
     */
    private STNode parseErrorArgListBindingPatterns(List<STNode> argListBindingPatterns) {
        STNode firstArg = parseErrorArgListBindingPattern(ParserRuleContext.ERROR_ARG_LIST_BINDING_PATTERN_START,
                true);
        if (firstArg == null) {
            return STNodeFactory.createNodeList(argListBindingPatterns);
        }
        switch (firstArg.kind) {
            case CAPTURE_BINDING_PATTERN:
            case WILDCARD_BINDING_PATTERN:
                // A simple binding pattern is the error-message; the cause may still follow.
                argListBindingPatterns.add(firstArg);
                return parseErrorArgListBPWithoutErrorMsg(argListBindingPatterns);
            case ERROR_BINDING_PATTERN:
                // An error-binding-pattern first can only be the cause: synthesize the
                // mandatory (missing) error-message and separating comma with diagnostics.
                STNode missingIdentifier = SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
                STNode missingErrorMsgBP = STNodeFactory.createCaptureBindingPatternNode(missingIdentifier);
                missingErrorMsgBP = SyntaxErrors.addDiagnostic(missingErrorMsgBP,
                        DiagnosticErrorCode.ERROR_MISSING_ERROR_MESSAGE_BINDING_PATTERN);
                STNode missingComma = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.COMMA_TOKEN,
                        DiagnosticErrorCode.ERROR_MISSING_COMMA_TOKEN);
                argListBindingPatterns.add(missingErrorMsgBP);
                argListBindingPatterns.add(missingComma);
                argListBindingPatterns.add(firstArg);
                return parseErrorArgListBPWithoutErrorMsgAndCause(argListBindingPatterns, firstArg.kind);
            case REST_BINDING_PATTERN:
            case NAMED_ARG_BINDING_PATTERN:
                // Field args may appear without message/cause.
                argListBindingPatterns.add(firstArg);
                return parseErrorArgListBPWithoutErrorMsgAndCause(argListBindingPatterns, firstArg.kind);
            default:
                // Not a valid first arg; attach it to the next token as trivia and retry.
                addInvalidNodeToNextToken(firstArg, DiagnosticErrorCode.ERROR_BINDING_PATTERN_NOT_ALLOWED);
                return parseErrorArgListBindingPatterns(argListBindingPatterns);
        }
    }

    /**
     * Parse the second arg (the error-cause position) once the error-message has been consumed.
     *
     * @param argListBindingPatterns Accumulator for parsed args and separators
     * @return Node list of arg binding patterns
     */
    private STNode parseErrorArgListBPWithoutErrorMsg(List<STNode> argListBindingPatterns) {
        STNode argEnd = parseErrorArgsBindingPatternEnd(ParserRuleContext.ERROR_MESSAGE_BINDING_PATTERN_END);
        if (argEnd == null) {
            // Reached the close paren: the message was the only arg.
            return STNodeFactory.createNodeList(argListBindingPatterns);
        }
        STNode secondArg = parseErrorArgListBindingPattern(ParserRuleContext.ERROR_MESSAGE_BINDING_PATTERN_RHS,
                false);
        assert secondArg != null;
        switch (secondArg.kind) {
            case CAPTURE_BINDING_PATTERN:
            case WILDCARD_BINDING_PATTERN:
            case ERROR_BINDING_PATTERN:
            case REST_BINDING_PATTERN:
            case NAMED_ARG_BINDING_PATTERN:
                argListBindingPatterns.add(argEnd);
                argListBindingPatterns.add(secondArg);
                return parseErrorArgListBPWithoutErrorMsgAndCause(argListBindingPatterns, secondArg.kind);
            default:
                // Invalid second arg: attach separator and arg as trivia on the last valid node.
                updateLastNodeInListWithInvalidNode(argListBindingPatterns, argEnd, null);
                updateLastNodeInListWithInvalidNode(argListBindingPatterns, secondArg,
                        DiagnosticErrorCode.ERROR_BINDING_PATTERN_NOT_ALLOWED);
                return parseErrorArgListBPWithoutErrorMsg(argListBindingPatterns);
        }
    }

    /**
     * Parse the remaining field args (named-args then an optional rest-arg) after message
     * and cause have been consumed, validating ordering as it goes.
     *
     * @param argListBindingPatterns Accumulator for parsed args and separators
     * @param lastValidArgKind Kind of the most recent valid arg, for order validation
     * @return Node list of arg binding patterns
     */
    private STNode parseErrorArgListBPWithoutErrorMsgAndCause(List<STNode> argListBindingPatterns,
                                                              SyntaxKind lastValidArgKind) {
        while (!isEndOfErrorFieldBindingPatterns()) {
            STNode argEnd = parseErrorArgsBindingPatternEnd(ParserRuleContext.ERROR_FIELD_BINDING_PATTERN_END);
            if (argEnd == null) {
                // null separator means the close paren follows.
                break;
            }
            STNode currentArg = parseErrorArgListBindingPattern(ParserRuleContext.ERROR_FIELD_BINDING_PATTERN,
                    false);
            assert currentArg != null;
            DiagnosticErrorCode errorCode = validateErrorFieldBindingPatternOrder(lastValidArgKind,
                    currentArg.kind);
            if (errorCode == null) {
                argListBindingPatterns.add(argEnd);
                argListBindingPatterns.add(currentArg);
                lastValidArgKind = currentArg.kind;
            } else if (argListBindingPatterns.size() == 0) {
                // Nothing valid collected yet: attach the invalid pieces to the next token.
                addInvalidNodeToNextToken(argEnd, null);
                addInvalidNodeToNextToken(currentArg, errorCode);
            } else {
                updateLastNodeInListWithInvalidNode(argListBindingPatterns, argEnd, null);
                updateLastNodeInListWithInvalidNode(argListBindingPatterns, currentArg, errorCode);
            }
        }
        return STNodeFactory.createNodeList(argListBindingPatterns);
    }

    /**
     * Check whether the next token terminates the error field binding patterns
     * (close paren or EOF).
     *
     * @return <code>true</code> if the arg list ends here
     */
    private boolean isEndOfErrorFieldBindingPatterns() {
        SyntaxKind nextTokenKind = peek().kind;
        switch (nextTokenKind) {
            case CLOSE_PAREN_TOKEN:
            case EOF_TOKEN:
                return true;
            default:
                return false;
        }
    }

    /**
     * Parse the separator after an error arg: a comma (consumed and returned), or
     * <code>null</code> when the close paren ends the list; otherwise recover and retry.
     *
     * @param currentCtx Context used for recovery
     * @return Comma token, or <code>null</code> at the end of the arg list
     */
    private STNode parseErrorArgsBindingPatternEnd(ParserRuleContext currentCtx) {
        switch (peek().kind) {
            case COMMA_TOKEN:
                return consume();
            case CLOSE_PAREN_TOKEN:
                return null;
            default:
                recover(peek(), currentCtx);
                return parseErrorArgsBindingPatternEnd(currentCtx);
        }
    }

    /**
     * Parse a single arg of an error-arg-list binding pattern: a rest-binding-pattern,
     * a named-arg or simple binding pattern, or a nested list/mapping/error binding pattern.
     *
     * @param context Context used for recovery
     * @param isFirstArg Whether this is the first arg (an empty list is then allowed)
     * @return Parsed arg, or <code>null</code> for an empty first-arg position
     */
    private STNode parseErrorArgListBindingPattern(ParserRuleContext context, boolean isFirstArg) {
        switch (peek().kind) {
            case ELLIPSIS_TOKEN:
                return parseRestBindingPattern();
            case IDENTIFIER_TOKEN:
                // Either a named-arg (`name = bp`) or a capture/wildcard binding pattern.
                STNode argNameOrSimpleBindingPattern = consume();
                return parseNamedOrSimpleArgBindingPattern(argNameOrSimpleBindingPattern);
            case OPEN_BRACKET_TOKEN:
            case OPEN_BRACE_TOKEN:
            case ERROR_KEYWORD:
                return parseBindingPattern();
            case CLOSE_PAREN_TOKEN:
                if (isFirstArg) {
                    // Empty arg list.
                    return null;
                }
                // fall through: a close paren after a comma is an error; recover below.
            default:
                recover(peek(), context);
                return parseErrorArgListBindingPattern(context, isFirstArg);
        }
    }

    /**
     * Given an already-consumed identifier, decide between a named-arg binding pattern
     * (<code>name = binding-pattern</code>) and a capture/wildcard binding pattern.
     *
     * @param argNameOrSimpleBindingPattern The consumed identifier token
     * @return Named-arg or capture/wildcard binding pattern node
     */
    private STNode parseNamedOrSimpleArgBindingPattern(STNode argNameOrSimpleBindingPattern) {
        STToken secondToken = peek();
        switch (secondToken.kind) {
            case EQUAL_TOKEN:
                STNode equal = consume();
                STNode bindingPattern = parseBindingPattern();
                return STNodeFactory.createNamedArgBindingPatternNode(argNameOrSimpleBindingPattern, equal,
                        bindingPattern);
            case COMMA_TOKEN:
            case CLOSE_PAREN_TOKEN:
            default:
                // No `=` follows: treat the identifier as a capture (or `_` wildcard) pattern.
                return createCaptureOrWildcardBP(argNameOrSimpleBindingPattern);
        }
    }

    /**
     * Validate the ordering of error field binding patterns: after the message/cause
     * positions only named-args and a trailing rest-arg are allowed, and nothing may
     * follow a rest-arg.
     *
     * @param prevArgKind Kind of the previous valid arg
     * @param currentArgKind Kind of the arg being validated
     * @return Diagnostic code on violation, or <code>null</code> if the order is valid
     */
    private DiagnosticErrorCode validateErrorFieldBindingPatternOrder(SyntaxKind prevArgKind,
                                                                      SyntaxKind currentArgKind) {
        switch (currentArgKind) {
            case NAMED_ARG_BINDING_PATTERN:
            case REST_BINDING_PATTERN:
                if (prevArgKind == SyntaxKind.REST_BINDING_PATTERN) {
                    return DiagnosticErrorCode.ERROR_REST_ARG_FOLLOWED_BY_ANOTHER_ARG;
                }
                return null;
            case CAPTURE_BINDING_PATTERN:
            case WILDCARD_BINDING_PATTERN:
            case ERROR_BINDING_PATTERN:
            case LIST_BINDING_PATTERN:
            case MAPPING_BINDING_PATTERN:
            default:
                // Positional patterns are only legal in the message/cause slots, which
                // have already been consumed by the time this validation runs.
                return DiagnosticErrorCode.ERROR_BINDING_PATTERN_NOT_ALLOWED;
        }
    }

    /*
     * This parses Typed binding patterns and deals with ambiguity between types,
     * and binding patterns. An example is 'T[a]'.
     * The ambiguity lies in between:
     * 1) Array Type
     * 2) List binding pattern
     * 3) Member access expression.
     */

    /**
     * Parse the component after the type-desc, of a typed-binding-pattern.
     *
     * @param typeDesc Starting type-desc of the typed-binding-pattern
     * @return Typed-binding pattern
     */
    private STNode parseTypedBindingPatternTypeRhs(STNode typeDesc, ParserRuleContext context) {
        return parseTypedBindingPatternTypeRhs(typeDesc, context, true);
    }

    /**
     * Parse the component after the type-desc of a typed-binding-pattern.
     *
     * @param typeDesc Starting type-desc of the typed-binding-pattern
     * @param context Enclosing statement/clause context
     * @param isRoot Whether this is the root of the typed-binding-pattern; if not,
     *               a terminator token simply returns the type-desc unchanged
     * @return Typed-binding pattern (or the type-desc alone, when not root)
     */
    private STNode parseTypedBindingPatternTypeRhs(STNode typeDesc, ParserRuleContext context, boolean isRoot) {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case IDENTIFIER_TOKEN:
            case OPEN_BRACE_TOKEN:
            case ERROR_KEYWORD:
                STNode bindingPattern = parseBindingPattern();
                return STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);
            case OPEN_BRACKET_TOKEN:
                // `T[...` is still ambiguous (array type vs list BP); resolve it.
                STNode typedBindingPattern = parseTypedBindingPatternOrMemberAccess(typeDesc, true, true, context);
                assert typedBindingPattern.kind == SyntaxKind.TYPED_BINDING_PATTERN;
                return typedBindingPattern;
            case CLOSE_PAREN_TOKEN:
            case COMMA_TOKEN:
            case CLOSE_BRACKET_TOKEN:
            case CLOSE_BRACE_TOKEN:
                if (!isRoot) {
                    return typeDesc;
                }
                // fall through: at the root a terminator here is an error; recover below.
            default:
                recover(nextToken, ParserRuleContext.TYPED_BINDING_PATTERN_TYPE_RHS);
                return parseTypedBindingPatternTypeRhs(typeDesc, context, isRoot);
        }
    }

    /**
     * Parse typed-binding pattern with list, array-type-desc, or member-access-expr.
     *
     * @param typeDescOrExpr Type desc or the expression at the start
     * @param isTypedBindingPattern Is this is a typed-binding-pattern.
     *        If this is `false`, then it's still ambiguous
     * @return Parsed node
     */
    private STNode parseTypedBindingPatternOrMemberAccess(STNode typeDescOrExpr, boolean isTypedBindingPattern,
                                                          boolean allowAssignment, ParserRuleContext context) {
        startContext(ParserRuleContext.BRACKETED_LIST);
        STNode openBracket = parseOpenBracket();
        if (isBracketedListEnd(peek().kind)) {
            // `T[]` — empty brackets can only be an array type desc.
            return parseAsArrayTypeDesc(typeDescOrExpr, openBracket, STNodeFactory.createEmptyNode(), context);
        }
        // Parse the member inside the brackets, then let its kind decide the construct.
        STNode member = parseBracketedListMember(isTypedBindingPattern);
        SyntaxKind currentNodeType = getBracketedListNodeType(member, isTypedBindingPattern);
        switch (currentNodeType) {
            case ARRAY_TYPE_DESC:
                STNode typedBindingPattern = parseAsArrayTypeDesc(typeDescOrExpr, openBracket, member, context);
                return typedBindingPattern;
            case LIST_BINDING_PATTERN:
                STNode bindingPattern = parseAsListBindingPattern(openBracket, new ArrayList<>(), member, false);
                STNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);
                return STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);
            case INDEXED_EXPRESSION:
                return parseAsMemberAccessExpr(typeDescOrExpr, openBracket, member);
            case ARRAY_TYPE_DESC_OR_MEMBER_ACCESS:
                // Still ambiguous; decided below by the token after the close bracket.
                break;
            case NONE:
            default:
                STNode memberEnd = parseBracketedListMemberEnd();
                if (memberEnd != null) {
                    // A comma follows, so this must be a list binding pattern.
                    List<STNode> memberList = new ArrayList<>();
                    memberList.add(getBindingPattern(member, true));
                    memberList.add(memberEnd);
                    bindingPattern = parseAsListBindingPattern(openBracket, memberList);
                    typeDesc = getTypeDescFromExpr(typeDescOrExpr);
                    return STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);
                }
        }
        STNode closeBracket = parseCloseBracket();
        endContext();
        return parseTypedBindingPatternOrMemberAccessRhs(typeDescOrExpr, openBracket, member, closeBracket,
                isTypedBindingPattern, allowAssignment, context);
    }

    /**
     * Continue parsing the bracketed construct as a member-access (indexed) expression.
     *
     * @param typeNameOrExpr Container expression preceding the open bracket
     * @param openBracket Open bracket token
     * @param member Key expression parsed so far
     * @return Parsed expression (including any trailing expression rhs)
     */
    private STNode parseAsMemberAccessExpr(STNode typeNameOrExpr, STNode openBracket, STNode member) {
        member = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, member, false, true);
        STNode closeBracket = parseCloseBracket();
        endContext();
        STNode keyExpr = STNodeFactory.createNodeList(member);
        STNode memberAccessExpr =
                STNodeFactory.createIndexedExpressionNode(typeNameOrExpr, openBracket, keyExpr, closeBracket);
        return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, memberAccessExpr, false, false);
    }

    /**
     * Check whether the next token terminates a bracketed list (close bracket or EOF).
     *
     * @param nextTokenKind Kind of the next token
     * @return <code>true</code> if the bracketed list ends here
     */
    private boolean isBracketedListEnd(SyntaxKind nextTokenKind) {
        switch (nextTokenKind) {
            case EOF_TOKEN:
            case CLOSE_BRACKET_TOKEN:
                return true;
            default:
                return false;
        }
    }

    /**
     * Parse a member of an ambiguous bracketed list. This member could be:
     * 1) Array length
     * 2) Key expression of a member-access-expr
     * 3) A member-binding pattern of a list-binding-pattern.
     *
     * @param isTypedBindingPattern Is this in a definite typed-binding pattern
     * @return Parsed member node
     */
    private STNode parseBracketedListMember(boolean isTypedBindingPattern) {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case DECIMAL_INTEGER_LITERAL_TOKEN:
            case HEX_INTEGER_LITERAL_TOKEN:
            case ASTERISK_TOKEN:
            case STRING_LITERAL_TOKEN:
                return parseBasicLiteral();
            case CLOSE_BRACKET_TOKEN:
                return STNodeFactory.createEmptyNode();
            case OPEN_BRACE_TOKEN:
            case ERROR_KEYWORD:
            case ELLIPSIS_TOKEN:
            case OPEN_BRACKET_TOKEN:
                return parseStatementStartBracketedListMember();
            case IDENTIFIER_TOKEN:
                if (isTypedBindingPattern) {
                    return parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);
                }
                // fall out of the switch: parse as a general expression below.
                break;
            default:
                if ((!isTypedBindingPattern && isValidExpressionStart(nextToken.kind, 1)) ||
                        isQualifiedIdentifierPredeclaredPrefix(nextToken.kind)) {
                    break;
                }
                ParserRuleContext recoverContext =
                        isTypedBindingPattern ? ParserRuleContext.LIST_BINDING_MEMBER_OR_ARRAY_LENGTH
                                : ParserRuleContext.BRACKETED_LIST_MEMBER;
                recover(peek(), recoverContext);
                return parseBracketedListMember(isTypedBindingPattern);
        }
        STNode expr = parseExpression();
        if (isWildcardBP(expr)) {
            // `_` parsed as an expression is actually a wildcard binding pattern.
            return getWildcardBindingPattern(expr);
        }
        return expr;
    }

    /**
     * Treat the current node as an array, and parse the remainder of the binding pattern.
     *
     * @param typeDesc Type-desc
     * @param openBracket Open bracket
     * @param member Member
     * @param context Enclosing statement/clause context
     * @return Parsed node
     */
    private STNode parseAsArrayTypeDesc(STNode typeDesc, STNode openBracket, STNode member,
                                        ParserRuleContext context) {
        typeDesc = getTypeDescFromExpr(typeDesc);
        // Re-enter type parsing: the bracketed list turned out to be an array dimension.
        switchContext(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN);
        startContext(ParserRuleContext.ARRAY_TYPE_DESCRIPTOR);
        STNode closeBracket = parseCloseBracket();
        endContext();
        endContext();
        return parseTypedBindingPatternOrMemberAccessRhs(typeDesc, openBracket, member, closeBracket, true, true,
                context);
    }

    /**
     * Parse the separator after a bracketed-list member: a comma (consumed), or
     * <code>null</code> when the close bracket ends the list; otherwise recover and retry.
     *
     * @return Comma token, or <code>null</code> at the end of the list
     */
    private STNode parseBracketedListMemberEnd() {
        switch (peek().kind) {
            case COMMA_TOKEN:
                return parseComma();
            case CLOSE_BRACKET_TOKEN:
                return null;
            default:
                recover(peek(), ParserRuleContext.BRACKETED_LIST_MEMBER_END);
                return parseBracketedListMemberEnd();
        }
    }

    /**
     * We reach here to break ambiguity of T[a]. This could be:
     * 1) Array Type Desc
     * 2) Member access on LHS
     * 3) Typed-binding-pattern
     *
     * @param typeDescOrExpr Type name or the expr that precede the open-bracket.
     * @param openBracket Open bracket
     * @param member Member
     * @param closeBracket Close bracket
     * @param isTypedBindingPattern Is this a typed-binding-pattern.
     * @return Specific node that matches to T[a], after solving ambiguity.
     */
    private STNode parseTypedBindingPatternOrMemberAccessRhs(STNode typeDescOrExpr, STNode openBracket,
                                                             STNode member, STNode closeBracket,
                                                             boolean isTypedBindingPattern,
                                                             boolean allowAssignment, ParserRuleContext context) {
        // The token AFTER `T[a]` decides what `T[a]` was.
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case IDENTIFIER_TOKEN:
            case OPEN_BRACE_TOKEN:
            case ERROR_KEYWORD:
                // A binding pattern follows, so `T[a]` was an array type desc.
                STNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);
                STNode arrayTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, typeDesc);
                return parseTypedBindingPatternTypeRhs(arrayTypeDesc, context);
            case OPEN_BRACKET_TOKEN:
                if (isTypedBindingPattern) {
                    typeDesc = getTypeDescFromExpr(typeDescOrExpr);
                    arrayTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, typeDesc);
                    return parseTypedBindingPatternTypeRhs(arrayTypeDesc, context);
                }
                // Still ambiguous: treat `T[a]` as an indexed expression and recurse on `[..`.
                STNode keyExpr = getKeyExpr(member);
                STNode expr = STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr,
                        closeBracket);
                return parseTypedBindingPatternOrMemberAccess(expr, false, allowAssignment, context);
            case QUESTION_MARK_TOKEN:
                // `T[a]?` — optional type desc.
                typeDesc = getTypeDescFromExpr(typeDescOrExpr);
                arrayTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, typeDesc);
                typeDesc = parseComplexTypeDescriptor(arrayTypeDesc,
                        ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);
                return parseTypedBindingPatternTypeRhs(typeDesc, context);
            case PIPE_TOKEN:
            case BITWISE_AND_TOKEN:
                // `T[a] | ...` or `T[a] & ...` — union/intersection; still ambiguous.
                return parseComplexTypeDescInTypedBPOrExprRhs(typeDescOrExpr, openBracket, member, closeBracket,
                        isTypedBindingPattern);
            case IN_KEYWORD:
                // `in` only valid inside foreach/from/join clauses.
                if (context != ParserRuleContext.FOREACH_STMT && context != ParserRuleContext.FROM_CLAUSE &&
                        context != ParserRuleContext.JOIN_CLAUSE) {
                    break;
                }
                return createTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket);
            case EQUAL_TOKEN:
                if (context == ParserRuleContext.FOREACH_STMT || context == ParserRuleContext.FROM_CLAUSE) {
                    // These contexts expect `in`, not `=`; recover below.
                    break;
                }
                if (isTypedBindingPattern || !allowAssignment || !isValidLVExpr(typeDescOrExpr)) {
                    return createTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket);
                }
                // `T[a] = ...` with an LV-expr lhs: a member-access assignment.
                keyExpr = getKeyExpr(member);
                typeDescOrExpr = getExpression(typeDescOrExpr);
                return STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr,
                        closeBracket);
            case SEMICOLON_TOKEN:
                if (context == ParserRuleContext.FOREACH_STMT || context == ParserRuleContext.FROM_CLAUSE) {
                    break;
                }
                return createTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket);
            case CLOSE_BRACE_TOKEN:
            case COMMA_TOKEN:
                if (context == ParserRuleContext.AMBIGUOUS_STMT) {
                    keyExpr = getKeyExpr(member);
                    return STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr,
                            closeBracket);
                }
                // fall through to the default handling.
            default:
                if (!isTypedBindingPattern && isValidExprRhsStart(nextToken.kind, closeBracket.kind)) {
                    // An operator follows, so `T[a]` was a member access expression.
                    keyExpr = getKeyExpr(member);
                    typeDescOrExpr = getExpression(typeDescOrExpr);
                    return STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr,
                            closeBracket);
                }
                break;
        }
        ParserRuleContext recoveryCtx = ParserRuleContext.BRACKETED_LIST_RHS;
        if (isTypedBindingPattern) {
            recoveryCtx = ParserRuleContext.TYPE_DESC_RHS_OR_BP_RHS;
        }
        recover(peek(), recoveryCtx);
        return parseTypedBindingPatternOrMemberAccessRhs(typeDescOrExpr, openBracket, member, closeBracket,
                isTypedBindingPattern, allowAssignment, context);
    }

    /**
     * Wrap the bracketed-list member as a member-access key-expr node list, synthesizing
     * a missing identifier (with diagnostic) when the brackets were empty.
     *
     * @param member Member inside the brackets, or <code>null</code> if empty
     * @return Node list holding the key expression
     */
    private STNode getKeyExpr(STNode member) {
        if (member == null) {
            STToken keyIdentifier = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,
                    DiagnosticErrorCode.ERROR_MISSING_KEY_EXPR_IN_MEMBER_ACCESS_EXPR);
            STNode missingVarRef = STNodeFactory.createSimpleNameReferenceNode(keyIdentifier);
            return STNodeFactory.createNodeList(missingVarRef);
        }
        return STNodeFactory.createNodeList(member);
    }

    /**
     * Materialize <code>T[a]</code> as a typed-binding-pattern. A numeric or asterisk
     * member means an array length, i.e. <code>T[n]</code> is an array type desc with a
     * missing variable name; otherwise the member becomes a list-binding-pattern member.
     *
     * @param typeDescOrExpr Type desc or expression preceding the brackets
     * @param openBracket Open bracket token
     * @param member Member inside the brackets (may be empty)
     * @param closeBracket Close bracket token
     * @return Typed-binding-pattern node
     */
    private STNode createTypedBindingPattern(STNode typeDescOrExpr, STNode openBracket, STNode member,
                                             STNode closeBracket) {
        STNode bindingPatterns = STNodeFactory.createEmptyNodeList();
        if (!isEmpty(member)) {
            SyntaxKind memberKind = member.kind;
            if (memberKind == SyntaxKind.NUMERIC_LITERAL || memberKind == SyntaxKind.ASTERISK_LITERAL) {
                STNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);
                STNode arrayTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, typeDesc);
                STToken identifierToken = SyntaxErrors.createMissingTokenWithDiagnostics(
                        SyntaxKind.IDENTIFIER_TOKEN, DiagnosticErrorCode.ERROR_MISSING_VARIABLE_NAME);
                STNode variableName = STNodeFactory.createCaptureBindingPatternNode(identifierToken);
                return STNodeFactory.createTypedBindingPatternNode(arrayTypeDesc, variableName);
            }
            STNode bindingPattern = getBindingPattern(member, true);
            bindingPatterns = STNodeFactory.createNodeList(bindingPattern);
        }
        STNode bindingPattern = STNodeFactory.createListBindingPatternNode(openBracket, bindingPatterns,
                closeBracket);
        STNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);
        return STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);
    }

    /**
     * Parse a union or intersection type-desc/binary-expression that involves ambiguous
     * bracketed list in lhs.
     * <p>
     * e.g: <code>(T[a] & R..)</code> or <code>(T[a] | R.. )</code>
     * <p>
     * Complexity occurs in scenarios such as <code>T[a] |/& R[b]</code>. If the token after this
     * is another binding-pattern, then <code>(T[a] |/& R[b])</code> becomes the type-desc. However,
     * if the token follows this is an equal or semicolon, then <code>(T[a] |/& R)</code> becomes
     * the type-desc, and <code>[b]</code> becomes the binding pattern.
     *
     * @param typeDescOrExpr Type desc or the expression
     * @param openBracket Open bracket
     * @param member Member
     * @param closeBracket Close bracket
     * @return Parsed node
     */
    private STNode parseComplexTypeDescInTypedBPOrExprRhs(STNode typeDescOrExpr, STNode openBracket, STNode member,
                                                          STNode closeBracket, boolean isTypedBindingPattern) {
        STNode pipeOrAndToken = parseUnionOrIntersectionToken();
        STNode typedBindingPatternOrExpr = parseTypedBindingPatternOrExpr(false);
        if (typedBindingPatternOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) {
            // The rhs resolved to a typed BP: `T[a] |/& R b` — merge the type descs and
            // keep the rhs binding pattern.
            STNode lhsTypeDesc = getTypeDescFromExpr(typeDescOrExpr);
            lhsTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, lhsTypeDesc);
            STTypedBindingPatternNode rhsTypedBindingPattern =
                    (STTypedBindingPatternNode) typedBindingPatternOrExpr;
            STNode rhsTypeDesc = rhsTypedBindingPattern.typeDescriptor;
            STNode newTypeDesc = mergeTypes(lhsTypeDesc, pipeOrAndToken, rhsTypeDesc);
            return STNodeFactory.createTypedBindingPatternNode(newTypeDesc,
                    rhsTypedBindingPattern.bindingPattern);
        }
        if (isTypedBindingPattern) {
            // We know this is a typed BP but the rhs was not: the variable name is missing.
            STNode lhsTypeDesc = getTypeDescFromExpr(typeDescOrExpr);
            lhsTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, lhsTypeDesc);
            return createCaptureBPWithMissingVarName(lhsTypeDesc, pipeOrAndToken, typedBindingPatternOrExpr);
        }
        // Otherwise the whole thing is a binary expression over a member access.
        STNode keyExpr = getExpression(member);
        STNode containerExpr = getExpression(typeDescOrExpr);
        STNode lhsExpr =
                STNodeFactory.createIndexedExpressionNode(containerExpr, openBracket, keyExpr, closeBracket);
        return STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, lhsExpr, pipeOrAndToken,
                typedBindingPatternOrExpr);
    }

    /**
     * Merges two types separated by <code>|</code> or <code>&</code> into one type, while taking precedence
     * and associativity into account.
     *
     * @param lhsTypeDesc lhs type
     * @param pipeOrAndToken pipe or bitwise-and token
     * @param rhsTypeDesc rhs type
     * @return a TypeDescriptorNode
     */
    private STNode mergeTypes(STNode lhsTypeDesc, STNode pipeOrAndToken, STNode rhsTypeDesc) {
        if (pipeOrAndToken.kind == SyntaxKind.PIPE_TOKEN) {
            return mergeTypesWithUnion(lhsTypeDesc, pipeOrAndToken, rhsTypeDesc);
        } else {
            return mergeTypesWithIntersection(lhsTypeDesc, pipeOrAndToken, rhsTypeDesc);
        }
    }

    /**
     * Merges two types separated by <code>|</code> into one type, while taking precedence
     * and associativity into account.
     *
     * @param lhsTypeDesc lhs type
     * @param pipeToken pipe token
     * @param rhsTypeDesc rhs type
     * @return a TypeDescriptorNode
     */
    private STNode mergeTypesWithUnion(STNode lhsTypeDesc, STNode pipeToken, STNode rhsTypeDesc) {
        if (rhsTypeDesc.kind == SyntaxKind.UNION_TYPE_DESC) {
            // Union is left-associative: splice the lhs into the leftmost slot of the rhs union.
            STUnionTypeDescriptorNode rhsUnionTypeDesc = (STUnionTypeDescriptorNode) rhsTypeDesc;
            return replaceLeftMostUnionWithAUnion(lhsTypeDesc, pipeToken, rhsUnionTypeDesc);
        } else {
            return createUnionTypeDesc(lhsTypeDesc, pipeToken, rhsTypeDesc);
        }
    }

    /**
     * Merges two types separated by <code>&</code> into one type, while taking precedence
     * and associativity into account.
     *
     * @param lhsTypeDesc lhs type
     * @param bitwiseAndToken bitwise-and token
     * @param rhsTypeDesc rhs type
     * @return a TypeDescriptorNode
     */
    private STNode mergeTypesWithIntersection(STNode lhsTypeDesc, STNode bitwiseAndToken, STNode rhsTypeDesc) {
        // Intersection binds tighter than union: only the rightmost member of an lhs
        // union, and the leftmost member of an rhs union, participate in the `&`.
        if (lhsTypeDesc.kind == SyntaxKind.UNION_TYPE_DESC) {
            STUnionTypeDescriptorNode lhsUnionTypeDesc = (STUnionTypeDescriptorNode) lhsTypeDesc;
            if (rhsTypeDesc.kind == SyntaxKind.INTERSECTION_TYPE_DESC) {
                rhsTypeDesc = replaceLeftMostIntersectionWithAIntersection(lhsUnionTypeDesc.rightTypeDesc,
                        bitwiseAndToken, (STIntersectionTypeDescriptorNode) rhsTypeDesc);
                return createUnionTypeDesc(lhsUnionTypeDesc.leftTypeDesc, lhsUnionTypeDesc.pipeToken,
                        rhsTypeDesc);
            } else if (rhsTypeDesc.kind == SyntaxKind.UNION_TYPE_DESC) {
                rhsTypeDesc = replaceLeftMostUnionWithAIntersection(lhsUnionTypeDesc.rightTypeDesc,
                        bitwiseAndToken, (STUnionTypeDescriptorNode) rhsTypeDesc);
                return replaceLeftMostUnionWithAUnion(lhsUnionTypeDesc.leftTypeDesc, lhsUnionTypeDesc.pipeToken,
                        (STUnionTypeDescriptorNode) rhsTypeDesc);
            } else {
                rhsTypeDesc = createIntersectionTypeDesc(lhsUnionTypeDesc.rightTypeDesc, bitwiseAndToken,
                        rhsTypeDesc);
                return createUnionTypeDesc(lhsUnionTypeDesc.leftTypeDesc, lhsUnionTypeDesc.pipeToken,
                        rhsTypeDesc);
            }
        }
        if (rhsTypeDesc.kind == SyntaxKind.UNION_TYPE_DESC) {
            STUnionTypeDescriptorNode rhsUnionTypeDesc = (STUnionTypeDescriptorNode) rhsTypeDesc;
            return replaceLeftMostUnionWithAIntersection(lhsTypeDesc, bitwiseAndToken, rhsUnionTypeDesc);
        } else if (rhsTypeDesc.kind == SyntaxKind.INTERSECTION_TYPE_DESC) {
            STIntersectionTypeDescriptorNode rhsIntSecTypeDesc = (STIntersectionTypeDescriptorNode) rhsTypeDesc;
            return replaceLeftMostIntersectionWithAIntersection(lhsTypeDesc, bitwiseAndToken, rhsIntSecTypeDesc);
        } else {
            return createIntersectionTypeDesc(lhsTypeDesc, bitwiseAndToken, rhsTypeDesc);
        }
    }

    /**
     * Recursively descend to the leftmost member of a union type-desc and join
     * <code>typeDesc</code> to it with a new union.
     *
     * @param typeDesc Type to splice in at the leftmost position
     * @param pipeToken Pipe token joining the two
     * @param unionTypeDesc Union whose leftmost member is replaced
     * @return Rebuilt union type-desc
     */
    private STNode replaceLeftMostUnionWithAUnion(STNode typeDesc, STNode pipeToken,
                                                  STUnionTypeDescriptorNode unionTypeDesc) {
        STNode leftTypeDesc = unionTypeDesc.leftTypeDesc;
        if (leftTypeDesc.kind == SyntaxKind.UNION_TYPE_DESC) {
            return unionTypeDesc.replace(unionTypeDesc.leftTypeDesc,
                    replaceLeftMostUnionWithAUnion(typeDesc, pipeToken,
                            (STUnionTypeDescriptorNode) leftTypeDesc));
        }
        leftTypeDesc = createUnionTypeDesc(typeDesc, pipeToken, leftTypeDesc);
        return unionTypeDesc.replace(unionTypeDesc.leftTypeDesc, leftTypeDesc);
    }

    /**
     * Recursively descend to the leftmost member of a union type-desc and join
     * <code>typeDesc</code> to it with a new intersection (which binds tighter).
     *
     * @param typeDesc Type to splice in at the leftmost position
     * @param bitwiseAndToken Bitwise-and token joining the two
     * @param unionTypeDesc Union whose leftmost member is replaced
     * @return Rebuilt union type-desc
     */
    private STNode replaceLeftMostUnionWithAIntersection(STNode typeDesc, STNode bitwiseAndToken,
                                                         STUnionTypeDescriptorNode unionTypeDesc) {
        STNode leftTypeDesc = unionTypeDesc.leftTypeDesc;
        if (leftTypeDesc.kind == SyntaxKind.UNION_TYPE_DESC) {
            return unionTypeDesc.replace(unionTypeDesc.leftTypeDesc,
                    replaceLeftMostUnionWithAIntersection(typeDesc, bitwiseAndToken,
                            (STUnionTypeDescriptorNode) leftTypeDesc));
        }
        if (leftTypeDesc.kind == SyntaxKind.INTERSECTION_TYPE_DESC) {
            return unionTypeDesc.replace(unionTypeDesc.leftTypeDesc,
                    replaceLeftMostIntersectionWithAIntersection(typeDesc, bitwiseAndToken,
                            (STIntersectionTypeDescriptorNode) leftTypeDesc));
        }
        leftTypeDesc = createIntersectionTypeDesc(typeDesc, bitwiseAndToken, leftTypeDesc);
        return unionTypeDesc.replace(unionTypeDesc.leftTypeDesc, leftTypeDesc);
    }

    /**
     * Recursively descend to the leftmost member of an intersection type-desc and join
     * <code>typeDesc</code> to it with a new intersection.
     *
     * @param typeDesc Type to splice in at the leftmost position
     * @param bitwiseAndToken Bitwise-and token joining the two
     * @param intersectionTypeDesc Intersection whose leftmost member is replaced
     * @return Rebuilt intersection type-desc
     */
    private STNode replaceLeftMostIntersectionWithAIntersection(STNode typeDesc, STNode bitwiseAndToken,
            STIntersectionTypeDescriptorNode intersectionTypeDesc) {
        STNode leftTypeDesc = intersectionTypeDesc.leftTypeDesc;
        if (leftTypeDesc.kind == SyntaxKind.INTERSECTION_TYPE_DESC) {
            return intersectionTypeDesc.replace(intersectionTypeDesc.leftTypeDesc,
                    replaceLeftMostIntersectionWithAIntersection(typeDesc, bitwiseAndToken,
                            (STIntersectionTypeDescriptorNode) leftTypeDesc));
        }
        leftTypeDesc = createIntersectionTypeDesc(typeDesc, bitwiseAndToken, leftTypeDesc);
        return intersectionTypeDesc.replace(intersectionTypeDesc.leftTypeDesc, leftTypeDesc);
    }

    /**
     * Apply the array dimension <code>[member]</code> to the rightmost component of
     * <code>lhsTypeDesc</code>, since <code>[]</code> binds tighter than <code>|</code>
     * and <code>&</code>.
     *
     * @param openBracket Open bracket token
     * @param member Array length member (may be empty)
     * @param closeBracket Close bracket token
     * @param lhsTypeDesc Type desc preceding the brackets
     * @return Type desc with the array dimension applied
     */
    private STNode getArrayTypeDesc(STNode openBracket, STNode member, STNode closeBracket, STNode lhsTypeDesc) {
        if (lhsTypeDesc.kind == SyntaxKind.UNION_TYPE_DESC) {
            STUnionTypeDescriptorNode unionTypeDesc = (STUnionTypeDescriptorNode) lhsTypeDesc;
            STNode middleTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket,
                    unionTypeDesc.rightTypeDesc);
            lhsTypeDesc = mergeTypesWithUnion(unionTypeDesc.leftTypeDesc, unionTypeDesc.pipeToken,
                    middleTypeDesc);
        } else if (lhsTypeDesc.kind == SyntaxKind.INTERSECTION_TYPE_DESC) {
            STIntersectionTypeDescriptorNode intersectionTypeDesc =
                    (STIntersectionTypeDescriptorNode) lhsTypeDesc;
            STNode middleTypeDesc =
                    getArrayTypeDesc(openBracket, member, closeBracket, intersectionTypeDesc.rightTypeDesc);
            lhsTypeDesc = mergeTypesWithIntersection(intersectionTypeDesc.leftTypeDesc,
                    intersectionTypeDesc.bitwiseAndToken, middleTypeDesc);
        } else {
            lhsTypeDesc = createArrayTypeDesc(lhsTypeDesc, openBracket, member, closeBracket);
        }
        return lhsTypeDesc;
    }

    /**
     * Parse union (|) or intersection (&) type operator.
     *
     * @return pipe or bitwise and token
     */
    private STNode parseUnionOrIntersectionToken() {
        STToken token = peek();
        if (token.kind == SyntaxKind.PIPE_TOKEN || token.kind == SyntaxKind.BITWISE_AND_TOKEN) {
            return consume();
        } else {
            recover(token, ParserRuleContext.UNION_OR_INTERSECTION_TOKEN);
            return parseUnionOrIntersectionToken();
        }
    }

    /**
     * Infer the type of the ambiguous bracketed list, based on the type of the member.
     *
     * @param memberNode Member node
     * @return Inferred type of the bracketed list
     */
    private SyntaxKind getBracketedListNodeType(STNode memberNode, boolean isTypedBindingPattern) {
        if (isEmpty(memberNode)) {
            // Empty brackets: cannot decide yet.
            return SyntaxKind.NONE;
        }
        if (isDefiniteTypeDesc(memberNode.kind)) {
            return SyntaxKind.TUPLE_TYPE_DESC;
        }
        switch (memberNode.kind) {
            case ASTERISK_LITERAL:
                // `T[*]` — inferred-length array.
                return SyntaxKind.ARRAY_TYPE_DESC;
            case CAPTURE_BINDING_PATTERN:
            case LIST_BINDING_PATTERN:
            case REST_BINDING_PATTERN:
            case MAPPING_BINDING_PATTERN:
            case WILDCARD_BINDING_PATTERN:
                return SyntaxKind.LIST_BINDING_PATTERN;
            case QUALIFIED_NAME_REFERENCE:
            case REST_TYPE:
                return SyntaxKind.TUPLE_TYPE_DESC;
            case NUMERIC_LITERAL:
                // `T[5]` — an array length in a BP context, else length-or-index.
                if (isTypedBindingPattern) {
                    return SyntaxKind.ARRAY_TYPE_DESC;
                }
                return SyntaxKind.ARRAY_TYPE_DESC_OR_MEMBER_ACCESS;
            case SIMPLE_NAME_REFERENCE:
            case BRACKETED_LIST:
            case MAPPING_BP_OR_MAPPING_CONSTRUCTOR:
                return SyntaxKind.NONE;
            case ERROR_CONSTRUCTOR:
                if (isTypedBindingPattern) {
                    return SyntaxKind.LIST_BINDING_PATTERN;
                }
                if (isPossibleErrorBindingPattern((STErrorConstructorExpressionNode) memberNode)) {
                    return SyntaxKind.NONE;
                }
                return SyntaxKind.INDEXED_EXPRESSION;
            default:
                if (isTypedBindingPattern) {
                    return SyntaxKind.NONE;
                }
                return SyntaxKind.INDEXED_EXPRESSION;
        }
    }

    /*
     * This section tries to break the ambiguity in parsing a statement that starts with a open-bracket.
     * The ambiguity lies in between:
     * 1) Assignment that starts with list binding pattern
     * 2) Var-decl statement that starts with tuple type
     * 3) Statement that starts with list constructor, such as sync-send, etc.
     */

    /**
     * Parse any statement that starts with an open-bracket.
     *
     * @param annots Annotations attached to the statement.
     * @return Parsed node
     */
    private STNode parseStatementStartsWithOpenBracket(STNode annots, boolean possibleMappingField) {
        startContext(ParserRuleContext.ASSIGNMENT_OR_VAR_DECL_STMT);
        return parseStatementStartsWithOpenBracket(annots, true, possibleMappingField);
    }

    /**
     * Parse a bracketed list that occurs as a member (not statement-start): no annotations,
     * not the root, and not a possible mapping field.
     *
     * @return Parsed node
     */
    private STNode parseMemberBracketedList() {
        STNode annots = STNodeFactory.createEmptyNodeList();
        return parseStatementStartsWithOpenBracket(annots, false, false);
    }

    /**
     * The bracketed list at the start of a statement can be one of the following.
     * 1) List binding pattern
     * 2) Tuple type
     * 3) List constructor
     *
     * @param isRoot Is this the root of the list
     * @return Parsed node
     */
    private STNode parseStatementStartsWithOpenBracket(STNode annots, boolean isRoot,
                                                       boolean possibleMappingField) {
        startContext(ParserRuleContext.STMT_START_BRACKETED_LIST);
        STNode openBracket = parseOpenBracket();
        List<STNode> memberList = new ArrayList<>();
        // Collect members until one of them disambiguates the whole construct; the
        // matching parseAs* method then takes over with what was collected so far.
        while (!isBracketedListEnd(peek().kind)) {
            STNode member = parseStatementStartBracketedListMember();
            SyntaxKind currentNodeType = getStmtStartBracketedListType(member);
            switch (currentNodeType) {
                case TUPLE_TYPE_DESC:
                    member = parseComplexTypeDescriptor(member, ParserRuleContext.TYPE_DESC_IN_TUPLE, false);
                    member = createMemberOrRestNode(STNodeFactory.createEmptyNodeList(), member);
                    return parseAsTupleTypeDesc(annots, openBracket, memberList, member, isRoot);
                case MEMBER_TYPE_DESC:
                case REST_TYPE:
                    return parseAsTupleTypeDesc(annots, openBracket, memberList, member, isRoot);
                case LIST_BINDING_PATTERN:
                    return parseAsListBindingPattern(openBracket, memberList, member, isRoot);
                case LIST_CONSTRUCTOR:
                    return parseAsListConstructor(openBracket, memberList, member, isRoot);
                case LIST_BP_OR_LIST_CONSTRUCTOR:
                    return parseAsListBindingPatternOrListConstructor(openBracket, memberList, member, isRoot);
                case TUPLE_TYPE_DESC_OR_LIST_CONST:
                    return parseAsTupleTypeDescOrListConstructor(annots, openBracket, memberList, member, isRoot);
                case NONE:
                default:
                    // Member does not disambiguate; keep collecting.
                    memberList.add(member);
                    break;
            }
            STNode memberEnd = parseBracketedListMemberEnd();
            if (memberEnd == null) {
                break;
            }
            memberList.add(memberEnd);
        }
        // Reached the close bracket still ambiguous: decide by what follows it.
        STNode closeBracket = parseCloseBracket();
        STNode bracketedList = parseStatementStartBracketedListRhs(annots, openBracket, memberList, closeBracket,
                isRoot, possibleMappingField);
        return bracketedList;
    }

    /**
     * Parse a member of a list-binding-pattern, tuple-type-desc, or
     * list-constructor-expr, when the parent is ambiguous.
     *
     * @return Parsed node
     */
    private STNode parseStatementStartBracketedListMember() {
        List<STNode> typeDescQualifiers = new ArrayList<>();
        return parseStatementStartBracketedListMember(typeDescQualifiers);
    }

    /**
     * Parse a member of an ambiguous statement-start bracketed list, dispatching on the
     * next token. Qualifiers collected so far are reported as invalid on paths where
     * they cannot apply.
     *
     * @param qualifiers Type-desc qualifiers preceding the member
     * @return Parsed member node
     */
    private STNode parseStatementStartBracketedListMember(List<STNode> qualifiers) {
        parseTypeDescQualifiers(qualifiers);
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case OPEN_BRACKET_TOKEN:
                reportInvalidQualifierList(qualifiers);
                return parseMemberBracketedList();
            case IDENTIFIER_TOKEN:
                reportInvalidQualifierList(qualifiers);
                STNode identifier = parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);
                if (isWildcardBP(identifier)) {
                    STNode varName = ((STSimpleNameReferenceNode) identifier).name;
                    return getWildcardBindingPattern(varName);
                }
                nextToken = peek();
                if (nextToken.kind == SyntaxKind.ELLIPSIS_TOKEN) {
                    // `T...` — a tuple rest descriptor.
                    STNode ellipsis = parseEllipsis();
                    return STNodeFactory.createRestDescriptorNode(identifier, ellipsis);
                }
                if (nextToken.kind != SyntaxKind.OPEN_BRACKET_TOKEN && isValidTypeContinuationToken(nextToken)) {
                    return parseComplexTypeDescriptor(identifier, ParserRuleContext.TYPE_DESC_IN_TUPLE, false);
                }
                return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, identifier, false, true);
            case OPEN_BRACE_TOKEN:
                reportInvalidQualifierList(qualifiers);
                return parseMappingBindingPatterOrMappingConstructor();
            case ERROR_KEYWORD:
                reportInvalidQualifierList(qualifiers);
                STToken nextNextToken = getNextNextToken();
                if (nextNextToken.kind == SyntaxKind.OPEN_PAREN_TOKEN ||
                        nextNextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {
                    return parseErrorBindingPatternOrErrorConstructor();
                }
                return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
            case ELLIPSIS_TOKEN:
                reportInvalidQualifierList(qualifiers);
                return parseRestBindingOrSpreadMember();
            case XML_KEYWORD:
            case STRING_KEYWORD:
                reportInvalidQualifierList(qualifiers);
                // Backtick after xml/string means a template expression, else a type desc.
                if (getNextNextToken().kind == SyntaxKind.BACKTICK_TOKEN) {
                    return parseExpression(false);
                }
                return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
            case TABLE_KEYWORD:
            case STREAM_KEYWORD:
                reportInvalidQualifierList(qualifiers);
                // `<` after table/stream means a parameterized type desc, else an expression.
                if (getNextNextToken().kind == SyntaxKind.LT_TOKEN) {
                    return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
                }
                return parseExpression(false);
            case OPEN_PAREN_TOKEN:
                return parseTypeDescOrExpr(qualifiers);
            case FUNCTION_KEYWORD:
                return parseAnonFuncExprOrFuncTypeDesc(qualifiers);
            case AT_TOKEN:
                return parseTupleMember();
            default:
                if (isValidExpressionStart(nextToken.kind, 1)) {
                    reportInvalidQualifierList(qualifiers);
                    return parseExpression(false);
                }
                if (isTypeStartingToken(nextToken.kind)) {
                    return parseTypeDescriptor(qualifiers, ParserRuleContext.TYPE_DESC_IN_TUPLE);
                }
                recover(peek(), ParserRuleContext.STMT_START_BRACKETED_LIST_MEMBER);
                return parseStatementStartBracketedListMember(qualifiers);
        }
    }

    /**
     * Parse an ellipsis-prefixed member: a rest-binding-pattern when followed by a simple
     * name reference, otherwise a spread member of a list constructor.
     *
     * @return Rest-binding-pattern or spread-member node
     */
    private STNode parseRestBindingOrSpreadMember() {
        STNode ellipsis = parseEllipsis();
        STNode expr = parseExpression();
        if (expr.kind == SyntaxKind.SIMPLE_NAME_REFERENCE) {
            return STNodeFactory.createRestBindingPatternNode(ellipsis, expr);
        } else {
            return STNodeFactory.createSpreadMemberNode(ellipsis, expr);
        }
    }

    /**
     * Continue parsing the ambiguous bracketed list as either a tuple-type-desc or a
     * list-constructor, deferring the final decision further.
     * NOTE(review): this method continues beyond this chunk of the file.
     *
     * @param annots Annotations attached to the statement
     * @param openBracket Open bracket token
     * @param memberList Members collected so far
     * @param member Current member that triggered this path
     * @param isRoot Is this the root of the list
     * @return Parsed node
     */
    private STNode parseAsTupleTypeDescOrListConstructor(STNode annots, STNode openBracket, List<STNode> memberList,
                                                         STNode member, boolean isRoot) {
        memberList.add(member);
        STNode memberEnd = parseBracketedListMemberEnd();
        STNode tupleTypeDescOrListCons;
        if (memberEnd == null) {
            STNode closeBracket = parseCloseBracket();
            tupleTypeDescOrListCons =
                    parseTupleTypeDescOrListConstructorRhs(openBracket, memberList, closeBracket, isRoot);
} else { memberList.add(memberEnd); tupleTypeDescOrListCons = parseTupleTypeDescOrListConstructor(annots, openBracket, memberList, isRoot); } return tupleTypeDescOrListCons; } /** * Parse tuple type desc or list constructor. * * @return Parsed node */ private STNode parseTupleTypeDescOrListConstructor(STNode annots) { startContext(ParserRuleContext.BRACKETED_LIST); STNode openBracket = parseOpenBracket(); List<STNode> memberList = new ArrayList<>(); return parseTupleTypeDescOrListConstructor(annots, openBracket, memberList, false); } private STNode parseTupleTypeDescOrListConstructor(STNode annots, STNode openBracket, List<STNode> memberList, boolean isRoot) { STToken nextToken = peek(); while (!isBracketedListEnd(nextToken.kind)) { STNode member = parseTupleTypeDescOrListConstructorMember(annots); SyntaxKind currentNodeType = getParsingNodeTypeOfTupleTypeOrListCons(member); switch (currentNodeType) { case LIST_CONSTRUCTOR: return parseAsListConstructor(openBracket, memberList, member, isRoot); case REST_TYPE: case MEMBER_TYPE_DESC: return parseAsTupleTypeDesc(annots, openBracket, memberList, member, isRoot); case TUPLE_TYPE_DESC: member = parseComplexTypeDescriptor(member, ParserRuleContext.TYPE_DESC_IN_TUPLE, false); member = createMemberOrRestNode(STNodeFactory.createEmptyNodeList(), member); return parseAsTupleTypeDesc(annots, openBracket, memberList, member, isRoot); case TUPLE_TYPE_DESC_OR_LIST_CONST: default: memberList.add(member); break; } STNode memberEnd = parseBracketedListMemberEnd(); if (memberEnd == null) { break; } memberList.add(memberEnd); nextToken = peek(); } STNode closeBracket = parseCloseBracket(); return parseTupleTypeDescOrListConstructorRhs(openBracket, memberList, closeBracket, isRoot); } private STNode parseTupleTypeDescOrListConstructorMember(STNode annots) { STToken nextToken = peek(); switch (nextToken.kind) { case OPEN_BRACKET_TOKEN: return parseTupleTypeDescOrListConstructor(annots); case IDENTIFIER_TOKEN: STNode identifier = 
parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF); if (peek().kind == SyntaxKind.ELLIPSIS_TOKEN) { STNode ellipsis = parseEllipsis(); return STNodeFactory.createRestDescriptorNode(identifier, ellipsis); } return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, identifier, false, false); case OPEN_BRACE_TOKEN: return parseMappingConstructorExpr(); case ERROR_KEYWORD: STToken nextNextToken = getNextNextToken(); if (nextNextToken.kind == SyntaxKind.OPEN_PAREN_TOKEN || nextNextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) { return parseErrorConstructorExpr(false); } return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE); case XML_KEYWORD: case STRING_KEYWORD: if (getNextNextToken().kind == SyntaxKind.BACKTICK_TOKEN) { return parseExpression(false); } return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE); case TABLE_KEYWORD: case STREAM_KEYWORD: if (getNextNextToken().kind == SyntaxKind.LT_TOKEN) { return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE); } return parseExpression(false); case OPEN_PAREN_TOKEN: return parseTypeDescOrExpr(); case AT_TOKEN: return parseTupleMember(); default: if (isValidExpressionStart(nextToken.kind, 1)) { return parseExpression(false); } if (isTypeStartingToken(nextToken.kind)) { return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE); } recover(peek(), ParserRuleContext.TUPLE_TYPE_DESC_OR_LIST_CONST_MEMBER); return parseTupleTypeDescOrListConstructorMember(annots); } } private SyntaxKind getParsingNodeTypeOfTupleTypeOrListCons(STNode memberNode) { return getStmtStartBracketedListType(memberNode); } private STNode parseTupleTypeDescOrListConstructorRhs(STNode openBracket, List<STNode> members, STNode closeBracket, boolean isRoot) { STNode tupleTypeOrListConst; switch (peek().kind) { case COMMA_TOKEN: case CLOSE_BRACE_TOKEN: case CLOSE_BRACKET_TOKEN: case PIPE_TOKEN: case BITWISE_AND_TOKEN: if (!isRoot) { endContext(); return new STAmbiguousCollectionNode(SyntaxKind.TUPLE_TYPE_DESC_OR_LIST_CONST, 
openBracket, members, closeBracket); } default: if (isValidExprRhsStart(peek().kind, closeBracket.kind) || (isRoot && peek().kind == SyntaxKind.EQUAL_TOKEN)) { members = getExpressionList(members, false); STNode memberExpressions = STNodeFactory.createNodeList(members); tupleTypeOrListConst = STNodeFactory.createListConstructorExpressionNode(openBracket, memberExpressions, closeBracket); break; } STNode memberTypeDescs = STNodeFactory.createNodeList(getTupleMemberList(members)); STNode tupleTypeDesc = STNodeFactory.createTupleTypeDescriptorNode(openBracket, memberTypeDescs, closeBracket); tupleTypeOrListConst = parseComplexTypeDescriptor(tupleTypeDesc, ParserRuleContext.TYPE_DESC_IN_TUPLE, false); } endContext(); if (!isRoot) { return tupleTypeOrListConst; } STNode annots = STNodeFactory.createEmptyNodeList(); return parseStmtStartsWithTupleTypeOrExprRhs(annots, tupleTypeOrListConst, isRoot); }
startContext(ParserRuleContext.XML_STEP_EXTEND);
    /**
     * Check whether the upcoming tokens look like the start of a service declaration.
     * NOTE(review): the {@code currentContext} parameter is unused here; and the
     * EQUAL/SEMICOLON/QUESTION_MARK cases return the same value as the default —
     * presumably kept explicit for readability; confirm before simplifying.
     *
     * @param currentContext Current parser context (unused)
     * @param lookahead Number of tokens already consumed by the caller's lookahead
     * @return <code>true</code> if this looks like a service declaration
     */
    private boolean isServiceDeclStart(ParserRuleContext currentContext, int lookahead) {
        switch (peek(lookahead + 1).kind) {
            case IDENTIFIER_TOKEN:
                SyntaxKind tokenAfterIdentifier = peek(lookahead + 2).kind;
                switch (tokenAfterIdentifier) {
                    case ON_KEYWORD: // service foo on ...
                    case OPEN_BRACE_TOKEN: // service foo { ...
                        return true;
                    case EQUAL_TOKEN: // service foo = ...
                    case SEMICOLON_TOKEN: // service foo;
                    case QUESTION_MARK_TOKEN: // service foo?
                        return false;
                    default:
                        return false;
                }
            case ON_KEYWORD:
                // Service type desc with no name: service on ...
                return true;
            default:
                return false;
        }
    }

    /**
     * Parse listener declaration, given the qualifier.
     * <p>
     * <code>
     * listener-decl := metadata [public] listener [type-descriptor] variable-name = expression ;
     * </code>
     *
     * @param metadata Metadata
     * @param qualifier Qualifier that precedes the listener declaration
     * @return Parsed node
     */
    private STNode parseListenerDeclaration(STNode metadata, STNode qualifier) {
        startContext(ParserRuleContext.LISTENER_DECL);
        STNode listenerKeyword = parseListenerKeyword();
        if (peek().kind == SyntaxKind.IDENTIFIER_TOKEN) {
            // The type descriptor is optional: the identifier may be the type name
            // or the variable name; resolve via the shared const/listener path.
            STNode listenerDecl =
                    parseConstantOrListenerDeclWithOptionalType(metadata, qualifier, listenerKeyword, true);
            endContext();
            return listenerDecl;
        }
        STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER);
        STNode variableName = parseVariableName();
        STNode equalsToken = parseAssignOp();
        STNode initializer = parseExpression();
        STNode semicolonToken = parseSemicolon();
        endContext();
        return STNodeFactory.createListenerDeclarationNode(metadata, qualifier, listenerKeyword, typeDesc,
                variableName, equalsToken, initializer, semicolonToken);
    }

    /**
     * Parse listener keyword.
     *
     * @return Parsed node
     */
    private STNode parseListenerKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.LISTENER_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.LISTENER_KEYWORD);
            return parseListenerKeyword();
        }
    }

    /**
     * Parse constant declaration, given the qualifier.
     * <p>
     * <code>module-const-decl := metadata [public] const [type-descriptor] identifier = const-expr ;</code>
     *
     * @param metadata Metadata
     * @param qualifier Qualifier that precedes the constant declaration
     * @return Parsed node
     */
    private STNode parseConstantDeclaration(STNode metadata, STNode qualifier) {
        startContext(ParserRuleContext.CONSTANT_DECL);
        STNode constKeyword = parseConstantKeyword();
        return parseConstDecl(metadata, qualifier, constKeyword);
    }

    /**
     * Parse the components that follow after the const keyword of a constant declaration.
     *
     * @param metadata Metadata
     * @param qualifier Qualifier that precedes the constant decl
     * @param constKeyword Const keyword
     * @return Parsed node
     */
    private STNode parseConstDecl(STNode metadata, STNode qualifier, STNode constKeyword) {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case ANNOTATION_KEYWORD:
                // "const annotation ..." is an annotation declaration, not a constant.
                endContext();
                return parseAnnotationDeclaration(metadata, qualifier, constKeyword);
            case IDENTIFIER_TOKEN:
                // Type descriptor is optional; disambiguate via the shared path.
                STNode constantDecl =
                        parseConstantOrListenerDeclWithOptionalType(metadata, qualifier, constKeyword, false);
                endContext();
                return constantDecl;
            default:
                if (isTypeStartingToken(nextToken.kind)) {
                    break;
                }
                recover(peek(), ParserRuleContext.CONST_DECL_TYPE);
                return parseConstDecl(metadata, qualifier, constKeyword);
        }
        STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER);
        STNode variableName = parseVariableName();
        STNode equalsToken = parseAssignOp();
        STNode initializer = parseExpression();
        STNode semicolonToken = parseSemicolon();
        endContext();
        return STNodeFactory.createConstantDeclarationNode(metadata, qualifier, constKeyword, typeDesc, variableName,
                equalsToken, initializer, semicolonToken);
    }

    /**
     * Parse a const/listener declaration where the type descriptor may be absent:
     * the first identifier may be either the (user-defined) type name or the variable name.
     */
    private STNode parseConstantOrListenerDeclWithOptionalType(STNode metadata, STNode qualifier, STNode constKeyword,
                                                               boolean isListener) {
        STNode varNameOrTypeName = parseStatementStartIdentifier();
        return parseConstantOrListenerDeclRhs(metadata, qualifier, constKeyword, varNameOrTypeName, isListener);
    }

    /**
     * Parse the component that follows the first identifier in a const decl. The identifier
     * can be either the type-name (a user defined type) or the var-name where the type-name
     * is not present.
     *
     * @param metadata Metadata
     * @param qualifier Qualifier that precedes the constant decl
     * @param keyword Keyword
     * @param typeOrVarName Identifier that follows the const keyword
     * @param isListener Whether this is a listener declaration
     * @return Parsed node
     */
    private STNode parseConstantOrListenerDeclRhs(STNode metadata, STNode qualifier, STNode keyword,
                                                  STNode typeOrVarName, boolean isListener) {
        if (typeOrVarName.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {
            // A qualified name can only be a type reference; the var name must follow.
            STNode type = typeOrVarName;
            STNode variableName = parseVariableName();
            return parseListenerOrConstRhs(metadata, qualifier, keyword, isListener, type, variableName);
        }
        STNode type;
        STNode variableName;
        switch (peek().kind) {
            case IDENTIFIER_TOKEN:
                // Two identifiers: the first was the type, the second is the var name.
                type = typeOrVarName;
                variableName = parseVariableName();
                break;
            case EQUAL_TOKEN:
                // Only one identifier: it was the var name; no type descriptor present.
                variableName = ((STSimpleNameReferenceNode) typeOrVarName).name;
                type = STNodeFactory.createEmptyNode();
                break;
            default:
                recover(peek(), ParserRuleContext.CONST_DECL_RHS);
                return parseConstantOrListenerDeclRhs(metadata, qualifier, keyword, typeOrVarName, isListener);
        }
        return parseListenerOrConstRhs(metadata, qualifier, keyword, isListener, type, variableName);
    }

    /**
     * Parse the shared "= expression ;" tail and build either a listener or a
     * constant declaration node.
     */
    private STNode parseListenerOrConstRhs(STNode metadata, STNode qualifier, STNode keyword, boolean isListener,
                                           STNode type, STNode variableName) {
        STNode equalsToken = parseAssignOp();
        STNode initializer = parseExpression();
        STNode semicolonToken = parseSemicolon();
        if (isListener) {
            return STNodeFactory.createListenerDeclarationNode(metadata, qualifier, keyword, type, variableName,
                    equalsToken, initializer, semicolonToken);
        }
        return STNodeFactory.createConstantDeclarationNode(metadata, qualifier, keyword, type, variableName,
                equalsToken, initializer, semicolonToken);
    }

    /**
     * Parse const keyword.
     *
     * @return Parsed node
     */
    private STNode parseConstantKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.CONST_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.CONST_KEYWORD);
            return parseConstantKeyword();
        }
    }

    /**
     * Parse typeof expression.
     * <p>
     * <code>
     * typeof-expr := typeof expression
     * </code>
     *
     * @param isRhsExpr Whether this expression occurs on the right-hand side of a statement
     * @param isInConditionalExpr Whether this occurs inside a conditional expression
     * @return Typeof expression node
     */
    private STNode parseTypeofExpression(boolean isRhsExpr, boolean isInConditionalExpr) {
        STNode typeofKeyword = parseTypeofKeyword();
        // The operand binds with unary precedence.
        STNode expr = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false, isInConditionalExpr);
        return STNodeFactory.createTypeofExpressionNode(typeofKeyword, expr);
    }

    /**
     * Parse typeof-keyword.
     *
     * @return Typeof-keyword node
     */
    private STNode parseTypeofKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.TYPEOF_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.TYPEOF_KEYWORD);
            return parseTypeofKeyword();
        }
    }

    /**
     * Parse optional type descriptor given the type.
     * <p>
     * <code>optional-type-descriptor := type-descriptor `?`</code>
     * </p>
     *
     * @param typeDescriptorNode Preceding type descriptor
     * @return Parsed node
     */
    private STNode parseOptionalTypeDescriptor(STNode typeDescriptorNode) {
        startContext(ParserRuleContext.OPTIONAL_TYPE_DESCRIPTOR);
        STNode questionMarkToken = parseQuestionMark();
        endContext();
        return createOptionalTypeDesc(typeDescriptorNode, questionMarkToken);
    }

    /**
     * Attach a "?" to a type descriptor, honoring precedence: for union and intersection
     * types the "?" binds to the right-most operand, so recurse into the right side
     * and re-merge rather than wrapping the whole type.
     */
    private STNode createOptionalTypeDesc(STNode typeDescNode, STNode questionMarkToken) {
        if (typeDescNode.kind == SyntaxKind.UNION_TYPE_DESC) {
            STUnionTypeDescriptorNode unionTypeDesc = (STUnionTypeDescriptorNode) typeDescNode;
            STNode middleTypeDesc = createOptionalTypeDesc(unionTypeDesc.rightTypeDesc, questionMarkToken);
            typeDescNode = mergeTypesWithUnion(unionTypeDesc.leftTypeDesc, unionTypeDesc.pipeToken, middleTypeDesc);
        } else if (typeDescNode.kind == SyntaxKind.INTERSECTION_TYPE_DESC) {
            STIntersectionTypeDescriptorNode intersectionTypeDesc = (STIntersectionTypeDescriptorNode) typeDescNode;
            STNode middleTypeDesc = createOptionalTypeDesc(intersectionTypeDesc.rightTypeDesc, questionMarkToken);
            typeDescNode = mergeTypesWithIntersection(intersectionTypeDesc.leftTypeDesc,
                    intersectionTypeDesc.bitwiseAndToken, middleTypeDesc);
        } else {
            typeDescNode = validateForUsageOfVar(typeDescNode);
            typeDescNode = STNodeFactory.createOptionalTypeDescriptorNode(typeDescNode, questionMarkToken);
        }
        return typeDescNode;
    }

    /**
     * Parse unary expression.
     * <p>
     * <code>
     * unary-expr := + expression | - expression | ~ expression | ! expression
     * </code>
     *
     * @param isRhsExpr Whether this expression occurs on the right-hand side of a statement
     * @param isInConditionalExpr Whether this occurs inside a conditional expression
     * @return Unary expression node
     */
    private STNode parseUnaryExpression(boolean isRhsExpr, boolean isInConditionalExpr) {
        STNode unaryOperator = parseUnaryOperator();
        // The operand binds with unary precedence.
        STNode expr = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false, isInConditionalExpr);
        return STNodeFactory.createUnaryExpressionNode(unaryOperator, expr);
    }

    /**
     * Parse unary operator.
* <code>UnaryOperator := + | - | ~ | !</code> * * @return Parsed node */ private STNode parseUnaryOperator() { STToken token = peek(); if (isUnaryOperator(token.kind)) { return consume(); } else { recover(token, ParserRuleContext.UNARY_OPERATOR); return parseUnaryOperator(); } } /** * Check whether the given token kind is a unary operator. * * @param kind STToken kind * @return <code>true</code> if the token kind refers to a unary operator. <code>false</code> otherwise */ private boolean isUnaryOperator(SyntaxKind kind) { switch (kind) { case PLUS_TOKEN: case MINUS_TOKEN: case NEGATION_TOKEN: case EXCLAMATION_MARK_TOKEN: return true; default: return false; } } /** * Parse array type descriptor. * <p> * <code> * array-type-descriptor := array-member-type-descriptor [ [ array-length ] ] * array-member-type-descriptor := type-descriptor * array-length := * int-literal * | constant-reference-expr * | inferred-array-length * inferred-array-length := * * </code> * </p> * * @param memberTypeDesc * @return Parsed Node */ private STNode parseArrayTypeDescriptor(STNode memberTypeDesc) { startContext(ParserRuleContext.ARRAY_TYPE_DESCRIPTOR); STNode openBracketToken = parseOpenBracket(); STNode arrayLengthNode = parseArrayLength(); STNode closeBracketToken = parseCloseBracket(); endContext(); return createArrayTypeDesc(memberTypeDesc, openBracketToken, arrayLengthNode, closeBracketToken); } private STNode createArrayTypeDesc(STNode memberTypeDesc, STNode openBracketToken, STNode arrayLengthNode, STNode closeBracketToken) { memberTypeDesc = validateForUsageOfVar(memberTypeDesc); if (arrayLengthNode != null) { switch (arrayLengthNode.kind) { case ASTERISK_LITERAL: case SIMPLE_NAME_REFERENCE: case QUALIFIED_NAME_REFERENCE: break; case NUMERIC_LITERAL: SyntaxKind numericLiteralKind = arrayLengthNode.childInBucket(0).kind; if (numericLiteralKind == SyntaxKind.DECIMAL_INTEGER_LITERAL_TOKEN || numericLiteralKind == SyntaxKind.HEX_INTEGER_LITERAL_TOKEN) { break; } default: 
openBracketToken = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(openBracketToken, arrayLengthNode, DiagnosticErrorCode.ERROR_INVALID_ARRAY_LENGTH); arrayLengthNode = STNodeFactory.createEmptyNode(); } } List<STNode> arrayDimensions = new ArrayList(); if (memberTypeDesc.kind == SyntaxKind.ARRAY_TYPE_DESC) { STArrayTypeDescriptorNode innerArrayType = (STArrayTypeDescriptorNode) memberTypeDesc; STNode innerArrayDimensions = innerArrayType.dimensions; int dimensionCount = innerArrayDimensions.bucketCount(); for (int i = 0; i < dimensionCount; i++) { arrayDimensions.add(innerArrayDimensions.childInBucket(i)); } memberTypeDesc = innerArrayType.memberTypeDesc; } STNode arrayDimension = STNodeFactory.createArrayDimensionNode(openBracketToken, arrayLengthNode, closeBracketToken); arrayDimensions.add(arrayDimension); STNode arrayDimensionNodeList = STNodeFactory.createNodeList(arrayDimensions); return STNodeFactory.createArrayTypeDescriptorNode(memberTypeDesc, arrayDimensionNodeList); } /** * Parse array length. * <p> * <code> * array-length := * int-literal * | constant-reference-expr * | inferred-array-length * constant-reference-expr := variable-reference-expr * </code> * </p> * * @return Parsed array length */ private STNode parseArrayLength() { STToken token = peek(); switch (token.kind) { case DECIMAL_INTEGER_LITERAL_TOKEN: case HEX_INTEGER_LITERAL_TOKEN: case ASTERISK_TOKEN: return parseBasicLiteral(); case CLOSE_BRACKET_TOKEN: return STNodeFactory.createEmptyNode(); case IDENTIFIER_TOKEN: return parseQualifiedIdentifier(ParserRuleContext.ARRAY_LENGTH); default: recover(token, ParserRuleContext.ARRAY_LENGTH); return parseArrayLength(); } } /** * Parse annotations. * <p> * <i>Note: In the <a href="https: * annotations-list is specified as one-or-more annotations. And the usage is marked as * optional annotations-list. 
However, for the consistency of the tree, here we make the * annotation-list as zero-or-more annotations, and the usage is not-optional.</i> * <p> * <code>annots := annotation*</code> * * @return Parsed node */ private STNode parseOptionalAnnotations() { startContext(ParserRuleContext.ANNOTATIONS); List<STNode> annotList = new ArrayList<>(); STToken nextToken = peek(); while (nextToken.kind == SyntaxKind.AT_TOKEN) { annotList.add(parseAnnotation()); nextToken = peek(); } endContext(); return STNodeFactory.createNodeList(annotList); } /** * Parse annotation list with at least one annotation. * * @return Annotation list */ private STNode parseAnnotations() { startContext(ParserRuleContext.ANNOTATIONS); List<STNode> annotList = new ArrayList<>(); annotList.add(parseAnnotation()); while (peek().kind == SyntaxKind.AT_TOKEN) { annotList.add(parseAnnotation()); } endContext(); return STNodeFactory.createNodeList(annotList); } /** * Parse annotation attachment. * <p> * <code>annotation := @ annot-tag-reference annot-value</code> * * @return Parsed node */ private STNode parseAnnotation() { STNode atToken = parseAtToken(); STNode annotReference; if (isPredeclaredIdentifier(peek().kind)) { annotReference = parseQualifiedIdentifier(ParserRuleContext.ANNOT_REFERENCE); } else { annotReference = STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN); annotReference = STNodeFactory.createSimpleNameReferenceNode(annotReference); } STNode annotValue; if (peek().kind == SyntaxKind.OPEN_BRACE_TOKEN) { annotValue = parseMappingConstructorExpr(); } else { annotValue = STNodeFactory.createEmptyNode(); } return STNodeFactory.createAnnotationNode(atToken, annotReference, annotValue); } /** * Parse '@' token. * * @return Parsed node */ private STNode parseAtToken() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.AT_TOKEN) { return consume(); } else { recover(nextToken, ParserRuleContext.AT); return parseAtToken(); } } /** * Parse metadata. 
     * Metadata consists of an optional doc string and an annotations list.
     * <p>
     * <code>metadata := [DocumentationString] annots</code>
     *
     * @return Parsed node
     */
    private STNode parseMetaData() {
        STNode docString;
        STNode annotations;
        switch (peek().kind) {
            case DOCUMENTATION_STRING:
                docString = parseMarkdownDocumentation();
                annotations = parseOptionalAnnotations();
                break;
            case AT_TOKEN:
                docString = STNodeFactory.createEmptyNode();
                annotations = parseOptionalAnnotations();
                break;
            default:
                // No metadata present.
                return STNodeFactory.createEmptyNode();
        }
        return createMetadata(docString, annotations);
    }

    /**
     * Create metadata node.
     *
     * @return A metadata node, or an empty node when both parts are absent
     */
    private STNode createMetadata(STNode docString, STNode annotations) {
        if (annotations == null && docString == null) {
            return STNodeFactory.createEmptyNode();
        } else {
            return STNodeFactory.createMetadataNode(docString, annotations);
        }
    }

    /**
     * Parse type test expression.
     * <code>
     * type-test-expr := expression (is | !is) type-descriptor
     * </code>
     *
     * @param lhsExpr Preceding expression of the is expression
     * @param isInConditionalExpr Whether this occurs inside a conditional expression
     * @return Is expression node
     */
    private STNode parseTypeTestExpression(STNode lhsExpr, boolean isInConditionalExpr) {
        STNode isOrNotIsKeyword = parseIsOrNotIsKeyword();
        STNode typeDescriptor = parseTypeDescriptorInExpression(isInConditionalExpr);
        return STNodeFactory.createTypeTestExpressionNode(lhsExpr, isOrNotIsKeyword, typeDescriptor);
    }

    /**
     * Parse `is` keyword or `!is` keyword.
     *
     * @return is-keyword or not-is-keyword node
     */
    private STNode parseIsOrNotIsKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.IS_KEYWORD || token.kind == SyntaxKind.NOT_IS_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.IS_KEYWORD);
            return parseIsOrNotIsKeyword();
        }
    }

    /**
     * Parse local type definition statement.
     * <code>local-type-defn-stmt := [annots] type identifier type-descriptor ;</code>
     *
     * @param annots Annotations attached to the statement
     * @return Local type definition statement
     */
    private STNode parseLocalTypeDefinitionStatement(STNode annots) {
        startContext(ParserRuleContext.LOCAL_TYPE_DEFINITION_STMT);
        STNode typeKeyword = parseTypeKeyword();
        STNode typeName = parseTypeName();
        STNode typeDescriptor = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_DEF);
        STNode semicolon = parseSemicolon();
        endContext();
        return STNodeFactory.createLocalTypeDefinitionStatementNode(annots, typeKeyword, typeName, typeDescriptor,
                semicolon);
    }

    /**
     * Parse statement which only consists of an action or expression.
     *
     * @param annots Annotations
     * @return Statement node
     */
    private STNode parseExpressionStatement(STNode annots) {
        startContext(ParserRuleContext.EXPRESSION_STATEMENT);
        STNode expression = parseActionOrExpressionInLhs(annots);
        return getExpressionAsStatement(expression);
    }

    /**
     * Parse statements that start with an expression.
     *
     * @return Statement node
     */
    private STNode parseStatementStartWithExpr(STNode annots) {
        startContext(ParserRuleContext.AMBIGUOUS_STMT);
        STNode expr = parseActionOrExpressionInLhs(annots);
        return parseStatementStartWithExprRhs(expr);
    }

    /**
     * Parse the component that follows the expression, at the beginning of a statement.
     *
     * @param expression Action or expression in LHS
     * @return Statement node
     */
    private STNode parseStatementStartWithExprRhs(STNode expression) {
        SyntaxKind nextTokenKind = peek().kind;
        if (isAction(expression) || nextTokenKind == SyntaxKind.SEMICOLON_TOKEN) {
            // Actions and "expr;" cannot continue into an assignment.
            return getExpressionAsStatement(expression);
        }
        switch (nextTokenKind) {
            case EQUAL_TOKEN:
                switchContext(ParserRuleContext.ASSIGNMENT_STMT);
                return parseAssignmentStmtRhs(expression);
            case IDENTIFIER_TOKEN:
                // Falls through to the default recovery path.
            default:
                if (isCompoundAssignment(nextTokenKind)) {
                    return parseCompoundAssignmentStmtRhs(expression);
                }
                // Pick the recovery context based on whether the LHS could have been
                // a complete expression statement on its own.
                ParserRuleContext context;
                if (isPossibleExpressionStatement(expression)) {
                    context = ParserRuleContext.EXPR_STMT_RHS;
                } else {
                    context = ParserRuleContext.STMT_START_WITH_EXPR_RHS;
                }
                recover(peek(), context);
                return parseStatementStartWithExprRhs(expression);
        }
    }

    /**
     * Check whether the given expression kind can stand alone as an expression statement.
     */
    private boolean isPossibleExpressionStatement(STNode expression) {
        switch (expression.kind) {
            case METHOD_CALL:
            case FUNCTION_CALL:
            case CHECK_EXPRESSION:
            case REMOTE_METHOD_CALL_ACTION:
            case CHECK_ACTION:
            case BRACED_ACTION:
            case START_ACTION:
            case TRAP_ACTION:
            case FLUSH_ACTION:
            case ASYNC_SEND_ACTION:
            case SYNC_SEND_ACTION:
            case RECEIVE_ACTION:
            case WAIT_ACTION:
            case QUERY_ACTION:
            case COMMIT_ACTION:
                return true;
            default:
                return false;
        }
    }

    /**
     * Wrap an already-parsed expression/action as a statement. Kinds that are not
     * valid as statements become an INVALID_EXPRESSION_STATEMENT with a diagnostic.
     */
    private STNode getExpressionAsStatement(STNode expression) {
        switch (expression.kind) {
            case METHOD_CALL:
            case FUNCTION_CALL:
                return parseCallStatement(expression);
            case CHECK_EXPRESSION:
                return parseCheckStatement(expression);
            case REMOTE_METHOD_CALL_ACTION:
            case CHECK_ACTION:
            case BRACED_ACTION:
            case START_ACTION:
            case TRAP_ACTION:
            case FLUSH_ACTION:
            case ASYNC_SEND_ACTION:
            case SYNC_SEND_ACTION:
            case RECEIVE_ACTION:
            case WAIT_ACTION:
            case QUERY_ACTION:
            case COMMIT_ACTION:
            case CLIENT_RESOURCE_ACCESS_ACTION:
                return parseActionStatement(expression);
            default:
                // Not a valid statement kind: still consume the semicolon, but mark
                // the whole statement as invalid.
                STNode semicolon = parseSemicolon();
                endContext();
                expression = getExpression(expression);
                STNode exprStmt = STNodeFactory.createExpressionStatementNode(
                        SyntaxKind.INVALID_EXPRESSION_STATEMENT, expression, semicolon);
                exprStmt = SyntaxErrors.addDiagnostic(exprStmt,
                        DiagnosticErrorCode.ERROR_INVALID_EXPRESSION_STATEMENT);
                return exprStmt;
        }
    }

    /**
     * Re-interpret an indexed expression (e.g. <code>T[5]</code>) as an array type
     * descriptor, validating the length expression the same way createArrayTypeDesc does.
     */
    private STNode parseArrayTypeDescriptorNode(STIndexedExpressionNode indexedExpr) {
        STNode memberTypeDesc = getTypeDescFromExpr(indexedExpr.containerExpression);
        STNodeList lengthExprs = (STNodeList) indexedExpr.keyExpression;
        if (lengthExprs.isEmpty()) {
            return createArrayTypeDesc(memberTypeDesc, indexedExpr.openBracket, STNodeFactory.createEmptyNode(),
                    indexedExpr.closeBracket);
        }
        STNode lengthExpr = lengthExprs.get(0);
        switch (lengthExpr.kind) {
            case SIMPLE_NAME_REFERENCE:
                STSimpleNameReferenceNode nameRef = (STSimpleNameReferenceNode) lengthExpr;
                if (nameRef.name.isMissing()) {
                    // Missing name means no usable length: treat as "[]".
                    return createArrayTypeDesc(memberTypeDesc, indexedExpr.openBracket,
                            STNodeFactory.createEmptyNode(), indexedExpr.closeBracket);
                }
                break;
            case ASTERISK_LITERAL:
            case QUALIFIED_NAME_REFERENCE:
                break;
            case NUMERIC_LITERAL:
                SyntaxKind innerChildKind = lengthExpr.childInBucket(0).kind;
                if (innerChildKind == SyntaxKind.DECIMAL_INTEGER_LITERAL_TOKEN ||
                        innerChildKind == SyntaxKind.HEX_INTEGER_LITERAL_TOKEN) {
                    break;
                }
                // Intentional fall through: non-integer numeric literals are invalid lengths.
            default:
                // Invalid length: attach it to the open bracket as invalid minutiae.
                STNode newOpenBracketWithDiagnostics = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(
                        indexedExpr.openBracket, lengthExpr, DiagnosticErrorCode.ERROR_INVALID_ARRAY_LENGTH);
                indexedExpr = indexedExpr.replace(indexedExpr.openBracket, newOpenBracketWithDiagnostics);
                lengthExpr = STNodeFactory.createEmptyNode();
        }
        return createArrayTypeDesc(memberTypeDesc, indexedExpr.openBracket, lengthExpr, indexedExpr.closeBracket);
    }

    /**
     * <p>
     * Parse call statement, given the call expression.
     * </p>
     * <code>
     * call-stmt := call-expr ;
     * <br/>
     * call-expr := function-call-expr | method-call-expr | checking-keyword call-expr
     * </code>
     *
     * @param expression Call expression associated with the call statement
     * @return Call statement node
     */
    private STNode parseCallStatement(STNode expression) {
        return parseCallStatementOrCheckStatement(expression);
    }

    /**
     * <p>
     * Parse checking statement.
     * </p>
     * <code>
     * checking-stmt := checking-expr ;
     * <br/>
     * checking-expr := checking-keyword expr ;
     * </code>
     *
     * @param expression Checking expression associated with the checking statement
     * @return Checking statement node
     */
    private STNode parseCheckStatement(STNode expression) {
        return parseCallStatementOrCheckStatement(expression);
    }

    /**
     * Shared tail of call/check statements: consume the semicolon, pop the statement
     * context, and wrap the expression as a CALL_STATEMENT.
     */
    private STNode parseCallStatementOrCheckStatement(STNode expression) {
        STNode semicolon = parseSemicolon();
        endContext();
        return STNodeFactory.createExpressionStatementNode(SyntaxKind.CALL_STATEMENT, expression, semicolon);
    }

    /**
     * Wrap an action as an ACTION_STATEMENT, consuming the terminating semicolon.
     */
    private STNode parseActionStatement(STNode action) {
        STNode semicolon = parseSemicolon();
        endContext();
        return STNodeFactory.createExpressionStatementNode(SyntaxKind.ACTION_STATEMENT, action, semicolon);
    }

    /**
     * Parse client resource access action, given the starting expression.
     * <br/><br/>
     * <code>
     * client-resource-access-action := expression "-&gt;" "/" [resource-access-path] ["." method-name]
     * ["(" arg-list ")"]
     * </code>
     *
     * @param expression Expression
     * @param rightArrow Right arrow token
     * @param slashToken Slash token
     * @param isRhsExpr Whether this occurs on the right-hand side of a statement
     * @param isInMatchGuard Whether this occurs inside a match guard
     * @return Parsed node
     */
    private STNode parseClientResourceAccessAction(STNode expression, STNode rightArrow, STNode slashToken,
                                                   boolean isRhsExpr, boolean isInMatchGuard) {
        startContext(ParserRuleContext.CLIENT_RESOURCE_ACCESS_ACTION);
        STNode resourceAccessPath = parseOptionalResourceAccessPath(isRhsExpr, isInMatchGuard);
        STNode resourceAccessMethodDot = parseOptionalResourceAccessMethodDot(isRhsExpr, isInMatchGuard);
        STNode resourceAccessMethodName = STNodeFactory.createEmptyNode();
        if (resourceAccessMethodDot != null) {
            // A dot was present, so a method name must follow.
            resourceAccessMethodName = STNodeFactory.createSimpleNameReferenceNode(parseFunctionName());
        }
        STNode resourceMethodCallArgList = parseOptionalResourceAccessActionArgList(isRhsExpr, isInMatchGuard);
        endContext();
        return STNodeFactory.createClientResourceAccessActionNode(expression, rightArrow, slashToken,
                resourceAccessPath, resourceAccessMethodDot, resourceAccessMethodName, resourceMethodCallArgList);
    }

    /**
     * Parse the optional resource-access-path. Returns an empty node list when the
     * path is absent (next token starts the method call or ends the action).
     */
    private STNode parseOptionalResourceAccessPath(boolean isRhsExpr, boolean isInMatchGuard) {
        STNode resourceAccessPath = STNodeFactory.createEmptyNodeList();
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case IDENTIFIER_TOKEN:
            case OPEN_BRACKET_TOKEN:
                resourceAccessPath = parseResourceAccessPath(isRhsExpr, isInMatchGuard);
                break;
            case DOT_TOKEN:
            case OPEN_PAREN_TOKEN:
                // No path: goes straight to ".method" or the arg list.
                break;
            default:
                if (isEndOfActionOrExpression(nextToken, isRhsExpr, isInMatchGuard)) {
                    break;
                }
                recover(nextToken, ParserRuleContext.OPTIONAL_RESOURCE_ACCESS_PATH);
                return parseOptionalResourceAccessPath(isRhsExpr, isInMatchGuard);
        }
        return resourceAccessPath;
    }

    /**
     * Parse the optional "." before the resource access method name. Returns an empty
     * node when no dot is present.
     */
    private STNode parseOptionalResourceAccessMethodDot(boolean isRhsExpr, boolean isInMatchGuard) {
        STNode dotToken = STNodeFactory.createEmptyNode();
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case DOT_TOKEN:
                dotToken = consume();
                break;
            case OPEN_PAREN_TOKEN:
                // No method name: goes straight to the arg list.
                break;
            default:
                if (isEndOfActionOrExpression(nextToken, isRhsExpr, isInMatchGuard)) {
                    break;
                }
                recover(nextToken, ParserRuleContext.OPTIONAL_RESOURCE_ACCESS_METHOD);
                return parseOptionalResourceAccessMethodDot(isRhsExpr, isInMatchGuard);
        }
        return dotToken;
    }

    /**
     * Parse the optional parenthesized argument list of a resource access action.
     * Returns an empty node when the arg list is absent.
     */
    private STNode parseOptionalResourceAccessActionArgList(boolean isRhsExpr, boolean isInMatchGuard) {
        STNode argList = STNodeFactory.createEmptyNode();
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case OPEN_PAREN_TOKEN:
                argList = parseParenthesizedArgList();
                break;
            default:
                if (isEndOfActionOrExpression(nextToken, isRhsExpr, isInMatchGuard)) {
                    break;
                }
                recover(nextToken, ParserRuleContext.OPTIONAL_RESOURCE_ACCESS_ACTION_ARG_LIST);
                return parseOptionalResourceAccessActionArgList(isRhsExpr, isInMatchGuard);
        }
        return argList;
    }

    /**
     * Parse resource access path.
     * <br/><br/>
     * <code>
     * resource-access-path := resource-access-segments ["/" resource-access-rest-segment]
     * | resource-access-rest-segment
     * <br/><br/>
     * resource-access-segments := resource-access-segment ("/" resource-access-segment)*
     * <br/><br/>
     * resource-access-segment := resource-path-segment-name | computed-resource-access-segment
     * <br/><br/>
     * resource-path-segment-name := identifier
     * </code>
     *
     * @return Parsed node
     */
    private STNode parseResourceAccessPath(boolean isRhsExpr, boolean isInMatchGuard) {
        List<STNode> pathSegmentList = new ArrayList<>();
        STNode pathSegment = parseResourceAccessSegment();
        pathSegmentList.add(pathSegment);
        STNode leadingSlash;
        STNode previousPathSegmentNode = pathSegment;
        while (!isEndOfResourceAccessPathSegments(peek(), isRhsExpr, isInMatchGuard)) {
            leadingSlash = parseResourceAccessSegmentRhs(isRhsExpr, isInMatchGuard);
            if (leadingSlash == null) {
                break;
            }
            pathSegment = parseResourceAccessSegment();
            if (previousPathSegmentNode.kind == SyntaxKind.RESOURCE_ACCESS_REST_SEGMENT) {
                // Nothing may follow a rest segment: fold the slash and segment into the
                // previous node as invalid minutiae with a diagnostic.
                updateLastNodeInListWithInvalidNode(pathSegmentList, leadingSlash, null);
                updateLastNodeInListWithInvalidNode(pathSegmentList, pathSegment,
                        DiagnosticErrorCode.RESOURCE_ACCESS_SEGMENT_IS_NOT_ALLOWED_AFTER_REST_SEGMENT);
            } else {
                pathSegmentList.add(leadingSlash);
                pathSegmentList.add(pathSegment);
                previousPathSegmentNode = pathSegment;
            }
        }
        return STNodeFactory.createNodeList(pathSegmentList);
    }

    /**
     * Parse a single resource access segment: an identifier, or a bracketed
     * (computed or rest) segment.
     */
    private STNode parseResourceAccessSegment() {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case IDENTIFIER_TOKEN:
                return consume();
            case OPEN_BRACKET_TOKEN:
                return parseComputedOrResourceAccessRestSegment(consume());
            default:
                recover(nextToken, ParserRuleContext.RESOURCE_ACCESS_PATH_SEGMENT);
                return parseResourceAccessSegment();
        }
    }

    /**
     * Parse computed resource segment or resource access rest segment.
     * <code>
     * <br/>
     * computed-resource-access-segment := "[" expression "]"
     * <br/>
     * resource-access-rest-segment := "[" "..." expression "]"
     * </code>
     *
     * @param openBracket Open bracket token
     * @return Parsed node
     */
    private STNode parseComputedOrResourceAccessRestSegment(STNode openBracket) {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case ELLIPSIS_TOKEN:
                STNode ellipsisToken = consume();
                STNode expression = parseExpression();
                STNode closeBracketToken = parseCloseBracket();
                return STNodeFactory.createResourceAccessRestSegmentNode(openBracket, ellipsisToken, expression,
                        closeBracketToken);
            default:
                if (isValidExprStart(nextToken.kind)) {
                    expression = parseExpression();
                    closeBracketToken = parseCloseBracket();
                    return STNodeFactory.createComputedResourceAccessSegmentNode(openBracket, expression,
                            closeBracketToken);
                }
                recover(nextToken, ParserRuleContext.COMPUTED_SEGMENT_OR_REST_SEGMENT);
                return parseComputedOrResourceAccessRestSegment(openBracket);
        }
    }

    /**
     * Parse resource access segment end.
* * @return Parsed node */ private STNode parseResourceAccessSegmentRhs(boolean isRhsExpr, boolean isInMatchGuard) { STToken nextToken = peek(); switch (nextToken.kind) { case SLASH_TOKEN: return consume(); default: if (isEndOfResourceAccessPathSegments(nextToken, isRhsExpr, isInMatchGuard)) { return null; } recover(nextToken, ParserRuleContext.RESOURCE_ACCESS_SEGMENT_RHS); return parseResourceAccessSegmentRhs(isRhsExpr, isInMatchGuard); } } private boolean isEndOfResourceAccessPathSegments(STToken nextToken, boolean isRhsExpr, boolean isInMatchGuard) { switch (nextToken.kind) { case DOT_TOKEN: case OPEN_PAREN_TOKEN: return true; default: return isEndOfActionOrExpression(nextToken, isRhsExpr, isInMatchGuard); } } private STNode parseRemoteMethodCallOrClientResourceAccessOrAsyncSendAction(STNode expression, boolean isRhsExpr, boolean isInMatchGuard) { STNode rightArrow = parseRightArrow(); return parseClientResourceAccessOrAsyncSendActionRhs(expression, rightArrow, isRhsExpr, isInMatchGuard); } private STNode parseClientResourceAccessOrAsyncSendActionRhs(STNode expression, STNode rightArrow, boolean isRhsExpr, boolean isInMatchGuard) { STNode name; STToken nextToken = peek(); switch (nextToken.kind) { case FUNCTION_KEYWORD: STNode functionKeyword = consume(); name = STNodeFactory.createSimpleNameReferenceNode(functionKeyword); return parseAsyncSendAction(expression, rightArrow, name); case CONTINUE_KEYWORD: case COMMIT_KEYWORD: name = getKeywordAsSimpleNameRef(); break; case SLASH_TOKEN: STNode slashToken = consume(); return parseClientResourceAccessAction(expression, rightArrow, slashToken, isRhsExpr, isInMatchGuard); default: if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) { STToken nextNextToken = getNextNextToken(); if (nextNextToken.kind == SyntaxKind.OPEN_PAREN_TOKEN || isEndOfActionOrExpression(nextNextToken, isRhsExpr, isInMatchGuard) || nextToken.isMissing()) { name = STNodeFactory.createSimpleNameReferenceNode(parseFunctionName()); break; } } STToken 
token = peek(); Solution solution = recover(token, ParserRuleContext.REMOTE_OR_RESOURCE_CALL_OR_ASYNC_SEND_RHS); if (solution.action == Action.KEEP) { name = STNodeFactory.createSimpleNameReferenceNode(parseFunctionName()); break; } return parseClientResourceAccessOrAsyncSendActionRhs(expression, rightArrow, isRhsExpr, isInMatchGuard); } return parseRemoteCallOrAsyncSendEnd(expression, rightArrow, name); } private STNode parseRemoteCallOrAsyncSendEnd(STNode expression, STNode rightArrow, STNode name) { STToken nextToken = peek(); switch (nextToken.kind) { case OPEN_PAREN_TOKEN: return parseRemoteMethodCallAction(expression, rightArrow, name); case SEMICOLON_TOKEN: case CLOSE_PAREN_TOKEN: case OPEN_BRACE_TOKEN: case COMMA_TOKEN: case FROM_KEYWORD: case JOIN_KEYWORD: case ON_KEYWORD: case LET_KEYWORD: case WHERE_KEYWORD: case ORDER_KEYWORD: case LIMIT_KEYWORD: case SELECT_KEYWORD: return parseAsyncSendAction(expression, rightArrow, name); default: if (isGroupOrCollectKeyword(nextToken)) { return parseAsyncSendAction(expression, rightArrow, name); } recover(peek(), ParserRuleContext.REMOTE_CALL_OR_ASYNC_SEND_END); return parseRemoteCallOrAsyncSendEnd(expression, rightArrow, name); } } private STNode parseAsyncSendAction(STNode expression, STNode rightArrow, STNode peerWorker) { return STNodeFactory.createAsyncSendActionNode(expression, rightArrow, peerWorker); } /** * Parse remote method call action. 
 * <p>
     * <code>
     * remote-method-call-action := expression -> method-name ( arg-list )
     * <br/>
     * async-send-action := expression -> peer-worker ;
     * </code>
     *
     * @param expression LHS expression
     * @param rightArrow right arrow token
     * @param name remote method name
     * @return Parsed node
     */
    private STNode parseRemoteMethodCallAction(STNode expression, STNode rightArrow, STNode name) {
        STNode openParenToken = parseArgListOpenParenthesis();
        STNode arguments = parseArgsList();
        STNode closeParenToken = parseArgListCloseParenthesis();
        return STNodeFactory.createRemoteMethodCallActionNode(expression, rightArrow, name, openParenToken,
                arguments, closeParenToken);
    }

    /**
     * Parse right arrow (<code>-></code>) token.
     *
     * @return Parsed node
     */
    private STNode parseRightArrow() {
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.RIGHT_ARROW_TOKEN) {
            return consume();
        } else {
            recover(nextToken, ParserRuleContext.RIGHT_ARROW);
            return parseRightArrow();
        }
    }

    /**
     * Parse map type descriptor.
     * map-type-descriptor := `map` type-parameter
     *
     * @param mapKeyword Already-consumed `map` keyword token
     * @return Parsed node
     */
    private STNode parseMapTypeDescriptor(STNode mapKeyword) {
        STNode typeParameter = parseTypeParameter();
        return STNodeFactory.createMapTypeDescriptorNode(mapKeyword, typeParameter);
    }

    /**
     * Parse parameterized type descriptor.
     * parameterized-type-descriptor := `typedesc` [type-parameter]
     * <br/>&nbsp;| `future` [type-parameter]
     * <br/>&nbsp;| `xml` [type-parameter]
     * <br/>&nbsp;| `error` [type-parameter]
     *
     * @param keywordToken Already-consumed parameterized-type keyword token
     * @return Parsed node
     */
    private STNode parseParameterizedTypeDescriptor(STNode keywordToken) {
        STNode typeParamNode;
        STToken nextToken = peek();
        // The type parameter ("<T>") is optional for these type descriptors.
        if (nextToken.kind == SyntaxKind.LT_TOKEN) {
            typeParamNode = parseTypeParameter();
        } else {
            typeParamNode = STNodeFactory.createEmptyNode();
        }
        SyntaxKind parameterizedTypeDescKind = getParameterizedTypeDescKind(keywordToken);
        return STNodeFactory.createParameterizedTypeDescriptorNode(parameterizedTypeDescKind, keywordToken,
                typeParamNode);
    }

    // Maps a parameterized-type keyword token to the corresponding type-descriptor
    // syntax kind. Defaults to ERROR_TYPE_DESC for the error keyword and any
    // other (recovered/invalid) token.
    private SyntaxKind getParameterizedTypeDescKind(STNode keywordToken) {
        switch (keywordToken.kind) {
            case TYPEDESC_KEYWORD:
                return SyntaxKind.TYPEDESC_TYPE_DESC;
            case FUTURE_KEYWORD:
                return SyntaxKind.FUTURE_TYPE_DESC;
            case XML_KEYWORD:
                return SyntaxKind.XML_TYPE_DESC;
            case ERROR_KEYWORD:
            default:
                return SyntaxKind.ERROR_TYPE_DESC;
        }
    }

    /**
     * Parse <code> > </code> token.
     *
     * @return Parsed node
     */
    private STNode parseGTToken() {
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.GT_TOKEN) {
            return consume();
        } else {
            recover(nextToken, ParserRuleContext.GT);
            return parseGTToken();
        }
    }

    /**
     * Parse <code> < </code> token.
     *
     * @return Parsed node
     */
    private STNode parseLTToken() {
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.LT_TOKEN) {
            return consume();
        } else {
            recover(nextToken, ParserRuleContext.LT);
            return parseLTToken();
        }
    }

    /**
     * Parse nil literal. Here nil literal is only referred to ( ).
     *
     * @return Parsed node
     */
    private STNode parseNilLiteral() {
        startContext(ParserRuleContext.NIL_LITERAL);
        STNode openParenthesisToken = parseOpenParenthesis();
        STNode closeParenthesisToken = parseCloseParenthesis();
        endContext();
        return STNodeFactory.createNilLiteralNode(openParenthesisToken, closeParenthesisToken);
    }

    /**
     * Parse annotation declaration, given the qualifier.
 *
     * @param metadata Metadata
     * @param qualifier Qualifier that precedes the annotation declaration
     * @param constKeyword Const keyword
     * @return Parsed node
     */
    private STNode parseAnnotationDeclaration(STNode metadata, STNode qualifier, STNode constKeyword) {
        startContext(ParserRuleContext.ANNOTATION_DECL);
        STNode annotationKeyword = parseAnnotationKeyword();
        STNode annotDecl = parseAnnotationDeclFromType(metadata, qualifier, constKeyword, annotationKeyword);
        endContext();
        return annotDecl;
    }

    /**
     * Parse annotation keyword.
     *
     * @return Parsed node
     */
    private STNode parseAnnotationKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.ANNOTATION_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.ANNOTATION_KEYWORD);
            return parseAnnotationKeyword();
        }
    }

    /**
     * Parse the components that follow the annotation keyword of an annotation declaration.
     *
     * @param metadata Metadata
     * @param qualifier Qualifier that precedes the constant decl
     * @param constKeyword Const keyword
     * @param annotationKeyword Annotation keyword
     * @return Parsed node
     */
    private STNode parseAnnotationDeclFromType(STNode metadata, STNode qualifier, STNode constKeyword,
                                               STNode annotationKeyword) {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case IDENTIFIER_TOKEN:
                // Could be either the (optional) type-name or the annot-tag;
                // disambiguated by the dedicated method below.
                return parseAnnotationDeclWithOptionalType(metadata, qualifier, constKeyword, annotationKeyword);
            default:
                if (isTypeStartingToken(nextToken.kind)) {
                    break;
                }
                recover(peek(), ParserRuleContext.ANNOT_DECL_OPTIONAL_TYPE);
                return parseAnnotationDeclFromType(metadata, qualifier, constKeyword, annotationKeyword);
        }
        STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANNOTATION_DECL);
        STNode annotTag = parseAnnotationTag();
        return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc,
                annotTag);
    }

    /**
     * Parse annotation tag.
     * <p>
     * <code>annot-tag := identifier</code>
     *
     * @return Parsed node
     */
    private STNode parseAnnotationTag() {
        STToken token = peek();
        if (token.kind == SyntaxKind.IDENTIFIER_TOKEN) {
            return consume();
        } else {
            recover(peek(), ParserRuleContext.ANNOTATION_TAG);
            return parseAnnotationTag();
        }
    }

    /**
     * Parse an annotation declaration whose first token after the keyword is an
     * identifier: that identifier may be a qualified type-name, the start of a
     * complex type descriptor, or just the annot-tag itself.
     */
    private STNode parseAnnotationDeclWithOptionalType(STNode metadata, STNode qualifier, STNode constKeyword,
                                                       STNode annotationKeyword) {
        STNode typeDescOrAnnotTag = parseQualifiedIdentifier(ParserRuleContext.ANNOT_DECL_OPTIONAL_TYPE);
        if (typeDescOrAnnotTag.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {
            // "module:Type tag" — the qualified name must be the type.
            STNode annotTag = parseAnnotationTag();
            return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword,
                    typeDescOrAnnotTag, annotTag);
        }
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN || isValidTypeContinuationToken(nextToken)) {
            // "Type tag" or "Type? tag" etc. — identifier starts a type descriptor.
            STNode typeDesc = parseComplexTypeDescriptor(typeDescOrAnnotTag,
                    ParserRuleContext.TYPE_DESC_IN_ANNOTATION_DECL, false);
            STNode annotTag = parseAnnotationTag();
            return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc,
                    annotTag);
        }
        // Single identifier only — treat it as the annot-tag (no type).
        STNode annotTag = ((STSimpleNameReferenceNode) typeDescOrAnnotTag).name;
        return parseAnnotationDeclRhs(metadata, qualifier, constKeyword, annotationKeyword, annotTag);
    }

    /**
     * Parse the component that follows the first identifier in an annotation decl. The identifier
     * can be either the type-name (a user defined type) or the annot-tag, where the type-name
     * is not present.
     *
     * @param metadata Metadata
     * @param qualifier Qualifier that precedes the annotation decl
     * @param constKeyword Const keyword
     * @param annotationKeyword Annotation keyword
     * @param typeDescOrAnnotTag Identifier that follows the annotation-keyword
     * @return Parsed node
     */
    private STNode parseAnnotationDeclRhs(STNode metadata, STNode qualifier, STNode constKeyword,
                                          STNode annotationKeyword, STNode typeDescOrAnnotTag) {
        STToken nextToken = peek();
        STNode typeDesc;
        STNode annotTag;
        switch (nextToken.kind) {
            case IDENTIFIER_TOKEN:
                // Another identifier follows, so the first one was the type.
                typeDesc = typeDescOrAnnotTag;
                annotTag = parseAnnotationTag();
                break;
            case SEMICOLON_TOKEN:
            case ON_KEYWORD:
                // Declaration ends or attach points begin: first identifier was the tag.
                typeDesc = STNodeFactory.createEmptyNode();
                annotTag = typeDescOrAnnotTag;
                break;
            default:
                recover(peek(), ParserRuleContext.ANNOT_DECL_RHS);
                return parseAnnotationDeclRhs(metadata, qualifier, constKeyword, annotationKeyword,
                        typeDescOrAnnotTag);
        }
        return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc,
                annotTag);
    }

    /**
     * Parse the optional "on attach-points" part and the terminating semicolon of
     * an annotation declaration, and assemble the declaration node.
     */
    private STNode parseAnnotationDeclAttachPoints(STNode metadata, STNode qualifier, STNode constKeyword,
                                                   STNode annotationKeyword, STNode typeDesc, STNode annotTag) {
        STNode onKeyword;
        STNode attachPoints;
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case SEMICOLON_TOKEN:
                onKeyword = STNodeFactory.createEmptyNode();
                attachPoints = STNodeFactory.createEmptyNodeList();
                break;
            case ON_KEYWORD:
                onKeyword = parseOnKeyword();
                attachPoints = parseAnnotationAttachPoints();
                // "on" with an empty attach-point list is an error; attach the
                // diagnostic to the on-keyword.
                onKeyword = cloneWithDiagnosticIfListEmpty(attachPoints, onKeyword,
                        DiagnosticErrorCode.ERROR_MISSING_ANNOTATION_ATTACH_POINT);
                break;
            default:
                recover(peek(), ParserRuleContext.ANNOT_OPTIONAL_ATTACH_POINTS);
                return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword,
                        typeDesc, annotTag);
        }
        STNode semicolonToken = parseSemicolon();
        return STNodeFactory.createAnnotationDeclarationNode(metadata, qualifier, constKeyword, annotationKeyword,
                typeDesc, annotTag, onKeyword, attachPoints, semicolonToken);
    }

    /**
     * Parse annotation attach points.
     * <p>
     * <code>
     * annot-attach-points := annot-attach-point (, annot-attach-point)*
     * <br/><br/>
     * annot-attach-point := dual-attach-point | source-only-attach-point
     * <br/><br/>
     * dual-attach-point := [source] dual-attach-point-ident
     * <br/><br/>
     * dual-attach-point-ident :=
     * type
     * | class
     * | [object|service remote] function
     * | parameter
     * | return
     * | service
     * | [object|record] field
     * <br/><br/>
     * source-only-attach-point := source source-only-attach-point-ident
     * <br/><br/>
     * source-only-attach-point-ident :=
     * annotation
     * | external
     * | var
     * | const
     * | listener
     * | worker
     * </code>
     *
     * @return Parsed node
     */
    private STNode parseAnnotationAttachPoints() {
        startContext(ParserRuleContext.ANNOT_ATTACH_POINTS_LIST);
        List<STNode> attachPoints = new ArrayList<>();
        STToken nextToken = peek();
        if (isEndAnnotAttachPointList(nextToken.kind)) {
            endContext();
            return STNodeFactory.createEmptyNodeList();
        }
        STNode attachPoint = parseAnnotationAttachPoint();
        attachPoints.add(attachPoint);
        nextToken = peek();
        STNode leadingComma;
        while (!isEndAnnotAttachPointList(nextToken.kind)) {
            leadingComma = parseAttachPointEnd();
            if (leadingComma == null) {
                break;
            }
            attachPoints.add(leadingComma);
            attachPoint = parseAnnotationAttachPoint();
            if (attachPoint == null) {
                // EOF after a comma: synthesize a missing attach point so the
                // trailing comma is still represented in the tree.
                STToken missingAttachPointIdent = SyntaxErrors.createMissingToken(SyntaxKind.TYPE_KEYWORD);
                STNode identList = STNodeFactory.createNodeList(missingAttachPointIdent);
                attachPoint = STNodeFactory.createAnnotationAttachPointNode(STNodeFactory.createEmptyNode(),
                        identList);
                attachPoint = SyntaxErrors.addDiagnostic(attachPoint,
                        DiagnosticErrorCode.ERROR_MISSING_ANNOTATION_ATTACH_POINT);
                attachPoints.add(attachPoint);
                break;
            }
            attachPoints.add(attachPoint);
            nextToken = peek();
        }
        // If the last attach point ends with a missing token and an identifier
        // follows on the same line, swallow that identifier as an invalid node
        // to avoid cascading errors.
        if (attachPoint.lastToken().isMissing() && this.tokenReader.peek().kind == SyntaxKind.IDENTIFIER_TOKEN &&
                !this.tokenReader.head().hasTrailingNewline()) {
            STToken nextNonVirtualToken = this.tokenReader.read();
            updateLastNodeInListWithInvalidNode(attachPoints, nextNonVirtualToken,
                    DiagnosticErrorCode.ERROR_INVALID_TOKEN, nextNonVirtualToken.text());
        }
        endContext();
        return STNodeFactory.createNodeList(attachPoints);
    }

    /**
     * Parse annotation attach point end.
     *
     * @return Parsed node, or null when the list has ended
     */
    private STNode parseAttachPointEnd() {
        switch (peek().kind) {
            case SEMICOLON_TOKEN:
                return null;
            case COMMA_TOKEN:
                return consume();
            default:
                recover(peek(), ParserRuleContext.ATTACH_POINT_END);
                return parseAttachPointEnd();
        }
    }

    private boolean isEndAnnotAttachPointList(SyntaxKind tokenKind) {
        switch (tokenKind) {
            case EOF_TOKEN:
            case SEMICOLON_TOKEN:
                return true;
            default:
                return false;
        }
    }

    /**
     * Parse annotation attach point.
     *
     * @return Parsed node, or null on EOF
     */
    private STNode parseAnnotationAttachPoint() {
        switch (peek().kind) {
            case EOF_TOKEN:
                return null;
            // source-only attach points: parseSourceKeyword() recovers by inserting
            // the missing `source` keyword when the ident appears without it.
            case ANNOTATION_KEYWORD:
            case EXTERNAL_KEYWORD:
            case VAR_KEYWORD:
            case CONST_KEYWORD:
            case LISTENER_KEYWORD:
            case WORKER_KEYWORD:
            case SOURCE_KEYWORD:
                STNode sourceKeyword = parseSourceKeyword();
                return parseAttachPointIdent(sourceKeyword);
            // dual attach points (no `source` keyword).
            case OBJECT_KEYWORD:
            case TYPE_KEYWORD:
            case FUNCTION_KEYWORD:
            case PARAMETER_KEYWORD:
            case RETURN_KEYWORD:
            case SERVICE_KEYWORD:
            case FIELD_KEYWORD:
            case RECORD_KEYWORD:
            case CLASS_KEYWORD:
                sourceKeyword = STNodeFactory.createEmptyNode();
                STNode firstIdent = consume();
                return parseDualAttachPointIdent(sourceKeyword, firstIdent);
            default:
                recover(peek(), ParserRuleContext.ATTACH_POINT);
                return parseAnnotationAttachPoint();
        }
    }

    /**
     * Parse source keyword.
     *
     * @return Parsed node
     */
    private STNode parseSourceKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.SOURCE_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.SOURCE_KEYWORD);
            return parseSourceKeyword();
        }
    }

    /**
     * Parse attach point ident given.
 * <p>
     * <code>
     * source-only-attach-point-ident := annotation | external | var | const | listener | worker
     * <br/><br/>
     * dual-attach-point-ident := type | class | [object|service remote] function | parameter
     * | return | service | [object|record] field
     * </code>
     *
     * @param sourceKeyword Source keyword
     * @return Parsed node
     */
    private STNode parseAttachPointIdent(STNode sourceKeyword) {
        switch (peek().kind) {
            // source-only idents: single-ident attach point.
            case ANNOTATION_KEYWORD:
            case EXTERNAL_KEYWORD:
            case VAR_KEYWORD:
            case CONST_KEYWORD:
            case LISTENER_KEYWORD:
            case WORKER_KEYWORD:
                STNode firstIdent = consume();
                STNode identList = STNodeFactory.createNodeList(firstIdent);
                return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, identList);
            // dual idents: may be followed by a second ident (e.g. "object function").
            case OBJECT_KEYWORD:
            case RESOURCE_KEYWORD:
            case RECORD_KEYWORD:
            case TYPE_KEYWORD:
            case FUNCTION_KEYWORD:
            case PARAMETER_KEYWORD:
            case RETURN_KEYWORD:
            case SERVICE_KEYWORD:
            case FIELD_KEYWORD:
            case CLASS_KEYWORD:
                firstIdent = consume();
                return parseDualAttachPointIdent(sourceKeyword, firstIdent);
            default:
                recover(peek(), ParserRuleContext.ATTACH_POINT_IDENT);
                return parseAttachPointIdent(sourceKeyword);
        }
    }

    /**
     * Parse dual-attach-point ident.
     *
     * @param sourceKeyword Source keyword
     * @param firstIdent first part of the dual attach-point
     * @return Parsed node
     */
    private STNode parseDualAttachPointIdent(STNode sourceKeyword, STNode firstIdent) {
        STNode secondIdent;
        switch (firstIdent.kind) {
            case OBJECT_KEYWORD:
                // "object function" or "object field".
                secondIdent = parseIdentAfterObjectIdent();
                break;
            case RESOURCE_KEYWORD:
                secondIdent = parseFunctionIdent();
                break;
            case RECORD_KEYWORD:
                secondIdent = parseFieldIdent();
                break;
            case SERVICE_KEYWORD:
                // "service" may stand alone or be "service remote function".
                return parseServiceAttachPoint(sourceKeyword, firstIdent);
            case TYPE_KEYWORD:
            case FUNCTION_KEYWORD:
            case PARAMETER_KEYWORD:
            case RETURN_KEYWORD:
            case FIELD_KEYWORD:
            case CLASS_KEYWORD:
            default:
                // Single-ident attach point.
                STNode identList = STNodeFactory.createNodeList(firstIdent);
                return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, identList);
        }
        STNode identList = STNodeFactory.createNodeList(firstIdent, secondIdent);
        return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, identList);
    }

    /**
     * Parse remote ident.
     *
     * @return Parsed node
     */
    private STNode parseRemoteIdent() {
        STToken token = peek();
        if (token.kind == SyntaxKind.REMOTE_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.REMOTE_IDENT);
            return parseRemoteIdent();
        }
    }

    /**
     * Parse service attach point.
     * <code>service-attach-point := service | service remote function</code>
     *
     * @return Parsed node
     */
    private STNode parseServiceAttachPoint(STNode sourceKeyword, STNode firstIdent) {
        STNode identList;
        STToken token = peek();
        switch (token.kind) {
            case REMOTE_KEYWORD:
                STNode secondIdent = parseRemoteIdent();
                STNode thirdIdent = parseFunctionIdent();
                identList = STNodeFactory.createNodeList(firstIdent, secondIdent, thirdIdent);
                return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, identList);
            case COMMA_TOKEN:
            case SEMICOLON_TOKEN:
                // Bare "service" attach point.
                identList = STNodeFactory.createNodeList(firstIdent);
                return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, identList);
            default:
                recover(token, ParserRuleContext.SERVICE_IDENT_RHS);
                return parseServiceAttachPoint(sourceKeyword, firstIdent);
        }
    }

    /**
     * Parse the idents that are supported after object-ident.
     *
     * @return Parsed node
     */
    private STNode parseIdentAfterObjectIdent() {
        STToken token = peek();
        switch (token.kind) {
            case FUNCTION_KEYWORD:
            case FIELD_KEYWORD:
                return consume();
            default:
                recover(token, ParserRuleContext.IDENT_AFTER_OBJECT_IDENT);
                return parseIdentAfterObjectIdent();
        }
    }

    /**
     * Parse function ident.
     *
     * @return Parsed node
     */
    private STNode parseFunctionIdent() {
        STToken token = peek();
        if (token.kind == SyntaxKind.FUNCTION_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.FUNCTION_IDENT);
            return parseFunctionIdent();
        }
    }

    /**
     * Parse field ident.
     *
     * @return Parsed node
     */
    private STNode parseFieldIdent() {
        STToken token = peek();
        if (token.kind == SyntaxKind.FIELD_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.FIELD_IDENT);
            return parseFieldIdent();
        }
    }

    /**
     * Parse XML namespace declaration.
 * <p>
     * <code>xmlns-decl := xmlns xml-namespace-uri [ as xml-namespace-prefix ] ;
     * <br/>
     * xml-namespace-uri := simple-const-expr
     * <br/>
     * xml-namespace-prefix := identifier
     * </code>
     *
     * @return Parsed node
     */
    private STNode parseXMLNamespaceDeclaration(boolean isModuleVar) {
        startContext(ParserRuleContext.XML_NAMESPACE_DECLARATION);
        STNode xmlnsKeyword = parseXMLNSKeyword();
        STNode namespaceUri = parseSimpleConstExpr();
        // Keep consuming const-exprs until a valid URI form is found; invalid ones
        // are attached to the xmlns keyword as invalid-node minutiae.
        while (!isValidXMLNameSpaceURI(namespaceUri)) {
            xmlnsKeyword = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(xmlnsKeyword, namespaceUri,
                    DiagnosticErrorCode.ERROR_INVALID_XML_NAMESPACE_URI);
            namespaceUri = parseSimpleConstExpr();
        }
        STNode xmlnsDecl = parseXMLDeclRhs(xmlnsKeyword, namespaceUri, isModuleVar);
        endContext();
        return xmlnsDecl;
    }

    /**
     * Parse xmlns keyword.
     *
     * @return Parsed node
     */
    private STNode parseXMLNSKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.XMLNS_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.XMLNS_KEYWORD);
            return parseXMLNSKeyword();
        }
    }

    // Only string literals and (qualified) name references are acceptable
    // xml-namespace-uri forms.
    private boolean isValidXMLNameSpaceURI(STNode expr) {
        switch (expr.kind) {
            case STRING_LITERAL:
            case QUALIFIED_NAME_REFERENCE:
            case SIMPLE_NAME_REFERENCE:
                return true;
            case IDENTIFIER_TOKEN:
            default:
                return false;
        }
    }

    private STNode parseSimpleConstExpr() {
        startContext(ParserRuleContext.CONSTANT_EXPRESSION);
        STNode expr = parseSimpleConstExprInternal();
        endContext();
        return expr;
    }

    /**
     * Parse simple constants expr.
     *
     * @return Parsed node
     */
    private STNode parseSimpleConstExprInternal() {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case STRING_LITERAL_TOKEN:
            case DECIMAL_INTEGER_LITERAL_TOKEN:
            case HEX_INTEGER_LITERAL_TOKEN:
            case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
            case HEX_FLOATING_POINT_LITERAL_TOKEN:
            case TRUE_KEYWORD:
            case FALSE_KEYWORD:
            case NULL_KEYWORD:
                return parseBasicLiteral();
            case PLUS_TOKEN:
            case MINUS_TOKEN:
                return parseSignedIntOrFloat();
            case OPEN_PAREN_TOKEN:
                return parseNilLiteral();
            default:
                if (isPredeclaredIdentifier(nextToken.kind)) {
                    return parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);
                }
                recover(nextToken, ParserRuleContext.CONSTANT_EXPRESSION_START);
                return parseSimpleConstExprInternal();
        }
    }

    /**
     * Parse the portion after the namespace-uri of an XML declaration.
     *
     * @param xmlnsKeyword XMLNS keyword
     * @param namespaceUri Namespace URI
     * @return Parsed node
     */
    private STNode parseXMLDeclRhs(STNode xmlnsKeyword, STNode namespaceUri, boolean isModuleVar) {
        STNode asKeyword = STNodeFactory.createEmptyNode();
        STNode namespacePrefix = STNodeFactory.createEmptyNode();
        switch (peek().kind) {
            case AS_KEYWORD:
                asKeyword = parseAsKeyword();
                namespacePrefix = parseNamespacePrefix();
                break;
            case SEMICOLON_TOKEN:
                break;
            default:
                recover(peek(), ParserRuleContext.XML_NAMESPACE_PREFIX_DECL);
                return parseXMLDeclRhs(xmlnsKeyword, namespaceUri, isModuleVar);
        }
        STNode semicolon = parseSemicolon();
        // Module-level and local xmlns declarations produce different node kinds.
        if (isModuleVar) {
            return STNodeFactory.createModuleXMLNamespaceDeclarationNode(xmlnsKeyword, namespaceUri, asKeyword,
                    namespacePrefix, semicolon);
        }
        return STNodeFactory.createXMLNamespaceDeclarationNode(xmlnsKeyword, namespaceUri, asKeyword,
                namespacePrefix, semicolon);
    }

    /**
     * Parse namespace prefix.
     *
     * @return Parsed node
     */
    private STNode parseNamespacePrefix() {
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {
            return consume();
        } else {
            recover(peek(), ParserRuleContext.NAMESPACE_PREFIX);
            return parseNamespacePrefix();
        }
    }

    /**
     * Parse named worker declaration.
     * <p>
     * <code>named-worker-decl := [annots] [transactional] worker worker-name return-type-descriptor { sequence-stmt }
     * [on-fail-clause]</code>
     *
     * @param annots Annotations attached to the worker decl
     * @param qualifiers Preceding transactional keyword in a list
     * @return Parsed node
     */
    private STNode parseNamedWorkerDeclaration(STNode annots, List<STNode> qualifiers) {
        startContext(ParserRuleContext.NAMED_WORKER_DECL);
        STNode transactionalKeyword = getTransactionalKeyword(qualifiers);
        STNode workerKeyword = parseWorkerKeyword();
        STNode workerName = parseWorkerName();
        STNode returnTypeDesc = parseReturnTypeDescriptor();
        STNode workerBody = parseBlockNode();
        endContext();
        STNode onFailClause = parseOptionalOnFailClause();
        return STNodeFactory.createNamedWorkerDeclarationNode(annots, transactionalKeyword, workerKeyword,
                workerName, returnTypeDesc, workerBody, onFailClause);
    }

    /**
     * Validate the qualifier list of a worker decl: only a single `transactional`
     * is allowed. Duplicates and other qualifiers are attached as invalid-node
     * minutiae with diagnostics.
     *
     * @return the `transactional` keyword, or an empty node when absent
     */
    private STNode getTransactionalKeyword(List<STNode> qualifierList) {
        List<STNode> validatedList = new ArrayList<>();
        for (int i = 0; i < qualifierList.size(); i++) {
            STNode qualifier = qualifierList.get(i);
            int nextIndex = i + 1;
            if (isSyntaxKindInList(validatedList, qualifier.kind)) {
                // Duplicate `transactional`.
                updateLastNodeInListWithInvalidNode(validatedList, qualifier,
                        DiagnosticErrorCode.ERROR_DUPLICATE_QUALIFIER, ((STToken) qualifier).text());
            } else if (qualifier.kind == SyntaxKind.TRANSACTIONAL_KEYWORD) {
                validatedList.add(qualifier);
            } else if (qualifierList.size() == nextIndex) {
                // Last invalid qualifier: attach it to the upcoming token.
                addInvalidNodeToNextToken(qualifier, DiagnosticErrorCode.ERROR_QUALIFIER_NOT_ALLOWED,
                        ((STToken) qualifier).text());
            } else {
                // Invalid qualifier with a successor: attach it to the next qualifier.
                updateANodeInListWithLeadingInvalidNode(qualifierList, nextIndex, qualifier,
                        DiagnosticErrorCode.ERROR_QUALIFIER_NOT_ALLOWED, ((STToken) qualifier).text());
            }
        }
        STNode transactionalKeyword;
        if (validatedList.isEmpty()) {
            transactionalKeyword = STNodeFactory.createEmptyNode();
        } else {
            transactionalKeyword = validatedList.get(0);
        }
        return transactionalKeyword;
    }

    // Parses an optional "returns [annots] type-desc"; empty node when absent.
    private STNode parseReturnTypeDescriptor() {
        STToken token = peek();
        if (token.kind != SyntaxKind.RETURNS_KEYWORD) {
            return STNodeFactory.createEmptyNode();
        }
        STNode returnsKeyword = consume();
        STNode annot = parseOptionalAnnotations();
        STNode type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_RETURN_TYPE_DESC);
        return STNodeFactory.createReturnTypeDescriptorNode(returnsKeyword, annot, type);
    }

    /**
     * Parse worker keyword.
     *
     * @return Parsed node
     */
    private STNode parseWorkerKeyword() {
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.WORKER_KEYWORD) {
            return consume();
        } else {
            recover(peek(), ParserRuleContext.WORKER_KEYWORD);
            return parseWorkerKeyword();
        }
    }

    /**
     * Parse worker name.
     * <p>
     * <code>worker-name := identifier</code>
     *
     * @return Parsed node
     */
    private STNode parseWorkerName() {
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {
            return consume();
        } else {
            recover(peek(), ParserRuleContext.WORKER_NAME);
            return parseWorkerName();
        }
    }

    /**
     * Parse lock statement.
     * <code>lock-stmt := lock block-stmt [on-fail-clause]</code>
     *
     * @return Lock statement
     */
    private STNode parseLockStatement() {
        startContext(ParserRuleContext.LOCK_STMT);
        STNode lockKeyword = parseLockKeyword();
        STNode blockStatement = parseBlockNode();
        endContext();
        STNode onFailClause = parseOptionalOnFailClause();
        return STNodeFactory.createLockStatementNode(lockKeyword, blockStatement, onFailClause);
    }

    /**
     * Parse lock-keyword.
* * @return lock-keyword node */ private STNode parseLockKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.LOCK_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.LOCK_KEYWORD); return parseLockKeyword(); } } /** * Parse union type descriptor. * union-type-descriptor := type-descriptor | type-descriptor * * @param leftTypeDesc Type desc in the LHS os the union type desc. * @param context Current context. * @return parsed union type desc node */ private STNode parseUnionTypeDescriptor(STNode leftTypeDesc, ParserRuleContext context, boolean isTypedBindingPattern) { STNode pipeToken = consume(); STNode rightTypeDesc = parseTypeDescriptorInternal(new ArrayList<>(), context, isTypedBindingPattern, false, TypePrecedence.UNION); return mergeTypesWithUnion(leftTypeDesc, pipeToken, rightTypeDesc); } /** * Creates a union type descriptor after validating lhs and rhs types. * <p> * <i>Note: Since type precedence and associativity are not taken into account here, * this method should not be called directly when types are unknown. * <br/> * Call {@link * * @param leftTypeDesc lhs type * @param pipeToken pipe token * @param rightTypeDesc rhs type * @return a UnionTypeDescriptorNode */ private STNode createUnionTypeDesc(STNode leftTypeDesc, STNode pipeToken, STNode rightTypeDesc) { leftTypeDesc = validateForUsageOfVar(leftTypeDesc); rightTypeDesc = validateForUsageOfVar(rightTypeDesc); return STNodeFactory.createUnionTypeDescriptorNode(leftTypeDesc, pipeToken, rightTypeDesc); } /** * Parse pipe token. 
* * @return parsed pipe token node */ private STNode parsePipeToken() { STToken token = peek(); if (token.kind == SyntaxKind.PIPE_TOKEN) { return consume(); } else { recover(token, ParserRuleContext.PIPE); return parsePipeToken(); } } private boolean isTypeStartingToken(SyntaxKind nodeKind) { return isTypeStartingToken(nodeKind, getNextNextToken()); } private static boolean isTypeStartingToken(SyntaxKind nextTokenKind, STToken nextNextToken) { switch (nextTokenKind) { case IDENTIFIER_TOKEN: case SERVICE_KEYWORD: case RECORD_KEYWORD: case OBJECT_KEYWORD: case ABSTRACT_KEYWORD: case CLIENT_KEYWORD: case OPEN_PAREN_TOKEN: case MAP_KEYWORD: case STREAM_KEYWORD: case TABLE_KEYWORD: case FUNCTION_KEYWORD: case OPEN_BRACKET_TOKEN: case DISTINCT_KEYWORD: case ISOLATED_KEYWORD: case TRANSACTIONAL_KEYWORD: case TRANSACTION_KEYWORD: return true; default: if (isParameterizedTypeToken(nextTokenKind)) { return true; } if (isSingletonTypeDescStart(nextTokenKind, nextNextToken)) { return true; } return isSimpleType(nextTokenKind); } } /** * Check if the token kind is a type descriptor in terminal expression. * <p> * simple-type-in-expr := * boolean | int | byte | float | decimal | string | handle | json | anydata | any | never * * @param nodeKind token kind to check * @return <code>true</code> for simple type token in expression. <code>false</code> otherwise. 
     */
    private boolean isSimpleTypeInExpression(SyntaxKind nodeKind) {
        switch (nodeKind) {
            // `var` and `readonly` are simple type keywords, but are not valid in expression position.
            case VAR_KEYWORD:
            case READONLY_KEYWORD:
                return false;
            default:
                return isSimpleType(nodeKind);
        }
    }

    /**
     * Check whether the given token kind is a built-in simple type keyword.
     *
     * @param nodeKind Token kind to check
     * @return <code>true</code> for a simple type keyword, <code>false</code> otherwise
     */
    static boolean isSimpleType(SyntaxKind nodeKind) {
        switch (nodeKind) {
            case INT_KEYWORD:
            case FLOAT_KEYWORD:
            case DECIMAL_KEYWORD:
            case BOOLEAN_KEYWORD:
            case STRING_KEYWORD:
            case BYTE_KEYWORD:
            case JSON_KEYWORD:
            case HANDLE_KEYWORD:
            case ANY_KEYWORD:
            case ANYDATA_KEYWORD:
            case NEVER_KEYWORD:
            case VAR_KEYWORD:
            case READONLY_KEYWORD:
                return true;
            default:
                return false;
        }
    }

    /**
     * Check whether the given token kind is a keyword that can also act as a
     * predeclared module prefix (e.g. <code>int:</code>, <code>error:</code>).
     *
     * @param nodeKind Token kind to check
     * @return <code>true</code> for a predeclared-prefix keyword, <code>false</code> otherwise
     */
    static boolean isPredeclaredPrefix(SyntaxKind nodeKind) {
        switch (nodeKind) {
            case BOOLEAN_KEYWORD:
            case DECIMAL_KEYWORD:
            case ERROR_KEYWORD:
            case FLOAT_KEYWORD:
            case FUNCTION_KEYWORD:
            case FUTURE_KEYWORD:
            case INT_KEYWORD:
            case MAP_KEYWORD:
            case OBJECT_KEYWORD:
            case STREAM_KEYWORD:
            case STRING_KEYWORD:
            case TABLE_KEYWORD:
            case TRANSACTION_KEYWORD:
            case TYPEDESC_KEYWORD:
            case XML_KEYWORD:
                return true;
            default:
                return false;
        }
    }

    // A predeclared prefix only forms a qualified identifier when the token after it is a colon.
    private boolean isQualifiedIdentifierPredeclaredPrefix(SyntaxKind nodeKind) {
        return isPredeclaredPrefix(nodeKind) && getNextNextToken().kind == SyntaxKind.COLON_TOKEN;
    }

    // Map a built-in type keyword to its corresponding type-descriptor syntax kind.
    private static SyntaxKind getBuiltinTypeSyntaxKind(SyntaxKind typeKeyword) {
        switch (typeKeyword) {
            case INT_KEYWORD:
                return SyntaxKind.INT_TYPE_DESC;
            case FLOAT_KEYWORD:
                return SyntaxKind.FLOAT_TYPE_DESC;
            case DECIMAL_KEYWORD:
                return SyntaxKind.DECIMAL_TYPE_DESC;
            case BOOLEAN_KEYWORD:
                return SyntaxKind.BOOLEAN_TYPE_DESC;
            case STRING_KEYWORD:
                return SyntaxKind.STRING_TYPE_DESC;
            case BYTE_KEYWORD:
                return SyntaxKind.BYTE_TYPE_DESC;
            case JSON_KEYWORD:
                return SyntaxKind.JSON_TYPE_DESC;
            case HANDLE_KEYWORD:
                return SyntaxKind.HANDLE_TYPE_DESC;
            case ANY_KEYWORD:
                return SyntaxKind.ANY_TYPE_DESC;
            case ANYDATA_KEYWORD:
                return SyntaxKind.ANYDATA_TYPE_DESC;
            case NEVER_KEYWORD:
                return SyntaxKind.NEVER_TYPE_DESC;
            case VAR_KEYWORD:
                return SyntaxKind.VAR_TYPE_DESC;
            case READONLY_KEYWORD:
                return SyntaxKind.READONLY_TYPE_DESC;
            default:
                // Should be unreachable: callers only pass built-in type keywords.
                assert false : typeKeyword + " is not a built-in type";
                return SyntaxKind.TYPE_REFERENCE;
        }
    }

    /**
     * Parse fork-keyword.
     *
     * @return Fork-keyword node
     */
    private STNode parseForkKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.FORK_KEYWORD) {
            return consume();
        } else {
            // Error recovery: adjust the token stream, then retry.
            recover(token, ParserRuleContext.FORK_KEYWORD);
            return parseForkKeyword();
        }
    }

    /**
     * Parse fork statement.
     * <code>fork-stmt := fork { named-worker-decl+ }</code>
     *
     * @return Fork statement
     */
    private STNode parseForkStatement() {
        startContext(ParserRuleContext.FORK_STMT);
        STNode forkKeyword = parseForkKeyword();
        STNode openBrace = parseOpenBrace();

        // Only named-worker declarations are allowed inside a fork block; any other
        // statement is attached to the preceding node as invalid-node minutiae.
        ArrayList<STNode> workers = new ArrayList<>();
        while (!isEndOfStatements()) {
            STNode stmt = parseStatement();
            if (stmt == null) {
                break;
            }

            if (validateStatement(stmt)) {
                continue;
            }

            switch (stmt.kind) {
                case NAMED_WORKER_DECLARATION:
                    workers.add(stmt);
                    break;
                default:
                    if (workers.isEmpty()) {
                        openBrace = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(openBrace, stmt,
                                DiagnosticErrorCode.ERROR_ONLY_NAMED_WORKERS_ALLOWED_HERE);
                    } else {
                        updateLastNodeInListWithInvalidNode(workers, stmt,
                                DiagnosticErrorCode.ERROR_ONLY_NAMED_WORKERS_ALLOWED_HERE);
                    }
            }
        }

        STNode namedWorkerDeclarations = STNodeFactory.createNodeList(workers);
        STNode closeBrace = parseCloseBrace();
        endContext();

        STNode forkStmt =
                STNodeFactory.createForkStatementNode(forkKeyword, openBrace, namedWorkerDeclarations, closeBrace);
        if (isNodeListEmpty(namedWorkerDeclarations)) {
            // A fork statement must contain at least one named worker.
            return SyntaxErrors.addDiagnostic(forkStmt,
                    DiagnosticErrorCode.ERROR_MISSING_NAMED_WORKER_DECLARATION_IN_FORK_STMT);
        }
        return forkStmt;
    }

    /**
     * Parse trap expression.
     * <p>
     * <code>
     * trap-expr := trap expression
     * </code>
     *
     * @param allowActions Allow actions
     * @param isRhsExpr Whether this is a RHS expression or not
     * @return Trap expression node
     */
    private STNode parseTrapExpression(boolean isRhsExpr, boolean allowActions, boolean isInConditionalExpr) {
        STNode trapKeyword = parseTrapKeyword();
        STNode expr = parseExpression(OperatorPrecedence.TRAP, isRhsExpr, allowActions, isInConditionalExpr);
        // `trap` applied to an action yields a trap-action node; otherwise a trap-expression node.
        if (isAction(expr)) {
            return STNodeFactory.createTrapExpressionNode(SyntaxKind.TRAP_ACTION, trapKeyword, expr);
        }

        return STNodeFactory.createTrapExpressionNode(SyntaxKind.TRAP_EXPRESSION, trapKeyword, expr);
    }

    /**
     * Parse trap-keyword.
     *
     * @return Trap-keyword node
     */
    private STNode parseTrapKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.TRAP_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.TRAP_KEYWORD);
            return parseTrapKeyword();
        }
    }

    /**
     * Parse list constructor expression.
     * <p>
     * <code>
     * list-constructor-expr := [ [ list-members ] ]
     * <br/>
     * list-members := list-member (, list-member)*
     * <br/>
     * list-member := expression | spread-member
     * <br/>
     * spread-member := ... expression
     * </code>
     *
     * @return Parsed node
     */
    private STNode parseListConstructorExpr() {
        startContext(ParserRuleContext.LIST_CONSTRUCTOR);
        STNode openBracket = parseOpenBracket();
        STNode listMembers = parseListMembers();
        STNode closeBracket = parseCloseBracket();
        endContext();
        return STNodeFactory.createListConstructorExpressionNode(openBracket, listMembers, closeBracket);
    }

    /**
     * Parse optional list member list.
     *
     * @return Parsed node
     */
    private STNode parseListMembers() {
        List<STNode> listMembers = new ArrayList<>();
        if (isEndOfListConstructor(peek().kind)) {
            return STNodeFactory.createEmptyNodeList();
        }

        // Parse the first member, then continue with (separator, member) pairs.
        STNode listMember = parseListMember();
        listMembers.add(listMember);
        return parseListMembers(listMembers);
    }

    // Continue parsing (comma, member) pairs until the list constructor ends.
    private STNode parseListMembers(List<STNode> listMembers) {
        STNode listConstructorMemberEnd;
        while (!isEndOfListConstructor(peek().kind)) {
            listConstructorMemberEnd = parseListConstructorMemberEnd();
            if (listConstructorMemberEnd == null) {
                break;
            }

            listMembers.add(listConstructorMemberEnd);
            STNode listMember = parseListMember();
            listMembers.add(listMember);
        }

        return STNodeFactory.createNodeList(listMembers);
    }

    /**
     * Parse list member.
     * <p>
     * <code>
     * list-member := expression | spread-member
     * </code>
     *
     * @return Parsed node
     */
    private STNode parseListMember() {
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.ELLIPSIS_TOKEN) {
            return parseSpreadMember();
        } else {
            return parseExpression();
        }
    }

    /**
     * Parse spread member.
     * <p>
     * <code>
     * spread-member := ... expression
     * </code>
     *
     * @return Parsed node
     */
    private STNode parseSpreadMember() {
        STNode ellipsis = parseEllipsis();
        STNode expr = parseExpression();
        return STNodeFactory.createSpreadMemberNode(ellipsis, expr);
    }

    // A list constructor ends at `]` or at end of file.
    private boolean isEndOfListConstructor(SyntaxKind tokenKind) {
        switch (tokenKind) {
            case EOF_TOKEN:
            case CLOSE_BRACKET_TOKEN:
                return true;
            default:
                return false;
        }
    }

    // Parse the separator between list members; returns null when the list has ended.
    private STNode parseListConstructorMemberEnd() {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case COMMA_TOKEN:
                return consume();
            case CLOSE_BRACKET_TOKEN:
                return null;
            default:
                recover(nextToken, ParserRuleContext.LIST_CONSTRUCTOR_MEMBER_END);
                return parseListConstructorMemberEnd();
        }
    }

    /**
     * Parse foreach statement.
     * <code>foreach-stmt := foreach typed-binding-pattern in action-or-expr block-stmt [on-fail-clause]</code>
     *
     * @return foreach statement
     */
    private STNode parseForEachStatement() {
        startContext(ParserRuleContext.FOREACH_STMT);
        STNode forEachKeyword = parseForEachKeyword();
        STNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.FOREACH_STMT);
        STNode inKeyword = parseInKeyword();
        STNode actionOrExpr = parseActionOrExpression();
        STNode blockStatement = parseBlockNode();
        endContext();
        // The optional on-fail clause is parsed after the foreach context is closed.
        STNode onFailClause = parseOptionalOnFailClause();
        return STNodeFactory.createForEachStatementNode(forEachKeyword, typedBindingPattern, inKeyword, actionOrExpr,
                blockStatement, onFailClause);
    }

    /**
     * Parse foreach-keyword.
     *
     * @return ForEach-keyword node
     */
    private STNode parseForEachKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.FOREACH_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.FOREACH_KEYWORD);
            return parseForEachKeyword();
        }
    }

    /**
     * Parse in-keyword.
     *
     * @return In-keyword node
     */
    private STNode parseInKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.IN_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.IN_KEYWORD);
            return parseInKeyword();
        }
    }

    /**
     * Parse type cast expression.
     * <p>
     * <code>
     * type-cast-expr := < type-cast-param > expression
     * <br/>
     * type-cast-param := [annots] type-descriptor | annots
     * </code>
     *
     * @return Parsed node
     */
    private STNode parseTypeCastExpr(boolean isRhsExpr, boolean allowActions, boolean isInConditionalExpr) {
        startContext(ParserRuleContext.TYPE_CAST);
        STNode ltToken = parseLTToken();
        return parseTypeCastExpr(ltToken, isRhsExpr, allowActions, isInConditionalExpr);
    }

    // Parse the remainder of a type-cast expression, given the already-consumed `<` token.
    private STNode parseTypeCastExpr(STNode ltToken, boolean isRhsExpr, boolean allowActions,
                                     boolean isInConditionalExpr) {
        STNode typeCastParam = parseTypeCastParam();
        STNode gtToken = parseGTToken();
        endContext();
        STNode expression =
                parseExpression(OperatorPrecedence.EXPRESSION_ACTION, isRhsExpr, allowActions, isInConditionalExpr);
        return STNodeFactory.createTypeCastExpressionNode(ltToken, typeCastParam, gtToken, expression);
    }

    // Parse `[annots] type-descriptor | annots` inside the angle brackets of a type cast.
    private STNode parseTypeCastParam() {
        STNode annot;
        STNode type;
        STToken token = peek();

        switch (token.kind) {
            case AT_TOKEN:
                annot = parseOptionalAnnotations();
                token = peek();
                if (isTypeStartingToken(token.kind)) {
                    type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);
                } else {
                    // Annotations alone are allowed; the type descriptor is optional in that case.
                    type = STNodeFactory.createEmptyNode();
                }
                break;
            default:
                annot = STNodeFactory.createEmptyNode();
                type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);
                break;
        }

        return STNodeFactory.createTypeCastParamNode(getAnnotations(annot), type);
    }

    /**
     * Parse table constructor expression.
     * <p>
     * <code>
     * table-constructor-expr-rhs := [ [row-list] ]
     * </code>
     *
     * @param tableKeyword tableKeyword that precedes this rhs
     * @param keySpecifier keySpecifier that precedes this rhs
     * @return Parsed node
     */
    private STNode parseTableConstructorExprRhs(STNode tableKeyword, STNode keySpecifier) {
        switchContext(ParserRuleContext.TABLE_CONSTRUCTOR);
        STNode openBracket = parseOpenBracket();
        STNode rowList = parseRowList();
        STNode closeBracket = parseCloseBracket();
        return STNodeFactory.createTableConstructorExpressionNode(tableKeyword, keySpecifier, openBracket, rowList,
                closeBracket);
    }

    /**
     * Parse table-keyword.
     *
     * @return Table-keyword node
     */
    private STNode parseTableKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.TABLE_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.TABLE_KEYWORD);
            return parseTableKeyword();
        }
    }

    /**
     * Parse table rows.
     * <p>
     * <code>row-list := [ mapping-constructor-expr (, mapping-constructor-expr)* ]</code>
     *
     * @return Parsed node
     */
    private STNode parseRowList() {
        STToken nextToken = peek();
        // An empty table has an empty row list.
        if (isEndOfTableRowList(nextToken.kind)) {
            return STNodeFactory.createEmptyNodeList();
        }

        // Parse the first row, then continue with (separator, row) pairs.
        List<STNode> mappings = new ArrayList<>();
        STNode mapExpr = parseMappingConstructorExpr();
        mappings.add(mapExpr);

        nextToken = peek();
        STNode rowEnd;
        while (!isEndOfTableRowList(nextToken.kind)) {
            rowEnd = parseTableRowEnd();
            if (rowEnd == null) {
                break;
            }

            mappings.add(rowEnd);
            mapExpr = parseMappingConstructorExpr();
            mappings.add(mapExpr);
            nextToken = peek();
        }

        return STNodeFactory.createNodeList(mappings);
    }

    // A row list ends at `]`/EOF; `,` and `{` continue it; anything else defers to
    // the mapping-constructor end rules.
    private boolean isEndOfTableRowList(SyntaxKind tokenKind) {
        switch (tokenKind) {
            case EOF_TOKEN:
            case CLOSE_BRACKET_TOKEN:
                return true;
            case COMMA_TOKEN:
            case OPEN_BRACE_TOKEN:
                return false;
            default:
                return isEndOfMappingConstructor(tokenKind);
        }
    }

    // Parse the separator between table rows; returns null when the row list has ended.
    private STNode parseTableRowEnd() {
        switch (peek().kind) {
            case COMMA_TOKEN:
                return parseComma();
            case CLOSE_BRACKET_TOKEN:
            case EOF_TOKEN:
                return null;
            default:
                recover(peek(), ParserRuleContext.TABLE_ROW_END);
                return parseTableRowEnd();
        }
    }

    /**
     * Parse key specifier.
     * <p>
     * <code>key-specifier := key ( [ field-name (, field-name)* ] )</code>
     *
     * @return Parsed node
     */
    private STNode parseKeySpecifier() {
        startContext(ParserRuleContext.KEY_SPECIFIER);
        STNode keyKeyword = parseKeyKeyword();
        STNode openParen = parseOpenParenthesis();
        STNode fieldNames = parseFieldNames();
        STNode closeParen = parseCloseParenthesis();
        endContext();
        return STNodeFactory.createKeySpecifierNode(keyKeyword, openParen, fieldNames, closeParen);
    }

    /**
     * Parse key-keyword.
     *
     * @return Key-keyword node
     */
    private STNode parseKeyKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.KEY_KEYWORD) {
            return consume();
        }

        // `key` is a contextual keyword: it may have been lexed as a plain identifier.
        if (isKeyKeyword(token)) {
            return getKeyKeyword(consume());
        }

        recover(token, ParserRuleContext.KEY_KEYWORD);
        return parseKeyKeyword();
    }

    // Check whether an identifier token spells the contextual keyword `key`.
    static boolean isKeyKeyword(STToken token) {
        return token.kind == SyntaxKind.IDENTIFIER_TOKEN && LexerTerminals.KEY.equals(token.text());
    }

    // Re-create the token as a KEY_KEYWORD token, preserving minutiae and diagnostics.
    private STNode getKeyKeyword(STToken token) {
        return STNodeFactory.createToken(SyntaxKind.KEY_KEYWORD, token.leadingMinutiae(), token.trailingMinutiae(),
                token.diagnostics());
    }

    // Re-create the token as an UNDERSCORE_KEYWORD token, preserving minutiae and diagnostics.
    private STToken getUnderscoreKeyword(STToken token) {
        return STNodeFactory.createToken(SyntaxKind.UNDERSCORE_KEYWORD, token.leadingMinutiae(),
                token.trailingMinutiae(), token.diagnostics());
    }

    /**
     * Parse field names.
     * <p>
     * <code>field-name-list := [ field-name (, field-name)* ]</code>
     *
     * @return Parsed node
     */
    private STNode parseFieldNames() {
        STToken nextToken = peek();
        if (isEndOfFieldNamesList(nextToken.kind)) {
            return STNodeFactory.createEmptyNodeList();
        }

        // Parse the first field name, then continue with (comma, field-name) pairs.
        List<STNode> fieldNames = new ArrayList<>();
        STNode fieldName = parseVariableName();
        fieldNames.add(fieldName);

        nextToken = peek();
        STNode leadingComma;
        while (!isEndOfFieldNamesList(nextToken.kind)) {
            leadingComma = parseComma();
            fieldNames.add(leadingComma);
            fieldName = parseVariableName();
            fieldNames.add(fieldName);
            nextToken = peek();
        }

        return STNodeFactory.createNodeList(fieldNames);
    }

    // Only identifiers and commas continue a field-name list; anything else ends it.
    private boolean isEndOfFieldNamesList(SyntaxKind tokenKind) {
        switch (tokenKind) {
            case COMMA_TOKEN:
            case IDENTIFIER_TOKEN:
                return false;
            default:
                return true;
        }
    }

    /**
     * Parse error-keyword.
     *
     * @return Parsed error-keyword node
     */
    private STNode parseErrorKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.ERROR_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.ERROR_KEYWORD);
            return parseErrorKeyword();
        }
    }

    /**
     * Parse stream type descriptor.
     * <p>
     * stream-type-descriptor := stream [stream-type-parameters]
     * <br/>
     * stream-type-parameters := < type-descriptor [, type-descriptor]>
     * </p>
     *
     * @return Parsed stream type descriptor node
     */
    private STNode parseStreamTypeDescriptor(STNode streamKeywordToken) {
        STNode streamTypeParamsNode;
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.LT_TOKEN) {
            streamTypeParamsNode = parseStreamTypeParamsNode();
        } else {
            // The type parameters are optional.
            streamTypeParamsNode = STNodeFactory.createEmptyNode();
        }
        return STNodeFactory.createStreamTypeDescriptorNode(streamKeywordToken, streamTypeParamsNode);
    }

    /**
     * Parse stream type params node.
     * <p>
     * stream-type-parameters := < type-descriptor [, type-descriptor]>
     * </p>
     *
     * @return Parsed stream type params node
     */
    private STNode parseStreamTypeParamsNode() {
        STNode ltToken = parseLTToken();
        startContext(ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC);
        STNode leftTypeDescNode = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC);
        STNode streamTypedesc = parseStreamTypeParamsNode(ltToken, leftTypeDescNode);
        endContext();
        return streamTypedesc;
    }

    // Parse the optional second type parameter and the closing `>` of a stream type.
    private STNode parseStreamTypeParamsNode(STNode ltToken, STNode leftTypeDescNode) {
        STNode commaToken, rightTypeDescNode, gtToken;

        switch (peek().kind) {
            case COMMA_TOKEN:
                commaToken = parseComma();
                rightTypeDescNode = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC);
                break;
            case GT_TOKEN:
                // Single type parameter: no comma, no second type descriptor.
                commaToken = STNodeFactory.createEmptyNode();
                rightTypeDescNode = STNodeFactory.createEmptyNode();
                break;
            default:
                recover(peek(), ParserRuleContext.STREAM_TYPE_FIRST_PARAM_RHS);
                return parseStreamTypeParamsNode(ltToken, leftTypeDescNode);
        }

        gtToken = parseGTToken();
        return STNodeFactory.createStreamTypeParamsNode(ltToken, leftTypeDescNode, commaToken, rightTypeDescNode,
                gtToken);
    }

    /**
     * Parse let expression.
     * <p>
     * <code>
     * let-expr := let let-var-decl [, let-var-decl]* in expression
     * </code>
     *
     * @return Parsed node
     */
    private STNode parseLetExpression(boolean isRhsExpr, boolean isInConditionalExpr) {
        STNode letKeyword = parseLetKeyword();
        STNode letVarDeclarations = parseLetVarDeclarations(ParserRuleContext.LET_EXPR_LET_VAR_DECL, isRhsExpr, false);
        STNode inKeyword = parseInKeyword();

        // At least one let-var-decl is required; attach a diagnostic to the keyword otherwise.
        letKeyword = cloneWithDiagnosticIfListEmpty(letVarDeclarations, letKeyword,
                DiagnosticErrorCode.ERROR_MISSING_LET_VARIABLE_DECLARATION);
        STNode expression = parseExpression(OperatorPrecedence.REMOTE_CALL_ACTION, isRhsExpr, false,
                isInConditionalExpr);
        return STNodeFactory.createLetExpressionNode(letKeyword, letVarDeclarations, inKeyword, expression);
    }

    /**
     * Parse let-keyword.
     *
     * @return Let-keyword node
     */
    private STNode parseLetKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.LET_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.LET_KEYWORD);
            return parseLetKeyword();
        }
    }

    /**
     * Parse let variable declarations.
     * <p>
     * <code>let-var-decl-list := let-var-decl [, let-var-decl]*</code>
     *
     * @return Parsed node
     */
    private STNode parseLetVarDeclarations(ParserRuleContext context, boolean isRhsExpr, boolean allowActions) {
        startContext(context);
        List<STNode> varDecls = new ArrayList<>();
        STToken nextToken = peek();

        if (isEndOfLetVarDeclarations(nextToken, getNextNextToken())) {
            endContext();
            return STNodeFactory.createEmptyNodeList();
        }

        // Parse the first declaration, then continue with (comma, declaration) pairs.
        STNode varDec = parseLetVarDecl(context, isRhsExpr, allowActions);
        varDecls.add(varDec);

        nextToken = peek();
        STNode leadingComma;
        while (!isEndOfLetVarDeclarations(nextToken, getNextNextToken())) {
            leadingComma = parseComma();
            varDecls.add(leadingComma);
            varDec = parseLetVarDecl(context, isRhsExpr, allowActions);
            varDecls.add(varDec);
            nextToken = peek();
        }

        endContext();
        return STNodeFactory.createNodeList(varDecls);
    }

    // The declaration list ends at `in`, at a group/collect keyword, or when the next
    // token cannot start a type descriptor. Commas and annotations (`@`) continue it.
    static boolean isEndOfLetVarDeclarations(STToken nextToken, STToken nextNextToken) {
        SyntaxKind tokenKind = nextToken.kind;
        switch (tokenKind) {
            case COMMA_TOKEN:
            case AT_TOKEN:
                return false;
            case IN_KEYWORD:
                return true;
            default:
                return isGroupOrCollectKeyword(nextToken) || !isTypeStartingToken(tokenKind, nextNextToken);
        }
    }

    /**
     * Parse let variable declaration.
     * <p>
     * <code>let-var-decl := [annots] typed-binding-pattern = expression</code>
     *
     * @return Parsed node
     */
    private STNode parseLetVarDecl(ParserRuleContext context, boolean isRhsExpr, boolean allowActions) {
        STNode annot = parseOptionalAnnotations();
        STNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.LET_EXPR_LET_VAR_DECL);
        STNode assign = parseAssignOp();

        // Let clauses in queries parse the RHS at QUERY precedence; let expressions
        // parse it at ANON_FUNC_OR_LET precedence (and never allow actions).
        STNode expression = context == ParserRuleContext.LET_CLAUSE_LET_VAR_DECL ?
                parseExpression(OperatorPrecedence.QUERY, isRhsExpr, allowActions) :
                parseExpression(OperatorPrecedence.ANON_FUNC_OR_LET, isRhsExpr, false);

        return STNodeFactory.createLetVariableDeclarationNode(annot, typedBindingPattern, assign, expression);
    }

    /**
     * Parse raw backtick string template expression.
     * <p>
     * <code>BacktickString := `expression`</code>
     *
     * @return Template expression node
     */
    private STNode parseTemplateExpression() {
        STNode type = STNodeFactory.createEmptyNode();
        STNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);
        STNode content = parseTemplateContent();
        // NOTE(review): the closing backtick is parsed with TEMPLATE_START here, unlike the
        // other template parsers which use TEMPLATE_END — confirm this is intentional.
        STNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);
        return STNodeFactory.createTemplateExpressionNode(SyntaxKind.RAW_TEMPLATE_EXPRESSION, type, startingBackTick,
                content, endingBackTick);
    }

    // Collect template items (literal chunks and interpolations) until the closing backtick.
    private STNode parseTemplateContent() {
        List<STNode> items = new ArrayList<>();
        STToken nextToken = peek();
        while (!isEndOfBacktickContent(nextToken.kind)) {
            STNode contentItem = parseTemplateItem();
            items.add(contentItem);
            nextToken = peek();
        }
        return STNodeFactory.createNodeList(items);
    }

    // Backtick content ends at a backtick or at end of file.
    private boolean isEndOfBacktickContent(SyntaxKind kind) {
        switch (kind) {
            case EOF_TOKEN:
            case BACKTICK_TOKEN:
                return true;
            default:
                return false;
        }
    }

    // A template item is either an interpolation (`${...}`) or a literal template token.
    private STNode parseTemplateItem() {
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.INTERPOLATION_START_TOKEN) {
            return parseInterpolation();
        }

        return consume();
    }

    /**
     * Parse string template expression.
     * <p>
     * <code>string-template-expr := string ` expression `</code>
     *
     * @return String template expression node
     */
    private STNode parseStringTemplateExpression() {
        STNode type = parseStringKeyword();
        STNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);
        STNode content = parseTemplateContent();
        STNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_END);
        return STNodeFactory.createTemplateExpressionNode(SyntaxKind.STRING_TEMPLATE_EXPRESSION, type,
                startingBackTick, content, endingBackTick);
    }

    /**
     * Parse <code>string</code> keyword.
     *
     * @return string keyword node
     */
    private STNode parseStringKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.STRING_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.STRING_KEYWORD);
            return parseStringKeyword();
        }
    }

    /**
     * Parse XML template expression.
     * <p>
     * <code>xml-template-expr := xml BacktickString</code>
     *
     * @return XML template expression
     */
    private STNode parseXMLTemplateExpression() {
        STNode xmlKeyword = parseXMLKeyword();
        STNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);
        if (startingBackTick.isMissing()) {
            // Recovery: without an opening backtick, synthesize an empty template expression.
            return createMissingTemplateExpressionNode(xmlKeyword, SyntaxKind.XML_TEMPLATE_EXPRESSION);
        }

        STNode content = parseTemplateContentAsXML();
        STNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_END);
        return STNodeFactory.createTemplateExpressionNode(SyntaxKind.XML_TEMPLATE_EXPRESSION, xmlKeyword,
                startingBackTick, content, endingBackTick);
    }

    /**
     * Parse <code>xml</code> keyword.
     *
     * @return xml keyword node
     */
    private STNode parseXMLKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.XML_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.XML_KEYWORD);
            return parseXMLKeyword();
        }
    }

    /**
     * Parse the content of the template string as XML. This method first read the
     * input in the same way as the raw-backtick-template (BacktickString). Then
     * it parses the content as XML.
     *
     * @return XML node
     */
    private STNode parseTemplateContentAsXML() {
        // Separate interpolations from the static template chunks: the static text is
        // re-lexed/parsed as XML, with `${}` placeholders marking the interpolation sites.
        ArrayDeque<STNode> expressions = new ArrayDeque<>();
        StringBuilder xmlStringBuilder = new StringBuilder();
        STToken nextToken = peek();
        while (!isEndOfBacktickContent(nextToken.kind)) {
            STNode contentItem = parseTemplateItem();
            if (contentItem.kind == SyntaxKind.TEMPLATE_STRING) {
                xmlStringBuilder.append(((STToken) contentItem).text());
            } else {
                xmlStringBuilder.append("${}");
                expressions.add(contentItem);
            }
            nextToken = peek();
        }

        CharReader charReader = CharReader.from(xmlStringBuilder.toString());
        AbstractTokenReader tokenReader = new TokenReader(new XMLLexer(charReader));
        XMLParser xmlParser = new XMLParser(tokenReader, expressions);
        return xmlParser.parse();
    }

    /**
     * Parse regular expression constructor.
     * <p>
     * <code>regexp-constructor-expr := re BacktickString</code>
     *
     * @return Regular expression template expression
     */
    private STNode parseRegExpTemplateExpression() {
        STNode reKeyword = consume();
        STNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);
        if (startingBackTick.isMissing()) {
            // Recovery: without an opening backtick, synthesize an empty template expression.
            return createMissingTemplateExpressionNode(reKeyword, SyntaxKind.REGEX_TEMPLATE_EXPRESSION);
        }

        STNode content = parseTemplateContentAsRegExp();
        STNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_END);
        return STNodeFactory.createTemplateExpressionNode(SyntaxKind.REGEX_TEMPLATE_EXPRESSION, reKeyword,
                startingBackTick, content, endingBackTick);
    }

    // Build a template expression with missing backticks and empty content, carrying a
    // missing-backtick-string diagnostic, used for recovery when the opening backtick is absent.
    private STNode createMissingTemplateExpressionNode(STNode reKeyword, SyntaxKind kind) {
        STNode startingBackTick = SyntaxErrors.createMissingToken(SyntaxKind.BACKTICK_TOKEN);
        STNode endingBackTick = SyntaxErrors.createMissingToken(SyntaxKind.BACKTICK_TOKEN);
        STNode content = STAbstractNodeFactory.createEmptyNodeList();
        STNode templateExpr = STNodeFactory.createTemplateExpressionNode(kind, reKeyword, startingBackTick, content,
                endingBackTick);
        templateExpr = SyntaxErrors.addDiagnostic(templateExpr,
                DiagnosticErrorCode.ERROR_MISSING_BACKTICK_STRING);
        return templateExpr;
    }

    /**
     * Parse the content of the template string as regular expression. This method first read the
     * input in the same way as the raw-backtick-template (BacktickString). Then
     * it parses the content as regular expression.
     *
     * @return Template expression node
     */
    private STNode parseTemplateContentAsRegExp() {
        // Lex the backtick content in REGEXP mode, separating interpolations from the
        // static chunks; the static text is then re-parsed by the RegExp parser with
        // `${}` placeholders marking the interpolation sites.
        this.tokenReader.startMode(ParserMode.REGEXP);
        ArrayDeque<STNode> expressions = new ArrayDeque<>();
        StringBuilder regExpStringBuilder = new StringBuilder();
        STToken nextToken = peek();
        while (!isEndOfBacktickContent(nextToken.kind)) {
            STNode contentItem = parseTemplateItem();
            if (contentItem.kind == SyntaxKind.TEMPLATE_STRING) {
                regExpStringBuilder.append(((STToken) contentItem).text());
            } else {
                regExpStringBuilder.append("${}");
                expressions.add(contentItem);
            }
            nextToken = peek();
        }
        this.tokenReader.endMode();

        CharReader charReader = CharReader.from(regExpStringBuilder.toString());
        AbstractTokenReader tokenReader = new TokenReader(new RegExpLexer(charReader));
        RegExpParser regExpParser = new RegExpParser(tokenReader, expressions);
        return regExpParser.parse();
    }

    /**
     * Parse interpolation of a back-tick string.
     * <p>
     * <code>
     * interpolation := ${ expression }
     * </code>
     *
     * @return Interpolation node
     */
    private STNode parseInterpolation() {
        startContext(ParserRuleContext.INTERPOLATION);
        STNode interpolStart = parseInterpolationStart();
        STNode expr = parseExpression();

        // Consume any leftover tokens up to the closing brace, attaching them to the
        // expression as invalid-node minutiae.
        while (!isEndOfInterpolation()) {
            STToken nextToken = consume();
            expr = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(expr, nextToken,
                    DiagnosticErrorCode.ERROR_INVALID_TOKEN, nextToken.text());
        }

        STNode closeBrace = parseCloseBrace();
        endContext();
        return STNodeFactory.createInterpolationNode(interpolStart, expr, closeBrace);
    }

    // The interpolation ends at EOF, a backtick, or a `}` once the lexer has left the
    // interpolation modes (a `}` inside a nested braced context does not end it).
    private boolean isEndOfInterpolation() {
        SyntaxKind nextTokenKind = peek().kind;
        switch (nextTokenKind) {
            case EOF_TOKEN:
            case BACKTICK_TOKEN:
                return true;
            default:
                ParserMode currentLexerMode = this.tokenReader.getCurrentMode();
                return nextTokenKind == SyntaxKind.CLOSE_BRACE_TOKEN && currentLexerMode != ParserMode.INTERPOLATION &&
                        currentLexerMode != ParserMode.INTERPOLATION_BRACED_CONTENT;
        }
    }

    /**
     * Parse interpolation start token.
     * <p>
     * <code>interpolation-start := ${</code>
     *
     * @return Interpolation start token
     */
    private STNode parseInterpolationStart() {
        STToken token = peek();
        if (token.kind == SyntaxKind.INTERPOLATION_START_TOKEN) {
            return consume();
        } else {
            recover(token, ParserRuleContext.INTERPOLATION_START_TOKEN);
            return parseInterpolationStart();
        }
    }

    /**
     * Parse back-tick token.
     *
     * @return Back-tick token
     */
    private STNode parseBacktickToken(ParserRuleContext ctx) {
        STToken token = peek();
        if (token.kind == SyntaxKind.BACKTICK_TOKEN) {
            return consume();
        } else {
            recover(token, ctx);
            return parseBacktickToken(ctx);
        }
    }

    /**
     * Parse table type descriptor.
     * <p>
     * table-type-descriptor := table row-type-parameter [key-constraint]
     * row-type-parameter := type-parameter
     * key-constraint := key-specifier | key-type-constraint
     * key-specifier := key ( [ field-name (, field-name)* ] )
     * key-type-constraint := key type-parameter
     * </p>
     *
     * @return Parsed table type desc node.
     */
    private STNode parseTableTypeDescriptor(STNode tableKeywordToken) {
        STNode rowTypeParameterNode = parseRowTypeParameter();
        STNode keyConstraintNode;
        STToken nextToken = peek();
        if (isKeyKeyword(nextToken)) {
            // `key` is a contextual keyword lexed as an identifier; convert it first.
            STNode keyKeywordToken = getKeyKeyword(consume());
            keyConstraintNode = parseKeyConstraint(keyKeywordToken);
        } else {
            // The key constraint is optional.
            keyConstraintNode = STNodeFactory.createEmptyNode();
        }
        return STNodeFactory.createTableTypeDescriptorNode(tableKeywordToken, rowTypeParameterNode, keyConstraintNode);
    }

    /**
     * Parse row type parameter node.
     * <p>
     * row-type-parameter := type-parameter
     * </p>
     *
     * @return Parsed node.
     */
    private STNode parseRowTypeParameter() {
        startContext(ParserRuleContext.ROW_TYPE_PARAM);
        STNode rowTypeParameterNode = parseTypeParameter();
        endContext();
        return rowTypeParameterNode;
    }

    /**
     * Parse type parameter node.
     * <p>
     * type-parameter := < type-descriptor >
     * </p>
     *
     * @return Parsed node
     */
    private STNode parseTypeParameter() {
        STNode ltToken = parseLTToken();
        STNode typeNode = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);
        STNode gtToken = parseGTToken();
        return STNodeFactory.createTypeParameterNode(ltToken, typeNode, gtToken);
    }

    /**
     * Parse key constraint.
     * <p>
     * key-constraint := key-specifier | key-type-constraint
     * </p>
     *
     * @return Parsed node.
     */
    private STNode parseKeyConstraint(STNode keyKeywordToken) {
        switch (peek().kind) {
            case OPEN_PAREN_TOKEN:
                return parseKeySpecifier(keyKeywordToken);
            case LT_TOKEN:
                return parseKeyTypeConstraint(keyKeywordToken);
            default:
                recover(peek(), ParserRuleContext.KEY_CONSTRAINTS_RHS);
                return parseKeyConstraint(keyKeywordToken);
        }
    }

    /**
     * Parse key specifier given parsed key keyword token.
     * <p>
     * <code>key-specifier := key ( [ field-name (, field-name)* ] )</code>
     *
     * @return Parsed node
     */
    private STNode parseKeySpecifier(STNode keyKeywordToken) {
        startContext(ParserRuleContext.KEY_SPECIFIER);
        STNode openParenToken = parseOpenParenthesis();
        STNode fieldNamesNode = parseFieldNames();
        STNode closeParenToken = parseCloseParenthesis();
        endContext();
        return STNodeFactory.createKeySpecifierNode(keyKeywordToken, openParenToken, fieldNamesNode, closeParenToken);
    }

    /**
     * Parse key type constraint.
     * <p>
     * key-type-constraint := key type-parameter
     * </p>
     *
     * @return Parsed node
     */
    private STNode parseKeyTypeConstraint(STNode keyKeywordToken) {
        STNode typeParameterNode = parseTypeParameter();
        return STNodeFactory.createKeyTypeConstraintNode(keyKeywordToken, typeParameterNode);
    }

    /**
     * Parse function type descriptor.
     * <p>
     * <code>
     * function-type-descriptor := function-quals function function-signature
     * <br/>&nbsp;| [isolated] function
     * <br/>
     * function-quals := (transactional | isolated)*
     * </code>
     *
     * @param qualifiers Preceding type descriptor qualifiers
     * @return Function type descriptor node
     */
    private STNode parseFunctionTypeDesc(List<STNode> qualifiers) {
        startContext(ParserRuleContext.FUNC_TYPE_DESC);
        STNode functionKeyword = parseFunctionKeyword();

        // The signature is optional; it is parsed when a `(` follows, or when the
        // `transactional` qualifier is present (which requires the full form).
        boolean hasFuncSignature = false;
        STNode signature = STNodeFactory.createEmptyNode();
        if (peek().kind == SyntaxKind.OPEN_PAREN_TOKEN ||
                isSyntaxKindInList(qualifiers, SyntaxKind.TRANSACTIONAL_KEYWORD)) {
            signature = parseFuncSignature(true);
            hasFuncSignature = true;
        }

        STNode[] nodes = createFuncTypeQualNodeList(qualifiers, functionKeyword, hasFuncSignature);
        STNode qualifierList = nodes[0];
        functionKeyword = nodes[1];

        endContext();
        return STNodeFactory.createFunctionTypeDescriptorNode(qualifierList, functionKeyword, signature);
    }

    // Return the last node of a non-empty node list.
    private STNode getLastNodeInList(List<STNode> nodeList) {
        return nodeList.get(nodeList.size() - 1);
    }

    // Validate function-type qualifiers: duplicates and qualifiers that are not allowed
    // for the current form are attached as invalid-node minutiae. Returns a two-element
    // array: [validated qualifier node-list, (possibly updated) function keyword].
    private STNode[] createFuncTypeQualNodeList(List<STNode> qualifierList, STNode functionKeyword,
                                                boolean hasFuncSignature) {
        List<STNode> validatedList = new ArrayList<>();

        for (int i = 0; i < qualifierList.size(); i++) {
            STNode qualifier = qualifierList.get(i);
            int nextIndex = i + 1;

            if (isSyntaxKindInList(validatedList, qualifier.kind)) {
                // Duplicate qualifier: attach to the previous valid qualifier.
                updateLastNodeInListWithInvalidNode(validatedList, qualifier,
                        DiagnosticErrorCode.ERROR_DUPLICATE_QUALIFIER, ((STToken) qualifier).text());
            } else if (hasFuncSignature && isRegularFuncQual(qualifier.kind)) {
                validatedList.add(qualifier);
            } else if (qualifier.kind == SyntaxKind.ISOLATED_KEYWORD) {
                // `isolated` is also valid on the signature-less `[isolated] function` form.
                validatedList.add(qualifier);
            } else if (qualifierList.size() == nextIndex) {
                // The last invalid qualifier is attached to the function keyword itself.
                functionKeyword = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(functionKeyword, qualifier,
                        DiagnosticErrorCode.ERROR_QUALIFIER_NOT_ALLOWED, ((STToken) qualifier).text());
            } else {
                // Attach the invalid qualifier to the next qualifier in the list.
                updateANodeInListWithLeadingInvalidNode(qualifierList, nextIndex, qualifier,
                        DiagnosticErrorCode.ERROR_QUALIFIER_NOT_ALLOWED, ((STToken) qualifier).text());
            }
        }

        STNode nodeList = STNodeFactory.createNodeList(validatedList);
        return new STNode[]{ nodeList, functionKeyword };
    }

    // `isolated` and `transactional` are the qualifiers allowed on a full function type.
    private boolean isRegularFuncQual(SyntaxKind tokenKind) {
        switch (tokenKind) {
            case ISOLATED_KEYWORD:
            case TRANSACTIONAL_KEYWORD:
                return true;
            default:
                return false;
        }
    }

    /**
     * Parse explicit anonymous function expression.
     * <p>
     * <code>explicit-anonymous-function-expr :=
     * [annots] (isolated| transactional) function function-signature anon-func-body</code>
     *
     * @param annots Annotations.
     * @param qualifiers Function qualifiers
     * @param isRhsExpr Is expression in rhs context
     * @return Anonymous function expression node
     */
    private STNode parseExplicitFunctionExpression(STNode annots, List<STNode> qualifiers, boolean isRhsExpr) {
        startContext(ParserRuleContext.ANON_FUNC_EXPRESSION);
        STNode funcKeyword = parseFunctionKeyword();
        STNode[] nodes = createFuncTypeQualNodeList(qualifiers, funcKeyword, true);
        STNode qualifierList = nodes[0];
        funcKeyword = nodes[1];
        STNode funcSignature = parseFuncSignature(false);
        STNode funcBody = parseAnonFuncBody(isRhsExpr);
        return STNodeFactory.createExplicitAnonymousFunctionExpressionNode(annots, qualifierList, funcKeyword,
                funcSignature, funcBody);
    }

    /**
     * Parse anonymous function body.
     * <p>
     * <code>anon-func-body := block-function-body | expr-function-body</code>
     *
     * @param isRhsExpr Is expression in rhs context
     * @return Anon function body node
     */
    private STNode parseAnonFuncBody(boolean isRhsExpr) {
        switch (peek().kind) {
            case OPEN_BRACE_TOKEN:
            case EOF_TOKEN:
                STNode body = parseFunctionBodyBlock(true);
                endContext();
                return body;
            case RIGHT_DOUBLE_ARROW_TOKEN:
                endContext();
                return parseExpressionFuncBody(true, isRhsExpr);
            default:
                recover(peek(), ParserRuleContext.ANON_FUNC_BODY);
                return parseAnonFuncBody(isRhsExpr);
        }
    }

    /**
     * Parse expression function body.
* <p>
* <code>expr-function-body := => expression</code>
*
* @param isAnon    Is anonymous function.
* @param isRhsExpr Is expression in rhs context
* @return Expression function body node
*/
private STNode parseExpressionFuncBody(boolean isAnon, boolean isRhsExpr) {
    STNode rightDoubleArrow = parseDoubleRightArrow();
    STNode expression = parseExpression(OperatorPrecedence.REMOTE_CALL_ACTION, isRhsExpr, false);

    STNode semiColon;
    if (isAnon) {
        // An anonymous function's expression body is not terminated by a semicolon.
        semiColon = STNodeFactory.createEmptyNode();
    } else {
        semiColon = parseSemicolon();
    }
    return STNodeFactory.createExpressionFunctionBodyNode(rightDoubleArrow, expression, semiColon);
}

/**
 * Parse '=>' token.
 *
 * @return Double right arrow token
 */
private STNode parseDoubleRightArrow() {
    STToken token = peek();
    if (token.kind == SyntaxKind.RIGHT_DOUBLE_ARROW_TOKEN) {
        return consume();
    } else {
        recover(token, ParserRuleContext.EXPR_FUNC_BODY_START);
        return parseDoubleRightArrow();
    }
}

/**
 * Parse an implicit anonymous function, normalizing the already-parsed
 * parameter expression into a valid anon-func parameter node first.
 *
 * @param params    Previously parsed expression to be treated as the parameter list
 * @param isRhsExpr Is expression in rhs context
 * @return Implicit anonymous function expression node
 */
private STNode parseImplicitAnonFunc(STNode params, boolean isRhsExpr) {
    switch (params.kind) {
        case SIMPLE_NAME_REFERENCE:
        case INFER_PARAM_LIST:
            // Already a valid parameter form.
            break;
        case BRACED_EXPRESSION:
            params = getAnonFuncParam((STBracedExpressionNode) params);
            break;
        case NIL_LITERAL:
            // `()` becomes an empty parameter list.
            STNilLiteralNode nilLiteralNode = (STNilLiteralNode) params;
            params = STNodeFactory.createImplicitAnonymousFunctionParameters(nilLiteralNode.openParenToken,
                    STNodeFactory.createNodeList(new ArrayList<>()), nilLiteralNode.closeParenToken);
            break;
        default:
            // Anything else is invalid: replace with a synthetic parameter and
            // attach the original expression as invalid minutiae.
            STToken syntheticParam = STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
            syntheticParam = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(syntheticParam, params,
                    DiagnosticErrorCode.ERROR_INVALID_PARAM_LIST_IN_INFER_ANONYMOUS_FUNCTION_EXPR);
            params = STNodeFactory.createSimpleNameReferenceNode(syntheticParam);
    }

    STNode rightDoubleArrow = parseDoubleRightArrow();
    STNode expression = parseExpression(OperatorPrecedence.REMOTE_CALL_ACTION, isRhsExpr, false);
    return STNodeFactory.createImplicitAnonymousFunctionExpressionNode(params, rightDoubleArrow, expression);
}

/**
 * Create a new anon-func-param node from a braced expression.
 *
 * @param bracedExpression Braced expression
 * @return Anon-func param node
 */
private STNode getAnonFuncParam(STBracedExpressionNode bracedExpression) {
    List<STNode> paramList = new ArrayList<>();
    STNode innerExpression = bracedExpression.expression;
    STNode openParen = bracedExpression.openParen;
    if (innerExpression.kind == SyntaxKind.SIMPLE_NAME_REFERENCE) {
        paramList.add(innerExpression);
    } else {
        // Non-name inside the braces is invalid: attach it to the open paren.
        openParen = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(openParen, innerExpression,
                DiagnosticErrorCode.ERROR_INVALID_PARAM_LIST_IN_INFER_ANONYMOUS_FUNCTION_EXPR);
    }
    return STNodeFactory.createImplicitAnonymousFunctionParameters(openParen,
            STNodeFactory.createNodeList(paramList), bracedExpression.closeParen);
}

/**
 * Parse implicit anon function expression.
 *
 * @param openParen  Open parenthesis token
 * @param firstParam First parameter
 * @param isRhsExpr  Is expression in rhs context
 * @return Implicit anon function expression node
 */
private STNode parseImplicitAnonFunc(STNode openParen, STNode firstParam, boolean isRhsExpr) {
    List<STNode> paramList = new ArrayList<>();
    paramList.add(firstParam);

    // Parse the remaining params: (`,` identifier)* up to the close paren.
    STToken nextToken = peek();
    STNode paramEnd;
    STNode param;
    while (!isEndOfAnonFuncParametersList(nextToken.kind)) {
        paramEnd = parseImplicitAnonFuncParamEnd();
        if (paramEnd == null) {
            break;
        }
        paramList.add(paramEnd);
        param = parseIdentifier(ParserRuleContext.IMPLICIT_ANON_FUNC_PARAM);
        param = STNodeFactory.createSimpleNameReferenceNode(param);
        paramList.add(param);
        nextToken = peek();
    }

    STNode params = STNodeFactory.createNodeList(paramList);
    STNode closeParen = parseCloseParenthesis();
    endContext();

    STNode inferedParams = STNodeFactory.createImplicitAnonymousFunctionParameters(openParen, params, closeParen);
    return parseImplicitAnonFunc(inferedParams, isRhsExpr);
}

// Parses the separator after an implicit anon-func parameter.
// Returns the comma token, or null when the parameter list has ended.
private STNode parseImplicitAnonFuncParamEnd() {
    switch (peek().kind) {
        case COMMA_TOKEN:
            return parseComma();
        case CLOSE_PAREN_TOKEN:
            return null;
        default:
            recover(peek(), ParserRuleContext.ANON_FUNC_PARAM_RHS);
            return parseImplicitAnonFuncParamEnd();
    }
}

// Token kinds that terminate an implicit anon-func parameter list.
private boolean isEndOfAnonFuncParametersList(SyntaxKind tokenKind) {
    switch (tokenKind) {
        case EOF_TOKEN:
        case CLOSE_BRACE_TOKEN:
        case CLOSE_PAREN_TOKEN:
        case CLOSE_BRACKET_TOKEN:
        case SEMICOLON_TOKEN:
        case RETURNS_KEYWORD:
        case TYPE_KEYWORD:
        case LISTENER_KEYWORD:
        case IF_KEYWORD:
        case WHILE_KEYWORD:
        case DO_KEYWORD:
        case OPEN_BRACE_TOKEN:
        case RIGHT_DOUBLE_ARROW_TOKEN:
            return true;
        default:
            return false;
    }
}

/**
 * Parse tuple type descriptor.
 * <p>
 * <code>tuple-type-descriptor := [ tuple-member-type-descriptors ]
 * <br/><br/>
 * tuple-member-type-descriptors := member-type-descriptor (, member-type-descriptor)* [, tuple-rest-descriptor]
 * | [ tuple-rest-descriptor ]
 * <br/><br/>
 * member-type-descriptor := [annots] type-descriptor
 * tuple-rest-descriptor := type-descriptor ...
 * </code>
 *
 * @return Tuple type descriptor node
 */
private STNode parseTupleTypeDesc() {
    STNode openBracket = parseOpenBracket();
    startContext(ParserRuleContext.TUPLE_MEMBERS);
    STNode memberTypeDesc = parseTupleMemberTypeDescList();
    STNode closeBracket = parseCloseBracket();
    endContext();

    // An empty member list gets a missing-type-desc diagnostic on the open bracket.
    openBracket = cloneWithDiagnosticIfListEmpty(memberTypeDesc, openBracket,
            DiagnosticErrorCode.ERROR_MISSING_TYPE_DESC);

    return STNodeFactory.createTupleTypeDescriptorNode(openBracket, memberTypeDesc, closeBracket);
}

/**
 * Parse tuple member type descriptors.
 *
 * @return Parsed node
 */
private STNode parseTupleMemberTypeDescList() {
    List<STNode> typeDescList = new ArrayList<>();
    STToken nextToken = peek();

    if (isEndOfTypeList(nextToken.kind)) {
        return STNodeFactory.createEmptyNodeList();
    }

    STNode typeDesc = parseTupleMember();
    return parseTupleTypeMembers(typeDesc, typeDescList);
}

// Parses the remaining (`,` member)* sequence; a rest-descriptor must be last,
// so anything following one is invalidated.
private STNode parseTupleTypeMembers(STNode firstMember, List<STNode> memberList) {
    STNode tupleMemberRhs;
    while (!isEndOfTypeList(peek().kind)) {
        if (firstMember.kind == SyntaxKind.REST_TYPE) {
            firstMember = invalidateTypeDescAfterRestDesc(firstMember);
            break;
        }
        tupleMemberRhs = parseTupleMemberRhs();
        if (tupleMemberRhs == null) {
            break;
        }
        memberList.add(firstMember);
        memberList.add(tupleMemberRhs);
        firstMember = parseTupleMember();
    }

    memberList.add(firstMember);
    return STNodeFactory.createNodeList(memberList);
}

// Parses a single tuple member: optional annotations followed by a type descriptor.
private STNode parseTupleMember() {
    STNode annot = parseOptionalAnnotations();
    STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
    return createMemberOrRestNode(annot, typeDesc);
}

// Wraps the parsed type desc as a rest-descriptor when `...` follows,
// otherwise as a member-type-descriptor. Annotations are not allowed on a rest desc.
private STNode createMemberOrRestNode(STNode annot, STNode typeDesc) {
    STNode tupleMemberRhs = parseTypeDescInTupleRhs();
    if (tupleMemberRhs != null) {
        if (!((STNodeList) annot).isEmpty()) {
            typeDesc = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(typeDesc, annot,
                    DiagnosticErrorCode.ERROR_ANNOTATIONS_NOT_ALLOWED_FOR_TUPLE_REST_DESCRIPTOR);
        }
        return STNodeFactory.createRestDescriptorNode(typeDesc, tupleMemberRhs);
    }
    return STNodeFactory.createMemberTypeDescriptorNode(annot, typeDesc);
}

// Consumes members that illegally follow a rest-descriptor, attaching them
// to the rest-descriptor as invalid minutiae with a diagnostic.
private STNode invalidateTypeDescAfterRestDesc(STNode restDescriptor) {
    while (!isEndOfTypeList(peek().kind)) {
        STNode tupleMemberRhs = parseTupleMemberRhs();
        if (tupleMemberRhs == null) {
            break;
        }
        restDescriptor = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(restDescriptor, tupleMemberRhs, null);
        restDescriptor = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(restDescriptor, parseTupleMember(),
                DiagnosticErrorCode.ERROR_TYPE_DESC_AFTER_REST_DESCRIPTOR);
    }

    return restDescriptor;
}

// Parses the separator after a tuple member; null means the list has ended.
private STNode parseTupleMemberRhs() {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case COMMA_TOKEN:
            return parseComma();
        case CLOSE_BRACKET_TOKEN:
            return null;
        default:
            recover(nextToken, ParserRuleContext.TUPLE_TYPE_MEMBER_RHS);
            return parseTupleMemberRhs();
    }
}

// Returns the `...` token when the member is a rest-descriptor, else null.
private STNode parseTypeDescInTupleRhs() {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case COMMA_TOKEN:
        case CLOSE_BRACKET_TOKEN:
            return null;
        case ELLIPSIS_TOKEN:
            return parseEllipsis();
        default:
            recover(nextToken, ParserRuleContext.TYPE_DESC_IN_TUPLE_RHS);
            return parseTypeDescInTupleRhs();
    }
}

// Token kinds that terminate a tuple member type list.
private boolean isEndOfTypeList(SyntaxKind nextTokenKind) {
    switch (nextTokenKind) {
        case CLOSE_BRACKET_TOKEN:
        case CLOSE_BRACE_TOKEN:
        case CLOSE_PAREN_TOKEN:
        case EOF_TOKEN:
        case EQUAL_TOKEN:
        case SEMICOLON_TOKEN:
            return true;
        default:
            return false;
    }
}

/**
 * Parse table constructor or query expression.
 * <p>
 * <code>
 * table-constructor-or-query-expr := table-constructor-expr | query-expr
 * <br/>
 * table-constructor-expr := table [key-specifier] [ [row-list] ]
 * <br/>
 * query-expr := [query-construct-type] query-pipeline select-clause
 * [query-construct-type] query-pipeline select-clause on-conflict-clause?
* <br/>
* query-construct-type := table key-specifier | stream | map
* </code>
*
* @return Parsed node
*/
private STNode parseTableConstructorOrQuery(boolean isRhsExpr, boolean allowActions) {
    startContext(ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_EXPRESSION);
    STNode tableOrQueryExpr = parseTableConstructorOrQueryInternal(isRhsExpr, allowActions);
    endContext();
    return tableOrQueryExpr;
}

// Dispatches on the first token: a bare `from` starts a plain query-expr;
// `table` may start either a table constructor or a table query; `stream`/`map`
// start a query with that construct type.
private STNode parseTableConstructorOrQueryInternal(boolean isRhsExpr, boolean allowActions) {
    STNode queryConstructType;
    switch (peek().kind) {
        case FROM_KEYWORD:
            queryConstructType = STNodeFactory.createEmptyNode();
            return parseQueryExprRhs(queryConstructType, isRhsExpr, allowActions);
        case TABLE_KEYWORD:
            STNode tableKeyword = parseTableKeyword();
            return parseTableConstructorOrQuery(tableKeyword, isRhsExpr, allowActions);
        case STREAM_KEYWORD:
        case MAP_KEYWORD:
            STNode streamOrMapKeyword = consume();
            STNode keySpecifier = STNodeFactory.createEmptyNode();
            queryConstructType = parseQueryConstructType(streamOrMapKeyword, keySpecifier);
            return parseQueryExprRhs(queryConstructType, isRhsExpr, allowActions);
        default:
            recover(peek(), ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_START);
            return parseTableConstructorOrQueryInternal(isRhsExpr, allowActions);
    }
}

// Continues after a `table` keyword: `[` means a table constructor; a
// (possibly contextual) `key` keyword means a key-specifier follows, after
// which it can still be either a constructor or a query.
private STNode parseTableConstructorOrQuery(STNode tableKeyword, boolean isRhsExpr, boolean allowActions) {
    STNode keySpecifier;
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case OPEN_BRACKET_TOKEN:
            keySpecifier = STNodeFactory.createEmptyNode();
            return parseTableConstructorExprRhs(tableKeyword, keySpecifier);
        case KEY_KEYWORD:
            keySpecifier = parseKeySpecifier();
            return parseTableConstructorOrQueryRhs(tableKeyword, keySpecifier, isRhsExpr, allowActions);
        case IDENTIFIER_TOKEN:
            // `key` may be lexed as an identifier since it is contextual.
            if (isKeyKeyword(nextToken)) {
                keySpecifier = parseKeySpecifier();
                return parseTableConstructorOrQueryRhs(tableKeyword, keySpecifier, isRhsExpr, allowActions);
            }
            break;
        default:
            break;
    }

    recover(peek(), ParserRuleContext.TABLE_KEYWORD_RHS);
    return parseTableConstructorOrQuery(tableKeyword, isRhsExpr, allowActions);
}

// Continues after `table key-specifier`: `from` means a table query,
// `[` means a table constructor with the given key specifier.
private STNode parseTableConstructorOrQueryRhs(STNode tableKeyword, STNode keySpecifier, boolean isRhsExpr,
                                               boolean allowActions) {
    switch (peek().kind) {
        case FROM_KEYWORD:
            return parseQueryExprRhs(parseQueryConstructType(tableKeyword, keySpecifier), isRhsExpr,
                    allowActions);
        case OPEN_BRACKET_TOKEN:
            return parseTableConstructorExprRhs(tableKeyword, keySpecifier);
        default:
            recover(peek(), ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_RHS);
            return parseTableConstructorOrQueryRhs(tableKeyword, keySpecifier, isRhsExpr, allowActions);
    }
}

/**
 * Parse query construct type.
 * <p>
 * <code>query-construct-type := table key-specifier | stream | map</code>
 *
 * @return Parsed node
 */
private STNode parseQueryConstructType(STNode keyword, STNode keySpecifier) {
    return STNodeFactory.createQueryConstructTypeNode(keyword, keySpecifier);
}

/**
 * Parse query action or expression.
 * <p>
 * <code>
 * query-expr-rhs := query-pipeline select-clause
 * query-pipeline select-clause on-conflict-clause?
 * <br/>
 * query-pipeline := from-clause intermediate-clause*
 * </code>
 *
 * @param queryConstructType queryConstructType that precedes this rhs
 * @return Parsed node
 */
private STNode parseQueryExprRhs(STNode queryConstructType, boolean isRhsExpr, boolean allowActions) {
    switchContext(ParserRuleContext.QUERY_EXPRESSION);
    STNode fromClause = parseFromClause(isRhsExpr, allowActions);

    List<STNode> clauses = new ArrayList<>();
    STNode intermediateClause;
    STNode selectClause = null;
    STNode collectClause = null;
    while (!isEndOfIntermediateClause(peek().kind)) {
        intermediateClause = parseIntermediateClause(isRhsExpr, allowActions);
        if (intermediateClause == null) {
            break;
        }

        // Once a select/collect clause is found, any further clause is invalid
        // and is attached to it as invalid minutiae with a diagnostic.
        if (selectClause != null) {
            selectClause = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(selectClause, intermediateClause,
                    DiagnosticErrorCode.ERROR_MORE_CLAUSES_AFTER_SELECT_CLAUSE);
            continue;
        } else if (collectClause != null) {
            collectClause = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(collectClause, intermediateClause,
                    DiagnosticErrorCode.ERROR_MORE_CLAUSES_AFTER_COLLECT_CLAUSE);
            continue;
        }

        if (intermediateClause.kind == SyntaxKind.SELECT_CLAUSE) {
            selectClause = intermediateClause;
        } else if (intermediateClause.kind == SyntaxKind.COLLECT_CLAUSE) {
            collectClause = intermediateClause;
        } else {
            clauses.add(intermediateClause);
            continue;
        }

        // A nested query ends at its select/collect clause; at top level keep
        // scanning only while another clause-start token follows.
        if (isNestedQueryExpr() || !isValidIntermediateQueryStart(peek())) {
            break;
        }
    }

    if (peek().kind == SyntaxKind.DO_KEYWORD &&
            (!isNestedQueryExpr() || (selectClause == null && collectClause == null))) {
        // `do` makes this a query-action rather than a query-expression.
        STNode intermediateClauses = STNodeFactory.createNodeList(clauses);
        STNode queryPipeline = STNodeFactory.createQueryPipelineNode(fromClause, intermediateClauses);
        return parseQueryAction(queryConstructType, queryPipeline, selectClause, collectClause);
    }

    if (selectClause == null && collectClause == null) {
        // A query-expr requires a select clause: synthesize a missing one and put
        // the diagnostic on the last clause (or the from-clause when none exist).
        STNode selectKeyword = SyntaxErrors.createMissingToken(SyntaxKind.SELECT_KEYWORD);
        STNode expr = STNodeFactory
                .createSimpleNameReferenceNode(SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN));
        selectClause = STNodeFactory.createSelectClauseNode(selectKeyword, expr);

        if (clauses.isEmpty()) {
            fromClause = SyntaxErrors.addDiagnostic(fromClause, DiagnosticErrorCode.ERROR_MISSING_SELECT_CLAUSE);
        } else {
            int lastIndex = clauses.size() - 1;
            STNode intClauseWithDiagnostic = SyntaxErrors.addDiagnostic(clauses.get(lastIndex),
                    DiagnosticErrorCode.ERROR_MISSING_SELECT_CLAUSE);
            clauses.set(lastIndex, intClauseWithDiagnostic);
        }
    }

    STNode intermediateClauses = STNodeFactory.createNodeList(clauses);
    STNode queryPipeline = STNodeFactory.createQueryPipelineNode(fromClause, intermediateClauses);
    STNode onConflictClause = parseOnConflictClause(isRhsExpr);
    return STNodeFactory.createQueryExpressionNode(queryConstructType, queryPipeline,
            selectClause == null ? collectClause : selectClause, onConflictClause);
}

/**
 * Check whether currently parsing query expr is a nested query expression.
 *
 * @return <code>true</code> if currently parsing query-expr is a nested query-expr. <code>false</code> otherwise.
*/
private boolean isNestedQueryExpr() {
    // More than one QUERY_EXPRESSION context on the error-handler stack means
    // this query-expr is being parsed inside another query-expr.
    return Collections.frequency(this.errorHandler.getContextStack(), ParserRuleContext.QUERY_EXPRESSION) > 1;
}

// Returns true if the given token can start an intermediate query clause.
private boolean isValidIntermediateQueryStart(STToken token) {
    switch (token.kind) {
        case FROM_KEYWORD:
        case WHERE_KEYWORD:
        case LET_KEYWORD:
        case SELECT_KEYWORD:
        case JOIN_KEYWORD:
        case OUTER_KEYWORD:
        case ORDER_KEYWORD:
        case BY_KEYWORD:
        case ASCENDING_KEYWORD:
        case DESCENDING_KEYWORD:
        case LIMIT_KEYWORD:
            return true;
        case IDENTIFIER_TOKEN:
            // `group` and `collect` are contextual keywords lexed as identifiers.
            return isGroupOrCollectKeyword(token);
        default:
            return false;
    }
}

// True when the identifier token spells the contextual keyword `group` or `collect`.
private static boolean isGroupOrCollectKeyword(STToken nextToken) {
    return isKeywordMatch(SyntaxKind.COLLECT_KEYWORD, nextToken) ||
            isKeywordMatch(SyntaxKind.GROUP_KEYWORD, nextToken);
}

// True when the given token is an identifier whose text equals the string
// value of the given keyword kind (contextual-keyword check).
static boolean isKeywordMatch(SyntaxKind syntaxKind, STToken token) {
    return token.kind == SyntaxKind.IDENTIFIER_TOKEN && syntaxKind.stringValue().equals(token.text());
}

/**
 * Parse an intermediate clause.
 * <p>
 * <code>
 * intermediate-clause := from-clause | where-clause | let-clause | join-clause | limit-clause | order-by-clause
 * </code>
 *
 * @return Parsed node, or {@code null} when the next token ends the clause list
 */
private STNode parseIntermediateClause(boolean isRhsExpr, boolean allowActions) {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case FROM_KEYWORD:
            return parseFromClause(isRhsExpr, allowActions);
        case WHERE_KEYWORD:
            return parseWhereClause(isRhsExpr);
        case LET_KEYWORD:
            return parseLetClause(isRhsExpr, allowActions);
        case SELECT_KEYWORD:
            return parseSelectClause(isRhsExpr, allowActions);
        case JOIN_KEYWORD:
        case OUTER_KEYWORD:
            return parseJoinClause(isRhsExpr);
        case ORDER_KEYWORD:
        case ASCENDING_KEYWORD:
        case DESCENDING_KEYWORD:
            return parseOrderByClause(isRhsExpr);
        case LIMIT_KEYWORD:
            return parseLimitClause(isRhsExpr);
        case DO_KEYWORD:
        case SEMICOLON_TOKEN:
        case ON_KEYWORD:
        case CONFLICT_KEYWORD:
            // Not an intermediate clause; let the caller handle these.
            return null;
        default:
            // `collect` and `group` are contextual keywords; match by identifier text.
            if (isKeywordMatch(SyntaxKind.COLLECT_KEYWORD, nextToken)) {
                return parseCollectClause(isRhsExpr);
            }
            if (isKeywordMatch(SyntaxKind.GROUP_KEYWORD, nextToken)) {
                return parseGroupByClause(isRhsExpr);
            }
            recover(peek(), ParserRuleContext.QUERY_PIPELINE_RHS);
            return parseIntermediateClause(isRhsExpr, allowActions);
    }
}

// Parses a collect clause: the `collect` keyword followed by an expression.
private STNode parseCollectClause(boolean isRhsExpr) {
    startContext(ParserRuleContext.COLLECT_CLAUSE);
    STNode collectKeyword = parseCollectKeyword();
    STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
    endContext();
    return STNodeFactory.createCollectClauseNode(collectKeyword, expression);
}

/**
 * Parse collect-keyword.
 *
 * @return collect-keyword node
 */
private STNode parseCollectKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.COLLECT_KEYWORD) {
        return consume();
    }
    // `collect` may arrive as an identifier token since it is contextual.
    if (isKeywordMatch(SyntaxKind.COLLECT_KEYWORD, token)) {
        return getCollectKeyword(consume());
    }
    recover(token, ParserRuleContext.COLLECT_KEYWORD);
    return parseCollectKeyword();
}

// Re-creates the consumed identifier token as a COLLECT_KEYWORD token,
// preserving its minutiae and diagnostics.
private STNode getCollectKeyword(STToken token) {
    return STNodeFactory.createToken(SyntaxKind.COLLECT_KEYWORD, token.leadingMinutiae(),
            token.trailingMinutiae(), token.diagnostics());
}

/**
 * Parse join-keyword.
 *
 * @return Join-keyword node
 */
private STNode parseJoinKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.JOIN_KEYWORD) {
        return consume();
    } else {
        recover(token, ParserRuleContext.JOIN_KEYWORD);
        return parseJoinKeyword();
    }
}

/**
 * Parse equals keyword.
*
* @return Parsed node
*/
private STNode parseEqualsKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.EQUALS_KEYWORD) {
        return consume();
    } else {
        recover(token, ParserRuleContext.EQUALS_KEYWORD);
        return parseEqualsKeyword();
    }
}

// Token kinds that end the intermediate-clause list of a query; anything that
// can start an expression-rhs also terminates it.
private boolean isEndOfIntermediateClause(SyntaxKind tokenKind) {
    switch (tokenKind) {
        case CLOSE_BRACE_TOKEN:
        case CLOSE_PAREN_TOKEN:
        case CLOSE_BRACKET_TOKEN:
        case OPEN_BRACE_TOKEN:
        case SEMICOLON_TOKEN:
        case PUBLIC_KEYWORD:
        case FUNCTION_KEYWORD:
        case EOF_TOKEN:
        case RESOURCE_KEYWORD:
        case LISTENER_KEYWORD:
        case DOCUMENTATION_STRING:
        case PRIVATE_KEYWORD:
        case RETURNS_KEYWORD:
        case SERVICE_KEYWORD:
        case TYPE_KEYWORD:
        case CONST_KEYWORD:
        case FINAL_KEYWORD:
        case DO_KEYWORD:
        case ON_KEYWORD:
        case CONFLICT_KEYWORD:
            return true;
        default:
            return isValidExprRhsStart(tokenKind, SyntaxKind.NONE);
    }
}

/**
 * Parse from clause.
 * <p>
 * <code>from-clause := from typed-binding-pattern in expression</code>
 *
 * @return Parsed node
 */
private STNode parseFromClause(boolean isRhsExpr, boolean allowActions) {
    STNode fromKeyword = parseFromKeyword();
    STNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.FROM_CLAUSE);
    STNode inKeyword = parseInKeyword();
    STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, allowActions);
    return STNodeFactory.createFromClauseNode(fromKeyword, typedBindingPattern, inKeyword, expression);
}

/**
 * Parse from-keyword.
 *
 * @return From-keyword node
 */
private STNode parseFromKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.FROM_KEYWORD) {
        return consume();
    } else {
        recover(token, ParserRuleContext.FROM_KEYWORD);
        return parseFromKeyword();
    }
}

/**
 * Parse where clause.
 * <p>
 * <code>where-clause := where expression</code>
 *
 * @return Parsed node
 */
private STNode parseWhereClause(boolean isRhsExpr) {
    STNode whereKeyword = parseWhereKeyword();
    STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
    return STNodeFactory.createWhereClauseNode(whereKeyword, expression);
}

/**
 * Parse where-keyword.
 *
 * @return Where-keyword node
 */
private STNode parseWhereKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.WHERE_KEYWORD) {
        return consume();
    } else {
        recover(token, ParserRuleContext.WHERE_KEYWORD);
        return parseWhereKeyword();
    }
}

/**
 * Parse limit-keyword.
 *
 * @return limit-keyword node
 */
private STNode parseLimitKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.LIMIT_KEYWORD) {
        return consume();
    } else {
        recover(token, ParserRuleContext.LIMIT_KEYWORD);
        return parseLimitKeyword();
    }
}

/**
 * Parse let clause.
 * <p>
 * <code>let-clause := let let-var-decl [, let-var-decl]* </code>
 *
 * @return Parsed node
 */
private STNode parseLetClause(boolean isRhsExpr, boolean allowActions) {
    STNode letKeyword = parseLetKeyword();
    STNode letVarDeclarations =
            parseLetVarDeclarations(ParserRuleContext.LET_CLAUSE_LET_VAR_DECL, isRhsExpr, allowActions);

    // An empty declaration list gets a diagnostic on the `let` keyword.
    letKeyword = cloneWithDiagnosticIfListEmpty(letVarDeclarations, letKeyword,
            DiagnosticErrorCode.ERROR_MISSING_LET_VARIABLE_DECLARATION);

    return STNodeFactory.createLetClauseNode(letKeyword, letVarDeclarations);
}

/**
 * Parse group by clause.
* <code>group-by-clause := group by grouping-key-list</code>
*
* @return Parsed node
*/
private STNode parseGroupByClause(boolean isRhsExpr) {
    startContext(ParserRuleContext.GROUP_BY_CLAUSE);
    STNode groupKeyword = parseGroupKeyword();
    STNode byKeyword = parseByKeyword();
    STNode groupingKeys = parseGroupingKeyList(isRhsExpr);

    // An empty grouping-key list gets a diagnostic on the `by` keyword.
    byKeyword = cloneWithDiagnosticIfListEmpty(groupingKeys, byKeyword,
            DiagnosticErrorCode.ERROR_MISSING_GROUPING_KEY);
    endContext();
    return STNodeFactory.createGroupByClauseNode(groupKeyword, byKeyword, groupingKeys);
}

/**
 * Parse group-keyword.
 *
 * @return group-keyword node
 */
private STNode parseGroupKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.GROUP_KEYWORD) {
        return consume();
    }
    // `group` may arrive as an identifier token since it is contextual.
    if (isKeywordMatch(SyntaxKind.GROUP_KEYWORD, token)) {
        return getGroupKeyword(consume());
    }
    recover(token, ParserRuleContext.GROUP_KEYWORD);
    return parseGroupKeyword();
}

// Re-creates the consumed identifier token as a GROUP_KEYWORD token,
// preserving its minutiae and diagnostics.
private STNode getGroupKeyword(STToken token) {
    return STNodeFactory.createToken(SyntaxKind.GROUP_KEYWORD, token.leadingMinutiae(),
            token.trailingMinutiae(), token.diagnostics());
}

/**
 * Parse order-keyword.
 *
 * @return Order-keyword node
 */
private STNode parseOrderKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.ORDER_KEYWORD) {
        return consume();
    } else {
        recover(token, ParserRuleContext.ORDER_KEYWORD);
        return parseOrderKeyword();
    }
}

/**
 * Parse by-keyword.
 *
 * @return By-keyword node
 */
private STNode parseByKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.BY_KEYWORD) {
        return consume();
    } else {
        recover(token, ParserRuleContext.BY_KEYWORD);
        return parseByKeyword();
    }
}

/**
 * Parse order by clause.
 * <p>
 * <code>order-by-clause := order by order-key-list
 * </code>
 *
 * @return Parsed node
 */
private STNode parseOrderByClause(boolean isRhsExpr) {
    STNode orderKeyword = parseOrderKeyword();
    STNode byKeyword = parseByKeyword();
    STNode orderKeys = parseOrderKeyList(isRhsExpr);

    // An empty order-key list gets a diagnostic on the `by` keyword.
    byKeyword = cloneWithDiagnosticIfListEmpty(orderKeys, byKeyword, DiagnosticErrorCode.ERROR_MISSING_ORDER_KEY);
    return STNodeFactory.createOrderByClauseNode(orderKeyword, byKeyword, orderKeys);
}

/**
 * Parse grouping key.
 * <code>grouping-key-list := grouping-key ["," grouping-key]*</code>
 *
 * @return Parsed node
 */
private STNode parseGroupingKeyList(boolean isRhsExpr) {
    List<STNode> groupingKeys = new ArrayList<>();
    STToken nextToken = peek();

    if (isEndOfGroupByKeyListElement(nextToken)) {
        return STNodeFactory.createEmptyNodeList();
    }

    // First grouping key, then (`,` grouping-key)* until the list ends.
    STNode groupingKey = parseGroupingKey(isRhsExpr);
    groupingKeys.add(groupingKey);

    nextToken = peek();
    STNode groupingKeyListMemberEnd;
    while (!isEndOfGroupByKeyListElement(nextToken)) {
        groupingKeyListMemberEnd = parseGroupingKeyListMemberEnd();
        if (groupingKeyListMemberEnd == null) {
            break;
        }
        groupingKeys.add(groupingKeyListMemberEnd);
        groupingKey = parseGroupingKey(isRhsExpr);
        groupingKeys.add(groupingKey);
        nextToken = peek();
    }

    return STNodeFactory.createNodeList(groupingKeys);
}

/**
 * Parse order key.
* <p>
* <code>order-key-list := order-key [, order-key]*</code>
*
* @return Parsed node
*/
private STNode parseOrderKeyList(boolean isRhsExpr) {
    startContext(ParserRuleContext.ORDER_KEY_LIST);
    List<STNode> orderKeys = new ArrayList<>();
    STToken nextToken = peek();

    if (isEndOfOrderKeys(nextToken)) {
        endContext();
        return STNodeFactory.createEmptyNodeList();
    }

    // First order key, then (`,` order-key)* until the list ends.
    STNode orderKey = parseOrderKey(isRhsExpr);
    orderKeys.add(orderKey);

    nextToken = peek();
    STNode orderKeyListMemberEnd;
    while (!isEndOfOrderKeys(nextToken)) {
        orderKeyListMemberEnd = parseOrderKeyListMemberEnd();
        if (orderKeyListMemberEnd == null) {
            break;
        }
        orderKeys.add(orderKeyListMemberEnd);
        orderKey = parseOrderKey(isRhsExpr);
        orderKeys.add(orderKey);
        nextToken = peek();
    }

    endContext();
    return STNodeFactory.createNodeList(orderKeys);
}

// True when the next token ends the grouping-key list (EOF or a clause start).
private boolean isEndOfGroupByKeyListElement(STToken nextToken) {
    switch (nextToken.kind) {
        case COMMA_TOKEN:
            return false;
        case EOF_TOKEN:
            return true;
        default:
            return isQueryClauseStartToken(nextToken);
    }
}

// True when the next token ends the order-key list; commas and the
// ascending/descending direction keywords keep the list going.
private boolean isEndOfOrderKeys(STToken nextToken) {
    switch (nextToken.kind) {
        case COMMA_TOKEN:
        case ASCENDING_KEYWORD:
        case DESCENDING_KEYWORD:
            return false;
        case SEMICOLON_TOKEN:
        case EOF_TOKEN:
            return true;
        default:
            return isQueryClauseStartToken(nextToken);
    }
}

// True when the token starts a query clause (including the contextual
// `group`/`collect` keywords arriving as identifiers).
private boolean isQueryClauseStartToken(STToken nextToken) {
    switch (nextToken.kind) {
        case SELECT_KEYWORD:
        case LET_KEYWORD:
        case WHERE_KEYWORD:
        case OUTER_KEYWORD:
        case JOIN_KEYWORD:
        case ORDER_KEYWORD:
        case DO_KEYWORD:
        case FROM_KEYWORD:
        case LIMIT_KEYWORD:
            return true;
        case IDENTIFIER_TOKEN:
            return isGroupOrCollectKeyword(nextToken);
        default:
            return false;
    }
}

// Parses the separator after a grouping key; null means the list has ended.
private STNode parseGroupingKeyListMemberEnd() {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case COMMA_TOKEN:
            return consume();
        case EOF_TOKEN:
            return null;
        default:
            if (isQueryClauseStartToken(nextToken)) {
                return null;
            }
            recover(peek(), ParserRuleContext.GROUPING_KEY_LIST_ELEMENT_END);
            return parseGroupingKeyListMemberEnd();
    }
}

// Parses the separator after an order key; null means the list has ended.
private STNode parseOrderKeyListMemberEnd() {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case COMMA_TOKEN:
            return parseComma();
        case EOF_TOKEN:
            return null;
        default:
            if (isQueryClauseStartToken(nextToken)) {
                return null;
            }
            recover(peek(), ParserRuleContext.ORDER_KEY_LIST_END);
            return parseOrderKeyListMemberEnd();
    }
}

// Parses a grouping-key variable declaration:
// inferable-type-descriptor variable-name "=" expression.
private STNode parseGroupingKeyVariableDeclaration(boolean isRhsExpr) {
    STNode groupingKeyElementTypeDesc =
            parseTypeDescriptor(ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER_IN_GROUPING_KEY);
    startContext(ParserRuleContext.BINDING_PATTERN_STARTING_IDENTIFIER);
    STNode groupingKeySimpleBP = createCaptureOrWildcardBP(parseVariableName());
    endContext();
    STNode equalsToken = parseAssignOp();
    STNode groupingKeyExpression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
    return STNodeFactory.createGroupingKeyVarDeclarationNode(groupingKeyElementTypeDesc, groupingKeySimpleBP,
            equalsToken, groupingKeyExpression);
}

/**
 * Parse grouping key.
 * <code>grouping-key := variable-name | inferable-type-descriptor variable-name "=" expression</code>
 *
 * @return Parsed node
 */
private STNode parseGroupingKey(boolean isRhsExpr) {
    STToken nextToken = peek();
    SyntaxKind nextTokenKind = nextToken.kind;
    // A lone identifier is a variable-name key, unless lookahead shows it
    // starts a `type name = expr` declaration.
    if (nextTokenKind == SyntaxKind.IDENTIFIER_TOKEN && !isPossibleGroupingKeyVarDeclaration()) {
        return STNodeFactory.createSimpleNameReferenceNode(parseVariableName());
    } else if (isTypeStartingToken(nextTokenKind, nextToken)) {
        return parseGroupingKeyVariableDeclaration(isRhsExpr);
    }

    recover(nextToken, ParserRuleContext.GROUPING_KEY_LIST_ELEMENT);
    return parseGroupingKey(isRhsExpr);
}

// Lookahead: `ident =` or `ident ident =` indicates a grouping-key var declaration.
private boolean isPossibleGroupingKeyVarDeclaration() {
    SyntaxKind nextNextTokenKind = getNextNextToken().kind;
    return nextNextTokenKind == SyntaxKind.EQUAL_TOKEN ||
            nextNextTokenKind == SyntaxKind.IDENTIFIER_TOKEN && peek(3).kind == SyntaxKind.EQUAL_TOKEN;
}

/**
 * Parse order key.
* <p>
* <code>order-key := expression (ascending | descending)?</code>
*
* @return Parsed node
*/
private STNode parseOrderKey(boolean isRhsExpr) {
    STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);

    // The sort direction is optional; default is an empty node.
    STNode orderDirection;
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case ASCENDING_KEYWORD:
        case DESCENDING_KEYWORD:
            orderDirection = consume();
            break;
        default:
            orderDirection = STNodeFactory.createEmptyNode();
    }

    return STNodeFactory.createOrderKeyNode(expression, orderDirection);
}

/**
 * Parse select clause.
 * <p>
 * <code>select-clause := select expression</code>
 *
 * @return Parsed node
 */
private STNode parseSelectClause(boolean isRhsExpr, boolean allowActions) {
    startContext(ParserRuleContext.SELECT_CLAUSE);
    STNode selectKeyword = parseSelectKeyword();
    STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, allowActions);
    endContext();
    return STNodeFactory.createSelectClauseNode(selectKeyword, expression);
}

/**
 * Parse select-keyword.
 *
 * @return Select-keyword node
 */
private STNode parseSelectKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.SELECT_KEYWORD) {
        return consume();
    } else {
        recover(token, ParserRuleContext.SELECT_KEYWORD);
        return parseSelectKeyword();
    }
}

/**
 * Parse on-conflict clause.
 * <p>
 * <code>
 * onConflictClause := on conflict expression
 * </code>
 *
 * @return On conflict clause node, or an empty node when the clause is absent
 */
private STNode parseOnConflictClause(boolean isRhsExpr) {
    STToken nextToken = peek();
    // The clause is optional: bail out unless `on` or `conflict` follows.
    if (nextToken.kind != SyntaxKind.ON_KEYWORD && nextToken.kind != SyntaxKind.CONFLICT_KEYWORD) {
        return STNodeFactory.createEmptyNode();
    }

    startContext(ParserRuleContext.ON_CONFLICT_CLAUSE);
    STNode onKeyword = parseOnKeyword();
    STNode conflictKeyword = parseConflictKeyword();
    endContext();
    STNode expr = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
    return STNodeFactory.createOnConflictClauseNode(onKeyword, conflictKeyword, expr);
}

/**
 * Parse conflict keyword.
 *
 * @return Conflict keyword node
 */
private STNode parseConflictKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.CONFLICT_KEYWORD) {
        return consume();
    } else {
        recover(token, ParserRuleContext.CONFLICT_KEYWORD);
        return parseConflictKeyword();
    }
}

/**
 * Parse limit clause.
 * <p>
 * <code>limitClause := limit expression</code>
 *
 * @return Limit expression node
 */
private STNode parseLimitClause(boolean isRhsExpr) {
    STNode limitKeyword = parseLimitKeyword();
    STNode expr = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
    return STNodeFactory.createLimitClauseNode(limitKeyword, expr);
}

/**
 * Parse join clause.
 * <p>
 * <code>
 * join-clause := (join-var-decl | outer-join-var-decl) in expression on-clause
 * <br/>
 * join-var-decl := join (typeName | var) bindingPattern
 * <br/>
 * outer-join-var-decl := outer join var binding-pattern
 * </code>
 *
 * @return Join clause
 */
private STNode parseJoinClause(boolean isRhsExpr) {
    startContext(ParserRuleContext.JOIN_CLAUSE);

    // Optional `outer` keyword before `join`.
    STNode outerKeyword;
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.OUTER_KEYWORD) {
        outerKeyword = consume();
    } else {
        outerKeyword = STNodeFactory.createEmptyNode();
    }

    STNode joinKeyword = parseJoinKeyword();
    STNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.JOIN_CLAUSE);
    STNode inKeyword = parseInKeyword();
    STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
    endContext();
    STNode onCondition = parseOnClause(isRhsExpr);
    return STNodeFactory.createJoinClauseNode(outerKeyword, joinKeyword, typedBindingPattern, inKeyword,
            expression, onCondition);
}

/**
 * Parse on clause.
     * <p>
     * <code>on clause := `on` expression `equals` expression</code>
     *
     * @return On clause node
     */
    private STNode parseOnClause(boolean isRhsExpr) {
        STToken nextToken = peek();
        // If the next token already starts another query clause, the `on` clause is
        // missing entirely; synthesize one made of missing tokens for recovery.
        if (isQueryClauseStartToken(nextToken)) {
            return createMissingOnClauseNode();
        }
        startContext(ParserRuleContext.ON_CLAUSE);
        STNode onKeyword = parseOnKeyword();
        STNode lhsExpression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
        STNode equalsKeyword = parseEqualsKeyword();
        endContext();
        STNode rhsExpression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
        return STNodeFactory.createOnClauseNode(onKeyword, lhsExpression, equalsKeyword, rhsExpression);
    }

    /**
     * Create an `on` clause whose tokens are all missing tokens carrying diagnostics.
     * Used when the clause is absent so downstream phases still see a complete node.
     *
     * @return On clause node built from missing tokens
     */
    private STNode createMissingOnClauseNode() {
        STNode onKeyword = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.ON_KEYWORD,
                DiagnosticErrorCode.ERROR_MISSING_ON_KEYWORD);
        STNode identifier = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,
                DiagnosticErrorCode.ERROR_MISSING_IDENTIFIER);
        STNode equalsKeyword = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.EQUALS_KEYWORD,
                DiagnosticErrorCode.ERROR_MISSING_EQUALS_KEYWORD);

        STNode lhsExpression = STNodeFactory.createSimpleNameReferenceNode(identifier);
        STNode rhsExpression = STNodeFactory.createSimpleNameReferenceNode(identifier);
        return STNodeFactory.createOnClauseNode(onKeyword, lhsExpression, equalsKeyword, rhsExpression);
    }

    /**
     * Parse start action.
     * <p>
     * <code>start-action := [annots] start (function-call-expr|method-call-expr|remote-method-call-action)</code>
     *
     * @param annots Annotations attached to the start action (may be empty)
     * @return Start action node
     */
    private STNode parseStartAction(STNode annots) {
        STNode startKeyword = parseStartKeyword();
        STNode expr = parseActionOrExpression();

        // `start` only accepts a call-shaped target; anything else is repaired below.
        switch (expr.kind) {
            case FUNCTION_CALL:
            case METHOD_CALL:
            case REMOTE_METHOD_CALL_ACTION:
                break;
            case SIMPLE_NAME_REFERENCE:
            case QUALIFIED_NAME_REFERENCE:
            case FIELD_ACCESS:
            case ASYNC_SEND_ACTION:
                // These can be completed into a valid call by inserting missing parens.
                expr = generateValidExprForStartAction(expr);
                break;
            default:
                // Unrecoverable target: attach it to the keyword as invalid-node minutiae
                // and substitute a fully-missing function call.
                startKeyword = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startKeyword, expr,
                        DiagnosticErrorCode.ERROR_INVALID_EXPRESSION_IN_START_ACTION);
                STNode funcName = SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
                funcName = STNodeFactory.createSimpleNameReferenceNode(funcName);
                STNode openParenToken = SyntaxErrors.createMissingToken(SyntaxKind.OPEN_PAREN_TOKEN);
                STNode closeParenToken = SyntaxErrors.createMissingToken(SyntaxKind.CLOSE_PAREN_TOKEN);
                expr = STNodeFactory.createFunctionCallExpressionNode(funcName, openParenToken,
                        STNodeFactory.createEmptyNodeList(), closeParenToken);
                break;
        }

        return STNodeFactory.createStartActionNode(getAnnotations(annots), startKeyword, expr);
    }

    /**
     * Convert a name reference, field access, or async send into the corresponding
     * call form by appending missing parentheses (with diagnostics), so the start
     * action gets a structurally valid target.
     *
     * @param expr Expression to convert
     * @return A function-call, method-call, or remote-method-call node
     */
    private STNode generateValidExprForStartAction(STNode expr) {
        STNode openParenToken = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.OPEN_PAREN_TOKEN,
                DiagnosticErrorCode.ERROR_MISSING_OPEN_PAREN_TOKEN);
        STNode arguments = STNodeFactory.createEmptyNodeList();
        STNode closeParenToken = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.CLOSE_PAREN_TOKEN,
                DiagnosticErrorCode.ERROR_MISSING_CLOSE_PAREN_TOKEN);

        switch (expr.kind) {
            case FIELD_ACCESS:
                STFieldAccessExpressionNode fieldAccessExpr = (STFieldAccessExpressionNode) expr;
                return STNodeFactory.createMethodCallExpressionNode(fieldAccessExpr.expression,
                        fieldAccessExpr.dotToken, fieldAccessExpr.fieldName, openParenToken, arguments,
                        closeParenToken);
            case ASYNC_SEND_ACTION:
                STAsyncSendActionNode asyncSendAction = (STAsyncSendActionNode) expr;
                return STNodeFactory.createRemoteMethodCallActionNode(asyncSendAction.expression,
                        asyncSendAction.rightArrowToken, asyncSendAction.peerWorker, openParenToken, arguments,
                        closeParenToken);
            default:
                // SIMPLE_NAME_REFERENCE / QUALIFIED_NAME_REFERENCE: treat as a function call.
                return STNodeFactory.createFunctionCallExpressionNode(expr, openParenToken, arguments,
                        closeParenToken);
        }
    }

    /**
     * Parse start keyword.
     *
     * @return Start keyword node
     */
    private STNode parseStartKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.START_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.START_KEYWORD);
            return parseStartKeyword();
        }
    }

    /**
     * Parse flush action.
     * <p>
     * <code>flush-action := flush [peer-worker]</code>
     *
     * @return flush action node
     */
    private STNode parseFlushAction() {
        STNode flushKeyword = parseFlushKeyword();
        STNode peerWorker = parseOptionalPeerWorkerName();
        return STNodeFactory.createFlushActionNode(flushKeyword, peerWorker);
    }

    /**
     * Parse flush keyword.
     *
     * @return flush keyword node
     */
    private STNode parseFlushKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.FLUSH_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.FLUSH_KEYWORD);
            return parseFlushKeyword();
        }
    }

    /**
     * Parse peer worker.
     * <p>
     * <code>peer-worker := worker-name | function</code>
     *
     * @return peer worker name node, or an empty node when no worker name follows
     */
    private STNode parseOptionalPeerWorkerName() {
        STToken token = peek();
        switch (token.kind) {
            case IDENTIFIER_TOKEN:
            case FUNCTION_KEYWORD:
                return STNodeFactory.createSimpleNameReferenceNode(consume());
            default:
                // Worker name is optional for flush: no recovery, just absent.
                return STNodeFactory.createEmptyNode();
        }
    }

    /**
     * Parse intersection type descriptor.
     * <p>
     * intersection-type-descriptor := type-descriptor & type-descriptor
     * </p>
     *
     * @return Parsed node
     */
    private STNode parseIntersectionTypeDescriptor(STNode leftTypeDesc, ParserRuleContext context,
                                                   boolean isTypedBindingPattern) {
        // Caller dispatches here on the `&` token, so it can be consumed unconditionally.
        STNode bitwiseAndToken = consume();
        STNode rightTypeDesc = parseTypeDescriptorInternal(new ArrayList<>(), context, isTypedBindingPattern, false,
                TypePrecedence.INTERSECTION);
        // Merge rather than directly construct, so precedence/associativity are honored.
        return mergeTypesWithIntersection(leftTypeDesc, bitwiseAndToken, rightTypeDesc);
    }

    /**
     * Creates an intersection type descriptor after validating lhs and rhs types.
     * <p>
     * <i>Note: Since type precedence and associativity are not taken into account here,
     * this method should not be called directly when types are unknown.
     * <br/>
     * Call {@code mergeTypesWithIntersection} instead, which handles precedence.</i>
     *
     * @param leftTypeDesc    lhs type
     * @param bitwiseAndToken bitwise-and token
     * @param rightTypeDesc   rhs type
     * @return an IntersectionTypeDescriptorNode
     */
    private STNode createIntersectionTypeDesc(STNode leftTypeDesc, STNode bitwiseAndToken, STNode rightTypeDesc) {
        // `var` is not a valid operand of an intersection; validation attaches diagnostics.
        leftTypeDesc = validateForUsageOfVar(leftTypeDesc);
        rightTypeDesc = validateForUsageOfVar(rightTypeDesc);
        return STNodeFactory.createIntersectionTypeDescriptorNode(leftTypeDesc, bitwiseAndToken, rightTypeDesc);
    }

    /**
     * Parse singleton type descriptor.
     * <p>
     * singleton-type-descriptor := simple-const-expr
     * simple-const-expr :=
     * nil-literal
     * | boolean-literal
     * | [Sign] int-literal
     * | [Sign] floating-point-literal
     * | string-literal
     * | constant-reference-expr
     * </p>
     *
     * @return Singleton type descriptor node
     */
    private STNode parseSingletonTypeDesc() {
        STNode simpleContExpr = parseSimpleConstExpr();
        return STNodeFactory.createSingletonTypeDescriptorNode(simpleContExpr);
    }

    /**
     * Parse a signed numeric literal (`+`/`-` followed by an int or float literal)
     * as a unary expression.
     *
     * @return Unary expression node wrapping the literal
     */
    private STNode parseSignedIntOrFloat() {
        STNode operator = parseUnaryOperator();
        STNode literal;
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case HEX_INTEGER_LITERAL_TOKEN:
            case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
            case HEX_FLOATING_POINT_LITERAL_TOKEN:
                literal = parseBasicLiteral();
                break;
            default:
                // Default path expects a decimal integer literal (recovered if absent).
                literal = parseDecimalIntLiteral(ParserRuleContext.DECIMAL_INTEGER_LITERAL_TOKEN);
                literal = STNodeFactory.createBasicLiteralNode(SyntaxKind.NUMERIC_LITERAL, literal);
        }
        return STNodeFactory.createUnaryExpressionNode(operator, literal);
    }

    /**
     * Check whether the given tokens can start a singleton type descriptor.
     *
     * @param tokenKind     Kind of the next token
     * @param nextNextToken Token after that, used to validate a sign prefix
     * @return <code>true</code> if this can start a singleton type desc
     */
    private static boolean isSingletonTypeDescStart(SyntaxKind tokenKind, STToken nextNextToken) {
        switch (tokenKind) {
            case STRING_LITERAL_TOKEN:
            case DECIMAL_INTEGER_LITERAL_TOKEN:
            case HEX_INTEGER_LITERAL_TOKEN:
            case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
            case HEX_FLOATING_POINT_LITERAL_TOKEN:
            case TRUE_KEYWORD:
            case FALSE_KEYWORD:
            case NULL_KEYWORD:
                return true;
            case PLUS_TOKEN:
            case MINUS_TOKEN:
                // A sign only starts a singleton type when a numeric literal follows.
                return isIntOrFloat(nextNextToken);
            default:
                return false;
        }
    }

    /**
     * Check whether the given token is an integer or floating-point literal.
     *
     * @param token Token to check
     * @return <code>true</code> for int/float literal tokens
     */
    static boolean isIntOrFloat(STToken token) {
        switch (token.kind) {
            case DECIMAL_INTEGER_LITERAL_TOKEN:
            case HEX_INTEGER_LITERAL_TOKEN:
            case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
            case HEX_FLOATING_POINT_LITERAL_TOKEN:
                return true;
            default:
                return false;
        }
    }

    /**
     * Check whether the parser reached to a valid expression start.
     *
     * @param nextTokenKind Kind of the next immediate token.
     * @param nextTokenIndex Index to the next token.
     * @return <code>true</code> if this is a start of a valid expression.
     * <code>false</code> otherwise
     */
    private boolean isValidExpressionStart(SyntaxKind nextTokenKind, int nextTokenIndex) {
        nextTokenIndex++;
        switch (nextTokenKind) {
            case DECIMAL_INTEGER_LITERAL_TOKEN:
            case HEX_INTEGER_LITERAL_TOKEN:
            case STRING_LITERAL_TOKEN:
            case NULL_KEYWORD:
            case TRUE_KEYWORD:
            case FALSE_KEYWORD:
            case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
            case HEX_FLOATING_POINT_LITERAL_TOKEN:
                SyntaxKind nextNextTokenKind = peek(nextTokenIndex).kind;
                // A literal followed by `|` or `&` may still be an expression start;
                // skip over the operator and keep looking ahead.
                if (nextNextTokenKind == SyntaxKind.PIPE_TOKEN ||
                        nextNextTokenKind == SyntaxKind.BITWISE_AND_TOKEN) {
                    nextTokenIndex++;
                    return isValidExpressionStart(peek(nextTokenIndex).kind, nextTokenIndex);
                }
                return nextNextTokenKind == SyntaxKind.SEMICOLON_TOKEN || nextNextTokenKind == SyntaxKind.COMMA_TOKEN ||
                        nextNextTokenKind == SyntaxKind.CLOSE_BRACKET_TOKEN ||
                        isValidExprRhsStart(nextNextTokenKind, SyntaxKind.SIMPLE_NAME_REFERENCE);
            case IDENTIFIER_TOKEN:
                return isValidExprRhsStart(peek(nextTokenIndex).kind, SyntaxKind.SIMPLE_NAME_REFERENCE);
            case OPEN_PAREN_TOKEN:
            case CHECK_KEYWORD:
            case CHECKPANIC_KEYWORD:
            case OPEN_BRACE_TOKEN:
            case TYPEOF_KEYWORD:
            case NEGATION_TOKEN:
            case EXCLAMATION_MARK_TOKEN:
            case TRAP_KEYWORD:
            case OPEN_BRACKET_TOKEN:
            case LT_TOKEN:
            case FROM_KEYWORD:
            case LET_KEYWORD:
            case BACKTICK_TOKEN:
            case NEW_KEYWORD:
            case LEFT_ARROW_TOKEN:
            case FUNCTION_KEYWORD:
            case TRANSACTIONAL_KEYWORD:
            case ISOLATED_KEYWORD:
            case BASE16_KEYWORD:
            case BASE64_KEYWORD:
                return true;
            case PLUS_TOKEN:
            case MINUS_TOKEN:
                // A sign: whatever follows it must itself start an expression.
                return isValidExpressionStart(peek(nextTokenIndex).kind, nextTokenIndex);
            case TABLE_KEYWORD:
            case MAP_KEYWORD:
                // `table from ...` / `map from ...` query constructors.
                return peek(nextTokenIndex).kind == SyntaxKind.FROM_KEYWORD;
            case STREAM_KEYWORD:
                STToken nextNextToken = peek(nextTokenIndex);
                return nextNextToken.kind == SyntaxKind.KEY_KEYWORD ||
                        nextNextToken.kind == SyntaxKind.OPEN_BRACKET_TOKEN ||
                        nextNextToken.kind == SyntaxKind.FROM_KEYWORD;
            case ERROR_KEYWORD:
                // Only `error(...)` constructor counts as an expression start here.
                return peek(nextTokenIndex).kind == SyntaxKind.OPEN_PAREN_TOKEN;
            case XML_KEYWORD:
            case STRING_KEYWORD:
            case RE_KEYWORD:
                // Template expressions: xml```, string```, re``` .
                return peek(nextTokenIndex).kind == SyntaxKind.BACKTICK_TOKEN;
            case START_KEYWORD:
            case FLUSH_KEYWORD:
            case WAIT_KEYWORD:
            default:
                // start/flush/wait begin actions, not plain expressions.
                return false;
        }
    }

    /**
     * Parse sync send action.
     * <p>
     * <code>sync-send-action := expression ->> peer-worker</code>
     *
     * @param expression LHS expression of the sync send action
     * @return Sync send action node
     */
    private STNode parseSyncSendAction(STNode expression) {
        STNode syncSendToken = parseSyncSendToken();
        STNode peerWorker = parsePeerWorkerName();
        return STNodeFactory.createSyncSendActionNode(expression, syncSendToken, peerWorker);
    }

    /**
     * Parse peer worker.
     * <p>
     * <code>peer-worker := worker-name | function</code>
     *
     * @return peer worker name node
     */
    private STNode parsePeerWorkerName() {
        STToken token = peek();
        switch (token.kind) {
            case IDENTIFIER_TOKEN:
            case FUNCTION_KEYWORD:
                return STNodeFactory.createSimpleNameReferenceNode(consume());
            default:
                // Unlike the optional variant, a peer worker is mandatory here: recover.
                recover(token, ParserRuleContext.PEER_WORKER_NAME);
                return parsePeerWorkerName();
        }
    }

    /**
     * Parse sync send token.
     * <p>
     * <code>sync-send-token := ->> </code>
     *
     * @return sync send token
     */
    private STNode parseSyncSendToken() {
        STToken token = peek();
        if (token.kind == SyntaxKind.SYNC_SEND_TOKEN) {
            return consume();
        } else {
            recover(token, ParserRuleContext.SYNC_SEND_TOKEN);
            return parseSyncSendToken();
        }
    }

    /**
     * Parse receive action.
     * <p>
     * <code>receive-action := single-receive-action | multiple-receive-action | alternate-receive-action</code>
     * <p><code>
     * single-receive-action := <- peer-worker
     * <br></br>
     * multiple-receive-action := <- { receive-field (, receive-field)* }
     * <br></br>
     * alternate-receive-action := <- peer-worker (| peer-worker)*
     * </code>
     *
     * @return Receive action
     */
    private STNode parseReceiveAction() {
        STNode leftArrow = parseLeftArrowToken();
        STNode receiveWorkers = parseReceiveWorkers();
        return STNodeFactory.createReceiveActionNode(leftArrow, receiveWorkers);
    }

    /**
     * Parse the worker part of a receive action: a single/alternate worker list or
     * a braced multiple-receive field list.
     *
     * @return Receive workers node
     */
    private STNode parseReceiveWorkers() {
        switch (peek().kind) {
            case FUNCTION_KEYWORD:
            case IDENTIFIER_TOKEN:
                return parseSingleOrAlternateReceiveWorkers();
            case OPEN_BRACE_TOKEN:
                return parseMultipleReceiveWorkers();
            default:
                recover(peek(), ParserRuleContext.RECEIVE_WORKERS);
                return parseReceiveWorkers();
        }
    }

    /**
     * Parse a single receive worker or a `|`-separated alternate receive worker list.
     *
     * @return A peer worker name (single) or an alternate-receive node
     */
    private STNode parseSingleOrAlternateReceiveWorkers() {
        startContext(ParserRuleContext.SINGLE_OR_ALTERNATE_WORKER);
        List<STNode> workers = new ArrayList<>();

        // Parse first peer worker name
        STNode peerWorker = parsePeerWorkerName();
        workers.add(peerWorker);

        STToken nextToken = peek();
        if (nextToken.kind != SyntaxKind.PIPE_TOKEN) {
            // No `|` follows: plain single receive.
            endContext();
            return peerWorker;
        }

        // Parse `| peer-worker` pairs for the alternate receive.
        while (nextToken.kind == SyntaxKind.PIPE_TOKEN) {
            STNode pipeToken = consume();
            workers.add(pipeToken);
            peerWorker = parsePeerWorkerName();
            workers.add(peerWorker);
            nextToken = peek();
        }

        endContext();
        return STNodeFactory.createAlternateReceiveNode(STNodeFactory.createNodeList(workers));
    }

    /**
     * Parse multiple worker receivers.
     * <p>
     * <code>{ receive-field (, receive-field)* }</code>
     *
     * @return Multiple worker receiver node
     */
    private STNode parseMultipleReceiveWorkers() {
        startContext(ParserRuleContext.MULTI_RECEIVE_WORKERS);
        STNode openBrace = parseOpenBrace();
        STNode receiveFields = parseReceiveFields();
        STNode closeBrace = parseCloseBrace();
        endContext();

        // An empty field list is a diagnostic attached to the open brace.
        openBrace = cloneWithDiagnosticIfListEmpty(receiveFields, openBrace,
                DiagnosticErrorCode.ERROR_MISSING_RECEIVE_FIELD_IN_RECEIVE_ACTION);
        return STNodeFactory.createReceiveFieldsNode(openBrace, receiveFields, closeBrace);
    }

    /**
     * Parse the comma-separated receive-field list inside a multiple-receive action.
     *
     * @return Node list of receive fields and separators
     */
    private STNode parseReceiveFields() {
        List<STNode> receiveFields = new ArrayList<>();
        STToken nextToken = peek();

        if (isEndOfReceiveFields(nextToken.kind)) {
            return STNodeFactory.createEmptyNodeList();
        }

        // Parse first receive field, that has no leading comma
        STNode receiveField = parseReceiveField();
        receiveFields.add(receiveField);

        // Parse the remaining receive fields
        nextToken = peek();
        STNode recieveFieldEnd;
        while (!isEndOfReceiveFields(nextToken.kind)) {
            recieveFieldEnd = parseReceiveFieldEnd();
            if (recieveFieldEnd == null) {
                break;
            }

            receiveFields.add(recieveFieldEnd);
            receiveField = parseReceiveField();
            receiveFields.add(receiveField);
            nextToken = peek();
        }

        return STNodeFactory.createNodeList(receiveFields);
    }

    /**
     * Check whether the token terminates the receive-field list.
     *
     * @param nextTokenKind Kind of the next token
     * @return <code>true</code> at EOF or close brace
     */
    private boolean isEndOfReceiveFields(SyntaxKind nextTokenKind) {
        switch (nextTokenKind) {
            case EOF_TOKEN:
            case CLOSE_BRACE_TOKEN:
                return true;
            default:
                return false;
        }
    }

    /**
     * Parse the separator after a receive field: a comma, or null when the list ends.
     *
     * @return Comma token, or <code>null</code> at the close brace
     */
    private STNode parseReceiveFieldEnd() {
        switch (peek().kind) {
            case COMMA_TOKEN:
                return parseComma();
            case CLOSE_BRACE_TOKEN:
                return null;
            default:
                recover(peek(), ParserRuleContext.RECEIVE_FIELD_END);
                return parseReceiveFieldEnd();
        }
    }

    /**
     * Parse receive field.
     * <p>
     * <code>receive-field := peer-worker | field-name : peer-worker</code>
     *
     * @return Receiver field node
     */
    private STNode parseReceiveField() {
        switch (peek().kind) {
            case FUNCTION_KEYWORD:
                // `function` is a valid peer-worker name (the default worker).
                STNode functionKeyword = consume();
                return STNodeFactory.createSimpleNameReferenceNode(functionKeyword);
            case IDENTIFIER_TOKEN:
                STNode identifier = parseIdentifier(ParserRuleContext.RECEIVE_FIELD_NAME);
                return createReceiveField(identifier);
            default:
                recover(peek(), ParserRuleContext.RECEIVE_FIELD);
                return parseReceiveField();
        }
    }

    /**
     * Decide between a bare worker name and a `field-name : peer-worker` pair,
     * based on whether a colon follows the identifier.
     *
     * @param identifier Identifier already consumed
     * @return Simple name reference or receive-field node
     */
    private STNode createReceiveField(STNode identifier) {
        if (peek().kind != SyntaxKind.COLON_TOKEN) {
            return STNodeFactory.createSimpleNameReferenceNode(identifier);
        }

        identifier = STNodeFactory.createSimpleNameReferenceNode(identifier);
        STNode colon = parseColon();
        STNode peerWorker = parsePeerWorkerName();
        return STNodeFactory.createReceiveFieldNode(identifier, colon, peerWorker);
    }

    /**
     * Parse left arrow (<-) token.
     *
     * @return left arrow token
     */
    private STNode parseLeftArrowToken() {
        STToken token = peek();
        if (token.kind == SyntaxKind.LEFT_ARROW_TOKEN) {
            return consume();
        } else {
            recover(token, ParserRuleContext.LEFT_ARROW_TOKEN);
            return parseLeftArrowToken();
        }
    }

    /**
     * Parse signed right shift token (>>).
     * This method should only be called by seeing a `DOUBLE_GT_TOKEN` or
     * by seeing a `GT_TOKEN` followed by a `GT_TOKEN`
     *
     * @return Parsed node
     */
    private STNode parseSignedRightShiftToken() {
        STNode firstToken = consume();
        if (firstToken.kind == SyntaxKind.DOUBLE_GT_TOKEN) {
            return firstToken;
        }

        // Fuse the two `>` tokens into one `>>` token, keeping the outer minutiae.
        STToken endLGToken = consume();
        STNode doubleGTToken = STNodeFactory.createToken(SyntaxKind.DOUBLE_GT_TOKEN, firstToken.leadingMinutiae(),
                endLGToken.trailingMinutiae());

        // Whitespace between the two `>` tokens is not allowed in the shift operator.
        if (hasTrailingMinutiae(firstToken)) {
            doubleGTToken = SyntaxErrors.addDiagnostic(doubleGTToken,
                    DiagnosticErrorCode.ERROR_NO_WHITESPACES_ALLOWED_IN_RIGHT_SHIFT_OP);
        }
        return doubleGTToken;
    }

    /**
     * Parse unsigned right shift token (>>>).
     * This method should only be called by seeing a `TRIPPLE_GT_TOKEN` or
     * by seeing a `GT_TOKEN` followed by two `GT_TOKEN`s
     *
     * @return Parsed node
     */
    private STNode parseUnsignedRightShiftToken() {
        STNode firstToken = consume();
        if (firstToken.kind == SyntaxKind.TRIPPLE_GT_TOKEN) {
            return firstToken;
        }

        // Fuse three `>` tokens into one `>>>` token, keeping the outer minutiae.
        STNode middleGTToken = consume();
        STNode endLGToken = consume();
        STNode unsignedRightShiftToken = STNodeFactory.createToken(SyntaxKind.TRIPPLE_GT_TOKEN,
                firstToken.leadingMinutiae(), endLGToken.trailingMinutiae());

        // Whitespace between any of the `>` tokens invalidates the operator.
        boolean validOpenGTToken = !hasTrailingMinutiae(firstToken);
        boolean validMiddleGTToken = !hasTrailingMinutiae(middleGTToken);
        if (validOpenGTToken && validMiddleGTToken) {
            return unsignedRightShiftToken;
        }

        unsignedRightShiftToken = SyntaxErrors.addDiagnostic(unsignedRightShiftToken,
                DiagnosticErrorCode.ERROR_NO_WHITESPACES_ALLOWED_IN_UNSIGNED_RIGHT_SHIFT_OP);
        return unsignedRightShiftToken;
    }

    /**
     * Parse wait action.
     * <p>
     * <code>wait-action := single-wait-action | multiple-wait-action | alternate-wait-action </code>
     *
     * @return Wait action node
     */
    private STNode parseWaitAction() {
        STNode waitKeyword = parseWaitKeyword();
        // A brace after `wait` selects the multiple-wait form.
        if (peek().kind == SyntaxKind.OPEN_BRACE_TOKEN) {
            return parseMultiWaitAction(waitKeyword);
        }

        return parseSingleOrAlternateWaitAction(waitKeyword);
    }

    /**
     * Parse wait keyword.
     *
     * @return wait keyword
     */
    private STNode parseWaitKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.WAIT_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.WAIT_KEYWORD);
            return parseWaitKeyword();
        }
    }

    /**
     * Parse single or alternate wait actions.
     * <p>
     * <code>
     * alternate-or-single-wait-action := wait wait-future-expr (| wait-future-expr)+
     * <br/>
     * wait-future-expr := expression but not mapping-constructor-expr
     * </code>
     *
     * @param waitKeyword wait keyword
     * @return Single or alternate wait action node
     */
    private STNode parseSingleOrAlternateWaitAction(STNode waitKeyword) {
        startContext(ParserRuleContext.ALTERNATE_WAIT_EXPRS);
        STToken nextToken = peek();

        // Return an empty list
        if (isEndOfWaitFutureExprList(nextToken.kind)) {
            endContext();
            // No expression after `wait`: synthesize a missing identifier with a diagnostic.
            STNode waitFutureExprs = STNodeFactory
                    .createSimpleNameReferenceNode(STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN));
            waitFutureExprs = SyntaxErrors.addDiagnostic(waitFutureExprs,
                    DiagnosticErrorCode.ERROR_MISSING_WAIT_FUTURE_EXPRESSION);
            return STNodeFactory.createWaitActionNode(waitKeyword, waitFutureExprs);
        }

        // Parse first wait, that has no leading comma
        List<STNode> waitFutureExprList = new ArrayList<>();
        STNode waitField = parseWaitFutureExpr();
        waitFutureExprList.add(waitField);

        // Parse remaining `| wait-future-expr` alternates
        nextToken = peek();
        STNode waitFutureExprEnd;
        while (!isEndOfWaitFutureExprList(nextToken.kind)) {
            waitFutureExprEnd = parseWaitFutureExprEnd();
            if (waitFutureExprEnd == null) {
                break;
            }

            waitFutureExprList.add(waitFutureExprEnd);
            waitField = parseWaitFutureExpr();
            waitFutureExprList.add(waitField);
            nextToken = peek();
        }

        endContext();
        // NOTE(review): only the first wait-future expression is used to build the node;
        // the `| expr` alternates collected above appear to be dropped — confirm intended.
        return STNodeFactory.createWaitActionNode(waitKeyword, waitFutureExprList.get(0));
    }

    /**
     * Check whether the token terminates the wait-future-expr list.
     *
     * @param nextTokenKind Kind of the next token
     * @return <code>true</code> at EOF, close brace, semicolon, or open brace
     */
    private boolean isEndOfWaitFutureExprList(SyntaxKind nextTokenKind) {
        switch (nextTokenKind) {
            case EOF_TOKEN:
            case CLOSE_BRACE_TOKEN:
            case SEMICOLON_TOKEN:
            case OPEN_BRACE_TOKEN:
                return true;
            case PIPE_TOKEN:
                // `|` continues the alternate list; explicitly not an end token.
            default:
                return false;
        }
    }

    /**
     * Parse a wait-future expression, attaching diagnostics for the two forms the
     * grammar forbids here: mapping constructors and actions.
     *
     * @return Wait future expression node
     */
    private STNode parseWaitFutureExpr() {
        STNode waitFutureExpr = parseActionOrExpression();
        if (waitFutureExpr.kind == SyntaxKind.MAPPING_CONSTRUCTOR) {
            waitFutureExpr = SyntaxErrors.addDiagnostic(waitFutureExpr,
                    DiagnosticErrorCode.ERROR_MAPPING_CONSTRUCTOR_EXPR_AS_A_WAIT_EXPR);
        } else if (isAction(waitFutureExpr)) {
            waitFutureExpr = SyntaxErrors.addDiagnostic(waitFutureExpr,
                    DiagnosticErrorCode.ERROR_ACTION_AS_A_WAIT_EXPR);
        }
        return waitFutureExpr;
    }

    /**
     * Parse the separator after a wait-future expression: a pipe, or null when the
     * list ends or nothing that can start an expression follows.
     *
     * @return Pipe token or <code>null</code>
     */
    private STNode parseWaitFutureExprEnd() {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case PIPE_TOKEN:
                return parsePipeToken();
            default:
                if (isEndOfWaitFutureExprList(nextToken.kind) ||
                        !isValidExpressionStart(nextToken.kind, 1)) {
                    return null;
                }

                recover(peek(), ParserRuleContext.WAIT_FUTURE_EXPR_END);
                return parseWaitFutureExprEnd();
        }
    }

    /**
     * Parse multiple wait action.
     * <p>
     * <code>multiple-wait-action := wait { wait-field (, wait-field)* }</code>
     *
     * @param waitKeyword Wait keyword
     * @return Multiple wait action node
     */
    private STNode parseMultiWaitAction(STNode waitKeyword) {
        startContext(ParserRuleContext.MULTI_WAIT_FIELDS);
        STNode openBrace = parseOpenBrace();
        STNode waitFields = parseWaitFields();
        STNode closeBrace = parseCloseBrace();
        endContext();

        // An empty wait-field list is a diagnostic attached to the open brace.
        openBrace = cloneWithDiagnosticIfListEmpty(waitFields, openBrace,
                DiagnosticErrorCode.ERROR_MISSING_WAIT_FIELD_IN_WAIT_ACTION);
        STNode waitFieldsNode = STNodeFactory.createWaitFieldsListNode(openBrace, waitFields, closeBrace);
        return STNodeFactory.createWaitActionNode(waitKeyword, waitFieldsNode);
    }

    /**
     * Parse the comma-separated wait-field list inside a multiple-wait action.
     *
     * @return Node list of wait fields and separators
     */
    private STNode parseWaitFields() {
        List<STNode> waitFields = new ArrayList<>();
        STToken nextToken = peek();

        if (isEndOfWaitFields(nextToken.kind)) {
            return STNodeFactory.createEmptyNodeList();
        }

        // Parse first wait field, that has no leading comma
        STNode waitField = parseWaitField();
        waitFields.add(waitField);

        // Parse the remaining wait fields
        nextToken = peek();
        STNode waitFieldEnd;
        while (!isEndOfWaitFields(nextToken.kind)) {
            waitFieldEnd = parseWaitFieldEnd();
            if (waitFieldEnd == null) {
                break;
            }

            waitFields.add(waitFieldEnd);
            waitField = parseWaitField();
            waitFields.add(waitField);
            nextToken = peek();
        }

        return STNodeFactory.createNodeList(waitFields);
    }

    /**
     * Check whether the token terminates the wait-field list.
     *
     * @param nextTokenKind Kind of the next token
     * @return <code>true</code> at EOF or close brace
     */
    private boolean isEndOfWaitFields(SyntaxKind nextTokenKind) {
        switch (nextTokenKind) {
            case EOF_TOKEN:
            case CLOSE_BRACE_TOKEN:
                return true;
            default:
                return false;
        }
    }

    /**
     * Parse the separator after a wait field: a comma, or null when the list ends.
     *
     * @return Comma token, or <code>null</code> at the close brace
     */
    private STNode parseWaitFieldEnd() {
        switch (peek().kind) {
            case COMMA_TOKEN:
                return parseComma();
            case CLOSE_BRACE_TOKEN:
                return null;
            default:
                recover(peek(), ParserRuleContext.WAIT_FIELD_END);
                return parseWaitFieldEnd();
        }
    }

    /**
     * Parse wait field.
     * <p>
     * <code>wait-field := variable-name | field-name : wait-future-expr</code>
     *
     * @return Receiver field node
     */
    private STNode parseWaitField() {
        switch (peek().kind) {
            case IDENTIFIER_TOKEN:
                STNode identifier = parseIdentifier(ParserRuleContext.WAIT_FIELD_NAME);
                identifier = STNodeFactory.createSimpleNameReferenceNode(identifier);
                return createQualifiedWaitField(identifier);
            default:
                recover(peek(), ParserRuleContext.WAIT_FIELD_NAME);
                return parseWaitField();
        }
    }

    /**
     * Decide between a bare variable-name wait field and a `field-name : expr` pair,
     * based on whether a colon follows the identifier.
     *
     * @param identifier Name reference already parsed
     * @return Name reference or wait-field node
     */
    private STNode createQualifiedWaitField(STNode identifier) {
        if (peek().kind != SyntaxKind.COLON_TOKEN) {
            return identifier;
        }

        STNode colon = parseColon();
        STNode waitFutureExpr = parseWaitFutureExpr();
        return STNodeFactory.createWaitFieldNode(identifier, colon, waitFutureExpr);
    }

    /**
     * Parse annot access expression.
     * <p>
     * <code>
     * annot-access-expr := expression .@ annot-tag-reference
     * <br/>
     * annot-tag-reference := qualified-identifier | identifier
     * </code>
     *
     * @param lhsExpr Preceding expression of the annot access access
     * @return Parsed node
     */
    private STNode parseAnnotAccessExpression(STNode lhsExpr, boolean isInConditionalExpr) {
        STNode annotAccessToken = parseAnnotChainingToken();
        STNode annotTagReference = parseFieldAccessIdentifier(isInConditionalExpr);
        return STNodeFactory.createAnnotAccessExpressionNode(lhsExpr, annotAccessToken, annotTagReference);
    }

    /**
     * Parse annot-chaining-token.
     *
     * @return Parsed node
     */
    private STNode parseAnnotChainingToken() {
        STToken token = peek();
        if (token.kind == SyntaxKind.ANNOT_CHAINING_TOKEN) {
            return consume();
        } else {
            recover(token, ParserRuleContext.ANNOT_CHAINING_TOKEN);
            return parseAnnotChainingToken();
        }
    }

    /**
     * Parse field access identifier.
     * <p>
     * <code>field-access-identifier := qualified-identifier | identifier</code>
     *
     * @return Parsed node
     */
    private STNode parseFieldAccessIdentifier(boolean isInConditionalExpr) {
        STToken nextToken = peek();
        if (!isPredeclaredIdentifier(nextToken.kind)) {
            // Missing field name: synthesize one with a diagnostic and continue.
            STNode identifier = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,
                    DiagnosticErrorCode.ERROR_MISSING_IDENTIFIER);
            return parseQualifiedIdentifier(identifier, isInConditionalExpr);
        }

        return parseQualifiedIdentifier(ParserRuleContext.FIELD_ACCESS_IDENTIFIER, isInConditionalExpr);
    }

    /**
     * Parse query action.
     * <p>
     * <code>query-action := query-pipeline do-clause
     * <br/>
     * do-clause := do block-stmt
     * </code>
     *
     * @param queryConstructType Query construct type. This is only for validation
     * @param queryPipeline      Query pipeline
     * @param selectClause       Select clause if any This is only for validation.
     * @param collectClause      Collect clause if any. This is only for validation.
     * @return Query action node
     */
    private STNode parseQueryAction(STNode queryConstructType, STNode queryPipeline, STNode selectClause,
                                    STNode collectClause) {
        // Construct-type, select, and collect clauses are invalid in a query action:
        // attach each to the pipeline as invalid-node minutiae with a diagnostic.
        if (queryConstructType != null) {
            queryPipeline = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(queryPipeline, queryConstructType,
                    DiagnosticErrorCode.ERROR_QUERY_CONSTRUCT_TYPE_IN_QUERY_ACTION);
        }
        if (selectClause != null) {
            queryPipeline = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(queryPipeline, selectClause,
                    DiagnosticErrorCode.ERROR_SELECT_CLAUSE_IN_QUERY_ACTION);
        }
        if (collectClause != null) {
            queryPipeline = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(queryPipeline, collectClause,
                    DiagnosticErrorCode.ERROR_COLLECT_CLAUSE_IN_QUERY_ACTION);
        }

        startContext(ParserRuleContext.DO_CLAUSE);
        STNode doKeyword = parseDoKeyword();
        STNode blockStmt = parseBlockNode();
        endContext();
        return STNodeFactory.createQueryActionNode(queryPipeline, doKeyword, blockStmt);
    }

    /**
     * Parse 'do' keyword.
     *
     * @return do keyword node
     */
    private STNode parseDoKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.DO_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.DO_KEYWORD);
            return parseDoKeyword();
        }
    }

    /**
     * Parse optional field access or xml optional attribute access expression.
     * <p>
     * <code>
     * optional-field-access-expr := expression ?. field-name
     * <br/>
     * xml-optional-attribute-access-expr := expression ?. xml-attribute-name
     * <br/>
     * xml-attribute-name := xml-qualified-name | qualified-identifier | identifier
     * <br/>
     * xml-qualified-name := xml-namespace-prefix : identifier
     * <br/>
     * xml-namespace-prefix := identifier
     * </code>
     *
     * @param lhsExpr Preceding expression of the optional access
     * @return Parsed node
     */
    private STNode parseOptionalFieldAccessExpression(STNode lhsExpr, boolean isInConditionalExpr) {
        STNode optionalFieldAccessToken = parseOptionalChainingToken();
        STNode fieldName = parseFieldAccessIdentifier(isInConditionalExpr);
        return STNodeFactory.createOptionalFieldAccessExpressionNode(lhsExpr, optionalFieldAccessToken, fieldName);
    }

    /**
     * Parse optional chaining token.
     *
     * @return parsed node
     */
    private STNode parseOptionalChainingToken() {
        STToken token = peek();
        if (token.kind == SyntaxKind.OPTIONAL_CHAINING_TOKEN) {
            return consume();
        } else {
            recover(token, ParserRuleContext.OPTIONAL_CHAINING_TOKEN);
            return parseOptionalChainingToken();
        }
    }

    /**
     * Parse conditional expression.
     * <p>
     * <code>conditional-expr := expression ?
     expression : expression</code>
     *
     * @param lhsExpr Preceding expression of the question mark
     * @param isInConditionalExpr whether calling from a conditional-expr
     * @return Parsed node
     */
    private STNode parseConditionalExpression(STNode lhsExpr, boolean isInConditionalExpr) {
        startContext(ParserRuleContext.CONDITIONAL_EXPRESSION);
        STNode questionMark = parseQuestionMark();
        STNode middleExpr = parseExpression(OperatorPrecedence.ANON_FUNC_OR_LET, true, false, true);

        // When no explicit `:` follows, `a ? b : c` may have been tokenized as a
        // qualified name reference `b:c` inside the middle expression. The blocks
        // below locate such a qualified name ref and split it so that its colon
        // becomes the conditional expression's colon.
        if (peek().kind != SyntaxKind.COLON_TOKEN) {
            if (middleExpr.kind == SyntaxKind.CONDITIONAL_EXPRESSION) {
                // Nested conditional: rewrite its middle expression instead.
                STConditionalExpressionNode innerConditionalExpr = (STConditionalExpressionNode) middleExpr;
                STNode innerMiddleExpr = innerConditionalExpr.middleExpression;
                STNode rightMostQNameRef = ConditionalExprResolver.getQualifiedNameRefNode(innerMiddleExpr, false);
                if (rightMostQNameRef != null) {
                    middleExpr = generateConditionalExprForRightMost(innerConditionalExpr.lhsExpression,
                            innerConditionalExpr.questionMarkToken, innerMiddleExpr, rightMostQNameRef);
                    endContext();
                    return STNodeFactory.createConditionalExpressionNode(lhsExpr, questionMark, middleExpr,
                            innerConditionalExpr.colonToken, innerConditionalExpr.endExpression);
                }

                STNode leftMostQNameRef = ConditionalExprResolver.getQualifiedNameRefNode(innerMiddleExpr, true);
                if (leftMostQNameRef != null) {
                    middleExpr = generateConditionalExprForLeftMost(innerConditionalExpr.lhsExpression,
                            innerConditionalExpr.questionMarkToken, innerMiddleExpr, leftMostQNameRef);
                    endContext();
                    return STNodeFactory.createConditionalExpressionNode(lhsExpr, questionMark, middleExpr,
                            innerConditionalExpr.colonToken, innerConditionalExpr.endExpression);
                }
            }

            STNode rightMostQNameRef = ConditionalExprResolver.getQualifiedNameRefNode(middleExpr, false);
            if (rightMostQNameRef != null) {
                endContext();
                return generateConditionalExprForRightMost(lhsExpr, questionMark, middleExpr, rightMostQNameRef);
            }

            STNode leftMostQNameRef = ConditionalExprResolver.getQualifiedNameRefNode(middleExpr, true);
            if (leftMostQNameRef != null) {
                endContext();
                return generateConditionalExprForLeftMost(lhsExpr, questionMark, middleExpr, leftMostQNameRef);
            }
        }

        return parseConditionalExprRhs(lhsExpr, questionMark, middleExpr, isInConditionalExpr);
    }

    /**
     * Split the right-most qualified name ref `prefix:identifier` inside the middle
     * expression: the identifier becomes the end expression and the qualified ref's
     * colon becomes the conditional expression's colon.
     *
     * @param lhsExpr                   lhs of the conditional expression
     * @param questionMark              question mark token
     * @param middleExpr                middle expression containing the qualified ref
     * @param rightMostQualifiedNameRef the qualified name ref to split
     * @return Conditional expression node
     */
    private STNode generateConditionalExprForRightMost(STNode lhsExpr, STNode questionMark, STNode middleExpr,
                                                       STNode rightMostQualifiedNameRef) {
        STQualifiedNameReferenceNode qualifiedNameRef = (STQualifiedNameReferenceNode) rightMostQualifiedNameRef;
        STNode endExpr = STNodeFactory.createSimpleNameReferenceNode(qualifiedNameRef.identifier);
        STNode simpleNameRef = ConditionalExprResolver.getSimpleNameRefNode(qualifiedNameRef.modulePrefix);
        middleExpr = middleExpr.replace(rightMostQualifiedNameRef, simpleNameRef);
        return STNodeFactory.createConditionalExpressionNode(lhsExpr, questionMark, middleExpr,
                qualifiedNameRef.colon, endExpr);
    }

    /**
     * Split the left-most qualified name ref `prefix:identifier` inside the middle
     * expression: the prefix becomes the middle expression and the remainder (with
     * the identifier substituted) becomes the end expression.
     *
     * @param lhsExpr                  lhs of the conditional expression
     * @param questionMark             question mark token
     * @param middleExpr               middle expression containing the qualified ref
     * @param leftMostQualifiedNameRef the qualified name ref to split
     * @return Conditional expression node
     */
    private STNode generateConditionalExprForLeftMost(STNode lhsExpr, STNode questionMark, STNode middleExpr,
                                                      STNode leftMostQualifiedNameRef) {
        STQualifiedNameReferenceNode qualifiedNameRef = (STQualifiedNameReferenceNode) leftMostQualifiedNameRef;
        STNode simpleNameRef = STNodeFactory.createSimpleNameReferenceNode(qualifiedNameRef.identifier);
        STNode endExpr = middleExpr.replace(leftMostQualifiedNameRef, simpleNameRef);
        middleExpr = ConditionalExprResolver.getSimpleNameRefNode(qualifiedNameRef.modulePrefix);
        return STNodeFactory.createConditionalExpressionNode(lhsExpr, questionMark, middleExpr,
                qualifiedNameRef.colon, endExpr);
    }

    /**
     * Parse the explicit `: expression` tail of a conditional expression.
     *
     * @param lhsExpr             lhs of the conditional expression
     * @param questionMark        question mark token
     * @param middleExpr          middle expression
     * @param isInConditionalExpr whether parsing inside an outer conditional-expr
     * @return Conditional expression node
     */
    private STNode parseConditionalExprRhs(STNode lhsExpr, STNode questionMark, STNode middleExpr,
                                           boolean isInConditionalExpr) {
        STNode colon = parseColon();
        endContext();
        STNode endExpr = parseExpression(OperatorPrecedence.ANON_FUNC_OR_LET, true, false, isInConditionalExpr);
        return STNodeFactory.createConditionalExpressionNode(lhsExpr, questionMark, middleExpr, colon, endExpr);
    }

    /**
     * Parse enum declaration.
     * <p>
     * module-enum-decl :=
     * metadata
     * [public] enum identifier { enum-member (, enum-member)* } [;]
     * enum-member := metadata identifier [= const-expr]
     * </p>
     *
     * @param metadata Metadata preceding the declaration
     * @param qualifier Optional visibility qualifier (e.g. `public`)
     * @return Parsed enum node.
     */
    private STNode parseEnumDeclaration(STNode metadata, STNode qualifier) {
        startContext(ParserRuleContext.MODULE_ENUM_DECLARATION);
        STNode enumKeywordToken = parseEnumKeyword();
        STNode identifier = parseIdentifier(ParserRuleContext.MODULE_ENUM_NAME);
        STNode openBraceToken = parseOpenBrace();
        STNode enumMemberList = parseEnumMemberList();
        STNode closeBraceToken = parseCloseBrace();
        STNode semicolon = parseOptionalSemicolon();
        endContext();

        // An enum must declare at least one member; the error is attached to the open brace.
        openBraceToken = cloneWithDiagnosticIfListEmpty(enumMemberList, openBraceToken,
                DiagnosticErrorCode.ERROR_MISSING_ENUM_MEMBER);
        return STNodeFactory.createEnumDeclarationNode(metadata, qualifier, enumKeywordToken, identifier,
                openBraceToken, enumMemberList, closeBraceToken, semicolon);
    }

    /**
     * Parse 'enum' keyword.
     *
     * @return enum keyword node
     */
    private STNode parseEnumKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.ENUM_KEYWORD) {
            return consume();
        } else {
            // Standard recovery pattern: recover on the unexpected token, then retry.
            recover(token, ParserRuleContext.ENUM_KEYWORD);
            return parseEnumKeyword();
        }
    }

    /**
     * Parse enum member list.
     * <p>
     * enum-member := metadata identifier [= const-expr]
     * </p>
     *
     * @return enum member list node.
     */
    private STNode parseEnumMemberList() {
        startContext(ParserRuleContext.ENUM_MEMBER_LIST);

        // Empty member list: nothing before the closing brace.
        if (peek().kind == SyntaxKind.CLOSE_BRACE_TOKEN) {
            return STNodeFactory.createEmptyNodeList();
        }

        // Members and their comma separators are collected into one flat list.
        List<STNode> enumMemberList = new ArrayList<>();
        STNode enumMember = parseEnumMember();

        STNode enumMemberRhs;
        while (peek().kind != SyntaxKind.CLOSE_BRACE_TOKEN) {
            enumMemberRhs = parseEnumMemberEnd();
            if (enumMemberRhs == null) {
                break;
            }
            enumMemberList.add(enumMember);
            enumMemberList.add(enumMemberRhs);
            enumMember = parseEnumMember();
        }

        // Add the last (or only) member, which has no trailing separator.
        enumMemberList.add(enumMember);
        endContext();
        return STNodeFactory.createNodeList(enumMemberList);
    }

    /**
     * Parse enum member.
     * <p>
     * enum-member := metadata identifier [= const-expr]
     * </p>
     *
     * @return Parsed enum member node.
     */
    private STNode parseEnumMember() {
        STNode metadata;
        switch (peek().kind) {
            case DOCUMENTATION_STRING:
            case AT_TOKEN:
                metadata = parseMetaData();
                break;
            default:
                metadata = STNodeFactory.createEmptyNode();
        }

        STNode identifierNode = parseIdentifier(ParserRuleContext.ENUM_MEMBER_NAME);
        return parseEnumMemberRhs(metadata, identifierNode);
    }

    /**
     * Parse the optional <code>= const-expr</code> portion of an enum member.
     *
     * @param metadata Metadata of the member
     * @param identifierNode Member name
     * @return Parsed enum member node
     */
    private STNode parseEnumMemberRhs(STNode metadata, STNode identifierNode) {
        STNode equalToken, constExprNode;
        switch (peek().kind) {
            case EQUAL_TOKEN:
                equalToken = parseAssignOp();
                constExprNode = parseExpression();
                break;
            case COMMA_TOKEN:
            case CLOSE_BRACE_TOKEN:
                // No explicit value: both the `=` and the expression are empty.
                equalToken = STNodeFactory.createEmptyNode();
                constExprNode = STNodeFactory.createEmptyNode();
                break;
            default:
                recover(peek(), ParserRuleContext.ENUM_MEMBER_RHS);
                return parseEnumMemberRhs(metadata, identifierNode);
        }

        return STNodeFactory.createEnumMemberNode(metadata, identifierNode, equalToken, constExprNode);
    }

    /**
     * Parse the end of an enum member: the separating comma, or <code>null</code>
     * when the member list has ended (close brace).
     *
     * @return Comma token, or {@code null} at the end of the list
     */
    private STNode parseEnumMemberEnd() {
        switch (peek().kind) {
            case COMMA_TOKEN:
                return parseComma();
            case CLOSE_BRACE_TOKEN:
                return null;
            default:
                recover(peek(), ParserRuleContext.ENUM_MEMBER_END);
                return parseEnumMemberEnd();
        }
    }

    /**
     * Disambiguate between a transaction statement and a var-decl whose type is a
     * qualified identifier with the predeclared prefix <code>transaction:</code>.
     *
     * @param annots Statement annotations
     * @param qualifiers Statement qualifiers
     * @param transactionKeyword Already-consumed `transaction` keyword
     * @return Transaction statement or variable declaration node
     */
    private STNode parseTransactionStmtOrVarDecl(STNode annots, List<STNode> qualifiers,
                                                 STToken transactionKeyword) {
        switch (peek().kind) {
            case OPEN_BRACE_TOKEN:
                reportInvalidStatementAnnots(annots, qualifiers);
                reportInvalidQualifierList(qualifiers);
                return parseTransactionStatement(transactionKeyword);
            case COLON_TOKEN:
                if (getNextNextToken().kind == SyntaxKind.IDENTIFIER_TOKEN) {
                    // `transaction:Foo ...` — treat as a type reference in a var-decl.
                    STNode typeDesc = parseQualifiedIdentifierWithPredeclPrefix(transactionKeyword, false);
                    return parseVarDeclTypeDescRhs(typeDesc, annots, qualifiers, true, false);
                }
                // Intentional fall through to recovery when `:` is not followed by an identifier.
            default:
                Solution solution = recover(peek(), ParserRuleContext.TRANSACTION_STMT_RHS_OR_TYPE_REF);
                if (solution.action == Action.KEEP ||
                        (solution.action == Action.INSERT && solution.tokenKind == SyntaxKind.COLON_TOKEN)) {
                    STNode typeDesc = parseQualifiedIdentifierWithPredeclPrefix(transactionKeyword, false);
                    return parseVarDeclTypeDescRhs(typeDesc, annots, qualifiers, true, false);
                }
                return parseTransactionStmtOrVarDecl(annots, qualifiers, transactionKeyword);
        }
    }

    /**
     * Parse transaction statement.
     * <p>
     * <code>transaction-stmt := `transaction` block-stmt [on-fail-clause]</code>
     *
     * @return Transaction statement node
     */
    private STNode parseTransactionStatement(STNode transactionKeyword) {
        startContext(ParserRuleContext.TRANSACTION_STMT);
        STNode blockStmt = parseBlockNode();
        endContext();
        STNode onFailClause = parseOptionalOnFailClause();
        return STNodeFactory.createTransactionStatementNode(transactionKeyword, blockStmt, onFailClause);
    }

    /**
     * Parse commit action.
     * <p>
     * <code>commit-action := "commit"</code>
     *
     * @return Commit action node
     */
    private STNode parseCommitAction() {
        STNode commitKeyword = parseCommitKeyword();
        return STNodeFactory.createCommitActionNode(commitKeyword);
    }

    /**
     * Parse commit keyword.
     *
     * @return parsed node
     */
    private STNode parseCommitKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.COMMIT_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.COMMIT_KEYWORD);
            return parseCommitKeyword();
        }
    }

    /**
     * Parse retry statement.
     * <p>
     * <code>
     * retry-stmt := `retry` retry-spec block-stmt [on-fail-clause]
     * <br/>
     * retry-spec := [type-parameter] [ `(` arg-list `)` ]
     * </code>
     *
     * @return Retry statement node
     */
    private STNode parseRetryStatement() {
        // NOTE: the RETRY_STMT context opened here is closed inside parseRetryTypeParamRhs().
        startContext(ParserRuleContext.RETRY_STMT);
        STNode retryKeyword = parseRetryKeyword();
        STNode retryStmt = parseRetryKeywordRhs(retryKeyword);
        return retryStmt;
    }

    /**
     * Parse what follows the `retry` keyword: an optional type parameter,
     * then the arg list and body.
     *
     * @param retryKeyword Already-parsed `retry` keyword
     * @return Retry statement node
     */
    private STNode parseRetryKeywordRhs(STNode retryKeyword) {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case LT_TOKEN:
                STNode typeParam = parseTypeParameter();
                return parseRetryTypeParamRhs(retryKeyword, typeParam);
            case OPEN_PAREN_TOKEN:
            case OPEN_BRACE_TOKEN:
            case TRANSACTION_KEYWORD:
                // No type parameter present.
                typeParam = STNodeFactory.createEmptyNode();
                return parseRetryTypeParamRhs(retryKeyword, typeParam);
            default:
                recover(peek(), ParserRuleContext.RETRY_KEYWORD_RHS);
                return parseRetryKeywordRhs(retryKeyword);
        }
    }

    /**
     * Parse the optional parenthesized arg list and the retry body.
     * Closes the RETRY_STMT context opened in parseRetryStatement().
     *
     * @param retryKeyword `retry` keyword
     * @param typeParam Optional type parameter (may be empty)
     * @return Retry statement node
     */
    private STNode parseRetryTypeParamRhs(STNode retryKeyword, STNode typeParam) {
        STNode args;
        switch (peek().kind) {
            case OPEN_PAREN_TOKEN:
                args = parseParenthesizedArgList();
                break;
            case OPEN_BRACE_TOKEN:
            case TRANSACTION_KEYWORD:
                args = STNodeFactory.createEmptyNode();
                break;
            default:
                recover(peek(), ParserRuleContext.RETRY_TYPE_PARAM_RHS);
                return parseRetryTypeParamRhs(retryKeyword, typeParam);
        }

        STNode blockStmt = parseRetryBody();
        endContext(); // end RETRY_STMT context
        STNode onFailClause = parseOptionalOnFailClause();
        return STNodeFactory.createRetryStatementNode(retryKeyword, typeParam, args, blockStmt, onFailClause);
    }

    /**
     * Parse the retry body: either a plain block or a `retry transaction { ... }` form.
     *
     * @return Block statement or transaction statement node
     */
    private STNode parseRetryBody() {
        switch (peek().kind) {
            case OPEN_BRACE_TOKEN:
                return parseBlockNode();
            case TRANSACTION_KEYWORD:
                return parseTransactionStatement(consume());
            default:
                recover(peek(), ParserRuleContext.RETRY_BODY);
                return parseRetryBody();
        }
    }

    /**
     * Parse optional on fail clause.
     *
     * @return Parsed node
     */
    private STNode parseOptionalOnFailClause() {
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.ON_KEYWORD) {
            return parseOnFailClause();
        }

        // No on-fail clause: accept anything that can legally follow a compound statement.
        if (isEndOfRegularCompoundStmt(nextToken.kind)) {
            return STNodeFactory.createEmptyNode();
        }

        recover(nextToken, ParserRuleContext.REGULAR_COMPOUND_STMT_RHS);
        return parseOptionalOnFailClause();
    }

    // A compound statement ends at a closing brace, a semicolon, an annotation,
    // end-of-file, or the start of the next statement.
    private boolean isEndOfRegularCompoundStmt(SyntaxKind nodeKind) {
        switch (nodeKind) {
            case CLOSE_BRACE_TOKEN:
            case SEMICOLON_TOKEN:
            case AT_TOKEN:
            case EOF_TOKEN:
                return true;
            default:
                return isStatementStartingToken(nodeKind);
        }
    }

    // Whether the given token kind can begin a statement (keyword, type start,
    // or a valid expression start).
    private boolean isStatementStartingToken(SyntaxKind nodeKind) {
        switch (nodeKind) {
            case FINAL_KEYWORD:

            // Statement starting tokens
            case IF_KEYWORD:
            case WHILE_KEYWORD:
            case DO_KEYWORD:
            case PANIC_KEYWORD:
            case CONTINUE_KEYWORD:
            case BREAK_KEYWORD:
            case RETURN_KEYWORD:
            case LOCK_KEYWORD:
            case OPEN_BRACE_TOKEN:
            case FORK_KEYWORD:
            case FOREACH_KEYWORD:
            case XMLNS_KEYWORD:
            case TRANSACTION_KEYWORD:
            case RETRY_KEYWORD:
            case ROLLBACK_KEYWORD:
            case MATCH_KEYWORD:

            // Even-though worker is not a statement, we parse it as statements.
            // then validates it based on the context. This is done to provide
            // better error messages
            case FAIL_KEYWORD:
            case CHECK_KEYWORD:
            case CHECKPANIC_KEYWORD:
            case TRAP_KEYWORD:
            case START_KEYWORD:
            case FLUSH_KEYWORD:
            case LEFT_ARROW_TOKEN:
            case WAIT_KEYWORD:
            case COMMIT_KEYWORD:
            case WORKER_KEYWORD:
            case TYPE_KEYWORD:
            case CONST_KEYWORD:
                return true;
            default:
                if (isTypeStartingToken(nodeKind)) {
                    return true;
                }

                if (isValidExpressionStart(nodeKind, 1)) {
                    return true;
                }

                return false;
        }
    }

    /**
     * Parse on fail clause.
     * <p>
     * <code>
     * on-fail-clause := on fail [typed-binding-pattern] statement-block
     * </code>
     *
     * @return On fail clause node
     */
    private STNode parseOnFailClause() {
        startContext(ParserRuleContext.ON_FAIL_CLAUSE);
        STNode onKeyword = parseOnKeyword();
        STNode failKeyword = parseFailKeyword();
        STNode typedBindingPattern = parseOnfailOptionalBP();
        STNode blockStatement = parseBlockNode();
        endContext();
        return STNodeFactory.createOnFailClauseNode(onKeyword, failKeyword, typedBindingPattern, blockStatement);
    }

    // The binding pattern in an on-fail clause is optional: absent when the
    // block `{` directly follows `fail`.
    private STNode parseOnfailOptionalBP() {
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.OPEN_BRACE_TOKEN) {
            return STAbstractNodeFactory.createEmptyNode();
        } else if (isTypeStartingToken(nextToken.kind)) {
            return parseTypedBindingPattern();
        } else {
            recover(nextToken, ParserRuleContext.ON_FAIL_OPTIONAL_BINDING_PATTERN);
            return parseOnfailOptionalBP();
        }
    }

    /**
     * Parse typed binding pattern.
     * <p>
     * <code>
     * typed-binding-pattern := inferable-type-descriptor binding-pattern
     * <br/>
     * inferable-type-descriptor := type-descriptor | var
     * </code>
     *
     * @return Typed binding pattern node
     */
    private STNode parseTypedBindingPattern() {
        STNode typeDescriptor = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true, false,
                TypePrecedence.DEFAULT);
        STNode bindingPattern = parseBindingPattern();
        return STNodeFactory.createTypedBindingPatternNode(typeDescriptor, bindingPattern);
    }

    /**
     * Parse retry keyword.
     *
     * @return parsed node
     */
    private STNode parseRetryKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.RETRY_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.RETRY_KEYWORD);
            return parseRetryKeyword();
        }
    }

    /**
     * Parse rollback statement.
* <p> * <code>rollback-stmt := "rollback" [expression] ";"</code> * * @return Rollback statement node */ private STNode parseRollbackStatement() { startContext(ParserRuleContext.ROLLBACK_STMT); STNode rollbackKeyword = parseRollbackKeyword(); STNode expression; if (peek().kind == SyntaxKind.SEMICOLON_TOKEN) { expression = STNodeFactory.createEmptyNode(); } else { expression = parseExpression(); } STNode semicolon = parseSemicolon(); endContext(); return STNodeFactory.createRollbackStatementNode(rollbackKeyword, expression, semicolon); } /** * Parse rollback keyword. * * @return Rollback keyword node */ private STNode parseRollbackKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.ROLLBACK_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.ROLLBACK_KEYWORD); return parseRollbackKeyword(); } } /** * Parse transactional expression. * <p> * <code>transactional-expr := "transactional"</code> * * @return Transactional expression node */ private STNode parseTransactionalExpression() { STNode transactionalKeyword = parseTransactionalKeyword(); return STNodeFactory.createTransactionalExpressionNode(transactionalKeyword); } /** * Parse transactional keyword. * * @return Transactional keyword node */ private STNode parseTransactionalKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.TRANSACTIONAL_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.TRANSACTIONAL_KEYWORD); return parseTransactionalKeyword(); } } /** * Parse base16 literal. 
     * <p>
     * <code>
     * byte-array-literal := Base16Literal | Base64Literal
     * <br/>
     * Base16Literal := base16 WS ` HexGroup* WS `
     * <br/>
     * Base64Literal := base64 WS ` Base64Group* [PaddedBase64Group] WS `
     * </code>
     *
     * @return parsed node
     */
    private STNode parseByteArrayLiteral() {
        STNode type;
        if (peek().kind == SyntaxKind.BASE16_KEYWORD) {
            type = parseBase16Keyword();
        } else {
            type = parseBase64Keyword();
        }

        STNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);
        if (startingBackTick.isMissing()) {
            // When the opening backtick itself is missing, fabricate both backticks
            // and report a single missing-content diagnostic instead of cascading errors.
            startingBackTick = SyntaxErrors.createMissingToken(SyntaxKind.BACKTICK_TOKEN);
            STNode endingBackTick = SyntaxErrors.createMissingToken(SyntaxKind.BACKTICK_TOKEN);
            STNode content = STNodeFactory.createEmptyNode();
            STNode byteArrayLiteral =
                    STNodeFactory.createByteArrayLiteralNode(type, startingBackTick, content, endingBackTick);
            byteArrayLiteral =
                    SyntaxErrors.addDiagnostic(byteArrayLiteral, DiagnosticErrorCode.ERROR_MISSING_BYTE_ARRAY_CONTENT);
            return byteArrayLiteral;
        }

        STNode content = parseByteArrayContent();
        return parseByteArrayLiteral(type, startingBackTick, content);
    }

    /**
     * Parse byte array literal.
     *
     * @param typeKeyword keyword token, possible values are `base16` and `base64`
     * @param startingBackTick starting backtick token
     * @param byteArrayContent byte array literal content to be validated
     * @return parsed byte array literal node
     */
    private STNode parseByteArrayLiteral(STNode typeKeyword, STNode startingBackTick, STNode byteArrayContent) {
        STNode content = STNodeFactory.createEmptyNode();
        STNode newStartingBackTick = startingBackTick;
        STNodeList items = (STNodeList) byteArrayContent;
        if (items.size() == 1) {
            // Exactly one template item: validate it against the declared base.
            // Invalid content is attached to the starting backtick as invalid minutiae.
            STNode item = items.get(0);
            if (typeKeyword.kind == SyntaxKind.BASE16_KEYWORD && !isValidBase16LiteralContent(item.toString())) {
                newStartingBackTick = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startingBackTick, item,
                        DiagnosticErrorCode.ERROR_INVALID_BASE16_CONTENT_IN_BYTE_ARRAY_LITERAL);
            } else if (typeKeyword.kind == SyntaxKind.BASE64_KEYWORD && !isValidBase64LiteralContent(item.toString())) {
                newStartingBackTick = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startingBackTick, item,
                        DiagnosticErrorCode.ERROR_INVALID_BASE64_CONTENT_IN_BYTE_ARRAY_LITERAL);
            } else if (item.kind != SyntaxKind.TEMPLATE_STRING) {
                newStartingBackTick = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startingBackTick, item,
                        DiagnosticErrorCode.ERROR_INVALID_CONTENT_IN_BYTE_ARRAY_LITERAL);
            } else {
                content = item;
            }
        } else if (items.size() > 1) {
            // Multiple items (e.g. interpolations) are never valid byte array content:
            // fold them all into the starting backtick as invalid minutiae.
            STNode clonedStartingBackTick = startingBackTick;
            for (int index = 0; index < items.size(); index++) {
                STNode item = items.get(index);
                clonedStartingBackTick = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(clonedStartingBackTick, item);
            }
            newStartingBackTick = SyntaxErrors.addDiagnostic(clonedStartingBackTick,
                    DiagnosticErrorCode.ERROR_INVALID_CONTENT_IN_BYTE_ARRAY_LITERAL);
        }

        STNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_END);
        return STNodeFactory.createByteArrayLiteralNode(typeKeyword, newStartingBackTick, content, endingBackTick);
    }

    /**
     * Parse <code>base16</code> keyword.
     *
     * @return base16 keyword node
     */
    private STNode parseBase16Keyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.BASE16_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.BASE16_KEYWORD);
            return parseBase16Keyword();
        }
    }

    /**
     * Parse <code>base64</code> keyword.
     *
     * @return base64 keyword node
     */
    private STNode parseBase64Keyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.BASE64_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.BASE64_KEYWORD);
            return parseBase64Keyword();
        }
    }

    /**
     * Validate and parse byte array literal content.
     * An error is reported, if the content is invalid.
     *
     * @return parsed node
     */
    private STNode parseByteArrayContent() {
        STToken nextToken = peek();

        // Collect template items until the closing backtick (or end of input).
        List<STNode> items = new ArrayList<>();
        while (!isEndOfBacktickContent(nextToken.kind)) {
            STNode content = parseTemplateItem();
            items.add(content);
            nextToken = peek();
        }

        return STNodeFactory.createNodeList(items);
    }

    /**
     * Validate base16 literal content.
     * <p>
     * <code>
     * Base16Literal := base16 WS ` HexGroup* WS `
     * <br/>
     * HexGroup := WS HexDigit WS HexDigit
     * <br/>
     * WS := WhiteSpaceChar*
     * <br/>
     * WhiteSpaceChar := 0x9 | 0xA | 0xD | 0x20
     * </code>
     *
     * @param content the string surrounded by the backticks
     * @return <code>true</code>, if the string content is valid. <code>false</code> otherwise.
     */
    static boolean isValidBase16LiteralContent(String content) {
        char[] charArray = content.toCharArray();
        int hexDigitCount = 0;

        for (char c : charArray) {
            switch (c) {
                // Whitespace is permitted anywhere between hex digits.
                case LexerTerminals.TAB:
                case LexerTerminals.NEWLINE:
                case LexerTerminals.CARRIAGE_RETURN:
                case LexerTerminals.SPACE:
                    break;
                default:
                    if (isHexDigit(c)) {
                        hexDigitCount++;
                    } else {
                        return false;
                    }
                    break;
            }
        }

        // Hex digits must come in pairs (each pair encodes one byte).
        return hexDigitCount % 2 == 0;
    }

    /**
     * Validate base64 literal content.
     * <p>
     * <code>
     * Base64Literal := base64 WS ` Base64Group* [PaddedBase64Group] WS `
     * <br/>
     * Base64Group := WS Base64Char WS Base64Char WS Base64Char WS Base64Char
     * <br/>
     * PaddedBase64Group :=
     * WS Base64Char WS Base64Char WS Base64Char WS PaddingChar
     * | WS Base64Char WS Base64Char WS PaddingChar WS PaddingChar
     * <br/>
     * Base64Char := A .. Z | a .. z | 0 .. 9 | + | /
     * <br/>
     * PaddingChar := =
     * <br/>
     * WS := WhiteSpaceChar*
     * <br/>
     * WhiteSpaceChar := 0x9 | 0xA | 0xD | 0x20
     * </code>
     *
     * @param content the string surrounded by the backticks
     * @return <code>true</code>, if the string content is valid. <code>false</code> otherwise.
     */
    static boolean isValidBase64LiteralContent(String content) {
        char[] charArray = content.toCharArray();
        int base64CharCount = 0;
        int paddingCharCount = 0;

        for (char c : charArray) {
            switch (c) {
                // Whitespace is permitted anywhere between base64 chars.
                case LexerTerminals.TAB:
                case LexerTerminals.NEWLINE:
                case LexerTerminals.CARRIAGE_RETURN:
                case LexerTerminals.SPACE:
                    break;
                case LexerTerminals.EQUAL:
                    paddingCharCount++;
                    break;
                default:
                    if (isBase64Char(c)) {
                        if (paddingCharCount == 0) {
                            base64CharCount++;
                        } else {
                            // A base64 char after padding is invalid.
                            return false;
                        }
                    } else {
                        return false;
                    }
                    break;
            }
        }

        if (paddingCharCount > 2) {
            return false;
        } else if (paddingCharCount == 0) {
            // Unpadded content must consist of complete 4-char groups.
            return base64CharCount % 4 == 0;
        } else {
            // With k padding chars, the final group contributes (4 - k) base64 chars.
            return base64CharCount % 4 == 4 - paddingCharCount;
        }
    }

    /**
     * <p>
     * Check whether a given char is a base64 char.
     * </p>
     * <code>Base64Char := A .. Z | a .. z | 0 .. 9 | + | /</code>
     *
     * @param c character to check
     * @return <code>true</code>, if the character represents a base64 char. <code>false</code> otherwise.
     */
    static boolean isBase64Char(int c) {
        if ('a' <= c && c <= 'z') {
            return true;
        }

        if ('A' <= c && c <= 'Z') {
            return true;
        }

        if (c == '+' || c == '/') {
            return true;
        }

        return isDigit(c);
    }

    // Whether c is a hexadecimal digit: 0-9, a-f, or A-F.
    static boolean isHexDigit(int c) {
        if ('a' <= c && c <= 'f') {
            return true;
        }

        if ('A' <= c && c <= 'F') {
            return true;
        }

        return isDigit(c);
    }

    // Whether c is a decimal digit: 0-9.
    static boolean isDigit(int c) {
        return ('0' <= c && c <= '9');
    }

    /**
     * Parse xml filter expression.
     * <p>
     * <code>xml-filter-expr := expression .&lt; xml-name-pattern &gt;</code>
     *
     * @param lhsExpr Preceding expression of .&lt; token
     * @return Parsed node
     */
    private STNode parseXMLFilterExpression(STNode lhsExpr) {
        STNode xmlNamePatternChain = parseXMLFilterExpressionRhs();
        return STNodeFactory.createXMLFilterExpressionNode(lhsExpr, xmlNamePatternChain);
    }

    /**
     * Parse xml filter expression rhs.
     * <p>
     * <code>filer-expression-rhs := .&lt; xml-name-pattern &gt;</code>
     *
     * @return Parsed node
     */
    private STNode parseXMLFilterExpressionRhs() {
        STNode dotLTToken = parseDotLTToken();
        return parseXMLNamePatternChain(dotLTToken);
    }

    /**
     * Parse xml name pattern chain.
     * <p>
     * <code>
     * xml-name-pattern-chain := filer-expression-rhs | xml-element-children-step | xml-element-descendants-step
     * <br/>
     * filer-expression-rhs := .&lt; xml-name-pattern &gt;
     * <br/>
     * xml-element-children-step := /&lt; xml-name-pattern &gt;
     * <br/>
     * xml-element-descendants-step := /**\/&lt;xml-name-pattern &gt;
     * </code>
     *
     * @param startToken Preceding token of xml name pattern
     * @return Parsed node
     */
    private STNode parseXMLNamePatternChain(STNode startToken) {
        startContext(ParserRuleContext.XML_NAME_PATTERN);
        STNode xmlNamePattern = parseXMLNamePattern();
        STNode gtToken = parseGTToken();
        endContext();

        // At least one atomic name pattern is required; the error goes on the start token.
        startToken = cloneWithDiagnosticIfListEmpty(xmlNamePattern, startToken,
                DiagnosticErrorCode.ERROR_MISSING_XML_ATOMIC_NAME_PATTERN);
        return STNodeFactory.createXMLNamePatternChainingNode(startToken, xmlNamePattern, gtToken);
    }

    /**
     * Parse xml step extends.
     * <p>
     * <code>
     * xml-step-extends := xml-step-extend*
     * </code>
     *
     * @return Parsed node
     */
    private STNode parseXMLStepExtends() {
        STToken nextToken = peek();
        if (isEndOfXMLStepExtend(nextToken.kind)) {
            return STNodeFactory.createEmptyNodeList();
        }

        List<STNode> xmlStepExtendList = new ArrayList<>();
        startContext(ParserRuleContext.XML_STEP_EXTENDS);
        STNode stepExtension;
        while (!isEndOfXMLStepExtend(nextToken.kind)) {
            // Dispatch on the leading token: `.` -> method call, `.<` -> filter, `[` -> index.
            if (nextToken.kind == SyntaxKind.DOT_TOKEN) {
                stepExtension = parseXMLStepMethodCallExtend();
            } else if (nextToken.kind == SyntaxKind.DOT_LT_TOKEN) {
                stepExtension = parseXMLFilterExpressionRhs();
            } else {
                stepExtension = parseXMLIndexedStepExtend();
            }
            xmlStepExtendList.add(stepExtension);
            nextToken = peek();
        }
        endContext();
        return STNodeFactory.createNodeList(xmlStepExtendList);
    }

    /**
     * <p>
     * Parse xml indexed step extension.
     * <br/>
     * <code>
     * xml-indexed-step-extend:= [ expression ]
     * </code>
     * </p>
     *
     * @return Parsed node
     */
    private STNode parseXMLIndexedStepExtend() {
        startContext(ParserRuleContext.MEMBER_ACCESS_KEY_EXPR);
        STNode openBracket = parseOpenBracket();
        STNode keyExpr = parseKeyExpr(true);
        STNode closeBracket = parseCloseBracket();
        endContext();
        return STNodeFactory.createXMLStepIndexedExtendNode(openBracket, keyExpr, closeBracket);
    }

    /**
     * <p>
     * Parse xml method call step extension.
     * <br/>
     * <code>
     * xml-method-call-step-extend:= . method-name ( arg-list )
     * </code>
     * </p>
     *
     * @return Parsed node
     */
    private STNode parseXMLStepMethodCallExtend() {
        STNode dotToken = parseDotToken();
        STNode methodName = parseMethodName();
        STNode parenthesizedArgsList = parseParenthesizedArgList();
        return STNodeFactory.createXMLStepMethodCallExtendNode(dotToken, methodName, parenthesizedArgsList);
    }

    // Method names may also be certain keywords (special method names).
    private STNode parseMethodName() {
        if (isSpecialMethodName(peek())) {
            return getKeywordAsSimpleNameRef();
        }
        return STNodeFactory.createSimpleNameReferenceNode(parseIdentifier(ParserRuleContext.IDENTIFIER));
    }

    /**
     * Parse <code> .&lt; </code> token.
     *
     * @return Parsed node
     */
    private STNode parseDotLTToken() {
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.DOT_LT_TOKEN) {
            return consume();
        } else {
            recover(nextToken, ParserRuleContext.DOT_LT_TOKEN);
            return parseDotLTToken();
        }
    }

    /**
     * Parse xml name pattern.
     * <p>
     * <code>xml-name-pattern := xml-atomic-name-pattern [| xml-atomic-name-pattern]*</code>
     *
     * @return Parsed node
     */
    private STNode parseXMLNamePattern() {
        List<STNode> xmlAtomicNamePatternList = new ArrayList<>();
        STToken nextToken = peek();
        if (isEndOfXMLNamePattern(nextToken.kind)) {
            return STNodeFactory.createNodeList(xmlAtomicNamePatternList);
        }

        STNode xmlAtomicNamePattern = parseXMLAtomicNamePattern();
        xmlAtomicNamePatternList.add(xmlAtomicNamePattern);

        // Alternating pattern/`|` separator pairs, collected into one flat list.
        STNode separator;
        while (!isEndOfXMLNamePattern(peek().kind)) {
            separator = parseXMLNamePatternSeparator();
            if (separator == null) {
                break;
            }
            xmlAtomicNamePatternList.add(separator);

            xmlAtomicNamePattern = parseXMLAtomicNamePattern();
            xmlAtomicNamePatternList.add(xmlAtomicNamePattern);
        }

        return STNodeFactory.createNodeList(xmlAtomicNamePatternList);
    }

    private boolean isEndOfXMLNamePattern(SyntaxKind tokenKind) {
        switch (tokenKind) {
            case GT_TOKEN:
            case EOF_TOKEN:
                return true;
            // Listed for documentation; these fall through to the default.
            case IDENTIFIER_TOKEN:
            case ASTERISK_TOKEN:
            case COLON_TOKEN:
            default:
                return false;
        }
    }

    private boolean isEndOfXMLStepExtend(SyntaxKind tokenKind) {
        return switch (tokenKind) {
            case OPEN_BRACKET_TOKEN, DOT_LT_TOKEN -> false;
            // A `.` only continues the step when it begins a method call: `.name(`
            case DOT_TOKEN -> peek(3).kind != SyntaxKind.OPEN_PAREN_TOKEN;
            default -> true;
        };
    }

    // Returns the `|` separator, or null when the name pattern has ended.
    private STNode parseXMLNamePatternSeparator() {
        STToken token = peek();
        switch (token.kind) {
            case PIPE_TOKEN:
                return consume();
            case GT_TOKEN:
            case EOF_TOKEN:
                return null;
            default:
                recover(token, ParserRuleContext.XML_NAME_PATTERN_RHS);
                return parseXMLNamePatternSeparator();
        }
    }

    /**
     * Parse xml atomic name pattern.
     * <p>
     * <code>
     * xml-atomic-name-pattern :=
     * *
     * | identifier
     * | xml-namespace-prefix : identifier
     * | xml-namespace-prefix : *
     * </code>
     *
     * @return Parsed node
     */
    private STNode parseXMLAtomicNamePattern() {
        startContext(ParserRuleContext.XML_ATOMIC_NAME_PATTERN);
        STNode atomicNamePattern = parseXMLAtomicNamePatternBody();
        endContext();
        return atomicNamePattern;
    }

    private STNode parseXMLAtomicNamePatternBody() {
        STToken token = peek();
        STNode identifier;
        switch (token.kind) {
            case ASTERISK_TOKEN:
                // A bare `*` is a complete atomic name pattern.
                return consume();
            case IDENTIFIER_TOKEN:
                identifier = consume();
                break;
            default:
                recover(token, ParserRuleContext.XML_ATOMIC_NAME_PATTERN_START);
                return parseXMLAtomicNamePatternBody();
        }

        return parseXMLAtomicNameIdentifier(identifier);
    }

    // Parses the optional `: identifier` / `: *` suffix after a namespace prefix.
    private STNode parseXMLAtomicNameIdentifier(STNode identifier) {
        STToken token = peek();
        if (token.kind == SyntaxKind.COLON_TOKEN) {
            STNode colon = consume();
            STToken nextToken = peek();
            if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN || nextToken.kind == SyntaxKind.ASTERISK_TOKEN) {
                STToken endToken = consume();
                return STNodeFactory.createXMLAtomicNamePatternNode(identifier, colon, endToken);
            }
            // NOTE(review): if the token after `:` is neither an identifier nor `*`,
            // the consumed colon is not attached to any node — confirm this is intended.
        }
        return STNodeFactory.createSimpleNameReferenceNode(identifier);
    }

    /**
     * Parse xml step expression.
     * <p>
     * <code>xml-step-expr := expression xml-step-start xml-step-extend*</code>
     *
     * @param lhsExpr Preceding expression of /*, /&lt;, or /**\/&lt; token
     * @return Parsed node
     */
    private STNode parseXMLStepExpression(STNode lhsExpr) {
        STNode xmlStepStart = parseXMLStepStart();
        STNode xmlStepExtends = parseXMLStepExtends();
        return STNodeFactory.createXMLStepExpressionNode(lhsExpr, xmlStepStart, xmlStepExtends);
    }

    /**
     * Parse xml step start.
     * <p>
     * <code>
     * xml-step-start :=
     * xml-all-children-step
     * | xml-element-children-step
     * | xml-element-descendants-step
     * <br/>
     * xml-all-children-step := /*
     * </code>
     *
     * @return Parsed node
     */
    private STNode parseXMLStepStart() {
        STToken token = peek();
        STNode startToken;

        switch (token.kind) {
            case SLASH_ASTERISK_TOKEN:
                // `/*` has no name pattern; the token alone is the step start.
                return consume();
            case DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN:
                startToken = parseDoubleSlashDoubleAsteriskLTToken();
                break;
            case SLASH_LT_TOKEN:
            default:
                startToken = parseSlashLTToken();
                break;
        }
        return parseXMLNamePatternChain(startToken);
    }

    /**
     * Parse <code> /&lt; </code> token.
     *
     * @return Parsed node
     */
    private STNode parseSlashLTToken() {
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.SLASH_LT_TOKEN) {
            return consume();
        } else {
            recover(nextToken, ParserRuleContext.SLASH_LT_TOKEN);
            return parseSlashLTToken();
        }
    }

    /**
     * Parse the double-slash-double-asterisk-lt token (xml descendants step start).
     *
     * @return Parsed node
     */
    private STNode parseDoubleSlashDoubleAsteriskLTToken() {
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN) {
            return consume();
        } else {
            recover(nextToken, ParserRuleContext.DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN);
            return parseDoubleSlashDoubleAsteriskLTToken();
        }
    }

    /**
     * Parse match statement.
     * <p>
     * <code>match-stmt := match action-or-expr { match-clause+ } [on-fail-clause]</code>
     *
     * @return Match statement
     */
    private STNode parseMatchStatement() {
        startContext(ParserRuleContext.MATCH_STMT);
        STNode matchKeyword = parseMatchKeyword();
        STNode actionOrExpr = parseActionOrExpression();
        startContext(ParserRuleContext.MATCH_BODY);
        STNode openBrace = parseOpenBrace();

        List<STNode> matchClausesList = new ArrayList<>();
        while (!isEndOfMatchClauses(peek().kind)) {
            STNode clause = parseMatchClause();
            matchClausesList.add(clause);
        }
        STNode matchClauses = STNodeFactory.createNodeList(matchClausesList);

        // A match statement requires at least one clause; the error goes on the open brace.
        if (isNodeListEmpty(matchClauses)) {
            openBrace = SyntaxErrors.addDiagnostic(openBrace,
                    DiagnosticErrorCode.ERROR_MATCH_STATEMENT_SHOULD_HAVE_ONE_OR_MORE_MATCH_CLAUSES);
        }

        STNode closeBrace = parseCloseBrace();
        endContext(); // end MATCH_BODY
        endContext(); // end MATCH_STMT
        STNode onFailClause = parseOptionalOnFailClause();
        return STNodeFactory.createMatchStatementNode(matchKeyword, actionOrExpr, openBrace, matchClauses, closeBrace,
                onFailClause);
    }

    /**
     * Parse match keyword.
     *
     * @return Match keyword node
     */
    private STNode parseMatchKeyword() {
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.MATCH_KEYWORD) {
            return consume();
        } else {
            recover(nextToken, ParserRuleContext.MATCH_KEYWORD);
            return parseMatchKeyword();
        }
    }

    private boolean isEndOfMatchClauses(SyntaxKind nextTokenKind) {
        switch (nextTokenKind) {
            case EOF_TOKEN:
            case CLOSE_BRACE_TOKEN:
            case TYPE_KEYWORD:
                return true;
            default:
                return isEndOfStatements();
        }
    }

    /**
     * Parse a single match clause.
     * <p>
     * <code>
     * match-clause := match-pattern-list [match-guard] =&gt; block-stmt
     * <br/>
     * match-guard := if expression
     * </code>
     *
     * @return A match clause
     */
    private STNode parseMatchClause() {
        STNode matchPatterns = parseMatchPatternList();
        STNode matchGuard = parseMatchGuard();
        STNode rightDoubleArrow = parseDoubleRightArrow();
        STNode blockStmt = parseBlockNode();

        if (isNodeListEmpty(matchPatterns)) {
            // Fabricate a missing constant pattern so the clause still has one pattern,
            // and report the error on whichever node follows the pattern position.
            STToken identifier = SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
            STNode constantPattern = STNodeFactory.createSimpleNameReferenceNode(identifier);
            matchPatterns = STNodeFactory.createNodeList(constantPattern);

            DiagnosticErrorCode errorCode = DiagnosticErrorCode.ERROR_MISSING_MATCH_PATTERN;
            if (matchGuard != null) {
                matchGuard = SyntaxErrors.addDiagnostic(matchGuard, errorCode);
            } else {
                rightDoubleArrow = SyntaxErrors.addDiagnostic(rightDoubleArrow, errorCode);
            }
        }

        return STNodeFactory.createMatchClauseNode(matchPatterns, matchGuard, rightDoubleArrow, blockStmt);
    }

    /**
     * Parse match guard.
     * <p>
     * <code>match-guard := if expression</code>
     *
     * @return Match guard
     */
    private STNode parseMatchGuard() {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case IF_KEYWORD:
                STNode ifKeyword = parseIfKeyword();
                STNode expr = parseExpression(DEFAULT_OP_PRECEDENCE, true, false, true, false);
                return STNodeFactory.createMatchGuardNode(ifKeyword, expr);
            case RIGHT_DOUBLE_ARROW_TOKEN:
                // No guard present.
                return STNodeFactory.createEmptyNode();
            default:
                recover(nextToken, ParserRuleContext.OPTIONAL_MATCH_GUARD);
                return parseMatchGuard();
        }
    }

    /**
     * Parse match patterns list.
     * <p>
     * <code>match-pattern-list := match-pattern (| match-pattern)*</code>
     *
     * @return Match patterns list
     */
    private STNode parseMatchPatternList() {
        startContext(ParserRuleContext.MATCH_PATTERN);
        List<STNode> matchClauses = new ArrayList<>();
        while (!isEndOfMatchPattern(peek().kind)) {
            STNode clause = parseMatchPattern();
            if (clause == null) {
                break;
            }
            matchClauses.add(clause);

            STNode seperator = parseMatchPatternListMemberRhs();
            if (seperator == null) {
                break;
            }
            matchClauses.add(seperator);
        }

        endContext();
        return STNodeFactory.createNodeList(matchClauses);
    }

    private boolean isEndOfMatchPattern(SyntaxKind nextTokenKind) {
        switch (nextTokenKind) {
            case PIPE_TOKEN:
            case IF_KEYWORD:
            case RIGHT_DOUBLE_ARROW_TOKEN:
                return true;
            default:
                return false;
        }
    }

    /**
     * Parse match pattern.
     * <p>
     * <code>
     * match-pattern := var binding-pattern
     * | wildcard-match-pattern
     * | const-pattern
     * | list-match-pattern
     * | mapping-match-pattern
     * | error-match-pattern
     * </code>
     *
     * @return Match pattern
     */
    private STNode parseMatchPattern() {
        STToken nextToken = peek();
        if (isPredeclaredIdentifier(nextToken.kind)) {
            // Could be an error match pattern (`module:Error error(...)`) or a const reference.
            STNode typeRefOrConstExpr = parseQualifiedIdentifier(ParserRuleContext.MATCH_PATTERN);
            return parseErrorMatchPatternOrConsPattern(typeRefOrConstExpr);
        }

        switch (nextToken.kind) {
            case OPEN_PAREN_TOKEN:
            case NULL_KEYWORD:
            case TRUE_KEYWORD:
            case FALSE_KEYWORD:
            case PLUS_TOKEN:
            case MINUS_TOKEN:
            case DECIMAL_INTEGER_LITERAL_TOKEN:
            case HEX_INTEGER_LITERAL_TOKEN:
            case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
            case HEX_FLOATING_POINT_LITERAL_TOKEN:
            case STRING_LITERAL_TOKEN:
                return parseSimpleConstExpr();
            case VAR_KEYWORD:
                return parseVarTypedBindingPattern();
            case OPEN_BRACKET_TOKEN:
                return parseListMatchPattern();
            case OPEN_BRACE_TOKEN:
                return parseMappingMatchPattern();
            case ERROR_KEYWORD:
                return parseErrorMatchPattern();
            default:
                recover(nextToken, ParserRuleContext.MATCH_PATTERN_START);
                return parseMatchPattern();
        }
    }

    // Returns the `|` separator, or null when the pattern list has ended.
    private STNode parseMatchPatternListMemberRhs() {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case PIPE_TOKEN:
                return parsePipeToken();
            case IF_KEYWORD:
            case RIGHT_DOUBLE_ARROW_TOKEN:
                return null;
            default:
                recover(nextToken, ParserRuleContext.MATCH_PATTERN_LIST_MEMBER_RHS);
                return parseMatchPatternListMemberRhs();
        }
    }

    /**
     * Parse var typed binding pattern.
     * <p>
     * <code>var binding-pattern</code>
     * </p>
     *
     * @return Parsed typed binding pattern node
     */
    private STNode parseVarTypedBindingPattern() {
        STNode varKeyword = parseVarKeyword();
        STNode varTypeDesc = createBuiltinSimpleNameReference(varKeyword);
        STNode bindingPattern = parseBindingPattern();
        return STNodeFactory.createTypedBindingPatternNode(varTypeDesc, bindingPattern);
    }

    /**
     * Parse var keyword.
     *
     * @return Var keyword node
     */
    private STNode parseVarKeyword() {
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.VAR_KEYWORD) {
            return consume();
        } else {
            recover(nextToken, ParserRuleContext.VAR_KEYWORD);
            return parseVarKeyword();
        }
    }

    /**
     * Parse list match pattern.
     * <p>
     * <code>
     * list-match-pattern := [ list-member-match-patterns ]
     * list-member-match-patterns :=
     * match-pattern (, match-pattern)* [, rest-match-pattern]
     * | [ rest-match-pattern ]
     * </code>
     * </p>
     *
     * @return Parsed list match pattern node
     */
    private STNode parseListMatchPattern() {
        startContext(ParserRuleContext.LIST_MATCH_PATTERN);
        STNode openBracketToken = parseOpenBracket();
        List<STNode> matchPatternList = new ArrayList<>();
        STNode listMatchPatternMemberRhs = null;
        boolean isEndOfFields = false;

        while (!isEndOfListMatchPattern()) {
            STNode listMatchPatternMember = parseListMatchPatternMember();
            matchPatternList.add(listMatchPatternMember);
            listMatchPatternMemberRhs = parseListMatchPatternMemberRhs();

            // A rest-match-pattern must be the last member; anything after it is invalid.
            if (listMatchPatternMember.kind == SyntaxKind.REST_MATCH_PATTERN) {
                isEndOfFields = true;
                break;
            }

            if (listMatchPatternMemberRhs != null) {
                matchPatternList.add(listMatchPatternMemberRhs);
            } else {
                break;
            }
        }

        // Consume and invalidate any members found after the rest-match-pattern.
        while (isEndOfFields && listMatchPatternMemberRhs != null) {
            updateLastNodeInListWithInvalidNode(matchPatternList, listMatchPatternMemberRhs, null);

            if (peek().kind == SyntaxKind.CLOSE_BRACKET_TOKEN) {
                break;
            }

            STNode invalidField = parseListMatchPatternMember();
            updateLastNodeInListWithInvalidNode(matchPatternList, invalidField,
                    DiagnosticErrorCode.ERROR_MATCH_PATTERN_AFTER_REST_MATCH_PATTERN);
            listMatchPatternMemberRhs = parseListMatchPatternMemberRhs();
        }

        STNode matchPatternListNode = STNodeFactory.createNodeList(matchPatternList);
        STNode closeBracketToken = parseCloseBracket();
        endContext();

        return STNodeFactory.createListMatchPatternNode(openBracketToken, matchPatternListNode, closeBracketToken);
    }

    public boolean isEndOfListMatchPattern() {
        switch (peek().kind) {
            case CLOSE_BRACKET_TOKEN:
            case EOF_TOKEN:
                return true;
            default:
                return false;
        }
    }

    private STNode parseListMatchPatternMember() {
        STNode nextToken = peek();
        switch (nextToken.kind) {
            case ELLIPSIS_TOKEN:
                return parseRestMatchPattern();
            default:
                // No need of recovering here
                return parseMatchPattern();
        }
    }

    /**
     * Parse rest match pattern.
     * <p>
     * <code>
     * rest-match-pattern := ... var variable-name
     * </code>
     * </p>
     *
     * @return Parsed rest match pattern node
     */
    private STNode parseRestMatchPattern() {
        startContext(ParserRuleContext.REST_MATCH_PATTERN);
        STNode ellipsisToken = parseEllipsis();
        STNode varKeywordToken = parseVarKeyword();
        STNode variableName = parseVariableName();
        endContext();

        STSimpleNameReferenceNode simpleNameReferenceNode =
                (STSimpleNameReferenceNode) STNodeFactory.createSimpleNameReferenceNode(variableName);
        return STNodeFactory.createRestMatchPatternNode(ellipsisToken, varKeywordToken, simpleNameReferenceNode);
    }

    // Returns the separating comma, or null when the list match pattern has ended.
    private STNode parseListMatchPatternMemberRhs() {
        switch (peek().kind) {
            case COMMA_TOKEN:
                return parseComma();
            case CLOSE_BRACKET_TOKEN:
            case EOF_TOKEN:
                return null;
            default:
                recover(peek(), ParserRuleContext.LIST_MATCH_PATTERN_MEMBER_RHS);
                return parseListMatchPatternMemberRhs();
        }
    }

    /**
     * Parse mapping match pattern.
* <p> * mapping-match-pattern := { field-match-patterns } * <br/> * field-match-patterns := field-match-pattern (, field-match-pattern)* [, rest-match-pattern] * | [ rest-match-pattern ] * <br/> * field-match-pattern := field-name : match-pattern * <br/> * rest-match-pattern := ... var variable-name * </p> * * @return Parsed Node. */ private STNode parseMappingMatchPattern() { startContext(ParserRuleContext.MAPPING_MATCH_PATTERN); STNode openBraceToken = parseOpenBrace(); STNode fieldMatchPatterns = parseFieldMatchPatternList(); STNode closeBraceToken = parseCloseBrace(); endContext(); return STNodeFactory.createMappingMatchPatternNode(openBraceToken, fieldMatchPatterns, closeBraceToken); } private STNode parseFieldMatchPatternList() { List<STNode> fieldMatchPatterns = new ArrayList<>(); STNode fieldMatchPatternMember = parseFieldMatchPatternMember(); if (fieldMatchPatternMember == null) { return STNodeFactory.createEmptyNodeList(); } fieldMatchPatterns.add(fieldMatchPatternMember); if (fieldMatchPatternMember.kind == SyntaxKind.REST_MATCH_PATTERN) { invalidateExtraFieldMatchPatterns(fieldMatchPatterns); return STNodeFactory.createNodeList(fieldMatchPatterns); } return parseFieldMatchPatternList(fieldMatchPatterns); } private STNode parseFieldMatchPatternList(List<STNode> fieldMatchPatterns) { while (!isEndOfMappingMatchPattern()) { STNode fieldMatchPatternRhs = parseFieldMatchPatternRhs(); if (fieldMatchPatternRhs == null) { break; } fieldMatchPatterns.add(fieldMatchPatternRhs); STNode fieldMatchPatternMember = parseFieldMatchPatternMember(); if (fieldMatchPatternMember == null) { fieldMatchPatternMember = createMissingFieldMatchPattern(); } fieldMatchPatterns.add(fieldMatchPatternMember); if (fieldMatchPatternMember.kind == SyntaxKind.REST_MATCH_PATTERN) { invalidateExtraFieldMatchPatterns(fieldMatchPatterns); break; } } return STNodeFactory.createNodeList(fieldMatchPatterns); } private STNode createMissingFieldMatchPattern() { STNode fieldName = 
SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
STNode colon = SyntaxErrors.createMissingToken(SyntaxKind.COLON_TOKEN);
STNode identifier = SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
// Build a placeholder `field-name : match-pattern` node entirely out of missing
// tokens, then tag it with a missing-member diagnostic.
STNode matchPattern = STNodeFactory.createSimpleNameReferenceNode(identifier);
STNode fieldMatchPatternMember = STNodeFactory.createFieldMatchPatternNode(fieldName, colon, matchPattern);
fieldMatchPatternMember = SyntaxErrors.addDiagnostic(fieldMatchPatternMember,
        DiagnosticErrorCode.ERROR_MISSING_FIELD_MATCH_PATTERN_MEMBER);
return fieldMatchPatternMember;
}

/**
 * Parse and invalidate all field match pattern members after a rest-match-pattern.
 *
 * @param fieldMatchPatterns field-match-patterns list
 */
private void invalidateExtraFieldMatchPatterns(List<STNode> fieldMatchPatterns) {
    // Keep consuming `, member` pairs so the parser stays in sync, but attach
    // everything to the previous node as invalid minutiae with diagnostics.
    while (!isEndOfMappingMatchPattern()) {
        STNode fieldMatchPatternRhs = parseFieldMatchPatternRhs();
        if (fieldMatchPatternRhs == null) {
            break;
        }
        STNode fieldMatchPatternMember = parseFieldMatchPatternMember();
        if (fieldMatchPatternMember == null) {
            // Only a stray separator was found: report the token itself.
            updateLastNodeInListWithInvalidNode(fieldMatchPatterns, fieldMatchPatternRhs,
                    DiagnosticErrorCode.ERROR_INVALID_TOKEN, ((STToken) fieldMatchPatternRhs).text());
        } else {
            updateLastNodeInListWithInvalidNode(fieldMatchPatterns, fieldMatchPatternRhs, null);
            updateLastNodeInListWithInvalidNode(fieldMatchPatterns, fieldMatchPatternMember,
                    DiagnosticErrorCode.ERROR_MATCH_PATTERN_AFTER_REST_MATCH_PATTERN);
        }
    }
}

// Parse one field-match-pattern member: an identifier starts `name : pattern`,
// an ellipsis starts a rest pattern, close-brace/EOF ends the mapping pattern
// (null); anything else triggers recovery.
private STNode parseFieldMatchPatternMember() {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case IDENTIFIER_TOKEN:
            return parseFieldMatchPattern();
        case ELLIPSIS_TOKEN:
            return parseRestMatchPattern();
        case CLOSE_BRACE_TOKEN:
        case EOF_TOKEN:
            return null;
        default:
            recover(nextToken, ParserRuleContext.FIELD_MATCH_PATTERNS_START);
            return parseFieldMatchPatternMember();
    }
}

/**
 * Parse field match pattern.
* <p>
 * field-match-pattern := field-name : match-pattern
 * </p>
 *
 * @return Parsed field match pattern node
 */
public STNode parseFieldMatchPattern() {
    STNode fieldNameNode = parseVariableName();
    STNode colonToken = parseColon();
    STNode matchPattern = parseMatchPattern();
    return STNodeFactory.createFieldMatchPatternNode(fieldNameNode, colonToken, matchPattern);
}

// Check whether the next token terminates a mapping-match-pattern.
public boolean isEndOfMappingMatchPattern() {
    switch (peek().kind) {
        case CLOSE_BRACE_TOKEN:
        case EOF_TOKEN:
            return true;
        default:
            return false;
    }
}

// Parse the token following a field-match-pattern: a comma continues the mapping
// pattern, close-brace/EOF ends it (null); anything else triggers recovery.
private STNode parseFieldMatchPatternRhs() {
    switch (peek().kind) {
        case COMMA_TOKEN:
            return parseComma();
        case CLOSE_BRACE_TOKEN:
        case EOF_TOKEN:
            return null;
        default:
            recover(peek(), ParserRuleContext.FIELD_MATCH_PATTERN_MEMBER_RHS);
            return parseFieldMatchPatternRhs();
    }
}

// Disambiguate an already-parsed type-ref/const-expr: an open paren means it was
// the type reference of an error-match-pattern (with the `error` keyword missing);
// a match-pattern terminator means it was a const-pattern.
private STNode parseErrorMatchPatternOrConsPattern(STNode typeRefOrConstExpr) {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case OPEN_PAREN_TOKEN:
            // Fabricate the missing `error` keyword with a diagnostic and continue
            // as an error-match-pattern.
            STNode errorKeyword = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.ERROR_KEYWORD,
                    ParserRuleContext.ERROR_KEYWORD);
            startContext(ParserRuleContext.ERROR_MATCH_PATTERN);
            return parseErrorMatchPattern(errorKeyword, typeRefOrConstExpr);
        default:
            if (isMatchPatternEnd(peek().kind)) {
                // Const-pattern: the expression itself is the match pattern.
                return typeRefOrConstExpr;
            }
            recover(peek(), ParserRuleContext.ERROR_MATCH_PATTERN_OR_CONST_PATTERN);
            return parseErrorMatchPatternOrConsPattern(typeRefOrConstExpr);
    }
}

// Tokens that may legally follow a complete match pattern.
private boolean isMatchPatternEnd(SyntaxKind tokenKind) {
    switch (tokenKind) {
        case RIGHT_DOUBLE_ARROW_TOKEN:
        case COMMA_TOKEN:
        case CLOSE_BRACE_TOKEN:
        case CLOSE_BRACKET_TOKEN:
        case CLOSE_PAREN_TOKEN:
        case PIPE_TOKEN:
        case IF_KEYWORD:
        case EOF_TOKEN:
            return true;
        default:
            return false;
    }
}

/**
 * Parse error match pattern.
* <p> * error-match-pattern := error [error-type-reference] ( error-arg-list-match-pattern ) * error-arg-list-match-pattern := * error-message-match-pattern [, error-cause-match-pattern] [, error-field-match-patterns] * | [error-field-match-patterns] * error-message-match-pattern := simple-match-pattern * error-cause-match-pattern := simple-match-pattern | error-match-pattern * simple-match-pattern := * wildcard-match-pattern * | const-pattern * | var variable-name * error-field-match-patterns := * named-arg-match-pattern (, named-arg-match-pattern)* [, rest-match-pattern] * | rest-match-pattern * named-arg-match-pattern := arg-name = match-pattern * </p> * * @return Parsed functional match pattern node. */ private STNode parseErrorMatchPattern() { startContext(ParserRuleContext.ERROR_MATCH_PATTERN); STNode errorKeyword = consume(); return parseErrorMatchPattern(errorKeyword); } private STNode parseErrorMatchPattern(STNode errorKeyword) { STToken nextToken = peek(); STNode typeRef; switch (nextToken.kind) { case OPEN_PAREN_TOKEN: typeRef = STNodeFactory.createEmptyNode(); break; default: if (isPredeclaredIdentifier(nextToken.kind)) { typeRef = parseTypeReference(); break; } recover(peek(), ParserRuleContext.ERROR_MATCH_PATTERN_ERROR_KEYWORD_RHS); return parseErrorMatchPattern(errorKeyword); } return parseErrorMatchPattern(errorKeyword, typeRef); } private STNode parseErrorMatchPattern(STNode errorKeyword, STNode typeRef) { STNode openParenthesisToken = parseOpenParenthesis(); STNode argListMatchPatternNode = parseErrorArgListMatchPatterns(); STNode closeParenthesisToken = parseCloseParenthesis(); endContext(); return STNodeFactory.createErrorMatchPatternNode(errorKeyword, typeRef, openParenthesisToken, argListMatchPatternNode, closeParenthesisToken); } private STNode parseErrorArgListMatchPatterns() { List<STNode> argListMatchPatterns = new ArrayList<>(); if (isEndOfErrorFieldMatchPatterns()) { return STNodeFactory.createNodeList(argListMatchPatterns); } 
startContext(ParserRuleContext.ERROR_ARG_LIST_MATCH_PATTERN_FIRST_ARG); STNode firstArg = parseErrorArgListMatchPattern(ParserRuleContext.ERROR_ARG_LIST_MATCH_PATTERN_START); endContext(); if (isSimpleMatchPattern(firstArg.kind)) { argListMatchPatterns.add(firstArg); STNode argEnd = parseErrorArgListMatchPatternEnd(ParserRuleContext.ERROR_MESSAGE_MATCH_PATTERN_END); if (argEnd != null) { STNode secondArg = parseErrorArgListMatchPattern(ParserRuleContext.ERROR_MESSAGE_MATCH_PATTERN_RHS); if (isValidSecondArgMatchPattern(secondArg.kind)) { argListMatchPatterns.add(argEnd); argListMatchPatterns.add(secondArg); } else { updateLastNodeInListWithInvalidNode(argListMatchPatterns, argEnd, null); updateLastNodeInListWithInvalidNode(argListMatchPatterns, secondArg, DiagnosticErrorCode.ERROR_MATCH_PATTERN_NOT_ALLOWED); } } } else { if (firstArg.kind != SyntaxKind.NAMED_ARG_MATCH_PATTERN && firstArg.kind != SyntaxKind.REST_MATCH_PATTERN) { addInvalidNodeToNextToken(firstArg, DiagnosticErrorCode.ERROR_MATCH_PATTERN_NOT_ALLOWED); } else { argListMatchPatterns.add(firstArg); } } parseErrorFieldMatchPatterns(argListMatchPatterns); return STNodeFactory.createNodeList(argListMatchPatterns); } private boolean isSimpleMatchPattern(SyntaxKind matchPatternKind) { switch (matchPatternKind) { case IDENTIFIER_TOKEN: case SIMPLE_NAME_REFERENCE: case QUALIFIED_NAME_REFERENCE: case NUMERIC_LITERAL: case STRING_LITERAL: case NULL_LITERAL: case NIL_LITERAL: case BOOLEAN_LITERAL: case TYPED_BINDING_PATTERN: case UNARY_EXPRESSION: return true; default: return false; } } private boolean isValidSecondArgMatchPattern(SyntaxKind syntaxKind) { switch (syntaxKind) { case ERROR_MATCH_PATTERN: case NAMED_ARG_MATCH_PATTERN: case REST_MATCH_PATTERN: return true; default: if (isSimpleMatchPattern(syntaxKind)) { return true; } return false; } } /** * Parse error field match patterns. 
* error-field-match-patterns := * named-arg-match-pattern (, named-arg-match-pattern)* [, rest-match-pattern] * | rest-match-pattern * named-arg-match-pattern := arg-name = match-pattern * @param argListMatchPatterns */ private void parseErrorFieldMatchPatterns(List<STNode> argListMatchPatterns) { SyntaxKind lastValidArgKind = SyntaxKind.NAMED_ARG_MATCH_PATTERN; while (!isEndOfErrorFieldMatchPatterns()) { STNode argEnd = parseErrorArgListMatchPatternEnd(ParserRuleContext.ERROR_FIELD_MATCH_PATTERN_RHS); if (argEnd == null) { break; } STNode currentArg = parseErrorArgListMatchPattern(ParserRuleContext.ERROR_FIELD_MATCH_PATTERN); DiagnosticErrorCode errorCode = validateErrorFieldMatchPatternOrder(lastValidArgKind, currentArg.kind); if (errorCode == null) { argListMatchPatterns.add(argEnd); argListMatchPatterns.add(currentArg); lastValidArgKind = currentArg.kind; } else if (argListMatchPatterns.isEmpty()) { addInvalidNodeToNextToken(argEnd, null); addInvalidNodeToNextToken(currentArg, errorCode); } else { updateLastNodeInListWithInvalidNode(argListMatchPatterns, argEnd, null); updateLastNodeInListWithInvalidNode(argListMatchPatterns, currentArg, errorCode); } } } private boolean isEndOfErrorFieldMatchPatterns() { return isEndOfErrorFieldBindingPatterns(); } private STNode parseErrorArgListMatchPatternEnd(ParserRuleContext currentCtx) { switch (peek().kind) { case COMMA_TOKEN: return consume(); case CLOSE_PAREN_TOKEN: return null; default: recover(peek(), currentCtx); return parseErrorArgListMatchPatternEnd(currentCtx); } } private STNode parseErrorArgListMatchPattern(ParserRuleContext context) { STToken nextToken = peek(); if (isPredeclaredIdentifier(nextToken.kind)) { return parseNamedArgOrSimpleMatchPattern(); } switch (nextToken.kind) { case ELLIPSIS_TOKEN: return parseRestMatchPattern(); case OPEN_PAREN_TOKEN: case NULL_KEYWORD: case TRUE_KEYWORD: case FALSE_KEYWORD: case PLUS_TOKEN: case MINUS_TOKEN: case DECIMAL_INTEGER_LITERAL_TOKEN: case 
HEX_INTEGER_LITERAL_TOKEN: case DECIMAL_FLOATING_POINT_LITERAL_TOKEN: case HEX_FLOATING_POINT_LITERAL_TOKEN: case STRING_LITERAL_TOKEN: case OPEN_BRACKET_TOKEN: case OPEN_BRACE_TOKEN: case ERROR_KEYWORD: return parseMatchPattern(); case VAR_KEYWORD: STNode varType = createBuiltinSimpleNameReference(consume()); STNode variableName = createCaptureOrWildcardBP(parseVariableName()); return STNodeFactory.createTypedBindingPatternNode(varType, variableName); case CLOSE_PAREN_TOKEN: return SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN, DiagnosticErrorCode.ERROR_MISSING_MATCH_PATTERN); default: recover(nextToken, context); return parseErrorArgListMatchPattern(context); } } private STNode parseNamedArgOrSimpleMatchPattern() { STNode constRefExpr = parseQualifiedIdentifier(ParserRuleContext.MATCH_PATTERN); if (constRefExpr.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE || peek().kind != SyntaxKind.EQUAL_TOKEN) { return constRefExpr; } return parseNamedArgMatchPattern(((STSimpleNameReferenceNode) constRefExpr).name); } /** * Parses the next named arg match pattern. 
* <br/>
 * <code>named-arg-match-pattern := arg-name = match-pattern</code>
 * <br/>
 * <br/>
 *
 * @return Parsed named arg match pattern node
 */
private STNode parseNamedArgMatchPattern(STNode identifier) {
    startContext(ParserRuleContext.NAMED_ARG_MATCH_PATTERN);
    STNode equalToken = parseAssignOp();
    STNode matchPattern = parseMatchPattern();
    endContext();
    return STNodeFactory.createNamedArgMatchPatternNode(identifier, equalToken, matchPattern);
}

// Validate the ordering of error-field match-pattern args: named-arg and rest
// patterns are allowed, but nothing may follow a rest pattern. Returns null when
// the order is valid, or the diagnostic to report otherwise.
private DiagnosticErrorCode validateErrorFieldMatchPatternOrder(SyntaxKind prevArgKind,
                                                                SyntaxKind currentArgKind) {
    switch (currentArgKind) {
        case NAMED_ARG_MATCH_PATTERN:
        case REST_MATCH_PATTERN:
            if (prevArgKind == SyntaxKind.REST_MATCH_PATTERN) {
                return DiagnosticErrorCode.ERROR_REST_ARG_FOLLOWED_BY_ANOTHER_ARG;
            }
            return null;
        default:
            // Positional patterns are not allowed among error-field args.
            return DiagnosticErrorCode.ERROR_MATCH_PATTERN_NOT_ALLOWED;
    }
}

/**
 * Parse markdown documentation.
 *
 * @return markdown documentation node
 */
private STNode parseMarkdownDocumentation() {
    List<STNode> markdownDocLineList = new ArrayList<>();
    STToken nextToken = peek();
    // Merge consecutive documentation-string tokens into a single markdown
    // documentation node.
    while (nextToken.kind == SyntaxKind.DOCUMENTATION_STRING) {
        STToken documentationString = consume();
        STNode parsedDocLines = parseDocumentationString(documentationString);
        appendParsedDocumentationLines(markdownDocLineList, parsedDocLines);
        nextToken = peek();
    }
    STNode markdownDocLines = STNodeFactory.createNodeList(markdownDocLineList);
    return STNodeFactory.createMarkdownDocumentationNode(markdownDocLines);
}

/**
 * Parse documentation string.
*
 * @return markdown documentation line list node
 */
private STNode parseDocumentationString(STToken documentationStringToken) {
    // Re-lex the raw documentation token with the dedicated documentation lexer,
    // carrying over the token's leading trivia and existing diagnostics, then run
    // the documentation grammar over the resulting token stream.
    List<STNode> leadingTriviaList = getLeadingTriviaList(documentationStringToken.leadingMinutiae());
    Collection<STNodeDiagnostic> diagnostics = new ArrayList<>((documentationStringToken.diagnostics()));
    CharReader charReader = CharReader.from(documentationStringToken.text());
    DocumentationLexer documentationLexer = new DocumentationLexer(charReader, leadingTriviaList, diagnostics);
    AbstractTokenReader tokenReader = new TokenReader(documentationLexer);
    DocumentationParser documentationParser = new DocumentationParser(tokenReader);
    return documentationParser.parse();
}

// Flatten the leading-minutiae node into a plain list of trivia nodes.
private List<STNode> getLeadingTriviaList(STNode leadingMinutiaeNode) {
    List<STNode> leadingTriviaList = new ArrayList<>();
    int bucketCount = leadingMinutiaeNode.bucketCount();
    for (int i = 0; i < bucketCount; i++) {
        leadingTriviaList.add(leadingMinutiaeNode.childInBucket(i));
    }
    return leadingTriviaList;
}

// Append every parsed documentation line in `parsedDocLines` to the accumulator list.
private void appendParsedDocumentationLines(List<STNode> markdownDocLineList, STNode parsedDocLines) {
    int bucketCount = parsedDocLines.bucketCount();
    for (int i = 0; i < bucketCount; i++) {
        STNode markdownDocLine = parsedDocLines.childInBucket(i);
        markdownDocLineList.add(markdownDocLine);
    }
}

/**
 * Parse any statement that starts with a token that has ambiguity between being
 * a type-desc or an expression.
* * @param annots Annotations * @param qualifiers * @return Statement node */ private STNode parseStmtStartsWithTypeOrExpr(STNode annots, List<STNode> qualifiers) { startContext(ParserRuleContext.AMBIGUOUS_STMT); STNode typeOrExpr = parseTypedBindingPatternOrExpr(qualifiers, true); return parseStmtStartsWithTypedBPOrExprRhs(annots, typeOrExpr); } private STNode parseStmtStartsWithTypedBPOrExprRhs(STNode annots, STNode typedBindingPatternOrExpr) { if (typedBindingPatternOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) { List<STNode> varDeclQualifiers = new ArrayList<>(); switchContext(ParserRuleContext.VAR_DECL_STMT); return parseVarDeclRhs(annots, varDeclQualifiers, typedBindingPatternOrExpr, false); } STNode expr = getExpression(typedBindingPatternOrExpr); expr = getExpression(parseExpressionRhs(DEFAULT_OP_PRECEDENCE, expr, false, true)); return parseStatementStartWithExprRhs(expr); } private STNode parseTypedBindingPatternOrExpr(boolean allowAssignment) { List<STNode> typeDescQualifiers = new ArrayList<>(); return parseTypedBindingPatternOrExpr(typeDescQualifiers, allowAssignment); } private STNode parseTypedBindingPatternOrExpr(List<STNode> qualifiers, boolean allowAssignment) { parseTypeDescQualifiers(qualifiers); STToken nextToken = peek(); STNode typeOrExpr; if (isPredeclaredIdentifier(nextToken.kind)) { reportInvalidQualifierList(qualifiers); typeOrExpr = parseQualifiedIdentifier(ParserRuleContext.TYPE_NAME_OR_VAR_NAME); return parseTypedBindingPatternOrExprRhs(typeOrExpr, allowAssignment); } switch (nextToken.kind) { case OPEN_PAREN_TOKEN: reportInvalidQualifierList(qualifiers); return parseTypedBPOrExprStartsWithOpenParenthesis(); case FUNCTION_KEYWORD: return parseAnonFuncExprOrTypedBPWithFuncType(qualifiers); case OPEN_BRACKET_TOKEN: reportInvalidQualifierList(qualifiers); typeOrExpr = parseTupleTypeDescOrListConstructor(STNodeFactory.createEmptyNodeList()); return parseTypedBindingPatternOrExprRhs(typeOrExpr, allowAssignment); case 
DECIMAL_INTEGER_LITERAL_TOKEN: case HEX_INTEGER_LITERAL_TOKEN: case STRING_LITERAL_TOKEN: case NULL_KEYWORD: case TRUE_KEYWORD: case FALSE_KEYWORD: case DECIMAL_FLOATING_POINT_LITERAL_TOKEN: case HEX_FLOATING_POINT_LITERAL_TOKEN: reportInvalidQualifierList(qualifiers); STNode basicLiteral = parseBasicLiteral(); return parseTypedBindingPatternOrExprRhs(basicLiteral, allowAssignment); default: if (isValidExpressionStart(nextToken.kind, 1)) { reportInvalidQualifierList(qualifiers); return parseActionOrExpressionInLhs(STNodeFactory.createEmptyNodeList()); } return parseTypedBindingPattern(qualifiers, ParserRuleContext.VAR_DECL_STMT); } } /** * Parse the component after the ambiguous starting node. Ambiguous node could be either an expr * or a type-desc. The component followed by this ambiguous node could be the binding-pattern or * the expression-rhs. * * @param typeOrExpr Type desc or the expression * @param allowAssignment Flag indicating whether to allow assignment. i.e.: whether this is a * valid lvalue expression * @return Typed-binding-pattern node or an expression node */ private STNode parseTypedBindingPatternOrExprRhs(STNode typeOrExpr, boolean allowAssignment) { STToken nextToken = peek(); switch (nextToken.kind) { case PIPE_TOKEN: case BITWISE_AND_TOKEN: STToken nextNextToken = peek(2); if (nextNextToken.kind == SyntaxKind.EQUAL_TOKEN) { return typeOrExpr; } STNode pipeOrAndToken = parseBinaryOperator(); STNode rhsTypedBPOrExpr = parseTypedBindingPatternOrExpr(allowAssignment); if (rhsTypedBPOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) { STTypedBindingPatternNode typedBP = (STTypedBindingPatternNode) rhsTypedBPOrExpr; typeOrExpr = getTypeDescFromExpr(typeOrExpr); STNode newTypeDesc = mergeTypes(typeOrExpr, pipeOrAndToken, typedBP.typeDescriptor); return STNodeFactory.createTypedBindingPatternNode(newTypeDesc, typedBP.bindingPattern); } if (peek().kind == SyntaxKind.EQUAL_TOKEN) { return createCaptureBPWithMissingVarName(typeOrExpr, pipeOrAndToken, 
rhsTypedBPOrExpr); } return STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, typeOrExpr, pipeOrAndToken, rhsTypedBPOrExpr); case SEMICOLON_TOKEN: if (isExpression(typeOrExpr.kind)) { return typeOrExpr; } if (isDefiniteTypeDesc(typeOrExpr.kind) || !isAllBasicLiterals(typeOrExpr)) { STNode typeDesc = getTypeDescFromExpr(typeOrExpr); return parseTypeBindingPatternStartsWithAmbiguousNode(typeDesc); } return typeOrExpr; case IDENTIFIER_TOKEN: case QUESTION_MARK_TOKEN: if (isAmbiguous(typeOrExpr) || isDefiniteTypeDesc(typeOrExpr.kind)) { STNode typeDesc = getTypeDescFromExpr(typeOrExpr); return parseTypeBindingPatternStartsWithAmbiguousNode(typeDesc); } return typeOrExpr; case EQUAL_TOKEN: return typeOrExpr; case OPEN_BRACKET_TOKEN: return parseTypedBindingPatternOrMemberAccess(typeOrExpr, false, allowAssignment, ParserRuleContext.AMBIGUOUS_STMT); case OPEN_BRACE_TOKEN: case ERROR_KEYWORD: STNode typeDesc = getTypeDescFromExpr(typeOrExpr); return parseTypeBindingPatternStartsWithAmbiguousNode(typeDesc); default: if (isCompoundAssignment(nextToken.kind)) { return typeOrExpr; } if (isValidExprRhsStart(nextToken.kind, typeOrExpr.kind)) { return typeOrExpr; } STToken token = peek(); SyntaxKind typeOrExprKind = typeOrExpr.kind; if (typeOrExprKind == SyntaxKind.QUALIFIED_NAME_REFERENCE || typeOrExprKind == SyntaxKind.SIMPLE_NAME_REFERENCE) { recover(token, ParserRuleContext.BINDING_PATTERN_OR_VAR_REF_RHS); } else { recover(token, ParserRuleContext.BINDING_PATTERN_OR_EXPR_RHS); } return parseTypedBindingPatternOrExprRhs(typeOrExpr, allowAssignment); } } private STNode createCaptureBPWithMissingVarName(STNode lhsType, STNode separatorToken, STNode rhsType) { lhsType = getTypeDescFromExpr(lhsType); rhsType = getTypeDescFromExpr(rhsType); STNode newTypeDesc = mergeTypes(lhsType, separatorToken, rhsType); STNode identifier = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN, ParserRuleContext.VARIABLE_NAME); STNode captureBP = 
STNodeFactory.createCaptureBindingPatternNode(identifier); return STNodeFactory.createTypedBindingPatternNode(newTypeDesc, captureBP); } private STNode parseTypeBindingPatternStartsWithAmbiguousNode(STNode typeDesc) { typeDesc = parseComplexTypeDescriptor(typeDesc, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true); return parseTypedBindingPatternTypeRhs(typeDesc, ParserRuleContext.VAR_DECL_STMT); } private STNode parseTypedBPOrExprStartsWithOpenParenthesis() { STNode exprOrTypeDesc = parseTypedDescOrExprStartsWithOpenParenthesis(); if (isDefiniteTypeDesc(exprOrTypeDesc.kind)) { return parseTypeBindingPatternStartsWithAmbiguousNode(exprOrTypeDesc); } return parseTypedBindingPatternOrExprRhs(exprOrTypeDesc, false); } private boolean isDefiniteTypeDesc(SyntaxKind kind) { return kind.compareTo(SyntaxKind.RECORD_TYPE_DESC) >= 0 && kind.compareTo(SyntaxKind.FUTURE_TYPE_DESC) <= 0; } private boolean isDefiniteExpr(SyntaxKind kind) { if (kind == SyntaxKind.QUALIFIED_NAME_REFERENCE || kind == SyntaxKind.SIMPLE_NAME_REFERENCE) { return false; } return kind.compareTo(SyntaxKind.BINARY_EXPRESSION) >= 0 && kind.compareTo(SyntaxKind.ERROR_CONSTRUCTOR) <= 0; } private boolean isDefiniteAction(SyntaxKind kind) { return kind.compareTo(SyntaxKind.REMOTE_METHOD_CALL_ACTION) >= 0 && kind.compareTo(SyntaxKind.CLIENT_RESOURCE_ACCESS_ACTION) <= 0; } /** * Parse type or expression that starts with open parenthesis. Possible options are: * 1) () - nil type-desc or nil-literal * 2) (T) - Parenthesized type-desc * 3) (expr) - Parenthesized expression * 4) (param, param, ..) 
- Anon function params * * @return Type-desc or expression node */ private STNode parseTypedDescOrExprStartsWithOpenParenthesis() { STNode openParen = parseOpenParenthesis(); STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.CLOSE_PAREN_TOKEN) { STNode closeParen = parseCloseParenthesis(); return parseTypeOrExprStartWithEmptyParenthesis(openParen, closeParen); } STNode typeOrExpr = parseTypeDescOrExpr(); if (isAction(typeOrExpr)) { STNode closeParen = parseCloseParenthesis(); return STNodeFactory.createBracedExpressionNode(SyntaxKind.BRACED_ACTION, openParen, typeOrExpr, closeParen); } if (isExpression(typeOrExpr.kind)) { startContext(ParserRuleContext.BRACED_EXPR_OR_ANON_FUNC_PARAMS); return parseBracedExprOrAnonFuncParamRhs(openParen, typeOrExpr, false); } STNode typeDescNode = getTypeDescFromExpr(typeOrExpr); typeDescNode = parseComplexTypeDescriptor(typeDescNode, ParserRuleContext.TYPE_DESC_IN_PARENTHESIS, false); STNode closeParen = parseCloseParenthesis(); return STNodeFactory.createParenthesisedTypeDescriptorNode(openParen, typeDescNode, closeParen); } /** * Parse type-desc or expression. This method does not handle binding patterns. 
* * @return Type-desc node or expression node */ private STNode parseTypeDescOrExpr() { List<STNode> typeDescQualifiers = new ArrayList<>(); return parseTypeDescOrExpr(typeDescQualifiers); } private STNode parseTypeDescOrExpr(List<STNode> qualifiers) { parseTypeDescQualifiers(qualifiers); STToken nextToken = peek(); STNode typeOrExpr; switch (nextToken.kind) { case OPEN_PAREN_TOKEN: reportInvalidQualifierList(qualifiers); typeOrExpr = parseTypedDescOrExprStartsWithOpenParenthesis(); break; case FUNCTION_KEYWORD: typeOrExpr = parseAnonFuncExprOrFuncTypeDesc(qualifiers); break; case IDENTIFIER_TOKEN: reportInvalidQualifierList(qualifiers); typeOrExpr = parseQualifiedIdentifier(ParserRuleContext.TYPE_NAME_OR_VAR_NAME); return parseTypeDescOrExprRhs(typeOrExpr); case OPEN_BRACKET_TOKEN: reportInvalidQualifierList(qualifiers); typeOrExpr = parseTupleTypeDescOrListConstructor(STNodeFactory.createEmptyNodeList()); break; case DECIMAL_INTEGER_LITERAL_TOKEN: case HEX_INTEGER_LITERAL_TOKEN: case STRING_LITERAL_TOKEN: case NULL_KEYWORD: case TRUE_KEYWORD: case FALSE_KEYWORD: case DECIMAL_FLOATING_POINT_LITERAL_TOKEN: case HEX_FLOATING_POINT_LITERAL_TOKEN: reportInvalidQualifierList(qualifiers); STNode basicLiteral = parseBasicLiteral(); return parseTypeDescOrExprRhs(basicLiteral); default: if (isValidExpressionStart(nextToken.kind, 1)) { reportInvalidQualifierList(qualifiers); return parseActionOrExpressionInLhs(STNodeFactory.createEmptyNodeList()); } return parseTypeDescriptor(qualifiers, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN); } if (isDefiniteTypeDesc(typeOrExpr.kind)) { return parseComplexTypeDescriptor(typeOrExpr, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true); } return parseTypeDescOrExprRhs(typeOrExpr); } private boolean isExpression(SyntaxKind kind) { switch (kind) { case NUMERIC_LITERAL: case STRING_LITERAL_TOKEN: case NIL_LITERAL: case NULL_LITERAL: case BOOLEAN_LITERAL: return true; default: return 
kind.compareTo(SyntaxKind.BINARY_EXPRESSION) >= 0 && kind.compareTo(SyntaxKind.ERROR_CONSTRUCTOR) <= 0; } } /** * Parse statement that starts with an empty parenthesis. Empty parenthesis can be * 1) Nil literal * 2) Nil type-desc * 3) Anon-function params * * @param openParen Open parenthesis * @param closeParen Close parenthesis * @return Parsed node */ private STNode parseTypeOrExprStartWithEmptyParenthesis(STNode openParen, STNode closeParen) { STToken nextToken = peek(); switch (nextToken.kind) { case RIGHT_DOUBLE_ARROW_TOKEN: STNode params = STNodeFactory.createEmptyNodeList(); STNode anonFuncParam = STNodeFactory.createImplicitAnonymousFunctionParameters(openParen, params, closeParen); return parseImplicitAnonFunc(anonFuncParam, false); default: return STNodeFactory.createNilLiteralNode(openParen, closeParen); } } private STNode parseAnonFuncExprOrTypedBPWithFuncType(List<STNode> qualifiers) { STNode exprOrTypeDesc = parseAnonFuncExprOrFuncTypeDesc(qualifiers); if (isAction(exprOrTypeDesc) || isExpression(exprOrTypeDesc.kind)) { return exprOrTypeDesc; } return parseTypedBindingPatternTypeRhs(exprOrTypeDesc, ParserRuleContext.VAR_DECL_STMT); } /** * Parse anon-func-expr or function-type-desc, by resolving the ambiguity. 
* * @param qualifiers Preceding qualifiers * @return Anon-func-expr or function-type-desc */ private STNode parseAnonFuncExprOrFuncTypeDesc(List<STNode> qualifiers) { startContext(ParserRuleContext.FUNC_TYPE_DESC_OR_ANON_FUNC); STNode qualifierList; STNode functionKeyword = parseFunctionKeyword(); STNode funcSignature; if (peek().kind == SyntaxKind.OPEN_PAREN_TOKEN) { funcSignature = parseFuncSignature(true); STNode[] nodes = createFuncTypeQualNodeList(qualifiers, functionKeyword, true); qualifierList = nodes[0]; functionKeyword = nodes[1]; endContext(); return parseAnonFuncExprOrFuncTypeDesc(qualifierList, functionKeyword, funcSignature); } funcSignature = STNodeFactory.createEmptyNode(); STNode[] nodes = createFuncTypeQualNodeList(qualifiers, functionKeyword, false); qualifierList = nodes[0]; functionKeyword = nodes[1]; STNode funcTypeDesc = STNodeFactory.createFunctionTypeDescriptorNode(qualifierList, functionKeyword, funcSignature); if (getCurrentContext() != ParserRuleContext.STMT_START_BRACKETED_LIST) { switchContext(ParserRuleContext.VAR_DECL_STMT); return parseComplexTypeDescriptor(funcTypeDesc, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true); } return parseComplexTypeDescriptor(funcTypeDesc, ParserRuleContext.TYPE_DESC_IN_TUPLE, false); } private STNode parseAnonFuncExprOrFuncTypeDesc(STNode qualifierList, STNode functionKeyword, STNode funcSignature) { ParserRuleContext currentCtx = getCurrentContext(); switch (peek().kind) { case OPEN_BRACE_TOKEN: case RIGHT_DOUBLE_ARROW_TOKEN: if (currentCtx != ParserRuleContext.STMT_START_BRACKETED_LIST) { switchContext(ParserRuleContext.EXPRESSION_STATEMENT); } startContext(ParserRuleContext.ANON_FUNC_EXPRESSION); funcSignature = validateAndGetFuncParams((STFunctionSignatureNode) funcSignature); STNode funcBody = parseAnonFuncBody(false); STNode annots = STNodeFactory.createEmptyNodeList(); STNode anonFunc = STNodeFactory.createExplicitAnonymousFunctionExpressionNode(annots, qualifierList, functionKeyword, 
funcSignature, funcBody); return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, anonFunc, false, true); case IDENTIFIER_TOKEN: default: STNode funcTypeDesc = STNodeFactory.createFunctionTypeDescriptorNode(qualifierList, functionKeyword, funcSignature); if (currentCtx != ParserRuleContext.STMT_START_BRACKETED_LIST) { switchContext(ParserRuleContext.VAR_DECL_STMT); return parseComplexTypeDescriptor(funcTypeDesc, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true); } return parseComplexTypeDescriptor(funcTypeDesc, ParserRuleContext.TYPE_DESC_IN_TUPLE, false); } } private STNode parseTypeDescOrExprRhs(STNode typeOrExpr) { STToken nextToken = peek(); STNode typeDesc; switch (nextToken.kind) { case PIPE_TOKEN: case BITWISE_AND_TOKEN: STToken nextNextToken = peek(2); if (nextNextToken.kind == SyntaxKind.EQUAL_TOKEN) { return typeOrExpr; } STNode pipeOrAndToken = parseBinaryOperator(); STNode rhsTypeDescOrExpr = parseTypeDescOrExpr(); if (isExpression(rhsTypeDescOrExpr.kind)) { return STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, typeOrExpr, pipeOrAndToken, rhsTypeDescOrExpr); } typeDesc = getTypeDescFromExpr(typeOrExpr); rhsTypeDescOrExpr = getTypeDescFromExpr(rhsTypeDescOrExpr); return mergeTypes(typeDesc, pipeOrAndToken, rhsTypeDescOrExpr); case IDENTIFIER_TOKEN: case QUESTION_MARK_TOKEN: typeDesc = parseComplexTypeDescriptor(getTypeDescFromExpr(typeOrExpr), ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, false); return typeDesc; case SEMICOLON_TOKEN: return getTypeDescFromExpr(typeOrExpr); case EQUAL_TOKEN: case CLOSE_PAREN_TOKEN: case CLOSE_BRACE_TOKEN: case CLOSE_BRACKET_TOKEN: case EOF_TOKEN: case COMMA_TOKEN: return typeOrExpr; case OPEN_BRACKET_TOKEN: return parseTypedBindingPatternOrMemberAccess(typeOrExpr, false, true, ParserRuleContext.AMBIGUOUS_STMT); case ELLIPSIS_TOKEN: STNode ellipsis = parseEllipsis(); typeOrExpr = getTypeDescFromExpr(typeOrExpr); return STNodeFactory.createRestDescriptorNode(typeOrExpr, ellipsis); default: 
// --- tail of parseTypeDescOrExprRhs(): a compound-assignment or a valid
// expression-rhs start means the parsed node was an expression; otherwise
// recover and retry. ---
if (isCompoundAssignment(nextToken.kind)) {
    return typeOrExpr;
}
if (isValidExprRhsStart(nextToken.kind, typeOrExpr.kind)) {
    return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, typeOrExpr, false, false, false, false);
}
recover(peek(), ParserRuleContext.TYPE_DESC_OR_EXPR_RHS);
return parseTypeDescOrExprRhs(typeOrExpr);
}
}

/**
 * Check whether a node is still ambiguous between being a type-desc and an
 * expression: name references, literals, bracketed lists, braced/indexed forms
 * of those, and unions thereof.
 */
private boolean isAmbiguous(STNode node) {
    switch (node.kind) {
        case SIMPLE_NAME_REFERENCE:
        case QUALIFIED_NAME_REFERENCE:
        case NIL_LITERAL:
        case NULL_LITERAL:
        case NUMERIC_LITERAL:
        case STRING_LITERAL:
        case BOOLEAN_LITERAL:
        case BRACKETED_LIST:
            return true;
        case BINARY_EXPRESSION:
            STBinaryExpressionNode binaryExpr = (STBinaryExpressionNode) node;
            // NOTE(review): the `== BITWISE_AND_TOKEN` clause is unreachable here,
            // since `!= PIPE_TOKEN` is already true for a `&` operator — so
            // `&`-expressions are treated as unambiguous. Possibly
            // `&& ... != BITWISE_AND_TOKEN` was intended; confirm before changing.
            if (binaryExpr.operator.kind != SyntaxKind.PIPE_TOKEN ||
                    binaryExpr.operator.kind == SyntaxKind.BITWISE_AND_TOKEN) {
                return false;
            }
            return isAmbiguous(binaryExpr.lhsExpr) && isAmbiguous(binaryExpr.rhsExpr);
        case BRACED_EXPRESSION:
            return isAmbiguous(((STBracedExpressionNode) node).expression);
        case INDEXED_EXPRESSION:
            // A member-access is ambiguous only if the container and every key
            // expression (ignoring comma separators) are themselves ambiguous.
            STIndexedExpressionNode indexExpr = (STIndexedExpressionNode) node;
            if (!isAmbiguous(indexExpr.containerExpression)) {
                return false;
            }
            STNode keys = indexExpr.keyExpression;
            for (int i = 0; i < keys.bucketCount(); i++) {
                STNode item = keys.childInBucket(i);
                if (item.kind == SyntaxKind.COMMA_TOKEN) {
                    continue;
                }
                if (!isAmbiguous(item)) {
                    return false;
                }
            }
            return true;
        default:
            return false;
    }
}

// Check whether a node consists purely of basic literals (possibly grouped,
// listed, unioned with `|`, or signed with unary +/-).
private boolean isAllBasicLiterals(STNode node) {
    switch (node.kind) {
        case NIL_LITERAL:
        case NULL_LITERAL:
        case NUMERIC_LITERAL:
        case STRING_LITERAL:
        case BOOLEAN_LITERAL:
            return true;
        case BINARY_EXPRESSION:
            STBinaryExpressionNode binaryExpr = (STBinaryExpressionNode) node;
            // NOTE(review): same suspicious operator condition as in isAmbiguous()
            // in this block; also note the operands are checked with isAmbiguous(),
            // not recursively with isAllBasicLiterals() — confirm intent.
            if (binaryExpr.operator.kind != SyntaxKind.PIPE_TOKEN ||
                    binaryExpr.operator.kind == SyntaxKind.BITWISE_AND_TOKEN) {
                return false;
            }
            return isAmbiguous(binaryExpr.lhsExpr) && isAmbiguous(binaryExpr.rhsExpr);
        case BRACED_EXPRESSION:
            return isAmbiguous(((STBracedExpressionNode) node).expression);
        case BRACKETED_LIST:
STAmbiguousCollectionNode list = (STAmbiguousCollectionNode) node; for (STNode member : list.members) { if (member.kind == SyntaxKind.COMMA_TOKEN) { continue; } if (!isAllBasicLiterals(member)) { return false; } } return true; case UNARY_EXPRESSION: STUnaryExpressionNode unaryExpr = (STUnaryExpressionNode) node; if (unaryExpr.unaryOperator.kind != SyntaxKind.PLUS_TOKEN && unaryExpr.unaryOperator.kind != SyntaxKind.MINUS_TOKEN) { return false; } return isNumericLiteral(unaryExpr.expression); default: return false; } } private boolean isNumericLiteral(STNode node) { switch (node.kind) { case NUMERIC_LITERAL: return true; default: return false; } } /** * Parse binding-patterns. * <p> * <code> * binding-pattern := capture-binding-pattern * | wildcard-binding-pattern * | list-binding-pattern * | mapping-binding-pattern * | functional-binding-pattern * <br/><br/> * <p> * capture-binding-pattern := variable-name * variable-name := identifier * <br/><br/> * <p> * wildcard-binding-pattern := _ * list-binding-pattern := [ list-member-binding-patterns ] * <br/> * list-member-binding-patterns := binding-pattern (, binding-pattern)* [, rest-binding-pattern] * | [ rest-binding-pattern ] * <br/><br/> * <p> * mapping-binding-pattern := { field-binding-patterns } * field-binding-patterns := field-binding-pattern (, field-binding-pattern)* [, rest-binding-pattern] * | [ rest-binding-pattern ] * <br/> * field-binding-pattern := field-name : binding-pattern | variable-name * <br/> * rest-binding-pattern := ... 
variable-name
     * <p>
     * <br/><br/>
     * functional-binding-pattern := functionally-constructible-type-reference ( arg-list-binding-pattern )
     * <br/>
     * arg-list-binding-pattern := positional-arg-binding-patterns [, other-arg-binding-patterns]
     * | other-arg-binding-patterns
     * <br/>
     * positional-arg-binding-patterns := positional-arg-binding-pattern (, positional-arg-binding-pattern)*
     * <br/>
     * positional-arg-binding-pattern := binding-pattern
     * <br/>
     * other-arg-binding-patterns := named-arg-binding-patterns [, rest-binding-pattern]
     * | [rest-binding-pattern]
     * <br/>
     * named-arg-binding-patterns := named-arg-binding-pattern (, named-arg-binding-pattern)*
     * <br/>
     * named-arg-binding-pattern := arg-name = binding-pattern
     * </code>
     *
     * @return binding-pattern node
     */
    private STNode parseBindingPattern() {
        switch (peek().kind) {
            case OPEN_BRACKET_TOKEN:
                return parseListBindingPattern();
            case IDENTIFIER_TOKEN:
                return parseBindingPatternStartsWithIdentifier();
            case OPEN_BRACE_TOKEN:
                return parseMappingBindingPattern();
            case ERROR_KEYWORD:
                return parseErrorBindingPattern();
            default:
                recover(peek(), ParserRuleContext.BINDING_PATTERN);
                return parseBindingPattern();
        }
    }

    /**
     * Parse a binding pattern that starts with an identifier: either a
     * capture/wildcard binding pattern, or an error binding pattern whose
     * `error` keyword is missing (identifier followed by an open-paren).
     */
    private STNode parseBindingPatternStartsWithIdentifier() {
        STNode argNameOrBindingPattern =
                parseQualifiedIdentifier(ParserRuleContext.BINDING_PATTERN_STARTING_IDENTIFIER);
        STToken secondToken = peek();
        if (secondToken.kind == SyntaxKind.OPEN_PAREN_TOKEN) {
            // `T(...)` — treat as an error binding pattern with a missing `error` keyword.
            startContext(ParserRuleContext.ERROR_BINDING_PATTERN);
            STNode errorKeyword = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.ERROR_KEYWORD,
                    ParserRuleContext.ERROR_KEYWORD);
            return parseErrorBindingPattern(errorKeyword, argNameOrBindingPattern);
        }
        if (argNameOrBindingPattern.kind != SyntaxKind.SIMPLE_NAME_REFERENCE) {
            // A qualified name is not a valid capture binding pattern; attach it as
            // invalid minutiae to a missing identifier.
            STNode identifier = SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
            identifier = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(identifier, argNameOrBindingPattern,
                    DiagnosticErrorCode.ERROR_FIELD_BP_INSIDE_LIST_BP);
            return STNodeFactory.createCaptureBindingPatternNode(identifier);
        }
        return createCaptureOrWildcardBP(((STSimpleNameReferenceNode) argNameOrBindingPattern).name);
    }

    /**
     * Create a wildcard binding pattern if the variable name is `_`,
     * otherwise a capture binding pattern.
     */
    private STNode createCaptureOrWildcardBP(STNode varName) {
        STNode bindingPattern;
        if (isWildcardBP(varName)) {
            bindingPattern = getWildcardBindingPattern(varName);
        } else {
            bindingPattern = STNodeFactory.createCaptureBindingPatternNode(varName);
        }
        return bindingPattern;
    }

    /**
     * Parse list-binding-patterns.
     * <p>
     * <code>
     * list-binding-pattern := [ list-member-binding-patterns ]
     * <br/>
     * list-member-binding-patterns := binding-pattern (, binding-pattern)* [, rest-binding-pattern]
     * | [ rest-binding-pattern ]
     * </code>
     *
     * @return list-binding-pattern node
     */
    private STNode parseListBindingPattern() {
        startContext(ParserRuleContext.LIST_BINDING_PATTERN);
        STNode openBracket = parseOpenBracket();
        List<STNode> bindingPatternsList = new ArrayList<>();
        STNode listBindingPattern = parseListBindingPattern(openBracket, bindingPatternsList);
        endContext();
        return listBindingPattern;
    }

    // Parse the members and closing bracket of a list binding pattern, given the open bracket.
    private STNode parseListBindingPattern(STNode openBracket, List<STNode> bindingPatternsList) {
        if (isEndOfListBindingPattern(peek().kind) && bindingPatternsList.isEmpty()) {
            // Empty list binding pattern: `[ ]`.
            STNode closeBracket = parseCloseBracket();
            STNode bindingPatternsNode = STNodeFactory.createNodeList(bindingPatternsList);
            return STNodeFactory.createListBindingPatternNode(openBracket, bindingPatternsNode, closeBracket);
        }
        STNode listBindingPatternMember = parseListBindingPatternMember();
        bindingPatternsList.add(listBindingPatternMember);
        STNode listBindingPattern = parseListBindingPattern(openBracket, listBindingPatternMember, bindingPatternsList);
        return listBindingPattern;
    }

    // Parse the remaining members after the first one. A rest-binding-pattern, if present,
    // terminates the member list.
    private STNode parseListBindingPattern(STNode openBracket, STNode firstMember, List<STNode> bindingPatterns) {
        STNode member = firstMember;
        // Parse separator and member until the close bracket or a rest-binding-pattern.
        STToken token = peek();
        STNode listBindingPatternRhs = null;
        while (!isEndOfListBindingPattern(token.kind) && member.kind != SyntaxKind.REST_BINDING_PATTERN) {
            listBindingPatternRhs = parseListBindingPatternMemberRhs();
            if (listBindingPatternRhs == null) {
                break;
            }
            bindingPatterns.add(listBindingPatternRhs);
            member = parseListBindingPatternMember();
            bindingPatterns.add(member);
            token = peek();
        }
        STNode closeBracket = parseCloseBracket();
        STNode bindingPatternsNode = STNodeFactory.createNodeList(bindingPatterns);
        return STNodeFactory.createListBindingPatternNode(openBracket, bindingPatternsNode, closeBracket);
    }

    // Parse the token after a list-binding-pattern member: a comma to continue,
    // or null at the close bracket.
    private STNode parseListBindingPatternMemberRhs() {
        switch (peek().kind) {
            case COMMA_TOKEN:
                return parseComma();
            case CLOSE_BRACKET_TOKEN:
                return null;
            default:
                recover(peek(), ParserRuleContext.LIST_BINDING_PATTERN_MEMBER_END);
                return parseListBindingPatternMemberRhs();
        }
    }

    private boolean isEndOfListBindingPattern(SyntaxKind nextTokenKind) {
        switch (nextTokenKind) {
            case CLOSE_BRACKET_TOKEN:
            case EOF_TOKEN:
                return true;
            default:
                return false;
        }
    }

    /**
     * Parse list-binding-pattern member.
     * <p>
     * <code>
     * list-binding-pattern := [ list-member-binding-patterns ]
     * <br/>
     * list-member-binding-patterns := binding-pattern (, binding-pattern)* [, rest-binding-pattern]
     * | [ rest-binding-pattern ]
     * </code>
     *
     * @return List binding pattern member
     */
    private STNode parseListBindingPatternMember() {
        switch (peek().kind) {
            case ELLIPSIS_TOKEN:
                return parseRestBindingPattern();
            case OPEN_BRACKET_TOKEN:
            case IDENTIFIER_TOKEN:
            case OPEN_BRACE_TOKEN:
            case ERROR_KEYWORD:
                return parseBindingPattern();
            default:
                recover(peek(), ParserRuleContext.LIST_BINDING_PATTERN_MEMBER);
                return parseListBindingPatternMember();
        }
    }

    /**
     * Parse rest binding pattern.
     * <p>
     * <code>
     * rest-binding-pattern := ... variable-name
     * </code>
     *
     * @return Rest binding pattern node
     */
    private STNode parseRestBindingPattern() {
        startContext(ParserRuleContext.REST_BINDING_PATTERN);
        STNode ellipsis = parseEllipsis();
        STNode varName = parseVariableName();
        endContext();
        STSimpleNameReferenceNode simpleNameReferenceNode =
                (STSimpleNameReferenceNode) STNodeFactory.createSimpleNameReferenceNode(varName);
        return STNodeFactory.createRestBindingPatternNode(ellipsis, simpleNameReferenceNode);
    }

    /**
     * Parse Typed-binding-pattern.
     * <p>
     * <code>
     * typed-binding-pattern := inferable-type-descriptor binding-pattern
     * <br/><br/>
     * inferable-type-descriptor := type-descriptor | var
     * </code>
     *
     * @return Typed binding pattern node
     */
    private STNode parseTypedBindingPattern(ParserRuleContext context) {
        List<STNode> typeDescQualifiers = new ArrayList<>();
        return parseTypedBindingPattern(typeDescQualifiers, context);
    }

    // Parse the type descriptor (with the given qualifiers) followed by the binding pattern.
    private STNode parseTypedBindingPattern(List<STNode> qualifiers, ParserRuleContext context) {
        STNode typeDesc = parseTypeDescriptor(qualifiers,
                ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true, false, TypePrecedence.DEFAULT);
        STNode typeBindingPattern = parseTypedBindingPatternTypeRhs(typeDesc, context);
        return typeBindingPattern;
    }

    /**
     * Parse mapping-binding-patterns.
     * <p>
     * <code>
     * mapping-binding-pattern := { field-binding-patterns }
     * <br/><br/>
     * field-binding-patterns := field-binding-pattern (, field-binding-pattern)* [, rest-binding-pattern]
     * | [ rest-binding-pattern ]
     * <br/><br/>
     * field-binding-pattern := field-name : binding-pattern | variable-name
     * </code>
     *
     * @return mapping-binding-pattern node
     */
    private STNode parseMappingBindingPattern() {
        startContext(ParserRuleContext.MAPPING_BINDING_PATTERN);
        STNode openBrace = parseOpenBrace();
        STToken token = peek();
        if (isEndOfMappingBindingPattern(token.kind)) {
            // Empty mapping binding pattern: `{ }`.
            STNode closeBrace = parseCloseBrace();
            STNode bindingPatternsNode = STNodeFactory.createEmptyNodeList();
            endContext();
            return STNodeFactory.createMappingBindingPatternNode(openBrace, bindingPatternsNode, closeBrace);
        }
        List<STNode> bindingPatterns = new ArrayList<>();
        STNode prevMember = parseMappingBindingPatternMember();
        if (prevMember.kind != SyntaxKind.REST_BINDING_PATTERN) {
            bindingPatterns.add(prevMember);
        }
        return parseMappingBindingPattern(openBrace, bindingPatterns, prevMember);
    }

    // Parse the remaining field-binding-patterns; a rest-binding-pattern must be the last member.
    private STNode parseMappingBindingPattern(STNode openBrace, List<STNode> bindingPatterns, STNode prevMember) {
        STToken token = peek();
        STNode mappingBindingPatternRhs = null;
        while (!isEndOfMappingBindingPattern(token.kind) && prevMember.kind != SyntaxKind.REST_BINDING_PATTERN) {
            mappingBindingPatternRhs = parseMappingBindingPatternEnd();
            if (mappingBindingPatternRhs == null) {
                break;
            }
            bindingPatterns.add(mappingBindingPatternRhs);
            prevMember = parseMappingBindingPatternMember();
            if (prevMember.kind == SyntaxKind.REST_BINDING_PATTERN) {
                break;
            }
            bindingPatterns.add(prevMember);
            token = peek();
        }
        // A rest-binding-pattern is appended last (it was withheld from the list above).
        if (prevMember.kind == SyntaxKind.REST_BINDING_PATTERN) {
            bindingPatterns.add(prevMember);
        }
        STNode closeBrace = parseCloseBrace();
        STNode bindingPatternsNode = STNodeFactory.createNodeList(bindingPatterns);
        endContext();
        return STNodeFactory.createMappingBindingPatternNode(openBrace, bindingPatternsNode, closeBrace);
    }

    /**
     * Parse mapping-binding-pattern entry.
     * <p>
     * <code>
     * mapping-binding-pattern := { field-binding-patterns }
     * <br/><br/>
     * field-binding-patterns := field-binding-pattern (, field-binding-pattern)* [, rest-binding-pattern]
     * | [ rest-binding-pattern ]
     * <br/><br/>
     * field-binding-pattern := field-name : binding-pattern
     * | variable-name
     * </code>
     *
     * @return mapping-binding-pattern node
     */
    private STNode parseMappingBindingPatternMember() {
        STToken token = peek();
        switch (token.kind) {
            case ELLIPSIS_TOKEN:
                return parseRestBindingPattern();
            default:
                return parseFieldBindingPattern();
        }
    }

    // Parse the token after a mapping-binding-pattern member: comma to continue, null at `}`.
    private STNode parseMappingBindingPatternEnd() {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case COMMA_TOKEN:
                return parseComma();
            case CLOSE_BRACE_TOKEN:
                return null;
            default:
                recover(nextToken, ParserRuleContext.MAPPING_BINDING_PATTERN_END);
                return parseMappingBindingPatternEnd();
        }
    }

    /**
     * Parse field-binding-pattern.
     * <code>field-binding-pattern := field-name : binding-pattern | varname</code>
     *
     * @return field-binding-pattern node
     */
    private STNode parseFieldBindingPattern() {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case IDENTIFIER_TOKEN:
                STNode identifier = parseIdentifier(ParserRuleContext.FIELD_BINDING_PATTERN_NAME);
                STNode simpleNameReference = STNodeFactory.createSimpleNameReferenceNode(identifier);
                return parseFieldBindingPattern(simpleNameReference);
            default:
                recover(nextToken, ParserRuleContext.FIELD_BINDING_PATTERN_NAME);
                return parseFieldBindingPattern();
        }
    }

    // Given the field name, decide between the varname form and the `name : binding-pattern` form.
    private STNode parseFieldBindingPattern(STNode simpleNameReference) {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case COMMA_TOKEN:
            case CLOSE_BRACE_TOKEN:
                return STNodeFactory.createFieldBindingPatternVarnameNode(simpleNameReference);
            case COLON_TOKEN:
                STNode colon = parseColon();
                STNode bindingPattern = parseBindingPattern();
                return STNodeFactory.createFieldBindingPatternFullNode(simpleNameReference, colon, bindingPattern);
            default:
                recover(nextToken, ParserRuleContext.FIELD_BINDING_PATTERN_END);
                return parseFieldBindingPattern(simpleNameReference);
        }
    }

    private boolean isEndOfMappingBindingPattern(SyntaxKind nextTokenKind) {
        return nextTokenKind == SyntaxKind.CLOSE_BRACE_TOKEN || isEndOfModuleLevelNode(1);
    }

    // Disambiguate `error ...` between an error type descriptor and an error binding pattern
    // by looking ahead at the token(s) after the `error` keyword.
    private STNode parseErrorTypeDescOrErrorBP(STNode annots) {
        STToken nextNextToken = peek(2);
        switch (nextNextToken.kind) {
            case OPEN_PAREN_TOKEN:
                // `error (` — error binding pattern.
                return parseAsErrorBindingPattern();
            case LT_TOKEN:
                // `error <` — parameterized error type desc.
                return parseAsErrorTypeDesc(annots);
            case IDENTIFIER_TOKEN:
                SyntaxKind nextNextNextTokenKind = peek(3).kind;
                if (nextNextNextTokenKind == SyntaxKind.COLON_TOKEN ||
                        nextNextNextTokenKind == SyntaxKind.OPEN_PAREN_TOKEN) {
                    return parseAsErrorBindingPattern();
                }
                // else fall through to the type-desc interpretation
            default:
                return parseAsErrorTypeDesc(annots);
        }
    }

    // Parse as an error binding pattern that is the lhs of an assignment statement.
    private STNode parseAsErrorBindingPattern() {
        startContext(ParserRuleContext.ASSIGNMENT_STMT);
        return parseAssignmentStmtRhs(parseErrorBindingPattern());
    }

    // Parse as a variable declaration starting with an error type descriptor.
    private STNode parseAsErrorTypeDesc(STNode annots) {
        STNode finalKeyword = STNodeFactory.createEmptyNode();
        return parseVariableDecl(getAnnotations(annots), finalKeyword);
    }

    /**
     * Parse error binding pattern node.
     * <p>
     * <code>error-binding-pattern := error [error-type-reference] ( error-arg-list-binding-pattern )</code>
     * <br/><br/>
     * error-arg-list-binding-pattern :=
     * error-message-binding-pattern [, error-cause-binding-pattern] [, error-field-binding-patterns]
     * | [error-field-binding-patterns]
     * <br/><br/>
     * error-message-binding-pattern := simple-binding-pattern
     * <br/><br/>
     * error-cause-binding-pattern := simple-binding-pattern | error-binding-pattern
     * <br/><br/>
     * simple-binding-pattern := capture-binding-pattern | wildcard-binding-pattern
     * <br/><br/>
     * error-field-binding-patterns :=
     * named-arg-binding-pattern (, named-arg-binding-pattern)* [, rest-binding-pattern]
     * | rest-binding-pattern
     * <br/><br/>
     * named-arg-binding-pattern := arg-name = binding-pattern
     *
     * @return Error binding pattern node.
*/
    private STNode parseErrorBindingPattern() {
        startContext(ParserRuleContext.ERROR_BINDING_PATTERN);
        STNode errorKeyword = parseErrorKeyword();
        return parseErrorBindingPattern(errorKeyword);
    }

    // Parse the optional error-type-reference that may follow the `error` keyword.
    private STNode parseErrorBindingPattern(STNode errorKeyword) {
        STToken nextToken = peek();
        STNode typeRef;
        switch (nextToken.kind) {
            case OPEN_PAREN_TOKEN:
                // No type reference: `error ( ... )`.
                typeRef = STNodeFactory.createEmptyNode();
                break;
            default:
                if (isPredeclaredIdentifier(nextToken.kind)) {
                    typeRef = parseTypeReference();
                    break;
                }
                recover(peek(), ParserRuleContext.ERROR_BINDING_PATTERN_ERROR_KEYWORD_RHS);
                return parseErrorBindingPattern(errorKeyword);
        }
        return parseErrorBindingPattern(errorKeyword, typeRef);
    }

    // Parse the parenthesized arg-list and assemble the error binding pattern node.
    private STNode parseErrorBindingPattern(STNode errorKeyword, STNode typeRef) {
        STNode openParenthesis = parseOpenParenthesis();
        STNode argListBindingPatterns = parseErrorArgListBindingPatterns();
        STNode closeParenthesis = parseCloseParenthesis();
        endContext();
        return STNodeFactory.createErrorBindingPatternNode(errorKeyword, typeRef, openParenthesis,
                argListBindingPatterns, closeParenthesis);
    }

    /**
     * Parse error arg list binding pattern.
     * <p>
     * <code>
     * error-arg-list-binding-pattern :=
     * error-message-binding-pattern [, error-cause-binding-pattern] [, error-field-binding-patterns]
     * | [error-field-binding-patterns]
     * <br/><br/>
     * <p>
     * error-message-binding-pattern := simple-binding-pattern
     * <br/><br/>
     * <p>
     * error-cause-binding-pattern := simple-binding-pattern | error-binding-pattern
     * <br/><br/>
     * <p>
     * simple-binding-pattern := capture-binding-pattern | wildcard-binding-pattern
     * <br/><br/>
     * <p>
     * error-field-binding-patterns :=
     * named-arg-binding-pattern (, named-arg-binding-pattern)* [, rest-binding-pattern]
     * | rest-binding-pattern
     * <br/><br/>
     * <p>
     * named-arg-binding-pattern := arg-name = binding-pattern
     * </code>
     *
     * @return Error arg list binding patterns.
     */
    private STNode parseErrorArgListBindingPatterns() {
        List<STNode> argListBindingPatterns = new ArrayList<>();
        if (isEndOfErrorFieldBindingPatterns()) {
            // Empty arg list: `error(...)` with nothing inside the parens.
            return STNodeFactory.createNodeList(argListBindingPatterns);
        }
        return parseErrorArgListBindingPatterns(argListBindingPatterns);
    }

    // Parse the first arg and dispatch based on its kind: a simple BP is the error-message,
    // an error BP implies a missing error-message, and named-arg/rest go straight to the
    // field-binding-pattern phase.
    private STNode parseErrorArgListBindingPatterns(List<STNode> argListBindingPatterns) {
        STNode firstArg = parseErrorArgListBindingPattern(ParserRuleContext.ERROR_ARG_LIST_BINDING_PATTERN_START, true);
        if (firstArg == null) {
            return STNodeFactory.createNodeList(argListBindingPatterns);
        }
        switch (firstArg.kind) {
            case CAPTURE_BINDING_PATTERN:
            case WILDCARD_BINDING_PATTERN:
                argListBindingPatterns.add(firstArg);
                return parseErrorArgListBPWithoutErrorMsg(argListBindingPatterns);
            case ERROR_BINDING_PATTERN:
                // A cause BP without an error-message BP: synthesize the missing
                // error-message and comma, then continue with the field phase.
                STNode missingIdentifier = SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
                STNode missingErrorMsgBP = STNodeFactory.createCaptureBindingPatternNode(missingIdentifier);
                missingErrorMsgBP = SyntaxErrors.addDiagnostic(missingErrorMsgBP,
                        DiagnosticErrorCode.ERROR_MISSING_ERROR_MESSAGE_BINDING_PATTERN);
                STNode missingComma = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.COMMA_TOKEN,
                        DiagnosticErrorCode.ERROR_MISSING_COMMA_TOKEN);
                argListBindingPatterns.add(missingErrorMsgBP);
                argListBindingPatterns.add(missingComma);
                argListBindingPatterns.add(firstArg);
                return parseErrorArgListBPWithoutErrorMsgAndCause(argListBindingPatterns, firstArg.kind);
            case REST_BINDING_PATTERN:
            case NAMED_ARG_BINDING_PATTERN:
                argListBindingPatterns.add(firstArg);
                return parseErrorArgListBPWithoutErrorMsgAndCause(argListBindingPatterns, firstArg.kind);
            default:
                // Not a valid first arg: discard it as invalid minutiae and retry.
                addInvalidNodeToNextToken(firstArg, DiagnosticErrorCode.ERROR_BINDING_PATTERN_NOT_ALLOWED);
                return parseErrorArgListBindingPatterns(argListBindingPatterns);
        }
    }

    // Parse the second arg (the error-cause position) after the error-message BP.
    private STNode parseErrorArgListBPWithoutErrorMsg(List<STNode> argListBindingPatterns) {
        STNode argEnd = parseErrorArgsBindingPatternEnd(ParserRuleContext.ERROR_MESSAGE_BINDING_PATTERN_END);
        if (argEnd == null) {
            // Reached the close paren: only the error-message BP was present.
            return STNodeFactory.createNodeList(argListBindingPatterns);
        }
        STNode secondArg = parseErrorArgListBindingPattern(ParserRuleContext.ERROR_MESSAGE_BINDING_PATTERN_RHS, false);
        assert secondArg != null;
        switch (secondArg.kind) {
            case CAPTURE_BINDING_PATTERN:
            case WILDCARD_BINDING_PATTERN:
            case ERROR_BINDING_PATTERN:
            case REST_BINDING_PATTERN:
            case NAMED_ARG_BINDING_PATTERN:
                argListBindingPatterns.add(argEnd);
                argListBindingPatterns.add(secondArg);
                return parseErrorArgListBPWithoutErrorMsgAndCause(argListBindingPatterns, secondArg.kind);
            default:
                // Invalid second arg: attach separator and arg as invalid minutiae and retry.
                updateLastNodeInListWithInvalidNode(argListBindingPatterns, argEnd, null);
                updateLastNodeInListWithInvalidNode(argListBindingPatterns, secondArg,
                        DiagnosticErrorCode.ERROR_BINDING_PATTERN_NOT_ALLOWED);
                return parseErrorArgListBPWithoutErrorMsg(argListBindingPatterns);
        }
    }

    // Parse the remaining error-field-binding-patterns, validating ordering constraints
    // (only named-arg/rest allowed; nothing may follow a rest-binding-pattern).
    private STNode parseErrorArgListBPWithoutErrorMsgAndCause(List<STNode> argListBindingPatterns,
                                                              SyntaxKind lastValidArgKind) {
        while (!isEndOfErrorFieldBindingPatterns()) {
            STNode argEnd = parseErrorArgsBindingPatternEnd(ParserRuleContext.ERROR_FIELD_BINDING_PATTERN_END);
            if (argEnd == null) {
                // null marks the end of args
                break;
            }
            STNode currentArg = parseErrorArgListBindingPattern(ParserRuleContext.ERROR_FIELD_BINDING_PATTERN, false);
            assert currentArg != null;
            DiagnosticErrorCode errorCode = validateErrorFieldBindingPatternOrder(lastValidArgKind, currentArg.kind);
            if (errorCode == null) {
                argListBindingPatterns.add(argEnd);
                argListBindingPatterns.add(currentArg);
                lastValidArgKind = currentArg.kind;
            } else if (argListBindingPatterns.isEmpty()) {
                addInvalidNodeToNextToken(argEnd, null);
                addInvalidNodeToNextToken(currentArg, errorCode);
            } else {
                updateLastNodeInListWithInvalidNode(argListBindingPatterns, argEnd, null);
                updateLastNodeInListWithInvalidNode(argListBindingPatterns, currentArg, errorCode);
            }
        }
        return STNodeFactory.createNodeList(argListBindingPatterns);
    }

    private boolean isEndOfErrorFieldBindingPatterns() {
        SyntaxKind nextTokenKind = peek().kind;
        switch (nextTokenKind) {
            case CLOSE_PAREN_TOKEN:
            case EOF_TOKEN:
                return true;
            default:
                return false;
        }
    }

    // Parse the separator after an error arg: a comma to continue, or null at the close paren.
    private STNode parseErrorArgsBindingPatternEnd(ParserRuleContext currentCtx) {
        switch (peek().kind) {
            case COMMA_TOKEN:
                return consume();
            case CLOSE_PAREN_TOKEN:
                return null;
            default:
                recover(peek(), currentCtx);
                return parseErrorArgsBindingPatternEnd(currentCtx);
        }
    }

    // Parse one arg of an error-arg-list-binding-pattern. Returns null only when this is the
    // first arg and the list is empty (next token is the close paren).
    private STNode parseErrorArgListBindingPattern(ParserRuleContext context, boolean isFirstArg) {
        switch (peek().kind) {
            case ELLIPSIS_TOKEN:
                return parseRestBindingPattern();
            case IDENTIFIER_TOKEN:
                // Either a named arg or a simple binding pattern; decide on the next token.
                STNode argNameOrSimpleBindingPattern = consume();
                return parseNamedOrSimpleArgBindingPattern(argNameOrSimpleBindingPattern);
            case OPEN_BRACKET_TOKEN:
            case OPEN_BRACE_TOKEN:
            case ERROR_KEYWORD:
                return parseBindingPattern();
            case CLOSE_PAREN_TOKEN:
                if (isFirstArg) {
                    // null marks an empty arg list
                    return null;
                }
                // else fall through to recovery
            default:
                recover(peek(), context);
                return parseErrorArgListBindingPattern(context, isFirstArg);
        }
    }

    // `name = binding-pattern` becomes a named arg; a bare name becomes a capture/wildcard BP.
    private STNode parseNamedOrSimpleArgBindingPattern(STNode argNameOrSimpleBindingPattern) {
        STToken secondToken = peek();
        switch (secondToken.kind) {
            case EQUAL_TOKEN:
                STNode equal = consume();
                STNode bindingPattern = parseBindingPattern();
                return STNodeFactory.createNamedArgBindingPatternNode(argNameOrSimpleBindingPattern, equal,
                        bindingPattern);
            case COMMA_TOKEN:
            case CLOSE_PAREN_TOKEN:
            default:
                return createCaptureOrWildcardBP(argNameOrSimpleBindingPattern);
        }
    }

    // Validate ordering in the error-field-binding-pattern phase: only named-arg and rest
    // BPs are allowed, and nothing may follow a rest-binding-pattern. Returns null when valid,
    // otherwise the diagnostic to attach.
    private DiagnosticErrorCode validateErrorFieldBindingPatternOrder(SyntaxKind prevArgKind,
                                                                      SyntaxKind currentArgKind) {
        switch (currentArgKind) {
            case NAMED_ARG_BINDING_PATTERN:
            case REST_BINDING_PATTERN:
                if (prevArgKind == SyntaxKind.REST_BINDING_PATTERN) {
                    return DiagnosticErrorCode.ERROR_REST_ARG_FOLLOWED_BY_ANOTHER_ARG;
                }
                return null;
            case CAPTURE_BINDING_PATTERN:
            case WILDCARD_BINDING_PATTERN:
            case ERROR_BINDING_PATTERN:
            case LIST_BINDING_PATTERN:
            case MAPPING_BINDING_PATTERN:
            default:
                return DiagnosticErrorCode.ERROR_BINDING_PATTERN_NOT_ALLOWED;
        }
    }

    /*
     * This parses Typed binding patterns and deals with ambiguity between types,
     * and binding patterns. An example is 'T[a]'.
     * The ambiguity lies in between:
     * 1) Array Type
     * 2) List binding pattern
     * 3) Member access expression.
     */

    /**
     * Parse the component after the type-desc, of a typed-binding-pattern.
     *
     * @param typeDesc Starting type-desc of the typed-binding-pattern
     * @return Typed-binding pattern
     */
    private STNode parseTypedBindingPatternTypeRhs(STNode typeDesc, ParserRuleContext context) {
        return parseTypedBindingPatternTypeRhs(typeDesc, context, true);
    }

    // isRoot=false means we are inside a larger ambiguous construct and closing tokens
    // terminate this type-desc rather than triggering recovery.
    private STNode parseTypedBindingPatternTypeRhs(STNode typeDesc, ParserRuleContext context, boolean isRoot) {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case IDENTIFIER_TOKEN:
            case OPEN_BRACE_TOKEN:
            case ERROR_KEYWORD:
                STNode bindingPattern = parseBindingPattern();
                return STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);
            case OPEN_BRACKET_TOKEN:
                // Ambiguous `T[...`: could be an array type desc or a list binding pattern.
                STNode typedBindingPattern = parseTypedBindingPatternOrMemberAccess(typeDesc, true, true, context);
                assert typedBindingPattern.kind == SyntaxKind.TYPED_BINDING_PATTERN;
                return typedBindingPattern;
            case CLOSE_PAREN_TOKEN:
            case COMMA_TOKEN:
            case CLOSE_BRACKET_TOKEN:
            case CLOSE_BRACE_TOKEN:
                if (!isRoot) {
                    return typeDesc;
                }
                // else fall through to recovery
            default:
                recover(nextToken, ParserRuleContext.TYPED_BINDING_PATTERN_TYPE_RHS);
                return parseTypedBindingPatternTypeRhs(typeDesc, context, isRoot);
        }
    }

    /**
     * Parse typed-binding pattern with list, array-type-desc, or member-access-expr.
     *
     * @param typeDescOrExpr Type desc or the expression at the start
     * @param isTypedBindingPattern Is this is a typed-binding-pattern.
If this is `false`, then it's still ambiguous * @return Parsed node */ private STNode parseTypedBindingPatternOrMemberAccess(STNode typeDescOrExpr, boolean isTypedBindingPattern, boolean allowAssignment, ParserRuleContext context) { startContext(ParserRuleContext.BRACKETED_LIST); STNode openBracket = parseOpenBracket(); if (isBracketedListEnd(peek().kind)) { return parseAsArrayTypeDesc(typeDescOrExpr, openBracket, STNodeFactory.createEmptyNode(), context); } STNode member = parseBracketedListMember(isTypedBindingPattern); SyntaxKind currentNodeType = getBracketedListNodeType(member, isTypedBindingPattern); switch (currentNodeType) { case ARRAY_TYPE_DESC: STNode typedBindingPattern = parseAsArrayTypeDesc(typeDescOrExpr, openBracket, member, context); return typedBindingPattern; case LIST_BINDING_PATTERN: STNode bindingPattern = parseAsListBindingPattern(openBracket, new ArrayList<>(), member, false); STNode typeDesc = getTypeDescFromExpr(typeDescOrExpr); return STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern); case INDEXED_EXPRESSION: return parseAsMemberAccessExpr(typeDescOrExpr, openBracket, member); case ARRAY_TYPE_DESC_OR_MEMBER_ACCESS: break; case NONE: default: STNode memberEnd = parseBracketedListMemberEnd(); if (memberEnd != null) { List<STNode> memberList = new ArrayList<>(); memberList.add(getBindingPattern(member, true)); memberList.add(memberEnd); bindingPattern = parseAsListBindingPattern(openBracket, memberList); typeDesc = getTypeDescFromExpr(typeDescOrExpr); return STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern); } } STNode closeBracket = parseCloseBracket(); endContext(); return parseTypedBindingPatternOrMemberAccessRhs(typeDescOrExpr, openBracket, member, closeBracket, isTypedBindingPattern, allowAssignment, context); } private STNode parseAsMemberAccessExpr(STNode typeNameOrExpr, STNode openBracket, STNode member) { member = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, member, false, true); STNode 
closeBracket = parseCloseBracket(); endContext(); STNode keyExpr = STNodeFactory.createNodeList(member); STNode memberAccessExpr = STNodeFactory.createIndexedExpressionNode(typeNameOrExpr, openBracket, keyExpr, closeBracket); return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, memberAccessExpr, false, false); } private boolean isBracketedListEnd(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case EOF_TOKEN: case CLOSE_BRACKET_TOKEN: return true; default: return false; } } /** * Parse a member of an ambiguous bracketed list. This member could be: * 1) Array length * 2) Key expression of a member-access-expr * 3) A member-binding pattern of a list-binding-pattern. * * @param isTypedBindingPattern Is this in a definite typed-binding pattern * @return Parsed member node */ private STNode parseBracketedListMember(boolean isTypedBindingPattern) { STToken nextToken = peek(); switch (nextToken.kind) { case DECIMAL_INTEGER_LITERAL_TOKEN: case HEX_INTEGER_LITERAL_TOKEN: case ASTERISK_TOKEN: case STRING_LITERAL_TOKEN: return parseBasicLiteral(); case CLOSE_BRACKET_TOKEN: return STNodeFactory.createEmptyNode(); case OPEN_BRACE_TOKEN: case ERROR_KEYWORD: case ELLIPSIS_TOKEN: case OPEN_BRACKET_TOKEN: return parseStatementStartBracketedListMember(); case IDENTIFIER_TOKEN: if (isTypedBindingPattern) { return parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF); } break; default: if ((!isTypedBindingPattern && isValidExpressionStart(nextToken.kind, 1)) || isQualifiedIdentifierPredeclaredPrefix(nextToken.kind)) { break; } ParserRuleContext recoverContext = isTypedBindingPattern ? ParserRuleContext.LIST_BINDING_MEMBER_OR_ARRAY_LENGTH : ParserRuleContext.BRACKETED_LIST_MEMBER; recover(peek(), recoverContext); return parseBracketedListMember(isTypedBindingPattern); } STNode expr = parseExpression(); if (isWildcardBP(expr)) { return getWildcardBindingPattern(expr); } return expr; } /** * Treat the current node as an array, and parse the remainder of the binding pattern. 
* * @param typeDesc Type-desc * @param openBracket Open bracket * @param member Member * @return Parsed node */ private STNode parseAsArrayTypeDesc(STNode typeDesc, STNode openBracket, STNode member, ParserRuleContext context) { typeDesc = getTypeDescFromExpr(typeDesc); switchContext(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN); startContext(ParserRuleContext.ARRAY_TYPE_DESCRIPTOR); STNode closeBracket = parseCloseBracket(); endContext(); endContext(); return parseTypedBindingPatternOrMemberAccessRhs(typeDesc, openBracket, member, closeBracket, true, true, context); } private STNode parseBracketedListMemberEnd() { switch (peek().kind) { case COMMA_TOKEN: return parseComma(); case CLOSE_BRACKET_TOKEN: return null; default: recover(peek(), ParserRuleContext.BRACKETED_LIST_MEMBER_END); return parseBracketedListMemberEnd(); } } /** * We reach here to break ambiguity of T[a]. This could be: * 1) Array Type Desc * 2) Member access on LHS * 3) Typed-binding-pattern * * @param typeDescOrExpr Type name or the expr that precede the open-bracket. * @param openBracket Open bracket * @param member Member * @param closeBracket Open bracket * @param isTypedBindingPattern Is this is a typed-binding-pattern. * @return Specific node that matches to T[a], after solving ambiguity. 
*/ private STNode parseTypedBindingPatternOrMemberAccessRhs(STNode typeDescOrExpr, STNode openBracket, STNode member, STNode closeBracket, boolean isTypedBindingPattern, boolean allowAssignment, ParserRuleContext context) { STToken nextToken = peek(); switch (nextToken.kind) { case IDENTIFIER_TOKEN: case OPEN_BRACE_TOKEN: case ERROR_KEYWORD: STNode typeDesc = getTypeDescFromExpr(typeDescOrExpr); STNode arrayTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, typeDesc); return parseTypedBindingPatternTypeRhs(arrayTypeDesc, context); case OPEN_BRACKET_TOKEN: if (isTypedBindingPattern) { typeDesc = getTypeDescFromExpr(typeDescOrExpr); arrayTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, typeDesc); return parseTypedBindingPatternTypeRhs(arrayTypeDesc, context); } STNode keyExpr = getKeyExpr(member); STNode expr = STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr, closeBracket); return parseTypedBindingPatternOrMemberAccess(expr, false, allowAssignment, context); case QUESTION_MARK_TOKEN: typeDesc = getTypeDescFromExpr(typeDescOrExpr); arrayTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, typeDesc); typeDesc = parseComplexTypeDescriptor(arrayTypeDesc, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true); return parseTypedBindingPatternTypeRhs(typeDesc, context); case PIPE_TOKEN: case BITWISE_AND_TOKEN: return parseComplexTypeDescInTypedBPOrExprRhs(typeDescOrExpr, openBracket, member, closeBracket, isTypedBindingPattern); case IN_KEYWORD: if (context != ParserRuleContext.FOREACH_STMT && context != ParserRuleContext.FROM_CLAUSE && context != ParserRuleContext.JOIN_CLAUSE) { break; } return createTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket); case EQUAL_TOKEN: if (context == ParserRuleContext.FOREACH_STMT || context == ParserRuleContext.FROM_CLAUSE) { break; } if (isTypedBindingPattern || !allowAssignment || !isValidLVExpr(typeDescOrExpr)) { return 
createTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket);
                }

                // Otherwise treat `typeDescOrExpr[member]` as a member-access expression.
                keyExpr = getKeyExpr(member);
                typeDescOrExpr = getExpression(typeDescOrExpr);
                return STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr,
                        closeBracket);
            case SEMICOLON_TOKEN:
                // In foreach/from clauses the semicolon does not belong to this construct;
                // fall through to the recovery below instead.
                if (context == ParserRuleContext.FOREACH_STMT || context == ParserRuleContext.FROM_CLAUSE) {
                    break;
                }
                // `T[a];` - a trailing semicolon means this must be a typed-binding-pattern.
                return createTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket);
            case CLOSE_BRACE_TOKEN:
            case COMMA_TOKEN:
                // Inside an ambiguous statement, `}` or `,` terminates a member-access expression.
                if (context == ParserRuleContext.AMBIGUOUS_STMT) {
                    keyExpr = getKeyExpr(member);
                    return STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr,
                            closeBracket);
                }
                // else fall through
            default:
                // Any token that can start an expression-rhs makes this a member-access expression.
                if (!isTypedBindingPattern && isValidExprRhsStart(nextToken.kind, closeBracket.kind)) {
                    keyExpr = getKeyExpr(member);
                    typeDescOrExpr = getExpression(typeDescOrExpr);
                    return STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr,
                            closeBracket);
                }
                break;
        }

        // Could not decide from the next token: recover and retry.
        ParserRuleContext recoveryCtx = ParserRuleContext.BRACKETED_LIST_RHS;
        if (isTypedBindingPattern) {
            recoveryCtx = ParserRuleContext.TYPE_DESC_RHS_OR_BP_RHS;
        }

        recover(peek(), recoveryCtx);
        return parseTypedBindingPatternOrMemberAccessRhs(typeDescOrExpr, openBracket, member, closeBracket,
                isTypedBindingPattern, allowAssignment, context);
    }

    /**
     * Returns the key-expression node-list for a member-access expression, synthesizing a
     * missing identifier (with a diagnostic) when no member expression was parsed.
     *
     * @param member Parsed member expression, or {@code null} when absent
     * @return Node-list holding the key expression
     */
    private STNode getKeyExpr(STNode member) {
        if (member == null) {
            // No key expression present: add a missing identifier so the tree stays complete.
            STToken keyIdentifier = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,
                    DiagnosticErrorCode.ERROR_MISSING_KEY_EXPR_IN_MEMBER_ACCESS_EXPR);
            STNode missingVarRef = STNodeFactory.createSimpleNameReferenceNode(keyIdentifier);
            return STNodeFactory.createNodeList(missingVarRef);
        }
        return STNodeFactory.createNodeList(member);
    }

    /**
     * Creates a typed-binding-pattern from the parts of an ambiguous bracketed construct.
     * The bracketed part becomes a list-binding-pattern, except when the member is a
     * numeric/asterisk literal, in which case it is an array-type-desc with a missing
     * variable name.
     *
     * @param typeDescOrExpr Node holding the type-descriptor (or an expression convertible to one)
     * @param openBracket    Open bracket token
     * @param member         Member found between the brackets, possibly empty
     * @param closeBracket   Close bracket token
     * @return Typed-binding-pattern node
     */
    private STNode createTypedBindingPattern(STNode typeDescOrExpr, STNode openBracket, STNode member,
                                             STNode closeBracket) {
        STNode bindingPatterns = STNodeFactory.createEmptyNodeList();
        if (!isEmpty(member)) {
            SyntaxKind memberKind = member.kind;
            if (memberKind == SyntaxKind.NUMERIC_LITERAL || memberKind == SyntaxKind.ASTERISK_LITERAL) {
                // `T[5]` / `T[*]`: the brackets belong to an array-type-desc, so the variable
                // name itself is missing; synthesize it with a diagnostic.
                STNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);
                STNode arrayTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, typeDesc);
                STToken identifierToken = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,
                        DiagnosticErrorCode.ERROR_MISSING_VARIABLE_NAME);
                STNode variableName = STNodeFactory.createCaptureBindingPatternNode(identifierToken);
                return STNodeFactory.createTypedBindingPatternNode(arrayTypeDesc, variableName);
            }

            STNode bindingPattern = getBindingPattern(member, true);
            bindingPatterns = STNodeFactory.createNodeList(bindingPattern);
        }

        STNode bindingPattern = STNodeFactory.createListBindingPatternNode(openBracket, bindingPatterns,
                closeBracket);
        STNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);
        return STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);
    }

    /**
     * Parse a union or intersection type-desc/binary-expression that involves ambiguous
     * bracketed list in lhs.
     * <p>
     * e.g: <code>(T[a] &amp; R..)</code> or <code>(T[a] | R.. )</code>
     * <p>
     * Complexity occurs in scenarios such as <code>T[a] |/& R[b]</code>. If the token after this
     * is another binding-pattern, then <code>(T[a] |/& R[b])</code> becomes the type-desc. However,
     * if the token follows this is an equal or semicolon, then <code>(T[a] |/& R)</code> becomes
     * the type-desc, and <code>[b]</code> becomes the binding pattern.
 *
     * @param typeDescOrExpr Type desc or the expression
     * @param openBracket Open bracket
     * @param member Member
     * @param closeBracket Close bracket
     * @return Parsed node
     */
    private STNode parseComplexTypeDescInTypedBPOrExprRhs(STNode typeDescOrExpr, STNode openBracket,
                                                          STNode member, STNode closeBracket,
                                                          boolean isTypedBindingPattern) {
        STNode pipeOrAndToken = parseUnionOrIntersectionToken();
        STNode typedBindingPatternOrExpr = parseTypedBindingPatternOrExpr(false);

        if (typedBindingPatternOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) {
            // Rhs turned out to be a typed-binding-pattern: `T[a]` is an array-type-desc,
            // merged with the rhs type-desc honoring precedence/associativity.
            STNode lhsTypeDesc = getTypeDescFromExpr(typeDescOrExpr);
            lhsTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, lhsTypeDesc);
            STTypedBindingPatternNode rhsTypedBindingPattern =
                    (STTypedBindingPatternNode) typedBindingPatternOrExpr;
            STNode rhsTypeDesc = rhsTypedBindingPattern.typeDescriptor;
            STNode newTypeDesc = mergeTypes(lhsTypeDesc, pipeOrAndToken, rhsTypeDesc);
            return STNodeFactory.createTypedBindingPatternNode(newTypeDesc,
                    rhsTypedBindingPattern.bindingPattern);
        }

        if (isTypedBindingPattern) {
            // Known to be a typed-binding-pattern, but the binding pattern itself is missing.
            STNode lhsTypeDesc = getTypeDescFromExpr(typeDescOrExpr);
            lhsTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, lhsTypeDesc);
            return createCaptureBPWithMissingVarName(lhsTypeDesc, pipeOrAndToken, typedBindingPatternOrExpr);
        }

        // Otherwise this is a binary expression whose lhs is a member-access expression.
        STNode keyExpr = getExpression(member);
        STNode containerExpr = getExpression(typeDescOrExpr);
        STNode lhsExpr =
                STNodeFactory.createIndexedExpressionNode(containerExpr, openBracket, keyExpr, closeBracket);
        return STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, lhsExpr, pipeOrAndToken,
                typedBindingPatternOrExpr);
    }

    /**
     * Merges two types separated by <code>|</code> or <code>&</code> into one type, while taking precedence
     * and associativity into account.
     *
     * @param lhsTypeDesc lhs type
     * @param pipeOrAndToken pipe or bitwise-and token
     * @param rhsTypeDesc rhs type
     * @return a TypeDescriptorNode
     */
    private STNode mergeTypes(STNode lhsTypeDesc, STNode pipeOrAndToken, STNode rhsTypeDesc) {
        if (pipeOrAndToken.kind == SyntaxKind.PIPE_TOKEN) {
            return mergeTypesWithUnion(lhsTypeDesc, pipeOrAndToken, rhsTypeDesc);
        } else {
            return mergeTypesWithIntersection(lhsTypeDesc, pipeOrAndToken, rhsTypeDesc);
        }
    }

    /**
     * Merges two types separated by <code>|</code> into one type, while taking precedence
     * and associativity into account.
     *
     * @param lhsTypeDesc lhs type
     * @param pipeToken pipe token
     * @param rhsTypeDesc rhs type
     * @return a TypeDescriptorNode
     */
    private STNode mergeTypesWithUnion(STNode lhsTypeDesc, STNode pipeToken, STNode rhsTypeDesc) {
        if (rhsTypeDesc.kind == SyntaxKind.UNION_TYPE_DESC) {
            // Union is left-associative: splice lhs into the left-most leaf of the rhs union.
            STUnionTypeDescriptorNode rhsUnionTypeDesc = (STUnionTypeDescriptorNode) rhsTypeDesc;
            return replaceLeftMostUnionWithAUnion(lhsTypeDesc, pipeToken, rhsUnionTypeDesc);
        } else {
            return createUnionTypeDesc(lhsTypeDesc, pipeToken, rhsTypeDesc);
        }
    }

    /**
     * Merges two types separated by <code>&</code> into one type, while taking precedence
     * and associativity into account.
     *
     * @param lhsTypeDesc lhs type
     * @param bitwiseAndToken bitwise-and token
     * @param rhsTypeDesc rhs type
     * @return a TypeDescriptorNode
     */
    private STNode mergeTypesWithIntersection(STNode lhsTypeDesc, STNode bitwiseAndToken, STNode rhsTypeDesc) {
        if (lhsTypeDesc.kind == SyntaxKind.UNION_TYPE_DESC) {
            // Intersection binds tighter than union: only the right-most operand of the lhs
            // union participates in the intersection.
            STUnionTypeDescriptorNode lhsUnionTypeDesc = (STUnionTypeDescriptorNode) lhsTypeDesc;
            if (rhsTypeDesc.kind == SyntaxKind.INTERSECTION_TYPE_DESC) {
                rhsTypeDesc = replaceLeftMostIntersectionWithAIntersection(lhsUnionTypeDesc.rightTypeDesc,
                        bitwiseAndToken, (STIntersectionTypeDescriptorNode) rhsTypeDesc);
                return createUnionTypeDesc(lhsUnionTypeDesc.leftTypeDesc, lhsUnionTypeDesc.pipeToken,
                        rhsTypeDesc);
            } else if (rhsTypeDesc.kind == SyntaxKind.UNION_TYPE_DESC) {
                rhsTypeDesc = replaceLeftMostUnionWithAIntersection(lhsUnionTypeDesc.rightTypeDesc,
                        bitwiseAndToken, (STUnionTypeDescriptorNode) rhsTypeDesc);
                return replaceLeftMostUnionWithAUnion(lhsUnionTypeDesc.leftTypeDesc,
                        lhsUnionTypeDesc.pipeToken, (STUnionTypeDescriptorNode) rhsTypeDesc);
            } else {
                rhsTypeDesc = createIntersectionTypeDesc(lhsUnionTypeDesc.rightTypeDesc, bitwiseAndToken,
                        rhsTypeDesc);
                return createUnionTypeDesc(lhsUnionTypeDesc.leftTypeDesc, lhsUnionTypeDesc.pipeToken,
                        rhsTypeDesc);
            }
        }

        if (rhsTypeDesc.kind == SyntaxKind.UNION_TYPE_DESC) {
            STUnionTypeDescriptorNode rhsUnionTypeDesc = (STUnionTypeDescriptorNode) rhsTypeDesc;
            return replaceLeftMostUnionWithAIntersection(lhsTypeDesc, bitwiseAndToken, rhsUnionTypeDesc);
        } else if (rhsTypeDesc.kind == SyntaxKind.INTERSECTION_TYPE_DESC) {
            STIntersectionTypeDescriptorNode rhsIntSecTypeDesc = (STIntersectionTypeDescriptorNode) rhsTypeDesc;
            return replaceLeftMostIntersectionWithAIntersection(lhsTypeDesc, bitwiseAndToken,
                    rhsIntSecTypeDesc);
        } else {
            return createIntersectionTypeDesc(lhsTypeDesc, bitwiseAndToken, rhsTypeDesc);
        }
    }

    // Recursively descends to the left-most leaf of `unionTypeDesc` and unions `typeDesc`
    // with it, preserving left-associativity.
    private STNode replaceLeftMostUnionWithAUnion(STNode typeDesc, STNode pipeToken,
                                                  STUnionTypeDescriptorNode unionTypeDesc) {
        STNode leftTypeDesc = unionTypeDesc.leftTypeDesc;
        if (leftTypeDesc.kind == SyntaxKind.UNION_TYPE_DESC) {
            return unionTypeDesc.replace(unionTypeDesc.leftTypeDesc,
                    replaceLeftMostUnionWithAUnion(typeDesc, pipeToken,
                            (STUnionTypeDescriptorNode) leftTypeDesc));
        }

        leftTypeDesc = createUnionTypeDesc(typeDesc, pipeToken, leftTypeDesc);
        return unionTypeDesc.replace(unionTypeDesc.leftTypeDesc, leftTypeDesc);
    }

    // Recursively descends to the left-most leaf of `unionTypeDesc` and intersects
    // `typeDesc` with it (intersection binds tighter than union).
    private STNode replaceLeftMostUnionWithAIntersection(STNode typeDesc, STNode bitwiseAndToken,
                                                         STUnionTypeDescriptorNode unionTypeDesc) {
        STNode leftTypeDesc = unionTypeDesc.leftTypeDesc;
        if (leftTypeDesc.kind == SyntaxKind.UNION_TYPE_DESC) {
            return unionTypeDesc.replace(unionTypeDesc.leftTypeDesc,
                    replaceLeftMostUnionWithAIntersection(typeDesc, bitwiseAndToken,
                            (STUnionTypeDescriptorNode) leftTypeDesc));
        }

        if (leftTypeDesc.kind == SyntaxKind.INTERSECTION_TYPE_DESC) {
            return unionTypeDesc.replace(unionTypeDesc.leftTypeDesc,
                    replaceLeftMostIntersectionWithAIntersection(typeDesc, bitwiseAndToken,
                            (STIntersectionTypeDescriptorNode) leftTypeDesc));
        }

        leftTypeDesc = createIntersectionTypeDesc(typeDesc, bitwiseAndToken, leftTypeDesc);
        return unionTypeDesc.replace(unionTypeDesc.leftTypeDesc, leftTypeDesc);
    }

    // Recursively descends to the left-most leaf of `intersectionTypeDesc` and intersects
    // `typeDesc` with it, preserving left-associativity.
    private STNode replaceLeftMostIntersectionWithAIntersection(STNode typeDesc, STNode bitwiseAndToken,
            STIntersectionTypeDescriptorNode intersectionTypeDesc) {
        STNode leftTypeDesc = intersectionTypeDesc.leftTypeDesc;
        if (leftTypeDesc.kind == SyntaxKind.INTERSECTION_TYPE_DESC) {
            return intersectionTypeDesc.replace(intersectionTypeDesc.leftTypeDesc,
                    replaceLeftMostIntersectionWithAIntersection(typeDesc, bitwiseAndToken,
                            (STIntersectionTypeDescriptorNode) leftTypeDesc));
        }

        leftTypeDesc = createIntersectionTypeDesc(typeDesc, bitwiseAndToken, leftTypeDesc);
        return intersectionTypeDesc.replace(intersectionTypeDesc.leftTypeDesc, leftTypeDesc);
    }

    // Applies the `[member]` brackets as an array dimension to the right-most operand of
    // `lhsTypeDesc` (array binds tighter than union/intersection), re-merging afterwards.
    private STNode getArrayTypeDesc(STNode openBracket, STNode member, STNode closeBracket,
                                    STNode lhsTypeDesc) {
        if (lhsTypeDesc.kind == SyntaxKind.UNION_TYPE_DESC) {
            STUnionTypeDescriptorNode unionTypeDesc = (STUnionTypeDescriptorNode) lhsTypeDesc;
            STNode middleTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket,
                    unionTypeDesc.rightTypeDesc);
            lhsTypeDesc = mergeTypesWithUnion(unionTypeDesc.leftTypeDesc, unionTypeDesc.pipeToken,
                    middleTypeDesc);
        } else if (lhsTypeDesc.kind == SyntaxKind.INTERSECTION_TYPE_DESC) {
            STIntersectionTypeDescriptorNode intersectionTypeDesc =
                    (STIntersectionTypeDescriptorNode) lhsTypeDesc;
            STNode middleTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket,
                    intersectionTypeDesc.rightTypeDesc);
            lhsTypeDesc = mergeTypesWithIntersection(intersectionTypeDesc.leftTypeDesc,
                    intersectionTypeDesc.bitwiseAndToken, middleTypeDesc);
        } else {
            lhsTypeDesc = createArrayTypeDesc(lhsTypeDesc, openBracket, member, closeBracket);
        }

        return lhsTypeDesc;
    }

    /**
     * Parse union (|) or intersection (&) type operator.
     *
     * @return pipe or bitwise and token
     */
    private STNode parseUnionOrIntersectionToken() {
        STToken token = peek();
        if (token.kind == SyntaxKind.PIPE_TOKEN || token.kind == SyntaxKind.BITWISE_AND_TOKEN) {
            return consume();
        } else {
            recover(token, ParserRuleContext.UNION_OR_INTERSECTION_TOKEN);
            return parseUnionOrIntersectionToken();
        }
    }

    /**
     * Infer the type of the ambiguous bracketed list, based on the type of the member.
 *
     * @param memberNode Member node
     * @return Inferred type of the bracketed list
     */
    private SyntaxKind getBracketedListNodeType(STNode memberNode, boolean isTypedBindingPattern) {
        if (isEmpty(memberNode)) {
            // Nothing between the brackets: still ambiguous.
            return SyntaxKind.NONE;
        }

        if (isDefiniteTypeDesc(memberNode.kind)) {
            return SyntaxKind.TUPLE_TYPE_DESC;
        }

        switch (memberNode.kind) {
            case ASTERISK_LITERAL:
                // `[*]` can only be an inferred array length.
                return SyntaxKind.ARRAY_TYPE_DESC;
            case CAPTURE_BINDING_PATTERN:
            case LIST_BINDING_PATTERN:
            case REST_BINDING_PATTERN:
            case MAPPING_BINDING_PATTERN:
            case WILDCARD_BINDING_PATTERN:
                return SyntaxKind.LIST_BINDING_PATTERN;
            case QUALIFIED_NAME_REFERENCE:
            case REST_TYPE:
                return SyntaxKind.TUPLE_TYPE_DESC;
            case NUMERIC_LITERAL:
                // `[5]` is an array length in a typed-BP, else possibly a member access key.
                if (isTypedBindingPattern) {
                    return SyntaxKind.ARRAY_TYPE_DESC;
                }
                return SyntaxKind.ARRAY_TYPE_DESC_OR_MEMBER_ACCESS;
            case SIMPLE_NAME_REFERENCE:
            case BRACKETED_LIST:
            case MAPPING_BP_OR_MAPPING_CONSTRUCTOR:
                // These members do not disambiguate the list.
                return SyntaxKind.NONE;
            case ERROR_CONSTRUCTOR:
                if (isTypedBindingPattern) {
                    return SyntaxKind.LIST_BINDING_PATTERN;
                }
                if (isPossibleErrorBindingPattern((STErrorConstructorExpressionNode) memberNode)) {
                    return SyntaxKind.NONE;
                }
                return SyntaxKind.INDEXED_EXPRESSION;
            default:
                if (isTypedBindingPattern) {
                    return SyntaxKind.NONE;
                }
                return SyntaxKind.INDEXED_EXPRESSION;
        }
    }

    /*
     * This section tries to break the ambiguity in parsing a statement that starts with a open-bracket.
     * The ambiguity lies in between:
     * 1) Assignment that starts with list binding pattern
     * 2) Var-decl statement that starts with tuple type
     * 3) Statement that starts with list constructor, such as sync-send, etc.
     */

    /**
     * Parse any statement that starts with an open-bracket.
     *
     * @param annots Annotations attached to the statement.
 * @return Parsed node
     */
    private STNode parseStatementStartsWithOpenBracket(STNode annots, boolean possibleMappingField) {
        startContext(ParserRuleContext.ASSIGNMENT_OR_VAR_DECL_STMT);
        return parseStatementStartsWithOpenBracket(annots, true, possibleMappingField);
    }

    // Parses a bracketed list that occurs as a member of another (non-root) bracketed list.
    private STNode parseMemberBracketedList() {
        STNode annots = STNodeFactory.createEmptyNodeList();
        return parseStatementStartsWithOpenBracket(annots, false, false);
    }

    /**
     * The bracketed list at the start of a statement can be one of the following.
     * 1) List binding pattern
     * 2) Tuple type
     * 3) List constructor
     *
     * @param isRoot Is this the root of the list
     * @return Parsed node
     */
    private STNode parseStatementStartsWithOpenBracket(STNode annots, boolean isRoot,
                                                       boolean possibleMappingField) {
        startContext(ParserRuleContext.STMT_START_BRACKETED_LIST);
        STNode openBracket = parseOpenBracket();
        List<STNode> memberList = new ArrayList<>();
        // Collect members until one of them disambiguates the construct; then hand the
        // already-collected members over to the specialized parse method.
        while (!isBracketedListEnd(peek().kind)) {
            STNode member = parseStatementStartBracketedListMember();
            SyntaxKind currentNodeType = getStmtStartBracketedListType(member);
            switch (currentNodeType) {
                case TUPLE_TYPE_DESC:
                    // If the member type was figured out as a tuple-type-desc member, then
                    // parse the remaining members as tuple type members and be done with it.
                    member = parseComplexTypeDescriptor(member, ParserRuleContext.TYPE_DESC_IN_TUPLE, false);
                    member = createMemberOrRestNode(STNodeFactory.createEmptyNodeList(), member);
                    return parseAsTupleTypeDesc(annots, openBracket, memberList, member, isRoot);
                case MEMBER_TYPE_DESC:
                case REST_TYPE:
                    return parseAsTupleTypeDesc(annots, openBracket, memberList, member, isRoot);
                case LIST_BINDING_PATTERN:
                    return parseAsListBindingPattern(openBracket, memberList, member, isRoot);
                case LIST_CONSTRUCTOR:
                    return parseAsListConstructor(openBracket, memberList, member, isRoot);
                case LIST_BP_OR_LIST_CONSTRUCTOR:
                    return parseAsListBindingPatternOrListConstructor(openBracket, memberList, member, isRoot);
                case TUPLE_TYPE_DESC_OR_LIST_CONST:
                    return parseAsTupleTypeDescOrListConstructor(annots, openBracket, memberList, member,
                            isRoot);
                case NONE:
                default:
                    // Still ambiguous: keep collecting members.
                    memberList.add(member);
                    break;
            }

            STNode memberEnd = parseBracketedListMemberEnd();
            if (memberEnd == null) {
                break;
            }
            memberList.add(memberEnd);
        }

        STNode closeBracket = parseCloseBracket();
        STNode bracketedList = parseStatementStartBracketedListRhs(annots, openBracket, memberList,
                closeBracket, isRoot, possibleMappingField);
        return bracketedList;
    }

    /**
     * Parse a member of a list-binding-pattern, tuple-type-desc, or
     * list-constructor-expr, when the parent is ambiguous.
     *
     * @return Parsed node
     */
    private STNode parseStatementStartBracketedListMember() {
        List<STNode> typeDescQualifiers = new ArrayList<>();
        return parseStatementStartBracketedListMember(typeDescQualifiers);
    }

    private STNode parseStatementStartBracketedListMember(List<STNode> qualifiers) {
        parseTypeDescQualifiers(qualifiers);
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case OPEN_BRACKET_TOKEN:
                reportInvalidQualifierList(qualifiers);
                return parseMemberBracketedList();
            case IDENTIFIER_TOKEN:
                reportInvalidQualifierList(qualifiers);
                STNode identifier = parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);
                if (isWildcardBP(identifier)) {
                    // `_` is a wildcard binding pattern.
                    STNode varName = ((STSimpleNameReferenceNode) identifier).name;
                    return getWildcardBindingPattern(varName);
                }

                nextToken = peek();
                if (nextToken.kind == SyntaxKind.ELLIPSIS_TOKEN) {
                    STNode ellipsis = parseEllipsis();
                    return STNodeFactory.createRestDescriptorNode(identifier, ellipsis);
                }

                if (nextToken.kind != SyntaxKind.OPEN_BRACKET_TOKEN && isValidTypeContinuationToken(nextToken)) {
                    return parseComplexTypeDescriptor(identifier, ParserRuleContext.TYPE_DESC_IN_TUPLE, false);
                }
                return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, identifier, false, true);
            case OPEN_BRACE_TOKEN:
                reportInvalidQualifierList(qualifiers);
                return parseMappingBindingPatterOrMappingConstructor();
            case ERROR_KEYWORD:
                reportInvalidQualifierList(qualifiers);
                STToken nextNextToken = getNextNextToken();
                if (nextNextToken.kind == SyntaxKind.OPEN_PAREN_TOKEN ||
                        nextNextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {
                    return parseErrorBindingPatternOrErrorConstructor();
                }
                // `error` alone is the error type-descriptor.
                return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
            case ELLIPSIS_TOKEN:
                reportInvalidQualifierList(qualifiers);
                return parseRestBindingOrSpreadMember();
            case XML_KEYWORD:
            case STRING_KEYWORD:
                reportInvalidQualifierList(qualifiers);
                // A following backtick means a template expression, else a type-descriptor.
                if (getNextNextToken().kind == SyntaxKind.BACKTICK_TOKEN) {
                    return parseExpression(false);
                }
                return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
            case TABLE_KEYWORD:
            case STREAM_KEYWORD:
                reportInvalidQualifierList(qualifiers);
                // A following `<` means a parameterized type, else a constructor expression.
                if (getNextNextToken().kind == SyntaxKind.LT_TOKEN) {
                    return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
                }
                return parseExpression(false);
            case OPEN_PAREN_TOKEN:
                return parseTypeDescOrExpr(qualifiers);
            case FUNCTION_KEYWORD:
                return parseAnonFuncExprOrFuncTypeDesc(qualifiers);
            case AT_TOKEN:
                return parseTupleMember();
            default:
                if (isValidExpressionStart(nextToken.kind, 1)) {
                    reportInvalidQualifierList(qualifiers);
                    return parseExpression(false);
                }

                if (isTypeStartingToken(nextToken.kind)) {
                    return parseTypeDescriptor(qualifiers, ParserRuleContext.TYPE_DESC_IN_TUPLE);
                }

                recover(peek(), ParserRuleContext.STMT_START_BRACKETED_LIST_MEMBER);
                return parseStatementStartBracketedListMember(qualifiers);
        }
    }

    // Parses `...x` either as a rest-binding-pattern (simple name) or a spread member (expr).
    private STNode parseRestBindingOrSpreadMember() {
        STNode ellipsis = parseEllipsis();
        STNode expr = parseExpression();
        if (expr.kind == SyntaxKind.SIMPLE_NAME_REFERENCE) {
            return STNodeFactory.createRestBindingPatternNode(ellipsis, expr);
        } else {
            return STNodeFactory.createSpreadMemberNode(ellipsis, expr);
        }
    }

    // Continues parsing a list whose members so far only narrowed it down to
    // tuple-type-desc vs list-constructor.
    private STNode parseAsTupleTypeDescOrListConstructor(STNode annots, STNode openBracket,
                                                         List<STNode> memberList, STNode member,
                                                         boolean isRoot) {
        memberList.add(member);
        STNode memberEnd = parseBracketedListMemberEnd();

        STNode tupleTypeDescOrListCons;
        if (memberEnd == null) {
            STNode closeBracket = parseCloseBracket();
            tupleTypeDescOrListCons =
                    parseTupleTypeDescOrListConstructorRhs(openBracket, memberList, closeBracket, isRoot);
        } else {
            memberList.add(memberEnd);
            tupleTypeDescOrListCons = parseTupleTypeDescOrListConstructor(annots, openBracket, memberList,
                    isRoot);
        }

        return tupleTypeDescOrListCons;
    }

    /**
     * Parse tuple type desc or list constructor.
     *
     * @return Parsed node
     */
    private STNode parseTupleTypeDescOrListConstructor(STNode annots) {
        startContext(ParserRuleContext.BRACKETED_LIST);
        STNode openBracket = parseOpenBracket();
        List<STNode> memberList = new ArrayList<>();
        return parseTupleTypeDescOrListConstructor(annots, openBracket, memberList, false);
    }

    private STNode parseTupleTypeDescOrListConstructor(STNode annots, STNode openBracket,
                                                       List<STNode> memberList, boolean isRoot) {
        STToken nextToken = peek();
        while (!isBracketedListEnd(nextToken.kind)) {
            STNode member = parseTupleTypeDescOrListConstructorMember(annots);
            SyntaxKind currentNodeType = getParsingNodeTypeOfTupleTypeOrListCons(member);
            switch (currentNodeType) {
                case LIST_CONSTRUCTOR:
                    return parseAsListConstructor(openBracket, memberList, member, isRoot);
                case REST_TYPE:
                case MEMBER_TYPE_DESC:
                    return parseAsTupleTypeDesc(annots, openBracket, memberList, member, isRoot);
                case TUPLE_TYPE_DESC:
                    member = parseComplexTypeDescriptor(member, ParserRuleContext.TYPE_DESC_IN_TUPLE, false);
                    member = createMemberOrRestNode(STNodeFactory.createEmptyNodeList(), member);
                    return parseAsTupleTypeDesc(annots, openBracket, memberList, member, isRoot);
                case TUPLE_TYPE_DESC_OR_LIST_CONST:
                default:
                    // Still ambiguous: keep collecting members.
                    memberList.add(member);
                    break;
            }

            STNode memberEnd = parseBracketedListMemberEnd();
            if (memberEnd == null) {
                break;
            }
            memberList.add(memberEnd);
            nextToken = peek();
        }

        STNode closeBracket = parseCloseBracket();
        return parseTupleTypeDescOrListConstructorRhs(openBracket, memberList, closeBracket, isRoot);
    }

    private STNode parseTupleTypeDescOrListConstructorMember(STNode annots) {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case OPEN_BRACKET_TOKEN:
                // Member is itself an ambiguous bracketed list.
                return parseTupleTypeDescOrListConstructor(annots);
            case IDENTIFIER_TOKEN:
                STNode identifier = parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);
                if (peek().kind == SyntaxKind.ELLIPSIS_TOKEN) {
                    STNode ellipsis = parseEllipsis();
                    return STNodeFactory.createRestDescriptorNode(identifier, ellipsis);
                }
                return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, identifier, false, false);
            case OPEN_BRACE_TOKEN:
                return parseMappingConstructorExpr();
            case ERROR_KEYWORD:
                STToken nextNextToken = getNextNextToken();
                if (nextNextToken.kind == SyntaxKind.OPEN_PAREN_TOKEN ||
                        nextNextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {
                    return parseErrorConstructorExpr(false);
                }
                // `error` alone is the error type-descriptor.
                return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
            case XML_KEYWORD:
            case STRING_KEYWORD:
                if (getNextNextToken().kind == SyntaxKind.BACKTICK_TOKEN) {
                    return parseExpression(false);
                }
                return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
            case TABLE_KEYWORD:
            case STREAM_KEYWORD:
                if (getNextNextToken().kind == SyntaxKind.LT_TOKEN) {
                    return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
                }
                return parseExpression(false);
            case OPEN_PAREN_TOKEN:
                return parseTypeDescOrExpr();
            case AT_TOKEN:
                return parseTupleMember();
            default:
                if (isValidExpressionStart(nextToken.kind, 1)) {
                    return parseExpression(false);
                }

                if (isTypeStartingToken(nextToken.kind)) {
                    return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
                }

                recover(peek(), ParserRuleContext.TUPLE_TYPE_DESC_OR_LIST_CONST_MEMBER);
                return parseTupleTypeDescOrListConstructorMember(annots);
        }
    }

    // Member-kind inference is the same as for a statement-start bracketed list.
    private SyntaxKind getParsingNodeTypeOfTupleTypeOrListCons(STNode memberNode) {
        return getStmtStartBracketedListType(memberNode);
    }

    // Decides, from the token after the close-bracket, whether the collected members form a
    // tuple-type-desc or a list-constructor; returns the ambiguous node when still unresolved.
    private STNode parseTupleTypeDescOrListConstructorRhs(STNode openBracket, List<STNode> members,
                                                          STNode closeBracket, boolean isRoot) {
        STNode tupleTypeOrListConst;
        switch (peek().kind) {
            case COMMA_TOKEN:
            case CLOSE_BRACE_TOKEN:
            case CLOSE_BRACKET_TOKEN:
            case PIPE_TOKEN:
            case BITWISE_AND_TOKEN:
                if (!isRoot) {
                    endContext();
                    return new STAmbiguousCollectionNode(SyntaxKind.TUPLE_TYPE_DESC_OR_LIST_CONST,
                            openBracket, members, closeBracket);
                }
                // fall through
            default:
                if (isValidExprRhsStart(peek().kind, closeBracket.kind) ||
                        (isRoot && peek().kind == SyntaxKind.EQUAL_TOKEN)) {
                    // An expression can follow: this is a list-constructor.
                    members = getExpressionList(members, false);
                    STNode memberExpressions = STNodeFactory.createNodeList(members);
                    tupleTypeOrListConst = STNodeFactory.createListConstructorExpressionNode(openBracket,
                            memberExpressions, closeBracket);
                    break;
                }

                // Otherwise treat it as a tuple-type-desc.
                STNode memberTypeDescs = STNodeFactory.createNodeList(getTupleMemberList(members));
                STNode tupleTypeDesc =
                        STNodeFactory.createTupleTypeDescriptorNode(openBracket, memberTypeDescs, closeBracket);
                tupleTypeOrListConst =
                        parseComplexTypeDescriptor(tupleTypeDesc, ParserRuleContext.TYPE_DESC_IN_TUPLE, false);
        }

        endContext();

        if (!isRoot) {
            return tupleTypeOrListConst;
        }

        STNode annots = STNodeFactory.createEmptyNodeList();
        return parseStmtStartsWithTupleTypeOrExprRhs(annots, tupleTypeOrListConst, isRoot);
    }
class member, object member or object member descriptor.
     * </p>
     * <code>
     * class-member := object-field | method-defn | object-type-inclusion
     * <br/>
     * object-member := object-field | method-defn
     * <br/>
     * object-member-descriptor := object-field-descriptor | method-decl | object-type-inclusion
     * </code>
     *
     * @param context Parsing context of the object member
     * @return Parsed node
     */
    private STNode parseObjectMember(ParserRuleContext context) {
        STNode metadata;
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case EOF_TOKEN:
            case CLOSE_BRACE_TOKEN:
                // End of the enclosing object/class body: no more members.
                return null;
            case ASTERISK_TOKEN:
            case PUBLIC_KEYWORD:
            case PRIVATE_KEYWORD:
            case FINAL_KEYWORD:
            case REMOTE_KEYWORD:
            case FUNCTION_KEYWORD:
            case TRANSACTIONAL_KEYWORD:
            case ISOLATED_KEYWORD:
            case RESOURCE_KEYWORD:
                // Member starts directly with a qualifier/keyword: no metadata present.
                metadata = STNodeFactory.createEmptyNode();
                break;
            case DOCUMENTATION_STRING:
            case AT_TOKEN:
                metadata = parseMetaData();
                break;
            default:
                if (isTypeStartingToken(nextToken.kind)) {
                    metadata = STNodeFactory.createEmptyNode();
                    break;
                }

                ParserRuleContext recoveryCtx;
                if (context == ParserRuleContext.OBJECT_CONSTRUCTOR_MEMBER) {
                    recoveryCtx = ParserRuleContext.OBJECT_CONSTRUCTOR_MEMBER_START;
                } else {
                    recoveryCtx = ParserRuleContext.CLASS_MEMBER_OR_OBJECT_MEMBER_START;
                }

                Solution solution = recover(peek(), recoveryCtx);
                if (solution.action == Action.KEEP) {
                    // Recovery decided to keep the current token: proceed without metadata.
                    metadata = STNodeFactory.createEmptyNode();
                    break;
                }
                return parseObjectMember(context);
        }

        return parseObjectMemberWithoutMeta(metadata, context);
    }
class member, object member or object member descriptor.
     * </p>
     * <code>
     * class-member := object-field | method-defn | object-type-inclusion
     * <br/>
     * object-member := object-field | method-defn
     * <br/>
     * object-member-descriptor := object-field-descriptor | method-decl | object-type-inclusion
     * </code>
     *
     * @param context Parsing context of the object member
     * @return Parsed node
     */
    private STNode parseObjectMember(ParserRuleContext context) {
        STNode metadata;
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case EOF_TOKEN:
            case CLOSE_BRACE_TOKEN:
                // End of the enclosing object/class body: no more members.
                return null;
            case ASTERISK_TOKEN:
            case PUBLIC_KEYWORD:
            case PRIVATE_KEYWORD:
            case FINAL_KEYWORD:
            case REMOTE_KEYWORD:
            case FUNCTION_KEYWORD:
            case TRANSACTIONAL_KEYWORD:
            case ISOLATED_KEYWORD:
            case RESOURCE_KEYWORD:
                // Member starts directly with a qualifier/keyword: no metadata present.
                metadata = STNodeFactory.createEmptyNode();
                break;
            case DOCUMENTATION_STRING:
            case AT_TOKEN:
                metadata = parseMetaData();
                break;
            default:
                if (isTypeStartingToken(nextToken.kind)) {
                    metadata = STNodeFactory.createEmptyNode();
                    break;
                }

                ParserRuleContext recoveryCtx;
                if (context == ParserRuleContext.OBJECT_CONSTRUCTOR_MEMBER) {
                    recoveryCtx = ParserRuleContext.OBJECT_CONSTRUCTOR_MEMBER_START;
                } else {
                    recoveryCtx = ParserRuleContext.CLASS_MEMBER_OR_OBJECT_MEMBER_START;
                }

                Solution solution = recover(peek(), recoveryCtx);
                if (solution.action == Action.KEEP) {
                    // Recovery decided to keep the current token: proceed without metadata.
                    metadata = STNodeFactory.createEmptyNode();
                    break;
                }
                return parseObjectMember(context);
        }

        return parseObjectMemberWithoutMeta(metadata, context);
    }
I did change this in https://github.com/apache/beam/pull/14849/files, but it seems this portion was not removed during the cherry-pick.
/**
 * Translates and runs the pipeline on a local Twister2 deployment for testing purposes.
 *
 * <p>Mirrors {@code run(Pipeline)}, but forces local submission, performs a test-oriented
 * system setup via {@code setupSystemTest}, and fails fast by throwing when the submitted
 * job reports a FAILED state.
 *
 * @param pipeline the Beam pipeline to translate and execute
 * @return the pipeline result wrapping the Twister2 job state
 * @throws RuntimeException if the job finished in a FAILED state
 */
public PipelineResult runTest(Pipeline pipeline) {
  Twister2PipelineExecutionEnvironment env = new Twister2PipelineExecutionEnvironment(options);
  LOG.info("Translating pipeline to Twister2 program.");
  pipeline.replaceAll(getDefaultOverrides());
  // NOTE(review): this experiment toggling was reportedly meant to be removed upstream
  // (apache/beam PR 14849) but survived a cherry-pick; kept here to preserve current
  // behavior - confirm against the upstream runner before removing.
  if (!ExperimentalOptions.hasExperiment(pipeline.getOptions(), "use_sdf_read")) {
    ExperimentalOptions.addExperiment(
        pipeline.getOptions().as(ExperimentalOptions.class), "beam_fn_api_use_deprecated_read");
    ExperimentalOptions.addExperiment(
        pipeline.getOptions().as(ExperimentalOptions.class), "use_deprecated_read");
  }
  SplittableParDo.convertReadBasedSplittableDoFnsToPrimitiveReadsIfNecessary(pipeline);
  env.translate(pipeline);
  setupSystemTest(options);

  // Parameterized types instead of raw Map/HashMap (raw types defeat compile-time checks).
  Map<String, Object> configMap = new HashMap<>();
  configMap.put(SIDEINPUTS, extractNames(env.getSideInputs()));
  configMap.put(LEAVES, extractNames(env.getLeaves()));
  configMap.put(GRAPH, env.getTSetGraph());
  configMap.put("twister2.network.buffer.size", 32000);
  configMap.put("twister2.network.sendBuffer.count", options.getParallelism());
  Config config = ResourceAllocator.loadConfig(configMap);

  JobConfig jobConfig = new JobConfig();
  int workers = options.getParallelism();
  Twister2Job twister2Job =
      Twister2Job.newBuilder()
          .setJobName(options.getJobName())
          .setWorkerClass(BeamBatchWorker.class)
          .addComputeResource(options.getWorkerCPUs(), options.getRamMegaBytes(), workers)
          .setConfig(jobConfig)
          .build();
  // Test mode always submits locally (never to a remote deployment).
  Twister2JobState jobState = LocalSubmitter.submitJob(twister2Job, config);

  Twister2PipelineResult result = new Twister2PipelineResult(jobState);
  if (result.state == PipelineResult.State.FAILED) {
    throw new RuntimeException("Pipeline execution failed", jobState.getCause());
  }
  return result;
}
pipeline.getOptions().as(ExperimentalOptions.class), "beam_fn_api_use_deprecated_read");
/**
 * Translates and runs the pipeline on a local Twister2 deployment for testing purposes.
 *
 * <p>Mirrors {@code run(Pipeline)}, but forces local submission, performs a test-oriented
 * system setup via {@code setupSystemTest}, and fails fast by throwing when the submitted
 * job reports a FAILED state.
 *
 * @param pipeline the Beam pipeline to translate and execute
 * @return the pipeline result wrapping the Twister2 job state
 * @throws RuntimeException if the job finished in a FAILED state
 */
public PipelineResult runTest(Pipeline pipeline) {
  Twister2PipelineExecutionEnvironment env = new Twister2PipelineExecutionEnvironment(options);
  LOG.info("Translating pipeline to Twister2 program.");
  pipeline.replaceAll(getDefaultOverrides());
  // NOTE(review): this experiment toggling was reportedly meant to be removed upstream
  // (apache/beam PR 14849) but survived a cherry-pick; kept here to preserve current
  // behavior - confirm against the upstream runner before removing.
  if (!ExperimentalOptions.hasExperiment(pipeline.getOptions(), "use_sdf_read")) {
    ExperimentalOptions.addExperiment(
        pipeline.getOptions().as(ExperimentalOptions.class), "beam_fn_api_use_deprecated_read");
    ExperimentalOptions.addExperiment(
        pipeline.getOptions().as(ExperimentalOptions.class), "use_deprecated_read");
  }
  SplittableParDo.convertReadBasedSplittableDoFnsToPrimitiveReadsIfNecessary(pipeline);
  env.translate(pipeline);
  setupSystemTest(options);

  Map configMap = new HashMap();
  configMap.put(SIDEINPUTS, extractNames(env.getSideInputs()));
  configMap.put(LEAVES, extractNames(env.getLeaves()));
  configMap.put(GRAPH, env.getTSetGraph());
  configMap.put("twister2.network.buffer.size", 32000);
  configMap.put("twister2.network.sendBuffer.count", options.getParallelism());
  Config config = ResourceAllocator.loadConfig(configMap);

  JobConfig jobConfig = new JobConfig();
  int workers = options.getParallelism();
  Twister2Job twister2Job =
      Twister2Job.newBuilder()
          .setJobName(options.getJobName())
          .setWorkerClass(BeamBatchWorker.class)
          .addComputeResource(options.getWorkerCPUs(), options.getRamMegaBytes(), workers)
          .setConfig(jobConfig)
          .build();
  // Test mode always submits locally (never to a remote deployment).
  Twister2JobState jobState = LocalSubmitter.submitJob(twister2Job, config);

  Twister2PipelineResult result = new Twister2PipelineResult(jobState);
  if (result.state == PipelineResult.State.FAILED) {
    throw new RuntimeException("Pipeline execution failed", jobState.getCause());
  }
  return result;
}
class Twister2Runner extends PipelineRunner<PipelineResult> { private static final Logger LOG = Logger.getLogger(Twister2Runner.class.getName()); private static final String SIDEINPUTS = "sideInputs"; private static final String LEAVES = "leaves"; private static final String GRAPH = "graph"; /** Provided options. */ private final Twister2PipelineOptions options; protected Twister2Runner(Twister2PipelineOptions options) { this.options = options; } public static Twister2Runner fromOptions(PipelineOptions options) { return new Twister2Runner( PipelineOptionsValidator.validate(Twister2PipelineOptions.class, options)); } @Override public PipelineResult run(Pipeline pipeline) { Twister2PipelineExecutionEnvironment env = new Twister2PipelineExecutionEnvironment(options); LOG.info("Translating pipeline to Twister2 program."); pipeline.replaceAll(getDefaultOverrides()); if (!ExperimentalOptions.hasExperiment(pipeline.getOptions(), "beam_fn_api")) { SplittableParDo.convertReadBasedSplittableDoFnsToPrimitiveReadsIfNecessary(pipeline); } env.translate(pipeline); setupSystem(options); Map configMap = new HashMap(); JobConfig jobConfig = new JobConfig(); if (isLocalMode(options)) { options.setParallelism(1); configMap.put(SIDEINPUTS, extractNames(env.getSideInputs())); configMap.put(LEAVES, extractNames(env.getLeaves())); configMap.put(GRAPH, env.getTSetGraph()); configMap.put("twister2.network.buffer.size", 32000); configMap.put("twister2.network.sendBuffer.count", options.getParallelism()); LOG.warning("Twister2 Local Mode currently only supports single worker"); } else { jobConfig.put(SIDEINPUTS, extractNames(env.getSideInputs())); jobConfig.put(LEAVES, extractNames(env.getLeaves())); jobConfig.put(GRAPH, env.getTSetGraph()); } Config config = ResourceAllocator.loadConfig(configMap); int workers = options.getParallelism(); Twister2Job twister2Job = Twister2Job.newBuilder() .setJobName(options.getJobName()) .setWorkerClass(BeamBatchWorker.class) 
.addComputeResource(options.getWorkerCPUs(), options.getRamMegaBytes(), workers) .setConfig(jobConfig) .build(); Twister2JobState jobState; if (isLocalMode(options)) { jobState = LocalSubmitter.submitJob(twister2Job, config); } else { jobState = Twister2Submitter.submitJob(twister2Job, config); } Twister2PipelineResult result = new Twister2PipelineResult(jobState); return result; } /** Check if the Runner is set to use Twister local mode or pointing to a deployment. */ private boolean isLocalMode(Twister2PipelineOptions options) { if (options.getTwister2Home() == null || "".equals(options.getTwister2Home())) { return true; } else { return false; } } private void setupSystem(Twister2PipelineOptions options) { prepareFilesToStage(options); zipFilesToStage(options); System.setProperty("cluster_type", options.getClusterType()); System.setProperty("job_file", options.getJobFileZip()); System.setProperty("job_type", options.getJobType()); if (isLocalMode(options)) { System.setProperty("twister2_home", System.getProperty("java.io.tmpdir")); System.setProperty("config_dir", System.getProperty("java.io.tmpdir") + "/conf/"); } else { System.setProperty("twister2_home", options.getTwister2Home()); System.setProperty("config_dir", options.getTwister2Home() + "/conf/"); File cDir = new File(System.getProperty("config_dir"), options.getClusterType()); String[] filesList = new String[] { "core.yaml", "network.yaml", "data.yaml", "resource.yaml", "task.yaml", }; for (String file : filesList) { File toCheck = new File(cDir, file); if (!toCheck.exists()) { throw new Twister2RuntimeException( "Couldn't find " + file + " in config directory specified."); } } FileInputStream fis = null; try { fis = new FileInputStream(new File(cDir, "logger.properties")); LogManager.getLogManager().readConfiguration(fis); fis.close(); } catch (IOException e) { LOG.warning("Couldn't load logging configuration"); } finally { if (fis != null) { try { fis.close(); } catch (IOException e) { 
LOG.info(e.getMessage()); } } } } } private void setupSystemTest(Twister2PipelineOptions options) { prepareFilesToStage(options); zipFilesToStage(options); System.setProperty("cluster_type", options.getClusterType()); System.setProperty("twister2_home", System.getProperty("java.io.tmpdir")); System.setProperty("job_file", options.getJobFileZip()); System.setProperty("job_type", options.getJobType()); } private Set<String> extractNames(Set<TSet> leaves) { Set<String> results = new HashSet<>(); for (TSet leaf : leaves) { results.add(leaf.getId()); } return results; } private Map<String, String> extractNames(Map<String, BatchTSet<?>> sideInputs) { Map<String, String> results = new LinkedHashMap<>(); for (Map.Entry<String, BatchTSet<?>> entry : sideInputs.entrySet()) { results.put(entry.getKey(), entry.getValue().getId()); } return results; } /** * Classpath contains non jar files (eg. directories with .class files or empty directories) will * cause exception in running log. */ private void prepareFilesToStage(Twister2PipelineOptions options) { PipelineResources.prepareFilesForStaging(options); } /** * creates a single zip file from all the jar files that are listed as files to stage in options. 
* * @param options */ private void zipFilesToStage(Twister2PipelineOptions options) { File zipFile = null; Set<String> jarSet = new HashSet<>(); List<String> filesToStage = options.getFilesToStage(); List<String> trimmed = new ArrayList<>(); for (String file : filesToStage) { if (!file.contains("/org/twister2")) { trimmed.add(file); } } FileInputStream fis = null; try { zipFile = File.createTempFile("twister2-", ".zip"); FileOutputStream fos = new FileOutputStream(zipFile); ZipOutputStream zipOut = new ZipOutputStream(fos); zipOut.putNextEntry(new ZipEntry("lib/")); for (String srcFile : trimmed) { File fileToZip = new File(srcFile); if (!jarSet.contains(fileToZip.getName())) { jarSet.add(fileToZip.getName()); } else { continue; } fis = new FileInputStream(fileToZip); ZipEntry zipEntry = new ZipEntry("lib/" + fileToZip.getName()); zipOut.putNextEntry(zipEntry); byte[] bytes = new byte[1024]; int length; while ((length = fis.read(bytes)) >= 0) { zipOut.write(bytes, 0, length); } fis.close(); } zipOut.close(); fos.close(); zipFile.deleteOnExit(); } catch (FileNotFoundException e) { LOG.info(e.getMessage()); } catch (IOException e) { LOG.info(e.getMessage()); } finally { if (fis != null) { try { fis.close(); } catch (IOException e) { LOG.info(e.getMessage()); } } } if (zipFile != null) { options.setJobFileZip(zipFile.getPath()); } } private static List<PTransformOverride> getDefaultOverrides() { List<PTransformOverride> overrides = ImmutableList.<PTransformOverride>builder() .add( PTransformOverride.of( PTransformMatchers.splittableParDo(), new SplittableParDo.OverrideFactory())) .add( PTransformOverride.of( PTransformMatchers.urnEqualTo( PTransformTranslation.SPLITTABLE_PROCESS_KEYED_URN), new SplittableParDoNaiveBounded.OverrideFactory())) .build(); return overrides; } }
class Twister2Runner extends PipelineRunner<PipelineResult> { private static final Logger LOG = Logger.getLogger(Twister2Runner.class.getName()); private static final String SIDEINPUTS = "sideInputs"; private static final String LEAVES = "leaves"; private static final String GRAPH = "graph"; /** Provided options. */ private final Twister2PipelineOptions options; protected Twister2Runner(Twister2PipelineOptions options) { this.options = options; } public static Twister2Runner fromOptions(PipelineOptions options) { return new Twister2Runner( PipelineOptionsValidator.validate(Twister2PipelineOptions.class, options)); } @Override public PipelineResult run(Pipeline pipeline) { Twister2PipelineExecutionEnvironment env = new Twister2PipelineExecutionEnvironment(options); LOG.info("Translating pipeline to Twister2 program."); pipeline.replaceAll(getDefaultOverrides()); if (!ExperimentalOptions.hasExperiment(pipeline.getOptions(), "beam_fn_api")) { SplittableParDo.convertReadBasedSplittableDoFnsToPrimitiveReadsIfNecessary(pipeline); } env.translate(pipeline); setupSystem(options); Map configMap = new HashMap(); JobConfig jobConfig = new JobConfig(); if (isLocalMode(options)) { options.setParallelism(1); configMap.put(SIDEINPUTS, extractNames(env.getSideInputs())); configMap.put(LEAVES, extractNames(env.getLeaves())); configMap.put(GRAPH, env.getTSetGraph()); configMap.put("twister2.network.buffer.size", 32000); configMap.put("twister2.network.sendBuffer.count", options.getParallelism()); LOG.warning("Twister2 Local Mode currently only supports single worker"); } else { jobConfig.put(SIDEINPUTS, extractNames(env.getSideInputs())); jobConfig.put(LEAVES, extractNames(env.getLeaves())); jobConfig.put(GRAPH, env.getTSetGraph()); } Config config = ResourceAllocator.loadConfig(configMap); int workers = options.getParallelism(); Twister2Job twister2Job = Twister2Job.newBuilder() .setJobName(options.getJobName()) .setWorkerClass(BeamBatchWorker.class) 
.addComputeResource(options.getWorkerCPUs(), options.getRamMegaBytes(), workers) .setConfig(jobConfig) .build(); Twister2JobState jobState; if (isLocalMode(options)) { jobState = LocalSubmitter.submitJob(twister2Job, config); } else { jobState = Twister2Submitter.submitJob(twister2Job, config); } Twister2PipelineResult result = new Twister2PipelineResult(jobState); return result; } /** Check if the Runner is set to use Twister local mode or pointing to a deployment. */ private boolean isLocalMode(Twister2PipelineOptions options) { if (options.getTwister2Home() == null || "".equals(options.getTwister2Home())) { return true; } else { return false; } } private void setupSystem(Twister2PipelineOptions options) { prepareFilesToStage(options); zipFilesToStage(options); System.setProperty("cluster_type", options.getClusterType()); System.setProperty("job_file", options.getJobFileZip()); System.setProperty("job_type", options.getJobType()); if (isLocalMode(options)) { System.setProperty("twister2_home", System.getProperty("java.io.tmpdir")); System.setProperty("config_dir", System.getProperty("java.io.tmpdir") + "/conf/"); } else { System.setProperty("twister2_home", options.getTwister2Home()); System.setProperty("config_dir", options.getTwister2Home() + "/conf/"); File cDir = new File(System.getProperty("config_dir"), options.getClusterType()); String[] filesList = new String[] { "core.yaml", "network.yaml", "data.yaml", "resource.yaml", "task.yaml", }; for (String file : filesList) { File toCheck = new File(cDir, file); if (!toCheck.exists()) { throw new Twister2RuntimeException( "Couldn't find " + file + " in config directory specified."); } } FileInputStream fis = null; try { fis = new FileInputStream(new File(cDir, "logger.properties")); LogManager.getLogManager().readConfiguration(fis); fis.close(); } catch (IOException e) { LOG.warning("Couldn't load logging configuration"); } finally { if (fis != null) { try { fis.close(); } catch (IOException e) { 
LOG.info(e.getMessage()); } } } } } private void setupSystemTest(Twister2PipelineOptions options) { prepareFilesToStage(options); zipFilesToStage(options); System.setProperty("cluster_type", options.getClusterType()); System.setProperty("twister2_home", System.getProperty("java.io.tmpdir")); System.setProperty("job_file", options.getJobFileZip()); System.setProperty("job_type", options.getJobType()); } private Set<String> extractNames(Set<TSet> leaves) { Set<String> results = new HashSet<>(); for (TSet leaf : leaves) { results.add(leaf.getId()); } return results; } private Map<String, String> extractNames(Map<String, BatchTSet<?>> sideInputs) { Map<String, String> results = new LinkedHashMap<>(); for (Map.Entry<String, BatchTSet<?>> entry : sideInputs.entrySet()) { results.put(entry.getKey(), entry.getValue().getId()); } return results; } /** * Classpath contains non jar files (eg. directories with .class files or empty directories) will * cause exception in running log. */ private void prepareFilesToStage(Twister2PipelineOptions options) { PipelineResources.prepareFilesForStaging(options); } /** * creates a single zip file from all the jar files that are listed as files to stage in options. 
* * @param options */ private void zipFilesToStage(Twister2PipelineOptions options) { File zipFile = null; Set<String> jarSet = new HashSet<>(); List<String> filesToStage = options.getFilesToStage(); List<String> trimmed = new ArrayList<>(); for (String file : filesToStage) { if (!file.contains("/org/twister2")) { trimmed.add(file); } } FileInputStream fis = null; try { zipFile = File.createTempFile("twister2-", ".zip"); FileOutputStream fos = new FileOutputStream(zipFile); ZipOutputStream zipOut = new ZipOutputStream(fos); zipOut.putNextEntry(new ZipEntry("lib/")); for (String srcFile : trimmed) { File fileToZip = new File(srcFile); if (!jarSet.contains(fileToZip.getName())) { jarSet.add(fileToZip.getName()); } else { continue; } fis = new FileInputStream(fileToZip); ZipEntry zipEntry = new ZipEntry("lib/" + fileToZip.getName()); zipOut.putNextEntry(zipEntry); byte[] bytes = new byte[1024]; int length; while ((length = fis.read(bytes)) >= 0) { zipOut.write(bytes, 0, length); } fis.close(); } zipOut.close(); fos.close(); zipFile.deleteOnExit(); } catch (FileNotFoundException e) { LOG.info(e.getMessage()); } catch (IOException e) { LOG.info(e.getMessage()); } finally { if (fis != null) { try { fis.close(); } catch (IOException e) { LOG.info(e.getMessage()); } } } if (zipFile != null) { options.setJobFileZip(zipFile.getPath()); } } private static List<PTransformOverride> getDefaultOverrides() { List<PTransformOverride> overrides = ImmutableList.<PTransformOverride>builder() .add( PTransformOverride.of( PTransformMatchers.splittableParDo(), new SplittableParDo.OverrideFactory())) .add( PTransformOverride.of( PTransformMatchers.urnEqualTo( PTransformTranslation.SPLITTABLE_PROCESS_KEYED_URN), new SplittableParDoNaiveBounded.OverrideFactory())) .build(); return overrides; } }
Why do we traverse the children here? Can't we use the specific APIs instead?
public Optional<DocumentSymbol> transform(ClassDefinitionNode classDefinitionNode) { String name = classDefinitionNode.className().text(); SymbolKind symbolKind = SymbolKind.Class; Range range = DocumentSymbolUtil.generateNodeRange(classDefinitionNode); Optional<MetadataNode> metadata = classDefinitionNode.metadata(); boolean isDeprecated = metadata.isPresent() && DocumentSymbolUtil.isDeprecated(metadata.get()); List<DocumentSymbol> children = transformChildren(classDefinitionNode.children()); return Optional.ofNullable(createDocumentSymbol(name, symbolKind, null, range, range, isDeprecated, children, this.context)); }
Optional<MetadataNode> metadata = classDefinitionNode.metadata();
public Optional<DocumentSymbol> transform(ClassDefinitionNode classDefinitionNode) { String name = classDefinitionNode.className().text(); SymbolKind symbolKind = SymbolKind.Class; Range range = DocumentSymbolUtil.generateNodeRange(classDefinitionNode); Optional<MetadataNode> metadata = classDefinitionNode.metadata(); boolean isDeprecated = metadata.isPresent() && DocumentSymbolUtil.isDeprecated(metadata.get()); List<DocumentSymbol> children = transformMembers(classDefinitionNode.members()); return Optional.ofNullable(createDocumentSymbol(name, symbolKind, null, range, range, isDeprecated, children, this.context)); }
class DocumentSymbolResolver extends NodeTransformer<Optional<DocumentSymbol>> { private List<DocumentSymbol> documentSymbolStore; private DocumentSymbolContext context; DocumentSymbolResolver(DocumentSymbolContext context) { this.context = context; documentSymbolStore = new ArrayList<>(); } public List<DocumentSymbol> getDocumentSymbolStore() { return this.documentSymbolStore; } @Override protected Optional<DocumentSymbol> transformSyntaxNode(Node node) { return Optional.empty(); } @Override public Optional<DocumentSymbol> transform(ModulePartNode modulePartNode) { Path filePath = context.filePath(); Optional<Module> module = context.workspace().module(filePath); String name; if (module.isPresent()) { if (module.get().isDefaultModule()) { name = "Main"; } else { name = module.get().moduleName().moduleNamePart(); } } else { name = "Module"; } List<DocumentSymbol> children = transformChildren(modulePartNode.children()); Range range = DocumentSymbolUtil.generateNodeRange(modulePartNode); return Optional.ofNullable(createDocumentSymbol(name, SymbolKind.Module, null, range, range, false, children, this.context)); } @Override public Optional<DocumentSymbol> transform(FunctionDefinitionNode functionDefinitionNode) { String name; Range range = DocumentSymbolUtil.generateNodeRange(functionDefinitionNode); SymbolKind symbolKind; Optional<MetadataNode> metadata = functionDefinitionNode.metadata(); boolean isDeprecated = metadata.isPresent() && DocumentSymbolUtil.isDeprecated(metadata.get()); switch (functionDefinitionNode.kind()) { case FUNCTION_DEFINITION: name = functionDefinitionNode.functionName().text(); symbolKind = SymbolKind.Function; break; case OBJECT_METHOD_DEFINITION: name = functionDefinitionNode.functionName().text(); symbolKind = SymbolKind.Method; break; case RESOURCE_ACCESSOR_DEFINITION: StringBuilder resourceFuncName = new StringBuilder(functionDefinitionNode.functionName().text()); for (Node child : functionDefinitionNode.children()) { if (child.kind() == 
SyntaxKind.IDENTIFIER_TOKEN && !((IdentifierToken) child).text().equals(functionDefinitionNode.functionName().text())) { resourceFuncName.append(":").append(((IdentifierToken) child).text()); break; } } name = resourceFuncName.toString(); symbolKind = SymbolKind.Function; break; default: return Optional.empty(); } return Optional.ofNullable(createDocumentSymbol(name, symbolKind, null, range, range, isDeprecated, Collections.emptyList(), this.context)); } @Override @Override public Optional<DocumentSymbol> transform(ServiceDeclarationNode serviceDeclarationNode) { String name = "service " + serviceDeclarationNode.absoluteResourcePath().stream() .map(Node::toString).collect(Collectors.joining("")) + " on " + serviceDeclarationNode.expressions().stream() .map(Node::toString).collect(Collectors.joining("")); SymbolKind symbolKind = SymbolKind.Interface; Range range = DocumentSymbolUtil.generateNodeRange(serviceDeclarationNode); Optional<MetadataNode> metadata = serviceDeclarationNode.metadata(); boolean isDeprecated = metadata.isPresent() && DocumentSymbolUtil.isDeprecated(metadata.get()); List<DocumentSymbol> children = transformChildren(serviceDeclarationNode.children()); return Optional.ofNullable(createDocumentSymbol(name, symbolKind, null, range, range, isDeprecated, children, this.context)); } /** * Provided a ChildNodes list generate the corresponding document symbols. * * @param nodes {@link ChildNodeList} Child nodes list. * @return {@link List<DocumentSymbol>} Generated list of document symbols. */ private List<DocumentSymbol> transformChildren(ChildNodeList nodes) { List<DocumentSymbol> childSymbols = new ArrayList<>(); nodes.forEach(node -> { Optional<DocumentSymbol> docSymbol = node.apply(this); if (docSymbol != null) { docSymbol.ifPresent(childSymbols::add); } }); return childSymbols; } /** * Document symbol builder. * * @param name symbol name. * @param kind symbol kind. * @param detail symbol detail. * @param range Range of the symbol. 
* @param selectionRange selection range of the symbol. * @param isDeprecated Whether the symbol is deprecated. * @param children Child document symbols. * @param context Document symbol context. * @return */ public DocumentSymbol createDocumentSymbol(String name, SymbolKind kind, String detail, Range range, Range selectionRange, boolean isDeprecated, List<DocumentSymbol> children, DocumentSymbolContext context) { if (name == null || name.isEmpty()) { return null; } DocumentSymbol documentSymbol = new DocumentSymbol(); documentSymbol.setName(name); documentSymbol.setKind(kind); documentSymbol.setDetail(detail); documentSymbol.setRange(range); documentSymbol.setSelectionRange(selectionRange); if (isDeprecated && context.supportedTags().isPresent() && context.supportedTags().get().getValueSet().contains(SymbolTag.Deprecated)) { documentSymbol.setTags(List.of(SymbolTag.Deprecated)); } if (context.getHierarchicalDocumentSymbolSupport()) { documentSymbol.setChildren(children); } else { this.documentSymbolStore.add(documentSymbol); } return documentSymbol; } }
class DocumentSymbolResolver extends NodeTransformer<Optional<DocumentSymbol>> { private List<DocumentSymbol> documentSymbolStore; private DocumentSymbolContext context; DocumentSymbolResolver(DocumentSymbolContext context) { this.context = context; documentSymbolStore = new ArrayList<>(); } public List<DocumentSymbol> getDocumentSymbolStore() { return this.documentSymbolStore; } @Override public Optional<DocumentSymbol> transform(Token token) { return Optional.empty(); } @Override protected Optional<DocumentSymbol> transformSyntaxNode(Node node) { return Optional.empty(); } @Override public Optional<DocumentSymbol> transform(ModulePartNode modulePartNode) { List<DocumentSymbol> memberSymbols = new ArrayList<>(); for (ModuleMemberDeclarationNode member : modulePartNode.members()) { member.apply(this).ifPresent(memberSymbols::add); } if (context.getHierarchicalDocumentSymbolSupport()) { this.documentSymbolStore.addAll(memberSymbols); } /* since module part node is a collection of multiple documents. We don't create the document symbol node corresponding to the module part node here. 
*/ return Optional.empty(); } @Override public Optional<DocumentSymbol> transform(FunctionDefinitionNode functionDefinitionNode) { String name = ""; Range range = DocumentSymbolUtil.generateNodeRange(functionDefinitionNode); SymbolKind symbolKind; Optional<MetadataNode> metadata = functionDefinitionNode.metadata(); boolean isDeprecated = metadata.isPresent() && DocumentSymbolUtil.isDeprecated(metadata.get()); switch (functionDefinitionNode.kind()) { case FUNCTION_DEFINITION: name = functionDefinitionNode.functionName().text(); symbolKind = SymbolKind.Function; break; case OBJECT_METHOD_DEFINITION: name = functionDefinitionNode.functionName().text(); symbolKind = SymbolKind.Method; break; case RESOURCE_ACCESSOR_DEFINITION: String accessor = functionDefinitionNode.functionName().text(); List<String> pathParams = new ArrayList<>(); String resourcePath = ""; for (Node child : functionDefinitionNode.children()) { if (child.kind() == SyntaxKind.IDENTIFIER_TOKEN && !((IdentifierToken) child).text().equals(accessor)) { resourcePath = ((IdentifierToken) child).text(); } else if (child.kind() == SyntaxKind.RESOURCE_PATH_SEGMENT_PARAM) { String[] param = child.toSourceCode() .replaceAll("\\[|\\]", "").split("\\s+"); pathParams.add(param[param.length - 1]); } else if (child.kind() == SyntaxKind.RESOURCE_PATH_REST_PARAM) { pathParams.add("*"); } } if (!accessor.isEmpty()) { name = accessor + ":" + resourcePath; if (!pathParams.isEmpty()) { String params = pathParams.stream().map(param -> "{" + param + "}") .collect(Collectors.joining("/")); name = name + (resourcePath.isEmpty() ? 
params : "/" + params); } else if (resourcePath.isEmpty()) { name = name + "/"; } } symbolKind = SymbolKind.Function; break; default: return Optional.empty(); } return Optional.ofNullable(createDocumentSymbol(name, symbolKind, null, range, range, isDeprecated, Collections.emptyList(), this.context)); } @Override @Override public Optional<DocumentSymbol> transform(ServiceDeclarationNode serviceDeclarationNode) { String name = "service " + serviceDeclarationNode.absoluteResourcePath().stream() .map(Node::toSourceCode).collect(Collectors.joining("")) + " on " + serviceDeclarationNode.expressions().stream() .map(Node::toSourceCode).collect(Collectors.joining("")); SymbolKind symbolKind = SymbolKind.Interface; Range range = DocumentSymbolUtil.generateNodeRange(serviceDeclarationNode); Optional<MetadataNode> metadata = serviceDeclarationNode.metadata(); boolean isDeprecated = metadata.isPresent() && DocumentSymbolUtil.isDeprecated(metadata.get()); List<DocumentSymbol> children = transformMembers(serviceDeclarationNode.members()); return Optional.ofNullable(createDocumentSymbol(name, symbolKind, null, range, range, isDeprecated, children, this.context)); } /** * Provided a ChildNodes list generate the corresponding document symbols. * * @param nodes {@link NodeList<? extends Node>} Member nodes list. * @return {@link List<DocumentSymbol>} Generated list of document symbols. */ private List<DocumentSymbol> transformMembers(NodeList<? extends Node> nodes) { List<DocumentSymbol> childSymbols = new ArrayList<>(); nodes.forEach(node -> { node.apply(this).ifPresent(childSymbols::add); }); return childSymbols; } /** * Document symbol builder. * * @param name symbol name. * @param kind symbol kind. * @param detail symbol detail. * @param range Range of the symbol. * @param selectionRange selection range of the symbol. * @param isDeprecated Whether the symbol is deprecated. * @param children Child document symbols. * @param context Document symbol context. 
* @return */ public DocumentSymbol createDocumentSymbol(String name, SymbolKind kind, String detail, Range range, Range selectionRange, boolean isDeprecated, List<DocumentSymbol> children, DocumentSymbolContext context) { if (name == null || name.isEmpty()) { return null; } DocumentSymbol documentSymbol = new DocumentSymbol(); documentSymbol.setName(name); documentSymbol.setKind(kind); documentSymbol.setDetail(detail); documentSymbol.setRange(range); documentSymbol.setSelectionRange(selectionRange); if (isDeprecated && context.deprecatedSupport()) { documentSymbol.setTags(List.of(SymbolTag.Deprecated)); } if (context.getHierarchicalDocumentSymbolSupport()) { documentSymbol.setChildren(children); } else { this.documentSymbolStore.add(documentSymbol); } return documentSymbol; } }
It's very minor but could we call it `MONGODB_PANACHE` as the feature? See a few lines below.
CapabilityBuildItem capability() { return new CapabilityBuildItem(Capabilities.MONGO_PANACHE); }
return new CapabilityBuildItem(Capabilities.MONGO_PANACHE);
CapabilityBuildItem capability() { return new CapabilityBuildItem(Capabilities.MONGODB_PANACHE); }
class PanacheResourceProcessor { static final DotName DOTNAME_PANACHE_REPOSITORY_BASE = DotName.createSimple(PanacheMongoRepositoryBase.class.getName()); private static final DotName DOTNAME_PANACHE_REPOSITORY = DotName.createSimple(PanacheMongoRepository.class.getName()); static final DotName DOTNAME_PANACHE_ENTITY_BASE = DotName.createSimple(PanacheMongoEntityBase.class.getName()); private static final DotName DOTNAME_PANACHE_ENTITY = DotName.createSimple(PanacheMongoEntity.class.getName()); private static final DotName DOTNAME_OBJECT_ID = DotName.createSimple(ObjectId.class.getName()); @BuildStep @BuildStep FeatureBuildItem featureBuildItem() { return new FeatureBuildItem(FeatureBuildItem.MONGODB_PANACHE); } @BuildStep void registerJsonbSerDeser(BuildProducer<JsonbSerializerBuildItem> jsonbSerializers, BuildProducer<JsonbDeserializerBuildItem> jsonbDeserializers) { jsonbSerializers .produce(new JsonbSerializerBuildItem(io.quarkus.mongodb.panache.jsonb.ObjectIdSerializer.class.getName())); jsonbDeserializers .produce(new JsonbDeserializerBuildItem(io.quarkus.mongodb.panache.jsonb.ObjectIdDeserializer.class.getName())); } @BuildStep void registerJacksonSerDeser(BuildProducer<JacksonModuleBuildItem> customSerDeser) { customSerDeser.produce( new JacksonModuleBuildItem.Builder("ObjectIdModule") .add(io.quarkus.mongodb.panache.jackson.ObjectIdSerializer.class.getName(), io.quarkus.mongodb.panache.jackson.ObjectIdDeserializer.class.getName(), ObjectId.class.getName()) .build()); } @BuildStep ReflectiveHierarchyBuildItem registerForReflection(CombinedIndexBuildItem index) { Indexer indexer = new Indexer(); Set<DotName> additionalIndex = new HashSet<>(); IndexingUtil.indexClass(ObjectId.class.getName(), indexer, index.getIndex(), additionalIndex, PanacheResourceProcessor.class.getClassLoader()); CompositeIndex compositeIndex = CompositeIndex.create(index.getIndex(), indexer.complete()); Type type = Type.create(DOTNAME_OBJECT_ID, Type.Kind.CLASS); return new 
ReflectiveHierarchyBuildItem(type, compositeIndex); } @BuildStep void build(CombinedIndexBuildItem index, ApplicationIndexBuildItem applicationIndex, BuildProducer<BytecodeTransformerBuildItem> transformers) throws Exception { PanacheMongoRepositoryEnhancer daoEnhancer = new PanacheMongoRepositoryEnhancer(index.getIndex()); Set<String> daoClasses = new HashSet<>(); for (ClassInfo classInfo : index.getIndex().getAllKnownImplementors(DOTNAME_PANACHE_REPOSITORY_BASE)) { if (classInfo.name().equals(DOTNAME_PANACHE_REPOSITORY)) continue; daoClasses.add(classInfo.name().toString()); } for (ClassInfo classInfo : index.getIndex().getAllKnownImplementors(DOTNAME_PANACHE_REPOSITORY)) { daoClasses.add(classInfo.name().toString()); } for (String daoClass : daoClasses) { transformers.produce(new BytecodeTransformerBuildItem(daoClass, daoEnhancer)); } PanacheMongoEntityEnhancer modelEnhancer = new PanacheMongoEntityEnhancer(index.getIndex()); Set<String> modelClasses = new HashSet<>(); for (ClassInfo classInfo : index.getIndex().getAllKnownSubclasses(DOTNAME_PANACHE_ENTITY_BASE)) { if (classInfo.name().equals(DOTNAME_PANACHE_ENTITY)) continue; if (modelClasses.add(classInfo.name().toString())) modelEnhancer.collectFields(classInfo); } for (ClassInfo classInfo : index.getIndex().getAllKnownSubclasses(DOTNAME_PANACHE_ENTITY)) { if (modelClasses.add(classInfo.name().toString())) modelEnhancer.collectFields(classInfo); } for (String modelClass : modelClasses) { transformers.produce(new BytecodeTransformerBuildItem(modelClass, modelEnhancer)); } if (!modelEnhancer.entities.isEmpty()) { PanacheFieldAccessEnhancer panacheFieldAccessEnhancer = new PanacheFieldAccessEnhancer( modelEnhancer.getModelInfo()); for (ClassInfo classInfo : applicationIndex.getIndex().getKnownClasses()) { String className = classInfo.name().toString(); if (!modelClasses.contains(className)) { transformers.produce(new BytecodeTransformerBuildItem(className, panacheFieldAccessEnhancer)); } } } } }
class PanacheResourceProcessor { static final DotName DOTNAME_PANACHE_REPOSITORY_BASE = DotName.createSimple(PanacheMongoRepositoryBase.class.getName()); private static final DotName DOTNAME_PANACHE_REPOSITORY = DotName.createSimple(PanacheMongoRepository.class.getName()); static final DotName DOTNAME_PANACHE_ENTITY_BASE = DotName.createSimple(PanacheMongoEntityBase.class.getName()); private static final DotName DOTNAME_PANACHE_ENTITY = DotName.createSimple(PanacheMongoEntity.class.getName()); private static final DotName DOTNAME_OBJECT_ID = DotName.createSimple(ObjectId.class.getName()); @BuildStep @BuildStep FeatureBuildItem featureBuildItem() { return new FeatureBuildItem(FeatureBuildItem.MONGODB_PANACHE); } @BuildStep void registerJsonbSerDeser(BuildProducer<JsonbSerializerBuildItem> jsonbSerializers, BuildProducer<JsonbDeserializerBuildItem> jsonbDeserializers) { jsonbSerializers .produce(new JsonbSerializerBuildItem(io.quarkus.mongodb.panache.jsonb.ObjectIdSerializer.class.getName())); jsonbDeserializers .produce(new JsonbDeserializerBuildItem(io.quarkus.mongodb.panache.jsonb.ObjectIdDeserializer.class.getName())); } @BuildStep void registerJacksonSerDeser(BuildProducer<JacksonModuleBuildItem> customSerDeser) { customSerDeser.produce( new JacksonModuleBuildItem.Builder("ObjectIdModule") .add(io.quarkus.mongodb.panache.jackson.ObjectIdSerializer.class.getName(), io.quarkus.mongodb.panache.jackson.ObjectIdDeserializer.class.getName(), ObjectId.class.getName()) .build()); } @BuildStep ReflectiveHierarchyBuildItem registerForReflection(CombinedIndexBuildItem index) { Indexer indexer = new Indexer(); Set<DotName> additionalIndex = new HashSet<>(); IndexingUtil.indexClass(ObjectId.class.getName(), indexer, index.getIndex(), additionalIndex, PanacheResourceProcessor.class.getClassLoader()); CompositeIndex compositeIndex = CompositeIndex.create(index.getIndex(), indexer.complete()); Type type = Type.create(DOTNAME_OBJECT_ID, Type.Kind.CLASS); return new 
ReflectiveHierarchyBuildItem(type, compositeIndex); } @BuildStep void build(CombinedIndexBuildItem index, ApplicationIndexBuildItem applicationIndex, BuildProducer<BytecodeTransformerBuildItem> transformers) throws Exception { PanacheMongoRepositoryEnhancer daoEnhancer = new PanacheMongoRepositoryEnhancer(index.getIndex()); Set<String> daoClasses = new HashSet<>(); for (ClassInfo classInfo : index.getIndex().getAllKnownImplementors(DOTNAME_PANACHE_REPOSITORY_BASE)) { if (classInfo.name().equals(DOTNAME_PANACHE_REPOSITORY)) continue; daoClasses.add(classInfo.name().toString()); } for (ClassInfo classInfo : index.getIndex().getAllKnownImplementors(DOTNAME_PANACHE_REPOSITORY)) { daoClasses.add(classInfo.name().toString()); } for (String daoClass : daoClasses) { transformers.produce(new BytecodeTransformerBuildItem(daoClass, daoEnhancer)); } PanacheMongoEntityEnhancer modelEnhancer = new PanacheMongoEntityEnhancer(index.getIndex()); Set<String> modelClasses = new HashSet<>(); for (ClassInfo classInfo : index.getIndex().getAllKnownSubclasses(DOTNAME_PANACHE_ENTITY_BASE)) { if (classInfo.name().equals(DOTNAME_PANACHE_ENTITY)) continue; if (modelClasses.add(classInfo.name().toString())) modelEnhancer.collectFields(classInfo); } for (ClassInfo classInfo : index.getIndex().getAllKnownSubclasses(DOTNAME_PANACHE_ENTITY)) { if (modelClasses.add(classInfo.name().toString())) modelEnhancer.collectFields(classInfo); } for (String modelClass : modelClasses) { transformers.produce(new BytecodeTransformerBuildItem(modelClass, modelEnhancer)); } if (!modelEnhancer.entities.isEmpty()) { PanacheFieldAccessEnhancer panacheFieldAccessEnhancer = new PanacheFieldAccessEnhancer( modelEnhancer.getModelInfo()); for (ClassInfo classInfo : applicationIndex.getIndex().getKnownClasses()) { String className = classInfo.name().toString(); if (!modelClasses.contains(className)) { transformers.produce(new BytecodeTransformerBuildItem(className, panacheFieldAccessEnhancer)); } } } } }
should be wrapped in a try-catch block
public void testLogicalScopeShouldIgnoreValueGroupName() { Configuration config = new Configuration(); config.setString(ConfigConstants.METRICS_REPORTER_PREFIX + "test." + ConfigConstants.METRICS_REPORTER_CLASS_SUFFIX, TestReporter.class.getName()); MetricRegistryImpl registry = new MetricRegistryImpl(MetricRegistryConfiguration.fromConfiguration(config)); GenericMetricGroup root = new GenericMetricGroup(registry, new DummyAbstractMetricGroup(registry), "root"); String key = "key"; String value = "value"; MetricGroup group = root.addGroup(key, value); String logicalScope = ((AbstractMetricGroup) group) .getLogicalScope(new DummyCharacterFilter(), registry.getDelimiter(), 0); assertTrue("Key is missing from logical scope.", logicalScope.contains(key)); assertFalse("Value is present in logical scope.", logicalScope.contains(value)); }
MetricRegistryImpl registry = new MetricRegistryImpl(MetricRegistryConfiguration.fromConfiguration(config));
public void testLogicalScopeShouldIgnoreValueGroupName() throws Exception { Configuration config = new Configuration(); config.setString(ConfigConstants.METRICS_REPORTER_PREFIX + "test." + ConfigConstants.METRICS_REPORTER_CLASS_SUFFIX, TestReporter.class.getName()); MetricRegistryImpl registry = new MetricRegistryImpl(MetricRegistryConfiguration.fromConfiguration(config)); try { GenericMetricGroup root = new GenericMetricGroup(registry, new DummyAbstractMetricGroup(registry), "root"); String key = "key"; String value = "value"; MetricGroup group = root.addGroup(key, value); String logicalScope = ((AbstractMetricGroup) group) .getLogicalScope(new DummyCharacterFilter(), registry.getDelimiter(), 0); assertThat("Key is missing from logical scope.", logicalScope, containsString(key)); assertThat("Value is present in logical scope.", logicalScope, not(containsString(value))); } finally { registry.shutdown().get(); } }
class MetricGroupTest extends TestLogger { private static final MetricRegistryConfiguration defaultMetricRegistryConfiguration = MetricRegistryConfiguration.defaultMetricRegistryConfiguration(); private MetricRegistryImpl registry; private final MetricRegistryImpl exceptionOnRegister = new ExceptionOnRegisterRegistry(); @Before public void createRegistry() { this.registry = new MetricRegistryImpl(defaultMetricRegistryConfiguration); } @After public void shutdownRegistry() throws Exception { this.registry.shutdown().get(); this.registry = null; } @Test public void sameGroupOnNameCollision() { GenericMetricGroup group = new GenericMetricGroup( registry, new DummyAbstractMetricGroup(registry), "somegroup"); String groupName = "sometestname"; MetricGroup subgroup1 = group.addGroup(groupName); MetricGroup subgroup2 = group.addGroup(groupName); assertNotNull(subgroup1); assertNotNull(subgroup2); assertTrue(subgroup1 == subgroup2); } /** * Verifies the basic behavior when defining user-defined variables. */ @Test public void testUserDefinedVariable() { MetricRegistry registry = NoOpMetricRegistry.INSTANCE; GenericMetricGroup root = new GenericMetricGroup(registry, new DummyAbstractMetricGroup(registry), "root"); String key = "key"; String value = "value"; MetricGroup group = root.addGroup(key, value); String variableValue = group.getAllVariables().get(ScopeFormat.asVariable("key")); assertEquals(value, variableValue); String identifier = group.getMetricIdentifier("metric"); assertTrue("Key is missing from metric identifier.", identifier.contains("key")); assertTrue("Value is missing from metric identifier.", identifier.contains("value")); String logicalScope = ((AbstractMetricGroup) group).getLogicalScope(new DummyCharacterFilter()); assertTrue("Key is missing from logical scope.", logicalScope.contains(key)); assertFalse("Value is present in logical scope.", logicalScope.contains(value)); } /** * Verifies that calling {@link MetricGroup * through the generic code path. 
*/ @Test public void testUserDefinedVariableOnKeyGroup() { MetricRegistry registry = NoOpMetricRegistry.INSTANCE; GenericMetricGroup root = new GenericMetricGroup(registry, new DummyAbstractMetricGroup(registry), "root"); String key1 = "key1"; String value1 = "value1"; root.addGroup(key1, value1); String key2 = "key2"; String value2 = "value2"; MetricGroup group = root.addGroup(key1).addGroup(key2, value2); String variableValue = group.getAllVariables().get("value2"); assertNull(variableValue); String identifier = group.getMetricIdentifier("metric"); assertTrue("Key1 is missing from metric identifier.", identifier.contains("key1")); assertTrue("Key2 is missing from metric identifier.", identifier.contains("key2")); assertTrue("Value2 is missing from metric identifier.", identifier.contains("value2")); String logicalScope = ((AbstractMetricGroup) group).getLogicalScope(new DummyCharacterFilter()); assertTrue("Key1 is missing from logical scope.", logicalScope.contains(key1)); assertTrue("Key2 is missing from logical scope.", logicalScope.contains(key2)); assertTrue("Value2 is missing from logical scope.", logicalScope.contains(value2)); } /** * Verifies that calling {@link MetricGroup * exists goes through the generic code path. 
*/ @Test public void testNameCollisionForKeyAfterGenericGroup() { MetricRegistry registry = NoOpMetricRegistry.INSTANCE; GenericMetricGroup root = new GenericMetricGroup(registry, new DummyAbstractMetricGroup(registry), "root"); String key = "key"; String value = "value"; root.addGroup(key); MetricGroup group = root.addGroup(key, value); String variableValue = group.getAllVariables().get(ScopeFormat.asVariable("key")); assertNull(variableValue); String identifier = group.getMetricIdentifier("metric"); assertTrue("Key is missing from metric identifier.", identifier.contains("key")); assertTrue("Value is missing from metric identifier.", identifier.contains("value")); String logicalScope = ((AbstractMetricGroup) group).getLogicalScope(new DummyCharacterFilter()); assertTrue("Key is missing from logical scope.", logicalScope.contains(key)); assertTrue("Value is missing from logical scope.", logicalScope.contains(value)); } /** * Verifies that calling {@link MetricGroup * already exists goes through the generic code path. 
*/ @Test public void testNameCollisionForKeyAndValueAfterGenericGroup() { MetricRegistry registry = NoOpMetricRegistry.INSTANCE; GenericMetricGroup root = new GenericMetricGroup(registry, new DummyAbstractMetricGroup(registry), "root"); String key = "key"; String value = "value"; root.addGroup(key).addGroup(value); MetricGroup group = root.addGroup(key, value); String variableValue = group.getAllVariables().get(ScopeFormat.asVariable("key")); assertNull(variableValue); String identifier = group.getMetricIdentifier("metric"); assertTrue("Key is missing from metric identifier.", identifier.contains("key")); assertTrue("Value is missing from metric identifier.", identifier.contains("value")); String logicalScope = ((AbstractMetricGroup) group).getLogicalScope(new DummyCharacterFilter()); assertTrue("Key is missing from logical scope.", logicalScope.contains(key)); assertTrue("Value is missing from logical scope.", logicalScope.contains(value)); } /** * Verifies that existing key/value groups are returned when calling {@link MetricGroup */ @Test public void testNameCollisionAfterKeyValueGroup() { MetricRegistry registry = NoOpMetricRegistry.INSTANCE; GenericMetricGroup root = new GenericMetricGroup(registry, new DummyAbstractMetricGroup(registry), "root"); String key = "key"; String value = "value"; root.addGroup(key, value); MetricGroup group = root.addGroup(key).addGroup(value); String variableValue = group.getAllVariables().get(ScopeFormat.asVariable("key")); assertEquals(value, variableValue); String identifier = group.getMetricIdentifier("metric"); assertTrue("Key is missing from metric identifier.", identifier.contains("key")); assertTrue("Value is missing from metric identifier.", identifier.contains("value")); String logicalScope = ((AbstractMetricGroup) group).getLogicalScope(new DummyCharacterFilter()); assertTrue("Key is missing from logical scope.", logicalScope.contains(key)); assertFalse("Value is present in logical scope.", logicalScope.contains(value)); 
} /** * Verifies that calling {@link AbstractMetricGroup * should ignore value as well. */ @Test @Test public void closedGroupDoesNotRegisterMetrics() { GenericMetricGroup group = new GenericMetricGroup( exceptionOnRegister, new DummyAbstractMetricGroup(exceptionOnRegister), "testgroup"); assertFalse(group.isClosed()); group.close(); assertTrue(group.isClosed()); group.counter("testcounter"); group.gauge("testgauge", new Gauge<Object>() { @Override public Object getValue() { return null; } }); } @Test public void closedGroupCreatesClosedGroups() { GenericMetricGroup group = new GenericMetricGroup(exceptionOnRegister, new DummyAbstractMetricGroup(exceptionOnRegister), "testgroup"); assertFalse(group.isClosed()); group.close(); assertTrue(group.isClosed()); AbstractMetricGroup subgroup = (AbstractMetricGroup) group.addGroup("test subgroup"); assertTrue(subgroup.isClosed()); } @Test public void tolerateMetricNameCollisions() { final String name = "abctestname"; GenericMetricGroup group = new GenericMetricGroup( registry, new DummyAbstractMetricGroup(registry), "testgroup"); assertNotNull(group.counter(name)); assertNotNull(group.counter(name)); } @Test public void tolerateMetricAndGroupNameCollisions() { final String name = "abctestname"; GenericMetricGroup group = new GenericMetricGroup( registry, new DummyAbstractMetricGroup(registry), "testgroup"); assertNotNull(group.addGroup(name)); assertNotNull(group.counter(name)); } @Test public void testCreateQueryServiceMetricInfo() { JobID jid = new JobID(); JobVertexID vid = new JobVertexID(); AbstractID eid = new AbstractID(); MetricRegistryImpl registry = new MetricRegistryImpl(defaultMetricRegistryConfiguration); TaskManagerMetricGroup tm = new TaskManagerMetricGroup(registry, "host", "id"); TaskManagerJobMetricGroup job = new TaskManagerJobMetricGroup(registry, tm, jid, "jobname"); TaskMetricGroup task = new TaskMetricGroup(registry, job, vid, eid, "taskName", 4, 5); GenericMetricGroup userGroup1 = new 
GenericMetricGroup(registry, task, "hello"); GenericMetricGroup userGroup2 = new GenericMetricGroup(registry, userGroup1, "world"); QueryScopeInfo.TaskQueryScopeInfo info1 = (QueryScopeInfo.TaskQueryScopeInfo) userGroup1.createQueryServiceMetricInfo(new DummyCharacterFilter()); assertEquals("hello", info1.scope); assertEquals(jid.toString(), info1.jobID); assertEquals(vid.toString(), info1.vertexID); assertEquals(4, info1.subtaskIndex); QueryScopeInfo.TaskQueryScopeInfo info2 = (QueryScopeInfo.TaskQueryScopeInfo) userGroup2.createQueryServiceMetricInfo(new DummyCharacterFilter()); assertEquals("hello.world", info2.scope); assertEquals(jid.toString(), info2.jobID); assertEquals(vid.toString(), info2.vertexID); assertEquals(4, info2.subtaskIndex); } private static class ExceptionOnRegisterRegistry extends MetricRegistryImpl { public ExceptionOnRegisterRegistry() { super(defaultMetricRegistryConfiguration); } @Override public void register(Metric metric, String name, AbstractMetricGroup parent) { fail("Metric should never be registered"); } @Override public void unregister(Metric metric, String name, AbstractMetricGroup parent) { fail("Metric should never be un-registered"); } } /** * A dummy {@link AbstractMetricGroup} to be used when a group is required as an argument but not actually used. */ public static class DummyAbstractMetricGroup extends AbstractMetricGroup { public DummyAbstractMetricGroup(MetricRegistry registry) { super(registry, new String[0], null); } @Override protected QueryScopeInfo createQueryServiceMetricInfo(CharacterFilter filter) { return null; } @Override protected String getGroupName(CharacterFilter filter) { return ""; } @Override protected void addMetric(String name, Metric metric) { } @Override public MetricGroup addGroup(String name) { return new DummyAbstractMetricGroup(registry); } } }
class MetricGroupTest extends TestLogger { private static final MetricRegistryConfiguration defaultMetricRegistryConfiguration = MetricRegistryConfiguration.defaultMetricRegistryConfiguration(); private MetricRegistryImpl registry; private final MetricRegistryImpl exceptionOnRegister = new ExceptionOnRegisterRegistry(); @Before public void createRegistry() { this.registry = new MetricRegistryImpl(defaultMetricRegistryConfiguration); } @After public void shutdownRegistry() throws Exception { this.registry.shutdown().get(); this.registry = null; } @Test public void sameGroupOnNameCollision() { GenericMetricGroup group = new GenericMetricGroup( registry, new DummyAbstractMetricGroup(registry), "somegroup"); String groupName = "sometestname"; MetricGroup subgroup1 = group.addGroup(groupName); MetricGroup subgroup2 = group.addGroup(groupName); assertNotNull(subgroup1); assertNotNull(subgroup2); assertTrue(subgroup1 == subgroup2); } /** * Verifies the basic behavior when defining user-defined variables. */ @Test public void testUserDefinedVariable() { MetricRegistry registry = NoOpMetricRegistry.INSTANCE; GenericMetricGroup root = new GenericMetricGroup(registry, new DummyAbstractMetricGroup(registry), "root"); String key = "key"; String value = "value"; MetricGroup group = root.addGroup(key, value); String variableValue = group.getAllVariables().get(ScopeFormat.asVariable("key")); assertEquals(value, variableValue); String identifier = group.getMetricIdentifier("metric"); assertTrue("Key is missing from metric identifier.", identifier.contains("key")); assertTrue("Value is missing from metric identifier.", identifier.contains("value")); String logicalScope = ((AbstractMetricGroup) group).getLogicalScope(new DummyCharacterFilter()); assertTrue("Key is missing from logical scope.", logicalScope.contains(key)); assertFalse("Value is present in logical scope.", logicalScope.contains(value)); } /** * Verifies that calling {@link MetricGroup * through the generic code path. 
*/ @Test public void testUserDefinedVariableOnKeyGroup() { MetricRegistry registry = NoOpMetricRegistry.INSTANCE; GenericMetricGroup root = new GenericMetricGroup(registry, new DummyAbstractMetricGroup(registry), "root"); String key1 = "key1"; String value1 = "value1"; root.addGroup(key1, value1); String key2 = "key2"; String value2 = "value2"; MetricGroup group = root.addGroup(key1).addGroup(key2, value2); String variableValue = group.getAllVariables().get("value2"); assertNull(variableValue); String identifier = group.getMetricIdentifier("metric"); assertTrue("Key1 is missing from metric identifier.", identifier.contains("key1")); assertTrue("Key2 is missing from metric identifier.", identifier.contains("key2")); assertTrue("Value2 is missing from metric identifier.", identifier.contains("value2")); String logicalScope = ((AbstractMetricGroup) group).getLogicalScope(new DummyCharacterFilter()); assertTrue("Key1 is missing from logical scope.", logicalScope.contains(key1)); assertTrue("Key2 is missing from logical scope.", logicalScope.contains(key2)); assertTrue("Value2 is missing from logical scope.", logicalScope.contains(value2)); } /** * Verifies that calling {@link MetricGroup * exists goes through the generic code path. 
*/ @Test public void testNameCollisionForKeyAfterGenericGroup() { MetricRegistry registry = NoOpMetricRegistry.INSTANCE; GenericMetricGroup root = new GenericMetricGroup(registry, new DummyAbstractMetricGroup(registry), "root"); String key = "key"; String value = "value"; root.addGroup(key); MetricGroup group = root.addGroup(key, value); String variableValue = group.getAllVariables().get(ScopeFormat.asVariable("key")); assertNull(variableValue); String identifier = group.getMetricIdentifier("metric"); assertTrue("Key is missing from metric identifier.", identifier.contains("key")); assertTrue("Value is missing from metric identifier.", identifier.contains("value")); String logicalScope = ((AbstractMetricGroup) group).getLogicalScope(new DummyCharacterFilter()); assertTrue("Key is missing from logical scope.", logicalScope.contains(key)); assertTrue("Value is missing from logical scope.", logicalScope.contains(value)); } /** * Verifies that calling {@link MetricGroup * already exists goes through the generic code path. 
*/ @Test public void testNameCollisionForKeyAndValueAfterGenericGroup() { MetricRegistry registry = NoOpMetricRegistry.INSTANCE; GenericMetricGroup root = new GenericMetricGroup(registry, new DummyAbstractMetricGroup(registry), "root"); String key = "key"; String value = "value"; root.addGroup(key).addGroup(value); MetricGroup group = root.addGroup(key, value); String variableValue = group.getAllVariables().get(ScopeFormat.asVariable("key")); assertNull(variableValue); String identifier = group.getMetricIdentifier("metric"); assertTrue("Key is missing from metric identifier.", identifier.contains("key")); assertTrue("Value is missing from metric identifier.", identifier.contains("value")); String logicalScope = ((AbstractMetricGroup) group).getLogicalScope(new DummyCharacterFilter()); assertTrue("Key is missing from logical scope.", logicalScope.contains(key)); assertTrue("Value is missing from logical scope.", logicalScope.contains(value)); } /** * Verifies that existing key/value groups are returned when calling {@link MetricGroup */ @Test public void testNameCollisionAfterKeyValueGroup() { MetricRegistry registry = NoOpMetricRegistry.INSTANCE; GenericMetricGroup root = new GenericMetricGroup(registry, new DummyAbstractMetricGroup(registry), "root"); String key = "key"; String value = "value"; root.addGroup(key, value); MetricGroup group = root.addGroup(key).addGroup(value); String variableValue = group.getAllVariables().get(ScopeFormat.asVariable("key")); assertEquals(value, variableValue); String identifier = group.getMetricIdentifier("metric"); assertTrue("Key is missing from metric identifier.", identifier.contains("key")); assertTrue("Value is missing from metric identifier.", identifier.contains("value")); String logicalScope = ((AbstractMetricGroup) group).getLogicalScope(new DummyCharacterFilter()); assertTrue("Key is missing from logical scope.", logicalScope.contains(key)); assertFalse("Value is present in logical scope.", logicalScope.contains(value)); 
} /** * Verifies that calling {@link AbstractMetricGroup * should ignore value as well. */ @Test @Test public void closedGroupDoesNotRegisterMetrics() { GenericMetricGroup group = new GenericMetricGroup( exceptionOnRegister, new DummyAbstractMetricGroup(exceptionOnRegister), "testgroup"); assertFalse(group.isClosed()); group.close(); assertTrue(group.isClosed()); group.counter("testcounter"); group.gauge("testgauge", new Gauge<Object>() { @Override public Object getValue() { return null; } }); } @Test public void closedGroupCreatesClosedGroups() { GenericMetricGroup group = new GenericMetricGroup(exceptionOnRegister, new DummyAbstractMetricGroup(exceptionOnRegister), "testgroup"); assertFalse(group.isClosed()); group.close(); assertTrue(group.isClosed()); AbstractMetricGroup subgroup = (AbstractMetricGroup) group.addGroup("test subgroup"); assertTrue(subgroup.isClosed()); } @Test public void tolerateMetricNameCollisions() { final String name = "abctestname"; GenericMetricGroup group = new GenericMetricGroup( registry, new DummyAbstractMetricGroup(registry), "testgroup"); assertNotNull(group.counter(name)); assertNotNull(group.counter(name)); } @Test public void tolerateMetricAndGroupNameCollisions() { final String name = "abctestname"; GenericMetricGroup group = new GenericMetricGroup( registry, new DummyAbstractMetricGroup(registry), "testgroup"); assertNotNull(group.addGroup(name)); assertNotNull(group.counter(name)); } @Test public void testCreateQueryServiceMetricInfo() { JobID jid = new JobID(); JobVertexID vid = new JobVertexID(); AbstractID eid = new AbstractID(); MetricRegistryImpl registry = new MetricRegistryImpl(defaultMetricRegistryConfiguration); TaskManagerMetricGroup tm = new TaskManagerMetricGroup(registry, "host", "id"); TaskManagerJobMetricGroup job = new TaskManagerJobMetricGroup(registry, tm, jid, "jobname"); TaskMetricGroup task = new TaskMetricGroup(registry, job, vid, eid, "taskName", 4, 5); GenericMetricGroup userGroup1 = new 
GenericMetricGroup(registry, task, "hello"); GenericMetricGroup userGroup2 = new GenericMetricGroup(registry, userGroup1, "world"); QueryScopeInfo.TaskQueryScopeInfo info1 = (QueryScopeInfo.TaskQueryScopeInfo) userGroup1.createQueryServiceMetricInfo(new DummyCharacterFilter()); assertEquals("hello", info1.scope); assertEquals(jid.toString(), info1.jobID); assertEquals(vid.toString(), info1.vertexID); assertEquals(4, info1.subtaskIndex); QueryScopeInfo.TaskQueryScopeInfo info2 = (QueryScopeInfo.TaskQueryScopeInfo) userGroup2.createQueryServiceMetricInfo(new DummyCharacterFilter()); assertEquals("hello.world", info2.scope); assertEquals(jid.toString(), info2.jobID); assertEquals(vid.toString(), info2.vertexID); assertEquals(4, info2.subtaskIndex); } private static class ExceptionOnRegisterRegistry extends MetricRegistryImpl { public ExceptionOnRegisterRegistry() { super(defaultMetricRegistryConfiguration); } @Override public void register(Metric metric, String name, AbstractMetricGroup parent) { fail("Metric should never be registered"); } @Override public void unregister(Metric metric, String name, AbstractMetricGroup parent) { fail("Metric should never be un-registered"); } } /** * A dummy {@link AbstractMetricGroup} to be used when a group is required as an argument but not actually used. */ public static class DummyAbstractMetricGroup extends AbstractMetricGroup { public DummyAbstractMetricGroup(MetricRegistry registry) { super(registry, new String[0], null); } @Override protected QueryScopeInfo createQueryServiceMetricInfo(CharacterFilter filter) { return null; } @Override protected String getGroupName(CharacterFilter filter) { return ""; } @Override protected void addMetric(String name, Metric metric) { } @Override public MetricGroup addGroup(String name) { return new DummyAbstractMetricGroup(registry); } } }
I really like these 4 lines.
private Tensor mappedHashJoin(Tensor a, Tensor b, TensorType joinedType) { TensorType commonDimensionType = commonDimensions(a, b); if (commonDimensionType.dimensions().isEmpty()) { return mappedGeneralJoin(a, b, joinedType); } Tensor smallerTensor = a.size() > b.size() ? b : a; Tensor largerTensor = a.size() > b.size() ? a : b; a = smallerTensor; b = largerTensor; Map<TensorAddress, List<Tensor.Cell>> aCellsByCommonAddress = new HashMap<>(); for (Iterator<Tensor.Cell> cellIterator = a.cellIterator(); cellIterator.hasNext(); ) { Tensor.Cell cell = cellIterator.next(); TensorAddress partialCommonAddress = partialCommonAddress(cell, a.type(), commonDimensionType); aCellsByCommonAddress.putIfAbsent(partialCommonAddress, new ArrayList<>()); aCellsByCommonAddress.get(partialCommonAddress).add(cell); } int[] aToIndexes = mapIndexes(a.type(), joinedType); int[] bToIndexes = mapIndexes(b.type(), joinedType); Tensor.Builder builder = Tensor.Builder.of(joinedType); for (Iterator<Tensor.Cell> cellIterator = b.cellIterator(); cellIterator.hasNext(); ) { Tensor.Cell cell = cellIterator.next(); TensorAddress partialCommonAddress = partialCommonAddress(cell, b.type(), commonDimensionType); for (Tensor.Cell aCell : aCellsByCommonAddress.getOrDefault(partialCommonAddress, Collections.emptyList())) { TensorAddress combinedAddress = joinAddresses(aCell.getKey(), aToIndexes, cell.getKey(), bToIndexes, joinedType); if (combinedAddress == null) continue; builder.cell(combinedAddress, combinator.applyAsDouble(aCell.getValue(), cell.getValue())); } } return builder.build(); }
b = largerTensor;
private Tensor mappedHashJoin(Tensor a, Tensor b, TensorType joinedType) { TensorType commonDimensionType = commonDimensions(a, b); if (commonDimensionType.dimensions().isEmpty()) { return mappedGeneralJoin(a, b, joinedType); } boolean swapTensors = a.size() > b.size(); if (swapTensors) { Tensor temp = a; a = b; b = temp; } int[] aIndexesInCommon = mapIndexes(commonDimensionType, a.type()); int[] bIndexesInCommon = mapIndexes(commonDimensionType, b.type()); int[] aIndexesInJoined = mapIndexes(a.type(), joinedType); int[] bIndexesInJoined = mapIndexes(b.type(), joinedType); Map<TensorAddress, List<Tensor.Cell>> aCellsByCommonAddress = new HashMap<>(); for (Iterator<Tensor.Cell> cellIterator = a.cellIterator(); cellIterator.hasNext(); ) { Tensor.Cell aCell = cellIterator.next(); TensorAddress partialCommonAddress = partialCommonAddress(aCell, aIndexesInCommon); aCellsByCommonAddress.putIfAbsent(partialCommonAddress, new ArrayList<>()); aCellsByCommonAddress.get(partialCommonAddress).add(aCell); } Tensor.Builder builder = Tensor.Builder.of(joinedType); for (Iterator<Tensor.Cell> cellIterator = b.cellIterator(); cellIterator.hasNext(); ) { Tensor.Cell bCell = cellIterator.next(); TensorAddress partialCommonAddress = partialCommonAddress(bCell, bIndexesInCommon); for (Tensor.Cell aCell : aCellsByCommonAddress.getOrDefault(partialCommonAddress, Collections.emptyList())) { TensorAddress combinedAddress = joinAddresses(aCell.getKey(), aIndexesInJoined, bCell.getKey(), bIndexesInJoined, joinedType); if (combinedAddress == null) continue; double combinedValue = swapTensors ? combinator.applyAsDouble(bCell.getValue(), aCell.getValue()) : combinator.applyAsDouble(aCell.getValue(), bCell.getValue()); builder.cell(combinedAddress, combinedValue); } } return builder.build(); }
class Join extends PrimitiveTensorFunction { private final TensorFunction argumentA, argumentB; private final DoubleBinaryOperator combinator; public Join(TensorFunction argumentA, TensorFunction argumentB, DoubleBinaryOperator combinator) { Objects.requireNonNull(argumentA, "The first argument tensor cannot be null"); Objects.requireNonNull(argumentB, "The second argument tensor cannot be null"); Objects.requireNonNull(combinator, "The combinator function cannot be null"); this.argumentA = argumentA; this.argumentB = argumentB; this.combinator = combinator; } public TensorFunction argumentA() { return argumentA; } public TensorFunction argumentB() { return argumentB; } public DoubleBinaryOperator combinator() { return combinator; } @Override public List<TensorFunction> functionArguments() { return ImmutableList.of(argumentA, argumentB); } @Override public TensorFunction replaceArguments(List<TensorFunction> arguments) { if ( arguments.size() != 2) throw new IllegalArgumentException("Join must have 2 arguments, got " + arguments.size()); return new Join(arguments.get(0), arguments.get(1), combinator); } @Override public PrimitiveTensorFunction toPrimitive() { return new Join(argumentA.toPrimitive(), argumentB.toPrimitive(), combinator); } @Override public String toString(ToStringContext context) { return "join(" + argumentA.toString(context) + ", " + argumentB.toString(context) + ", " + combinator + ")"; } @Override public Tensor evaluate(EvaluationContext context) { Tensor a = argumentA.evaluate(context); Tensor b = argumentB.evaluate(context); TensorType joinedType = new TensorType.Builder(a.type(), b.type()).build(); if (hasSingleIndexedDimension(a) && hasSingleIndexedDimension(b) && a.type().dimensions().get(0).name().equals(b.type().dimensions().get(0).name())) return indexedVectorJoin((IndexedTensor)a, (IndexedTensor)b, joinedType); else if (joinedType.dimensions().size() == a.type().dimensions().size() && joinedType.dimensions().size() == 
b.type().dimensions().size()) return singleSpaceJoin(a, b, joinedType); else if (a.type().dimensions().containsAll(b.type().dimensions())) return subspaceJoin(b, a, joinedType, true); else if (b.type().dimensions().containsAll(a.type().dimensions())) return subspaceJoin(a, b, joinedType, false); else return generalJoin(a, b, joinedType); } private boolean hasSingleIndexedDimension(Tensor tensor) { return tensor.type().dimensions().size() == 1 && tensor.type().dimensions().get(0).isIndexed(); } private Tensor indexedVectorJoin(IndexedTensor a, IndexedTensor b, TensorType type) { int joinedLength = Math.min(a.dimensionSizes().size(0), b.dimensionSizes().size(0)); Iterator<Double> aIterator = a.valueIterator(); Iterator<Double> bIterator = b.valueIterator(); IndexedTensor.Builder builder = IndexedTensor.Builder.of(type, new DimensionSizes.Builder(1).set(0, joinedLength).build()); for (int i = 0; i < joinedLength; i++) builder.cell(combinator.applyAsDouble(aIterator.next(), bIterator.next()), i); return builder.build(); } /** When both tensors have the same dimensions, at most one cell matches a cell in the other tensor */ private Tensor singleSpaceJoin(Tensor a, Tensor b, TensorType joinedType) { Tensor.Builder builder = Tensor.Builder.of(joinedType); for (Iterator<Tensor.Cell> i = a.cellIterator(); i.hasNext(); ) { Map.Entry<TensorAddress, Double> aCell = i.next(); double bCellValue = b.get(aCell.getKey()); if (Double.isNaN(bCellValue)) continue; builder.cell(aCell.getKey(), combinator.applyAsDouble(aCell.getValue(), bCellValue)); } return builder.build(); } /** Join a tensor into a superspace */ private Tensor subspaceJoin(Tensor subspace, Tensor superspace, TensorType joinedType, boolean reversedArgumentOrder) { if (subspace instanceof IndexedTensor && superspace instanceof IndexedTensor) return indexedSubspaceJoin((IndexedTensor) subspace, (IndexedTensor) superspace, joinedType, reversedArgumentOrder); else return generalSubspaceJoin(subspace, superspace, 
joinedType, reversedArgumentOrder); } private Tensor indexedSubspaceJoin(IndexedTensor subspace, IndexedTensor superspace, TensorType joinedType, boolean reversedArgumentOrder) { if (subspace.size() == 0 || superspace.size() == 0) return Tensor.Builder.of(joinedType, new DimensionSizes.Builder(joinedType.dimensions().size()).build()).build(); DimensionSizes joinedSizes = joinedSize(joinedType, subspace, superspace); IndexedTensor.Builder builder = (IndexedTensor.Builder)Tensor.Builder.of(joinedType, joinedSizes); Set<String> superDimensionNames = new HashSet<>(superspace.type().dimensionNames()); superDimensionNames.removeAll(subspace.type().dimensionNames()); for (Iterator<IndexedTensor.SubspaceIterator> i = superspace.subspaceIterator(superDimensionNames, joinedSizes); i.hasNext(); ) { IndexedTensor.SubspaceIterator subspaceInSuper = i.next(); joinSubspaces(subspace.valueIterator(), subspace.size(), subspaceInSuper, subspaceInSuper.size(), reversedArgumentOrder, builder); } return builder.build(); } private void joinSubspaces(Iterator<Double> subspace, int subspaceSize, Iterator<Tensor.Cell> superspace, int superspaceSize, boolean reversedArgumentOrder, IndexedTensor.Builder builder) { int joinedLength = Math.min(subspaceSize, superspaceSize); if (reversedArgumentOrder) { for (int i = 0; i < joinedLength; i++) { Tensor.Cell supercell = superspace.next(); builder.cell(supercell, combinator.applyAsDouble(supercell.getValue(), subspace.next())); } } else { for (int i = 0; i < joinedLength; i++) { Tensor.Cell supercell = superspace.next(); builder.cell(supercell, combinator.applyAsDouble(subspace.next(), supercell.getValue())); } } } private DimensionSizes joinedSize(TensorType joinedType, IndexedTensor a, IndexedTensor b) { DimensionSizes.Builder builder = new DimensionSizes.Builder(joinedType.dimensions().size()); for (int i = 0; i < builder.dimensions(); i++) { String dimensionName = joinedType.dimensions().get(i).name(); Optional<Integer> aIndex = 
a.type().indexOfDimension(dimensionName); Optional<Integer> bIndex = b.type().indexOfDimension(dimensionName); if (aIndex.isPresent() && bIndex.isPresent()) builder.set(i, Math.min(b.dimensionSizes().size(bIndex.get()), a.dimensionSizes().size(aIndex.get()))); else if (aIndex.isPresent()) builder.set(i, a.dimensionSizes().size(aIndex.get())); else if (bIndex.isPresent()) builder.set(i, b.dimensionSizes().size(bIndex.get())); } return builder.build(); } private Tensor generalSubspaceJoin(Tensor subspace, Tensor superspace, TensorType joinedType, boolean reversedArgumentOrder) { int[] subspaceIndexes = subspaceIndexes(superspace.type(), subspace.type()); Tensor.Builder builder = Tensor.Builder.of(joinedType); for (Iterator<Tensor.Cell> i = superspace.cellIterator(); i.hasNext(); ) { Map.Entry<TensorAddress, Double> supercell = i.next(); TensorAddress subaddress = mapAddressToSubspace(supercell.getKey(), subspaceIndexes); double subspaceValue = subspace.get(subaddress); if ( ! Double.isNaN(subspaceValue)) builder.cell(supercell.getKey(), reversedArgumentOrder ? 
combinator.applyAsDouble(supercell.getValue(), subspaceValue) : combinator.applyAsDouble(subspaceValue, supercell.getValue())); } return builder.build(); } /** Returns the indexes in the superspace type which should be retained to create the subspace type */ private int[] subspaceIndexes(TensorType supertype, TensorType subtype) { int[] subspaceIndexes = new int[subtype.dimensions().size()]; for (int i = 0; i < subtype.dimensions().size(); i++) subspaceIndexes[i] = supertype.indexOfDimension(subtype.dimensions().get(i).name()).get(); return subspaceIndexes; } private TensorAddress mapAddressToSubspace(TensorAddress superAddress, int[] subspaceIndexes) { String[] subspaceLabels = new String[subspaceIndexes.length]; for (int i = 0; i < subspaceIndexes.length; i++) subspaceLabels[i] = superAddress.label(subspaceIndexes[i]); return TensorAddress.of(subspaceLabels); } /** Slow join which works for any two tensors */ private Tensor generalJoin(Tensor a, Tensor b, TensorType joinedType) { if (a instanceof IndexedTensor && b instanceof IndexedTensor) return indexedGeneralJoin((IndexedTensor) a, (IndexedTensor) b, joinedType); else return mappedHashJoin(a, b, joinedType); } private Tensor indexedGeneralJoin(IndexedTensor a, IndexedTensor b, TensorType joinedType) { DimensionSizes joinedSize = joinedSize(joinedType, a, b); Tensor.Builder builder = Tensor.Builder.of(joinedType, joinedSize); int[] aToIndexes = mapIndexes(a.type(), joinedType); int[] bToIndexes = mapIndexes(b.type(), joinedType); joinTo(a, b, joinedType, joinedSize, aToIndexes, bToIndexes, false, builder); joinTo(b, a, joinedType, joinedSize, bToIndexes, aToIndexes, true, builder); return builder.build(); } private void joinTo(IndexedTensor a, IndexedTensor b, TensorType joinedType, DimensionSizes joinedSize, int[] aToIndexes, int[] bToIndexes, boolean reversedOrder, Tensor.Builder builder) { Set<String> sharedDimensions = Sets.intersection(a.type().dimensionNames(), b.type().dimensionNames()); Set<String> 
dimensionsOnlyInA = Sets.difference(a.type().dimensionNames(), b.type().dimensionNames()); DimensionSizes aIterateSize = joinedSizeOf(a.type(), joinedType, joinedSize); DimensionSizes bIterateSize = joinedSizeOf(b.type(), joinedType, joinedSize); for (Iterator<IndexedTensor.SubspaceIterator> ia = a.subspaceIterator(dimensionsOnlyInA, aIterateSize); ia.hasNext(); ) { IndexedTensor.SubspaceIterator aSubspace = ia.next(); while (aSubspace.hasNext()) { Tensor.Cell aCell = aSubspace.next(); PartialAddress matchingBCells = partialAddress(a.type(), aSubspace.address(), sharedDimensions); for (IndexedTensor.SubspaceIterator bSubspace = b.cellIterator(matchingBCells, bIterateSize); bSubspace.hasNext(); ) { Tensor.Cell bCell = bSubspace.next(); TensorAddress joinedAddress = joinAddresses(aCell.getKey(), aToIndexes, bCell.getKey(), bToIndexes, joinedType); double joinedValue = reversedOrder ? combinator.applyAsDouble(bCell.getValue(), aCell.getValue()) : combinator.applyAsDouble(aCell.getValue(), bCell.getValue()); builder.cell(joinedAddress, joinedValue); } } } } private PartialAddress partialAddress(TensorType addressType, TensorAddress address, Set<String> retainDimensions) { PartialAddress.Builder builder = new PartialAddress.Builder(retainDimensions.size()); for (int i = 0; i < addressType.dimensions().size(); i++) if (retainDimensions.contains(addressType.dimensions().get(i).name())) builder.add(addressType.dimensions().get(i).name(), address.intLabel(i)); return builder.build(); } /** Returns the sizes from the joined sizes which are present in the type argument */ private DimensionSizes joinedSizeOf(TensorType type, TensorType joinedType, DimensionSizes joinedSizes) { DimensionSizes.Builder builder = new DimensionSizes.Builder(type.dimensions().size()); int dimensionIndex = 0; for (int i = 0; i < joinedType.dimensions().size(); i++) { if (type.dimensionNames().contains(joinedType.dimensions().get(i).name())) builder.set(dimensionIndex++, joinedSizes.size(i)); } return 
builder.build(); } private Tensor mappedGeneralJoin(Tensor a, Tensor b, TensorType joinedType) { int[] aToIndexes = mapIndexes(a.type(), joinedType); int[] bToIndexes = mapIndexes(b.type(), joinedType); Tensor.Builder builder = Tensor.Builder.of(joinedType); for (Iterator<Tensor.Cell> aIterator = a.cellIterator(); aIterator.hasNext(); ) { Map.Entry<TensorAddress, Double> aCell = aIterator.next(); for (Iterator<Tensor.Cell> bIterator = b.cellIterator(); bIterator.hasNext(); ) { Map.Entry<TensorAddress, Double> bCell = bIterator.next(); TensorAddress combinedAddress = joinAddresses(aCell.getKey(), aToIndexes, bCell.getKey(), bToIndexes, joinedType); if (combinedAddress == null) continue; builder.cell(combinedAddress, combinator.applyAsDouble(aCell.getValue(), bCell.getValue())); } } return builder.build(); } /** * Returns the an array having one entry in order for each dimension of fromType * containing the index at which toType contains the same dimension name. * That is, if the returned array contains n at index i then * fromType.dimensions().get(i).name.equals(toType.dimensions().get(n).name()) * If some dimension in fromType is not present in toType, the corresponding index will be -1 */ private int[] mapIndexes(TensorType fromType, TensorType toType) { int[] toIndexes = new int[fromType.dimensions().size()]; for (int i = 0; i < fromType.dimensions().size(); i++) toIndexes[i] = toType.indexOfDimension(fromType.dimensions().get(i).name()).orElse(-1); return toIndexes; } private TensorAddress joinAddresses(TensorAddress a, int[] aToIndexes, TensorAddress b, int[] bToIndexes, TensorType joinedType) { String[] joinedLabels = new String[joinedType.dimensions().size()]; mapContent(a, joinedLabels, aToIndexes); boolean compatible = mapContent(b, joinedLabels, bToIndexes); if ( ! compatible) return null; return TensorAddress.of(joinedLabels); } /** * Maps the content in the given list to the given array, using the given index map. 
* * @return true if the mapping was successful, false if one of the destination positions was * occupied by a different value */ private boolean mapContent(TensorAddress from, String[] to, int[] indexMap) { for (int i = 0; i < from.size(); i++) { int toIndex = indexMap[i]; if (to[toIndex] != null && ! to[toIndex].equals(from.label(i))) return false; to[toIndex] = from.label(i); } return true; } /** * Returns common dimension of a and b as a new tensor type */ private TensorType commonDimensions(Tensor a, Tensor b) { TensorType.Builder typeBuilder = new TensorType.Builder(); TensorType aType = a.type(); TensorType bType = b.type(); for (int i = 0; i < aType.dimensions().size(); ++i) { TensorType.Dimension aDim = aType.dimensions().get(i); for (int j = 0; j < bType.dimensions().size(); ++j) { TensorType.Dimension bDim = bType.dimensions().get(j); if (aDim.equals(bDim)) { typeBuilder.set(bDim); } } } return typeBuilder.build(); } private TensorAddress partialCommonAddress(Tensor.Cell cell, TensorType type, TensorType commonDimensions) { TensorAddress address = cell.getKey(); String[] labels = new String[commonDimensions.dimensions().size()]; for (int i = 0; i < labels.length; ++i) { String name = commonDimensions.dimensions().get(i).name(); int index = type.indexOfDimension(name).orElseThrow(RuntimeException::new); labels[i] = address.label(index); } return TensorAddress.of(labels); } }
class Join extends PrimitiveTensorFunction { private final TensorFunction argumentA, argumentB; private final DoubleBinaryOperator combinator; public Join(TensorFunction argumentA, TensorFunction argumentB, DoubleBinaryOperator combinator) { Objects.requireNonNull(argumentA, "The first argument tensor cannot be null"); Objects.requireNonNull(argumentB, "The second argument tensor cannot be null"); Objects.requireNonNull(combinator, "The combinator function cannot be null"); this.argumentA = argumentA; this.argumentB = argumentB; this.combinator = combinator; } public TensorFunction argumentA() { return argumentA; } public TensorFunction argumentB() { return argumentB; } public DoubleBinaryOperator combinator() { return combinator; } @Override public List<TensorFunction> functionArguments() { return ImmutableList.of(argumentA, argumentB); } @Override public TensorFunction replaceArguments(List<TensorFunction> arguments) { if ( arguments.size() != 2) throw new IllegalArgumentException("Join must have 2 arguments, got " + arguments.size()); return new Join(arguments.get(0), arguments.get(1), combinator); } @Override public PrimitiveTensorFunction toPrimitive() { return new Join(argumentA.toPrimitive(), argumentB.toPrimitive(), combinator); } @Override public String toString(ToStringContext context) { return "join(" + argumentA.toString(context) + ", " + argumentB.toString(context) + ", " + combinator + ")"; } @Override public Tensor evaluate(EvaluationContext context) { Tensor a = argumentA.evaluate(context); Tensor b = argumentB.evaluate(context); TensorType joinedType = new TensorType.Builder(a.type(), b.type()).build(); if (hasSingleIndexedDimension(a) && hasSingleIndexedDimension(b) && a.type().dimensions().get(0).name().equals(b.type().dimensions().get(0).name())) return indexedVectorJoin((IndexedTensor)a, (IndexedTensor)b, joinedType); else if (joinedType.dimensions().size() == a.type().dimensions().size() && joinedType.dimensions().size() == 
b.type().dimensions().size()) return singleSpaceJoin(a, b, joinedType); else if (a.type().dimensions().containsAll(b.type().dimensions())) return subspaceJoin(b, a, joinedType, true); else if (b.type().dimensions().containsAll(a.type().dimensions())) return subspaceJoin(a, b, joinedType, false); else return generalJoin(a, b, joinedType); } private boolean hasSingleIndexedDimension(Tensor tensor) { return tensor.type().dimensions().size() == 1 && tensor.type().dimensions().get(0).isIndexed(); } private Tensor indexedVectorJoin(IndexedTensor a, IndexedTensor b, TensorType type) { int joinedLength = Math.min(a.dimensionSizes().size(0), b.dimensionSizes().size(0)); Iterator<Double> aIterator = a.valueIterator(); Iterator<Double> bIterator = b.valueIterator(); IndexedTensor.Builder builder = IndexedTensor.Builder.of(type, new DimensionSizes.Builder(1).set(0, joinedLength).build()); for (int i = 0; i < joinedLength; i++) builder.cell(combinator.applyAsDouble(aIterator.next(), bIterator.next()), i); return builder.build(); } /** When both tensors have the same dimensions, at most one cell matches a cell in the other tensor */ private Tensor singleSpaceJoin(Tensor a, Tensor b, TensorType joinedType) { Tensor.Builder builder = Tensor.Builder.of(joinedType); for (Iterator<Tensor.Cell> i = a.cellIterator(); i.hasNext(); ) { Map.Entry<TensorAddress, Double> aCell = i.next(); double bCellValue = b.get(aCell.getKey()); if (Double.isNaN(bCellValue)) continue; builder.cell(aCell.getKey(), combinator.applyAsDouble(aCell.getValue(), bCellValue)); } return builder.build(); } /** Join a tensor into a superspace */ private Tensor subspaceJoin(Tensor subspace, Tensor superspace, TensorType joinedType, boolean reversedArgumentOrder) { if (subspace instanceof IndexedTensor && superspace instanceof IndexedTensor) return indexedSubspaceJoin((IndexedTensor) subspace, (IndexedTensor) superspace, joinedType, reversedArgumentOrder); else return generalSubspaceJoin(subspace, superspace, 
joinedType, reversedArgumentOrder); } private Tensor indexedSubspaceJoin(IndexedTensor subspace, IndexedTensor superspace, TensorType joinedType, boolean reversedArgumentOrder) { if (subspace.size() == 0 || superspace.size() == 0) return Tensor.Builder.of(joinedType, new DimensionSizes.Builder(joinedType.dimensions().size()).build()).build(); DimensionSizes joinedSizes = joinedSize(joinedType, subspace, superspace); IndexedTensor.Builder builder = (IndexedTensor.Builder)Tensor.Builder.of(joinedType, joinedSizes); Set<String> superDimensionNames = new HashSet<>(superspace.type().dimensionNames()); superDimensionNames.removeAll(subspace.type().dimensionNames()); for (Iterator<IndexedTensor.SubspaceIterator> i = superspace.subspaceIterator(superDimensionNames, joinedSizes); i.hasNext(); ) { IndexedTensor.SubspaceIterator subspaceInSuper = i.next(); joinSubspaces(subspace.valueIterator(), subspace.size(), subspaceInSuper, subspaceInSuper.size(), reversedArgumentOrder, builder); } return builder.build(); } private void joinSubspaces(Iterator<Double> subspace, int subspaceSize, Iterator<Tensor.Cell> superspace, int superspaceSize, boolean reversedArgumentOrder, IndexedTensor.Builder builder) { int joinedLength = Math.min(subspaceSize, superspaceSize); if (reversedArgumentOrder) { for (int i = 0; i < joinedLength; i++) { Tensor.Cell supercell = superspace.next(); builder.cell(supercell, combinator.applyAsDouble(supercell.getValue(), subspace.next())); } } else { for (int i = 0; i < joinedLength; i++) { Tensor.Cell supercell = superspace.next(); builder.cell(supercell, combinator.applyAsDouble(subspace.next(), supercell.getValue())); } } } private DimensionSizes joinedSize(TensorType joinedType, IndexedTensor a, IndexedTensor b) { DimensionSizes.Builder builder = new DimensionSizes.Builder(joinedType.dimensions().size()); for (int i = 0; i < builder.dimensions(); i++) { String dimensionName = joinedType.dimensions().get(i).name(); Optional<Integer> aIndex = 
a.type().indexOfDimension(dimensionName); Optional<Integer> bIndex = b.type().indexOfDimension(dimensionName); if (aIndex.isPresent() && bIndex.isPresent()) builder.set(i, Math.min(b.dimensionSizes().size(bIndex.get()), a.dimensionSizes().size(aIndex.get()))); else if (aIndex.isPresent()) builder.set(i, a.dimensionSizes().size(aIndex.get())); else if (bIndex.isPresent()) builder.set(i, b.dimensionSizes().size(bIndex.get())); } return builder.build(); } private Tensor generalSubspaceJoin(Tensor subspace, Tensor superspace, TensorType joinedType, boolean reversedArgumentOrder) { int[] subspaceIndexes = subspaceIndexes(superspace.type(), subspace.type()); Tensor.Builder builder = Tensor.Builder.of(joinedType); for (Iterator<Tensor.Cell> i = superspace.cellIterator(); i.hasNext(); ) { Map.Entry<TensorAddress, Double> supercell = i.next(); TensorAddress subaddress = mapAddressToSubspace(supercell.getKey(), subspaceIndexes); double subspaceValue = subspace.get(subaddress); if ( ! Double.isNaN(subspaceValue)) builder.cell(supercell.getKey(), reversedArgumentOrder ? 
combinator.applyAsDouble(supercell.getValue(), subspaceValue) : combinator.applyAsDouble(subspaceValue, supercell.getValue())); } return builder.build(); } /** Returns the indexes in the superspace type which should be retained to create the subspace type */ private int[] subspaceIndexes(TensorType supertype, TensorType subtype) { int[] subspaceIndexes = new int[subtype.dimensions().size()]; for (int i = 0; i < subtype.dimensions().size(); i++) subspaceIndexes[i] = supertype.indexOfDimension(subtype.dimensions().get(i).name()).get(); return subspaceIndexes; } private TensorAddress mapAddressToSubspace(TensorAddress superAddress, int[] subspaceIndexes) { String[] subspaceLabels = new String[subspaceIndexes.length]; for (int i = 0; i < subspaceIndexes.length; i++) subspaceLabels[i] = superAddress.label(subspaceIndexes[i]); return TensorAddress.of(subspaceLabels); } /** Slow join which works for any two tensors */ private Tensor generalJoin(Tensor a, Tensor b, TensorType joinedType) { if (a instanceof IndexedTensor && b instanceof IndexedTensor) return indexedGeneralJoin((IndexedTensor) a, (IndexedTensor) b, joinedType); else return mappedHashJoin(a, b, joinedType); } private Tensor indexedGeneralJoin(IndexedTensor a, IndexedTensor b, TensorType joinedType) { DimensionSizes joinedSize = joinedSize(joinedType, a, b); Tensor.Builder builder = Tensor.Builder.of(joinedType, joinedSize); int[] aToIndexes = mapIndexes(a.type(), joinedType); int[] bToIndexes = mapIndexes(b.type(), joinedType); joinTo(a, b, joinedType, joinedSize, aToIndexes, bToIndexes, false, builder); joinTo(b, a, joinedType, joinedSize, bToIndexes, aToIndexes, true, builder); return builder.build(); } private void joinTo(IndexedTensor a, IndexedTensor b, TensorType joinedType, DimensionSizes joinedSize, int[] aToIndexes, int[] bToIndexes, boolean reversedOrder, Tensor.Builder builder) { Set<String> sharedDimensions = Sets.intersection(a.type().dimensionNames(), b.type().dimensionNames()); Set<String> 
dimensionsOnlyInA = Sets.difference(a.type().dimensionNames(), b.type().dimensionNames()); DimensionSizes aIterateSize = joinedSizeOf(a.type(), joinedType, joinedSize); DimensionSizes bIterateSize = joinedSizeOf(b.type(), joinedType, joinedSize); for (Iterator<IndexedTensor.SubspaceIterator> ia = a.subspaceIterator(dimensionsOnlyInA, aIterateSize); ia.hasNext(); ) { IndexedTensor.SubspaceIterator aSubspace = ia.next(); while (aSubspace.hasNext()) { Tensor.Cell aCell = aSubspace.next(); PartialAddress matchingBCells = partialAddress(a.type(), aSubspace.address(), sharedDimensions); for (IndexedTensor.SubspaceIterator bSubspace = b.cellIterator(matchingBCells, bIterateSize); bSubspace.hasNext(); ) { Tensor.Cell bCell = bSubspace.next(); TensorAddress joinedAddress = joinAddresses(aCell.getKey(), aToIndexes, bCell.getKey(), bToIndexes, joinedType); double joinedValue = reversedOrder ? combinator.applyAsDouble(bCell.getValue(), aCell.getValue()) : combinator.applyAsDouble(aCell.getValue(), bCell.getValue()); builder.cell(joinedAddress, joinedValue); } } } } private PartialAddress partialAddress(TensorType addressType, TensorAddress address, Set<String> retainDimensions) { PartialAddress.Builder builder = new PartialAddress.Builder(retainDimensions.size()); for (int i = 0; i < addressType.dimensions().size(); i++) if (retainDimensions.contains(addressType.dimensions().get(i).name())) builder.add(addressType.dimensions().get(i).name(), address.intLabel(i)); return builder.build(); } /** Returns the sizes from the joined sizes which are present in the type argument */ private DimensionSizes joinedSizeOf(TensorType type, TensorType joinedType, DimensionSizes joinedSizes) { DimensionSizes.Builder builder = new DimensionSizes.Builder(type.dimensions().size()); int dimensionIndex = 0; for (int i = 0; i < joinedType.dimensions().size(); i++) { if (type.dimensionNames().contains(joinedType.dimensions().get(i).name())) builder.set(dimensionIndex++, joinedSizes.size(i)); } return 
builder.build(); } private Tensor mappedGeneralJoin(Tensor a, Tensor b, TensorType joinedType) { int[] aToIndexes = mapIndexes(a.type(), joinedType); int[] bToIndexes = mapIndexes(b.type(), joinedType); Tensor.Builder builder = Tensor.Builder.of(joinedType); for (Iterator<Tensor.Cell> aIterator = a.cellIterator(); aIterator.hasNext(); ) { Map.Entry<TensorAddress, Double> aCell = aIterator.next(); for (Iterator<Tensor.Cell> bIterator = b.cellIterator(); bIterator.hasNext(); ) { Map.Entry<TensorAddress, Double> bCell = bIterator.next(); TensorAddress combinedAddress = joinAddresses(aCell.getKey(), aToIndexes, bCell.getKey(), bToIndexes, joinedType); if (combinedAddress == null) continue; builder.cell(combinedAddress, combinator.applyAsDouble(aCell.getValue(), bCell.getValue())); } } return builder.build(); } /** * Returns the an array having one entry in order for each dimension of fromType * containing the index at which toType contains the same dimension name. * That is, if the returned array contains n at index i then * fromType.dimensions().get(i).name.equals(toType.dimensions().get(n).name()) * If some dimension in fromType is not present in toType, the corresponding index will be -1 */ private int[] mapIndexes(TensorType fromType, TensorType toType) { int[] toIndexes = new int[fromType.dimensions().size()]; for (int i = 0; i < fromType.dimensions().size(); i++) toIndexes[i] = toType.indexOfDimension(fromType.dimensions().get(i).name()).orElse(-1); return toIndexes; } private TensorAddress joinAddresses(TensorAddress a, int[] aToIndexes, TensorAddress b, int[] bToIndexes, TensorType joinedType) { String[] joinedLabels = new String[joinedType.dimensions().size()]; mapContent(a, joinedLabels, aToIndexes); boolean compatible = mapContent(b, joinedLabels, bToIndexes); if ( ! compatible) return null; return TensorAddress.of(joinedLabels); } /** * Maps the content in the given list to the given array, using the given index map. 
* * @return true if the mapping was successful, false if one of the destination positions was * occupied by a different value */ private boolean mapContent(TensorAddress from, String[] to, int[] indexMap) { for (int i = 0; i < from.size(); i++) { int toIndex = indexMap[i]; if (to[toIndex] != null && ! to[toIndex].equals(from.label(i))) return false; to[toIndex] = from.label(i); } return true; } /** * Returns common dimension of a and b as a new tensor type */ private TensorType commonDimensions(Tensor a, Tensor b) { TensorType.Builder typeBuilder = new TensorType.Builder(); TensorType aType = a.type(); TensorType bType = b.type(); for (int i = 0; i < aType.dimensions().size(); ++i) { TensorType.Dimension aDim = aType.dimensions().get(i); for (int j = 0; j < bType.dimensions().size(); ++j) { TensorType.Dimension bDim = bType.dimensions().get(j); if (aDim.equals(bDim)) { typeBuilder.set(bDim); } } } return typeBuilder.build(); } private TensorAddress partialCommonAddress(Tensor.Cell cell, int[] indexMap) { TensorAddress address = cell.getKey(); String[] labels = new String[indexMap.length]; for (int i = 0; i < labels.length; ++i) { labels[i] = address.label(indexMap[i]); } return TensorAddress.of(labels); } }
throw new DdlException("Failed to remove worker. error: " + e.getMessage());
public void removeWorker(String workerIpPort) throws StarClientException { long workerId = -1; try { WorkerInfo workerInfo = client.getWorkerInfo(serviceId, workerIpPort); workerId = workerInfo.getWorkerId(); } catch (StarClientException e2) { LOG.warn(e2); return; } try { client.removeWorker(serviceId, workerId); } catch (StarClientException e) { if (e.getCode() != StarClientException.ExceptionCode.NOT_EXIST) { throw new StarClientException(e.getCode(), "remove worker error"); } } workerToBackend.remove(workerId); workerToId.remove(workerIpPort); LOG.info("remove worker {} success in StarOSAgent", workerIpPort); }
throw new StarClientException(e.getCode(), "remove worker error");
public void removeWorker(String workerIpPort) throws DdlException { long workerId = -1; if (workerToId.containsKey(workerIpPort)) { workerId = workerToId.get(workerIpPort); } else { try { WorkerInfo workerInfo = client.getWorkerInfo(serviceId, workerIpPort); workerId = workerInfo.getWorkerId(); } catch (StarClientException e) { if (e.getCode() != StarClientException.ExceptionCode.NOT_EXIST) { throw new DdlException("Failed to get worker id from starMgr. error: " + e.getMessage()); } LOG.info("worker {} not exist.", workerIpPort); return; } } try { client.removeWorker(serviceId, workerId); } catch (StarClientException e) { if (e.getCode() != StarClientException.ExceptionCode.NOT_EXIST) { throw new DdlException("Failed to remove worker. error: " + e.getMessage()); } } workerToBackend.remove(workerId); workerToId.remove(workerIpPort); LOG.info("remove worker {} success from StarMgr", workerIpPort); }
class StarOSAgent { private static final Logger LOG = LogManager.getLogger(StarOSAgent.class); private StarClient client; private long serviceId; private Map<String, Long> workerToId; private Map<Long, Long> workerToBackend; public StarOSAgent() { serviceId = -1; if (Config.integrate_starmgr) { String[] starMgrAddr = Config.starmgr_address.split(":"); if (!starMgrAddr[0].equals("127.0.0.1")) { LOG.warn("Config.starmgr_address not equal 127.0.0.1, it is {}", starMgrAddr[0]); System.exit(-1); } } client = new StarClient(); client.connectServer(Config.starmgr_address); workerToId = Maps.newHashMap(); workerToBackend = Maps.newHashMap(); } public long getServiceId() { return serviceId; } public void setServiceId(long id) { this.serviceId = id; } public List<Long> createShards(int numShards) { return Lists.newArrayList(); } public long getPrimaryBackendIdByShard(long shardId) { return 0; } public Set<Long> getBackendIdsByShard(long shardId) { return Sets.newHashSet(); } public void registerAndBootstrapService(String serviceName) { if (serviceId != -1) { return; } try { client.registerService("starrocks"); } catch (StarClientException e) { if (e.getCode() != StarClientException.ExceptionCode.ALREADY_EXIST) { LOG.warn(e); System.exit(-1); } } try { serviceId = client.bootstrapService("starrocks", serviceName); LOG.info("get serviceId: {} by bootstrapService to starMgr", serviceId); } catch (StarClientException e) { if (e.getCode() != StarClientException.ExceptionCode.ALREADY_EXIST) { LOG.warn(e); System.exit(-1); } else { getServiceId(serviceName); } } } public void getServiceId(String serviceName) { if (serviceId != -1) { return; } try { ServiceInfo serviceInfo = client.getServiceInfo(serviceName); serviceId = serviceInfo.getServiceId(); } catch (StarClientException e) { LOG.warn(e); System.exit(-1); } LOG.info("get serviceId {} from starMgr", serviceId); } public long getWorkerId(String workerIpPort) { return workerToId.get(workerIpPort); } public void addWorker(long 
backendId, String workerIpPort) { if (serviceId == -1) { LOG.warn("When addWorker serviceId is -1"); return; } if (workerToId.containsKey(workerIpPort)) { return; } long workerId = -1; try { workerId = client.addWorker(serviceId, workerIpPort); } catch (StarClientException e) { if (e.getCode() != StarClientException.ExceptionCode.ALREADY_EXIST) { LOG.warn(e); return; } else { try { WorkerInfo workerInfo = client.getWorkerInfo(serviceId, workerIpPort); workerId = workerInfo.getWorkerId(); } catch (StarClientException e2) { LOG.warn(e2); return; } LOG.info("worker {} already added in starMgr", workerId); } } workerToId.put(workerIpPort, workerId); workerToBackend.put(workerId, backendId); LOG.info("add worker {} success, backendId is {}", workerId, backendId); } public long getWorkerIdByBackendId(long backendId) { long workerId = -1; for (Map.Entry<Long, Long> entry : workerToBackend.entrySet()) { if (entry.getValue() == backendId) { workerId = entry.getKey(); break; } } return workerId; } }
class StarOSAgent { private static final Logger LOG = LogManager.getLogger(StarOSAgent.class); private StarClient client; private long serviceId; private Map<String, Long> workerToId; private Map<Long, Long> workerToBackend; public StarOSAgent() { serviceId = -1; if (Config.integrate_starmgr) { String[] starMgrAddr = Config.starmgr_address.split(":"); if (!starMgrAddr[0].equals("127.0.0.1")) { LOG.warn("Config.starmgr_address not equal 127.0.0.1, it is {}", starMgrAddr[0]); System.exit(-1); } } client = new StarClient(); client.connectServer(Config.starmgr_address); workerToId = Maps.newHashMap(); workerToBackend = Maps.newHashMap(); } public long getServiceId() { return serviceId; } public void setServiceId(long id) { this.serviceId = id; } public List<Long> createShards(int numShards) { return Lists.newArrayList(); } public long getPrimaryBackendIdByShard(long shardId) { return 0; } public Set<Long> getBackendIdsByShard(long shardId) { return Sets.newHashSet(); } public void registerAndBootstrapService(String serviceName) { if (serviceId != -1) { return; } try { client.registerService("starrocks"); } catch (StarClientException e) { if (e.getCode() != StarClientException.ExceptionCode.ALREADY_EXIST) { LOG.warn(e); System.exit(-1); } } try { serviceId = client.bootstrapService("starrocks", serviceName); LOG.info("get serviceId: {} by bootstrapService to starMgr", serviceId); } catch (StarClientException e) { if (e.getCode() != StarClientException.ExceptionCode.ALREADY_EXIST) { LOG.warn(e); System.exit(-1); } else { getServiceId(serviceName); } } } public void getServiceId(String serviceName) { if (serviceId != -1) { return; } try { ServiceInfo serviceInfo = client.getServiceInfo(serviceName); serviceId = serviceInfo.getServiceId(); } catch (StarClientException e) { LOG.warn(e); System.exit(-1); } LOG.info("get serviceId {} from starMgr", serviceId); } public long getWorkerId(String workerIpPort) { return workerToId.get(workerIpPort); } public void addWorker(long 
backendId, String workerIpPort) { if (serviceId == -1) { LOG.warn("When addWorker serviceId is -1"); return; } if (workerToId.containsKey(workerIpPort)) { return; } long workerId = -1; try { workerId = client.addWorker(serviceId, workerIpPort); } catch (StarClientException e) { if (e.getCode() != StarClientException.ExceptionCode.ALREADY_EXIST) { LOG.warn(e); return; } else { try { WorkerInfo workerInfo = client.getWorkerInfo(serviceId, workerIpPort); workerId = workerInfo.getWorkerId(); } catch (StarClientException e2) { LOG.warn(e2); return; } LOG.info("worker {} already added in starMgr", workerId); } } workerToId.put(workerIpPort, workerId); workerToBackend.put(workerId, backendId); LOG.info("add worker {} success, backendId is {}", workerId, backendId); } public long getWorkerIdByBackendId(long backendId) { long workerId = -1; for (Map.Entry<Long, Long> entry : workerToBackend.entrySet()) { if (entry.getValue() == backendId) { workerId = entry.getKey(); break; } } return workerId; } }
ten parameters in a constructor, this is not that elegant
private DataSourceParameter crateDataSourceParameter() { return new DataSourceParameter("jdbc:mysql: }
return new DataSourceParameter("jdbc:mysql:
private DataSourceParameter crateDataSourceParameter() { return new DataSourceParameter("jdbc:mysql: }
class DataSourceParameterConverterTest { @Test public void assertGetDataSourceConfigurationMap() { Map<String, DataSourceParameter> dataSourceParameterMap = new HashMap<>(2, 1); dataSourceParameterMap.put("ds_0", crateDataSourceParameter()); dataSourceParameterMap.put("ds_1", crateDataSourceParameter()); Map<String, DataSourceConfiguration> actual = DataSourceParameterConverter.getDataSourceConfigurationMap(dataSourceParameterMap); assertThat(actual.size(), is(2)); assertParameter(actual.get("ds_0")); assertParameter(actual.get("ds_1")); } private void assertParameter(final DataSourceConfiguration actual) { Map<String, Object> props = actual.getProps(); assertThat(props.size(), is(9)); assertThat(props.get("jdbcUrl"), is("jdbc:mysql: assertThat(props.get("username"), is("root")); assertThat(props.get("password"), is("root")); assertNull(props.get("maximumPoolSize")); assertNull(props.get("minimumIdle")); assertNull(props.get("connectionTimeout")); assertNull(props.get("idleTimeout")); assertNull(props.get("maxLifetime")); } @Test public void assertGetDataSourceParameterMapFromYamlConfiguration() { YamlDataSourceParameter yamlDataSourceParameter0 = new YamlDataSourceParameter(); yamlDataSourceParameter0.setUrl("jdbc:mysql: yamlDataSourceParameter0.setCustomPoolProps(getCustomPoolProps()); setYamlDataSourceParameterPropertyWithoutUrl(yamlDataSourceParameter0); YamlDataSourceParameter yamlDataSourceParameter1 = new YamlDataSourceParameter(); yamlDataSourceParameter1.setUrl("jdbc:mysql: yamlDataSourceParameter1.setCustomPoolProps(getCustomPoolProps()); setYamlDataSourceParameterPropertyWithoutUrl(yamlDataSourceParameter1); Map<String, YamlDataSourceParameter> yamlDataSourceParameterMap = new HashMap<>(2, 1); yamlDataSourceParameterMap.put("ds_0", yamlDataSourceParameter0); yamlDataSourceParameterMap.put("ds_1", yamlDataSourceParameter1); Map<String, DataSourceParameter> actualDataSourceParameters = 
DataSourceParameterConverter.getDataSourceParameterMapFromYamlConfiguration(yamlDataSourceParameterMap); assertThat(actualDataSourceParameters.size(), is(2)); assertThat(actualDataSourceParameters.get("ds_0").getUrl(), is("jdbc:mysql: assertThat(actualDataSourceParameters.get("ds_1").getUrl(), is("jdbc:mysql: assertDataSourceParameter(actualDataSourceParameters.get("ds_0")); assertDataSourceParameter(actualDataSourceParameters.get("ds_1")); } private void setYamlDataSourceParameterPropertyWithoutUrl(final YamlDataSourceParameter yamlDataSourceParameter) { yamlDataSourceParameter.setMaxPoolSize(50); yamlDataSourceParameter.setMinPoolSize(1); yamlDataSourceParameter.setConnectionTimeoutMilliseconds(30 * 1000L); yamlDataSourceParameter.setIdleTimeoutMilliseconds(60 * 1000L); yamlDataSourceParameter.setMaxLifetimeMilliseconds(0L); yamlDataSourceParameter.setUsername("root"); yamlDataSourceParameter.setPassword("root"); } private void assertDataSourceParameter(final DataSourceParameter dataSourceParameter) { assertThat(dataSourceParameter.getMaxPoolSize(), is(50)); assertThat(dataSourceParameter.getMinPoolSize(), is(1)); assertThat(dataSourceParameter.getConnectionTimeoutMilliseconds(), is(30 * 1000L)); assertThat(dataSourceParameter.getIdleTimeoutMilliseconds(), is(60 * 1000L)); assertThat(dataSourceParameter.getMaxLifetimeMilliseconds(), is(0L)); assertThat(dataSourceParameter.getUsername(), is("root")); assertThat(dataSourceParameter.getPassword(), is("root")); assertThat(dataSourceParameter.getCustomPoolProps().size(), is(2)); assertThat(dataSourceParameter.getCustomPoolProps().get("maxPoolSize"), is(30)); assertThat(dataSourceParameter.getCustomPoolProps().get("idleTimeoutMilliseconds"), is("30000")); } private Properties getCustomPoolProps() { Properties result = new Properties(); result.put("maxPoolSize", 30); result.put("idleTimeoutMilliseconds", "30000"); return result; } }
class DataSourceParameterConverterTest { @Test public void assertGetDataSourceConfigurationMap() { Map<String, DataSourceParameter> dataSourceParameterMap = new HashMap<>(2, 1); dataSourceParameterMap.put("ds_0", crateDataSourceParameter()); dataSourceParameterMap.put("ds_1", crateDataSourceParameter()); Map<String, DataSourceConfiguration> actual = DataSourceParameterConverter.getDataSourceConfigurationMap(dataSourceParameterMap); assertThat(actual.size(), is(2)); assertParameter(actual.get("ds_0")); assertParameter(actual.get("ds_1")); } private void assertParameter(final DataSourceConfiguration actual) { Map<String, Object> props = actual.getProps(); assertThat(props.size(), is(9)); assertThat(props.get("jdbcUrl"), is("jdbc:mysql: assertThat(props.get("username"), is("root")); assertThat(props.get("password"), is("root")); assertNull(props.get("maximumPoolSize")); assertNull(props.get("minimumIdle")); assertNull(props.get("connectionTimeout")); assertNull(props.get("idleTimeout")); assertNull(props.get("maxLifetime")); } @Test public void assertGetDataSourceParameterMapFromYamlConfiguration() { YamlDataSourceParameter yamlDataSourceParameter0 = new YamlDataSourceParameter(); yamlDataSourceParameter0.setUrl("jdbc:mysql: yamlDataSourceParameter0.setCustomPoolProps(getCustomPoolProps()); setYamlDataSourceParameterPropertyWithoutUrl(yamlDataSourceParameter0); YamlDataSourceParameter yamlDataSourceParameter1 = new YamlDataSourceParameter(); yamlDataSourceParameter1.setUrl("jdbc:mysql: yamlDataSourceParameter1.setCustomPoolProps(getCustomPoolProps()); setYamlDataSourceParameterPropertyWithoutUrl(yamlDataSourceParameter1); Map<String, YamlDataSourceParameter> yamlDataSourceParameterMap = new HashMap<>(2, 1); yamlDataSourceParameterMap.put("ds_0", yamlDataSourceParameter0); yamlDataSourceParameterMap.put("ds_1", yamlDataSourceParameter1); Map<String, DataSourceParameter> actualDataSourceParameters = 
DataSourceParameterConverter.getDataSourceParameterMapFromYamlConfiguration(yamlDataSourceParameterMap); assertThat(actualDataSourceParameters.size(), is(2)); assertThat(actualDataSourceParameters.get("ds_0").getUrl(), is("jdbc:mysql: assertThat(actualDataSourceParameters.get("ds_1").getUrl(), is("jdbc:mysql: assertDataSourceParameter(actualDataSourceParameters.get("ds_0")); assertDataSourceParameter(actualDataSourceParameters.get("ds_1")); } private void setYamlDataSourceParameterPropertyWithoutUrl(final YamlDataSourceParameter yamlDataSourceParameter) { yamlDataSourceParameter.setMaxPoolSize(50); yamlDataSourceParameter.setMinPoolSize(1); yamlDataSourceParameter.setConnectionTimeoutMilliseconds(30 * 1000L); yamlDataSourceParameter.setIdleTimeoutMilliseconds(60 * 1000L); yamlDataSourceParameter.setMaxLifetimeMilliseconds(0L); yamlDataSourceParameter.setUsername("root"); yamlDataSourceParameter.setPassword("root"); } private void assertDataSourceParameter(final DataSourceParameter dataSourceParameter) { assertThat(dataSourceParameter.getMaxPoolSize(), is(50)); assertThat(dataSourceParameter.getMinPoolSize(), is(1)); assertThat(dataSourceParameter.getConnectionTimeoutMilliseconds(), is(30 * 1000L)); assertThat(dataSourceParameter.getIdleTimeoutMilliseconds(), is(60 * 1000L)); assertThat(dataSourceParameter.getMaxLifetimeMilliseconds(), is(0L)); assertThat(dataSourceParameter.getUsername(), is("root")); assertThat(dataSourceParameter.getPassword(), is("root")); assertThat(dataSourceParameter.getCustomPoolProps().size(), is(2)); assertThat(dataSourceParameter.getCustomPoolProps().get("maxPoolSize"), is(30)); assertThat(dataSourceParameter.getCustomPoolProps().get("idleTimeoutMilliseconds"), is("30000")); } private Properties getCustomPoolProps() { Properties result = new Properties(); result.put("maxPoolSize", 30); result.put("idleTimeoutMilliseconds", "30000"); return result; } }
@bobby-richard Have a look at https://issues.apache.org/jira/browse/FLINK-29267
protected JdbcSerializationConverter createExternalConverter(LogicalType type) { switch (type.getTypeRoot()) { case CHAR: case VARCHAR: return (val, index, statement) -> { String valString = val.getString(index).toString(); if (UUID_REGEX_PATTERN.matcher(valString).matches()) { statement.setObject(index, UUID.fromString(valString)); } else { statement.setString(index, valString); } }; } return super.createExternalConverter(type); }
if (UUID_REGEX_PATTERN.matcher(valString).matches()) {
protected JdbcSerializationConverter createExternalConverter(LogicalType type) { switch (type.getTypeRoot()) { case CHAR: case VARCHAR: return (val, index, statement) -> { String valString = val.getString(index).toString(); if (UUID_REGEX_PATTERN.matcher(valString).matches()) { statement.setObject(index, UUID.fromString(valString)); } else { statement.setString(index, valString); } }; } return super.createExternalConverter(type); }
class PostgresRowConverter extends AbstractJdbcRowConverter { private static final long serialVersionUID = 1L; private static final Pattern UUID_REGEX_PATTERN = Pattern.compile("^[{]?[0-9a-fA-F]{8}-([0-9a-fA-F]{4}-){3}[0-9a-fA-F]{12}[}]?$"); @Override public String converterName() { return "PostgreSQL"; } public PostgresRowConverter(RowType rowType) { super(rowType); } @Override public JdbcDeserializationConverter createInternalConverter(LogicalType type) { LogicalTypeRoot root = type.getTypeRoot(); if (root == LogicalTypeRoot.ARRAY) { ArrayType arrayType = (ArrayType) type; return createPostgresArrayConverter(arrayType); } else { return createPrimitiveConverter(type); } } @Override @Override protected JdbcSerializationConverter createNullableExternalConverter(LogicalType type) { LogicalTypeRoot root = type.getTypeRoot(); if (root == LogicalTypeRoot.ARRAY) { return (val, index, statement) -> { throw new IllegalStateException( String.format( "Writing ARRAY type is not yet supported in JDBC:%s.", converterName())); }; } else { return super.createNullableExternalConverter(type); } } private JdbcDeserializationConverter createPostgresArrayConverter(ArrayType arrayType) { final Class<?> elementClass = LogicalTypeUtils.toInternalConversionClass(arrayType.getElementType()); final JdbcDeserializationConverter elementConverter = createNullableInternalConverter(arrayType.getElementType()); return val -> { PgArray pgArray = (PgArray) val; Object[] in = (Object[]) pgArray.getArray(); final Object[] array = (Object[]) Array.newInstance(elementClass, in.length); for (int i = 0; i < in.length; i++) { array[i] = elementConverter.deserialize(in[i]); } return new GenericArrayData(array); }; } private JdbcDeserializationConverter createPrimitiveConverter(LogicalType type) { return super.createInternalConverter(type); } }
class PostgresRowConverter extends AbstractJdbcRowConverter { private static final long serialVersionUID = 1L; private static final Pattern UUID_REGEX_PATTERN = Pattern.compile("^[{]?[0-9a-fA-F]{8}-([0-9a-fA-F]{4}-){3}[0-9a-fA-F]{12}[}]?$"); @Override public String converterName() { return "PostgreSQL"; } public PostgresRowConverter(RowType rowType) { super(rowType); } @Override public JdbcDeserializationConverter createInternalConverter(LogicalType type) { LogicalTypeRoot root = type.getTypeRoot(); if (root == LogicalTypeRoot.ARRAY) { ArrayType arrayType = (ArrayType) type; return createPostgresArrayConverter(arrayType); } else { return createPrimitiveConverter(type); } } @Override @Override protected JdbcSerializationConverter createNullableExternalConverter(LogicalType type) { LogicalTypeRoot root = type.getTypeRoot(); if (root == LogicalTypeRoot.ARRAY) { return (val, index, statement) -> { throw new IllegalStateException( String.format( "Writing ARRAY type is not yet supported in JDBC:%s.", converterName())); }; } else { return super.createNullableExternalConverter(type); } } private JdbcDeserializationConverter createPostgresArrayConverter(ArrayType arrayType) { final Class<?> elementClass = LogicalTypeUtils.toInternalConversionClass(arrayType.getElementType()); final JdbcDeserializationConverter elementConverter = createNullableInternalConverter(arrayType.getElementType()); return val -> { PgArray pgArray = (PgArray) val; Object[] in = (Object[]) pgArray.getArray(); final Object[] array = (Object[]) Array.newInstance(elementClass, in.length); for (int i = 0; i < in.length; i++) { array[i] = elementConverter.deserialize(in[i]); } return new GenericArrayData(array); }; } private JdbcDeserializationConverter createPrimitiveConverter(LogicalType type) { return super.createInternalConverter(type); } }
This is how the cool kids on the street do it
private static void addRestApiHandler(ContainerCluster<?> cluster, Options options) { String handlerClassName = options.useNewRestapiHandler ? "com.yahoo.document.restapi.resource.DocumentV1ApiHandler" : "com.yahoo.document.restapi.resource.RestApi"; var handler = newVespaClientHandler(handlerClassName, "/document/v1/*", options); cluster.addComponent(handler); if (!options.useNewRestapiHandler) { var executor = new Threadpool( "restapi-handler", cluster, options.restApiThreadpoolOptions, options.feedThreadPoolSizeFactor); handler.inject(executor); handler.addComponent(executor); } }
handler.inject(executor);
private static void addRestApiHandler(ContainerCluster<?> cluster, Options options) { String handlerClassName = options.useNewRestapiHandler ? "com.yahoo.document.restapi.resource.DocumentV1ApiHandler" : "com.yahoo.document.restapi.resource.RestApi"; var handler = newVespaClientHandler(handlerClassName, "/document/v1/*", options); cluster.addComponent(handler); if (!options.useNewRestapiHandler) { var executor = new Threadpool( "restapi-handler", cluster, options.restApiThreadpoolOptions, options.feedThreadPoolSizeFactor); handler.inject(executor); handler.addComponent(executor); } }
class ContainerDocumentApi { private static final int FALLBACK_MAX_POOL_SIZE = 0; private static final int FALLBACK_CORE_POOL_SIZE = 0; public ContainerDocumentApi(ContainerCluster<?> cluster, Options options) { addRestApiHandler(cluster, options); addFeedHandler(cluster, options); } private static void addFeedHandler(ContainerCluster<?> cluster, Options options) { String bindingSuffix = ContainerCluster.RESERVED_URI_PREFIX + "/feedapi"; var handler = newVespaClientHandler( "com.yahoo.vespa.http.server.FeedHandler", bindingSuffix, options); cluster.addComponent(handler); var executor = new Threadpool( "feedapi-handler", cluster, options.feedApiThreadpoolOptions, options.feedThreadPoolSizeFactor); handler.inject(executor); handler.addComponent(executor); } private static Handler<AbstractConfigProducer<?>> newVespaClientHandler( String componentId, String bindingSuffix, Options options) { Handler<AbstractConfigProducer<?>> handler = new Handler<>(new ComponentModel( BundleInstantiationSpecification.getFromStrings(componentId, null, "vespaclient-container-plugin"), "")); if (options.bindings.isEmpty()) { handler.addServerBindings( SystemBindingPattern.fromHttpPath(bindingSuffix), SystemBindingPattern.fromHttpPath(bindingSuffix + '/')); } else { for (String rootBinding : options.bindings) { String pathWithoutLeadingSlash = bindingSuffix.substring(1); handler.addServerBindings( UserBindingPattern.fromPattern(rootBinding + pathWithoutLeadingSlash), UserBindingPattern.fromPattern(rootBinding + pathWithoutLeadingSlash + '/')); } } return handler; } public static final class Options { private final Collection<String> bindings; private final ContainerThreadpool.UserOptions restApiThreadpoolOptions; private final ContainerThreadpool.UserOptions feedApiThreadpoolOptions; private final double feedThreadPoolSizeFactor; private final boolean useNewRestapiHandler; public Options(Collection<String> bindings, ContainerThreadpool.UserOptions restApiThreadpoolOptions, 
ContainerThreadpool.UserOptions feedApiThreadpoolOptions, double feedThreadPoolSizeFactor, boolean useNewRestapiHandler) { this.bindings = Collections.unmodifiableCollection(bindings); this.restApiThreadpoolOptions = restApiThreadpoolOptions; this.feedApiThreadpoolOptions = feedApiThreadpoolOptions; this.feedThreadPoolSizeFactor = feedThreadPoolSizeFactor; this.useNewRestapiHandler = useNewRestapiHandler; } } private static class Threadpool extends ContainerThreadpool { private final ContainerCluster<?> cluster; private final double feedThreadPoolSizeFactor; Threadpool(String name, ContainerCluster<?> cluster, ContainerThreadpool.UserOptions threadpoolOptions, double feedThreadPoolSizeFactor ) { super(name, threadpoolOptions); this.cluster = cluster; this.feedThreadPoolSizeFactor = feedThreadPoolSizeFactor; } @Override public void getConfig(ContainerThreadpoolConfig.Builder builder) { super.getConfig(builder); if (hasUserOptions()) return; builder.maxThreads(maxPoolSize()); builder.minThreads(minPoolSize()); builder.queueSize(500); } private int maxPoolSize() { double vcpu = vcpu(cluster); if (vcpu == 0) return FALLBACK_MAX_POOL_SIZE; return Math.max(2, (int)Math.ceil(vcpu * feedThreadPoolSizeFactor)); } private int minPoolSize() { double vcpu = vcpu(cluster); if (vcpu == 0) return FALLBACK_CORE_POOL_SIZE; return Math.max(1, (int)Math.ceil(vcpu * feedThreadPoolSizeFactor * 0.5)); } } }
class ContainerDocumentApi { private static final int FALLBACK_MAX_POOL_SIZE = 0; private static final int FALLBACK_CORE_POOL_SIZE = 0; public ContainerDocumentApi(ContainerCluster<?> cluster, Options options) { addRestApiHandler(cluster, options); addFeedHandler(cluster, options); } private static void addFeedHandler(ContainerCluster<?> cluster, Options options) { String bindingSuffix = ContainerCluster.RESERVED_URI_PREFIX + "/feedapi"; var handler = newVespaClientHandler( "com.yahoo.vespa.http.server.FeedHandler", bindingSuffix, options); cluster.addComponent(handler); var executor = new Threadpool( "feedapi-handler", cluster, options.feedApiThreadpoolOptions, options.feedThreadPoolSizeFactor); handler.inject(executor); handler.addComponent(executor); } private static Handler<AbstractConfigProducer<?>> newVespaClientHandler( String componentId, String bindingSuffix, Options options) { Handler<AbstractConfigProducer<?>> handler = new Handler<>(new ComponentModel( BundleInstantiationSpecification.getFromStrings(componentId, null, "vespaclient-container-plugin"), "")); if (options.bindings.isEmpty()) { handler.addServerBindings( SystemBindingPattern.fromHttpPath(bindingSuffix), SystemBindingPattern.fromHttpPath(bindingSuffix + '/')); } else { for (String rootBinding : options.bindings) { String pathWithoutLeadingSlash = bindingSuffix.substring(1); handler.addServerBindings( UserBindingPattern.fromPattern(rootBinding + pathWithoutLeadingSlash), UserBindingPattern.fromPattern(rootBinding + pathWithoutLeadingSlash + '/')); } } return handler; } public static final class Options { private final Collection<String> bindings; private final ContainerThreadpool.UserOptions restApiThreadpoolOptions; private final ContainerThreadpool.UserOptions feedApiThreadpoolOptions; private final double feedThreadPoolSizeFactor; private final boolean useNewRestapiHandler; public Options(Collection<String> bindings, ContainerThreadpool.UserOptions restApiThreadpoolOptions, 
ContainerThreadpool.UserOptions feedApiThreadpoolOptions, double feedThreadPoolSizeFactor, boolean useNewRestapiHandler) { this.bindings = Collections.unmodifiableCollection(bindings); this.restApiThreadpoolOptions = restApiThreadpoolOptions; this.feedApiThreadpoolOptions = feedApiThreadpoolOptions; this.feedThreadPoolSizeFactor = feedThreadPoolSizeFactor; this.useNewRestapiHandler = useNewRestapiHandler; } } private static class Threadpool extends ContainerThreadpool { private final ContainerCluster<?> cluster; private final double feedThreadPoolSizeFactor; Threadpool(String name, ContainerCluster<?> cluster, ContainerThreadpool.UserOptions threadpoolOptions, double feedThreadPoolSizeFactor ) { super(name, threadpoolOptions); this.cluster = cluster; this.feedThreadPoolSizeFactor = feedThreadPoolSizeFactor; } @Override public void getConfig(ContainerThreadpoolConfig.Builder builder) { super.getConfig(builder); if (hasUserOptions()) return; builder.maxThreads(maxPoolSize()); builder.minThreads(minPoolSize()); builder.queueSize(500); } private int maxPoolSize() { double vcpu = vcpu(cluster); if (vcpu == 0) return FALLBACK_MAX_POOL_SIZE; return Math.max(2, (int)Math.ceil(vcpu * feedThreadPoolSizeFactor)); } private int minPoolSize() { double vcpu = vcpu(cluster); if (vcpu == 0) return FALLBACK_CORE_POOL_SIZE; return Math.max(1, (int)Math.ceil(vcpu * feedThreadPoolSizeFactor * 0.5)); } } }
We should use default values if `locationHeader` and `allowedMethods` are null or *empty* and document it.
public DefaultRedirectStrategy(int maxAttempts, String locationHeader, Set<HttpMethod> allowedMethods) { if (maxAttempts < 0) { throw logger.logExceptionAsError(new IllegalArgumentException("Max attempts cannot be less than 0.")); } this.maxAttempts = maxAttempts; this.locationHeader = locationHeader == null ? DEFAULT_REDIRECT_LOCATION_HEADER_NAME : locationHeader; this.redirectMethods = allowedMethods == null ? DEFAULT_REDIRECT_ALLOWED_METHODS : allowedMethods; }
this.redirectMethods = allowedMethods == null ? DEFAULT_REDIRECT_ALLOWED_METHODS : allowedMethods;
public DefaultRedirectStrategy(int maxAttempts, String locationHeader, Set<HttpMethod> allowedMethods) { if (maxAttempts < 0) { throw logger.logExceptionAsError(new IllegalArgumentException("Max attempts cannot be less than 0.")); } this.maxAttempts = maxAttempts; if (CoreUtils.isNullOrEmpty(locationHeader)) { logger.error("'locationHeader' provided as null will be defaulted to {}", DEFAULT_REDIRECT_LOCATION_HEADER_NAME); this.locationHeader = DEFAULT_REDIRECT_LOCATION_HEADER_NAME; } else { this.locationHeader = locationHeader; } if (CoreUtils.isNullOrEmpty(allowedMethods)) { logger.error("'allowedMethods' provided as null will be defaulted to {}", DEFAULT_REDIRECT_ALLOWED_METHODS); this.redirectMethods = DEFAULT_REDIRECT_ALLOWED_METHODS; } else { this.redirectMethods = allowedMethods; } }
class DefaultRedirectStrategy implements RedirectStrategy { private final ClientLogger logger = new ClientLogger(DefaultRedirectStrategy.class); private static final int DEFAULT_MAX_REDIRECT_ATTEMPTS = 3; private static final String DEFAULT_REDIRECT_LOCATION_HEADER_NAME = "Location"; private static final int PERMANENT_REDIRECT_STATUS_CODE = 308; private static final int TEMPORARY_REDIRECT_STATUS_CODE = 307; private static final Set<HttpMethod> DEFAULT_REDIRECT_ALLOWED_METHODS = new HashSet<HttpMethod>() { { add(HttpMethod.GET); add(HttpMethod.HEAD); } }; private final int maxAttempts; private final String locationHeader; private final Set<HttpMethod> redirectMethods; /** * Creates an instance of {@link DefaultRedirectStrategy} with a maximum number of redirect attempts 3, * header name "Location" to locate the redirect url in the response headers and {@link HttpMethod * and {@link HttpMethod */ public DefaultRedirectStrategy() { this(DEFAULT_MAX_REDIRECT_ATTEMPTS, DEFAULT_REDIRECT_LOCATION_HEADER_NAME, DEFAULT_REDIRECT_ALLOWED_METHODS); } /** * Creates an instance of {@link DefaultRedirectStrategy} with the provided number of redirect attempts and * default header name "Location" to locate the redirect url in the response headers and {@link HttpMethod * and {@link HttpMethod * * @param maxAttempts The max number of redirect attempts that can be made. * @throws IllegalArgumentException if {@code maxAttempts} is less than 0. */ public DefaultRedirectStrategy(int maxAttempts) { this(maxAttempts, DEFAULT_REDIRECT_LOCATION_HEADER_NAME, DEFAULT_REDIRECT_ALLOWED_METHODS); } /** * Creates an instance of {@link DefaultRedirectStrategy}. * * @param maxAttempts The max number of redirect attempts that can be made. * @param locationHeader The header name containing the redirect URL. * @param allowedMethods The set of {@link HttpMethod} that are allowed to be redirected. * @throws IllegalArgumentException if {@code maxAttempts} is less than 0. 
*/ @Override public boolean shouldAttemptRedirect(HttpPipelineCallContext context, HttpResponse httpResponse, int tryCount, Set<String> attemptedRedirectUrls) { String redirectUrl = tryGetRedirectHeader(httpResponse.getHeaders(), this.getLocationHeader()); if (isValidRedirectCount(tryCount) && redirectUrl != null && !alreadyAttemptedRedirectUrl(redirectUrl, attemptedRedirectUrls) && isValidRedirectStatusCode(httpResponse.getStatusCode()) && isAllowedRedirectMethod(httpResponse.getRequest().getHttpMethod())) { logger.verbose("[Redirecting] Try count: {}, Attempted Redirect URLs: {}", tryCount, attemptedRedirectUrls.toString()); attemptedRedirectUrls.add(redirectUrl); return true; } else { return false; } } @Override public HttpRequest createRedirectRequest(HttpResponse httpResponse) { String responseLocation = tryGetRedirectHeader(httpResponse.getHeaders(), this.getLocationHeader()); return httpResponse.getRequest().setUrl(responseLocation); } @Override public int getMaxAttempts() { return maxAttempts; } @Override public String getLocationHeader() { return locationHeader; } @Override public Set<HttpMethod> getAllowedMethods() { return redirectMethods; } /** * Check if the redirect url provided in the response headers is already attempted. * * @param redirectUrl the redirect url provided in the response header. * @param attemptedRedirectUrls the set containing a list of attempted redirect locations. * @return {@code true} if the redirectUrl provided in the response header is already being attempted for redirect * , {@code false} otherwise. 
*/ private boolean alreadyAttemptedRedirectUrl(String redirectUrl, Set<String> attemptedRedirectUrls) { if (attemptedRedirectUrls.contains(redirectUrl)) { logger.error(String.format("Request was redirected more than once to: %s", redirectUrl)); return true; } return false; } /** * Check if the attempt count of the redirect is less than the {@code maxAttempts} * * @param tryCount the try count for the HTTP request associated to the HTTP response. * @return {@code true} if the {@code tryCount} is greater than the {@code maxAttempts}, {@code false} otherwise. */ private boolean isValidRedirectCount(int tryCount) { if (tryCount >= getMaxAttempts()) { logger.error(String.format("Request has been redirected more than %d times.", getMaxAttempts())); return false; } return true; } /** * Check if the request http method is a valid redirect method. * * @param httpMethod the http method of the request. * @return {@code true} if the request {@code httpMethod} is a valid http redirect method, {@code false} otherwise. */ private boolean isAllowedRedirectMethod(HttpMethod httpMethod) { if (getAllowedMethods().contains(httpMethod)) { return true; } else { logger.error( String.format("Request was redirected from an invalid redirect allowed method: %s", httpMethod)); return false; } } /** * Checks if the incoming request status code is a valid redirect status code. * * @param statusCode the status code of the incoming request. * @return {@code true} if the request {@code statusCode} is a valid http redirect method, {@code false} otherwise. */ private boolean isValidRedirectStatusCode(int statusCode) { return statusCode == HttpURLConnection.HTTP_MOVED_TEMP || statusCode == HttpURLConnection.HTTP_MOVED_PERM || statusCode == PERMANENT_REDIRECT_STATUS_CODE || statusCode == TEMPORARY_REDIRECT_STATUS_CODE; } /** * Gets the redirect url from the response headers. * * @param headers the http response headers. * @param headerName the header name to look up value for. 
* @return the header value for the provided header name, {@code null} otherwise. */ private String tryGetRedirectHeader(HttpHeaders headers, String headerName) { String headerValue = headers.getValue(headerName); if (CoreUtils.isNullOrEmpty(headerValue)) { logger.error(String.format("Redirect url was null for header name: %s, Request redirect was terminated" , headerName)); return null; } else { return headerValue; } } }
class DefaultRedirectStrategy implements RedirectStrategy { private final ClientLogger logger = new ClientLogger(DefaultRedirectStrategy.class); private static final int DEFAULT_MAX_REDIRECT_ATTEMPTS = 3; private static final String DEFAULT_REDIRECT_LOCATION_HEADER_NAME = "Location"; private static final int PERMANENT_REDIRECT_STATUS_CODE = 308; private static final int TEMPORARY_REDIRECT_STATUS_CODE = 307; private static final Set<HttpMethod> DEFAULT_REDIRECT_ALLOWED_METHODS = new HashSet<HttpMethod>() { { add(HttpMethod.GET); add(HttpMethod.HEAD); } }; private final int maxAttempts; private final String locationHeader; private final Set<HttpMethod> redirectMethods; /** * Creates an instance of {@link DefaultRedirectStrategy} with a maximum number of redirect attempts 3, * header name "Location" to locate the redirect url in the response headers and {@link HttpMethod * and {@link HttpMethod */ public DefaultRedirectStrategy() { this(DEFAULT_MAX_REDIRECT_ATTEMPTS, DEFAULT_REDIRECT_LOCATION_HEADER_NAME, DEFAULT_REDIRECT_ALLOWED_METHODS); } /** * Creates an instance of {@link DefaultRedirectStrategy} with the provided number of redirect attempts and * default header name "Location" to locate the redirect url in the response headers and {@link HttpMethod * and {@link HttpMethod * * @param maxAttempts The max number of redirect attempts that can be made. * @throws IllegalArgumentException if {@code maxAttempts} is less than 0. */ public DefaultRedirectStrategy(int maxAttempts) { this(maxAttempts, DEFAULT_REDIRECT_LOCATION_HEADER_NAME, DEFAULT_REDIRECT_ALLOWED_METHODS); } /** * Creates an instance of {@link DefaultRedirectStrategy}. * * @param maxAttempts The max number of redirect attempts that can be made. * @param locationHeader The header name containing the redirect URL. * @param allowedMethods The set of {@link HttpMethod} that are allowed to be redirected. * @throws IllegalArgumentException if {@code maxAttempts} is less than 0. 
*/ @Override public boolean shouldAttemptRedirect(HttpPipelineCallContext context, HttpResponse httpResponse, int tryCount, Set<String> attemptedRedirectUrls) { String redirectUrl = tryGetRedirectHeader(httpResponse.getHeaders(), this.getLocationHeader()); if (isValidRedirectCount(tryCount) && redirectUrl != null && !alreadyAttemptedRedirectUrl(redirectUrl, attemptedRedirectUrls) && isValidRedirectStatusCode(httpResponse.getStatusCode()) && isAllowedRedirectMethod(httpResponse.getRequest().getHttpMethod())) { logger.verbose("[Redirecting] Try count: {}, Attempted Redirect URLs: {}", tryCount, attemptedRedirectUrls.toString()); attemptedRedirectUrls.add(redirectUrl); return true; } else { return false; } } @Override public HttpRequest createRedirectRequest(HttpResponse httpResponse) { String responseLocation = tryGetRedirectHeader(httpResponse.getHeaders(), this.getLocationHeader()); return httpResponse.getRequest().setUrl(responseLocation); } @Override public int getMaxAttempts() { return maxAttempts; } @Override public String getLocationHeader() { return locationHeader; } @Override public Set<HttpMethod> getAllowedMethods() { return redirectMethods; } /** * Check if the redirect url provided in the response headers is already attempted. * * @param redirectUrl the redirect url provided in the response header. * @param attemptedRedirectUrls the set containing a list of attempted redirect locations. * @return {@code true} if the redirectUrl provided in the response header is already being attempted for redirect * , {@code false} otherwise. */ private boolean alreadyAttemptedRedirectUrl(String redirectUrl, Set<String> attemptedRedirectUrls) { if (attemptedRedirectUrls.contains(redirectUrl)) { logger.error("Request was redirected more than once to: {}", redirectUrl); return true; } return false; } /** * Check if the attempt count of the redirect is less than the {@code maxAttempts} * * @param tryCount the try count for the HTTP request associated to the HTTP response. 
* @return {@code true} if the {@code tryCount} is greater than the {@code maxAttempts}, {@code false} otherwise. */ private boolean isValidRedirectCount(int tryCount) { if (tryCount >= getMaxAttempts()) { logger.error("Request has been redirected more than {} times.", getMaxAttempts()); return false; } return true; } /** * Check if the request http method is a valid redirect method. * * @param httpMethod the http method of the request. * @return {@code true} if the request {@code httpMethod} is a valid http redirect method, {@code false} otherwise. */ private boolean isAllowedRedirectMethod(HttpMethod httpMethod) { if (getAllowedMethods().contains(httpMethod)) { return true; } else { logger.error("Request was redirected from an invalid redirect allowed method: {}", httpMethod); return false; } } /** * Checks if the incoming request status code is a valid redirect status code. * * @param statusCode the status code of the incoming request. * @return {@code true} if the request {@code statusCode} is a valid http redirect method, {@code false} otherwise. */ private boolean isValidRedirectStatusCode(int statusCode) { return statusCode == HttpURLConnection.HTTP_MOVED_TEMP || statusCode == HttpURLConnection.HTTP_MOVED_PERM || statusCode == PERMANENT_REDIRECT_STATUS_CODE || statusCode == TEMPORARY_REDIRECT_STATUS_CODE; } /** * Gets the redirect url from the response headers. * * @param headers the http response headers. * @param headerName the header name to look up value for. * @return the header value for the provided header name, {@code null} otherwise. */ String tryGetRedirectHeader(HttpHeaders headers, String headerName) { String headerValue = headers.getValue(headerName); if (CoreUtils.isNullOrEmpty(headerValue)) { logger.error("Redirect url was null for header name: {}, Request redirect was terminated", headerName); return null; } else { return headerValue; } } }
dns requires triple slash, I'll fix the comment
public static Channel createChannel(String name) throws SSLException { InstanceHandle<GrpcClientConfigProvider> instance = Arc.container().instance(GrpcClientConfigProvider.class); if (!instance.isAvailable()) { throw new IllegalStateException("Unable to find the GrpcClientConfigProvider"); } GrpcClientConfigProvider configProvider = instance.get(); GrpcClientConfiguration config = configProvider.getConfiguration(name); if (config == null && LaunchMode.current() == LaunchMode.TEST) { LOGGER.infof( "gRPC client %s created without configuration. We are assuming that it's created to test your gRPC services.", name); config = testConfig(configProvider.getServerConfiguration()); } if (config == null) { throw new IllegalStateException("gRPC client " + name + " is missing configuration."); } String host = config.host; int port = config.port; String nameResolver = config.nameResolver; String[] resolverSplit = nameResolver.split(":"); if ("dns".equalsIgnoreCase(resolverSplit[0])) { host = "/" + host; } String target = String.format("%s: boolean plainText = config.ssl.trustStore.isEmpty(); Optional<Boolean> usePlainText = config.plainText; if (usePlainText.isPresent()) { plainText = usePlainText.get(); } SslContext context = null; if (!plainText) { Path trustStorePath = config.ssl.trustStore.orElse(null); Path certificatePath = config.ssl.certificate.orElse(null); Path keyPath = config.ssl.key.orElse(null); SslContextBuilder sslContextBuilder = GrpcSslContexts.forClient(); if (trustStorePath != null) { try (InputStream stream = streamFor(trustStorePath, "trust store")) { sslContextBuilder.trustManager(stream); } catch (IOException e) { throw new UncheckedIOException("Configuring gRPC client trust store failed", e); } } if (certificatePath != null && keyPath != null) { try (InputStream certificate = streamFor(certificatePath, "certificate"); InputStream key = streamFor(keyPath, "key")) { sslContextBuilder.keyManager(certificate, key); } catch (IOException e) { throw new 
UncheckedIOException("Configuring gRPC client certificate failed", e); } } context = sslContextBuilder.build(); } String loadBalancingPolicy = config.loadBalancingPolicy; if ("stork".equalsIgnoreCase(nameResolver)) { loadBalancingPolicy = "stork"; } NettyChannelBuilder builder = NettyChannelBuilder .forTarget(target) .defaultLoadBalancingPolicy(loadBalancingPolicy) .flowControlWindow(config.flowControlWindow.orElse(DEFAULT_FLOW_CONTROL_WINDOW)) .keepAliveWithoutCalls(config.keepAliveWithoutCalls) .maxHedgedAttempts(config.maxHedgedAttempts) .maxRetryAttempts(config.maxRetryAttempts) .maxInboundMetadataSize(config.maxInboundMetadataSize.orElse(DEFAULT_MAX_HEADER_LIST_SIZE)) .maxInboundMessageSize(config.maxInboundMessageSize.orElse(DEFAULT_MAX_MESSAGE_SIZE)) .negotiationType(NegotiationType.valueOf(config.negotiationType.toUpperCase())); if (config.retry) { builder.enableRetry(); } else { builder.disableRetry(); } if (config.maxTraceEvents.isPresent()) { builder.maxTraceEvents(config.maxTraceEvents.getAsInt()); } Optional<String> userAgent = config.userAgent; if (userAgent.isPresent()) { builder.userAgent(userAgent.get()); } if (config.retryBufferSize.isPresent()) { builder.retryBufferSize(config.retryBufferSize.getAsLong()); } if (config.perRpcBufferLimit.isPresent()) { builder.perRpcBufferLimit(config.perRpcBufferLimit.getAsLong()); } Optional<String> overrideAuthority = config.overrideAuthority; if (overrideAuthority.isPresent()) { builder.overrideAuthority(overrideAuthority.get()); } Optional<Duration> keepAliveTime = config.keepAliveTime; if (keepAliveTime.isPresent()) { builder.keepAliveTime(keepAliveTime.get().toMillis(), TimeUnit.MILLISECONDS); } Optional<Duration> keepAliveTimeout = config.keepAliveTimeout; if (keepAliveTimeout.isPresent()) { builder.keepAliveTimeout(keepAliveTimeout.get().toMillis(), TimeUnit.MILLISECONDS); } Optional<Duration> idleTimeout = config.idleTimeout; if (idleTimeout.isPresent()) { 
builder.keepAliveTimeout(idleTimeout.get().toMillis(), TimeUnit.MILLISECONDS); } if (plainText) { builder.usePlaintext(); } if (context != null) { builder.sslContext(context); } InstanceHandle<GrpcClientInterceptorContainer> interceptors = Arc.container() .instance(GrpcClientInterceptorContainer.class); for (ClientInterceptor clientInterceptor : interceptors.get().getSortedInterceptors()) { builder.intercept(clientInterceptor); } return builder.build(); }
host = "/" + host;
public static Channel createChannel(String name) throws SSLException { InstanceHandle<GrpcClientConfigProvider> instance = Arc.container().instance(GrpcClientConfigProvider.class); if (!instance.isAvailable()) { throw new IllegalStateException("Unable to find the GrpcClientConfigProvider"); } GrpcClientConfigProvider configProvider = instance.get(); GrpcClientConfiguration config = configProvider.getConfiguration(name); if (config == null && LaunchMode.current() == LaunchMode.TEST) { LOGGER.infof( "gRPC client %s created without configuration. We are assuming that it's created to test your gRPC services.", name); config = testConfig(configProvider.getServerConfiguration()); } if (config == null) { throw new IllegalStateException("gRPC client " + name + " is missing configuration."); } String host = config.host; int port = config.port; String nameResolver = config.nameResolver; String[] resolverSplit = nameResolver.split(":"); if (GrpcClientConfiguration.DNS.equalsIgnoreCase(resolverSplit[0])) { host = "/" + host; } String target = String.format("%s: boolean plainText = config.ssl.trustStore.isEmpty(); Optional<Boolean> usePlainText = config.plainText; if (usePlainText.isPresent()) { plainText = usePlainText.get(); } SslContext context = null; if (!plainText) { Path trustStorePath = config.ssl.trustStore.orElse(null); Path certificatePath = config.ssl.certificate.orElse(null); Path keyPath = config.ssl.key.orElse(null); SslContextBuilder sslContextBuilder = GrpcSslContexts.forClient(); if (trustStorePath != null) { try (InputStream stream = streamFor(trustStorePath, "trust store")) { sslContextBuilder.trustManager(stream); } catch (IOException e) { throw new UncheckedIOException("Configuring gRPC client trust store failed", e); } } if (certificatePath != null && keyPath != null) { try (InputStream certificate = streamFor(certificatePath, "certificate"); InputStream key = streamFor(keyPath, "key")) { sslContextBuilder.keyManager(certificate, key); } catch 
(IOException e) { throw new UncheckedIOException("Configuring gRPC client certificate failed", e); } } context = sslContextBuilder.build(); } String loadBalancingPolicy = config.loadBalancingPolicy; if (Stork.STORK.equalsIgnoreCase(nameResolver)) { loadBalancingPolicy = Stork.STORK; } NettyChannelBuilder builder = NettyChannelBuilder .forTarget(target) .defaultLoadBalancingPolicy(loadBalancingPolicy) .flowControlWindow(config.flowControlWindow.orElse(DEFAULT_FLOW_CONTROL_WINDOW)) .keepAliveWithoutCalls(config.keepAliveWithoutCalls) .maxHedgedAttempts(config.maxHedgedAttempts) .maxRetryAttempts(config.maxRetryAttempts) .maxInboundMetadataSize(config.maxInboundMetadataSize.orElse(DEFAULT_MAX_HEADER_LIST_SIZE)) .maxInboundMessageSize(config.maxInboundMessageSize.orElse(DEFAULT_MAX_MESSAGE_SIZE)) .negotiationType(NegotiationType.valueOf(config.negotiationType.toUpperCase())); if (config.retry) { builder.enableRetry(); } else { builder.disableRetry(); } if (config.maxTraceEvents.isPresent()) { builder.maxTraceEvents(config.maxTraceEvents.getAsInt()); } Optional<String> userAgent = config.userAgent; if (userAgent.isPresent()) { builder.userAgent(userAgent.get()); } if (config.retryBufferSize.isPresent()) { builder.retryBufferSize(config.retryBufferSize.getAsLong()); } if (config.perRpcBufferLimit.isPresent()) { builder.perRpcBufferLimit(config.perRpcBufferLimit.getAsLong()); } Optional<String> overrideAuthority = config.overrideAuthority; if (overrideAuthority.isPresent()) { builder.overrideAuthority(overrideAuthority.get()); } Optional<Duration> keepAliveTime = config.keepAliveTime; if (keepAliveTime.isPresent()) { builder.keepAliveTime(keepAliveTime.get().toMillis(), TimeUnit.MILLISECONDS); } Optional<Duration> keepAliveTimeout = config.keepAliveTimeout; if (keepAliveTimeout.isPresent()) { builder.keepAliveTimeout(keepAliveTimeout.get().toMillis(), TimeUnit.MILLISECONDS); } Optional<Duration> idleTimeout = config.idleTimeout; if (idleTimeout.isPresent()) { 
builder.keepAliveTimeout(idleTimeout.get().toMillis(), TimeUnit.MILLISECONDS); } if (plainText) { builder.usePlaintext(); } if (context != null) { builder.sslContext(context); } InstanceHandle<GrpcClientInterceptorContainer> interceptors = Arc.container() .instance(GrpcClientInterceptorContainer.class); for (ClientInterceptor clientInterceptor : interceptors.get().getSortedInterceptors()) { builder.intercept(clientInterceptor); } return builder.build(); }
class Channels { private static final Logger LOGGER = Logger.getLogger(Channels.class.getName()); private Channels() { } private static GrpcClientConfiguration testConfig(GrpcServerConfiguration serverConfiguration) { GrpcClientConfiguration config = new GrpcClientConfiguration(); config.port = serverConfiguration.testPort; config.host = serverConfiguration.host; config.plainText = Optional.of(serverConfiguration.plainText); config.compression = Optional.empty(); config.flowControlWindow = OptionalInt.empty(); config.idleTimeout = Optional.empty(); config.keepAliveTime = Optional.empty(); config.keepAliveTimeout = Optional.empty(); config.loadBalancingPolicy = "pick_first"; config.maxHedgedAttempts = 5; config.maxInboundMessageSize = OptionalInt.empty(); config.maxInboundMetadataSize = OptionalInt.empty(); config.maxRetryAttempts = 0; config.maxTraceEvents = OptionalInt.empty(); config.nameResolver = "dns"; config.negotiationType = "PLAINTEXT"; config.overrideAuthority = Optional.empty(); config.perRpcBufferLimit = OptionalLong.empty(); config.retry = false; config.retryBufferSize = OptionalLong.empty(); config.ssl = new SslClientConfig(); config.ssl.key = Optional.empty(); config.ssl.certificate = Optional.empty(); config.ssl.trustStore = Optional.empty(); config.userAgent = Optional.empty(); if (serverConfiguration.ssl.certificate.isPresent() || serverConfiguration.ssl.keyStore.isPresent()) { LOGGER.warn("gRPC client created without configuration and the gRPC server is configured for SSL. 
" + "Configuring SSL for such clients is not supported."); } return config; } private static InputStream streamFor(Path path, String resourceName) { final InputStream resource = Thread.currentThread().getContextClassLoader() .getResourceAsStream(path.toString()); if (resource != null) { return resource; } else { try { return Files.newInputStream(path); } catch (IOException e) { throw new UncheckedIOException("Unable to read " + resourceName + " from " + path, e); } } } public static Channel retrieveChannel(String name) { InstanceHandle<Channel> instance = Arc.container().instance(Channel.class, GrpcClient.Literal.of(name)); if (!instance.isAvailable()) { throw new IllegalStateException("Unable to retrieve the gRPC Channel " + name); } return instance.get(); } public static class ChannelDestroyer implements BeanDestroyer<Channel> { @Override public void destroy(Channel instance, CreationalContext<Channel> creationalContext, Map<String, Object> params) { if (instance instanceof ManagedChannel) { ManagedChannel channel = (ManagedChannel) instance; LOGGER.info("Shutting down gRPC channel " + channel); channel.shutdownNow(); try { channel.awaitTermination(10, TimeUnit.SECONDS); } catch (InterruptedException e) { LOGGER.info("Unable to shutdown channel after 10 seconds"); Thread.currentThread().interrupt(); } } } } }
class Channels { private static final Logger LOGGER = Logger.getLogger(Channels.class.getName()); private Channels() { } private static GrpcClientConfiguration testConfig(GrpcServerConfiguration serverConfiguration) { GrpcClientConfiguration config = new GrpcClientConfiguration(); config.port = serverConfiguration.testPort; config.host = serverConfiguration.host; config.plainText = Optional.of(serverConfiguration.plainText); config.compression = Optional.empty(); config.flowControlWindow = OptionalInt.empty(); config.idleTimeout = Optional.empty(); config.keepAliveTime = Optional.empty(); config.keepAliveTimeout = Optional.empty(); config.loadBalancingPolicy = "pick_first"; config.maxHedgedAttempts = 5; config.maxInboundMessageSize = OptionalInt.empty(); config.maxInboundMetadataSize = OptionalInt.empty(); config.maxRetryAttempts = 0; config.maxTraceEvents = OptionalInt.empty(); config.nameResolver = GrpcClientConfiguration.DNS; config.negotiationType = "PLAINTEXT"; config.overrideAuthority = Optional.empty(); config.perRpcBufferLimit = OptionalLong.empty(); config.retry = false; config.retryBufferSize = OptionalLong.empty(); config.ssl = new SslClientConfig(); config.ssl.key = Optional.empty(); config.ssl.certificate = Optional.empty(); config.ssl.trustStore = Optional.empty(); config.userAgent = Optional.empty(); if (serverConfiguration.ssl.certificate.isPresent() || serverConfiguration.ssl.keyStore.isPresent()) { LOGGER.warn("gRPC client created without configuration and the gRPC server is configured for SSL. 
" + "Configuring SSL for such clients is not supported."); } return config; } private static InputStream streamFor(Path path, String resourceName) { final InputStream resource = Thread.currentThread().getContextClassLoader() .getResourceAsStream(path.toString()); if (resource != null) { return resource; } else { try { return Files.newInputStream(path); } catch (IOException e) { throw new UncheckedIOException("Unable to read " + resourceName + " from " + path, e); } } } public static Channel retrieveChannel(String name) { InstanceHandle<Channel> instance = Arc.container().instance(Channel.class, GrpcClient.Literal.of(name)); if (!instance.isAvailable()) { throw new IllegalStateException("Unable to retrieve the gRPC Channel " + name); } return instance.get(); } public static class ChannelDestroyer implements BeanDestroyer<Channel> { @Override public void destroy(Channel instance, CreationalContext<Channel> creationalContext, Map<String, Object> params) { if (instance instanceof ManagedChannel) { ManagedChannel channel = (ManagedChannel) instance; LOGGER.info("Shutting down gRPC channel " + channel); channel.shutdownNow(); try { channel.awaitTermination(10, TimeUnit.SECONDS); } catch (InterruptedException e) { LOGGER.info("Unable to shutdown channel after 10 seconds"); Thread.currentThread().interrupt(); } } } } }